Compare commits

...

55 commits

Author SHA1 Message Date
Rae Mac
4e304d11c6 attribute deletion test 2022-03-29 16:06:08 -07:00
Orion Henry
08e6a86f28 fmt 2022-03-29 12:07:59 -04:00
Orion Henry
979b9fd362 Merge branch 'experiment' into marks 2022-03-29 12:04:12 -04:00
Orion Henry
c149da3a6d attr bug 2022-03-26 13:31:39 -04:00
Orion Henry
af02ba6b86 0.0.23 - getChangeByHash 2022-03-23 09:49:06 -04:00
Orion Henry
657bd22d61 Merge branch 'experiment' into marks 2022-03-23 09:35:20 -04:00
Orion Henry
2663e0315c fix test 2022-03-22 13:38:46 -04:00
Orion Henry
bebd310ab6
Merge pull request #314 from automerge/list-changed
Example test for confusing behavior
2022-03-22 13:17:50 -04:00
Rae Mac
bc98b1ecc9 Example test for confusing behavior 2022-03-22 10:06:13 -07:00
Orion Henry
84619d8331 Merge branch 'changed_objs' into marks 2022-03-21 17:39:34 -04:00
Orion Henry
5d4e1f0c42 return touched objects from apply_changes 2022-03-21 17:36:11 -04:00
Orion Henry
25afa0b12b unmark() - 0.0.21 2022-03-21 13:36:01 -04:00
Orion Henry
0cf54c36a8 0.0.21 2022-03-17 19:15:21 -04:00
Orion Henry
99b1127f5c Merge branch 'experiment' into marks 2022-03-17 14:42:35 -04:00
Orion Henry
ae87d7bc00 v20 - object replacement char 2022-03-14 14:47:12 -04:00
Orion Henry
ce9771b29c
Merge pull request #306 from blaine/marks
Additional Attribution Tests
2022-03-10 19:41:16 -05:00
Blaine Cook
e00797c512 test for attribution correctly not surfacing temporary text (inserted and deleted after baseline) 2022-03-10 15:42:57 -08:00
Orion Henry
57a0f62b75 v0.0.19 wasm 2022-03-10 09:23:23 -05:00
Orion Henry
a0f78561c4 Merge branch 'paths' into marks 2022-03-09 19:28:10 -05:00
Andrew Jeffery
ff1a20c626 Document some sync api 2022-03-09 15:15:37 -05:00
Andrew Jeffery
b14d874dfc Move sync structs to module 2022-03-09 15:15:36 -05:00
Andrew Jeffery
aad4852e30 Misc API updates
- Commit now returns just a single hash rather than a vec. Since the
  change we create from committing has all of the heads as deps there
  can only be one hash/head after committing.
- Apply changes now takes a Vec rather than a slice. This avoids having
  to clone them inside.
- transact_with now passes the result of the closure to the commit
  options function
- Remove patch struct
- Change receive_sync_message to return a () instead of the
  `Option<Patch>`
- Change `Transaction*` structs to just `*` and use the transaction
  module
- Make CommitOptions fields public
2022-03-09 15:14:22 -05:00
Andrew Jeffery
63b4c96e71 Update save call 2022-03-09 15:14:22 -05:00
Andrew Jeffery
1b1d50dfaf Update delete nothing tests 2022-03-09 15:14:22 -05:00
Andrew Jeffery
d02737ad12 Fix del missing key in map 2022-03-09 15:14:22 -05:00
Andrew Jeffery
8f4c1fc209 Add failing tests for deleting nothing 2022-03-09 15:14:22 -05:00
Andrew Jeffery
304195d720 Fix typo on QueryResult 2022-03-09 15:14:21 -05:00
Orion Henry
b81e0fd619 update wasm test for set_object 2022-03-09 15:14:20 -05:00
Orion Henry
22b62b14b5 forgot to add the new file 2022-03-08 12:03:31 -05:00
Orion Henry
cbf1ac03b2 added attribute2() - janky version 2022-03-08 12:00:02 -05:00
Orion Henry
4094e82f04 rename tests to attribute 2022-03-07 13:50:05 -05:00
Orion Henry
42446fa5c2 blame -> attribute 2022-03-07 13:45:56 -05:00
Orion Henry
6d5f16c9cd Merge branch 'experiment' into marks 2022-03-04 17:26:14 -05:00
Orion Henry
dbbdd616fd clippy/fmt 2022-03-04 14:16:06 -05:00
Orion Henry
523af57a26 Merge branch 'experiment' into marks 2022-03-04 14:09:00 -05:00
Orion Henry
d195a81d49 v17 --release 2022-03-02 18:35:58 -05:00
Orion Henry
4c11c86532 v0.0.16 - properly blame items deleted by both 2022-03-02 10:27:54 -05:00
Orion Henry
42b6ffe9d8 v0.0.15 2022-03-02 09:33:04 -05:00
Orion Henry
b21b59e6a1 blame v0.1 2022-03-01 22:09:21 -05:00
Orion Henry
c1be06a6c7 blame wip 1 2022-02-28 19:02:36 -05:00
Orion Henry
e07211278f v0.0.14 2022-02-24 18:46:20 -05:00
Orion Henry
3c3f411329 update to new autotransaction api 2022-02-24 18:43:44 -05:00
Orion Henry
5aad691e31 Merge branch 'experiment' into marks 2022-02-24 18:10:19 -05:00
Orion Henry
872efc5756 v10 2022-02-24 17:41:55 -05:00
Orion Henry
e37395f975 make() defaults to text 2022-02-24 17:41:35 -05:00
Orion Henry
a84fa64554 change MAP,LIST,TEXT to be {},[],'' - allow recursion 2022-02-24 17:41:33 -05:00
Orion Henry
a37d4a6870 spans will now respect non-graphmem values 2022-02-24 16:41:01 -05:00
Blaine Cook
5eb5714c13 add failing test for marks handling in 3-way merge scenario 2022-02-24 16:24:17 -05:00
Blaine Cook
4f9b95b5b8 add test for merge behaviour of marks 2022-02-24 16:24:17 -05:00
Orion Henry
36b4f08d20 wasm to 0.0.7 2022-02-22 12:13:01 -05:00
Orion Henry
015e8ce465 choking on bad value function 2022-02-22 12:12:59 -05:00
Orion Henry
ea2f29d681 wasm to 0.0.6 2022-02-22 12:11:49 -05:00
Orion Henry
c8cd069e51 tweak files 2022-02-22 12:11:49 -05:00
Orion Henry
2ba2da95a8 attempt at new packaging 2022-02-22 12:11:49 -05:00
Orion Henry
561cad44e3 Revert "remove marks"
This reverts commit c8c695618b.
2022-02-22 12:11:49 -05:00
33 changed files with 1803 additions and 791 deletions

View file

@ -10,7 +10,7 @@
"mocha": "^9.1.1"
},
"dependencies": {
"automerge-wasm": "file:../automerge-wasm/dev",
"automerge-wasm": "file:../automerge-wasm",
"fast-sha256": "^1.3.0",
"pako": "^2.0.4",
"uuid": "^8.3"

View file

@ -1,5 +1,7 @@
/node_modules
/dev
/node
/web
/target
Cargo.lock
yarn.lock

View file

@ -3,7 +3,7 @@
name = "automerge-wasm"
description = "An js/wasm wrapper for the rust implementation of automerge-backend"
# repository = "https://github.com/automerge/automerge-rs"
version = "0.1.0"
version = "0.0.4"
authors = ["Alex Good <alex@memoryandthought.me>","Orion Henry <orion@inkandswitch.com>", "Martin Kleppmann"]
categories = ["wasm"]
readme = "README.md"
@ -40,10 +40,10 @@ version = "^0.2"
features = ["serde-serialize", "std"]
[package.metadata.wasm-pack.profile.release]
# wasm-opt = false
wasm-opt = true
[package.metadata.wasm-pack.profile.profiling]
wasm-opt = false
wasm-opt = true
# The `web-sys` crate allows you to interact with the various browser APIs,
# like the DOM.

View file

@ -2,695 +2,3 @@
This is a low-level Automerge library written in Rust that exports a JavaScript API via WASM. This low-level API is the underpinning of the `automerge-js` library, which reimplements the Automerge API on top of these functions.
### Static Functions
### Methods
`doc.clone(actor?: string)` : Make a complete copy of the document, optionally setting a new actor id
`doc.free()` : deallocate WASM memory associated with a document
```rust
#[wasm_bindgen]
pub fn free(self) {}
#[wasm_bindgen(js_name = pendingOps)]
pub fn pending_ops(&self) -> JsValue {
(self.0.pending_ops() as u32).into()
}
pub fn commit(&mut self, message: Option<String>, time: Option<f64>) -> Array {
let heads = self.0.commit(message, time.map(|n| n as i64));
let heads: Array = heads
.iter()
.map(|h| JsValue::from_str(&hex::encode(&h.0)))
.collect();
heads
}
pub fn rollback(&mut self) -> f64 {
self.0.rollback() as f64
}
pub fn keys(&mut self, obj: String, heads: Option<Array>) -> Result<Array, JsValue> {
let obj = self.import(obj)?;
let result = if let Some(heads) = get_heads(heads) {
self.0.keys_at(&obj, &heads)
} else {
self.0.keys(&obj)
}
.iter()
.map(|s| JsValue::from_str(s))
.collect();
Ok(result)
}
pub fn text(&mut self, obj: String, heads: Option<Array>) -> Result<String, JsValue> {
let obj = self.import(obj)?;
if let Some(heads) = get_heads(heads) {
self.0.text_at(&obj, &heads)
} else {
self.0.text(&obj)
}
.map_err(to_js_err)
}
pub fn splice(
&mut self,
obj: String,
start: f64,
delete_count: f64,
text: JsValue,
) -> Result<Option<Array>, JsValue> {
let obj = self.import(obj)?;
let start = start as usize;
let delete_count = delete_count as usize;
let mut vals = vec![];
if let Some(t) = text.as_string() {
self.0
.splice_text(&obj, start, delete_count, &t)
.map_err(to_js_err)?;
Ok(None)
} else {
if let Ok(array) = text.dyn_into::<Array>() {
for i in array.iter() {
if let Ok(array) = i.clone().dyn_into::<Array>() {
let value = array.get(1);
let datatype = array.get(2);
let value = self.import_value(value, datatype.as_string())?;
vals.push(value);
} else {
let value = self.import_value(i, None)?;
vals.push(value);
}
}
}
let result = self
.0
.splice(&obj, start, delete_count, vals)
.map_err(to_js_err)?;
if result.is_empty() {
Ok(None)
} else {
let result: Array = result
.iter()
.map(|r| JsValue::from(r.to_string()))
.collect();
Ok(result.into())
}
}
}
pub fn push(
&mut self,
obj: String,
value: JsValue,
datatype: Option<String>,
) -> Result<Option<String>, JsValue> {
let obj = self.import(obj)?;
let value = self.import_value(value, datatype)?;
let index = self.0.length(&obj);
let opid = self.0.insert(&obj, index, value).map_err(to_js_err)?;
Ok(opid.map(|id| id.to_string()))
}
pub fn insert(
&mut self,
obj: String,
index: f64,
value: JsValue,
datatype: Option<String>,
) -> Result<Option<String>, JsValue> {
let obj = self.import(obj)?;
let index = index as f64;
let value = self.import_value(value, datatype)?;
let opid = self
.0
.insert(&obj, index as usize, value)
.map_err(to_js_err)?;
Ok(opid.map(|id| id.to_string()))
}
pub fn set(
&mut self,
obj: String,
prop: JsValue,
value: JsValue,
datatype: Option<String>,
) -> Result<Option<String>, JsValue> {
let obj = self.import(obj)?;
let prop = self.import_prop(prop)?;
let value = self.import_value(value, datatype)?;
let opid = self.0.set(&obj, prop, value).map_err(to_js_err)?;
Ok(opid.map(|id| id.to_string()))
}
pub fn make(
&mut self,
obj: String,
prop: JsValue,
value: JsValue,
) -> Result<String, JsValue> {
let obj = self.import(obj)?;
let prop = self.import_prop(prop)?;
let value = self.import_value(value, None)?;
if value.is_object() {
let opid = self.0.set(&obj, prop, value).map_err(to_js_err)?;
Ok(opid.unwrap().to_string())
} else {
Err("invalid object type".into())
}
}
pub fn inc(&mut self, obj: String, prop: JsValue, value: JsValue) -> Result<(), JsValue> {
let obj = self.import(obj)?;
let prop = self.import_prop(prop)?;
let value: f64 = value
.as_f64()
.ok_or("inc needs a numberic value")
.map_err(to_js_err)?;
self.0.inc(&obj, prop, value as i64).map_err(to_js_err)?;
Ok(())
}
pub fn value(
&mut self,
obj: String,
prop: JsValue,
heads: Option<Array>,
) -> Result<Array, JsValue> {
let obj = self.import(obj)?;
let result = Array::new();
let prop = to_prop(prop);
let heads = get_heads(heads);
if let Ok(prop) = prop {
let value = if let Some(h) = heads {
self.0.value_at(&obj, prop, &h)
} else {
self.0.value(&obj, prop)
}
.map_err(to_js_err)?;
match value {
Some((Value::Object(obj_type), obj_id)) => {
result.push(&obj_type.to_string().into());
result.push(&obj_id.to_string().into());
}
Some((Value::Scalar(value), _)) => {
result.push(&datatype(&value).into());
result.push(&ScalarValue(value).into());
}
None => {}
}
}
Ok(result)
}
pub fn values(
&mut self,
obj: String,
arg: JsValue,
heads: Option<Array>,
) -> Result<Array, JsValue> {
let obj = self.import(obj)?;
let result = Array::new();
let prop = to_prop(arg);
if let Ok(prop) = prop {
let values = if let Some(heads) = get_heads(heads) {
self.0.values_at(&obj, prop, &heads)
} else {
self.0.values(&obj, prop)
}
.map_err(to_js_err)?;
for value in values {
match value {
(Value::Object(obj_type), obj_id) => {
let sub = Array::new();
sub.push(&obj_type.to_string().into());
sub.push(&obj_id.to_string().into());
result.push(&sub.into());
}
(Value::Scalar(value), id) => {
let sub = Array::new();
sub.push(&datatype(&value).into());
sub.push(&ScalarValue(value).into());
sub.push(&id.to_string().into());
result.push(&sub.into());
}
}
}
}
Ok(result)
}
pub fn length(&mut self, obj: String, heads: Option<Array>) -> Result<f64, JsValue> {
let obj = self.import(obj)?;
if let Some(heads) = get_heads(heads) {
Ok(self.0.length_at(&obj, &heads) as f64)
} else {
Ok(self.0.length(&obj) as f64)
}
}
pub fn del(&mut self, obj: String, prop: JsValue) -> Result<(), JsValue> {
let obj = self.import(obj)?;
let prop = to_prop(prop)?;
self.0.del(&obj, prop).map_err(to_js_err)?;
Ok(())
}
pub fn mark(
&mut self,
obj: JsValue,
range: JsValue,
name: JsValue,
value: JsValue,
datatype: JsValue,
) -> Result<(), JsValue> {
let obj = self.import(obj)?;
let re = Regex::new(r"([\[\(])(\d+)\.\.(\d+)([\)\]])").unwrap();
let range = range.as_string().ok_or("range must be a string")?;
let cap = re.captures_iter(&range).next().ok_or("range must be in the form of (start..end] or [start..end) etc... () for sticky, [] for normal")?;
let start: usize = cap[2].parse().map_err(|_| to_js_err("invalid start"))?;
let end: usize = cap[3].parse().map_err(|_| to_js_err("invalid end"))?;
let start_sticky = &cap[1] == "(";
let end_sticky = &cap[4] == ")";
let name = name
.as_string()
.ok_or("invalid mark name")
.map_err(to_js_err)?;
let value = self.import_scalar(&value, datatype.as_string())?;
self.0
.mark(&obj, start, start_sticky, end, end_sticky, &name, value)
.map_err(to_js_err)?;
Ok(())
}
pub fn spans(&mut self, obj: JsValue) -> Result<JsValue, JsValue> {
let obj = self.import(obj)?;
let text = self.0.text(&obj).map_err(to_js_err)?;
let spans = self.0.spans(&obj).map_err(to_js_err)?;
let mut last_pos = 0;
let result = Array::new();
for s in spans {
let marks = Array::new();
for m in s.marks {
let mark = Array::new();
mark.push(&m.0.into());
mark.push(&datatype(&m.1).into());
mark.push(&ScalarValue(m.1).into());
marks.push(&mark.into());
}
let text_span = &text[last_pos..s.pos]; //.slice(last_pos, s.pos);
if text_span.len() > 0 {
result.push(&text_span.into());
}
result.push(&marks);
last_pos = s.pos;
//let obj = Object::new().into();
//js_set(&obj, "pos", s.pos as i32)?;
//js_set(&obj, "marks", marks)?;
//result.push(&obj.into());
}
let text_span = &text[last_pos..];
if text_span.len() > 0 {
result.push(&text_span.into());
}
Ok(result.into())
}
pub fn save(&mut self) -> Result<Uint8Array, JsValue> {
self.0
.save()
.map(|v| Uint8Array::from(v.as_slice()))
.map_err(to_js_err)
}
#[wasm_bindgen(js_name = saveIncremental)]
pub fn save_incremental(&mut self) -> Uint8Array {
let bytes = self.0.save_incremental();
Uint8Array::from(bytes.as_slice())
}
#[wasm_bindgen(js_name = loadIncremental)]
pub fn load_incremental(&mut self, data: Uint8Array) -> Result<f64, JsValue> {
let data = data.to_vec();
let len = self.0.load_incremental(&data).map_err(to_js_err)?;
Ok(len as f64)
}
#[wasm_bindgen(js_name = applyChanges)]
pub fn apply_changes(&mut self, changes: JsValue) -> Result<(), JsValue> {
let changes: Vec<_> = JS(changes).try_into()?;
self.0.apply_changes(&changes).map_err(to_js_err)?;
Ok(())
}
#[wasm_bindgen(js_name = getChanges)]
pub fn get_changes(&mut self, have_deps: JsValue) -> Result<Array, JsValue> {
let deps: Vec<_> = JS(have_deps).try_into()?;
let changes = self.0.get_changes(&deps);
let changes: Array = changes
.iter()
.map(|c| Uint8Array::from(c.raw_bytes()))
.collect();
Ok(changes)
}
#[wasm_bindgen(js_name = getChangesAdded)]
pub fn get_changes_added(&mut self, other: &Automerge) -> Result<Array, JsValue> {
let changes = self.0.get_changes_added(&other.0);
let changes: Array = changes
.iter()
.map(|c| Uint8Array::from(c.raw_bytes()))
.collect();
Ok(changes)
}
#[wasm_bindgen(js_name = getHeads)]
pub fn get_heads(&mut self) -> Array {
let heads = self.0.get_heads();
let heads: Array = heads
.iter()
.map(|h| JsValue::from_str(&hex::encode(&h.0)))
.collect();
heads
}
#[wasm_bindgen(js_name = getActorId)]
pub fn get_actor_id(&mut self) -> String {
let actor = self.0.get_actor();
actor.to_string()
}
#[wasm_bindgen(js_name = getLastLocalChange)]
pub fn get_last_local_change(&mut self) -> Result<Option<Uint8Array>, JsValue> {
if let Some(change) = self.0.get_last_local_change() {
Ok(Some(Uint8Array::from(change.raw_bytes())))
} else {
Ok(None)
}
}
pub fn dump(&self) {
self.0.dump()
}
#[wasm_bindgen(js_name = getMissingDeps)]
pub fn get_missing_deps(&mut self, heads: Option<Array>) -> Result<Array, JsValue> {
let heads = get_heads(heads).unwrap_or_default();
let deps = self.0.get_missing_deps(&heads);
let deps: Array = deps
.iter()
.map(|h| JsValue::from_str(&hex::encode(&h.0)))
.collect();
Ok(deps)
}
#[wasm_bindgen(js_name = receiveSyncMessage)]
pub fn receive_sync_message(
&mut self,
state: &mut SyncState,
message: Uint8Array,
) -> Result<(), JsValue> {
let message = message.to_vec();
let message = am::SyncMessage::decode(message.as_slice()).map_err(to_js_err)?;
self.0
.receive_sync_message(&mut state.0, message)
.map_err(to_js_err)?;
Ok(())
}
#[wasm_bindgen(js_name = generateSyncMessage)]
pub fn generate_sync_message(&mut self, state: &mut SyncState) -> Result<JsValue, JsValue> {
if let Some(message) = self.0.generate_sync_message(&mut state.0) {
Ok(Uint8Array::from(message.encode().map_err(to_js_err)?.as_slice()).into())
} else {
Ok(JsValue::null())
}
}
#[wasm_bindgen(js_name = toJS)]
pub fn to_js(&self) -> JsValue {
map_to_js(&self.0, ROOT)
}
fn import(&self, id: String) -> Result<ObjId, JsValue> {
self.0.import(&id).map_err(to_js_err)
}
fn import_prop(&mut self, prop: JsValue) -> Result<Prop, JsValue> {
if let Some(s) = prop.as_string() {
Ok(s.into())
} else if let Some(n) = prop.as_f64() {
Ok((n as usize).into())
} else {
Err(format!("invalid prop {:?}", prop).into())
}
}
fn import_scalar(
&mut self,
value: &JsValue,
datatype: Option<String>,
) -> Result<am::ScalarValue, JsValue> {
match datatype.as_deref() {
Some("boolean") => value
.as_bool()
.ok_or_else(|| "value must be a bool".into())
.map(am::ScalarValue::Boolean),
Some("int") => value
.as_f64()
.ok_or_else(|| "value must be a number".into())
.map(|v| am::ScalarValue::Int(v as i64)),
Some("uint") => value
.as_f64()
.ok_or_else(|| "value must be a number".into())
.map(|v| am::ScalarValue::Uint(v as u64)),
Some("f64") => value
.as_f64()
.ok_or_else(|| "value must be a number".into())
.map(am::ScalarValue::F64),
Some("bytes") => Ok(am::ScalarValue::Bytes(
value.clone().dyn_into::<Uint8Array>().unwrap().to_vec(),
)),
Some("counter") => value
.as_f64()
.ok_or_else(|| "value must be a number".into())
.map(|v| am::ScalarValue::counter(v as i64)),
Some("timestamp") => value
.as_f64()
.ok_or_else(|| "value must be a number".into())
.map(|v| am::ScalarValue::Timestamp(v as i64)),
/*
Some("bytes") => unimplemented!(),
Some("cursor") => unimplemented!(),
*/
Some("null") => Ok(am::ScalarValue::Null),
Some(_) => Err(format!("unknown datatype {:?}", datatype).into()),
None => {
if value.is_null() {
Ok(am::ScalarValue::Null)
} else if let Some(b) = value.as_bool() {
Ok(am::ScalarValue::Boolean(b))
} else if let Some(s) = value.as_string() {
// FIXME - we need to detect str vs int vs float vs bool here :/
Ok(am::ScalarValue::Str(s.into()))
} else if let Some(n) = value.as_f64() {
if (n.round() - n).abs() < f64::EPSILON {
Ok(am::ScalarValue::Int(n as i64))
} else {
Ok(am::ScalarValue::F64(n))
}
// } else if let Some(o) = to_objtype(&value) {
// Ok(o.into())
} else if let Ok(d) = value.clone().dyn_into::<js_sys::Date>() {
Ok(am::ScalarValue::Timestamp(d.get_time() as i64))
} else if let Ok(o) = &value.clone().dyn_into::<Uint8Array>() {
Ok(am::ScalarValue::Bytes(o.to_vec()))
} else {
Err("value is invalid".into())
}
}
}
}
fn import_value(&mut self, value: JsValue, datatype: Option<String>) -> Result<Value, JsValue> {
match self.import_scalar(&value, datatype) {
Ok(val) => Ok(val.into()),
Err(err) => {
if let Some(o) = to_objtype(&value) {
Ok(o.into())
} else {
Err(err)
}
}
}
/*
match datatype.as_deref() {
Some("boolean") => value
.as_bool()
.ok_or_else(|| "value must be a bool".into())
.map(|v| am::ScalarValue::Boolean(v).into()),
Some("int") => value
.as_f64()
.ok_or_else(|| "value must be a number".into())
.map(|v| am::ScalarValue::Int(v as i64).into()),
Some("uint") => value
.as_f64()
.ok_or_else(|| "value must be a number".into())
.map(|v| am::ScalarValue::Uint(v as u64).into()),
Some("f64") => value
.as_f64()
.ok_or_else(|| "value must be a number".into())
.map(|n| am::ScalarValue::F64(n).into()),
Some("bytes") => {
Ok(am::ScalarValue::Bytes(value.dyn_into::<Uint8Array>().unwrap().to_vec()).into())
}
Some("counter") => value
.as_f64()
.ok_or_else(|| "value must be a number".into())
.map(|v| am::ScalarValue::counter(v as i64).into()),
Some("timestamp") => value
.as_f64()
.ok_or_else(|| "value must be a number".into())
.map(|v| am::ScalarValue::Timestamp(v as i64).into()),
Some("null") => Ok(am::ScalarValue::Null.into()),
Some(_) => Err(format!("unknown datatype {:?}", datatype).into()),
None => {
if value.is_null() {
Ok(am::ScalarValue::Null.into())
} else if let Some(b) = value.as_bool() {
Ok(am::ScalarValue::Boolean(b).into())
} else if let Some(s) = value.as_string() {
// FIXME - we need to detect str vs int vs float vs bool here :/
Ok(am::ScalarValue::Str(s.into()).into())
} else if let Some(n) = value.as_f64() {
if (n.round() - n).abs() < f64::EPSILON {
Ok(am::ScalarValue::Int(n as i64).into())
} else {
Ok(am::ScalarValue::F64(n).into())
}
} else if let Some(o) = to_objtype(&value) {
Ok(o.into())
} else if let Ok(d) = value.clone().dyn_into::<js_sys::Date>() {
Ok(am::ScalarValue::Timestamp(d.get_time() as i64).into())
} else if let Ok(o) = &value.dyn_into::<Uint8Array>() {
Ok(am::ScalarValue::Bytes(o.to_vec()).into())
} else {
Err("value is invalid".into())
}
}
}
*/
}
}
#[wasm_bindgen(js_name = create)]
pub fn init(actor: Option<String>) -> Result<Automerge, JsValue> {
console_error_panic_hook::set_once();
Automerge::new(actor)
}
#[wasm_bindgen(js_name = loadDoc)]
pub fn load(data: Uint8Array, actor: Option<String>) -> Result<Automerge, JsValue> {
let data = data.to_vec();
let mut automerge = am::Automerge::load(&data).map_err(to_js_err)?;
if let Some(s) = actor {
let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec());
automerge.set_actor(actor)
}
Ok(Automerge(automerge))
}
#[wasm_bindgen(js_name = encodeChange)]
pub fn encode_change(change: JsValue) -> Result<Uint8Array, JsValue> {
let change: am::ExpandedChange = change.into_serde().map_err(to_js_err)?;
let change: Change = change.into();
Ok(Uint8Array::from(change.raw_bytes()))
}
#[wasm_bindgen(js_name = decodeChange)]
pub fn decode_change(change: Uint8Array) -> Result<JsValue, JsValue> {
let change = Change::from_bytes(change.to_vec()).map_err(to_js_err)?;
let change: am::ExpandedChange = change.decode();
JsValue::from_serde(&change).map_err(to_js_err)
}
#[wasm_bindgen(js_name = initSyncState)]
pub fn init_sync_state() -> SyncState {
SyncState(am::SyncState::new())
}
// this is needed to be compatible with the automerge-js api
#[wasm_bindgen(js_name = importSyncState)]
pub fn import_sync_state(state: JsValue) -> Result<SyncState, JsValue> {
Ok(SyncState(JS(state).try_into()?))
}
// this is needed to be compatible with the automerge-js api
#[wasm_bindgen(js_name = exportSyncState)]
pub fn export_sync_state(state: SyncState) -> JsValue {
JS::from(state.0).into()
}
#[wasm_bindgen(js_name = encodeSyncMessage)]
pub fn encode_sync_message(message: JsValue) -> Result<Uint8Array, JsValue> {
let heads = js_get(&message, "heads")?.try_into()?;
let need = js_get(&message, "need")?.try_into()?;
let changes = js_get(&message, "changes")?.try_into()?;
let have = js_get(&message, "have")?.try_into()?;
Ok(Uint8Array::from(
am::SyncMessage {
heads,
need,
have,
changes,
}
.encode()
.unwrap()
.as_slice(),
))
}
#[wasm_bindgen(js_name = decodeSyncMessage)]
pub fn decode_sync_message(msg: Uint8Array) -> Result<JsValue, JsValue> {
let data = msg.to_vec();
let msg = am::SyncMessage::decode(&data).map_err(to_js_err)?;
let heads = AR::from(msg.heads.as_slice());
let need = AR::from(msg.need.as_slice());
let changes = AR::from(msg.changes.as_slice());
let have = AR::from(msg.have.as_slice());
let obj = Object::new().into();
js_set(&obj, "heads", heads)?;
js_set(&obj, "need", need)?;
js_set(&obj, "have", have)?;
js_set(&obj, "changes", changes)?;
Ok(obj)
}
#[wasm_bindgen(js_name = encodeSyncState)]
pub fn encode_sync_state(state: SyncState) -> Result<Uint8Array, JsValue> {
let state = state.0;
Ok(Uint8Array::from(
state.encode().map_err(to_js_err)?.as_slice(),
))
}
#[wasm_bindgen(js_name = decodeSyncState)]
pub fn decode_sync_state(data: Uint8Array) -> Result<SyncState, JsValue> {
SyncState::decode(data)
}
#[wasm_bindgen(js_name = MAP)]
pub struct Map {}
#[wasm_bindgen(js_name = LIST)]
pub struct List {}
#[wasm_bindgen(js_name = TEXT)]
pub struct Text {}
#[wasm_bindgen(js_name = TABLE)]
pub struct Table {}
```
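
For orientation, the low-level API described in the README above is the same surface exercised by the TypeScript tests further down in this diff. A condensed usage sketch, importing `create` from the in-repo entry point `'..'` exactly as those tests do:

```typescript
import { create } from '..'   // in-repo entry point, as used by the test files below

const doc = create()
const text = doc.set_object("_root", "notes", "")   // create a text object under the root map
doc.splice(text, 0, 0, "hello world")               // insert characters
doc.mark(text, "[0..5]", "bold", true)              // mark a range; [..] = normal ends, (..) = sticky ends
console.log(doc.text(text))                         // "hello world"
console.log(doc.spans(text))                        // text runs interleaved with the marks active over them
doc.free()                                          // release the WASM memory held by the document
```

As the assertions in the marks tests show, `spans()` returns an array that alternates between arrays of active marks and the text runs they cover.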

File diff suppressed because one or more lines are too long

View file

@ -62,6 +62,23 @@ export type DecodedChange = {
ops: Op[]
}
export type ChangeSetAddition = {
actor: string,
start: number,
end: number,
}
export type ChangeSetDeletion = {
actor: string,
pos: number,
val: string
}
export type ChangeSet = {
add: ChangeSetAddition[],
del: ChangeSetDeletion[]
}
export type Op = {
action: string,
obj: ObjID,
@ -102,9 +119,18 @@ export class Automerge {
length(obj: ObjID, heads?: Heads): number;
materialize(obj?: ObjID): any;
// experimental spans api - unstable!
mark(obj: ObjID, name: string, range: string, value: Value, datatype?: Datatype): void;
unmark(obj: ObjID, mark: ObjID): void;
spans(obj: ObjID): any;
raw_spans(obj: ObjID): any;
blame(obj: ObjID, baseline: Heads, changeset: Heads[]): ChangeSet[];
attribute(obj: ObjID, baseline: Heads, changeset: Heads[]): ChangeSet[];
attribute2(obj: ObjID, baseline: Heads, changeset: Heads[]): ChangeSet[];
// transactions
commit(message?: string, time?: number): Heads;
merge(other: Automerge): Heads;
merge(other: Automerge): ObjID[];
getActorId(): Actor;
pendingOps(): number;
rollback(): number;
@ -112,14 +138,14 @@ export class Automerge {
// save and load to local store
save(): Uint8Array;
saveIncremental(): Uint8Array;
loadIncremental(data: Uint8Array): number;
loadIncremental(data: Uint8Array): ObjID[];
// sync over network
receiveSyncMessage(state: SyncState, message: SyncMessage): void;
receiveSyncMessage(state: SyncState, message: SyncMessage): ObjID[];
generateSyncMessage(state: SyncState): SyncMessage | null;
// low level change functions
applyChanges(changes: Change[]): void;
applyChanges(changes: Change[]): ObjID[];
getChanges(have_deps: Heads): Change[];
getChangeByHash(hash: Hash): Change | null;
getChangesAdded(other: Automerge): Change[];
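
The changed return types above come from the touched-objects work on this branch ("return touched objects from apply_changes"): `merge`, `applyChanges`, `loadIncremental`, and `receiveSyncMessage` now report which objects the incoming changes modified rather than heads or byte counts. A minimal sketch of consuming that, using the same actor ids the tests use; the logged value is the kind of result the tests assert:

```typescript
import { create } from '..'

const doc1 = create("aaaa")
const text = doc1.set_object("_root", "notes", "hello")

const doc2 = doc1.fork("bbbb")
doc2.splice(text, 5, 0, " world")

// applyChanges() now returns the ObjIDs touched by the incoming changes
// (e.g. [ "1@aaaa" ] for the text object above), so a caller can refresh
// only those objects instead of rereading the whole document.
const touched = doc1.applyChanges(doc2.getChanges(doc1.getHeads()))
console.log(touched)
```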

View file

@ -4,24 +4,27 @@
"Alex Good <alex@memoryandthought.me>",
"Martin Kleppmann"
],
"name": "automerge-wasm",
"name": "automerge-wasm-pack",
"description": "wasm-bindgen bindings to the automerge rust implementation",
"version": "0.0.1",
"version": "0.0.23",
"license": "MIT",
"files": [
"README.md",
"LICENSE",
"package.json",
"automerge_wasm_bg.wasm",
"automerge_wasm.js"
"index.d.ts",
"node/index.js",
"node/index_bg.wasm",
"web/index.js",
"web/index_bg.wasm"
],
"module": "./pkg/index.js",
"main": "./dev/index.js",
"types": "index.d.ts",
"module": "./web/index.js",
"main": "./node/index.js",
"scripts": {
"build": "rimraf ./dev && wasm-pack build --target nodejs --dev --out-name index -d dev && cp index.d.ts dev",
"release": "rimraf ./dev && wasm-pack build --target nodejs --release --out-name index -d dev && cp index.d.ts dev",
"pkg": "rimraf ./pkg && wasm-pack build --target web --release --out-name index -d pkg && cp index.d.ts pkg && cd pkg && yarn pack && mv automerge-wasm*tgz ..",
"prof": "rimraf ./dev && wasm-pack build --target nodejs --profiling --out-name index -d dev",
"build": "rimraf ./node && wasm-pack build --target nodejs --dev --out-name index -d node && cp index.d.ts node",
"release-w": "rimraf ./web && wasm-pack build --target web --release --out-name index -d web && cp index.d.ts web",
"release-n": "rimraf ./node && wasm-pack build --target nodejs --release --out-name index -d node && cp index.d.ts node",
"release": "yarn release-w && yarn release-n",
"test": "yarn build && ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts"
},
"dependencies": {},

View file

@ -329,6 +329,15 @@ pub(crate) fn get_heads(heads: Option<Array>) -> Option<Vec<ChangeHash>> {
heads.ok()
}
pub(crate) fn get_js_heads(heads: JsValue) -> Result<Vec<ChangeHash>, JsValue> {
let heads = heads.dyn_into::<Array>()?;
heads
.iter()
.map(|j| j.into_serde())
.collect::<Result<Vec<_>, _>>()
.map_err(to_js_err)
}
pub(crate) fn map_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue {
let keys = doc.keys(obj);
let map = Object::new();

View file

@ -4,6 +4,7 @@ use am::transaction::Transactable;
use automerge as am;
use automerge::{Change, ObjId, Prop, Value, ROOT};
use js_sys::{Array, Object, Uint8Array};
use regex::Regex;
use std::convert::TryInto;
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsCast;
@ -13,7 +14,8 @@ mod sync;
mod value;
use interop::{
get_heads, js_get, js_set, list_to_js, map_to_js, to_js_err, to_objtype, to_prop, AR, JS,
get_heads, get_js_heads, js_get, js_set, list_to_js, map_to_js, to_js_err, to_objtype, to_prop,
AR, JS,
};
use sync::SyncState;
use value::{datatype, ScalarValue};
@ -89,12 +91,9 @@ impl Automerge {
}
pub fn merge(&mut self, other: &mut Automerge) -> Result<Array, JsValue> {
let heads = self.0.merge(&mut other.0)?;
let heads: Array = heads
.iter()
.map(|h| JsValue::from_str(&hex::encode(&h.0)))
.collect();
Ok(heads)
let objs = self.0.merge(&mut other.0)?;
let objs: Array = objs.iter().map(|o| JsValue::from(o.to_string())).collect();
Ok(objs)
}
pub fn rollback(&mut self) -> f64 {
@ -217,6 +216,18 @@ impl Automerge {
Ok(())
}
pub fn make(
&mut self,
obj: JsValue,
prop: JsValue,
value: JsValue,
_datatype: JsValue,
) -> Result<JsValue, JsValue> {
// remove this
am::log!("doc.make() is depricated - please use doc.set_object() or doc.insert_object()");
self.set_object(obj, prop, value)
}
pub fn set_object(
&mut self,
obj: JsValue,
@ -354,6 +365,209 @@ impl Automerge {
Ok(())
}
pub fn mark(
&mut self,
obj: JsValue,
range: JsValue,
name: JsValue,
value: JsValue,
datatype: JsValue,
) -> Result<(), JsValue> {
let obj = self.import(obj)?;
let re = Regex::new(r"([\[\(])(\d+)\.\.(\d+)([\)\]])").unwrap();
let range = range.as_string().ok_or("range must be a string")?;
let cap = re.captures_iter(&range).next().ok_or("range must be in the form of (start..end] or [start..end) etc... () for sticky, [] for normal")?;
let start: usize = cap[2].parse().map_err(|_| to_js_err("invalid start"))?;
let end: usize = cap[3].parse().map_err(|_| to_js_err("invalid end"))?;
let start_sticky = &cap[1] == "(";
let end_sticky = &cap[4] == ")";
let name = name
.as_string()
.ok_or("invalid mark name")
.map_err(to_js_err)?;
let value = self
.import_scalar(&value, &datatype.as_string())
.ok_or_else(|| to_js_err("invalid value"))?;
self.0
.mark(&obj, start, start_sticky, end, end_sticky, &name, value)
.map_err(to_js_err)?;
Ok(())
}
pub fn unmark(&mut self, obj: JsValue, mark: JsValue) -> Result<(), JsValue> {
let obj = self.import(obj)?;
let mark = self.import(mark)?;
self.0.unmark(&obj, &mark).map_err(to_js_err)?;
Ok(())
}
pub fn spans(&mut self, obj: JsValue) -> Result<JsValue, JsValue> {
let obj = self.import(obj)?;
let text = self.0.list(&obj).map_err(to_js_err)?;
let spans = self.0.spans(&obj).map_err(to_js_err)?;
let mut last_pos = 0;
let result = Array::new();
for s in spans {
let marks = Array::new();
for m in s.marks {
let mark = Array::new();
mark.push(&m.0.into());
mark.push(&datatype(&m.1).into());
mark.push(&ScalarValue(m.1).into());
marks.push(&mark.into());
}
let text_span = &text[last_pos..s.pos]; //.slice(last_pos, s.pos);
if !text_span.is_empty() {
let t: String = text_span
.iter()
.filter_map(|(v, _)| v.as_string())
.collect();
result.push(&t.into());
}
result.push(&marks);
last_pos = s.pos;
//let obj = Object::new().into();
//js_set(&obj, "pos", s.pos as i32)?;
//js_set(&obj, "marks", marks)?;
//result.push(&obj.into());
}
let text_span = &text[last_pos..];
if !text_span.is_empty() {
let t: String = text_span
.iter()
.filter_map(|(v, _)| v.as_string())
.collect();
result.push(&t.into());
}
Ok(result.into())
}
pub fn raw_spans(&mut self, obj: JsValue) -> Result<Array, JsValue> {
let obj = self.import(obj)?;
let spans = self.0.raw_spans(&obj).map_err(to_js_err)?;
let result = Array::new();
for s in spans {
result.push(&JsValue::from_serde(&s).map_err(to_js_err)?);
}
Ok(result)
}
pub fn blame(
&mut self,
obj: JsValue,
baseline: JsValue,
change_sets: JsValue,
) -> Result<Array, JsValue> {
am::log!("doc.blame() is depricated - please use doc.attribute()");
self.attribute(obj, baseline, change_sets)
}
pub fn attribute(
&mut self,
obj: JsValue,
baseline: JsValue,
change_sets: JsValue,
) -> Result<Array, JsValue> {
let obj = self.import(obj)?;
let baseline = get_js_heads(baseline)?;
let change_sets = change_sets.dyn_into::<Array>()?;
let change_sets = change_sets
.iter()
.map(get_js_heads)
.collect::<Result<Vec<_>, _>>()?;
let result = self.0.attribute(&obj, &baseline, &change_sets)?;
let result = result
.into_iter()
.map(|cs| {
let add = cs
.add
.iter()
.map::<Result<JsValue, JsValue>, _>(|range| {
let r = Object::new();
js_set(&r, "start", range.start as f64)?;
js_set(&r, "end", range.end as f64)?;
Ok(JsValue::from(&r))
})
.collect::<Result<Vec<JsValue>, JsValue>>()?
.iter()
.collect::<Array>();
let del = cs
.del
.iter()
.map::<Result<JsValue, JsValue>, _>(|d| {
let r = Object::new();
js_set(&r, "pos", d.0 as f64)?;
js_set(&r, "val", &d.1)?;
Ok(JsValue::from(&r))
})
.collect::<Result<Vec<JsValue>, JsValue>>()?
.iter()
.collect::<Array>();
let obj = Object::new();
js_set(&obj, "add", add)?;
js_set(&obj, "del", del)?;
Ok(obj.into())
})
.collect::<Result<Vec<JsValue>, JsValue>>()?
.iter()
.collect::<Array>();
Ok(result)
}
pub fn attribute2(
&mut self,
obj: JsValue,
baseline: JsValue,
change_sets: JsValue,
) -> Result<Array, JsValue> {
let obj = self.import(obj)?;
let baseline = get_js_heads(baseline)?;
let change_sets = change_sets.dyn_into::<Array>()?;
let change_sets = change_sets
.iter()
.map(get_js_heads)
.collect::<Result<Vec<_>, _>>()?;
let result = self.0.attribute2(&obj, &baseline, &change_sets)?;
let result = result
.into_iter()
.map(|cs| {
let add = cs
.add
.iter()
.map::<Result<JsValue, JsValue>, _>(|a| {
let r = Object::new();
js_set(&r, "actor", &self.0.actor_to_str(a.actor))?;
js_set(&r, "start", a.range.start as f64)?;
js_set(&r, "end", a.range.end as f64)?;
Ok(JsValue::from(&r))
})
.collect::<Result<Vec<JsValue>, JsValue>>()?
.iter()
.collect::<Array>();
let del = cs
.del
.iter()
.map::<Result<JsValue, JsValue>, _>(|d| {
let r = Object::new();
js_set(&r, "actor", &self.0.actor_to_str(d.actor))?;
js_set(&r, "pos", d.pos as f64)?;
js_set(&r, "val", &d.span)?;
Ok(JsValue::from(&r))
})
.collect::<Result<Vec<JsValue>, JsValue>>()?
.iter()
.collect::<Array>();
let obj = Object::new();
js_set(&obj, "add", add)?;
js_set(&obj, "del", del)?;
Ok(obj.into())
})
.collect::<Result<Vec<JsValue>, JsValue>>()?
.iter()
.collect::<Array>();
Ok(result)
}
pub fn save(&mut self) -> Uint8Array {
Uint8Array::from(self.0.save().as_slice())
}
@ -365,17 +579,19 @@ impl Automerge {
}
#[wasm_bindgen(js_name = loadIncremental)]
pub fn load_incremental(&mut self, data: Uint8Array) -> Result<f64, JsValue> {
pub fn load_incremental(&mut self, data: Uint8Array) -> Result<Array, JsValue> {
let data = data.to_vec();
let len = self.0.load_incremental(&data).map_err(to_js_err)?;
Ok(len as f64)
let objs = self.0.load_incremental(&data).map_err(to_js_err)?;
let objs: Array = objs.iter().map(|o| JsValue::from(o.to_string())).collect();
Ok(objs)
}
#[wasm_bindgen(js_name = applyChanges)]
pub fn apply_changes(&mut self, changes: JsValue) -> Result<(), JsValue> {
pub fn apply_changes(&mut self, changes: JsValue) -> Result<Array, JsValue> {
let changes: Vec<_> = JS(changes).try_into()?;
self.0.apply_changes(changes).map_err(to_js_err)?;
Ok(())
let objs = self.0.apply_changes(changes).map_err(to_js_err)?;
let objs: Array = objs.iter().map(|o| JsValue::from(o.to_string())).collect();
Ok(objs)
}
#[wasm_bindgen(js_name = getChanges)]
@ -455,13 +671,15 @@ impl Automerge {
&mut self,
state: &mut SyncState,
message: Uint8Array,
) -> Result<(), JsValue> {
) -> Result<Array, JsValue> {
let message = message.to_vec();
let message = am::sync::Message::decode(message.as_slice()).map_err(to_js_err)?;
self.0
let objs = self
.0
.receive_sync_message(&mut state.0, message)
.map_err(to_js_err)?;
Ok(())
let objs: Array = objs.iter().map(|o| JsValue::from(o.to_string())).collect();
Ok(objs)
}
#[wasm_bindgen(js_name = generateSyncMessage)]

View file

@ -0,0 +1,189 @@
import { describe, it } from 'mocha';
//@ts-ignore
import assert from 'assert'
//@ts-ignore
import { BloomFilter } from './helpers/sync'
import { create, loadDoc, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..'
import { DecodedSyncMessage, Hash } from '..'
describe('Automerge', () => {
describe('attribute', () => {
it('should be able to attribute text segments on change sets', () => {
let doc1 = create()
let text = doc1.set_object("_root", "notes","hello little world")
let h1 = doc1.getHeads();
let doc2 = doc1.fork();
doc2.splice(text, 5, 7, " big");
doc2.text(text)
let h2 = doc2.getHeads();
assert.deepEqual(doc2.text(text), "hello big world")
let doc3 = doc1.fork();
doc3.splice(text, 0, 0, "Well, ");
let h3 = doc3.getHeads();
assert.deepEqual(doc3.text(text), "Well, hello little world")
doc1.merge(doc2)
doc1.merge(doc3)
assert.deepEqual(doc1.text(text), "Well, hello big world")
let attribute = doc1.attribute(text, h1, [h2, h3])
assert.deepEqual(attribute, [
{ add: [ { start: 11, end: 15 } ], del: [ { pos: 15, val: ' little' } ] },
{ add: [ { start: 0, end: 6 } ], del: [] }
])
})
it('should be able to handle complex attribute change sets', () => {
let doc1 = create("aaaa")
let text = doc1.set_object("_root", "notes","AAAAAA")
let h1 = doc1.getHeads();
let doc2 = doc1.fork("bbbb");
doc2.splice(text, 0, 2, "BB");
doc2.commit()
doc2.splice(text, 2, 2, "BB");
doc2.commit()
doc2.splice(text, 6, 0, "BB");
doc2.commit()
let h2 = doc2.getHeads();
assert.deepEqual(doc2.text(text), "BBBBAABB")
let doc3 = doc1.fork("cccc");
doc3.splice(text, 1, 1, "C");
doc3.commit()
doc3.splice(text, 3, 1, "C");
doc3.commit()
doc3.splice(text, 5, 1, "C");
doc3.commit()
let h3 = doc3.getHeads();
// with tombstones its
// AC.AC.AC.
assert.deepEqual(doc3.text(text), "ACACAC")
doc1.merge(doc2)
assert.deepEqual(doc1.attribute(text, h1, [h2]), [
{ add: [ {start:0, end: 4}, { start: 6, end: 8 } ], del: [ { pos: 4, val: 'AAAA' } ] },
])
doc1.merge(doc3)
assert.deepEqual(doc1.text(text), "BBBBCCACBB")
// with tombstones its
// BBBB.C..C.AC.BB
assert.deepEqual(doc1.attribute(text, h1, [h2,h3]), [
{ add: [ {start:0, end: 4}, { start: 8, end: 10 } ], del: [ { pos: 4, val: 'A' }, { pos: 5, val: 'AA' }, { pos: 6, val: 'A' } ] },
{ add: [ {start:4, end: 6}, { start: 7, end: 8 } ], del: [ { pos: 5, val: 'A' }, { pos: 6, val: 'A' }, { pos: 8, val: 'A' } ] }
])
})
it('should not include attribution of text that is inserted and deleted only within change sets', () => {
let doc1 = create()
let text = doc1.set_object("_root", "notes","hello little world")
let h1 = doc1.getHeads();
let doc2 = doc1.fork();
doc2.splice(text, 5, 7, " big");
doc2.splice(text, 9, 0, " bad");
doc2.splice(text, 9, 4)
doc2.text(text)
let h2 = doc2.getHeads();
assert.deepEqual(doc2.text(text), "hello big world")
let doc3 = doc1.fork();
doc3.splice(text, 0, 0, "Well, HI THERE");
doc3.splice(text, 6, 8, "")
let h3 = doc3.getHeads();
assert.deepEqual(doc3.text(text), "Well, hello little world")
doc1.merge(doc2)
doc1.merge(doc3)
assert.deepEqual(doc1.text(text), "Well, hello big world")
let attribute = doc1.attribute(text, h1, [h2, h3])
assert.deepEqual(attribute, [
{ add: [ { start: 11, end: 15 } ], del: [ { pos: 15, val: ' little' } ] },
{ add: [ { start: 0, end: 6 } ], del: [] }
])
})
})
describe('attribute2', () => {
it('should be able to attribute text segments on change sets', () => {
let doc1 = create("aaaa")
let text = doc1.set_object("_root", "notes","hello little world")
let h1 = doc1.getHeads();
let doc2 = doc1.fork("bbbb");
doc2.splice(text, 5, 7, " big");
doc2.text(text)
let h2 = doc2.getHeads();
assert.deepEqual(doc2.text(text), "hello big world")
let doc3 = doc1.fork("cccc");
doc3.splice(text, 0, 0, "Well, ");
let doc4 = doc3.fork("dddd")
doc4.splice(text, 0, 0, "Gee, ");
let h3 = doc4.getHeads();
assert.deepEqual(doc4.text(text), "Gee, Well, hello little world")
doc1.merge(doc2)
doc1.merge(doc4)
assert.deepEqual(doc1.text(text), "Gee, Well, hello big world")
let attribute = doc1.attribute2(text, h1, [h2, h3])
assert.deepEqual(attribute, [
{ add: [ { actor: "bbbb", start: 16, end: 20 } ], del: [ { actor: "bbbb", pos: 20, val: ' little' } ] },
{ add: [ { actor: "dddd", start:0, end: 5 }, { actor: "cccc", start: 5, end: 11 } ], del: [] }
])
})
it('should not include attribution of text that is inserted and deleted only within change sets', () => {
let doc1 = create("aaaa")
let text = doc1.set_object("_root", "notes","hello little world")
let h1 = doc1.getHeads();
let doc2 = doc1.fork("bbbb");
doc2.splice(text, 5, 7, " big");
doc2.splice(text, 9, 0, " bad");
doc2.splice(text, 9, 4)
doc2.text(text)
let h2 = doc2.getHeads();
assert.deepEqual(doc2.text(text), "hello big world")
let doc3 = doc1.fork("cccc");
doc3.splice(text, 0, 0, "Well, HI THERE");
doc3.splice(text, 6, 8, "")
let h3 = doc3.getHeads();
assert.deepEqual(doc3.text(text), "Well, hello little world")
doc1.merge(doc2)
doc1.merge(doc3)
assert.deepEqual(doc1.text(text), "Well, hello big world")
let attribute = doc1.attribute2(text, h1, [h2, h3])
assert.deepEqual(attribute, [
{ add: [ { start: 11, end: 15, actor: "bbbb" } ], del: [ { pos: 15, val: ' little', actor: "bbbb" } ] },
{ add: [ { start: 0, end: 6, actor: "cccc" } ], del: [] }
])
let h4 = doc1.getHeads()
doc3.splice(text, 24, 0, "!!!")
doc1.merge(doc3)
let h5 = doc1.getHeads()
assert.deepEqual(doc1.text(text), "Well, hello big world!!!")
attribute = doc1.attribute2(text, h4, [h5])
assert.deepEqual(attribute, [
{ add: [ { start: 21, end: 24, actor: "cccc" } ], del: [] },
{ add: [], del: [] }
])
})
})
})

View file

@ -0,0 +1,203 @@
import { describe, it } from 'mocha';
//@ts-ignore
import assert from 'assert'
//@ts-ignore
import { create, loadDoc, Automerge, encodeChange, decodeChange } from '..'
describe('Automerge', () => {
describe('marks', () => {
it('should handle marks [..]', () => {
let doc = create()
let list = doc.set_object("_root", "list", "")
doc.splice(list, 0, 0, "aaabbbccc")
doc.mark(list, "[3..6]", "bold" , true)
let spans = doc.spans(list);
assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]);
doc.insert(list, 6, "A")
doc.insert(list, 3, "A")
spans = doc.spans(list);
assert.deepStrictEqual(spans, [ 'aaaA', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'Accc' ]);
})
it('should handle marks [..] at the beginning of a string', () => {
let doc = create()
let list = doc.set_object("_root", "list", "")
doc.splice(list, 0, 0, "aaabbbccc")
doc.mark(list, "[0..3]", "bold", true)
let spans = doc.spans(list);
assert.deepStrictEqual(spans, [ [ [ 'bold', 'boolean', true ] ], 'aaa', [], 'bbbccc' ]);
let doc2 = doc.fork()
doc2.insert(list, 0, "A")
doc2.insert(list, 4, "B")
doc.merge(doc2)
spans = doc.spans(list);
assert.deepStrictEqual(spans, [ 'A', [ [ 'bold', 'boolean', true ] ], 'aaa', [], 'Bbbbccc' ]);
})
it('should handle marks [..] with splice', () => {
let doc = create()
let list = doc.set_object("_root", "list", "")
doc.splice(list, 0, 0, "aaabbbccc")
doc.mark(list, "[0..3]", "bold", true)
let spans = doc.spans(list);
assert.deepStrictEqual(spans, [ [ [ 'bold', 'boolean', true ] ], 'aaa', [], 'bbbccc' ]);
let doc2 = doc.fork()
doc2.splice(list, 0, 2, "AAA")
doc2.splice(list, 4, 0, "BBB")
doc.merge(doc2)
spans = doc.spans(list);
assert.deepStrictEqual(spans, [ 'AAA', [ [ 'bold', 'boolean', true ] ], 'a', [], 'BBBbbbccc' ]);
})
it('should handle marks across multiple forks', () => {
let doc = create()
let list = doc.set_object("_root", "list", "")
doc.splice(list, 0, 0, "aaabbbccc")
doc.mark(list, "[0..3]", "bold", true)
let spans = doc.spans(list);
assert.deepStrictEqual(spans, [ [ [ 'bold', 'boolean', true ] ], 'aaa', [], 'bbbccc' ]);
let doc2 = doc.fork()
doc2.splice(list, 1, 1, "Z") // replace 'aaa' with 'aZa' inside mark.
let doc3 = doc.fork()
doc3.insert(list, 0, "AAA") // should not be included in mark.
doc.merge(doc2)
doc.merge(doc3)
spans = doc.spans(list);
assert.deepStrictEqual(spans, [ 'AAA', [ [ 'bold', 'boolean', true ] ], 'aZa', [], 'bbbccc' ]);
})
it('should handle marks with deleted ends [..]', () => {
let doc = create()
let list = doc.set_object("_root", "list", "")
doc.splice(list, 0, 0, "aaabbbccc")
doc.mark(list, "[3..6]", "bold" , true)
let spans = doc.spans(list);
assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]);
doc.del(list,5);
doc.del(list,5);
doc.del(list,2);
doc.del(list,2);
spans = doc.spans(list);
assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'b', [], 'cc' ])
doc.insert(list, 3, "A")
doc.insert(list, 2, "A")
spans = doc.spans(list);
assert.deepStrictEqual(spans, [ 'aaA', [ [ 'bold', 'boolean', true ] ], 'b', [], 'Acc' ])
})
it('should handle sticky marks (..)', () => {
let doc = create()
let list = doc.set_object("_root", "list", "")
doc.splice(list, 0, 0, "aaabbbccc")
doc.mark(list, "(3..6)", "bold" , true)
let spans = doc.spans(list);
assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]);
doc.insert(list, 6, "A")
doc.insert(list, 3, "A")
spans = doc.spans(list);
assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'AbbbA', [], 'ccc' ]);
})
it('should handle sticky marks with deleted ends (..)', () => {
let doc = create()
let list = doc.set_object("_root", "list", "")
doc.splice(list, 0, 0, "aaabbbccc")
doc.mark(list, "(3..6)", "bold" , true)
let spans = doc.spans(list);
assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]);
doc.del(list,5);
doc.del(list,5);
doc.del(list,2);
doc.del(list,2);
spans = doc.spans(list);
assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'b', [], 'cc' ])
doc.insert(list, 3, "A")
doc.insert(list, 2, "A")
spans = doc.spans(list);
assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ])
// make sure save/load can handle marks
let doc2 = loadDoc(doc.save())
spans = doc2.spans(list);
assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ])
assert.deepStrictEqual(doc.getHeads(), doc2.getHeads())
assert.deepStrictEqual(doc.save(), doc2.save())
})
it('should handle overlapping marks', () => {
let doc : Automerge = create("aabbcc")
let list = doc.set_object("_root", "list", "")
doc.splice(list, 0, 0, "the quick fox jumps over the lazy dog")
doc.mark(list, "[0..37]", "bold" , true)
doc.mark(list, "[4..19]", "itallic" , true)
doc.mark(list, "[10..13]", "comment" , "foxes are my favorite animal!")
doc.commit("marks");
let spans = doc.spans(list);
assert.deepStrictEqual(spans,
[
[ [ 'bold', 'boolean', true ] ],
'the ',
[ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ],
'quick ',
[
[ 'bold', 'boolean', true ],
[ 'comment', 'str', 'foxes are my favorite animal!' ],
[ 'itallic', 'boolean', true ]
],
'fox',
[ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ],
' jumps',
[ [ 'bold', 'boolean', true ] ],
' over the lazy dog',
[],
]
)
let text = doc.text(list);
assert.deepStrictEqual(text, "the quick fox jumps over the lazy dog");
let raw_spans = doc.raw_spans(list);
assert.deepStrictEqual(raw_spans,
[
{ id: "39@aabbcc", start: 0, end: 37, type: 'bold', value: true },
{ id: "41@aabbcc", start: 4, end: 19, type: 'itallic', value: true },
{ id: "43@aabbcc", start: 10, end: 13, type: 'comment', value: 'foxes are my favorite animal!' }
]);
doc.unmark(list, "41@aabbcc")
raw_spans = doc.raw_spans(list);
assert.deepStrictEqual(raw_spans,
[
{ id: "39@aabbcc", start: 0, end: 37, type: 'bold', value: true },
{ id: "43@aabbcc", start: 10, end: 13, type: 'comment', value: 'foxes are my favorite animal!' }
]);
// make sure encode/decode can handle marks
doc.unmark(list, "39@aabbcc")
raw_spans = doc.raw_spans(list);
assert.deepStrictEqual(raw_spans,
[
{ id: "43@aabbcc", start: 10, end: 13, type: 'comment', value: 'foxes are my favorite animal!' }
]);
let all = doc.getChanges([])
let decoded = all.map((c) => decodeChange(c))
let encoded = decoded.map((c) => encodeChange(c))
let doc2 = create();
doc2.applyChanges(encoded)
doc.dump()
doc2.dump()
assert.deepStrictEqual(doc.spans(list) , doc2.spans(list))
assert.deepStrictEqual(doc.save(), doc2.save())
})
})
})

View file

@ -3,9 +3,8 @@ import { describe, it } from 'mocha';
import assert from 'assert'
//@ts-ignore
import { BloomFilter } from './helpers/sync'
import { create, loadDoc, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '../dev/index'
import { DecodedSyncMessage } from '../index';
import { Hash } from '../dev/index';
import { create, loadDoc, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..'
import { DecodedSyncMessage, Hash } from '..'
function sync(a: Automerge, b: Automerge, aSyncState = initSyncState(), bSyncState = initSyncState()) {
const MAX_ITER = 10
@ -229,7 +228,6 @@ describe('Automerge', () => {
let root = "_root";
let text = doc.set_object(root, "text", "");
if (!text) throw new Error('should not be undefined')
doc.splice(text, 0, 0, "hello ")
doc.splice(text, 6, 0, ["w","o","r","l","d"])
doc.splice(text, 11, 0, ["!","?"])
@ -276,8 +274,9 @@ describe('Automerge', () => {
let docA = loadDoc(saveA);
let docB = loadDoc(saveB);
let docC = loadDoc(saveMidway)
docC.loadIncremental(save3)
let touched = docC.loadIncremental(save3)
assert.deepEqual(touched, ["_root"]);
assert.deepEqual(docA.keys("_root"), docB.keys("_root"));
assert.deepEqual(docA.save(), docB.save());
assert.deepEqual(docA.save(), docC.save());
@ -344,9 +343,11 @@ describe('Automerge', () => {
doc1.set(seq, 0, 20)
doc2.set(seq, 0, 0, "counter")
doc3.set(seq, 0, 10, "counter")
doc1.applyChanges(doc2.getChanges(doc1.getHeads()))
doc1.applyChanges(doc3.getChanges(doc1.getHeads()))
let touched1 = doc1.applyChanges(doc2.getChanges(doc1.getHeads()))
let touched2 = doc1.applyChanges(doc3.getChanges(doc1.getHeads()))
let result = doc1.values(seq, 0)
assert.deepEqual(touched1,["1@aaaa"])
assert.deepEqual(touched2,["1@aaaa"])
assert.deepEqual(result,[
['int',20,'3@aaaa'],
['counter',0,'3@bbbb'],
@ -388,6 +389,8 @@ describe('Automerge', () => {
assert.deepEqual(change2, null)
if (change1 === null) { throw new RangeError("change1 should not be null") }
assert.deepEqual(decodeChange(change1).hash, head1[0])
assert.deepEqual(head1.some((hash) => doc1.getChangeByHash(hash) === null), false)
assert.deepEqual(head2.some((hash) => doc1.getChangeByHash(hash) === null), true)
})
it('recursive sets are possible', () => {
@ -475,6 +478,19 @@ describe('Automerge', () => {
assert.deepEqual(C.text(At), 'hell! world')
})
it('should return opIds that were changed', () => {
let A = create("aabbcc")
let At = A.set_object('_root', 'list', [])
A.insert('/list', 0, 'a')
A.insert('/list', 1, 'b')
let B = A.fork()
A.insert('/list', 2, 'c')
let opIds = A.merge(B)
assert.equal(opIds.length, 0)
})
})
describe('sync', () => {
it('should send a sync message implying no local data', () => {
@ -1087,16 +1103,20 @@ describe('Automerge', () => {
m2 = n2.generateSyncMessage(s2)
if (m1 === null) { throw new RangeError("message should not be null") }
if (m2 === null) { throw new RangeError("message should not be null") }
n1.receiveSyncMessage(s1, m2)
n2.receiveSyncMessage(s2, m1)
let touched1 = n1.receiveSyncMessage(s1, m2)
let touched2 = n2.receiveSyncMessage(s2, m1)
assert.deepEqual(touched1, []);
assert.deepEqual(touched2, []);
// Then n1 and n2 send each other their changes, except for the false positive
m1 = n1.generateSyncMessage(s1)
m2 = n2.generateSyncMessage(s2)
if (m1 === null) { throw new RangeError("message should not be null") }
if (m2 === null) { throw new RangeError("message should not be null") }
n1.receiveSyncMessage(s1, m2)
n2.receiveSyncMessage(s2, m1)
let touched3 = n1.receiveSyncMessage(s1, m2)
let touched4 = n2.receiveSyncMessage(s2, m1)
assert.deepEqual(touched3, []);
assert.deepEqual(touched4, ["_root"]);
assert.strictEqual(decodeSyncMessage(m1).changes.length, 2) // n1c1 and n1c2
assert.strictEqual(decodeSyncMessage(m2).changes.length, 1) // only n2c2; change n2c1 is not sent

View file

@ -1,9 +1,8 @@
use crate::exid::ExId;
use crate::transaction::{CommitOptions, Transactable};
use crate::{sync, Keys, KeysAt, ObjType, ScalarValue};
use crate::{
transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop,
Value,
query, sync, transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change,
ChangeHash, Keys, KeysAt, ObjType, Prop, ScalarValue, Value,
};
/// An automerge document that automatically manages transactions.
@ -27,6 +26,11 @@ impl AutoCommit {
}
}
// FIXME : temp
pub fn actor_to_str(&self, actor: usize) -> String {
self.doc.ops.m.actors.cache[actor].to_hex_string()
}
/// Get the inner document.
#[doc(hidden)]
pub fn document(&mut self) -> &Automerge {
@ -78,18 +82,18 @@ impl AutoCommit {
})
}
pub fn load_incremental(&mut self, data: &[u8]) -> Result<usize, AutomergeError> {
pub fn load_incremental(&mut self, data: &[u8]) -> Result<Vec<ExId>, AutomergeError> {
self.ensure_transaction_closed();
self.doc.load_incremental(data)
}
pub fn apply_changes(&mut self, changes: Vec<Change>) -> Result<(), AutomergeError> {
pub fn apply_changes(&mut self, changes: Vec<Change>) -> Result<Vec<ExId>, AutomergeError> {
self.ensure_transaction_closed();
self.doc.apply_changes(changes)
}
/// Takes all the changes in `other` which are not in `self` and applies them
pub fn merge(&mut self, other: &mut Self) -> Result<Vec<ChangeHash>, AutomergeError> {
pub fn merge(&mut self, other: &mut Self) -> Result<Vec<ExId>, AutomergeError> {
self.ensure_transaction_closed();
other.ensure_transaction_closed();
self.doc.merge(&mut other.doc)
@ -149,7 +153,7 @@ impl AutoCommit {
&mut self,
sync_state: &mut sync::State,
message: sync::Message,
) -> Result<(), AutomergeError> {
) -> Result<Vec<ExId>, AutomergeError> {
self.ensure_transaction_closed();
self.doc.receive_sync_message(sync_state, message)
}
@ -285,6 +289,37 @@ impl Transactable for AutoCommit {
tx.insert(&mut self.doc, obj.as_ref(), index, value)
}
#[allow(clippy::too_many_arguments)]
fn mark<O: AsRef<ExId>>(
&mut self,
obj: O,
start: usize,
expand_start: bool,
end: usize,
expand_end: bool,
mark: &str,
value: ScalarValue,
) -> Result<(), AutomergeError> {
self.ensure_transaction_open();
let tx = self.transaction.as_mut().unwrap();
tx.mark(
&mut self.doc,
obj,
start,
expand_start,
end,
expand_end,
mark,
value,
)
}
fn unmark<O: AsRef<ExId>>(&mut self, obj: O, mark: O) -> Result<(), AutomergeError> {
self.ensure_transaction_open();
let tx = self.transaction.as_mut().unwrap();
tx.unmark(&mut self.doc, obj, mark)
}
fn insert_object(
&mut self,
obj: &ExId,
@ -343,6 +378,44 @@ impl Transactable for AutoCommit {
self.doc.text_at(obj, heads)
}
fn list<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<(Value, ExId)>, AutomergeError> {
self.doc.list(obj)
}
fn list_at<O: AsRef<ExId>>(
&self,
obj: O,
heads: &[ChangeHash],
) -> Result<Vec<(Value, ExId)>, AutomergeError> {
self.doc.list_at(obj, heads)
}
fn spans<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<query::Span>, AutomergeError> {
self.doc.spans(obj)
}
fn raw_spans<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<query::SpanInfo>, AutomergeError> {
self.doc.raw_spans(obj)
}
fn attribute<O: AsRef<ExId>>(
&self,
obj: O,
baseline: &[ChangeHash],
change_sets: &[Vec<ChangeHash>],
) -> Result<Vec<query::ChangeSet>, AutomergeError> {
self.doc.attribute(obj, baseline, change_sets)
}
fn attribute2<O: AsRef<ExId>>(
&self,
obj: O,
baseline: &[ChangeHash],
change_sets: &[Vec<ChangeHash>],
) -> Result<Vec<query::ChangeSet2>, AutomergeError> {
self.doc.attribute2(obj, baseline, change_sets)
}
// TODO - I need to return these OpId's here **only** to get
// the legacy conflicts format of { [opid]: value }
// Something better?

View file

@ -13,7 +13,6 @@ use crate::types::{
use crate::KeysAt;
use crate::{legacy, query, types, ObjType};
use crate::{AutomergeError, Change, Prop};
use serde::Serialize;
#[derive(Debug, Clone, PartialEq)]
pub(crate) enum Actor {
@ -269,7 +268,11 @@ impl Automerge {
}
pub(crate) fn id_to_exid(&self, id: OpId) -> ExId {
ExId::Id(id.0, self.ops.m.actors.cache[id.1].clone(), id.1)
if id == types::ROOT {
ExId::Root
} else {
ExId::Id(id.0, self.ops.m.actors.cache[id.1].clone(), id.1)
}
}
/// Get the string represented by the given text object.
@ -305,6 +308,90 @@ impl Automerge {
Ok(buffer)
}
pub fn list<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<(Value, ExId)>, AutomergeError> {
let obj = self.exid_to_obj(obj.as_ref())?;
let query = self.ops.search(&obj, query::ListVals::new());
Ok(query
.ops
.iter()
.map(|o| (o.value(), self.id_to_exid(o.id)))
.collect())
}
pub fn list_at<O: AsRef<ExId>>(
&self,
obj: O,
heads: &[ChangeHash],
) -> Result<Vec<(Value, ExId)>, AutomergeError> {
let obj = self.exid_to_obj(obj.as_ref())?;
let clock = self.clock_at(heads);
let query = self.ops.search(&obj, query::ListValsAt::new(clock));
Ok(query
.ops
.iter()
.map(|o| (o.value(), self.id_to_exid(o.id)))
.collect())
}
pub fn spans<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<query::Span>, AutomergeError> {
let obj = self.exid_to_obj(obj.as_ref())?;
let mut query = self.ops.search(&obj, query::Spans::new());
query.check_marks();
Ok(query.spans)
}
pub fn attribute<O: AsRef<ExId>>(
&self,
obj: O,
baseline: &[ChangeHash],
change_sets: &[Vec<ChangeHash>],
) -> Result<Vec<query::ChangeSet>, AutomergeError> {
let obj = self.exid_to_obj(obj.as_ref())?;
let baseline = self.clock_at(baseline);
let change_sets: Vec<Clock> = change_sets.iter().map(|p| self.clock_at(p)).collect();
let mut query = self
.ops
.search(&obj, query::Attribute::new(baseline, change_sets));
query.finish();
Ok(query.change_sets)
}
pub fn attribute2<O: AsRef<ExId>>(
&self,
obj: O,
baseline: &[ChangeHash],
change_sets: &[Vec<ChangeHash>],
) -> Result<Vec<query::ChangeSet2>, AutomergeError> {
let obj = self.exid_to_obj(obj.as_ref())?;
let baseline = self.clock_at(baseline);
let change_sets: Vec<Clock> = change_sets.iter().map(|p| self.clock_at(p)).collect();
let mut query = self
.ops
.search(&obj, query::Attribute2::new(baseline, change_sets));
query.finish();
Ok(query.change_sets)
}
pub fn raw_spans<O: AsRef<ExId>>(
&self,
obj: O,
) -> Result<Vec<query::SpanInfo>, AutomergeError> {
let obj = self.exid_to_obj(obj.as_ref())?;
let query = self.ops.search(&obj, query::RawSpans::new());
let result = query
.spans
.into_iter()
.map(|s| query::SpanInfo {
id: self.id_to_exid(s.id),
start: s.start,
end: s.end,
span_type: s.name,
value: s.value,
})
.collect();
Ok(result)
}
// TODO - I need to return these OpId's here **only** to get
// the legacy conflicts format of { [opid]: value }
// Something better?
@ -409,12 +496,9 @@ impl Automerge {
}
/// Load an incremental save of a document.
pub fn load_incremental(&mut self, data: &[u8]) -> Result<usize, AutomergeError> {
pub fn load_incremental(&mut self, data: &[u8]) -> Result<Vec<ExId>, AutomergeError> {
let changes = Change::load_document(data)?;
let start = self.ops.len();
self.apply_changes(changes)?;
let delta = self.ops.len() - start;
Ok(delta)
self.apply_changes(changes)
}
fn duplicate_seq(&self, change: &Change) -> bool {
@ -428,7 +512,8 @@ impl Automerge {
}
/// Apply changes to this document.
pub fn apply_changes(&mut self, changes: Vec<Change>) -> Result<(), AutomergeError> {
pub fn apply_changes(&mut self, changes: Vec<Change>) -> Result<Vec<ExId>, AutomergeError> {
let mut objs = HashSet::new();
for c in changes {
if !self.history_index.contains_key(&c.hash) {
if self.duplicate_seq(&c) {
@ -438,23 +523,24 @@ impl Automerge {
));
}
if self.is_causally_ready(&c) {
self.apply_change(c);
self.apply_change(c, &mut objs);
} else {
self.queue.push(c);
}
}
}
while let Some(c) = self.pop_next_causally_ready_change() {
self.apply_change(c);
self.apply_change(c, &mut objs);
}
Ok(())
Ok(objs.into_iter().map(|obj| self.id_to_exid(obj.0)).collect())
}
/// Apply a single change to this document.
fn apply_change(&mut self, change: Change) {
fn apply_change(&mut self, change: Change, objs: &mut HashSet<ObjId>) {
let ops = self.import_ops(&change);
self.update_history(change, ops.len());
for (obj, op) in ops {
objs.insert(obj);
self.insert_op(&obj, op);
}
}
@ -516,15 +602,14 @@ impl Automerge {
}
/// Takes all the changes in `other` which are not in `self` and applies them
pub fn merge(&mut self, other: &mut Self) -> Result<Vec<ChangeHash>, AutomergeError> {
pub fn merge(&mut self, other: &mut Self) -> Result<Vec<ExId>, AutomergeError> {
// TODO: Make this fallible and figure out how to do this transactionally
let changes = self
.get_changes_added(other)
.into_iter()
.cloned()
.collect::<Vec<_>>();
self.apply_changes(changes)?;
Ok(self.get_heads())
self.apply_changes(changes)
}
/// Save the entirety of this document in a compact form.
@ -857,6 +942,8 @@ impl Automerge {
OpType::Set(value) => format!("{}", value),
OpType::Make(obj) => format!("make({})", obj),
OpType::Inc(obj) => format!("inc({})", obj),
OpType::MarkBegin(m) => format!("mark({}={})", m.name, m.value),
OpType::MarkEnd(_) => "/mark".into(),
OpType::Del => format!("del{}", 0),
};
let pred: Vec<_> = op.pred.iter().map(|id| self.to_string(*id)).collect();
@ -885,17 +972,6 @@ impl Default for Automerge {
}
}
#[derive(Serialize, Debug, Clone, PartialEq)]
pub struct SpanInfo {
pub id: ExId,
pub time: i64,
pub start: usize,
pub end: usize,
#[serde(rename = "type")]
pub span_type: String,
pub value: ScalarValue,
}
#[cfg(test)]
mod tests {
use itertools::Itertools;
@ -1178,8 +1254,7 @@ mod tests {
assert!(doc.value_at(&list, 0, &heads2)?.unwrap().0 == Value::int(10));
assert!(doc.length_at(&list, &heads3) == 2);
//doc.dump();
log!("{:?}", doc.value_at(&list, 0, &heads3)?.unwrap().0);
assert!(doc.value_at(&list, 0, &heads3)?.unwrap().0 == Value::int(30));
assert!(doc.value_at(&list, 1, &heads3)?.unwrap().0 == Value::int(20));
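Because apply_changes and merge in this file now report the objects that actually received ops, a caller can invalidate only the affected parts of its view. A hedged sketch against the AutoCommit wrapper (the function name is illustrative):

use automerge::{AutoCommit, AutomergeError};

fn merge_and_report(local: &mut AutoCommit, remote: &mut AutoCommit) -> Result<(), AutomergeError> {
    // merge() now surfaces the ExId of every object touched by the merged changes
    let touched = local.merge(remote)?;
    for obj in touched {
        // a real application would re-render only these objects
        println!("object changed: {:?}", obj);
    }
    Ok(())
}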

View file

@ -137,6 +137,15 @@ impl<'a> Iterator for OperationIterator<'a> {
Action::MakeTable => OpType::Make(ObjType::Table),
Action::Del => OpType::Del,
Action::Inc => OpType::Inc(value.to_i64()?),
Action::MarkBegin => {

// a MarkBegin op stores three entries in the val column: name, expand flag, and value
let name = value.as_string()?;
let expand = self.value.next()?.to_bool()?;
let value = self.value.next()?;
OpType::mark(name, expand, value)
}
Action::MarkEnd => OpType::MarkEnd(value.to_bool()?),
Action::Unused => panic!("invalid action"),
};
Some(amp::Op {
action,
@ -178,6 +187,15 @@ impl<'a> Iterator for DocOpIterator<'a> {
Action::MakeTable => OpType::Make(ObjType::Table),
Action::Del => OpType::Del,
Action::Inc => OpType::Inc(value.to_i64()?),
Action::MarkBegin => {
// a MarkBegin op stores three entries in the val column: name, expand flag, and value
let name = value.as_string()?;
let expand = self.value.next()?.to_bool()?;
let value = self.value.next()?;
OpType::mark(name, expand, value)
}
Action::MarkEnd => OpType::MarkEnd(value.to_bool()?),
Action::Unused => panic!("invalid action"),
};
Some(DocOp {
actor,
@ -1063,6 +1081,16 @@ impl DocOpEncoder {
self.val.append_null();
Action::Del
}
amp::OpType::MarkBegin(m) => {
self.val.append_value(&m.name.clone().into(), actors);
self.val.append_value(&m.expand.into(), actors);
self.val.append_value(&m.value.clone(), actors);
Action::MarkBegin
}
amp::OpType::MarkEnd(s) => {
self.val.append_value(&(*s).into(), actors);
Action::MarkEnd
}
amp::OpType::Make(kind) => {
self.val.append_null();
match kind {
@ -1169,6 +1197,16 @@ impl ColumnEncoder {
self.val.append_null();
Action::Del
}
OpType::MarkBegin(m) => {
self.val.append_value2(&m.name.clone().into(), actors);
self.val.append_value2(&m.expand.into(), actors);
self.val.append_value2(&m.value.clone(), actors);
Action::MarkBegin
}
OpType::MarkEnd(s) => {
self.val.append_value2(&(*s).into(), actors);
Action::MarkEnd
}
OpType::Make(kind) => {
self.val.append_null();
match kind {
@ -1274,8 +1312,11 @@ pub(crate) enum Action {
MakeText,
Inc,
MakeTable,
MarkBegin,
Unused, // final bit is used to mask `Make` actions
MarkEnd,
}
const ACTIONS: [Action; 7] = [
const ACTIONS: [Action; 10] = [
Action::MakeMap,
Action::Set,
Action::MakeList,
@ -1283,6 +1324,9 @@ const ACTIONS: [Action; 7] = [
Action::MakeText,
Action::Inc,
Action::MakeTable,
Action::MarkBegin,
Action::Unused,
Action::MarkEnd,
];
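// Note (hedged): decoding presumably maps the stored action index straight into
// ACTIONS, so the three new entries must stay appended at indices 7..=9 —
// reordering the array would change the meaning of already-encoded documents.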
impl Decodable for Action {

View file

@ -49,6 +49,12 @@ impl Serialize for Op {
match &self.action {
OpType::Inc(n) => op.serialize_field("value", &n)?,
OpType::Set(value) => op.serialize_field("value", &value)?,
OpType::MarkBegin(m) => {
op.serialize_field("name", &m.name)?;
op.serialize_field("expand", &m.expand)?;
op.serialize_field("value", &m.value)?;
}
OpType::MarkEnd(s) => op.serialize_field("expand", &s)?,
_ => {}
}
op.serialize_field("pred", &self.pred)?;
@ -70,6 +76,8 @@ pub(crate) enum RawOpType {
Del,
Inc,
Set,
MarkBegin,
MarkEnd,
}
impl Serialize for RawOpType {
@ -85,6 +93,8 @@ impl Serialize for RawOpType {
RawOpType::Del => "del",
RawOpType::Inc => "inc",
RawOpType::Set => "set",
RawOpType::MarkBegin => "mark_begin",
RawOpType::MarkEnd => "mark_end",
};
serializer.serialize_str(s)
}
@ -116,6 +126,8 @@ impl<'de> Deserialize<'de> for RawOpType {
"del" => Ok(RawOpType::Del),
"inc" => Ok(RawOpType::Inc),
"set" => Ok(RawOpType::Set),
"mark_begin" => Ok(RawOpType::MarkBegin),
"mark_end" => Ok(RawOpType::MarkEnd),
other => Err(Error::unknown_variant(other, VARIANTS)),
}
}
@ -188,6 +200,30 @@ impl<'de> Deserialize<'de> for Op {
RawOpType::MakeList => OpType::Make(ObjType::List),
RawOpType::MakeText => OpType::Make(ObjType::Text),
RawOpType::Del => OpType::Del,
RawOpType::MarkBegin => {
let name = name.ok_or_else(|| Error::missing_field("mark(name)"))?;
let expand = expand.unwrap_or(false);
let value = if let Some(datatype) = datatype {
let raw_value = value
.ok_or_else(|| Error::missing_field("value"))?
.unwrap_or(ScalarValue::Null);
raw_value.as_datatype(datatype).map_err(|e| {
Error::invalid_value(
Unexpected::Other(e.unexpected.as_str()),
&e.expected.as_str(),
)
})?
} else {
value
.ok_or_else(|| Error::missing_field("value"))?
.unwrap_or(ScalarValue::Null)
};
OpType::mark(name, expand, value)
}
RawOpType::MarkEnd => {
let expand = expand.unwrap_or(true);
OpType::MarkEnd(expand)
}
RawOpType::Set => {
let value = if let Some(datatype) = datatype {
let raw_value = value

View file

@ -15,6 +15,8 @@ impl Serialize for OpType {
OpType::Make(ObjType::Table) => RawOpType::MakeTable,
OpType::Make(ObjType::List) => RawOpType::MakeList,
OpType::Make(ObjType::Text) => RawOpType::MakeText,
OpType::MarkBegin(_) => RawOpType::MarkBegin,
OpType::MarkEnd(_) => RawOpType::MarkEnd,
OpType::Del => RawOpType::Del,
OpType::Inc(_) => RawOpType::Inc,
OpType::Set(_) => RawOpType::Set,

View file

@ -90,10 +90,6 @@ impl<const B: usize> OpSetInternal<B> {
op
}
pub fn len(&self) -> usize {
self.length
}
pub fn insert(&mut self, index: usize, obj: &ObjId, element: Op) {
if let OpType::Make(typ) = element.action {
self.trees

View file

@ -1,10 +1,14 @@
use crate::exid::ExId;
use crate::op_tree::{OpSetMetadata, OpTreeNode};
use crate::types::{Clock, Counter, ElemId, Op, OpId, OpType, ScalarValue};
use fxhash::FxBuildHasher;
use serde::Serialize;
use std::cmp::Ordering;
use std::collections::{HashMap, HashSet};
use std::fmt::Debug;
mod attribute;
mod attribute2;
mod insert;
mod keys;
mod keys_at;
@ -17,8 +21,12 @@ mod nth_at;
mod opid;
mod prop;
mod prop_at;
mod raw_spans;
mod seek_op;
mod spans;
pub(crate) use attribute::{Attribute, ChangeSet};
pub(crate) use attribute2::{Attribute2, ChangeSet2};
pub(crate) use insert::InsertNth;
pub(crate) use keys::Keys;
pub(crate) use keys_at::KeysAt;
@ -31,7 +39,19 @@ pub(crate) use nth_at::NthAt;
pub(crate) use opid::OpIdSearch;
pub(crate) use prop::Prop;
pub(crate) use prop_at::PropAt;
pub(crate) use raw_spans::RawSpans;
pub(crate) use seek_op::SeekOp;
pub(crate) use spans::{Span, Spans};
#[derive(Serialize, Debug, Clone, PartialEq)]
pub struct SpanInfo {
pub id: ExId,
pub start: usize,
pub end: usize,
#[serde(rename = "type")]
pub span_type: String,
pub value: ScalarValue,
}
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct CounterData {

View file

@ -0,0 +1,128 @@
use crate::clock::Clock;
use crate::query::{OpSetMetadata, QueryResult, TreeQuery};
use crate::types::{ElemId, Op};
use std::fmt::Debug;
use std::ops::Range;
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct Attribute<const B: usize> {
pos: usize,
seen: usize,
last_seen: Option<ElemId>,
baseline: Clock,
pub change_sets: Vec<ChangeSet>,
}
#[derive(Debug, Clone, PartialEq)]
pub struct ChangeSet {
clock: Clock,
next_add: Option<Range<usize>>,
next_del: Option<(usize, String)>,
pub add: Vec<Range<usize>>,
pub del: Vec<(usize, String)>,
}
impl From<Clock> for ChangeSet {
fn from(clock: Clock) -> Self {
ChangeSet {
clock,
next_add: None,
next_del: None,
add: Vec::new(),
del: Vec::new(),
}
}
}
impl ChangeSet {
fn cut_add(&mut self) {
if let Some(add) = self.next_add.take() {
self.add.push(add)
}
}
fn cut_del(&mut self) {
if let Some(del) = self.next_del.take() {
self.del.push(del)
}
}
}
impl<const B: usize> Attribute<B> {
pub fn new(baseline: Clock, change_sets: Vec<Clock>) -> Self {
Attribute {
pos: 0,
seen: 0,
last_seen: None,
baseline,
change_sets: change_sets.into_iter().map(|c| c.into()).collect(),
}
}
fn update_add(&mut self, element: &Op) {
let baseline = self.baseline.covers(&element.id);
for cs in &mut self.change_sets {
if !baseline && cs.clock.covers(&element.id) {
// is part of the change_set
if let Some(range) = &mut cs.next_add {
range.end += 1;
} else {
cs.next_add = Some(Range {
start: self.seen,
end: self.seen + 1,
});
}
} else {
cs.cut_add();
}
cs.cut_del();
}
}
// an element counts as deleted by a change set when the op itself is in the
// baseline and one of its successors (the delete) is covered by that change set
fn update_del(&mut self, element: &Op) {
let baseline = self.baseline.covers(&element.id);
for cs in &mut self.change_sets {
if baseline && element.succ.iter().any(|id| cs.clock.covers(id)) {
// was deleted by change set
if let Some(s) = element.as_string() {
if let Some((_, span)) = &mut cs.next_del {
span.push_str(&s);
} else {
cs.next_del = Some((self.seen, s))
}
}
} else {
//cs.cut_del();
}
//cs.cut_add();
}
}
pub fn finish(&mut self) {
for cs in &mut self.change_sets {
cs.cut_add();
cs.cut_del();
}
}
}
impl<const B: usize> TreeQuery<B> for Attribute<B> {
fn query_element_with_metadata(&mut self, element: &Op, _m: &OpSetMetadata) -> QueryResult {
if element.insert {
self.last_seen = None;
}
if self.last_seen.is_none() && element.visible() {
self.update_add(element);
self.seen += 1;
self.last_seen = element.elemid();
}
if !element.succ.is_empty() {
self.update_del(element);
}
self.pos += 1;
QueryResult::Next
}
}
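Seen through the public API, the query above yields one ChangeSet per requested clock: add holds ranges into the merged text, del holds (position, deleted string) pairs. A hypothetical fragment in the style of the integration test at the bottom of this compare (doc, note, baseline and the change-set vector cs as defined there):

let text = doc.text(&note)?;
for set in doc.attribute(&note, &baseline, &cs)? {
    for range in &set.add {
        // `add` ranges index directly into the merged text
        println!("added {:?}", &text[range.clone()]);
    }
    for (pos, deleted) in &set.del {
        // `del` records where text was removed and what it said
        println!("deleted {:?} at {}", deleted, pos);
    }
}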

View file

@ -0,0 +1,172 @@
use crate::clock::Clock;
use crate::query::{OpSetMetadata, QueryResult, TreeQuery};
use crate::types::{ElemId, Op};
use std::fmt::Debug;
use std::ops::Range;
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct Attribute2<const B: usize> {
pos: usize,
seen: usize,
last_seen: Option<ElemId>,
baseline: Clock,
pub change_sets: Vec<ChangeSet2>,
}
#[derive(Debug, Clone, PartialEq)]
pub struct ChangeSet2 {
clock: Clock,
next_add: Option<CS2Add>,
next_del: Option<CS2Del>,
pub add: Vec<CS2Add>,
pub del: Vec<CS2Del>,
}
#[derive(Debug, Clone, PartialEq)]
pub struct CS2Add {
pub actor: usize,
pub range: Range<usize>,
}
#[derive(Debug, Clone, PartialEq)]
pub struct CS2Del {
pub pos: usize,
pub actor: usize,
pub span: String,
}
impl From<Clock> for ChangeSet2 {
fn from(clock: Clock) -> Self {
ChangeSet2 {
clock,
next_add: None,
next_del: None,
add: Vec::new(),
del: Vec::new(),
}
}
}
impl ChangeSet2 {
fn cut_add(&mut self) {
if let Some(add) = self.next_add.take() {
self.add.push(add)
}
}
fn cut_del(&mut self) {
if let Some(del) = self.next_del.take() {
self.del.push(del)
}
}
}
impl<const B: usize> Attribute2<B> {
pub fn new(baseline: Clock, change_sets: Vec<Clock>) -> Self {
Attribute2 {
pos: 0,
seen: 0,
last_seen: None,
baseline,
change_sets: change_sets.into_iter().map(|c| c.into()).collect(),
}
}
fn update_add(&mut self, element: &Op) {
let baseline = self.baseline.covers(&element.id);
for cs in &mut self.change_sets {
if !baseline && cs.clock.covers(&element.id) {
// is part of the change_set
if let Some(CS2Add { range, actor }) = &mut cs.next_add {
if *actor == element.id.actor() {
range.end += 1;
} else {
cs.cut_add();
cs.next_add = Some(CS2Add {
actor: element.id.actor(),
range: Range {
start: self.seen,
end: self.seen + 1,
},
});
}
} else {
cs.next_add = Some(CS2Add {
actor: element.id.actor(),
range: Range {
start: self.seen,
end: self.seen + 1,
},
});
}
} else {
cs.cut_add();
}
cs.cut_del();
}
}
// an element counts as deleted by a change set when the op itself is in the
// baseline and one of its successors (the delete) is covered by that change set
fn update_del(&mut self, element: &Op) {
if !self.baseline.covers(&element.id) {
return;
}
for cs in &mut self.change_sets {
let succ: Vec<_> = element
.succ
.iter()
.filter(|id| cs.clock.covers(id))
.collect();
// was deleted by change set
if let Some(suc) = succ.get(0) {
if let Some(s) = element.as_string() {
if let Some(CS2Del { actor, span, .. }) = &mut cs.next_del {
if suc.actor() == *actor {
span.push_str(&s);
} else {
cs.cut_del();
cs.next_del = Some(CS2Del {
pos: self.seen,
actor: suc.actor(),
span: s,
})
}
} else {
cs.next_del = Some(CS2Del {
pos: self.seen,
actor: suc.actor(),
span: s,
})
}
}
}
}
}
pub fn finish(&mut self) {
for cs in &mut self.change_sets {
cs.cut_add();
cs.cut_del();
}
}
}
impl<const B: usize> TreeQuery<B> for Attribute2<B> {
fn query_element_with_metadata(&mut self, element: &Op, _m: &OpSetMetadata) -> QueryResult {
if element.insert {
self.last_seen = None;
}
if self.last_seen.is_none() && element.visible() {
self.update_add(element);
self.seen += 1;
self.last_seen = element.elemid();
}
if !element.succ.is_empty() {
self.update_del(element);
}
self.pos += 1;
QueryResult::Next
}
}
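Attribute2 performs the same walk but additionally splits each change set's edits by actor: CS2Add pairs a range with an actor index, CS2Del adds the actor to the position/span pair. Continuing the hypothetical fragment from attribute.rs above (same doc, note, baseline and cs bindings):

let text = doc.text(&note)?;
for set in doc.attribute2(&note, &baseline, &cs)? {
    for add in &set.add {
        // add.actor is an index into the document's actor cache
        println!("actor {} added {:?}", add.actor, &text[add.range.clone()]);
    }
    for del in &set.del {
        println!("actor {} deleted {:?} at {}", del.actor, del.span, del.pos);
    }
}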

View file

@ -99,6 +99,10 @@ impl<const B: usize> TreeQuery<B> for InsertNth {
self.last_seen = None;
self.last_insert = element.elemid();
}
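// Hedged reading: mark boundary ops can themselves act as insertion anchors
// depending on their expand flag, which is what lets expand_start / expand_end
// decide whether text typed at a boundary joins the mark (see valid_mark_anchor
// in types.rs).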
if self.valid.is_some() && element.valid_mark_anchor() {
self.last_valid_insert = element.elemid();
self.valid = None;
}
if self.last_seen.is_none() && element.visible() {
if self.seen >= self.target {
return QueryResult::Finish;

View file

@ -0,0 +1,78 @@
use crate::query::{OpSetMetadata, QueryResult, TreeQuery};
use crate::types::{ElemId, Op, OpId, OpType, ScalarValue};
use std::fmt::Debug;
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct RawSpans<const B: usize> {
pos: usize,
seen: usize,
last_seen: Option<ElemId>,
last_insert: Option<ElemId>,
changed: bool,
pub spans: Vec<RawSpan>,
}
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct RawSpan {
pub id: OpId,
pub start: usize,
pub end: usize,
pub name: String,
pub value: ScalarValue,
}
impl<const B: usize> RawSpans<B> {
pub fn new() -> Self {
RawSpans {
pos: 0,
seen: 0,
last_seen: None,
last_insert: None,
changed: false,
spans: Vec::new(),
}
}
}
impl<const B: usize> TreeQuery<B> for RawSpans<B> {
fn query_element_with_metadata(&mut self, element: &Op, m: &OpSetMetadata) -> QueryResult {
// only live ops (no successors) contribute spans: a MarkBegin opens a new
// span at the current position, its matching MarkEnd closes it below
if element.succ.is_empty() {
if let OpType::MarkBegin(md) = &element.action {
let pos = self
.spans
.binary_search_by(|probe| m.lamport_cmp(probe.id, element.id))
.unwrap_err();
self.spans.insert(
pos,
RawSpan {
id: element.id,
start: self.seen,
end: 0,
name: md.name.clone(),
value: md.value.clone(),
},
);
}
if let OpType::MarkEnd(_) = &element.action {
for s in self.spans.iter_mut() {
if s.id == element.id.prev() {
s.end = self.seen;
break;
}
}
}
}
if element.insert {
self.last_seen = None;
self.last_insert = element.elemid();
}
if self.last_seen.is_none() && element.visible() {
self.seen += 1;
self.last_seen = element.elemid();
}
self.pos += 1;
QueryResult::Next
}
}
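RawSpans records each live mark as a start/end position pair over the visible elements, keyed by the id of its MarkBegin op; Automerge::raw_spans above converts that id to an ExId in SpanInfo. A small hypothetical fragment in the same test scenario:

for span in doc.raw_spans(&note)? {
    // span_type is the mark name; id is what unmark() takes to remove the mark
    println!("{} = {:?} over {}..{} ({:?})",
        span.span_type, span.value, span.start, span.end, span.id);
}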

View file

@ -0,0 +1,108 @@
use crate::query::{OpSetMetadata, QueryResult, TreeQuery};
use crate::types::{ElemId, Op, OpType, ScalarValue};
use std::collections::HashMap;
use std::fmt::Debug;
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct Spans<const B: usize> {
pos: usize,
seen: usize,
last_seen: Option<ElemId>,
last_insert: Option<ElemId>,
seen_at_this_mark: Option<ElemId>,
seen_at_last_mark: Option<ElemId>,
ops: Vec<Op>,
marks: HashMap<String, ScalarValue>,
changed: bool,
pub spans: Vec<Span>,
}
#[derive(Debug, Clone, PartialEq)]
pub struct Span {
pub pos: usize,
pub marks: Vec<(String, ScalarValue)>,
}
impl<const B: usize> Spans<B> {
pub fn new() -> Self {
Spans {
pos: 0,
seen: 0,
last_seen: None,
last_insert: None,
seen_at_last_mark: None,
seen_at_this_mark: None,
changed: false,
ops: Vec::new(),
marks: HashMap::new(),
spans: Vec::new(),
}
}
pub fn check_marks(&mut self) {
let mut new_marks = HashMap::new();
for op in &self.ops {
if let OpType::MarkBegin(m) = &op.action {
new_marks.insert(m.name.clone(), m.value.clone());
}
}
if new_marks != self.marks {
self.changed = true;
self.marks = new_marks;
}
if self.changed
&& (self.seen_at_last_mark != self.seen_at_this_mark
|| self.seen_at_last_mark.is_none() && self.seen_at_this_mark.is_none())
{
self.changed = false;
self.seen_at_last_mark = self.seen_at_this_mark;
let mut marks: Vec<_> = self
.marks
.iter()
.map(|(key, val)| (key.clone(), val.clone()))
.collect();
marks.sort_by(|(k1, _), (k2, _)| k1.cmp(k2));
self.spans.push(Span {
pos: self.seen,
marks,
});
}
}
}
impl<const B: usize> TreeQuery<B> for Spans<B> {
/*
fn query_node(&mut self, _child: &OpTreeNode<B>) -> QueryResult {
unimplemented!()
}
*/
fn query_element_with_metadata(&mut self, element: &Op, m: &OpSetMetadata) -> QueryResult {
// maintain the set of currently open marks: a live MarkBegin is inserted in
// Lamport order, the matching MarkEnd removes it again
if element.succ.is_empty() {
if let OpType::MarkBegin(_) = &element.action {
let pos = self
.ops
.binary_search_by(|probe| m.lamport_cmp(probe.id, element.id))
.unwrap_err();
self.ops.insert(pos, element.clone());
}
if let OpType::MarkEnd(_) = &element.action {
self.ops.retain(|op| op.id != element.id.prev());
}
}
if element.insert {
self.last_seen = None;
self.last_insert = element.elemid();
}
if self.last_seen.is_none() && element.visible() {
self.check_marks();
self.seen += 1;
self.last_seen = element.elemid();
self.seen_at_this_mark = element.elemid();
}
self.pos += 1;
QueryResult::Next
}
}
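Spans reports boundaries instead: each Span gives a position from which a particular (sorted) set of marks is in effect, an empty marks list meaning the text returns to plain formatting at that position. Fragment in the same hypothetical scenario:

for span in doc.spans(&note)? {
    println!("from {}: {:?}", span.pos, span.marks);
}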

View file

@ -1,3 +1,7 @@
use crate::exid::ExId;
use crate::{
decoding, decoding::Decoder, encoding::Encodable, Automerge, AutomergeError, Change, ChangeHash,
};
use itertools::Itertools;
use std::{
borrow::Cow,
@ -6,10 +10,6 @@ use std::{
io::Write,
};
use crate::{
decoding, decoding::Decoder, encoding::Encodable, Automerge, AutomergeError, Change, ChangeHash,
};
mod bloom;
mod state;
@ -97,7 +97,8 @@ impl Automerge {
&mut self,
sync_state: &mut State,
message: Message,
) -> Result<(), AutomergeError> {
) -> Result<Vec<ExId>, AutomergeError> {
let mut result = vec![];
let before_heads = self.get_heads();
let Message {
@ -109,7 +110,7 @@ impl Automerge {
let changes_is_empty = message_changes.is_empty();
if !changes_is_empty {
self.apply_changes(message_changes)?;
result = self.apply_changes(message_changes)?;
sync_state.shared_heads = advance_heads(
&before_heads.iter().collect(),
&self.get_heads().into_iter().collect(),
@ -150,7 +151,7 @@ impl Automerge {
sync_state.their_heads = Some(message_heads);
sync_state.their_need = Some(message_need);
Ok(())
Ok(result)
}
fn make_bloom_filter(&self, last_sync: Vec<ChangeHash>) -> Have {
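With receive_sync_message forwarding the touched objects from apply_changes, a sync loop can report what changed to its caller. A hedged sketch, assuming the sync module is exported as automerge::sync as the AutoCommit signatures above suggest (the handler name is illustrative):

use automerge::{sync, Automerge, AutomergeError};

fn handle_sync_message(
    doc: &mut Automerge,
    state: &mut sync::State,
    msg: sync::Message,
) -> Result<(), AutomergeError> {
    let touched = doc.receive_sync_message(state, msg)?;
    for obj in touched {
        println!("peer changed {:?}", obj);
    }
    Ok(())
}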

View file

@ -152,6 +152,71 @@ impl TransactionInner {
self.operations.push((obj, op));
}
#[allow(clippy::too_many_arguments)]
pub fn mark<O: AsRef<ExId>>(
&mut self,
doc: &mut Automerge,
obj: O,
start: usize,
expand_start: bool,
end: usize,
expand_end: bool,
mark: &str,
value: ScalarValue,
) -> Result<(), AutomergeError> {
let obj = doc.exid_to_obj(obj.as_ref())?;
self.do_insert(
doc,
obj,
start,
OpType::mark(mark.into(), expand_start, value),
)?;
self.do_insert(doc, obj, end, OpType::MarkEnd(expand_end))?;
Ok(())
}
pub fn unmark<O: AsRef<ExId>>(
&mut self,
doc: &mut Automerge,
obj: O,
mark: O,
) -> Result<(), AutomergeError> {
let obj = doc.exid_to_obj(obj.as_ref())?;
let markid = doc.exid_to_obj(mark.as_ref())?.0;
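// note: `mark` is expected to be the ExId of the MarkBegin op (e.g. as reported by
// raw_spans); mark() above allocates the matching MarkEnd with the very next op id,
// which is why `markid.next()` further down addresses it.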
let op1 = Op {
id: self.next_id(),
action: OpType::Del,
key: markid.into(),
succ: Default::default(),
pred: vec![markid],
insert: false,
};
let q1 = doc.ops.search(&obj, query::SeekOp::new(&op1));
for i in q1.succ {
doc.ops.replace(&obj, i, |old_op| old_op.add_succ(&op1));
}
self.operations.push((obj, op1));
let markid = markid.next();
let op2 = Op {
id: self.next_id(),
action: OpType::Del,
key: markid.into(),
succ: Default::default(),
pred: vec![markid],
insert: false,
};
let q2 = doc.ops.search(&obj, query::SeekOp::new(&op2));
for i in q2.succ {
doc.ops.replace(&obj, i, |old_op| old_op.add_succ(&op2));
}
self.operations.push((obj, op2));
Ok(())
}
pub fn insert<V: Into<ScalarValue>>(
&mut self,
doc: &mut Automerge,
@ -189,6 +254,7 @@ impl TransactionInner {
let query = doc.ops.search(&obj, query::InsertNth::new(index));
let key = query.key()?;
let is_make = matches!(&action, OpType::Make(_));
let op = Op {

View file

@ -1,6 +1,6 @@
use crate::exid::ExId;
use crate::{Automerge, ChangeHash, KeysAt, ObjType, Prop, ScalarValue, Value};
use crate::{AutomergeError, Keys};
use crate::AutomergeError;
use crate::{query, Automerge, ChangeHash, Keys, KeysAt, ObjType, Prop, ScalarValue, Value};
use super::{CommitOptions, Transactable, TransactionInner};
@ -121,6 +121,33 @@ impl<'a> Transactable for Transaction<'a> {
.insert(self.doc, obj.as_ref(), index, value)
}
#[allow(clippy::too_many_arguments)]
fn mark<O: AsRef<ExId>>(
&mut self,
obj: O,
start: usize,
expand_start: bool,
end: usize,
expand_end: bool,
mark: &str,
value: ScalarValue,
) -> Result<(), AutomergeError> {
self.inner.as_mut().unwrap().mark(
self.doc,
obj,
start,
expand_start,
end,
expand_end,
mark,
value,
)
}
fn unmark<O: AsRef<ExId>>(&mut self, obj: O, mark: O) -> Result<(), AutomergeError> {
self.inner.as_mut().unwrap().unmark(self.doc, obj, mark)
}
fn insert_object(
&mut self,
obj: &ExId,
@ -203,6 +230,44 @@ impl<'a> Transactable for Transaction<'a> {
self.doc.text_at(obj, heads)
}
fn list<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<(Value, ExId)>, AutomergeError> {
self.doc.list(obj)
}
fn list_at<O: AsRef<ExId>>(
&self,
obj: O,
heads: &[ChangeHash],
) -> Result<Vec<(Value, ExId)>, AutomergeError> {
self.doc.list_at(obj, heads)
}
fn spans<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<query::Span>, AutomergeError> {
self.doc.spans(obj)
}
fn raw_spans<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<query::SpanInfo>, AutomergeError> {
self.doc.raw_spans(obj)
}
fn attribute<O: AsRef<ExId>>(
&self,
obj: O,
baseline: &[ChangeHash],
change_sets: &[Vec<ChangeHash>],
) -> Result<Vec<query::ChangeSet>, AutomergeError> {
self.doc.attribute(obj, baseline, change_sets)
}
fn attribute2<O: AsRef<ExId>>(
&self,
obj: O,
baseline: &[ChangeHash],
change_sets: &[Vec<ChangeHash>],
) -> Result<Vec<query::ChangeSet2>, AutomergeError> {
self.doc.attribute2(obj, baseline, change_sets)
}
fn value<O: AsRef<ExId>, P: Into<Prop>>(
&self,
obj: O,

View file

@ -1,4 +1,5 @@
use crate::exid::ExId;
use crate::query;
use crate::{AutomergeError, ChangeHash, Keys, KeysAt, ObjType, Prop, ScalarValue, Value};
use unicode_segmentation::UnicodeSegmentation;
@ -57,6 +58,21 @@ pub trait Transactable {
object: ObjType,
) -> Result<ExId, AutomergeError>;
/// Set a mark within a range on a list
#[allow(clippy::too_many_arguments)]
fn mark<O: AsRef<ExId>>(
&mut self,
obj: O,
start: usize,
expand_start: bool,
end: usize,
expand_end: bool,
mark: &str,
value: ScalarValue,
) -> Result<(), AutomergeError>;
fn unmark<O: AsRef<ExId>>(&mut self, obj: O, mark: O) -> Result<(), AutomergeError>;
/// Increment the counter at the prop in the object by `value`.
fn inc<O: AsRef<ExId>, P: Into<Prop>>(
&mut self,
@ -115,6 +131,38 @@ pub trait Transactable {
heads: &[ChangeHash],
) -> Result<String, AutomergeError>;
/// Get the values in the given list object.
fn list<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<(Value, ExId)>, AutomergeError>;
/// Get the values in the given list object at a point in history.
fn list_at<O: AsRef<ExId>>(
&self,
obj: O,
heads: &[ChangeHash],
) -> Result<Vec<(Value, ExId)>, AutomergeError>;
/// test spans api for mark/span experiment
fn spans<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<query::Span>, AutomergeError>;
/// test raw_spans api for mark/span experiment
fn raw_spans<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<query::SpanInfo>, AutomergeError>;
/// test attribute api for mark/span experiment
fn attribute<O: AsRef<ExId>>(
&self,
obj: O,
baseline: &[ChangeHash],
change_sets: &[Vec<ChangeHash>],
) -> Result<Vec<query::ChangeSet>, AutomergeError>;
/// test attribute2 api for mark/span experiment
fn attribute2<O: AsRef<ExId>>(
&self,
obj: O,
baseline: &[ChangeHash],
change_sets: &[Vec<ChangeHash>],
) -> Result<Vec<query::ChangeSet2>, AutomergeError>;
/// Get the value at this prop in the object.
fn value<O: AsRef<ExId>, P: Into<Prop>>(
&self,

View file

@ -171,6 +171,25 @@ pub enum OpType {
Del,
Inc(i64),
Set(ScalarValue),
MarkBegin(MarkData),
MarkEnd(bool),
}
impl OpType {
pub(crate) fn mark(name: String, expand: bool, value: ScalarValue) -> Self {
OpType::MarkBegin(MarkData {
name,
expand,
value,
})
}
}
#[derive(PartialEq, Debug, Clone)]
pub struct MarkData {
pub name: String,
pub value: ScalarValue,
pub expand: bool,
}
impl From<ObjType> for OpType {
@ -205,6 +224,14 @@ impl OpId {
pub fn actor(&self) -> usize {
self.1
}
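// prev()/next() step between the consecutive op ids that pair a MarkBegin with its
// MarkEnd: unmark() walks forward from the begin op, RawSpans walks back from the
// end op.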
#[inline]
pub fn prev(&self) -> OpId {
OpId(self.0 - 1, self.1)
}
#[inline]
pub fn next(&self) -> OpId {
OpId(self.0 + 1, self.1)
}
}
impl Exportable for ObjId {
@ -396,7 +423,7 @@ impl Op {
}
pub fn visible(&self) -> bool {
if self.is_inc() {
if self.is_inc() || self.is_mark() {
false
} else if self.is_counter() {
self.succ.len() <= self.incs()
@ -421,6 +448,18 @@ impl Op {
matches!(&self.action, OpType::Inc(_))
}
pub fn valid_mark_anchor(&self) -> bool {
self.succ.is_empty()
&& matches!(
&self.action,
OpType::MarkBegin(MarkData { expand: true, .. }) | OpType::MarkEnd(false)
)
}
pub fn is_mark(&self) -> bool {
matches!(&self.action, OpType::MarkBegin(_) | OpType::MarkEnd(_))
}
pub fn is_counter(&self) -> bool {
matches!(&self.action, OpType::Set(ScalarValue::Counter(_)))
}
@ -441,6 +480,13 @@ impl Op {
}
}
pub fn as_string(&self) -> Option<String> {
match &self.action {
OpType::Set(scalar) => scalar.as_string(),
_ => None,
}
}
pub fn value(&self) -> Value {
match &self.action {
OpType::Make(obj_type) => Value::Object(*obj_type),
@ -455,6 +501,8 @@ impl Op {
OpType::Set(value) if self.insert => format!("i:{}", value),
OpType::Set(value) => format!("s:{}", value),
OpType::Make(obj) => format!("make{}", obj),
OpType::MarkBegin(m) => format!("mark{}={}", m.name, m.value),
OpType::MarkEnd(_) => "unmark".into(),
OpType::Inc(val) => format!("inc:{}", val),
OpType::Del => "del".to_string(),
}

View file

@ -11,6 +11,13 @@ pub enum Value {
}
impl Value {
pub fn as_string(&self) -> Option<String> {
match self {
Value::Scalar(val) => val.as_string(),
_ => None,
}
}
pub fn map() -> Value {
Value::Object(ObjType::Map)
}
@ -591,6 +598,13 @@ impl ScalarValue {
}
}
pub fn as_string(&self) -> Option<String> {
match self {
ScalarValue::Str(s) => Some(s.to_string()),
_ => None,
}
}
pub fn counter(n: i64) -> ScalarValue {
ScalarValue::Counter(n.into())
}

View file

@ -238,6 +238,8 @@ impl OpTableRow {
crate::OpType::Set(v) => format!("set {}", v),
crate::OpType::Make(obj) => format!("make {}", obj),
crate::OpType::Inc(v) => format!("inc {}", v),
crate::OpType::MarkBegin(v) => format!("mark {}={}", v.name, v.value),
crate::OpType::MarkEnd(v) => format!("/mark {}", v),
};
let prop = match op.key {
crate::types::Key::Map(k) => metadata.props[k].clone(),

View file

@ -0,0 +1,39 @@
use automerge::transaction::Transactable;
use automerge::{AutoCommit, AutomergeError, ROOT};
/*
mod helpers;
use helpers::{
pretty_print, realize, realize_obj,
RealizedObject,
};
*/
#[test]
fn simple_attribute_text() -> Result<(), AutomergeError> {
let mut doc = AutoCommit::new();
let note = doc.set_object(&ROOT, "note", automerge::ObjType::Text)?;
doc.splice_text(&note, 0, 0, "hello little world")?;
let baseline = doc.get_heads();
assert!(doc.text(&note).unwrap() == "hello little world");
let mut doc2 = doc.fork();
doc2.splice_text(&note, 5, 7, " big")?;
let h2 = doc2.get_heads();
assert!(doc2.text(&note)? == "hello big world");
let mut doc3 = doc.fork();
doc3.splice_text(&note, 0, 0, "Well, ")?;
let h3 = doc3.get_heads();
assert!(doc3.text(&note)? == "Well, hello little world");
doc.merge(&mut doc2)?;
doc.merge(&mut doc3)?;
let text = doc.text(&note)?;
assert!(text == "Well, hello big world");
let cs = vec![h2, h3];
let attribute = doc.attribute(&note, &baseline, &cs)?;
assert!(&text[attribute[0].add[0].clone()] == " big");
assert!(attribute[0].del[0] == (15, " little".to_owned()));
//println!("{:?} == {:?}", attribute[0].del[0] , (15, " little".to_owned()));
assert!(&text[attribute[1].add[0].clone()] == "Well, ");
//println!("- ------- attribute = {:?}", attribute);
Ok(())
}
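A companion sketch in the same style, exercising mark and spans instead of attribute; ScalarValue::Boolean and the exact boundary positions are assumptions, so the assertion is kept loose:

#[test]
fn simple_mark_spans_sketch() -> Result<(), AutomergeError> {
    let mut doc = AutoCommit::new();
    let note = doc.set_object(&ROOT, "note", automerge::ObjType::Text)?;
    doc.splice_text(&note, 0, 0, "hello world")?;
    // bold "hello"; the two bools are expand_start / expand_end
    doc.mark(&note, 0, true, 5, false, "bold", automerge::ScalarValue::Boolean(true))?;
    let spans = doc.spans(&note)?;
    // expect at least one boundary where the "bold" mark is active
    assert!(spans
        .iter()
        .any(|s| s.marks.iter().any(|(name, _)| name.as_str() == "bold")));
    Ok(())
}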

View file

@ -7,9 +7,9 @@ yarn --cwd $WASM_PROJECT install;
yarn --cwd $WASM_PROJECT build;
# If the dependencies are already installed we delete automerge-wasm. This makes
# this script usable for iterative development.
if [ -d $JS_PROJECT/node_modules/automerge-wasm ]; then
rm -rf $JS_PROJECT/node_modules/automerge-wasm
fi
#if [ -d $JS_PROJECT/node_modules/automerge-wasm ]; then
# rm -rf $JS_PROJECT/node_modules/automerge-wasm
#fi
# --check-files forces yarn to check if the local dep has changed
yarn --cwd $JS_PROJECT install --check-files;
yarn --cwd $JS_PROJECT test;