diff --git a/.github/workflows/advisory-cron.yaml b/.github/workflows/advisory-cron.yaml
index 31bac5a3..90923191 100644
--- a/.github/workflows/advisory-cron.yaml
+++ b/.github/workflows/advisory-cron.yaml
@@ -1,4 +1,4 @@
-name: Advisories
+name: ci
on:
schedule:
- cron: '0 18 * * *'
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 8519ac5e..9a9753d0 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -1,11 +1,11 @@
-name: CI
+name: ci
on:
push:
branches:
- - main
+ - experiment
pull_request:
branches:
- - main
+ - experiment
jobs:
fmt:
runs-on: ubuntu-latest
@@ -14,8 +14,7 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- toolchain: 1.67.0
- default: true
+ toolchain: stable
components: rustfmt
- uses: Swatinem/rust-cache@v1
- run: ./scripts/ci/fmt
@@ -28,8 +27,7 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- toolchain: 1.67.0
- default: true
+ toolchain: stable
components: clippy
- uses: Swatinem/rust-cache@v1
- run: ./scripts/ci/lint
@@ -42,14 +40,9 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- toolchain: 1.67.0
- default: true
+ toolchain: stable
- uses: Swatinem/rust-cache@v1
- - name: Build rust docs
- run: ./scripts/ci/rust-docs
- shell: bash
- - name: Install doxygen
- run: sudo apt-get install -y doxygen
+ - run: ./scripts/ci/docs
shell: bash
cargo-deny:
@@ -64,88 +57,40 @@ jobs:
- uses: actions/checkout@v2
- uses: EmbarkStudios/cargo-deny-action@v1
with:
- arguments: '--manifest-path ./rust/Cargo.toml'
command: check ${{ matrix.checks }}
wasm_tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- - name: Install wasm-bindgen-cli
- run: cargo install wasm-bindgen-cli wasm-opt
- - name: Install wasm32 target
- run: rustup target add wasm32-unknown-unknown
+ - name: Install wasm-pack
+ run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
- name: run tests
run: ./scripts/ci/wasm_tests
- deno_tests:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - uses: denoland/setup-deno@v1
- with:
- deno-version: v1.x
- - name: Install wasm-bindgen-cli
- run: cargo install wasm-bindgen-cli wasm-opt
- - name: Install wasm32 target
- run: rustup target add wasm32-unknown-unknown
- - name: run tests
- run: ./scripts/ci/deno_tests
-
- js_fmt:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - name: install
- run: yarn global add prettier
- - name: format
- run: prettier -c javascript/.prettierrc javascript
js_tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- - name: Install wasm-bindgen-cli
- run: cargo install wasm-bindgen-cli wasm-opt
- - name: Install wasm32 target
- run: rustup target add wasm32-unknown-unknown
+ - name: Install wasm-pack
+ run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
- name: run tests
run: ./scripts/ci/js_tests
- cmake_build:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - uses: actions-rs/toolchain@v1
- with:
- profile: minimal
- toolchain: nightly-2023-01-26
- default: true
- - uses: Swatinem/rust-cache@v1
- - name: Install CMocka
- run: sudo apt-get install -y libcmocka-dev
- - name: Install/update CMake
- uses: jwlawson/actions-setup-cmake@v1.12
- with:
- cmake-version: latest
- - name: Install rust-src
- run: rustup component add rust-src
- - name: Build and test C bindings
- run: ./scripts/ci/cmake-build Release Static
- shell: bash
-
linux:
runs-on: ubuntu-latest
strategy:
matrix:
toolchain:
- - 1.67.0
+ - stable
+ - nightly
+ continue-on-error: ${{ matrix.toolchain == 'nightly' }}
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: ${{ matrix.toolchain }}
- default: true
- uses: Swatinem/rust-cache@v1
- run: ./scripts/ci/build-test
shell: bash
@@ -157,8 +102,7 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- toolchain: 1.67.0
- default: true
+ toolchain: stable
- uses: Swatinem/rust-cache@v1
- run: ./scripts/ci/build-test
shell: bash
@@ -170,8 +114,8 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- toolchain: 1.67.0
- default: true
+ toolchain: stable
- uses: Swatinem/rust-cache@v1
- run: ./scripts/ci/build-test
shell: bash
+
diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml
deleted file mode 100644
index b501d526..00000000
--- a/.github/workflows/docs.yaml
+++ /dev/null
@@ -1,52 +0,0 @@
-on:
- push:
- branches:
- - main
-
-name: Documentation
-
-jobs:
- deploy-docs:
- concurrency: deploy-docs
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
-
- - name: Toolchain
- uses: actions-rs/toolchain@v1
- with:
- profile: minimal
- toolchain: stable
- override: true
-
- - name: Cache
- uses: Swatinem/rust-cache@v1
-
- - name: Clean docs dir
- run: rm -rf docs
- shell: bash
-
- - name: Clean Rust docs dir
- uses: actions-rs/cargo@v1
- with:
- command: clean
- args: --manifest-path ./rust/Cargo.toml --doc
-
- - name: Build Rust docs
- uses: actions-rs/cargo@v1
- with:
- command: doc
- args: --manifest-path ./rust/Cargo.toml --workspace --all-features --no-deps
-
- - name: Move Rust docs
- run: mkdir -p docs && mv rust/target/doc/* docs/.
- shell: bash
-
- - name: Configure root page
- run: echo '' > docs/index.html
-
- - name: Deploy docs
- uses: peaceiris/actions-gh-pages@v3
- with:
- github_token: ${{ secrets.GITHUB_TOKEN }}
- publish_dir: ./docs
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
deleted file mode 100644
index 762671ff..00000000
--- a/.github/workflows/release.yaml
+++ /dev/null
@@ -1,214 +0,0 @@
-name: Release
-on:
- push:
- branches:
- - main
-
-jobs:
- check_if_wasm_version_upgraded:
- name: Check if WASM version has been upgraded
- runs-on: ubuntu-latest
- outputs:
- wasm_version: ${{ steps.version-updated.outputs.current-package-version }}
- wasm_has_updated: ${{ steps.version-updated.outputs.has-updated }}
- steps:
- - uses: JiPaix/package-json-updated-action@v1.0.5
- id: version-updated
- with:
- path: rust/automerge-wasm/package.json
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- publish-wasm:
- name: Publish WASM package
- runs-on: ubuntu-latest
- needs:
- - check_if_wasm_version_upgraded
- # We create release only if the version in the package.json has been upgraded
- if: needs.check_if_wasm_version_upgraded.outputs.wasm_has_updated == 'true'
- steps:
- - uses: actions/setup-node@v3
- with:
- node-version: '16.x'
- registry-url: 'https://registry.npmjs.org'
- - uses: denoland/setup-deno@v1
- - uses: actions/checkout@v3
- with:
- fetch-depth: 0
- ref: ${{ github.ref }}
- - name: Get rid of local github workflows
- run: rm -r .github/workflows
- - name: Remove tmp_branch if it exists
- run: git push origin :tmp_branch || true
- - run: git checkout -b tmp_branch
- - name: Install wasm-bindgen-cli
- run: cargo install wasm-bindgen-cli wasm-opt
- - name: Install wasm32 target
- run: rustup target add wasm32-unknown-unknown
- - name: run wasm js tests
- id: wasm_js_tests
- run: ./scripts/ci/wasm_tests
- - name: run wasm deno tests
- id: wasm_deno_tests
- run: ./scripts/ci/deno_tests
- - name: build release
- id: build_release
- run: |
- npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm run release
- - name: Collate deno release files
- if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
- run: |
- mkdir $GITHUB_WORKSPACE/deno_wasm_dist
- cp $GITHUB_WORKSPACE/rust/automerge-wasm/deno/* $GITHUB_WORKSPACE/deno_wasm_dist
- cp $GITHUB_WORKSPACE/rust/automerge-wasm/index.d.ts $GITHUB_WORKSPACE/deno_wasm_dist
- cp $GITHUB_WORKSPACE/rust/automerge-wasm/README.md $GITHUB_WORKSPACE/deno_wasm_dist
- cp $GITHUB_WORKSPACE/rust/automerge-wasm/LICENSE $GITHUB_WORKSPACE/deno_wasm_dist
- sed -i '1i /// ' $GITHUB_WORKSPACE/deno_wasm_dist/automerge_wasm.js
- - name: Create npm release
- if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
- run: |
- if [ "$(npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm show . version)" = "$VERSION" ]; then
- echo "This version is already published"
- exit 0
- fi
- EXTRA_ARGS="--access public"
- if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then
- echo "Is pre-release version"
- EXTRA_ARGS="$EXTRA_ARGS --tag next"
- fi
- if [ "$NODE_AUTH_TOKEN" = "" ]; then
- echo "Can't publish on NPM, You need a NPM_TOKEN secret."
- false
- fi
- npm publish $GITHUB_WORKSPACE/rust/automerge-wasm $EXTRA_ARGS
- env:
- NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
- VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
- - name: Commit wasm deno release files
- run: |
- git config --global user.name "actions"
- git config --global user.email actions@github.com
- git add $GITHUB_WORKSPACE/deno_wasm_dist
- git commit -am "Add deno release files"
- git push origin tmp_branch
- - name: Tag wasm release
- if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
- uses: softprops/action-gh-release@v1
- with:
- name: Automerge Wasm v${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
- tag_name: js/automerge-wasm-${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
- target_commitish: tmp_branch
- generate_release_notes: false
- draft: false
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- - name: Remove tmp_branch
- run: git push origin :tmp_branch
- check_if_js_version_upgraded:
- name: Check if JS version has been upgraded
- runs-on: ubuntu-latest
- outputs:
- js_version: ${{ steps.version-updated.outputs.current-package-version }}
- js_has_updated: ${{ steps.version-updated.outputs.has-updated }}
- steps:
- - uses: JiPaix/package-json-updated-action@v1.0.5
- id: version-updated
- with:
- path: javascript/package.json
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- publish-js:
- name: Publish JS package
- runs-on: ubuntu-latest
- needs:
- - check_if_js_version_upgraded
- - check_if_wasm_version_upgraded
- - publish-wasm
- # We create release only if the version in the package.json has been upgraded and after the WASM release
- if: |
- (always() && ! cancelled()) &&
- (needs.publish-wasm.result == 'success' || needs.publish-wasm.result == 'skipped') &&
- needs.check_if_js_version_upgraded.outputs.js_has_updated == 'true'
- steps:
- - uses: actions/setup-node@v3
- with:
- node-version: '16.x'
- registry-url: 'https://registry.npmjs.org'
- - uses: denoland/setup-deno@v1
- - uses: actions/checkout@v3
- with:
- fetch-depth: 0
- ref: ${{ github.ref }}
- - name: Get rid of local github workflows
- run: rm -r .github/workflows
- - name: Remove js_tmp_branch if it exists
- run: git push origin :js_tmp_branch || true
- - run: git checkout -b js_tmp_branch
- - name: check js formatting
- run: |
- yarn global add prettier
- prettier -c javascript/.prettierrc javascript
- - name: run js tests
- id: js_tests
- run: |
- cargo install wasm-bindgen-cli wasm-opt
- rustup target add wasm32-unknown-unknown
- ./scripts/ci/js_tests
- - name: build js release
- id: build_release
- run: |
- npm --prefix $GITHUB_WORKSPACE/javascript run build
- - name: build js deno release
- id: build_deno_release
- run: |
- VERSION=$WASM_VERSION npm --prefix $GITHUB_WORKSPACE/javascript run deno:build
- env:
- WASM_VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
- - name: run deno tests
- id: deno_tests
- run: |
- npm --prefix $GITHUB_WORKSPACE/javascript run deno:test
- - name: Collate deno release files
- if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
- run: |
- mkdir $GITHUB_WORKSPACE/deno_js_dist
- cp $GITHUB_WORKSPACE/javascript/deno_dist/* $GITHUB_WORKSPACE/deno_js_dist
- - name: Create npm release
- if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
- run: |
- if [ "$(npm --prefix $GITHUB_WORKSPACE/javascript show . version)" = "$VERSION" ]; then
- echo "This version is already published"
- exit 0
- fi
- EXTRA_ARGS="--access public"
- if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then
- echo "Is pre-release version"
- EXTRA_ARGS="$EXTRA_ARGS --tag next"
- fi
- if [ "$NODE_AUTH_TOKEN" = "" ]; then
- echo "Can't publish on NPM, You need a NPM_TOKEN secret."
- false
- fi
- npm publish $GITHUB_WORKSPACE/javascript $EXTRA_ARGS
- env:
- NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
- VERSION: ${{ needs.check_if_js_version_upgraded.outputs.js_version }}
- - name: Commit js deno release files
- run: |
- git config --global user.name "actions"
- git config --global user.email actions@github.com
- git add $GITHUB_WORKSPACE/deno_js_dist
- git commit -am "Add deno js release files"
- git push origin js_tmp_branch
- - name: Tag JS release
- if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
- uses: softprops/action-gh-release@v1
- with:
- name: Automerge v${{ needs.check_if_js_version_upgraded.outputs.js_version }}
- tag_name: js/automerge-${{ needs.check_if_js_version_upgraded.outputs.js_version }}
- target_commitish: js_tmp_branch
- generate_release_notes: false
- draft: false
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- - name: Remove js_tmp_branch
- run: git push origin :js_tmp_branch
diff --git a/.gitignore b/.gitignore
index f77865d0..95d3d639 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,6 +1,4 @@
+/target
/.direnv
perf.*
/Cargo.lock
-build/
-.vim/*
-/target
diff --git a/rust/Cargo.toml b/Cargo.toml
similarity index 63%
rename from rust/Cargo.toml
rename to Cargo.toml
index 5d29fc9f..e1941120 100644
--- a/rust/Cargo.toml
+++ b/Cargo.toml
@@ -1,17 +1,15 @@
[workspace]
members = [
"automerge",
- "automerge-c",
- "automerge-cli",
- "automerge-test",
"automerge-wasm",
+ "automerge-cli",
"edit-trace",
]
-resolver = "2"
[profile.release]
+debug = true
lto = true
-codegen-units = 1
+opt-level = 3
[profile.bench]
-debug = true
\ No newline at end of file
+debug = true
diff --git a/Makefile b/Makefile
new file mode 100644
index 00000000..9f8db2d1
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,13 @@
+rust:
+ cd automerge && cargo test
+
+wasm:
+ cd automerge-wasm && yarn
+ cd automerge-wasm && yarn build
+ cd automerge-wasm && yarn test
+ cd automerge-wasm && yarn link
+
+js: wasm
+ cd automerge-js && yarn
+ cd automerge-js && yarn link "automerge-wasm"
+ cd automerge-js && yarn test
diff --git a/README.md b/README.md
index ad174da4..e7a277a8 100644
--- a/README.md
+++ b/README.md
@@ -1,147 +1,81 @@
-# Automerge
+# Automerge - NEXT
-
+This is pretty much a ground up rewrite of automerge-rs. The objective of this
+rewrite is to radically simplify the API. The end goal is to produce a library
+which is easy to work with both in Rust and from FFI.
-[](https://automerge.org/)
-[](https://automerge.org/automerge-rs/automerge/)
-[](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml)
-[](https://github.com/automerge/automerge-rs/actions/workflows/docs.yaml)
+## How?
-Automerge is a library which provides fast implementations of several different
-CRDTs, a compact compression format for these CRDTs, and a sync protocol for
-efficiently transmitting those changes over the network. The objective of the
-project is to support [local-first](https://www.inkandswitch.com/local-first/) applications in the same way that relational
-databases support server applications - by providing mechanisms for persistence
-which allow application developers to avoid thinking about hard distributed
-computing problems. Automerge aims to be PostgreSQL for your local-first app.
+The current iteration of automerge-rs is complicated to work with because it
+adopts the frontend/backend split architecture of the JS implementation. This
+architecture was necessary due to basic operations on the automerge opset being
+too slow to perform on the UI thread. Recently @orionz has been able to improve
+the performance to the point where the split is no longer necessary. This means
+we can adopt a much simpler mutable API.
-If you're looking for documentation on the JavaScript implementation take a look
-at https://automerge.org/docs/hello/. There are other implementations in both
-Rust and C, but they are earlier and don't have documentation yet. You can find
-them in `rust/automerge` and `rust/automerge-c` if you are comfortable
-reading the code and tests to figure out how to use them.
-
-If you're familiar with CRDTs and interested in the design of Automerge in
-particular take a look at https://automerge.org/docs/how-it-works/backend/
-
-Finally, if you want to talk to us about this project please [join the
-Slack](https://join.slack.com/t/automerge/shared_invite/zt-e4p3760n-kKh7r3KRH1YwwNfiZM8ktw)
+The architecture is now built around the `OpTree`. This is a data structure
+which supports efficiently inserting new operations and realising values of
+existing operations. Most interactions with the `OpTree` are in the form of
+implementations of `TreeQuery` - a trait which can be used to traverse the
+optree and produce state of some kind. User-facing operations are exposed on
+an `Automerge` object, under the covers these operations typically instantiate
+some `TreeQuery` and run it over the `OpTree`.
## Status
-This project is formed of a core Rust implementation which is exposed via FFI in
-javascript+WASM, C, and soon other languages. Alex
-([@alexjg](https://github.com/alexjg/)]) is working full time on maintaining
-automerge, other members of Ink and Switch are also contributing time and there
-are several other maintainers. The focus is currently on shipping the new JS
-package. We expect to be iterating the API and adding new features over the next
-six months so there will likely be several major version bumps in all packages
-in that time.
+We have working code which passes all of the tests in the JS test suite. We're
+now working on writing a bunch more tests and cleaning up the API.
-In general we try and respect semver.
+## Development
-### JavaScript
+### Running CI
-A stable release of the javascript package is currently available as
-`@automerge/automerge@2.0.0` where. pre-release verisions of the `2.0.1` are
-available as `2.0.1-alpha.n`. `2.0.1*` packages are also available for Deno at
-https://deno.land/x/automerge
+The steps CI will run are all defined in `./scripts/ci`. Obviously CI will run
+everything when you submit a PR, but if you want to run everything locally
+before you push you can run `./scripts/ci/run` to run everything.
-### Rust
+### Running the JS tests
-The rust codebase is currently oriented around producing a performant backend
-for the Javascript wrapper and as such the API for Rust code is low level and
-not well documented. We will be returning to this over the next few months but
-for now you will need to be comfortable reading the tests and asking questions
-to figure out how to use it. If you are looking to build rust applications which
-use automerge you may want to look into
-[autosurgeon](https://github.com/alexjg/autosurgeon)
+You will need to have [node](https://nodejs.org/en/), [yarn](https://yarnpkg.com/getting-started/install), [rust](https://rustup.rs/) and [wasm-pack](https://rustwasm.github.io/wasm-pack/installer/) installed.
-## Repository Organisation
+To build and test the rust library:
-- `./rust` - the rust rust implementation and also the Rust components of
- platform specific wrappers (e.g. `automerge-wasm` for the WASM API or
- `automerge-c` for the C FFI bindings)
-- `./javascript` - The javascript library which uses `automerge-wasm`
- internally but presents a more idiomatic javascript interface
-- `./scripts` - scripts which are useful to maintenance of the repository.
- This includes the scripts which are run in CI.
-- `./img` - static assets for use in `.md` files
-
-## Building
-
-To build this codebase you will need:
-
-- `rust`
-- `node`
-- `yarn`
-- `cmake`
-- `cmocka`
-
-You will also need to install the following with `cargo install`
-
-- `wasm-bindgen-cli`
-- `wasm-opt`
-- `cargo-deny`
-
-And ensure you have added the `wasm32-unknown-unknown` target for rust cross-compilation.
-
-The various subprojects (the rust code, the wrapper projects) have their own
-build instructions, but to run the tests that will be run in CI you can run
-`./scripts/ci/run`.
-
-### For macOS
-
-These instructions worked to build locally on macOS 13.1 (arm64) as of
-Nov 29th 2022.
-
-```bash
-# clone the repo
-git clone https://github.com/automerge/automerge-rs
-cd automerge-rs
-
-# install rustup
-curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
-
-# install homebrew
-/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
-
-# install cmake, node, cmocka
-brew install cmake node cmocka
-
-# install yarn
-npm install --global yarn
-
-# install javascript dependencies
-yarn --cwd ./javascript
-
-# install rust dependencies
-cargo install wasm-bindgen-cli wasm-opt cargo-deny
-
-# get nightly rust to produce optimized automerge-c builds
-rustup toolchain install nightly
-rustup component add rust-src --toolchain nightly
-
-# add wasm target in addition to current architecture
-rustup target add wasm32-unknown-unknown
-
-# Run ci script
-./scripts/ci/run
+```shell
+ $ cd automerge
+ $ cargo test
```
-If your build fails to find `cmocka.h` you may need to teach it about homebrew's
-installation location:
+To build and test the wasm library:
-```
-export CPATH=/opt/homebrew/include
-export LIBRARY_PATH=/opt/homebrew/lib
-./scripts/ci/run
+```shell
+ ## setup
+ $ cd automerge-wasm
+ $ yarn
+
+ ## building or testing
+ $ yarn build
+ $ yarn test
+
+  ## without this the js library won't automatically use changes
+ $ yarn link
+
+ ## cutting a release or doing benchmarking
+ $ yarn release
+ $ yarn opt ## or set `wasm-opt = false` in Cargo.toml on supported platforms (not arm64 osx)
```
-## Contributing
+And finally to test the js library. This is where most of the tests reside.
-Please try and split your changes up into relatively independent commits which
-change one subsystem at a time and add good commit messages which describe what
-the change is and why you're making it (err on the side of longer commit
-messages). `git blame` should give future maintainers a good idea of why
-something is the way it is.
+```shell
+ ## setup
+ $ cd automerge-js
+ $ yarn
+ $ yarn link "automerge-wasm"
+
+ ## testing
+ $ yarn test
+```
+
+## Benchmarking
+
+The `edit-trace` folder has the main code for running the edit trace benchmarking.
diff --git a/TODO.md b/TODO.md
new file mode 100644
index 00000000..646c0c20
--- /dev/null
+++ b/TODO.md
@@ -0,0 +1,32 @@
+### next steps:
+ 1. C API
+ 2. port rust command line tool
+ 3. fast load
+
+### ergonomics:
+ 1. value() -> () or something that into's a value
+
+### automerge:
+ 1. single pass (fast) load
+ 2. micro-patches / bare bones observation API / fully hydrated documents
+
+### future:
+ 1. handle columns with unknown data in and out
+ 2. branches with different indexes
+
+### Peritext
+ 1. add mark / remove mark -- type, start/end elemid (inclusive,exclusive)
+ 2. track any formatting ops that start or end on a character
+ 3. ops right before the character, ops right after that character
+ 4. query a single character - character, plus marks that start or end on that character
+ what is its current formatting,
+ what are the ops that include that in their span,
+ None = same as last time, Set( bold, italic ),
+ keep these on index
+ 5. op probably belongs with the start character - possibly packed at the beginning or end of the list
+
+### maybe:
+ 1. tables
+
+### no:
+ 1. cursors
diff --git a/rust/automerge-cli/.gitignore b/automerge-cli/.gitignore
similarity index 100%
rename from rust/automerge-cli/.gitignore
rename to automerge-cli/.gitignore
diff --git a/automerge-cli/Cargo.lock b/automerge-cli/Cargo.lock
new file mode 100644
index 00000000..a330ee89
--- /dev/null
+++ b/automerge-cli/Cargo.lock
@@ -0,0 +1,857 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "adler"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+
+[[package]]
+name = "ansi_term"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "anyhow"
+version = "1.0.55"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "159bb86af3a200e19a068f4224eae4c8bb2d0fa054c7e5d1cacd5cef95e684cd"
+
+[[package]]
+name = "atty"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
+dependencies = [
+ "hermit-abi",
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+
+[[package]]
+name = "automerge"
+version = "0.1.0"
+dependencies = [
+ "flate2",
+ "fxhash",
+ "hex",
+ "itertools",
+ "js-sys",
+ "leb128",
+ "nonzero_ext",
+ "rand",
+ "serde",
+ "sha2",
+ "smol_str",
+ "thiserror",
+ "tinyvec",
+ "tracing",
+ "unicode-segmentation",
+ "uuid",
+ "wasm-bindgen",
+ "web-sys",
+]
+
+[[package]]
+name = "automerge-cli"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "atty",
+ "automerge",
+ "clap",
+ "colored_json",
+ "combine",
+ "duct",
+ "maplit",
+ "serde_json",
+ "thiserror",
+ "tracing-subscriber",
+]
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "block-buffer"
+version = "0.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "bumpalo"
+version = "3.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899"
+
+[[package]]
+name = "byteorder"
+version = "1.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
+
+[[package]]
+name = "bytes"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8"
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "clap"
+version = "3.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ced1892c55c910c1219e98d6fc8d71f6bddba7905866ce740066d8bfea859312"
+dependencies = [
+ "atty",
+ "bitflags",
+ "clap_derive",
+ "indexmap",
+ "lazy_static",
+ "os_str_bytes",
+ "strsim",
+ "termcolor",
+ "textwrap",
+]
+
+[[package]]
+name = "clap_derive"
+version = "3.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da95d038ede1a964ce99f49cbe27a7fb538d1da595e4b4f70b8c8f338d17bf16"
+dependencies = [
+ "heck",
+ "proc-macro-error",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "colored_json"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1fd32eb54d016e203b7c2600e3a7802c75843a92e38ccc4869aefeca21771a64"
+dependencies = [
+ "ansi_term",
+ "atty",
+ "libc",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "combine"
+version = "4.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50b727aacc797f9fc28e355d21f34709ac4fc9adecfe470ad07b8f4464f53062"
+dependencies = [
+ "bytes",
+ "memchr",
+]
+
+[[package]]
+name = "cpufeatures"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "crc32fast"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "crypto-common"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57952ca27b5e3606ff4dd79b0020231aaf9d6aa76dc05fd30137538c50bd3ce8"
+dependencies = [
+ "generic-array",
+ "typenum",
+]
+
+[[package]]
+name = "digest"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506"
+dependencies = [
+ "block-buffer",
+ "crypto-common",
+]
+
+[[package]]
+name = "duct"
+version = "0.13.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0fc6a0a59ed0888e0041cf708e66357b7ae1a82f1c67247e1f93b5e0818f7d8d"
+dependencies = [
+ "libc",
+ "once_cell",
+ "os_pipe",
+ "shared_child",
+]
+
+[[package]]
+name = "either"
+version = "1.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
+
+[[package]]
+name = "flate2"
+version = "1.0.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f"
+dependencies = [
+ "cfg-if",
+ "crc32fast",
+ "libc",
+ "miniz_oxide",
+]
+
+[[package]]
+name = "fxhash"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
+dependencies = [
+ "byteorder",
+]
+
+[[package]]
+name = "generic-array"
+version = "0.14.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd48d33ec7f05fbfa152300fdad764757cbded343c1aa1cff2fbaf4134851803"
+dependencies = [
+ "typenum",
+ "version_check",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d39cd93900197114fa1fcb7ae84ca742095eed9442088988ae74fa744e930e77"
+dependencies = [
+ "cfg-if",
+ "js-sys",
+ "libc",
+ "wasi",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
+
+[[package]]
+name = "heck"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9"
+
+[[package]]
+name = "hermit-abi"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "hex"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
+
+[[package]]
+name = "indexmap"
+version = "1.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "282a6247722caba404c065016bbfa522806e51714c34f5dfc3e4a3a46fcb4223"
+dependencies = [
+ "autocfg",
+ "hashbrown",
+]
+
+[[package]]
+name = "itertools"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3"
+dependencies = [
+ "either",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35"
+
+[[package]]
+name = "js-sys"
+version = "0.3.56"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a38fc24e30fd564ce974c02bf1d337caddff65be6cc4735a1f7eab22a7440f04"
+dependencies = [
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
+[[package]]
+name = "leb128"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67"
+
+[[package]]
+name = "libc"
+version = "0.2.119"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1bf2e165bb3457c8e098ea76f3e3bc9db55f87aa90d52d0e6be741470916aaa4"
+
+[[package]]
+name = "log"
+version = "0.4.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "maplit"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d"
+
+[[package]]
+name = "memchr"
+version = "2.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
+
+[[package]]
+name = "miniz_oxide"
+version = "0.4.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b"
+dependencies = [
+ "adler",
+ "autocfg",
+]
+
+[[package]]
+name = "nonzero_ext"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "44a1290799eababa63ea60af0cbc3f03363e328e58f32fb0294798ed3e85f444"
+
+[[package]]
+name = "once_cell"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5"
+
+[[package]]
+name = "os_pipe"
+version = "0.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fb233f06c2307e1f5ce2ecad9f8121cffbbee2c95428f44ea85222e460d0d213"
+dependencies = [
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "os_str_bytes"
+version = "6.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e22443d1643a904602595ba1cd8f7d896afe56d26712531c5ff73a15b2fbf64"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "pin-project-lite"
+version = "0.2.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c"
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872"
+
+[[package]]
+name = "proc-macro-error"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
+dependencies = [
+ "proc-macro-error-attr",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "version_check",
+]
+
+[[package]]
+name = "proc-macro-error-attr"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "version_check",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.36"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029"
+dependencies = [
+ "unicode-xid",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "rand"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+dependencies = [
+ "libc",
+ "rand_chacha",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+dependencies = [
+ "ppv-lite86",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
+dependencies = [
+ "getrandom",
+]
+
+[[package]]
+name = "ryu"
+version = "1.0.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f"
+
+[[package]]
+name = "serde"
+version = "1.0.136"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.136"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.79"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "sha2"
+version = "0.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "55deaec60f81eefe3cce0dc50bda92d6d8e88f2a27df7c5033b42afeb1ed2676"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "digest",
+]
+
+[[package]]
+name = "sharded-slab"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31"
+dependencies = [
+ "lazy_static",
+]
+
+[[package]]
+name = "shared_child"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6be9f7d5565b1483af3e72975e2dee33879b3b86bd48c0929fccf6585d79e65a"
+dependencies = [
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "smallvec"
+version = "1.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83"
+
+[[package]]
+name = "smol_str"
+version = "0.1.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "61d15c83e300cce35b7c8cd39ff567c1ef42dde6d4a1a38dbdbf9a59902261bd"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "strsim"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
+
+[[package]]
+name = "syn"
+version = "1.0.86"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-xid",
+]
+
+[[package]]
+name = "termcolor"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "textwrap"
+version = "0.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb"
+
+[[package]]
+name = "thiserror"
+version = "1.0.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417"
+dependencies = [
+ "thiserror-impl",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "1.0.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "thread_local"
+version = "1.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180"
+dependencies = [
+ "once_cell",
+]
+
+[[package]]
+name = "tinyvec"
+version = "1.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2c1c1d5a42b6245520c249549ec267180beaffcc0615401ac8e31853d4b6d8d2"
+dependencies = [
+ "tinyvec_macros",
+]
+
+[[package]]
+name = "tinyvec_macros"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
+
+[[package]]
+name = "tracing"
+version = "0.1.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6c650a8ef0cd2dd93736f033d21cbd1224c5a967aa0c258d00fcf7dafef9b9f"
+dependencies = [
+ "cfg-if",
+ "log",
+ "pin-project-lite",
+ "tracing-attributes",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-attributes"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8276d9a4a3a558d7b7ad5303ad50b53d58264641b82914b7ada36bd762e7a716"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "tracing-core"
+version = "0.1.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "03cfcb51380632a72d3111cb8d3447a8d908e577d31beeac006f836383d29a23"
+dependencies = [
+ "lazy_static",
+ "valuable",
+]
+
+[[package]]
+name = "tracing-log"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a6923477a48e41c1951f1999ef8bb5a3023eb723ceadafe78ffb65dc366761e3"
+dependencies = [
+ "lazy_static",
+ "log",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-subscriber"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9e0ab7bdc962035a87fba73f3acca9b8a8d0034c2e6f60b84aeaaddddc155dce"
+dependencies = [
+ "ansi_term",
+ "sharded-slab",
+ "smallvec",
+ "thread_local",
+ "tracing-core",
+ "tracing-log",
+]
+
+[[package]]
+name = "typenum"
+version = "1.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
+
+[[package]]
+name = "unicode-segmentation"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99"
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"
+
+[[package]]
+name = "uuid"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7"
+dependencies = [
+ "getrandom",
+ "serde",
+]
+
+[[package]]
+name = "valuable"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
+
+[[package]]
+name = "version_check"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+
+[[package]]
+name = "wasi"
+version = "0.10.2+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
+
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.79"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "25f1af7423d8588a3d840681122e72e6a24ddbcb3f0ec385cac0d12d24256c06"
+dependencies = [
+ "cfg-if",
+ "wasm-bindgen-macro",
+]
+
+[[package]]
+name = "wasm-bindgen-backend"
+version = "0.2.79"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b21c0df030f5a177f3cba22e9bc4322695ec43e7257d865302900290bcdedca"
+dependencies = [
+ "bumpalo",
+ "lazy_static",
+ "log",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.79"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2f4203d69e40a52ee523b2529a773d5ffc1dc0071801c87b3d270b471b80ed01"
+dependencies = [
+ "quote",
+ "wasm-bindgen-macro-support",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.79"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bfa8a30d46208db204854cadbb5d4baf5fcf8071ba5bf48190c3e59937962ebc"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-backend",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.79"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3d958d035c4438e28c70e4321a2911302f10135ce78a9c7834c0cab4123d06a2"
+
+[[package]]
+name = "web-sys"
+version = "0.3.56"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c060b319f29dd25724f09a2ba1418f142f539b2be99fbf4d2d5a8f7330afb8eb"
+dependencies = [
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-util"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
diff --git a/rust/automerge-cli/Cargo.toml b/automerge-cli/Cargo.toml
similarity index 76%
rename from rust/automerge-cli/Cargo.toml
rename to automerge-cli/Cargo.toml
index 430090a6..38dec0e6 100644
--- a/rust/automerge-cli/Cargo.toml
+++ b/automerge-cli/Cargo.toml
@@ -4,7 +4,6 @@ version = "0.1.0"
authors = ["Alex Good "]
edition = "2018"
license = "MIT"
-rust-version = "1.57.0"
[[bin]]
name = "automerge"
@@ -13,18 +12,17 @@ bench = false
doc = false
[dependencies]
-clap = {version = "~4", features = ["derive"]}
+clap = {version = "~3.1", features = ["derive"]}
serde_json = "^1.0"
anyhow = "1.0"
+atty = "^0.2"
thiserror = "^1.0"
combine = "^4.5"
maplit = "^1.0"
+colored_json = "^2.1"
tracing-subscriber = "~0.3"
automerge = { path = "../automerge" }
-is-terminal = "0.4.1"
-termcolor = "1.1.3"
-serde = "1.0.150"
[dev-dependencies]
duct = "^0.13"
diff --git a/rust/automerge-cli/IDEAS.md b/automerge-cli/IDEAS.md
similarity index 100%
rename from rust/automerge-cli/IDEAS.md
rename to automerge-cli/IDEAS.md
diff --git a/rust/automerge-cli/src/change.rs b/automerge-cli/src/change.rs
similarity index 100%
rename from rust/automerge-cli/src/change.rs
rename to automerge-cli/src/change.rs
diff --git a/rust/automerge-cli/src/examine.rs b/automerge-cli/src/examine.rs
similarity index 77%
rename from rust/automerge-cli/src/examine.rs
rename to automerge-cli/src/examine.rs
index 0ee102fb..010fa0f1 100644
--- a/rust/automerge-cli/src/examine.rs
+++ b/automerge-cli/src/examine.rs
@@ -1,8 +1,6 @@
use automerge as am;
use thiserror::Error;
-use crate::{color_json::print_colored_json, SkipVerifyFlag};
-
#[derive(Error, Debug)]
pub enum ExamineError {
#[error("Error reading change file: {:?}", source)]
@@ -22,28 +20,21 @@ pub enum ExamineError {
},
}
-pub(crate) fn examine(
+pub fn examine(
mut input: impl std::io::Read,
mut output: impl std::io::Write,
- skip: SkipVerifyFlag,
is_tty: bool,
) -> Result<(), ExamineError> {
let mut buf: Vec = Vec::new();
input
.read_to_end(&mut buf)
.map_err(|e| ExamineError::ReadingChanges { source: e })?;
- let doc = skip
- .load(&buf)
+ let doc = am::Automerge::load(&buf)
.map_err(|e| ExamineError::ApplyingInitialChanges { source: e })?;
- let uncompressed_changes: Vec<_> = doc
- .get_changes(&[])
- .unwrap()
- .iter()
- .map(|c| c.decode())
- .collect();
+ let uncompressed_changes: Vec<_> = doc.get_changes(&[]).iter().map(|c| c.decode()).collect();
if is_tty {
let json_changes = serde_json::to_value(uncompressed_changes).unwrap();
- print_colored_json(&json_changes).unwrap();
+ colored_json::write_colored_json(&json_changes, &mut output).unwrap();
writeln!(output).unwrap();
} else {
let json_changes = serde_json::to_string_pretty(&uncompressed_changes).unwrap();
diff --git a/rust/automerge-cli/src/export.rs b/automerge-cli/src/export.rs
similarity index 81%
rename from rust/automerge-cli/src/export.rs
rename to automerge-cli/src/export.rs
index 45f39101..7b0be98e 100644
--- a/rust/automerge-cli/src/export.rs
+++ b/automerge-cli/src/export.rs
@@ -1,14 +1,11 @@
use anyhow::Result;
use automerge as am;
-use automerge::ReadDoc;
-
-use crate::{color_json::print_colored_json, SkipVerifyFlag};
pub(crate) fn map_to_json(doc: &am::Automerge, obj: &am::ObjId) -> serde_json::Value {
let keys = doc.keys(obj);
let mut map = serde_json::Map::new();
for k in keys {
- let val = doc.get(obj, &k);
+ let val = doc.value(obj, &k);
match val {
Ok(Some((am::Value::Object(o), exid)))
if o == am::ObjType::Map || o == am::ObjType::Table =>
@@ -31,7 +28,7 @@ fn list_to_json(doc: &am::Automerge, obj: &am::ObjId) -> serde_json::Value {
let len = doc.length(obj);
let mut array = Vec::new();
for i in 0..len {
- let val = doc.get(obj, i);
+ let val = doc.value(obj, i as usize);
match val {
Ok(Some((am::Value::Object(o), exid)))
if o == am::ObjType::Map || o == am::ObjType::Table =>
@@ -53,13 +50,11 @@ fn list_to_json(doc: &am::Automerge, obj: &am::ObjId) -> serde_json::Value {
fn scalar_to_json(val: &am::ScalarValue) -> serde_json::Value {
match val {
am::ScalarValue::Str(s) => serde_json::Value::String(s.to_string()),
- am::ScalarValue::Bytes(b) | am::ScalarValue::Unknown { bytes: b, .. } => {
- serde_json::Value::Array(
- b.iter()
- .map(|byte| serde_json::Value::Number((*byte).into()))
- .collect(),
- )
- }
+ am::ScalarValue::Bytes(b) => serde_json::Value::Array(
+ b.iter()
+ .map(|byte| serde_json::Value::Number((*byte).into()))
+ .collect(),
+ ),
am::ScalarValue::Int(n) => serde_json::Value::Number((*n).into()),
am::ScalarValue::Uint(n) => serde_json::Value::Number((*n).into()),
am::ScalarValue::F64(n) => serde_json::Number::from_f64(*n)
@@ -72,23 +67,22 @@ fn scalar_to_json(val: &am::ScalarValue) -> serde_json::Value {
}
}
-fn get_state_json(input_data: Vec, skip: SkipVerifyFlag) -> Result {
- let doc = skip.load(&input_data).unwrap(); // FIXME
+fn get_state_json(input_data: Vec) -> Result {
+ let doc = am::Automerge::load(&input_data).unwrap(); // FIXME
Ok(map_to_json(&doc, &am::ObjId::Root))
}
-pub(crate) fn export_json(
+pub fn export_json(
mut changes_reader: impl std::io::Read,
mut writer: impl std::io::Write,
- skip: SkipVerifyFlag,
is_tty: bool,
) -> Result<()> {
let mut input_data = vec![];
changes_reader.read_to_end(&mut input_data)?;
- let state_json = get_state_json(input_data, skip)?;
+ let state_json = get_state_json(input_data)?;
if is_tty {
- print_colored_json(&state_json).unwrap();
+ colored_json::write_colored_json(&state_json, &mut writer).unwrap();
writeln!(writer).unwrap();
} else {
writeln!(
@@ -107,10 +101,7 @@ mod tests {
#[test]
fn cli_export_with_empty_input() {
- assert_eq!(
- get_state_json(vec![], Default::default()).unwrap(),
- serde_json::json!({})
- )
+ assert_eq!(get_state_json(vec![]).unwrap(), serde_json::json!({}))
}
#[test]
@@ -124,7 +115,7 @@ mod tests {
let mut backend = initialize_from_json(&initial_state_json).unwrap();
let change_bytes = backend.save();
assert_eq!(
- get_state_json(change_bytes, Default::default()).unwrap(),
+ get_state_json(change_bytes).unwrap(),
serde_json::json!({"sparrows": 15.0})
)
}
@@ -151,7 +142,7 @@ mod tests {
*/
let change_bytes = backend.save();
assert_eq!(
- get_state_json(change_bytes, Default::default()).unwrap(),
+ get_state_json(change_bytes).unwrap(),
serde_json::json!({
"birds": {
"wrens": 3.0,
diff --git a/rust/automerge-cli/src/import.rs b/automerge-cli/src/import.rs
similarity index 87%
rename from rust/automerge-cli/src/import.rs
rename to automerge-cli/src/import.rs
index a9556071..9f9a3210 100644
--- a/rust/automerge-cli/src/import.rs
+++ b/automerge-cli/src/import.rs
@@ -22,31 +22,31 @@ fn import_map(
for (key, value) in map {
match value {
serde_json::Value::Null => {
- doc.put(obj, key, ())?;
+ doc.set(obj, key, ())?;
}
serde_json::Value::Bool(b) => {
- doc.put(obj, key, *b)?;
+ doc.set(obj, key, *b)?;
}
serde_json::Value::String(s) => {
- doc.put(obj, key, s)?;
+ doc.set(obj, key, s.as_ref())?;
}
serde_json::Value::Array(vec) => {
- let id = doc.put_object(obj, key, am::ObjType::List)?;
+ let id = doc.set_object(obj, key, am::ObjType::List)?;
import_list(doc, &id, vec)?;
}
serde_json::Value::Number(n) => {
if let Some(m) = n.as_i64() {
- doc.put(obj, key, m)?;
+ doc.set(obj, key, m)?;
} else if let Some(m) = n.as_u64() {
- doc.put(obj, key, m)?;
+ doc.set(obj, key, m)?;
} else if let Some(m) = n.as_f64() {
- doc.put(obj, key, m)?;
+ doc.set(obj, key, m)?;
} else {
anyhow::bail!("not a number");
}
}
serde_json::Value::Object(map) => {
- let id = doc.put_object(obj, key, am::ObjType::Map)?;
+ let id = doc.set_object(obj, key, am::ObjType::Map)?;
import_map(doc, &id, map)?;
}
}
@@ -68,7 +68,7 @@ fn import_list(
doc.insert(obj, i, *b)?;
}
serde_json::Value::String(s) => {
- doc.insert(obj, i, s)?;
+ doc.insert(obj, i, s.as_ref())?;
}
serde_json::Value::Array(vec) => {
let id = doc.insert_object(obj, i, am::ObjType::List)?;
diff --git a/rust/automerge-cli/src/main.rs b/automerge-cli/src/main.rs
similarity index 60%
rename from rust/automerge-cli/src/main.rs
rename to automerge-cli/src/main.rs
index 8f3f816d..ffc13012 100644
--- a/rust/automerge-cli/src/main.rs
+++ b/automerge-cli/src/main.rs
@@ -1,15 +1,10 @@
use std::{fs::File, path::PathBuf, str::FromStr};
use anyhow::{anyhow, Result};
-use clap::{
- builder::{BoolishValueParser, TypedValueParser, ValueParserFactory},
- Parser,
-};
-use is_terminal::IsTerminal;
+use clap::Parser;
-mod color_json;
+//mod change;
mod examine;
-mod examine_sync;
mod export;
mod import;
mod merge;
@@ -21,50 +16,12 @@ struct Opts {
cmd: Command,
}
-#[derive(clap::ValueEnum, Clone, Debug)]
+#[derive(Debug)]
enum ExportFormat {
Json,
Toml,
}
-#[derive(Copy, Clone, Default, Debug)]
-pub(crate) struct SkipVerifyFlag(bool);
-
-impl SkipVerifyFlag {
- fn load(&self, buf: &[u8]) -> Result {
- if self.0 {
- automerge::Automerge::load(buf)
- } else {
- automerge::Automerge::load_unverified_heads(buf)
- }
- }
-}
-
-#[derive(Clone)]
-struct SkipVerifyFlagParser;
-impl ValueParserFactory for SkipVerifyFlag {
- type Parser = SkipVerifyFlagParser;
-
- fn value_parser() -> Self::Parser {
- SkipVerifyFlagParser
- }
-}
-
-impl TypedValueParser for SkipVerifyFlagParser {
- type Value = SkipVerifyFlag;
-
- fn parse_ref(
- &self,
- cmd: &clap::Command,
- arg: Option<&clap::Arg>,
- value: &std::ffi::OsStr,
- ) -> Result {
- BoolishValueParser::new()
- .parse_ref(cmd, arg, value)
- .map(SkipVerifyFlag)
- }
-}
-
impl FromStr for ExportFormat {
type Err = anyhow::Error;
@@ -86,15 +43,12 @@ enum Command {
format: ExportFormat,
/// Path that contains Automerge changes
+ #[clap(parse(from_os_str))]
changes_file: Option,
/// The file to write to. If omitted assumes stdout
- #[clap(long("out"), short('o'))]
+ #[clap(parse(from_os_str), long("out"), short('o'))]
output_file: Option,
-
- /// Whether to verify the head hashes of a compressed document
- #[clap(long, action = clap::ArgAction::SetFalse)]
- skip_verifying_heads: SkipVerifyFlag,
},
Import {
@@ -102,37 +56,69 @@ enum Command {
#[clap(long, short, default_value = "json")]
format: ExportFormat,
+ #[clap(parse(from_os_str))]
input_file: Option,
/// Path to write Automerge changes to
- #[clap(long("out"), short('o'))]
+ #[clap(parse(from_os_str), long("out"), short('o'))]
changes_file: Option,
},
- /// Read an automerge document and print a JSON representation of the changes in it to stdout
- Examine {
+ /// Read an automerge document from a file or stdin, perform a change on it and write a new
+ /// document to stdout or the specified output file.
+ Change {
+ /// The change script to perform. Change scripts have the form [].
+ /// The possible commands are 'set', 'insert', 'delete', and 'increment'.
+ ///
+ /// Paths look like this: $["mapkey"][0]. They always lways start with a '$', then each
+ /// subsequent segment of the path is either a string in double quotes to index a key in a
+ /// map, or an integer index to address an array element.
+ ///
+ /// Examples
+ ///
+ /// ## set
+ ///
+ /// > automerge change 'set $["someobject"] {"items": []}' somefile
+ ///
+ /// ## insert
+ ///
+ /// > automerge change 'insert $["someobject"]["items"][0] "item1"' somefile
+ ///
+ /// ## increment
+ ///
+ /// > automerge change 'increment $["mycounter"]'
+ ///
+ /// ## delete
+ ///
+ /// > automerge change 'delete $["someobject"]["items"]' somefile
+ script: String,
+
+ /// The file to change, if omitted will assume stdin
+ #[clap(parse(from_os_str))]
input_file: Option,
- skip_verifying_heads: SkipVerifyFlag,
+
+ /// Path to write Automerge changes to, if omitted will write to stdout
+ #[clap(parse(from_os_str), long("out"), short('o'))]
+ output_file: Option,
},
- /// Read an automerge sync messaage and print a JSON representation of it
- ExamineSync { input_file: Option },
+ /// Read an automerge document and print a JSON representation of the changes in it to stdout
+ Examine { input_file: Option },
/// Read one or more automerge documents and output a merged, compacted version of them
Merge {
/// The file to write to. If omitted assumes stdout
- #[clap(long("out"), short('o'))]
+ #[clap(parse(from_os_str), long("out"), short('o'))]
output_file: Option,
-
/// The file(s) to compact. If empty assumes stdin
input: Vec,
},
}
fn open_file_or_stdin(maybe_path: Option) -> Result> {
- if std::io::stdin().is_terminal() {
+ if atty::is(atty::Stream::Stdin) {
if let Some(path) = maybe_path {
- Ok(Box::new(File::open(path).unwrap()))
+ Ok(Box::new(File::open(&path).unwrap()))
} else {
Err(anyhow!(
"Must provide file path if not providing input via stdin"
@@ -144,9 +130,9 @@ fn open_file_or_stdin(maybe_path: Option) -> Result) -> Result> {
- if std::io::stdout().is_terminal() {
+ if atty::is(atty::Stream::Stdout) {
if let Some(path) = maybe_path {
- Ok(Box::new(File::create(path).unwrap()))
+ Ok(Box::new(File::create(&path).unwrap()))
} else {
Err(anyhow!("Must provide file path if not piping to stdout"))
}
@@ -163,22 +149,16 @@ fn main() -> Result<()> {
changes_file,
format,
output_file,
- skip_verifying_heads,
} => {
let output: Box = if let Some(output_file) = output_file {
- Box::new(File::create(output_file)?)
+ Box::new(File::create(&output_file)?)
} else {
Box::new(std::io::stdout())
};
match format {
ExportFormat::Json => {
let mut in_buffer = open_file_or_stdin(changes_file)?;
- export::export_json(
- &mut in_buffer,
- output,
- skip_verifying_heads,
- std::io::stdout().is_terminal(),
- )
+ export::export_json(&mut in_buffer, output, atty::is(atty::Stream::Stdout))
}
ExportFormat::Toml => unimplemented!(),
}
@@ -195,30 +175,23 @@ fn main() -> Result<()> {
}
ExportFormat::Toml => unimplemented!(),
},
- Command::Examine {
- input_file,
- skip_verifying_heads,
+ Command::Change { ..
+ //input_file,
+ //output_file,
+ //script,
} => {
+ unimplemented!()
+/*
let in_buffer = open_file_or_stdin(input_file)?;
- let out_buffer = std::io::stdout();
- match examine::examine(
- in_buffer,
- out_buffer,
- skip_verifying_heads,
- std::io::stdout().is_terminal(),
- ) {
- Ok(()) => {}
- Err(e) => {
- eprintln!("Error: {:?}", e);
- }
- }
- Ok(())
+ let mut out_buffer = create_file_or_stdout(output_file)?;
+ change::change(in_buffer, &mut out_buffer, script.as_str())
+ .map_err(|e| anyhow::format_err!("Unable to make changes: {:?}", e))
+*/
}
- Command::ExamineSync { input_file } => {
+ Command::Examine { input_file } => {
let in_buffer = open_file_or_stdin(input_file)?;
let out_buffer = std::io::stdout();
- match examine_sync::examine_sync(in_buffer, out_buffer, std::io::stdout().is_terminal())
- {
+ match examine::examine(in_buffer, out_buffer, atty::is(atty::Stream::Stdout)) {
Ok(()) => {}
Err(e) => {
eprintln!("Error: {:?}", e);
diff --git a/rust/automerge-cli/src/merge.rs b/automerge-cli/src/merge.rs
similarity index 100%
rename from rust/automerge-cli/src/merge.rs
rename to automerge-cli/src/merge.rs
diff --git a/rust/automerge-cli/tests/integration.rs b/automerge-cli/tests/integration.rs
similarity index 100%
rename from rust/automerge-cli/tests/integration.rs
rename to automerge-cli/tests/integration.rs
diff --git a/automerge-js/.gitignore b/automerge-js/.gitignore
new file mode 100644
index 00000000..5add9449
--- /dev/null
+++ b/automerge-js/.gitignore
@@ -0,0 +1,2 @@
+/node_modules
+/yarn.lock
diff --git a/automerge-js/package.json b/automerge-js/package.json
new file mode 100644
index 00000000..17018429
--- /dev/null
+++ b/automerge-js/package.json
@@ -0,0 +1,18 @@
+{
+ "name": "automerge-js",
+ "version": "0.1.0",
+ "main": "src/index.js",
+ "license": "MIT",
+ "scripts": {
+ "test": "mocha --bail --full-trace"
+ },
+ "devDependencies": {
+ "mocha": "^9.1.1"
+ },
+ "dependencies": {
+ "automerge-wasm": "file:../automerge-wasm",
+ "fast-sha256": "^1.3.0",
+ "pako": "^2.0.4",
+ "uuid": "^8.3"
+ }
+}
diff --git a/rust/automerge-wasm/test/helpers/columnar.js b/automerge-js/src/columnar.js
similarity index 100%
rename from rust/automerge-wasm/test/helpers/columnar.js
rename to automerge-js/src/columnar.js
diff --git a/rust/automerge-wasm/test/helpers/common.js b/automerge-js/src/common.js
similarity index 100%
rename from rust/automerge-wasm/test/helpers/common.js
rename to automerge-js/src/common.js
diff --git a/automerge-js/src/constants.js b/automerge-js/src/constants.js
new file mode 100644
index 00000000..ea92228c
--- /dev/null
+++ b/automerge-js/src/constants.js
@@ -0,0 +1,18 @@
+// Properties of the document root object
+//const OPTIONS = Symbol('_options') // object containing options passed to init()
+//const CACHE = Symbol('_cache') // map from objectId to immutable object
+const STATE = Symbol('_state') // object containing metadata about current state (e.g. sequence numbers)
+const HEADS = Symbol('_heads') // object containing metadata about current state (e.g. sequence numbers)
+const OBJECT_ID = Symbol('_objectId') // object containing metadata about current state (e.g. sequence numbers)
+const READ_ONLY = Symbol('_readOnly') // object containing metadata about current state (e.g. sequence numbers)
+const FROZEN = Symbol('_frozen') // object containing metadata about current state (e.g. sequence numbers)
+
+// Properties of all Automerge objects
+//const OBJECT_ID = Symbol('_objectId') // the object ID of the current object (string)
+//const CONFLICTS = Symbol('_conflicts') // map or list (depending on object type) of conflicts
+//const CHANGE = Symbol('_change') // the context object on proxy objects used in change callback
+//const ELEM_IDS = Symbol('_elemIds') // list containing the element ID of each list element
+
+module.exports = {
+ STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN
+}
diff --git a/javascript/src/counter.ts b/automerge-js/src/counter.js
similarity index 63%
rename from javascript/src/counter.ts
rename to automerge-js/src/counter.js
index 88adb840..1ea56479 100644
--- a/javascript/src/counter.ts
+++ b/automerge-js/src/counter.js
@@ -1,16 +1,12 @@
-import { Automerge, type ObjID, type Prop } from "@automerge/automerge-wasm"
-import { COUNTER } from "./constants"
/**
* The most basic CRDT: an integer value that can be changed only by
* incrementing and decrementing. Since addition of integers is commutative,
* the value trivially converges.
*/
-export class Counter {
- value: number
-
- constructor(value?: number) {
+class Counter {
+ constructor(value) {
this.value = value || 0
- Reflect.defineProperty(this, COUNTER, { value: true })
+ Object.freeze(this)
}
/**
@@ -21,7 +17,7 @@ export class Counter {
* concatenating it with another string, as in `x + ''`.
* https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf
*/
- valueOf(): number {
+ valueOf() {
return this.value
}
@@ -30,7 +26,7 @@ export class Counter {
* this method is called e.g. when you do `['value: ', x].join('')` or when
* you use string interpolation: `value: ${x}`.
*/
- toString(): string {
+ toString() {
return this.valueOf().toString()
}
@@ -38,7 +34,7 @@ export class Counter {
* Returns the counter value, so that a JSON serialization of an Automerge
* document represents the counter simply as an integer.
*/
- toJSON(): number {
+ toJSON() {
return this.value
}
}
@@ -48,32 +44,13 @@ export class Counter {
* callback.
*/
class WriteableCounter extends Counter {
- context: Automerge
- path: Prop[]
- objectId: ObjID
- key: Prop
-
- constructor(
- value: number,
- context: Automerge,
- path: Prop[],
- objectId: ObjID,
- key: Prop
- ) {
- super(value)
- this.context = context
- this.path = path
- this.objectId = objectId
- this.key = key
- }
-
/**
* Increases the value of the counter by `delta`. If `delta` is not given,
* increases the value of the counter by 1.
*/
- increment(delta: number): number {
- delta = typeof delta === "number" ? delta : 1
- this.context.increment(this.objectId, this.key, delta)
+ increment(delta) {
+ delta = typeof delta === 'number' ? delta : 1
+ this.context.inc(this.objectId, this.key, delta)
this.value += delta
return this.value
}
@@ -82,8 +59,8 @@ class WriteableCounter extends Counter {
* Decreases the value of the counter by `delta`. If `delta` is not given,
* decreases the value of the counter by 1.
*/
- decrement(delta: number): number {
- return this.increment(typeof delta === "number" ? -delta : -1)
+ decrement(delta) {
+ return this.inc(typeof delta === 'number' ? -delta : -1)
}
}
@@ -93,15 +70,15 @@ class WriteableCounter extends Counter {
* `objectId` is the ID of the object containing the counter, and `key` is
* the property name (key in map, or index in list) where the counter is
* located.
- */
-export function getWriteableCounter(
- value: number,
- context: Automerge,
- path: Prop[],
- objectId: ObjID,
- key: Prop
-): WriteableCounter {
- return new WriteableCounter(value, context, path, objectId, key)
+*/
+function getWriteableCounter(value, context, path, objectId, key) {
+ const instance = Object.create(WriteableCounter.prototype)
+ instance.value = value
+ instance.context = context
+ instance.path = path
+ instance.objectId = objectId
+ instance.key = key
+ return instance
}
-//module.exports = { Counter, getWriteableCounter }
+module.exports = { Counter, getWriteableCounter }
diff --git a/rust/automerge-wasm/test/helpers/encoding.js b/automerge-js/src/encoding.js
similarity index 100%
rename from rust/automerge-wasm/test/helpers/encoding.js
rename to automerge-js/src/encoding.js
diff --git a/automerge-js/src/index.js b/automerge-js/src/index.js
new file mode 100644
index 00000000..326fc967
--- /dev/null
+++ b/automerge-js/src/index.js
@@ -0,0 +1,372 @@
+const AutomergeWASM = require("automerge-wasm")
+const uuid = require('./uuid')
+
+let { rootProxy, listProxy, textProxy, mapProxy } = require("./proxies")
+let { Counter } = require("./counter")
+let { Text } = require("./text")
+let { Int, Uint, Float64 } = require("./numbers")
+let { STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN } = require("./constants")
+
+function init(actor) {
+ if (typeof actor != 'string') {
+ actor = null
+ }
+ const state = AutomergeWASM.create(actor)
+ return rootProxy(state, true);
+}
+
+function clone(doc) {
+ const state = doc[STATE].clone()
+ return rootProxy(state, true);
+}
+
+function free(doc) {
+ return doc[STATE].free()
+}
+
+function from(data, actor) {
+ let doc1 = init(actor)
+ let doc2 = change(doc1, (d) => Object.assign(d, data))
+ return doc2
+}
+
+function change(doc, options, callback) {
+ if (callback === undefined) {
+ // FIXME implement options
+ callback = options
+ options = {}
+ }
+ if (typeof options === "string") {
+ options = { message: options }
+ }
+ if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") {
+ throw new RangeError("must be the document root");
+ }
+ if (doc[FROZEN] === true) {
+ throw new RangeError("Attempting to use an outdated Automerge document")
+ }
+ if (!!doc[HEADS] === true) {
+ throw new RangeError("Attempting to change an out of date document");
+ }
+ if (doc[READ_ONLY] === false) {
+ throw new RangeError("Calls to Automerge.change cannot be nested")
+ }
+ const state = doc[STATE]
+ const heads = state.getHeads()
+ try {
+ doc[HEADS] = heads
+ doc[FROZEN] = true
+ let root = rootProxy(state);
+ callback(root)
+ if (state.pendingOps() === 0) {
+ doc[FROZEN] = false
+ doc[HEADS] = undefined
+ return doc
+ } else {
+ state.commit(options.message, options.time)
+ return rootProxy(state, true);
+ }
+ } catch (e) {
+ //console.log("ERROR: ",e)
+ doc[FROZEN] = false
+ doc[HEADS] = undefined
+ state.rollback()
+ throw e
+ }
+}
+
+function emptyChange(doc, options) {
+ if (options === undefined) {
+ options = {}
+ }
+ if (typeof options === "string") {
+ options = { message: options }
+ }
+
+ if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") {
+ throw new RangeError("must be the document root");
+ }
+ if (doc[FROZEN] === true) {
+ throw new RangeError("Attempting to use an outdated Automerge document")
+ }
+ if (doc[READ_ONLY] === false) {
+ throw new RangeError("Calls to Automerge.change cannot be nested")
+ }
+
+ const state = doc[STATE]
+ state.commit(options.message, options.time)
+ return rootProxy(state, true);
+}
+
+function load(data, actor) {
+ const state = AutomergeWASM.loadDoc(data, actor)
+ return rootProxy(state, true);
+}
+
+function save(doc) {
+ const state = doc[STATE]
+ return state.save()
+}
+
+function merge(local, remote) {
+ if (local[HEADS] === true) {
+ throw new RangeError("Attempting to change an out of date document");
+ }
+ const localState = local[STATE]
+ const heads = localState.getHeads()
+ const remoteState = remote[STATE]
+ const changes = localState.getChangesAdded(remoteState)
+ localState.applyChanges(changes)
+ local[HEADS] = heads
+ return rootProxy(localState, true)
+}
+
+function getActorId(doc) {
+ const state = doc[STATE]
+ return state.getActorId()
+}
+
+function conflictAt(context, objectId, prop) {
+ let values = context.values(objectId, prop)
+ if (values.length <= 1) {
+ return
+ }
+ let result = {}
+ for (const conflict of values) {
+ const datatype = conflict[0]
+ const value = conflict[1]
+ switch (datatype) {
+ case "map":
+ result[value] = mapProxy(context, value, [ prop ], true)
+ break;
+ case "list":
+ result[value] = listProxy(context, value, [ prop ], true)
+ break;
+ case "text":
+ result[value] = textProxy(context, value, [ prop ], true)
+ break;
+ //case "table":
+ //case "cursor":
+ case "str":
+ case "uint":
+ case "int":
+ case "f64":
+ case "boolean":
+ case "bytes":
+ case "null":
+ result[conflict[2]] = value
+ break;
+ case "counter":
+ result[conflict[2]] = new Counter(value)
+ break;
+ case "timestamp":
+ result[conflict[2]] = new Date(value)
+ break;
+ default:
+ throw RangeError(`datatype ${datatype} unimplemented`)
+ }
+ }
+ return result
+}
+
+function getConflicts(doc, prop) {
+ const state = doc[STATE]
+ const objectId = doc[OBJECT_ID]
+ return conflictAt(state, objectId, prop)
+}
+
+function getLastLocalChange(doc) {
+ const state = doc[STATE]
+ try {
+ return state.getLastLocalChange()
+ } catch (e) {
+ return
+ }
+}
+
+function getObjectId(doc) {
+ return doc[OBJECT_ID]
+}
+
+function getChanges(oldState, newState) {
+ const o = oldState[STATE]
+ const n = newState[STATE]
+ const heads = oldState[HEADS]
+ return n.getChanges(heads || o.getHeads())
+}
+
+function getAllChanges(doc) {
+ const state = doc[STATE]
+ return state.getChanges([])
+}
+
+function applyChanges(doc, changes) {
+ if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") {
+ throw new RangeError("must be the document root");
+ }
+ if (doc[FROZEN] === true) {
+ throw new RangeError("Attempting to use an outdated Automerge document")
+ }
+ if (doc[READ_ONLY] === false) {
+ throw new RangeError("Calls to Automerge.change cannot be nested")
+ }
+ const state = doc[STATE]
+ const heads = state.getHeads()
+ state.applyChanges(changes)
+ doc[HEADS] = heads
+ return [rootProxy(state, true)];
+}
+
+function getHistory(doc) {
+ const actor = getActorId(doc)
+ const history = getAllChanges(doc)
+ return history.map((change, index) => ({
+ get change () {
+ return decodeChange(change)
+ },
+ get snapshot () {
+ const [state] = applyChanges(init(), history.slice(0, index + 1))
+ return state
+ }
+ })
+ )
+}
+
+function equals() {
+ if (!isObject(val1) || !isObject(val2)) return val1 === val2
+ const keys1 = Object.keys(val1).sort(), keys2 = Object.keys(val2).sort()
+ if (keys1.length !== keys2.length) return false
+ for (let i = 0; i < keys1.length; i++) {
+ if (keys1[i] !== keys2[i]) return false
+ if (!equals(val1[keys1[i]], val2[keys2[i]])) return false
+ }
+ return true
+}
+
+function encodeSyncMessage(msg) {
+ return AutomergeWASM.encodeSyncMessage(msg)
+}
+
+function decodeSyncMessage(msg) {
+ return AutomergeWASM.decodeSyncMessage(msg)
+}
+
+function encodeSyncState(state) {
+ return AutomergeWASM.encodeSyncState(AutomergeWASM.importSyncState(state))
+}
+
+function decodeSyncState(state) {
+ return AutomergeWASM.exportSyncState(AutomergeWASM.decodeSyncState(state))
+}
+
+function generateSyncMessage(doc, inState) {
+ const state = doc[STATE]
+ const syncState = AutomergeWASM.importSyncState(inState)
+ const message = state.generateSyncMessage(syncState)
+ const outState = AutomergeWASM.exportSyncState(syncState)
+ return [ outState, message ]
+}
+
+function receiveSyncMessage(doc, inState, message) {
+ const syncState = AutomergeWASM.importSyncState(inState)
+ if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") {
+ throw new RangeError("must be the document root");
+ }
+ if (doc[FROZEN] === true) {
+ throw new RangeError("Attempting to use an outdated Automerge document")
+ }
+ if (!!doc[HEADS] === true) {
+ throw new RangeError("Attempting to change an out of date document");
+ }
+ if (doc[READ_ONLY] === false) {
+ throw new RangeError("Calls to Automerge.change cannot be nested")
+ }
+ const state = doc[STATE]
+ const heads = state.getHeads()
+ state.receiveSyncMessage(syncState, message)
+ const outState = AutomergeWASM.exportSyncState(syncState)
+ doc[HEADS] = heads
+ return [rootProxy(state, true), outState, null];
+}
+
+function initSyncState() {
+ return AutomergeWASM.exportSyncState(AutomergeWASM.initSyncState(change))
+}
+
+function encodeChange(change) {
+ return AutomergeWASM.encodeChange(change)
+}
+
+function decodeChange(data) {
+ return AutomergeWASM.decodeChange(data)
+}
+
+function encodeSyncMessage(change) {
+ return AutomergeWASM.encodeSyncMessage(change)
+}
+
+function decodeSyncMessage(data) {
+ return AutomergeWASM.decodeSyncMessage(data)
+}
+
+function getMissingDeps(doc, heads) {
+ const state = doc[STATE]
+ return state.getMissingDeps(heads)
+}
+
+function getHeads(doc) {
+ const state = doc[STATE]
+ return doc[HEADS] || state.getHeads()
+}
+
+function dump(doc) {
+ const state = doc[STATE]
+ state.dump()
+}
+
+function toJS(doc) {
+ if (typeof doc === "object") {
+ if (doc instanceof Uint8Array) {
+ return doc
+ }
+ if (doc === null) {
+ return doc
+ }
+ if (doc instanceof Array) {
+ return doc.map((a) => toJS(a))
+ }
+ if (doc instanceof Text) {
+ return doc.map((a) => toJS(a))
+ }
+ let tmp = {}
+ for (index in doc) {
+ tmp[index] = toJS(doc[index])
+ }
+ return tmp
+ } else {
+ return doc
+ }
+}
+
+module.exports = {
+ init, from, change, emptyChange, clone, free,
+ load, save, merge, getChanges, getAllChanges, applyChanges,
+ getLastLocalChange, getObjectId, getActorId, getConflicts,
+ encodeChange, decodeChange, equals, getHistory, getHeads, uuid,
+ generateSyncMessage, receiveSyncMessage, initSyncState,
+ decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState,
+ getMissingDeps,
+ dump, Text, Counter, Int, Uint, Float64, toJS,
+}
+
+// depricated
+// Frontend, setDefaultBackend, Backend
+
+// more...
+/*
+for (let name of ['getObjectId', 'getObjectById',
+ 'setActorId',
+ 'Text', 'Table', 'Counter', 'Observable' ]) {
+ module.exports[name] = Frontend[name]
+}
+*/
diff --git a/automerge-js/src/numbers.js b/automerge-js/src/numbers.js
new file mode 100644
index 00000000..1ee22dee
--- /dev/null
+++ b/automerge-js/src/numbers.js
@@ -0,0 +1,33 @@
+// Convience classes to allow users to stricly specify the number type they want
+
+class Int {
+ constructor(value) {
+ if (!(Number.isInteger(value) && value <= Number.MAX_SAFE_INTEGER && value >= Number.MIN_SAFE_INTEGER)) {
+ throw new RangeError(`Value ${value} cannot be a uint`)
+ }
+ this.value = value
+ Object.freeze(this)
+ }
+}
+
+class Uint {
+ constructor(value) {
+ if (!(Number.isInteger(value) && value <= Number.MAX_SAFE_INTEGER && value >= 0)) {
+ throw new RangeError(`Value ${value} cannot be a uint`)
+ }
+ this.value = value
+ Object.freeze(this)
+ }
+}
+
+class Float64 {
+ constructor(value) {
+ if (typeof value !== 'number') {
+ throw new RangeError(`Value ${value} cannot be a float64`)
+ }
+ this.value = value || 0.0
+ Object.freeze(this)
+ }
+}
+
+module.exports = { Int, Uint, Float64 }
diff --git a/automerge-js/src/proxies.js b/automerge-js/src/proxies.js
new file mode 100644
index 00000000..f9e27855
--- /dev/null
+++ b/automerge-js/src/proxies.js
@@ -0,0 +1,623 @@
+
+const AutomergeWASM = require("automerge-wasm")
+const { Int, Uint, Float64 } = require("./numbers");
+const { Counter, getWriteableCounter } = require("./counter");
+const { Text } = require("./text");
+const { STATE, HEADS, FROZEN, OBJECT_ID, READ_ONLY } = require("./constants")
+
+function parseListIndex(key) {
+ if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10)
+ if (typeof key !== 'number') {
+ // throw new TypeError('A list index must be a number, but you passed ' + JSON.stringify(key))
+ return key
+ }
+ if (key < 0 || isNaN(key) || key === Infinity || key === -Infinity) {
+ throw new RangeError('A list index must be positive, but you passed ' + key)
+ }
+ return key
+}
+
+function valueAt(target, prop) {
+ const { context, objectId, path, readonly, heads} = target
+ let value = context.value(objectId, prop, heads)
+ if (value === undefined) {
+ return
+ }
+ const datatype = value[0]
+ const val = value[1]
+ switch (datatype) {
+ case undefined: return;
+ case "map": return mapProxy(context, val, [ ... path, prop ], readonly, heads);
+ case "list": return listProxy(context, val, [ ... path, prop ], readonly, heads);
+ case "text": return textProxy(context, val, [ ... path, prop ], readonly, heads);
+ //case "table":
+ //case "cursor":
+ case "str": return val;
+ case "uint": return val;
+ case "int": return val;
+ case "f64": return val;
+ case "boolean": return val;
+ case "null": return null;
+ case "bytes": return val;
+ case "timestamp": return val;
+ case "counter": {
+ if (readonly) {
+ return new Counter(val);
+ } else {
+ return getWriteableCounter(val, context, path, objectId, prop)
+ }
+ }
+ default:
+ throw RangeError(`datatype ${datatype} unimplemented`)
+ }
+}
+
+function import_value(value) {
+ switch (typeof value) {
+ case 'object':
+ if (value == null) {
+ return [ null, "null"]
+ } else if (value instanceof Uint) {
+ return [ value.value, "uint" ]
+ } else if (value instanceof Int) {
+ return [ value.value, "int" ]
+ } else if (value instanceof Float64) {
+ return [ value.value, "f64" ]
+ } else if (value instanceof Counter) {
+ return [ value.value, "counter" ]
+ } else if (value instanceof Date) {
+ return [ value.getTime(), "timestamp" ]
+ } else if (value instanceof Uint8Array) {
+ return [ value, "bytes" ]
+ } else if (value instanceof Array) {
+ return [ value, "list" ]
+ } else if (value instanceof Text) {
+ return [ value, "text" ]
+ } else if (value[OBJECT_ID]) {
+ throw new RangeError('Cannot create a reference to an existing document object')
+ } else {
+ return [ value, "map" ]
+ }
+ break;
+ case 'boolean':
+ return [ value, "boolean" ]
+ case 'number':
+ if (Number.isInteger(value)) {
+ return [ value, "int" ]
+ } else {
+ return [ value, "f64" ]
+ }
+ break;
+ case 'string':
+ return [ value ]
+ break;
+ default:
+ throw new RangeError(`Unsupported type of value: ${typeof value}`)
+ }
+}
+
+const MapHandler = {
+ get (target, key) {
+ const { context, objectId, path, readonly, frozen, heads, cache } = target
+ if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] }
+ if (key === OBJECT_ID) return objectId
+ if (key === READ_ONLY) return readonly
+ if (key === FROZEN) return frozen
+ if (key === HEADS) return heads
+ if (key === STATE) return context;
+ if (!cache[key]) {
+ cache[key] = valueAt(target, key)
+ }
+ return cache[key]
+ },
+
+ set (target, key, val) {
+ let { context, objectId, path, readonly, frozen} = target
+ target.cache = {} // reset cache on set
+ if (val && val[OBJECT_ID]) {
+ throw new RangeError('Cannot create a reference to an existing document object')
+ }
+ if (key === FROZEN) {
+ target.frozen = val
+ return
+ }
+ if (key === HEADS) {
+ target.heads = val
+ return
+ }
+ let [ value, datatype ] = import_value(val)
+ if (frozen) {
+ throw new RangeError("Attempting to use an outdated Automerge document")
+ }
+ if (readonly) {
+ throw new RangeError(`Object property "${key}" cannot be modified`)
+ }
+ switch (datatype) {
+ case "list":
+ const list = context.set_object(objectId, key, [])
+ const proxyList = listProxy(context, list, [ ... path, key ], readonly );
+ for (let i = 0; i < value.length; i++) {
+ proxyList[i] = value[i]
+ }
+ break;
+ case "text":
+ const text = context.set_object(objectId, key, "", "text")
+ const proxyText = textProxy(context, text, [ ... path, key ], readonly );
+ for (let i = 0; i < value.length; i++) {
+ proxyText[i] = value.get(i)
+ }
+ break;
+ case "map":
+ const map = context.set_object(objectId, key, {})
+ const proxyMap = mapProxy(context, map, [ ... path, key ], readonly );
+ for (const key in value) {
+ proxyMap[key] = value[key]
+ }
+ break;
+ default:
+ context.set(objectId, key, value, datatype)
+ }
+ return true
+ },
+
+ deleteProperty (target, key) {
+ const { context, objectId, path, readonly, frozen } = target
+ target.cache = {} // reset cache on delete
+ if (readonly) {
+ throw new RangeError(`Object property "${key}" cannot be modified`)
+ }
+ context.del(objectId, key)
+ return true
+ },
+
+ has (target, key) {
+ const value = this.get(target, key)
+ return value !== undefined
+ },
+
+ getOwnPropertyDescriptor (target, key) {
+ const { context, objectId } = target
+ const value = this.get(target, key)
+ if (typeof value !== 'undefined') {
+ return {
+ configurable: true, enumerable: true, value
+ }
+ }
+ },
+
+ ownKeys (target) {
+ const { context, objectId, heads} = target
+ return context.keys(objectId, heads)
+ },
+}
+
+
+const ListHandler = {
+ get (target, index) {
+ const {context, objectId, path, readonly, frozen, heads } = target
+ index = parseListIndex(index)
+ if (index === Symbol.hasInstance) { return (instance) => { return [].has(instance) } }
+ if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] }
+ if (index === OBJECT_ID) return objectId
+ if (index === READ_ONLY) return readonly
+ if (index === FROZEN) return frozen
+ if (index === HEADS) return heads
+ if (index === STATE) return context;
+ if (index === 'length') return context.length(objectId, heads);
+ if (index === Symbol.iterator) {
+ let i = 0;
+ return function *() {
+ // FIXME - ugly
+ let value = valueAt(target, i)
+ while (value !== undefined) {
+ yield value
+ i += 1
+ value = valueAt(target, i)
+ }
+ }
+ }
+ if (typeof index === 'number') {
+ return valueAt(target, index)
+ } else {
+ return listMethods(target)[index]
+ }
+ },
+
+ set (target, index, val) {
+ let {context, objectId, path, readonly, frozen } = target
+ index = parseListIndex(index)
+ if (val && val[OBJECT_ID]) {
+ throw new RangeError('Cannot create a reference to an existing document object')
+ }
+ if (index === FROZEN) {
+ target.frozen = val
+ return
+ }
+ if (index === HEADS) {
+ target.heads = val
+ return
+ }
+ if (typeof index == "string") {
+ throw new RangeError('list index must be a number')
+ }
+ const [ value, datatype] = import_value(val)
+ if (frozen) {
+ throw new RangeError("Attempting to use an outdated Automerge document")
+ }
+ if (readonly) {
+ throw new RangeError(`Object property "${index}" cannot be modified`)
+ }
+ switch (datatype) {
+ case "list":
+ let list
+ if (index >= context.length(objectId)) {
+ list = context.insert_object(objectId, index, [])
+ } else {
+ list = context.set_object(objectId, index, [])
+ }
+ const proxyList = listProxy(context, list, [ ... path, index ], readonly);
+ proxyList.splice(0,0,...value)
+ break;
+ case "text":
+ let text
+ if (index >= context.length(objectId)) {
+ text = context.insert_object(objectId, index, "", "text")
+ } else {
+ text = context.set_object(objectId, index, "", "text")
+ }
+ const proxyText = textProxy(context, text, [ ... path, index ], readonly);
+ proxyText.splice(0,0,...value)
+ break;
+ case "map":
+ let map
+ if (index >= context.length(objectId)) {
+ map = context.insert_object(objectId, index, {})
+ } else {
+ map = context.set_object(objectId, index, {})
+ }
+ const proxyMap = mapProxy(context, map, [ ... path, index ], readonly);
+ for (const key in value) {
+ proxyMap[key] = value[key]
+ }
+ break;
+ default:
+ if (index >= context.length(objectId)) {
+ context.insert(objectId, index, value, datatype)
+ } else {
+ context.set(objectId, index, value, datatype)
+ }
+ }
+ return true
+ },
+
+ deleteProperty (target, index) {
+ const {context, objectId} = target
+ index = parseListIndex(index)
+ if (context.value(objectId, index)[0] == "counter") {
+ throw new TypeError('Unsupported operation: deleting a counter from a list')
+ }
+ context.del(objectId, index)
+ return true
+ },
+
+ has (target, index) {
+ const {context, objectId, heads} = target
+ index = parseListIndex(index)
+ if (typeof index === 'number') {
+ return index < context.length(objectId, heads)
+ }
+ return index === 'length'
+ },
+
+ getOwnPropertyDescriptor (target, index) {
+ const {context, objectId, path, readonly, frozen, heads} = target
+
+ if (index === 'length') return {writable: true, value: context.length(objectId, heads) }
+ if (index === OBJECT_ID) return {configurable: false, enumerable: false, value: objectId}
+
+ index = parseListIndex(index)
+
+ let value = valueAt(target, index)
+ return { configurable: true, enumerable: true, value }
+ },
+
+ getPrototypeOf(target) { return Object.getPrototypeOf([]) },
+ ownKeys (target) {
+ const {context, objectId, heads } = target
+ let keys = []
+ // uncommenting this causes assert.deepEqual() to fail when comparing to a pojo array
+ // but not uncommenting it causes for (i in list) {} to not enumerate values properly
+ //for (let i = 0; i < target.context.length(objectId, heads); i++) { keys.push(i.toString()) }
+ keys.push("length");
+ return keys
+ }
+}
+
+const TextHandler = Object.assign({}, ListHandler, {
+ get (target, index) {
+ // FIXME this is a one line change from ListHandler.get()
+ const {context, objectId, path, readonly, frozen, heads } = target
+ index = parseListIndex(index)
+ if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] }
+ if (index === Symbol.hasInstance) { return (instance) => { return [].has(instance) } }
+ if (index === OBJECT_ID) return objectId
+ if (index === READ_ONLY) return readonly
+ if (index === FROZEN) return frozen
+ if (index === HEADS) return heads
+ if (index === STATE) return context;
+ if (index === 'length') return context.length(objectId, heads);
+ if (index === Symbol.iterator) {
+ let i = 0;
+ return function *() {
+ let value = valueAt(target, i)
+ while (value !== undefined) {
+ yield value
+ i += 1
+ value = valueAt(target, i)
+ }
+ }
+ }
+ if (typeof index === 'number') {
+ return valueAt(target, index)
+ } else {
+ return textMethods(target)[index] || listMethods(target)[index]
+ }
+ },
+ getPrototypeOf(target) {
+ return Object.getPrototypeOf(new Text())
+ },
+})
+
+function mapProxy(context, objectId, path, readonly, heads) {
+ return new Proxy({context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}, MapHandler)
+}
+
+function listProxy(context, objectId, path, readonly, heads) {
+ let target = []
+ Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}})
+ return new Proxy(target, ListHandler)
+}
+
+function textProxy(context, objectId, path, readonly, heads) {
+ let target = []
+ Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}})
+ return new Proxy(target, TextHandler)
+}
+
+function rootProxy(context, readonly) {
+ return mapProxy(context, "_root", [], readonly)
+}
+
+function listMethods(target) {
+ const {context, objectId, path, readonly, frozen, heads} = target
+ const methods = {
+ deleteAt(index, numDelete) {
+ if (typeof numDelete === 'number') {
+ context.splice(objectId, index, numDelete)
+ } else {
+ context.del(objectId, index)
+ }
+ return this
+ },
+
+ fill(val, start, end) {
+ // FIXME
+ let list = context.getObject(objectId)
+ let [value, datatype] = valueAt(target, index)
+ for (let index = parseListIndex(start || 0); index < parseListIndex(end || list.length); index++) {
+ context.set(objectId, index, value, datatype)
+ }
+ return this
+ },
+
+ indexOf(o, start = 0) {
+ // FIXME
+ const id = o[OBJECT_ID]
+ if (id) {
+ const list = context.getObject(objectId)
+ for (let index = start; index < list.length; index++) {
+ if (list[index][OBJECT_ID] === id) {
+ return index
+ }
+ }
+ return -1
+ } else {
+ return context.indexOf(objectId, o, start)
+ }
+ },
+
+ insertAt(index, ...values) {
+ this.splice(index, 0, ...values)
+ return this
+ },
+
+ pop() {
+ let length = context.length(objectId)
+ if (length == 0) {
+ return undefined
+ }
+ let last = valueAt(target, length - 1)
+ context.del(objectId, length - 1)
+ return last
+ },
+
+ push(...values) {
+ let len = context.length(objectId)
+ this.splice(len, 0, ...values)
+ return context.length(objectId)
+ },
+
+ shift() {
+ if (context.length(objectId) == 0) return
+ const first = valueAt(target, 0)
+ context.del(objectId, 0)
+ return first
+ },
+
+ splice(index, del, ...vals) {
+ index = parseListIndex(index)
+ del = parseListIndex(del)
+ for (let val of vals) {
+ if (val && val[OBJECT_ID]) {
+ throw new RangeError('Cannot create a reference to an existing document object')
+ }
+ }
+ if (frozen) {
+ throw new RangeError("Attempting to use an outdated Automerge document")
+ }
+ if (readonly) {
+ throw new RangeError("Sequence object cannot be modified outside of a change block")
+ }
+ let result = []
+ for (let i = 0; i < del; i++) {
+ let value = valueAt(target, index)
+ result.push(value)
+ context.del(objectId, index)
+ }
+ const values = vals.map((val) => import_value(val))
+ for (let [value,datatype] of values) {
+ switch (datatype) {
+ case "list":
+ const list = context.insert_object(objectId, index, [])
+ const proxyList = listProxy(context, list, [ ... path, index ], readonly);
+ proxyList.splice(0,0,...value)
+ break;
+ case "text":
+ const text = context.insert_object(objectId, index, "", "text")
+ const proxyText = textProxy(context, text, [ ... path, index ], readonly);
+ proxyText.splice(0,0,...value)
+ break;
+ case "map":
+ const map = context.insert_object(objectId, index, {})
+ const proxyMap = mapProxy(context, map, [ ... path, index ], readonly);
+ for (const key in value) {
+ proxyMap[key] = value[key]
+ }
+ break;
+ default:
+ context.insert(objectId, index, value, datatype)
+ }
+ index += 1
+ }
+ return result
+ },
+
+ unshift(...values) {
+ this.splice(0, 0, ...values)
+ return context.length(objectId)
+ },
+
+ entries() {
+ let i = 0;
+ const iterator = {
+ next: () => {
+ let value = valueAt(target, i)
+ if (value === undefined) {
+ return { value: undefined, done: true }
+ } else {
+ return { value: [ i, value ], done: false }
+ }
+ }
+ }
+ return iterator
+ },
+
+ keys() {
+ let i = 0;
+ let len = context.length(objectId, heads)
+ const iterator = {
+ next: () => {
+ let value = undefined
+ if (i < len) { value = i; i++ }
+ return { value, done: true }
+ }
+ }
+ return iterator
+ },
+
+ values() {
+ let i = 0;
+ const iterator = {
+ next: () => {
+ let value = valueAt(target, i)
+ if (value === undefined) {
+ return { value: undefined, done: true }
+ } else {
+ return { value, done: false }
+ }
+ }
+ }
+ return iterator
+ }
+ }
+
+ // Read-only methods that can delegate to the JavaScript built-in implementations
+ // FIXME - super slow
+ for (let method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes',
+ 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight',
+ 'slice', 'some', 'toLocaleString', 'toString']) {
+ methods[method] = (...args) => {
+ const list = []
+ while (true) {
+ let value = valueAt(target, list.length)
+ if (value == undefined) {
+ break
+ }
+ list.push(value)
+ }
+
+ return list[method](...args)
+ }
+ }
+
+ return methods
+}
+
+function textMethods(target) {
+ const {context, objectId, path, readonly, frozen} = target
+ const methods = {
+ set (index, value) {
+ return this[index] = value
+ },
+ get (index) {
+ return this[index]
+ },
+ toString () {
+ let str = ''
+ let length = this.length
+ for (let i = 0; i < length; i++) {
+ const value = this.get(i)
+ if (typeof value === 'string') str += value
+ }
+ return str
+ },
+ toSpans () {
+ let spans = []
+ let chars = ''
+ let length = this.length
+ for (let i = 0; i < length; i++) {
+ const value = this[i]
+ if (typeof value === 'string') {
+ chars += value
+ } else {
+ if (chars.length > 0) {
+ spans.push(chars)
+ chars = ''
+ }
+ spans.push(value)
+ }
+ }
+ if (chars.length > 0) {
+ spans.push(chars)
+ }
+ return spans
+ },
+ toJSON () {
+ return this.toString()
+ }
+ }
+ return methods
+}
+
+
+module.exports = { rootProxy, textProxy, listProxy, mapProxy, MapHandler, ListHandler, TextHandler }
diff --git a/javascript/test/legacy/sync.js b/automerge-js/src/sync.js
similarity index 80%
rename from javascript/test/legacy/sync.js
rename to automerge-js/src/sync.js
index 233c4292..2ae3f4e4 100644
--- a/javascript/test/legacy/sync.js
+++ b/automerge-js/src/sync.js
@@ -16,15 +16,11 @@
* last sync to disk), and we fall back to sending the entire document in this case.
*/
-const Backend = null //require('./backend')
-const {
- hexStringToBytes,
- bytesToHexString,
- Encoder,
- Decoder,
-} = require("./encoding")
-const { decodeChangeMeta } = require("./columnar")
-const { copyObject } = require("./common")
+//const Backend = require('./backend')
+const Backend = {} //require('./backend')
+const { hexStringToBytes, bytesToHexString, Encoder, Decoder } = require('./encoding')
+const { decodeChangeMeta } = require('./columnar')
+const { copyObject } = require('../src/common')
const HASH_SIZE = 32 // 256 bits = 32 bytes
const MESSAGE_TYPE_SYNC = 0x42 // first byte of a sync message, for identification
@@ -33,8 +29,7 @@ const PEER_STATE_TYPE = 0x43 // first byte of an encoded peer state, for identif
// These constants correspond to a 1% false positive rate. The values can be changed without
// breaking compatibility of the network protocol, since the parameters used for a particular
// Bloom filter are encoded in the wire format.
-const BITS_PER_ENTRY = 10,
- NUM_PROBES = 7
+const BITS_PER_ENTRY = 10, NUM_PROBES = 7
/**
* A Bloom filter implementation that can be serialised to a byte array for transmission
@@ -42,15 +37,13 @@ const BITS_PER_ENTRY = 10,
* so this implementation does not perform its own hashing.
*/
class BloomFilter {
- constructor(arg) {
+ constructor (arg) {
if (Array.isArray(arg)) {
// arg is an array of SHA256 hashes in hexadecimal encoding
this.numEntries = arg.length
this.numBitsPerEntry = BITS_PER_ENTRY
this.numProbes = NUM_PROBES
- this.bits = new Uint8Array(
- Math.ceil((this.numEntries * this.numBitsPerEntry) / 8)
- )
+ this.bits = new Uint8Array(Math.ceil(this.numEntries * this.numBitsPerEntry / 8))
for (let hash of arg) this.addHash(hash)
} else if (arg instanceof Uint8Array) {
if (arg.byteLength === 0) {
@@ -63,12 +56,10 @@ class BloomFilter {
this.numEntries = decoder.readUint32()
this.numBitsPerEntry = decoder.readUint32()
this.numProbes = decoder.readUint32()
- this.bits = decoder.readRawBytes(
- Math.ceil((this.numEntries * this.numBitsPerEntry) / 8)
- )
+ this.bits = decoder.readRawBytes(Math.ceil(this.numEntries * this.numBitsPerEntry / 8))
}
} else {
- throw new TypeError("invalid argument")
+ throw new TypeError('invalid argument')
}
}
@@ -96,32 +87,12 @@ class BloomFilter {
* http://www.ccis.northeastern.edu/home/pete/pub/bloom-filters-verification.pdf
*/
getProbes(hash) {
- const hashBytes = hexStringToBytes(hash),
- modulo = 8 * this.bits.byteLength
- if (hashBytes.byteLength !== 32)
- throw new RangeError(`Not a 256-bit hash: ${hash}`)
+ const hashBytes = hexStringToBytes(hash), modulo = 8 * this.bits.byteLength
+ if (hashBytes.byteLength !== 32) throw new RangeError(`Not a 256-bit hash: ${hash}`)
// on the next three lines, the right shift means interpret value as unsigned
- let x =
- ((hashBytes[0] |
- (hashBytes[1] << 8) |
- (hashBytes[2] << 16) |
- (hashBytes[3] << 24)) >>>
- 0) %
- modulo
- let y =
- ((hashBytes[4] |
- (hashBytes[5] << 8) |
- (hashBytes[6] << 16) |
- (hashBytes[7] << 24)) >>>
- 0) %
- modulo
- let z =
- ((hashBytes[8] |
- (hashBytes[9] << 8) |
- (hashBytes[10] << 16) |
- (hashBytes[11] << 24)) >>>
- 0) %
- modulo
+ let x = ((hashBytes[0] | hashBytes[1] << 8 | hashBytes[2] << 16 | hashBytes[3] << 24) >>> 0) % modulo
+ let y = ((hashBytes[4] | hashBytes[5] << 8 | hashBytes[6] << 16 | hashBytes[7] << 24) >>> 0) % modulo
+ let z = ((hashBytes[8] | hashBytes[9] << 8 | hashBytes[10] << 16 | hashBytes[11] << 24) >>> 0) % modulo
const probes = [x]
for (let i = 1; i < this.numProbes; i++) {
x = (x + y) % modulo
@@ -158,14 +129,12 @@ class BloomFilter {
* Encodes a sorted array of SHA-256 hashes (as hexadecimal strings) into a byte array.
*/
function encodeHashes(encoder, hashes) {
- if (!Array.isArray(hashes)) throw new TypeError("hashes must be an array")
+ if (!Array.isArray(hashes)) throw new TypeError('hashes must be an array')
encoder.appendUint32(hashes.length)
for (let i = 0; i < hashes.length; i++) {
- if (i > 0 && hashes[i - 1] >= hashes[i])
- throw new RangeError("hashes must be sorted")
+ if (i > 0 && hashes[i - 1] >= hashes[i]) throw new RangeError('hashes must be sorted')
const bytes = hexStringToBytes(hashes[i])
- if (bytes.byteLength !== HASH_SIZE)
- throw new TypeError("heads hashes must be 256 bits")
+ if (bytes.byteLength !== HASH_SIZE) throw new TypeError('heads hashes must be 256 bits')
encoder.appendRawBytes(bytes)
}
}
@@ -175,8 +144,7 @@ function encodeHashes(encoder, hashes) {
* array of hex strings.
*/
function decodeHashes(decoder) {
- let length = decoder.readUint32(),
- hashes = []
+ let length = decoder.readUint32(), hashes = []
for (let i = 0; i < length; i++) {
hashes.push(bytesToHexString(decoder.readRawBytes(HASH_SIZE)))
}
@@ -216,11 +184,11 @@ function decodeSyncMessage(bytes) {
const heads = decodeHashes(decoder)
const need = decodeHashes(decoder)
const haveCount = decoder.readUint32()
- let message = { heads, need, have: [], changes: [] }
+ let message = {heads, need, have: [], changes: []}
for (let i = 0; i < haveCount; i++) {
const lastSync = decodeHashes(decoder)
const bloom = decoder.readPrefixedBytes(decoder)
- message.have.push({ lastSync, bloom })
+ message.have.push({lastSync, bloom})
}
const changeCount = decoder.readUint32()
for (let i = 0; i < changeCount; i++) {
@@ -267,7 +235,7 @@ function decodeSyncState(bytes) {
function makeBloomFilter(backend, lastSync) {
const newChanges = Backend.getChanges(backend, lastSync)
const hashes = newChanges.map(change => decodeChangeMeta(change, true).hash)
- return { lastSync, bloom: new BloomFilter(hashes).bytes }
+ return {lastSync, bloom: new BloomFilter(hashes).bytes}
}
/**
@@ -278,26 +246,20 @@ function makeBloomFilter(backend, lastSync) {
*/
function getChangesToSend(backend, have, need) {
if (have.length === 0) {
- return need
- .map(hash => Backend.getChangeByHash(backend, hash))
- .filter(change => change !== undefined)
+ return need.map(hash => Backend.getChangeByHash(backend, hash)).filter(change => change !== undefined)
}
- let lastSyncHashes = {},
- bloomFilters = []
+ let lastSyncHashes = {}, bloomFilters = []
for (let h of have) {
for (let hash of h.lastSync) lastSyncHashes[hash] = true
bloomFilters.push(new BloomFilter(h.bloom))
}
// Get all changes that were added since the last sync
- const changes = Backend.getChanges(backend, Object.keys(lastSyncHashes)).map(
- change => decodeChangeMeta(change, true)
- )
+ const changes = Backend.getChanges(backend, Object.keys(lastSyncHashes))
+ .map(change => decodeChangeMeta(change, true))
- let changeHashes = {},
- dependents = {},
- hashesToSend = {}
+ let changeHashes = {}, dependents = {}, hashesToSend = {}
for (let change of changes) {
changeHashes[change.hash] = true
@@ -331,8 +293,7 @@ function getChangesToSend(backend, have, need) {
let changesToSend = []
for (let hash of need) {
hashesToSend[hash] = true
- if (!changeHashes[hash]) {
- // Change is not among those returned by getMissingChanges()?
+ if (!changeHashes[hash]) { // Change is not among those returned by getMissingChanges()?
const change = Backend.getChangeByHash(backend, hash)
if (change) changesToSend.push(change)
}
@@ -357,7 +318,7 @@ function initSyncState() {
}
function compareArrays(a, b) {
- return a.length === b.length && a.every((v, i) => v === b[i])
+ return (a.length === b.length) && a.every((v, i) => v === b[i])
}
/**
@@ -369,19 +330,10 @@ function generateSyncMessage(backend, syncState) {
throw new Error("generateSyncMessage called with no Automerge document")
}
if (!syncState) {
- throw new Error(
- "generateSyncMessage requires a syncState, which can be created with initSyncState()"
- )
+ throw new Error("generateSyncMessage requires a syncState, which can be created with initSyncState()")
}
- let {
- sharedHeads,
- lastSentHeads,
- theirHeads,
- theirNeed,
- theirHave,
- sentHashes,
- } = syncState
+ let { sharedHeads, lastSentHeads, theirHeads, theirNeed, theirHave, sentHashes } = syncState
const ourHeads = Backend.getHeads(backend)
// Hashes to explicitly request from the remote peer: any missing dependencies of unapplied
@@ -405,28 +357,18 @@ function generateSyncMessage(backend, syncState) {
const lastSync = theirHave[0].lastSync
if (!lastSync.every(hash => Backend.getChangeByHash(backend, hash))) {
// we need to queue them to send us a fresh sync message, the one they sent is uninteligible so we don't know what they need
- const resetMsg = {
- heads: ourHeads,
- need: [],
- have: [{ lastSync: [], bloom: new Uint8Array(0) }],
- changes: [],
- }
+ const resetMsg = {heads: ourHeads, need: [], have: [{ lastSync: [], bloom: new Uint8Array(0) }], changes: []}
return [syncState, encodeSyncMessage(resetMsg)]
}
}
// XXX: we should limit ourselves to only sending a subset of all the messages, probably limited by a total message size
// these changes should ideally be RLE encoded but we haven't implemented that yet.
- let changesToSend =
- Array.isArray(theirHave) && Array.isArray(theirNeed)
- ? getChangesToSend(backend, theirHave, theirNeed)
- : []
+ let changesToSend = Array.isArray(theirHave) && Array.isArray(theirNeed) ? getChangesToSend(backend, theirHave, theirNeed) : []
// If the heads are equal, we're in sync and don't need to do anything further
- const headsUnchanged =
- Array.isArray(lastSentHeads) && compareArrays(ourHeads, lastSentHeads)
- const headsEqual =
- Array.isArray(theirHeads) && compareArrays(ourHeads, theirHeads)
+ const headsUnchanged = Array.isArray(lastSentHeads) && compareArrays(ourHeads, lastSentHeads)
+ const headsEqual = Array.isArray(theirHeads) && compareArrays(ourHeads, theirHeads)
if (headsUnchanged && headsEqual && changesToSend.length === 0) {
// no need to send a sync message if we know we're synced!
return [syncState, null]
@@ -434,19 +376,12 @@ function generateSyncMessage(backend, syncState) {
// TODO: this recomputes the SHA-256 hash of each change; we should restructure this to avoid the
// unnecessary recomputation
- changesToSend = changesToSend.filter(
- change => !sentHashes[decodeChangeMeta(change, true).hash]
- )
+ changesToSend = changesToSend.filter(change => !sentHashes[decodeChangeMeta(change, true).hash])
// Regular response to a sync message: send any changes that the other node
// doesn't have. We leave the "have" field empty because the previous message
// generated by `syncStart` already indicated what changes we have.
- const syncMessage = {
- heads: ourHeads,
- have: ourHave,
- need: ourNeed,
- changes: changesToSend,
- }
+ const syncMessage = {heads: ourHeads, have: ourHave, need: ourNeed, changes: changesToSend}
if (changesToSend.length > 0) {
sentHashes = copyObject(sentHashes)
for (const change of changesToSend) {
@@ -454,10 +389,7 @@ function generateSyncMessage(backend, syncState) {
}
}
- syncState = Object.assign({}, syncState, {
- lastSentHeads: ourHeads,
- sentHashes,
- })
+ syncState = Object.assign({}, syncState, {lastSentHeads: ourHeads, sentHashes})
return [syncState, encodeSyncMessage(syncMessage)]
}
@@ -475,14 +407,13 @@ function generateSyncMessage(backend, syncState) {
* another peer, that means that peer had those changes, and therefore we now both know about them.
*/
function advanceHeads(myOldHeads, myNewHeads, ourOldSharedHeads) {
- const newHeads = myNewHeads.filter(head => !myOldHeads.includes(head))
- const commonHeads = ourOldSharedHeads.filter(head =>
- myNewHeads.includes(head)
- )
+ const newHeads = myNewHeads.filter((head) => !myOldHeads.includes(head))
+ const commonHeads = ourOldSharedHeads.filter((head) => myNewHeads.includes(head))
const advancedHeads = [...new Set([...newHeads, ...commonHeads])].sort()
return advancedHeads
}
+
/**
* Given a backend, a message message and the state of our peer, apply any changes, update what
* we believe about the peer, and (if there were applied changes) produce a patch for the frontend
@@ -492,13 +423,10 @@ function receiveSyncMessage(backend, oldSyncState, binaryMessage) {
throw new Error("generateSyncMessage called with no Automerge document")
}
if (!oldSyncState) {
- throw new Error(
- "generateSyncMessage requires a syncState, which can be created with initSyncState()"
- )
+ throw new Error("generateSyncMessage requires a syncState, which can be created with initSyncState()")
}
- let { sharedHeads, lastSentHeads, sentHashes } = oldSyncState,
- patch = null
+ let { sharedHeads, lastSentHeads, sentHashes } = oldSyncState, patch = null
const message = decodeSyncMessage(binaryMessage)
const beforeHeads = Backend.getHeads(backend)
@@ -507,27 +435,18 @@ function receiveSyncMessage(backend, oldSyncState, binaryMessage) {
// changes without applying them. The set of changes may also be incomplete if the sender decided
// to break a large set of changes into chunks.
if (message.changes.length > 0) {
- ;[backend, patch] = Backend.applyChanges(backend, message.changes)
- sharedHeads = advanceHeads(
- beforeHeads,
- Backend.getHeads(backend),
- sharedHeads
- )
+ [backend, patch] = Backend.applyChanges(backend, message.changes)
+ sharedHeads = advanceHeads(beforeHeads, Backend.getHeads(backend), sharedHeads)
}
// If heads are equal, indicate we don't need to send a response message
- if (
- message.changes.length === 0 &&
- compareArrays(message.heads, beforeHeads)
- ) {
+ if (message.changes.length === 0 && compareArrays(message.heads, beforeHeads)) {
lastSentHeads = message.heads
}
// If all of the remote heads are known to us, that means either our heads are equal, or we are
// ahead of the remote peer. In this case, take the remote heads to be our shared heads.
- const knownHeads = message.heads.filter(head =>
- Backend.getChangeByHash(backend, head)
- )
+ const knownHeads = message.heads.filter(head => Backend.getChangeByHash(backend, head))
if (knownHeads.length === message.heads.length) {
sharedHeads = message.heads
// If the remote peer has lost all its data, reset our state to perform a full resync
@@ -549,18 +468,14 @@ function receiveSyncMessage(backend, oldSyncState, binaryMessage) {
theirHave: message.have, // the information we need to calculate the changes they need
theirHeads: message.heads,
theirNeed: message.need,
- sentHashes,
+ sentHashes
}
return [backend, syncState, patch]
}
module.exports = {
- receiveSyncMessage,
- generateSyncMessage,
- encodeSyncMessage,
- decodeSyncMessage,
- initSyncState,
- encodeSyncState,
- decodeSyncState,
- BloomFilter, // BloomFilter is a private API, exported only for testing purposes
+ receiveSyncMessage, generateSyncMessage,
+ encodeSyncMessage, decodeSyncMessage,
+ initSyncState, encodeSyncState, decodeSyncState,
+ BloomFilter // BloomFilter is a private API, exported only for testing purposes
}
diff --git a/automerge-js/src/text.js b/automerge-js/src/text.js
new file mode 100644
index 00000000..a7f442fe
--- /dev/null
+++ b/automerge-js/src/text.js
@@ -0,0 +1,132 @@
+const { OBJECT_ID } = require('./constants')
+const { isObject } = require('../src/common')
+
+class Text {
+ constructor (text) {
+ const instance = Object.create(Text.prototype)
+ if (typeof text === 'string') {
+ instance.elems = [...text]
+ } else if (Array.isArray(text)) {
+ instance.elems = text
+ } else if (text === undefined) {
+ instance.elems = []
+ } else {
+ throw new TypeError(`Unsupported initial value for Text: ${text}`)
+ }
+ return instance
+ }
+
+ get length () {
+ return this.elems.length
+ }
+
+ get (index) {
+ return this.elems[index]
+ }
+
+ getElemId (index) {
+ return undefined
+ }
+
+ /**
+ * Iterates over the text elements character by character, including any
+ * inline objects.
+ */
+ [Symbol.iterator] () {
+ let elems = this.elems, index = -1
+ return {
+ next () {
+ index += 1
+ if (index < elems.length) {
+ return {done: false, value: elems[index]}
+ } else {
+ return {done: true}
+ }
+ }
+ }
+ }
+
+ /**
+ * Returns the content of the Text object as a simple string, ignoring any
+ * non-character elements.
+ */
+ toString() {
+ // Concatting to a string is faster than creating an array and then
+ // .join()ing for small (<100KB) arrays.
+ // https://jsperf.com/join-vs-loop-w-type-test
+ let str = ''
+ for (const elem of this.elems) {
+ if (typeof elem === 'string') str += elem
+ }
+ return str
+ }
+
+ /**
+ * Returns the content of the Text object as a sequence of strings,
+ * interleaved with non-character elements.
+ *
+ * For example, the value ['a', 'b', {x: 3}, 'c', 'd'] has spans:
+ * => ['ab', {x: 3}, 'cd']
+ */
+ toSpans() {
+ let spans = []
+ let chars = ''
+ for (const elem of this.elems) {
+ if (typeof elem === 'string') {
+ chars += elem
+ } else {
+ if (chars.length > 0) {
+ spans.push(chars)
+ chars = ''
+ }
+ spans.push(elem)
+ }
+ }
+ if (chars.length > 0) {
+ spans.push(chars)
+ }
+ return spans
+ }
+
+ /**
+ * Returns the content of the Text object as a simple string, so that the
+ * JSON serialization of an Automerge document represents text nicely.
+ */
+ toJSON() {
+ return this.toString()
+ }
+
+ /**
+ * Updates the list item at position `index` to a new value `value`.
+ */
+ set (index, value) {
+ this.elems[index] = value
+ }
+
+ /**
+ * Inserts new list items `values` starting at position `index`.
+ */
+ insertAt(index, ...values) {
+ this.elems.splice(index, 0, ... values)
+ }
+
+ /**
+ * Deletes `numDelete` list items starting at position `index`.
+ * if `numDelete` is not given, one item is deleted.
+ */
+ deleteAt(index, numDelete = 1) {
+ this.elems.splice(index, numDelete)
+ }
+}
+
+// Read-only methods that can delegate to the JavaScript built-in array
+for (let method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes',
+ 'indexOf', 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight',
+ 'slice', 'some', 'toLocaleString']) {
+ Text.prototype[method] = function (...args) {
+ const array = [...this]
+ return array[method](...args)
+ }
+}
+
+module.exports = { Text }
diff --git a/automerge-js/src/uuid.js b/automerge-js/src/uuid.js
new file mode 100644
index 00000000..42a8cc6e
--- /dev/null
+++ b/automerge-js/src/uuid.js
@@ -0,0 +1,16 @@
+const { v4: uuid } = require('uuid')
+
+function defaultFactory() {
+ return uuid().replace(/-/g, '')
+}
+
+let factory = defaultFactory
+
+function makeUuid() {
+ return factory()
+}
+
+makeUuid.setFactory = newFactory => { factory = newFactory }
+makeUuid.reset = () => { factory = defaultFactory }
+
+module.exports = makeUuid
diff --git a/automerge-js/test/basic_test.js b/automerge-js/test/basic_test.js
new file mode 100644
index 00000000..68d2fecf
--- /dev/null
+++ b/automerge-js/test/basic_test.js
@@ -0,0 +1,164 @@
+
+const assert = require('assert')
+const util = require('util')
+const Automerge = require('..')
+
+describe('Automerge', () => {
+ describe('basics', () => {
+ it('should init clone and free', () => {
+ let doc1 = Automerge.init()
+ let doc2 = Automerge.clone(doc1);
+ })
+
+ it('handle basic set and read on root object', () => {
+ let doc1 = Automerge.init()
+ let doc2 = Automerge.change(doc1, (d) => {
+ d.hello = "world"
+ d.big = "little"
+ d.zip = "zop"
+ d.app = "dap"
+ assert.deepEqual(d, { hello: "world", big: "little", zip: "zop", app: "dap" })
+ })
+ assert.deepEqual(doc2, { hello: "world", big: "little", zip: "zop", app: "dap" })
+ })
+
+ it('handle basic sets over many changes', () => {
+ let doc1 = Automerge.init()
+ let timestamp = new Date();
+ let counter = new Automerge.Counter(100);
+ let bytes = new Uint8Array([10,11,12]);
+ let doc2 = Automerge.change(doc1, (d) => {
+ d.hello = "world"
+ })
+ let doc3 = Automerge.change(doc2, (d) => {
+ d.counter1 = counter
+ })
+ let doc4 = Automerge.change(doc3, (d) => {
+ d.timestamp1 = timestamp
+ })
+ let doc5 = Automerge.change(doc4, (d) => {
+ d.app = null
+ })
+ let doc6 = Automerge.change(doc5, (d) => {
+ d.bytes1 = bytes
+ })
+ let doc7 = Automerge.change(doc6, (d) => {
+ d.uint = new Automerge.Uint(1)
+ d.int = new Automerge.Int(-1)
+ d.float64 = new Automerge.Float64(5.5)
+ d.number1 = 100
+ d.number2 = -45.67
+ d.true = true
+ d.false = false
+ })
+
+ assert.deepEqual(doc7, { hello: "world", true: true, false: false, int: -1, uint: 1, float64: 5.5, number1: 100, number2: -45.67, counter1: counter, timestamp1: timestamp, bytes1: bytes, app: null })
+
+ let changes = Automerge.getAllChanges(doc7)
+ let t1 = Automerge.init()
+ ;let [t2] = Automerge.applyChanges(t1, changes)
+ assert.deepEqual(doc7,t2)
+ })
+
+ it('handle overwrites to values', () => {
+ let doc1 = Automerge.init()
+ let doc2 = Automerge.change(doc1, (d) => {
+ d.hello = "world1"
+ })
+ let doc3 = Automerge.change(doc2, (d) => {
+ d.hello = "world2"
+ })
+ let doc4 = Automerge.change(doc3, (d) => {
+ d.hello = "world3"
+ })
+ let doc5 = Automerge.change(doc4, (d) => {
+ d.hello = "world4"
+ })
+ assert.deepEqual(doc5, { hello: "world4" } )
+ })
+
+ it('handle set with object value', () => {
+ let doc1 = Automerge.init()
+ let doc2 = Automerge.change(doc1, (d) => {
+ d.subobj = { hello: "world", subsubobj: { zip: "zop" } }
+ })
+ assert.deepEqual(doc2, { subobj: { hello: "world", subsubobj: { zip: "zop" } } })
+ })
+
+ it('handle simple list creation', () => {
+ let doc1 = Automerge.init()
+ let doc2 = Automerge.change(doc1, (d) => d.list = [])
+ assert.deepEqual(doc2, { list: []})
+ })
+
+ it('handle simple lists', () => {
+ let doc1 = Automerge.init()
+ let doc2 = Automerge.change(doc1, (d) => {
+ d.list = [ 1, 2, 3 ]
+ })
+ assert.deepEqual(doc2.list.length, 3)
+ assert.deepEqual(doc2.list[0], 1)
+ assert.deepEqual(doc2.list[1], 2)
+ assert.deepEqual(doc2.list[2], 3)
+ assert.deepEqual(doc2, { list: [1,2,3] })
+ // assert.deepStrictEqual(Automerge.toJS(doc2), { list: [1,2,3] })
+
+ let doc3 = Automerge.change(doc2, (d) => {
+ d.list[1] = "a"
+ })
+
+ assert.deepEqual(doc3.list.length, 3)
+ assert.deepEqual(doc3.list[0], 1)
+ assert.deepEqual(doc3.list[1], "a")
+ assert.deepEqual(doc3.list[2], 3)
+ assert.deepEqual(doc3, { list: [1,"a",3] })
+ })
+ it('handle simple lists', () => {
+ let doc1 = Automerge.init()
+ let doc2 = Automerge.change(doc1, (d) => {
+ d.list = [ 1, 2, 3 ]
+ })
+ let changes = Automerge.getChanges(doc1, doc2)
+ let docB1 = Automerge.init()
+ ;let [docB2] = Automerge.applyChanges(docB1, changes)
+ assert.deepEqual(docB2, doc2);
+ })
+ it('handle text', () => {
+ let doc1 = Automerge.init()
+ let tmp = new Automerge.Text("hello")
+ let doc2 = Automerge.change(doc1, (d) => {
+ d.list = new Automerge.Text("hello")
+ d.list.insertAt(2,"Z")
+ })
+ let changes = Automerge.getChanges(doc1, doc2)
+ let docB1 = Automerge.init()
+ ;let [docB2] = Automerge.applyChanges(docB1, changes)
+ assert.deepEqual(docB2, doc2);
+ })
+
+ it('have many list methods', () => {
+ let doc1 = Automerge.from({ list: [1,2,3] })
+ assert.deepEqual(doc1, { list: [1,2,3] });
+ let doc2 = Automerge.change(doc1, (d) => {
+ d.list.splice(1,1,9,10)
+ })
+ assert.deepEqual(doc2, { list: [1,9,10,3] });
+ let doc3 = Automerge.change(doc2, (d) => {
+ d.list.push(11,12)
+ })
+ assert.deepEqual(doc3, { list: [1,9,10,3,11,12] });
+ let doc4 = Automerge.change(doc3, (d) => {
+ d.list.unshift(2,2)
+ })
+ assert.deepEqual(doc4, { list: [2,2,1,9,10,3,11,12] });
+ let doc5 = Automerge.change(doc4, (d) => {
+ d.list.shift()
+ })
+ assert.deepEqual(doc5, { list: [2,1,9,10,3,11,12] });
+ let doc6 = Automerge.change(doc5, (d) => {
+ d.list.insertAt(3,100,101)
+ })
+ assert.deepEqual(doc6, { list: [2,1,9,100,101,10,3,11,12] });
+ })
+ })
+})
diff --git a/automerge-js/test/columnar_test.js b/automerge-js/test/columnar_test.js
new file mode 100644
index 00000000..8cbe1482
--- /dev/null
+++ b/automerge-js/test/columnar_test.js
@@ -0,0 +1,97 @@
+const assert = require('assert')
+const { checkEncoded } = require('./helpers')
+const Automerge = require('..')
+const { encodeChange, decodeChange } = Automerge
+
+describe('change encoding', () => {
+ it('should encode text edits', () => {
+ /*
+ const change1 = {actor: 'aaaa', seq: 1, startOp: 1, time: 9, message: '', deps: [], ops: [
+ {action: 'makeText', obj: '_root', key: 'text', insert: false, pred: []},
+ {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'h', pred: []},
+ {action: 'del', obj: '1@aaaa', elemId: '2@aaaa', insert: false, pred: ['2@aaaa']},
+ {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'H', pred: []},
+ {action: 'set', obj: '1@aaaa', elemId: '4@aaaa', insert: true, value: 'i', pred: []}
+ ]}
+ */
+ const change1 = {actor: 'aaaa', seq: 1, startOp: 1, time: 9, message: null, deps: [], ops: [
+ {action: 'makeText', obj: '_root', key: 'text', pred: []},
+ {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'h', pred: []},
+ {action: 'del', obj: '1@aaaa', elemId: '2@aaaa', pred: ['2@aaaa']},
+ {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'H', pred: []},
+ {action: 'set', obj: '1@aaaa', elemId: '4@aaaa', insert: true, value: 'i', pred: []}
+ ]}
+ checkEncoded(encodeChange(change1), [
+ 0x85, 0x6f, 0x4a, 0x83, // magic bytes
+ 0xe2, 0xbd, 0xfb, 0xf5, // checksum
+ 1, 94, 0, 2, 0xaa, 0xaa, // chunkType: change, length, deps, actor 'aaaa'
+ 1, 1, 9, 0, 0, // seq, startOp, time, message, actor list
+ 12, 0x01, 4, 0x02, 4, // column count, objActor, objCtr
+ 0x11, 8, 0x13, 7, 0x15, 8, // keyActor, keyCtr, keyStr
+ 0x34, 4, 0x42, 6, // insert, action
+ 0x56, 6, 0x57, 3, // valLen, valRaw
+ 0x70, 6, 0x71, 2, 0x73, 2, // predNum, predActor, predCtr
+ 0, 1, 4, 0, // objActor column: null, 0, 0, 0, 0
+ 0, 1, 4, 1, // objCtr column: null, 1, 1, 1, 1
+ 0, 2, 0x7f, 0, 0, 1, 0x7f, 0, // keyActor column: null, null, 0, null, 0
+ 0, 1, 0x7c, 0, 2, 0x7e, 4, // keyCtr column: null, 0, 2, 0, 4
+ 0x7f, 4, 0x74, 0x65, 0x78, 0x74, 0, 4, // keyStr column: 'text', null, null, null, null
+ 1, 1, 1, 2, // insert column: false, true, false, true, true
+ 0x7d, 4, 1, 3, 2, 1, // action column: makeText, set, del, set, set
+ 0x7d, 0, 0x16, 0, 2, 0x16, // valLen column: 0, 0x16, 0, 0x16, 0x16
+ 0x68, 0x48, 0x69, // valRaw column: 'h', 'H', 'i'
+ 2, 0, 0x7f, 1, 2, 0, // predNum column: 0, 0, 1, 0, 0
+ 0x7f, 0, // predActor column: 0
+ 0x7f, 2 // predCtr column: 2
+ ])
+ const decoded = decodeChange(encodeChange(change1))
+ assert.deepStrictEqual(decoded, Object.assign({hash: decoded.hash}, change1))
+ })
+
+ // FIXME - skipping this b/c it was never implemented in the rust impl and isnt trivial
+/*
+ it.skip('should require strict ordering of preds', () => {
+ const change = new Uint8Array([
+ 133, 111, 74, 131, 31, 229, 112, 44, 1, 105, 1, 58, 30, 190, 100, 253, 180, 180, 66, 49, 126,
+ 81, 142, 10, 3, 35, 140, 189, 231, 34, 145, 57, 66, 23, 224, 149, 64, 97, 88, 140, 168, 194,
+ 229, 4, 244, 209, 58, 138, 67, 140, 1, 152, 236, 250, 2, 0, 1, 4, 55, 234, 66, 242, 8, 21, 11,
+ 52, 1, 66, 2, 86, 3, 87, 10, 112, 2, 113, 3, 115, 4, 127, 9, 99, 111, 109, 109, 111, 110, 86,
+ 97, 114, 1, 127, 1, 127, 166, 1, 52, 48, 57, 49, 52, 57, 52, 53, 56, 50, 127, 2, 126, 0, 1,
+ 126, 139, 1, 0
+ ])
+ assert.throws(() => { decodeChange(change) }, /operation IDs are not in ascending order/)
+ })
+*/
+
+ describe('with trailing bytes', () => {
+ let change = new Uint8Array([
+ 0x85, 0x6f, 0x4a, 0x83, // magic bytes
+ 0xb2, 0x98, 0x9e, 0xa9, // checksum
+ 1, 61, 0, 2, 0x12, 0x34, // chunkType: change, length, deps, actor '1234'
+ 1, 1, 252, 250, 220, 255, 5, // seq, startOp, time
+ 14, 73, 110, 105, 116, 105, 97, 108, 105, 122, 97, 116, 105, 111, 110, // message: 'Initialization'
+ 0, 6, // actor list, column count
+ 0x15, 3, 0x34, 1, 0x42, 2, // keyStr, insert, action
+ 0x56, 2, 0x57, 1, 0x70, 2, // valLen, valRaw, predNum
+ 0x7f, 1, 0x78, // keyStr: 'x'
+ 1, // insert: false
+ 0x7f, 1, // action: set
+ 0x7f, 19, // valLen: 1 byte of type uint
+ 1, // valRaw: 1
+ 0x7f, 0, // predNum: 0
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 // 10 trailing bytes
+ ])
+
+ it('should allow decoding and re-encoding', () => {
+ // NOTE: This calls the JavaScript encoding and decoding functions, even when the WebAssembly
+ // backend is loaded. Should the wasm backend export its own functions for testing?
+ checkEncoded(change, encodeChange(decodeChange(change)))
+ })
+
+ it('should be preserved in document encoding', () => {
+ const [doc] = Automerge.applyChanges(Automerge.init(), [change])
+ const [reconstructed] = Automerge.getAllChanges(Automerge.load(Automerge.save(doc)))
+ checkEncoded(change, reconstructed)
+ })
+ })
+})
diff --git a/javascript/test/helpers.ts b/automerge-js/test/helpers.js
similarity index 56%
rename from javascript/test/helpers.ts
rename to automerge-js/test/helpers.js
index df76e558..c3fc52ae 100644
--- a/javascript/test/helpers.ts
+++ b/automerge-js/test/helpers.js
@@ -1,21 +1,16 @@
-import * as assert from "assert"
-import { Encoder } from "./legacy/encoding"
+const assert = require('assert')
+const { Encoder } = require('../src/encoding')
// Assertion that succeeds if the first argument deepStrictEquals at least one of the
// subsequent arguments (but we don't care which one)
-export function assertEqualsOneOf(actual, ...expected) {
+function assertEqualsOneOf(actual, ...expected) {
assert(expected.length > 0)
for (let i = 0; i < expected.length; i++) {
try {
assert.deepStrictEqual(actual, expected[i])
return // if we get here without an exception, that means success
} catch (e) {
- if (e instanceof assert.AssertionError) {
- if (!e.name.match(/^AssertionError/) || i === expected.length - 1)
- throw e
- } else {
- throw e
- }
+ if (!e.name.match(/^AssertionError/) || i === expected.length - 1) throw e
}
}
}
@@ -24,13 +19,14 @@ export function assertEqualsOneOf(actual, ...expected) {
* Asserts that the byte array maintained by `encoder` contains the same byte
* sequence as the array `bytes`.
*/
-export function checkEncoded(encoder, bytes, detail?) {
- const encoded = encoder instanceof Encoder ? encoder.buffer : encoder
+function checkEncoded(encoder, bytes, detail) {
+ const encoded = (encoder instanceof Encoder) ? encoder.buffer : encoder
const expected = new Uint8Array(bytes)
- const message =
- (detail ? `${detail}: ` : "") + `${encoded} expected to equal ${expected}`
+ const message = (detail ? `${detail}: ` : '') + `${encoded} expected to equal ${expected}`
assert(encoded.byteLength === expected.byteLength, message)
for (let i = 0; i < encoded.byteLength; i++) {
assert(encoded[i] === expected[i], message)
}
}
+
+module.exports = { assertEqualsOneOf, checkEncoded }
diff --git a/automerge-js/test/legacy_tests.js b/automerge-js/test/legacy_tests.js
new file mode 100644
index 00000000..76348d06
--- /dev/null
+++ b/automerge-js/test/legacy_tests.js
@@ -0,0 +1,1419 @@
+const assert = require('assert')
+//const Automerge = process.env.TEST_DIST === '1' ? require('../dist/automerge') : require('../src/automerge')
+const Automerge = require('../src')
+const { assertEqualsOneOf } = require('./helpers')
+const { decodeChange } = require('../src/columnar')
+//const { decodeChange } = Automerge
+
+const UUID_PATTERN = /^[0-9a-f]{32}$/
+const OPID_PATTERN = /^[0-9]+@[0-9a-f]{32}$/
+
+// CORE FEATURES
+//
+// TODO - Cursors
+// TODO - Tables
+// TODO - on-pass load() & reconstruct change from opset
+// TODO - micro-patches (needed for fully hydrated object in js)
+// TODO - valueAt(heads) / GC
+//
+// AUTOMERGE UNSUPPORTED
+//
+// TODO - patchCallback
+
+
+describe('Automerge', () => {
+ describe('initialization ', () => {
+ it('should initially be an empty map', () => {
+ const doc = Automerge.init()
+ assert.deepStrictEqual(doc, {})
+ })
+
+ it('should allow instantiating from an existing object', () => {
+ const initialState = { birds: { wrens: 3, magpies: 4 } }
+ const doc = Automerge.from(initialState)
+ assert.deepStrictEqual(doc, initialState)
+ })
+
+ it('should allow merging of an object initialized with `from`', () => {
+ let doc1 = Automerge.from({ cards: [] })
+ let doc2 = Automerge.merge(Automerge.init(), doc1)
+ assert.deepStrictEqual(doc2, { cards: [] })
+ })
+
+ it('should allow passing an actorId when instantiating from an existing object', () => {
+ const actorId = '1234'
+ let doc = Automerge.from({ foo: 1 }, actorId)
+ assert.strictEqual(Automerge.getActorId(doc), '1234')
+ })
+
+ it('accepts an empty object as initial state', () => {
+ const doc = Automerge.from({})
+ assert.deepStrictEqual(doc, {})
+ })
+
+ it('accepts an array as initial state, but converts it to an object', () => {
+ const doc = Automerge.from(['a', 'b', 'c'])
+ assert.deepStrictEqual(doc, { '0': 'a', '1': 'b', '2': 'c' })
+ })
+
+ it('accepts strings as initial values, but treats them as an array of characters', () => {
+ const doc = Automerge.from('abc')
+ assert.deepStrictEqual(doc, { '0': 'a', '1': 'b', '2': 'c' })
+ })
+
+ it('ignores numbers provided as initial values', () => {
+ const doc = Automerge.from(123)
+ assert.deepStrictEqual(doc, {})
+ })
+
+ it('ignores booleans provided as initial values', () => {
+ const doc1 = Automerge.from(false)
+ assert.deepStrictEqual(doc1, {})
+ const doc2 = Automerge.from(true)
+ assert.deepStrictEqual(doc2, {})
+ })
+ })
+
+ describe('sequential use', () => {
+ let s1, s2
+ beforeEach(() => {
+ s1 = Automerge.init()
+ })
+
+ it('should not mutate objects', () => {
+ s2 = Automerge.change(s1, doc => doc.foo = 'bar')
+ assert.strictEqual(s1.foo, undefined)
+ assert.strictEqual(s2.foo, 'bar')
+ })
+
+ it('changes should be retrievable', () => {
+ const change1 = Automerge.getLastLocalChange(s1)
+ s2 = Automerge.change(s1, doc => doc.foo = 'bar')
+ const change2 = Automerge.getLastLocalChange(s2)
+ assert.strictEqual(change1, undefined)
+ const change = decodeChange(change2)
+ assert.deepStrictEqual(change, {
+ actor: change.actor, deps: [], seq: 1, startOp: 1,
+ hash: change.hash, message: '', time: change.time,
+ ops: [{obj: '_root', key: 'foo', action: 'set', insert: false, value: 'bar', pred: []}]
+ })
+ })
+
+ it('should not register any conflicts on repeated assignment', () => {
+ assert.strictEqual(Automerge.getConflicts(s1, 'foo'), undefined)
+ s1 = Automerge.change(s1, 'change', doc => doc.foo = 'one')
+ assert.strictEqual(Automerge.getConflicts(s1, 'foo'), undefined)
+ s1 = Automerge.change(s1, 'change', doc => doc.foo = 'two')
+ assert.strictEqual(Automerge.getConflicts(s1, 'foo'), undefined)
+ })
+
+ describe('changes', () => {
+ it('should group several changes', () => {
+ s2 = Automerge.change(s1, 'change message', doc => {
+ doc.first = 'one'
+ assert.strictEqual(doc.first, 'one')
+ doc.second = 'two'
+ assert.deepStrictEqual(doc, {
+ first: 'one', second: 'two'
+ })
+ })
+ assert.deepStrictEqual(s1, {})
+ assert.deepStrictEqual(s2, {first: 'one', second: 'two'})
+ })
+
+ it('should freeze objects if desired', () => {
+ s1 = Automerge.init({freeze: true})
+ s2 = Automerge.change(s1, doc => doc.foo = 'bar')
+ try {
+ s2.foo = 'lemon'
+ } catch (e) { }
+ assert.strictEqual(s2.foo, 'bar')
+
+ let deleted = false
+ try {
+ deleted = delete s2.foo
+ } catch (e) { }
+ assert.strictEqual(s2.foo, 'bar')
+ assert.strictEqual(deleted, false)
+
+ Automerge.change(s2, () => {
+ try {
+ s2.foo = 'lemon'
+ } catch (e) { }
+ assert.strictEqual(s2.foo, 'bar')
+ })
+
+ assert.throws(() => { Object.assign(s2, {x: 4}) })
+ assert.strictEqual(s2.x, undefined)
+ })
+
+ it('should allow repeated reading and writing of values', () => {
+ s2 = Automerge.change(s1, 'change message', doc => {
+ doc.value = 'a'
+ assert.strictEqual(doc.value, 'a')
+ doc.value = 'b'
+ doc.value = 'c'
+ assert.strictEqual(doc.value, 'c')
+ })
+ assert.deepStrictEqual(s1, {})
+ assert.deepStrictEqual(s2, {value: 'c'})
+ })
+
+ it('should not record conflicts when writing the same field several times within one change', () => {
+ s1 = Automerge.change(s1, 'change message', doc => {
+ doc.value = 'a'
+ doc.value = 'b'
+ doc.value = 'c'
+ })
+ assert.strictEqual(s1.value, 'c')
+ assert.strictEqual(Automerge.getConflicts(s1, 'value'), undefined)
+ })
+
+ it('should return the unchanged state object if nothing changed', () => {
+ s2 = Automerge.change(s1, () => {})
+ assert.strictEqual(s2, s1)
+ })
+
+ it('should ignore field updates that write the existing value', () => {
+ s1 = Automerge.change(s1, doc => doc.field = 123)
+ s2 = Automerge.change(s1, doc => doc.field = 123)
+ assert.strictEqual(s2, s1)
+ })
+
+ it('should not ignore field updates that resolve a conflict', () => {
+ s2 = Automerge.merge(Automerge.init(), s1)
+ s1 = Automerge.change(s1, doc => doc.field = 123)
+ s2 = Automerge.change(s2, doc => doc.field = 321)
+ s1 = Automerge.merge(s1, s2)
+ assert.strictEqual(Object.keys(Automerge.getConflicts(s1, 'field')).length, 2)
+ const resolved = Automerge.change(s1, doc => doc.field = s1.field)
+ assert.notStrictEqual(resolved, s1)
+ assert.deepStrictEqual(resolved, {field: s1.field})
+ assert.strictEqual(Automerge.getConflicts(resolved, 'field'), undefined)
+ })
+
+ it('should ignore list element updates that write the existing value', () => {
+ s1 = Automerge.change(s1, doc => doc.list = [123])
+ s2 = Automerge.change(s1, doc => doc.list[0] = 123)
+ assert.strictEqual(s2, s1)
+ })
+
+ it('should not ignore list element updates that resolve a conflict', () => {
+ s1 = Automerge.change(s1, doc => doc.list = [1])
+ s2 = Automerge.merge(Automerge.init(), s1)
+ s1 = Automerge.change(s1, doc => doc.list[0] = 123)
+ s2 = Automerge.change(s2, doc => doc.list[0] = 321)
+ s1 = Automerge.merge(s1, s2)
+ assert.deepStrictEqual(Automerge.getConflicts(s1.list, 0), {
+ [`3@${Automerge.getActorId(s1)}`]: 123,
+ [`3@${Automerge.getActorId(s2)}`]: 321
+ })
+ const resolved = Automerge.change(s1, doc => doc.list[0] = s1.list[0])
+ assert.deepStrictEqual(resolved, s1)
+ assert.notStrictEqual(resolved, s1)
+ assert.strictEqual(Automerge.getConflicts(resolved.list, 0), undefined)
+ })
+
+ it('should sanity-check arguments', () => {
+ s1 = Automerge.change(s1, doc => doc.nested = {})
+ assert.throws(() => { Automerge.change({}, doc => doc.foo = 'bar') }, /must be the document root/)
+ assert.throws(() => { Automerge.change(s1.nested, doc => doc.foo = 'bar') }, /must be the document root/)
+ })
+
+ it('should not allow nested change blocks', () => {
+ assert.throws(() => {
+ Automerge.change(s1, doc1 => {
+ Automerge.change(doc1, doc2 => {
+ doc2.foo = 'bar'
+ })
+ })
+ }, /Calls to Automerge.change cannot be nested/)
+ assert.throws(() => {
+ s1 = Automerge.change(s1, doc1 => {
+ s2 = Automerge.change(s1, doc2 => doc2.two = 2)
+ doc1.one = 1
+ })
+ }, /Attempting to use an outdated Automerge document/)
+ })
+
+ it('should not allow the same base document to be used for multiple changes', () => {
+ assert.throws(() => {
+ Automerge.change(s1, doc => doc.one = 1)
+ Automerge.change(s1, doc => doc.two = 2)
+ }, /Attempting to use an outdated Automerge document/)
+ })
+
+ it('should allow a document to be cloned', () => {
+ s1 = Automerge.change(s1, doc => doc.zero = 0)
+ s2 = Automerge.clone(s1)
+ s1 = Automerge.change(s1, doc => doc.one = 1)
+ s2 = Automerge.change(s2, doc => doc.two = 2)
+ assert.deepStrictEqual(s1, {zero: 0, one: 1})
+ assert.deepStrictEqual(s2, {zero: 0, two: 2})
+ Automerge.free(s1)
+ Automerge.free(s2)
+ })
+
+ it('should work with Object.assign merges', () => {
+ s1 = Automerge.change(s1, doc1 => {
+ doc1.stuff = {foo: 'bar', baz: 'blur'}
+ })
+ s1 = Automerge.change(s1, doc1 => {
+ doc1.stuff = Object.assign({}, doc1.stuff, {baz: 'updated!'})
+ })
+ assert.deepStrictEqual(s1, {stuff: {foo: 'bar', baz: 'updated!'}})
+ })
+
+ it('should support Date objects in maps', () => {
+ const now = new Date()
+ s1 = Automerge.change(s1, doc => doc.now = now)
+ let changes = Automerge.getAllChanges(s1)
+ ;[s2] = Automerge.applyChanges(Automerge.init(), changes)
+ assert.strictEqual(s2.now instanceof Date, true)
+ assert.strictEqual(s2.now.getTime(), now.getTime())
+ })
+
+ it('should support Date objects in lists', () => {
+ const now = new Date()
+ s1 = Automerge.change(s1, doc => doc.list = [now])
+ let changes = Automerge.getAllChanges(s1)
+ ;[s2] = Automerge.applyChanges(Automerge.init(), changes)
+ assert.strictEqual(s2.list[0] instanceof Date, true)
+ assert.strictEqual(s2.list[0].getTime(), now.getTime())
+ })
+
+ /*
+ it.skip('should call patchCallback if supplied', () => {
+ const callbacks = [], actor = Automerge.getActorId(s1)
+ const s2 = Automerge.change(s1, {
+ patchCallback: (patch, before, after, local) => callbacks.push({patch, before, after, local})
+ }, doc => {
+ doc.birds = ['Goldfinch']
+ })
+ assert.strictEqual(callbacks.length, 1)
+ assert.deepStrictEqual(callbacks[0].patch, {
+ actor, seq: 1, maxOp: 2, deps: [], clock: {[actor]: 1}, pendingChanges: 0,
+ diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: {
+ objectId: `1@${actor}`, type: 'list', edits: [
+ {action: 'insert', index: 0, elemId: `2@${actor}`, opId: `2@${actor}`, value: {'type': 'value', value: 'Goldfinch'}}
+ ]
+ }}}}
+ })
+ assert.strictEqual(callbacks[0].before, s1)
+ assert.strictEqual(callbacks[0].after, s2)
+ assert.strictEqual(callbacks[0].local, true)
+ })
+ */
+
+ /*
+ it.skip('should call a patchCallback set up on document initialisation', () => {
+ const callbacks = []
+ s1 = Automerge.init({
+ patchCallback: (patch, before, after, local) => callbacks.push({patch, before, after, local})
+ })
+ const s2 = Automerge.change(s1, doc => doc.bird = 'Goldfinch')
+ const actor = Automerge.getActorId(s1)
+ assert.strictEqual(callbacks.length, 1)
+ assert.deepStrictEqual(callbacks[0].patch, {
+ actor, seq: 1, maxOp: 1, deps: [], clock: {[actor]: 1}, pendingChanges: 0,
+ diffs: {objectId: '_root', type: 'map', props: {bird: {[`1@${actor}`]: {type: 'value', value: 'Goldfinch'}}}}
+ })
+ assert.strictEqual(callbacks[0].before, s1)
+ assert.strictEqual(callbacks[0].after, s2)
+ assert.strictEqual(callbacks[0].local, true)
+ })
+ */
+ })
+
+ describe('emptyChange()', () => {
+ it('should append an empty change to the history', () => {
+ s1 = Automerge.change(s1, 'first change', doc => doc.field = 123)
+ s2 = Automerge.emptyChange(s1, 'empty change')
+ assert.notStrictEqual(s2, s1)
+ assert.deepStrictEqual(s2, s1)
+ assert.deepStrictEqual(Automerge.getHistory(s2).map(state => state.change.message), ['first change', 'empty change'])
+ })
+
+ it('should reference dependencies', () => {
+ s1 = Automerge.change(s1, doc => doc.field = 123)
+ s2 = Automerge.merge(Automerge.init(), s1)
+ s2 = Automerge.change(s2, doc => doc.other = 'hello')
+ s1 = Automerge.emptyChange(Automerge.merge(s1, s2))
+ const history = Automerge.getHistory(s1)
+ const emptyChange = history[2].change
+ assert.deepStrictEqual(emptyChange.deps, [history[0].change.hash, history[1].change.hash].sort())
+ assert.deepStrictEqual(emptyChange.ops, [])
+ })
+ })
+
+ describe('root object', () => {
+ it('should handle single-property assignment', () => {
+ s1 = Automerge.change(s1, 'set bar', doc => doc.foo = 'bar')
+ s1 = Automerge.change(s1, 'set zap', doc => doc.zip = 'zap')
+ assert.strictEqual(s1.foo, 'bar')
+ assert.strictEqual(s1.zip, 'zap')
+ assert.deepStrictEqual(s1, {foo: 'bar', zip: 'zap'})
+ })
+
+ it('should allow floating-point values', () => {
+ s1 = Automerge.change(s1, doc => doc.number = 1589032171.1)
+ assert.strictEqual(s1.number, 1589032171.1)
+ })
+
+ it('should handle multi-property assignment', () => {
+ s1 = Automerge.change(s1, 'multi-assign', doc => {
+ Object.assign(doc, {foo: 'bar', answer: 42})
+ })
+ assert.strictEqual(s1.foo, 'bar')
+ assert.strictEqual(s1.answer, 42)
+ assert.deepStrictEqual(s1, {foo: 'bar', answer: 42})
+ })
+
+ it('should handle root property deletion', () => {
+ s1 = Automerge.change(s1, 'set foo', doc => { doc.foo = 'bar'; doc.something = null })
+ s1 = Automerge.change(s1, 'del foo', doc => { delete doc.foo })
+ assert.strictEqual(s1.foo, undefined)
+ assert.strictEqual(s1.something, null)
+ assert.deepStrictEqual(s1, {something: null})
+ })
+
+ it('should follow JS delete behavior', () => {
+ s1 = Automerge.change(s1, 'set foo', doc => { doc.foo = 'bar' })
+ let deleted
+ s1 = Automerge.change(s1, 'del foo', doc => {
+ deleted = delete doc.foo
+ })
+ assert.strictEqual(deleted, true)
+ let deleted2
+ assert.doesNotThrow(() => {
+ s1 = Automerge.change(s1, 'del baz', doc => {
+ deleted2 = delete doc.baz
+ })
+ })
+ assert.strictEqual(deleted2, true)
+ })
+
+ it('should allow the type of a property to be changed', () => {
+ s1 = Automerge.change(s1, 'set number', doc => doc.prop = 123)
+ assert.strictEqual(s1.prop, 123)
+ s1 = Automerge.change(s1, 'set string', doc => doc.prop = '123')
+ assert.strictEqual(s1.prop, '123')
+ s1 = Automerge.change(s1, 'set null', doc => doc.prop = null)
+ assert.strictEqual(s1.prop, null)
+ s1 = Automerge.change(s1, 'set bool', doc => doc.prop = true)
+ assert.strictEqual(s1.prop, true)
+ })
+
+ it('should require property names to be valid', () => {
+ assert.throws(() => {
+ Automerge.change(s1, 'foo', doc => doc[''] = 'x')
+ }, /must not be an empty string/)
+ })
+
+ it('should not allow assignment of unsupported datatypes', () => {
+ Automerge.change(s1, doc => {
+ assert.throws(() => { doc.foo = undefined }, /Unsupported type of value: undefined/)
+ assert.throws(() => { doc.foo = {prop: undefined} }, /Unsupported type of value: undefined/)
+ assert.throws(() => { doc.foo = () => {} }, /Unsupported type of value: function/)
+ assert.throws(() => { doc.foo = Symbol('foo') }, /Unsupported type of value: symbol/)
+ })
+ })
+ })
+
+ describe('nested maps', () => {
+ it('should assign an objectId to nested maps', () => {
+ s1 = Automerge.change(s1, doc => { doc.nested = {} })
+ let id = Automerge.getObjectId(s1.nested)
+ assert.strictEqual(OPID_PATTERN.test(Automerge.getObjectId(s1.nested)), true)
+ assert.notEqual(Automerge.getObjectId(s1.nested), '_root')
+ })
+
+ it('should handle assignment of a nested property', () => {
+ s1 = Automerge.change(s1, 'first change', doc => {
+ doc.nested = {}
+ doc.nested.foo = 'bar'
+ })
+ s1 = Automerge.change(s1, 'second change', doc => {
+ doc.nested.one = 1
+ })
+ assert.deepStrictEqual(s1, {nested: {foo: 'bar', one: 1}})
+ assert.deepStrictEqual(s1.nested, {foo: 'bar', one: 1})
+ assert.strictEqual(s1.nested.foo, 'bar')
+ assert.strictEqual(s1.nested.one, 1)
+ })
+
+ it('should handle assignment of an object literal', () => {
+ s1 = Automerge.change(s1, doc => {
+ doc.textStyle = {bold: false, fontSize: 12}
+ })
+ assert.deepStrictEqual(s1, {textStyle: {bold: false, fontSize: 12}})
+ assert.deepStrictEqual(s1.textStyle, {bold: false, fontSize: 12})
+ assert.strictEqual(s1.textStyle.bold, false)
+ assert.strictEqual(s1.textStyle.fontSize, 12)
+ })
+
+ it('should handle assignment of multiple nested properties', () => {
+ s1 = Automerge.change(s1, doc => {
+ doc.textStyle = {bold: false, fontSize: 12}
+ Object.assign(doc.textStyle, {typeface: 'Optima', fontSize: 14})
+ })
+ assert.strictEqual(s1.textStyle.typeface, 'Optima')
+ assert.strictEqual(s1.textStyle.bold, false)
+ assert.strictEqual(s1.textStyle.fontSize, 14)
+ assert.deepStrictEqual(s1.textStyle, {typeface: 'Optima', bold: false, fontSize: 14})
+ })
+
+ it('should handle arbitrary-depth nesting', () => {
+ s1 = Automerge.change(s1, doc => {
+ doc.a = {b: {c: {d: {e: {f: {g: 'h'}}}}}}
+ })
+ s1 = Automerge.change(s1, doc => {
+ doc.a.b.c.d.e.f.i = 'j'
+ })
+ assert.deepStrictEqual(s1, {a: { b: { c: { d: { e: { f: { g: 'h', i: 'j'}}}}}}})
+ assert.strictEqual(s1.a.b.c.d.e.f.g, 'h')
+ assert.strictEqual(s1.a.b.c.d.e.f.i, 'j')
+ })
+
+ it('should allow an old object to be replaced with a new one', () => {
+ s1 = Automerge.change(s1, 'change 1', doc => {
+ doc.myPet = {species: 'dog', legs: 4, breed: 'dachshund'}
+ })
+ s2 = Automerge.change(s1, 'change 2', doc => {
+ doc.myPet = {species: 'koi', variety: '紅白', colors: {red: true, white: true, black: false}}
+ })
+ assert.deepStrictEqual(s1.myPet, {
+ species: 'dog', legs: 4, breed: 'dachshund'
+ })
+ assert.strictEqual(s1.myPet.breed, 'dachshund')
+ assert.deepStrictEqual(s2.myPet, {
+ species: 'koi', variety: '紅白',
+ colors: {red: true, white: true, black: false}
+ })
+ assert.strictEqual(s2.myPet.breed, undefined)
+ assert.strictEqual(s2.myPet.variety, '紅白')
+ })
+
+ it('should allow fields to be changed between primitive and nested map', () => {
+ s1 = Automerge.change(s1, doc => doc.color = '#ff7f00')
+ assert.strictEqual(s1.color, '#ff7f00')
+ s1 = Automerge.change(s1, doc => doc.color = {red: 255, green: 127, blue: 0})
+ assert.deepStrictEqual(s1.color, {red: 255, green: 127, blue: 0})
+ s1 = Automerge.change(s1, doc => doc.color = '#ff7f00')
+ assert.strictEqual(s1.color, '#ff7f00')
+ })
+
+ it('should not allow several references to the same map object', () => {
+ s1 = Automerge.change(s1, doc => doc.object = {})
+ assert.throws(() => {
+ Automerge.change(s1, doc => { doc.x = doc.object })
+ }, /Cannot create a reference to an existing document object/)
+ assert.throws(() => {
+ Automerge.change(s1, doc => { doc.x = s1.object })
+ }, /Cannot create a reference to an existing document object/)
+ assert.throws(() => {
+ Automerge.change(s1, doc => { doc.x = {}; doc.y = doc.x })
+ }, /Cannot create a reference to an existing document object/)
+ })
+
+ it('should not allow object-copying idioms', () => {
+ s1 = Automerge.change(s1, doc => {
+ doc.items = [{id: 'id1', name: 'one'}, {id: 'id2', name: 'two'}]
+ })
+ // People who have previously worked with immutable state in JavaScript may be tempted
+ // to use idioms like this, which don't work well with Automerge -- see e.g.
+ // https://github.com/automerge/automerge/issues/260
+ assert.throws(() => {
+ Automerge.change(s1, doc => {
+ doc.items = [...doc.items, {id: 'id3', name: 'three'}]
+ })
+ }, /Cannot create a reference to an existing document object/)
+ })
+
+ it('should handle deletion of properties within a map', () => {
+ s1 = Automerge.change(s1, 'set style', doc => {
+ doc.textStyle = {typeface: 'Optima', bold: false, fontSize: 12}
+ })
+ s1 = Automerge.change(s1, 'non-bold', doc => delete doc.textStyle.bold)
+ assert.strictEqual(s1.textStyle.bold, undefined)
+ assert.deepStrictEqual(s1.textStyle, {typeface: 'Optima', fontSize: 12})
+ })
+
+ it('should handle deletion of references to a map', () => {
+ s1 = Automerge.change(s1, 'make rich text doc', doc => {
+ Object.assign(doc, {title: 'Hello', textStyle: {typeface: 'Optima', fontSize: 12}})
+ })
+ s1 = Automerge.change(s1, doc => delete doc.textStyle)
+ assert.strictEqual(s1.textStyle, undefined)
+ assert.deepStrictEqual(s1, {title: 'Hello'})
+ })
+
+ it('should validate field names', () => {
+ s1 = Automerge.change(s1, doc => doc.nested = {})
+ assert.throws(() => { Automerge.change(s1, doc => doc.nested[''] = 'x') }, /must not be an empty string/)
+ assert.throws(() => { Automerge.change(s1, doc => doc.nested = {'': 'x'}) }, /must not be an empty string/)
+ })
+ })
+
+ describe('lists', () => {
+ it('should allow elements to be inserted', () => {
+ s1 = Automerge.change(s1, doc => doc.noodles = [])
+ s1 = Automerge.change(s1, doc => doc.noodles.insertAt(0, 'udon', 'soba'))
+ s1 = Automerge.change(s1, doc => doc.noodles.insertAt(1, 'ramen'))
+ assert.deepStrictEqual(s1, {noodles: ['udon', 'ramen', 'soba']})
+ assert.deepStrictEqual(s1.noodles, ['udon', 'ramen', 'soba'])
+ assert.strictEqual(s1.noodles[0], 'udon')
+ assert.strictEqual(s1.noodles[1], 'ramen')
+ assert.strictEqual(s1.noodles[2], 'soba')
+ assert.strictEqual(s1.noodles.length, 3)
+ })
+
+ it('should handle assignment of a list literal', () => {
+ s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba'])
+ assert.deepStrictEqual(s1, {noodles: ['udon', 'ramen', 'soba']})
+ assert.deepStrictEqual(s1.noodles, ['udon', 'ramen', 'soba'])
+ assert.strictEqual(s1.noodles[0], 'udon')
+ assert.strictEqual(s1.noodles[1], 'ramen')
+ assert.strictEqual(s1.noodles[2], 'soba')
+ assert.strictEqual(s1.noodles[3], undefined)
+ assert.strictEqual(s1.noodles.length, 3)
+ })
+
+ it('should only allow numeric indexes', () => {
+ s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba'])
+ s1 = Automerge.change(s1, doc => doc.noodles[1] = 'Ramen!')
+ assert.strictEqual(s1.noodles[1], 'Ramen!')
+ s1 = Automerge.change(s1, doc => doc.noodles['1'] = 'RAMEN!!!')
+ assert.strictEqual(s1.noodles[1], 'RAMEN!!!')
+ assert.throws(() => { Automerge.change(s1, doc => doc.noodles.favourite = 'udon') }, /list index must be a number/)
+ assert.throws(() => { Automerge.change(s1, doc => doc.noodles[''] = 'udon') }, /list index must be a number/)
+ assert.throws(() => { Automerge.change(s1, doc => doc.noodles['1e6'] = 'udon') }, /list index must be a number/)
+ })
+
+ it('should handle deletion of list elements', () => {
+ s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba'])
+ s1 = Automerge.change(s1, doc => delete doc.noodles[1])
+ assert.deepStrictEqual(s1.noodles, ['udon', 'soba'])
+ s1 = Automerge.change(s1, doc => doc.noodles.deleteAt(1))
+ assert.deepStrictEqual(s1.noodles, ['udon'])
+ assert.strictEqual(s1.noodles[0], 'udon')
+ assert.strictEqual(s1.noodles[1], undefined)
+ assert.strictEqual(s1.noodles[2], undefined)
+ assert.strictEqual(s1.noodles.length, 1)
+ })
+
+ it('should handle assignment of individual list indexes', () => {
+ s1 = Automerge.change(s1, doc => doc.japaneseFood = ['udon', 'ramen', 'soba'])
+ s1 = Automerge.change(s1, doc => doc.japaneseFood[1] = 'sushi')
+ assert.deepStrictEqual(s1.japaneseFood, ['udon', 'sushi', 'soba'])
+ assert.strictEqual(s1.japaneseFood[0], 'udon')
+ assert.strictEqual(s1.japaneseFood[1], 'sushi')
+ assert.strictEqual(s1.japaneseFood[2], 'soba')
+ assert.strictEqual(s1.japaneseFood[3], undefined)
+ assert.strictEqual(s1.japaneseFood.length, 3)
+ })
+
+ it('concurrent edits insert in reverse actorid order if counters equal', () => {
+ s1 = Automerge.init('aaaa')
+ s2 = Automerge.init('bbbb')
+ s1 = Automerge.change(s1, doc => doc.list = [])
+ s2 = Automerge.merge(s2, s1)
+ s1 = Automerge.change(s1, doc => doc.list.splice(0, 0, "2@aaaa"))
+ s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "2@bbbb"))
+ s2 = Automerge.merge(s2, s1)
+ assert.deepStrictEqual(Automerge.toJS(s2).list, ["2@bbbb", "2@aaaa"])
+ })
+
+ it('concurrent edits insert in reverse counter order if different', () => {
+ s1 = Automerge.init('aaaa')
+ s2 = Automerge.init('bbbb')
+ s1 = Automerge.change(s1, doc => doc.list = [])
+ s2 = Automerge.merge(s2, s1)
+ s1 = Automerge.change(s1, doc => doc.list.splice(0, 0, "2@aaaa"))
+ s2 = Automerge.change(s2, doc => doc.foo = "2@bbbb")
+ s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "3@bbbb"))
+ s2 = Automerge.merge(s2, s1)
+ assert.deepStrictEqual(s2.list, ["3@bbbb", "2@aaaa"])
+ })
+
+ it('should treat out-by-one assignment as insertion', () => {
+ s1 = Automerge.change(s1, doc => doc.japaneseFood = ['udon'])
+ s1 = Automerge.change(s1, doc => doc.japaneseFood[1] = 'sushi')
+ assert.deepStrictEqual(s1.japaneseFood, ['udon', 'sushi'])
+ assert.strictEqual(s1.japaneseFood[0], 'udon')
+ assert.strictEqual(s1.japaneseFood[1], 'sushi')
+ assert.strictEqual(s1.japaneseFood[2], undefined)
+ assert.strictEqual(s1.japaneseFood.length, 2)
+ })
+
+ it('should not allow out-of-range assignment', () => {
+ s1 = Automerge.change(s1, doc => doc.japaneseFood = ['udon'])
+ assert.throws(() => { Automerge.change(s1, doc => doc.japaneseFood[4] = 'ramen') }, /is out of bounds/)
+ })
+
+ it('should allow bulk assignment of multiple list indexes', () => {
+ s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba'])
+ s1 = Automerge.change(s1, doc => Object.assign(doc.noodles, {0: 'うどん', 2: 'そば'}))
+ assert.deepStrictEqual(s1.noodles, ['うどん', 'ramen', 'そば'])
+ assert.strictEqual(s1.noodles[0], 'うどん')
+ assert.strictEqual(s1.noodles[1], 'ramen')
+ assert.strictEqual(s1.noodles[2], 'そば')
+ assert.strictEqual(s1.noodles.length, 3)
+ })
+
+ it('should handle nested objects', () => {
+ s1 = Automerge.change(s1, doc => doc.noodles = [{type: 'ramen', dishes: ['tonkotsu', 'shoyu']}])
+ s1 = Automerge.change(s1, doc => doc.noodles.push({type: 'udon', dishes: ['tempura udon']}))
+ s1 = Automerge.change(s1, doc => doc.noodles[0].dishes.push('miso'))
+ assert.deepStrictEqual(s1, {noodles: [
+ {type: 'ramen', dishes: ['tonkotsu', 'shoyu', 'miso']},
+ {type: 'udon', dishes: ['tempura udon']}
+ ]})
+ assert.deepStrictEqual(s1.noodles[0], {
+ type: 'ramen', dishes: ['tonkotsu', 'shoyu', 'miso']
+ })
+ assert.deepStrictEqual(s1.noodles[1], {
+ type: 'udon', dishes: ['tempura udon']
+ })
+ })
+
+ it('should handle nested lists', () => {
+ s1 = Automerge.change(s1, doc => doc.noodleMatrix = [['ramen', 'tonkotsu', 'shoyu']])
+ s1 = Automerge.change(s1, doc => doc.noodleMatrix.push(['udon', 'tempura udon']))
+ s1 = Automerge.change(s1, doc => doc.noodleMatrix[0].push('miso'))
+ assert.deepStrictEqual(s1.noodleMatrix, [['ramen', 'tonkotsu', 'shoyu', 'miso'], ['udon', 'tempura udon']])
+ assert.deepStrictEqual(s1.noodleMatrix[0], ['ramen', 'tonkotsu', 'shoyu', 'miso'])
+ assert.deepStrictEqual(s1.noodleMatrix[1], ['udon', 'tempura udon'])
+ })
+
+ it('should handle deep nesting', () => {
+ s1 = Automerge.change(s1, doc => doc.nesting = {
+ maps: { m1: { m2: { foo: "bar", baz: {} }, m2a: { } } },
+ lists: [ [ 1, 2, 3 ], [ [ 3, 4, 5, [6]], 7 ] ],
+ mapsinlists: [ { foo: "bar" }, [ { bar: "baz" } ] ],
+ listsinmaps: { foo: [1, 2, 3], bar: [ [ { baz: "123" } ] ] }
+ })
+ s1 = Automerge.change(s1, doc => {
+ doc.nesting.maps.m1a = "123"
+ doc.nesting.maps.m1.m2.baz.xxx = "123"
+ delete doc.nesting.maps.m1.m2a
+ doc.nesting.lists.shift()
+ doc.nesting.lists[0][0].pop()
+ doc.nesting.lists[0][0].push(100)
+ doc.nesting.mapsinlists[0].foo = "baz"
+ doc.nesting.mapsinlists[1][0].foo = "bar"
+ delete doc.nesting.mapsinlists[1]
+ doc.nesting.listsinmaps.foo.push(4)
+ doc.nesting.listsinmaps.bar[0][0].baz = "456"
+ delete doc.nesting.listsinmaps.bar
+ })
+ assert.deepStrictEqual(s1, { nesting: {
+ maps: { m1: { m2: { foo: "bar", baz: { xxx: "123" } } }, m1a: "123" },
+ lists: [ [ [ 3, 4, 5, 100 ], 7 ] ],
+ mapsinlists: [ { foo: "baz" } ],
+ listsinmaps: { foo: [1, 2, 3, 4] }
+ }})
+ })
+
+ it('should handle replacement of the entire list', () => {
+ s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'soba', 'ramen'])
+ s1 = Automerge.change(s1, doc => doc.japaneseNoodles = doc.noodles.slice())
+ s1 = Automerge.change(s1, doc => doc.noodles = ['wonton', 'pho'])
+ assert.deepStrictEqual(s1, {
+ noodles: ['wonton', 'pho'],
+ japaneseNoodles: ['udon', 'soba', 'ramen']
+ })
+ assert.deepStrictEqual(s1.noodles, ['wonton', 'pho'])
+ assert.strictEqual(s1.noodles[0], 'wonton')
+ assert.strictEqual(s1.noodles[1], 'pho')
+ assert.strictEqual(s1.noodles[2], undefined)
+ assert.strictEqual(s1.noodles.length, 2)
+ })
+
+ it('should allow assignment to change the type of a list element', () => {
+ s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'soba', 'ramen'])
+ assert.deepStrictEqual(s1.noodles, ['udon', 'soba', 'ramen'])
+ s1 = Automerge.change(s1, doc => doc.noodles[1] = {type: 'soba', options: ['hot', 'cold']})
+ assert.deepStrictEqual(s1.noodles, ['udon', {type: 'soba', options: ['hot', 'cold']}, 'ramen'])
+ s1 = Automerge.change(s1, doc => doc.noodles[1] = ['hot soba', 'cold soba'])
+ assert.deepStrictEqual(s1.noodles, ['udon', ['hot soba', 'cold soba'], 'ramen'])
+ s1 = Automerge.change(s1, doc => doc.noodles[1] = 'soba is the best')
+ assert.deepStrictEqual(s1.noodles, ['udon', 'soba is the best', 'ramen'])
+ })
+
+ it('should allow list creation and assignment in the same change callback', () => {
+ s1 = Automerge.change(Automerge.init(), doc => {
+ doc.letters = ['a', 'b', 'c']
+ doc.letters[1] = 'd'
+ })
+ assert.strictEqual(s1.letters[1], 'd')
+ })
+
+ it('should allow adding and removing list elements in the same change callback', () => {
+ s1 = Automerge.change(Automerge.init(), doc => doc.noodles = [])
+ s1 = Automerge.change(s1, doc => {
+ doc.noodles.push('udon')
+ doc.noodles.deleteAt(0)
+ })
+ assert.deepStrictEqual(s1, {noodles: []})
+ // do the add-remove cycle twice, test for #151 (https://github.com/automerge/automerge/issues/151)
+ s1 = Automerge.change(s1, doc => {
+ doc.noodles.push('soba')
+ doc.noodles.deleteAt(0)
+ })
+ assert.deepStrictEqual(s1, {noodles: []})
+ })
+
+ it('should handle arbitrary-depth nesting', () => {
+ s1 = Automerge.change(s1, doc => doc.maze = [[[[[[[['noodles', ['here']]]]]]]]])
+ s1 = Automerge.change(s1, doc => doc.maze[0][0][0][0][0][0][0][1].unshift('found'))
+ assert.deepStrictEqual(s1.maze, [[[[[[[['noodles', ['found', 'here']]]]]]]]])
+ assert.deepStrictEqual(s1.maze[0][0][0][0][0][0][0][1][1], 'here')
+ s2 = Automerge.load(Automerge.save(s1))
+ assert.deepStrictEqual(s1,s2)
+ })
+
+ it('should not allow several references to the same list object', () => {
+ s1 = Automerge.change(s1, doc => doc.list = [])
+ assert.throws(() => {
+ Automerge.change(s1, doc => { doc.x = doc.list })
+ }, /Cannot create a reference to an existing document object/)
+ assert.throws(() => {
+ Automerge.change(s1, doc => { doc.x = s1.list })
+ }, /Cannot create a reference to an existing document object/)
+ assert.throws(() => {
+ Automerge.change(s1, doc => { doc.x = []; doc.y = doc.x })
+ }, /Cannot create a reference to an existing document object/)
+ })
+ })
+
+ describe('counters', () => {
+ // counter
+ it('should allow deleting counters from maps', () => {
+ const s1 = Automerge.change(Automerge.init(), doc => doc.birds = {wrens: new Automerge.Counter(1)})
+ const s2 = Automerge.change(s1, doc => doc.birds.wrens.increment(2))
+ const s3 = Automerge.change(s2, doc => delete doc.birds.wrens)
+ assert.deepStrictEqual(s2, {birds: {wrens: new Automerge.Counter(3)}})
+ assert.deepStrictEqual(s3, {birds: {}})
+ })
+
+ // counter
+ /*
+ it('should not allow deleting counters from lists', () => {
+ const s1 = Automerge.change(Automerge.init(), doc => doc.recordings = [new Automerge.Counter(1)])
+ const s2 = Automerge.change(s1, doc => doc.recordings[0].increment(2))
+ assert.deepStrictEqual(s2, {recordings: [new Automerge.Counter(3)]})
+ assert.throws(() => { Automerge.change(s2, doc => doc.recordings.deleteAt(0)) }, /Unsupported operation/)
+ })
+ */
+ })
+ })
+
+ describe('concurrent use', () => {
+ let s1, s2, s3
+ beforeEach(() => {
+ s1 = Automerge.init()
+ s2 = Automerge.init()
+ s3 = Automerge.init()
+ })
+
+ it('should merge concurrent updates of different properties', () => {
+ s1 = Automerge.change(s1, doc => doc.foo = 'bar')
+ s2 = Automerge.change(s2, doc => doc.hello = 'world')
+ s3 = Automerge.merge(s1, s2)
+ assert.strictEqual(s3.foo, 'bar')
+ assert.strictEqual(s3.hello, 'world')
+ assert.deepStrictEqual(s3, {foo: 'bar', hello: 'world'})
+ assert.strictEqual(Automerge.getConflicts(s3, 'foo'), undefined)
+ assert.strictEqual(Automerge.getConflicts(s3, 'hello'), undefined)
+ s4 = Automerge.load(Automerge.save(s3))
+ assert.deepEqual(s3,s4)
+ })
+
+ it('should add concurrent increments of the same property', () => {
+ s1 = Automerge.change(s1, doc => doc.counter = new Automerge.Counter())
+ s2 = Automerge.merge(s2, s1)
+ s1 = Automerge.change(s1, doc => doc.counter.increment())
+ s2 = Automerge.change(s2, doc => doc.counter.increment(2))
+ s3 = Automerge.merge(s1, s2)
+ assert.strictEqual(s1.counter.value, 1)
+ assert.strictEqual(s2.counter.value, 2)
+ assert.strictEqual(s3.counter.value, 3)
+ assert.strictEqual(Automerge.getConflicts(s3, 'counter'), undefined)
+ s4 = Automerge.load(Automerge.save(s3))
+ assert.deepEqual(s3,s4)
+ })
+
+ it('should add increments only to the values they precede', () => {
+ s1 = Automerge.change(s1, doc => doc.counter = new Automerge.Counter(0))
+ s1 = Automerge.change(s1, doc => doc.counter.increment())
+ s2 = Automerge.change(s2, doc => doc.counter = new Automerge.Counter(100))
+ s2 = Automerge.change(s2, doc => doc.counter.increment(3))
+ s3 = Automerge.merge(s1, s2)
+ if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) {
+ assert.deepStrictEqual(s3, {counter: new Automerge.Counter(1)})
+ } else {
+ assert.deepStrictEqual(s3, {counter: new Automerge.Counter(103)})
+ }
+ assert.deepStrictEqual(Automerge.getConflicts(s3, 'counter'), {
+ [`1@${Automerge.getActorId(s1)}`]: new Automerge.Counter(1),
+ [`1@${Automerge.getActorId(s2)}`]: new Automerge.Counter(103)
+ })
+ s4 = Automerge.load(Automerge.save(s3))
+ assert.deepEqual(s3,s4)
+ })
+
+ it('should detect concurrent updates of the same field', () => {
+ s1 = Automerge.change(s1, doc => doc.field = 'one')
+ s2 = Automerge.change(s2, doc => doc.field = 'two')
+ s3 = Automerge.merge(s1, s2)
+ if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) {
+ assert.deepStrictEqual(s3, {field: 'one'})
+ } else {
+ assert.deepStrictEqual(s3, {field: 'two'})
+ }
+ assert.deepStrictEqual(Automerge.getConflicts(s3, 'field'), {
+ [`1@${Automerge.getActorId(s1)}`]: 'one',
+ [`1@${Automerge.getActorId(s2)}`]: 'two'
+ })
+ })
+
+ it('should detect concurrent updates of the same list element', () => {
+ s1 = Automerge.change(s1, doc => doc.birds = ['finch'])
+ s2 = Automerge.merge(s2, s1)
+ s1 = Automerge.change(s1, doc => doc.birds[0] = 'greenfinch')
+ s2 = Automerge.change(s2, doc => doc.birds[0] = 'goldfinch')
+ s3 = Automerge.merge(s1, s2)
+ if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) {
+ assert.deepStrictEqual(s3.birds, ['greenfinch'])
+ } else {
+ assert.deepStrictEqual(s3.birds, ['goldfinch'])
+ }
+ assert.deepStrictEqual(Automerge.getConflicts(s3.birds, 0), {
+ [`3@${Automerge.getActorId(s1)}`]: 'greenfinch',
+ [`3@${Automerge.getActorId(s2)}`]: 'goldfinch'
+ })
+ })
+
+ it('should handle assignment conflicts of different types', () => {
+ s1 = Automerge.change(s1, doc => doc.field = 'string')
+ s2 = Automerge.change(s2, doc => doc.field = ['list'])
+ s3 = Automerge.change(s3, doc => doc.field = {thing: 'map'})
+ s1 = Automerge.merge(Automerge.merge(s1, s2), s3)
+ assertEqualsOneOf(s1.field, 'string', ['list'], {thing: 'map'})
+ assert.deepStrictEqual(Automerge.getConflicts(s1, 'field'), {
+ [`1@${Automerge.getActorId(s1)}`]: 'string',
+ [`1@${Automerge.getActorId(s2)}`]: ['list'],
+ [`1@${Automerge.getActorId(s3)}`]: {thing: 'map'}
+ })
+ })
+
+ it('should handle changes within a conflicting map field', () => {
+ s1 = Automerge.change(s1, doc => doc.field = 'string')
+ s2 = Automerge.change(s2, doc => doc.field = {})
+ s2 = Automerge.change(s2, doc => doc.field.innerKey = 42)
+ s3 = Automerge.merge(s1, s2)
+ assertEqualsOneOf(s3.field, 'string', {innerKey: 42})
+ assert.deepStrictEqual(Automerge.getConflicts(s3, 'field'), {
+ [`1@${Automerge.getActorId(s1)}`]: 'string',
+ [`1@${Automerge.getActorId(s2)}`]: {innerKey: 42}
+ })
+ })
+
+ it('should handle changes within a conflicting list element', () => {
+ s1 = Automerge.change(s1, doc => doc.list = ['hello'])
+ s2 = Automerge.merge(s2, s1)
+ s1 = Automerge.change(s1, doc => doc.list[0] = {map1: true})
+ s1 = Automerge.change(s1, doc => doc.list[0].key = 1)
+ s2 = Automerge.change(s2, doc => doc.list[0] = {map2: true})
+ s2 = Automerge.change(s2, doc => doc.list[0].key = 2)
+ s3 = Automerge.merge(s1, s2)
+ if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) {
+ assert.deepStrictEqual(s3.list, [{map1: true, key: 1}])
+ } else {
+ assert.deepStrictEqual(s3.list, [{map2: true, key: 2}])
+ }
+ assert.deepStrictEqual(Automerge.getConflicts(s3.list, 0), {
+ [`3@${Automerge.getActorId(s1)}`]: {map1: true, key: 1},
+ [`3@${Automerge.getActorId(s2)}`]: {map2: true, key: 2}
+ })
+ })
+
+ it('should not merge concurrently assigned nested maps', () => {
+ s1 = Automerge.change(s1, doc => doc.config = {background: 'blue'})
+ s2 = Automerge.change(s2, doc => doc.config = {logo_url: 'logo.png'})
+ s3 = Automerge.merge(s1, s2)
+ assertEqualsOneOf(s3.config, {background: 'blue'}, {logo_url: 'logo.png'})
+ assert.deepStrictEqual(Automerge.getConflicts(s3, 'config'), {
+ [`1@${Automerge.getActorId(s1)}`]: {background: 'blue'},
+ [`1@${Automerge.getActorId(s2)}`]: {logo_url: 'logo.png'}
+ })
+ })
+
+ it('should clear conflicts after assigning a new value', () => {
+ s1 = Automerge.change(s1, doc => doc.field = 'one')
+ s2 = Automerge.change(s2, doc => doc.field = 'two')
+ s3 = Automerge.merge(s1, s2)
+ s3 = Automerge.change(s3, doc => doc.field = 'three')
+ assert.deepStrictEqual(s3, {field: 'three'})
+ assert.strictEqual(Automerge.getConflicts(s3, 'field'), undefined)
+ s2 = Automerge.merge(s2, s3)
+ assert.deepStrictEqual(s2, {field: 'three'})
+ assert.strictEqual(Automerge.getConflicts(s2, 'field'), undefined)
+ })
+
+ it('should handle concurrent insertions at different list positions', () => {
+ s1 = Automerge.change(s1, doc => doc.list = ['one', 'three'])
+ s2 = Automerge.merge(s2, s1)
+ s1 = Automerge.change(s1, doc => doc.list.splice(1, 0, 'two'))
+ s2 = Automerge.change(s2, doc => doc.list.push('four'))
+ s3 = Automerge.merge(s1, s2)
+ assert.deepStrictEqual(s3, {list: ['one', 'two', 'three', 'four']})
+ assert.strictEqual(Automerge.getConflicts(s3, 'list'), undefined)
+ })
+
+ it('should handle concurrent insertions at the same list position', () => {
+ s1 = Automerge.change(s1, doc => doc.birds = ['parakeet'])
+ s2 = Automerge.merge(s2, s1)
+ s1 = Automerge.change(s1, doc => doc.birds.push('starling'))
+ s2 = Automerge.change(s2, doc => doc.birds.push('chaffinch'))
+ s3 = Automerge.merge(s1, s2)
+ assertEqualsOneOf(s3.birds, ['parakeet', 'starling', 'chaffinch'], ['parakeet', 'chaffinch', 'starling'])
+ s2 = Automerge.merge(s2, s3)
+ assert.deepStrictEqual(s2, s3)
+ })
+
+ it('should handle concurrent assignment and deletion of a map entry', () => {
+ // Add-wins semantics
+ s1 = Automerge.change(s1, doc => doc.bestBird = 'robin')
+ s2 = Automerge.merge(s2, s1)
+ s1 = Automerge.change(s1, doc => delete doc.bestBird)
+ s2 = Automerge.change(s2, doc => doc.bestBird = 'magpie')
+ s3 = Automerge.merge(s1, s2)
+ assert.deepStrictEqual(s1, {})
+ assert.deepStrictEqual(s2, {bestBird: 'magpie'})
+ assert.deepStrictEqual(s3, {bestBird: 'magpie'})
+ assert.strictEqual(Automerge.getConflicts(s3, 'bestBird'), undefined)
+ })
+
+ it('should handle concurrent assignment and deletion of a list element', () => {
+      // Concurrent assignment resurrects a deleted list element. Perhaps a little
+ // surprising, but consistent with add-wins semantics of maps (see test above)
+ s1 = Automerge.change(s1, doc => doc.birds = ['blackbird', 'thrush', 'goldfinch'])
+ s2 = Automerge.merge(s2, s1)
+ s1 = Automerge.change(s1, doc => doc.birds[1] = 'starling')
+ s2 = Automerge.change(s2, doc => doc.birds.splice(1, 1))
+ s3 = Automerge.merge(s1, s2)
+ assert.deepStrictEqual(s1.birds, ['blackbird', 'starling', 'goldfinch'])
+ assert.deepStrictEqual(s2.birds, ['blackbird', 'goldfinch'])
+ assert.deepStrictEqual(s3.birds, ['blackbird', 'starling', 'goldfinch'])
+ s4 = Automerge.load(Automerge.save(s3))
+ assert.deepStrictEqual(s3, s4);
+ })
+
+ it('should handle insertion after a deleted list element', () => {
+ s1 = Automerge.change(s1, doc => doc.birds = ['blackbird', 'thrush', 'goldfinch'])
+ s2 = Automerge.merge(s2, s1)
+ s1 = Automerge.change(s1, doc => doc.birds.splice(1, 2))
+ s2 = Automerge.change(s2, doc => doc.birds.splice(2, 0, 'starling'))
+ s3 = Automerge.merge(s1, s2)
+ assert.deepStrictEqual(s3, {birds: ['blackbird', 'starling']})
+ assert.deepStrictEqual(Automerge.merge(s2, s3), {birds: ['blackbird', 'starling']})
+ })
+
+ it('should handle concurrent deletion of the same element', () => {
+ s1 = Automerge.change(s1, doc => doc.birds = ['albatross', 'buzzard', 'cormorant'])
+ s2 = Automerge.merge(s2, s1)
+ s1 = Automerge.change(s1, doc => doc.birds.deleteAt(1)) // buzzard
+ s2 = Automerge.change(s2, doc => doc.birds.deleteAt(1)) // buzzard
+ s3 = Automerge.merge(s1, s2)
+ assert.deepStrictEqual(s3.birds, ['albatross', 'cormorant'])
+ })
+
+ it('should handle concurrent deletion of different elements', () => {
+ s1 = Automerge.change(s1, doc => doc.birds = ['albatross', 'buzzard', 'cormorant'])
+ s2 = Automerge.merge(s2, s1)
+ s1 = Automerge.change(s1, doc => doc.birds.deleteAt(0)) // albatross
+ s2 = Automerge.change(s2, doc => doc.birds.deleteAt(1)) // buzzard
+ s3 = Automerge.merge(s1, s2)
+ assert.deepStrictEqual(s3.birds, ['cormorant'])
+ })
+
+ it('should handle concurrent updates at different levels of the tree', () => {
+ // A delete higher up in the tree overrides an update in a subtree
+ s1 = Automerge.change(s1, doc => doc.animals = {birds: {pink: 'flamingo', black: 'starling'}, mammals: ['badger']})
+ s2 = Automerge.merge(s2, s1)
+ s1 = Automerge.change(s1, doc => doc.animals.birds.brown = 'sparrow')
+ s2 = Automerge.change(s2, doc => delete doc.animals.birds)
+ s3 = Automerge.merge(s1, s2)
+ assert.deepStrictEqual(s1.animals, {
+ birds: {
+ pink: 'flamingo', brown: 'sparrow', black: 'starling'
+ },
+ mammals: ['badger']
+ })
+ assert.deepStrictEqual(s2.animals, {mammals: ['badger']})
+ assert.deepStrictEqual(s3.animals, {mammals: ['badger']})
+ })
+
+ it('should handle updates of concurrently deleted objects', () => {
+ s1 = Automerge.change(s1, doc => doc.birds = {blackbird: {feathers: 'black'}})
+ s2 = Automerge.merge(s2, s1)
+ s1 = Automerge.change(s1, doc => delete doc.birds.blackbird)
+ s2 = Automerge.change(s2, doc => doc.birds.blackbird.beak = 'orange')
+ s3 = Automerge.merge(s1, s2)
+ assert.deepStrictEqual(s1, {birds: {}})
+ })
+
+ it('should not interleave sequence insertions at the same position', () => {
+ s1 = Automerge.change(s1, doc => doc.wisdom = [])
+ s2 = Automerge.merge(s2, s1)
+ s1 = Automerge.change(s1, doc => doc.wisdom.push('to', 'be', 'is', 'to', 'do'))
+ s2 = Automerge.change(s2, doc => doc.wisdom.push('to', 'do', 'is', 'to', 'be'))
+ s3 = Automerge.merge(s1, s2)
+ assertEqualsOneOf(s3.wisdom,
+ ['to', 'be', 'is', 'to', 'do', 'to', 'do', 'is', 'to', 'be'],
+ ['to', 'do', 'is', 'to', 'be', 'to', 'be', 'is', 'to', 'do'])
+ // In case you're wondering: http://quoteinvestigator.com/2013/09/16/do-be-do/
+ })
+
+ describe('multiple insertions at the same list position', () => {
+ it('should handle insertion by greater actor ID', () => {
+ s1 = Automerge.init('aaaa')
+ s2 = Automerge.init('bbbb')
+ s1 = Automerge.change(s1, doc => doc.list = ['two'])
+ s2 = Automerge.merge(s2, s1)
+ s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, 'one'))
+ assert.deepStrictEqual(s2.list, ['one', 'two'])
+ })
+
+ it('should handle insertion by lesser actor ID', () => {
+ s1 = Automerge.init('bbbb')
+ s2 = Automerge.init('aaaa')
+ s1 = Automerge.change(s1, doc => doc.list = ['two'])
+ s2 = Automerge.merge(s2, s1)
+ s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, 'one'))
+ assert.deepStrictEqual(s2.list, ['one', 'two'])
+ })
+
+ it('should handle insertion regardless of actor ID', () => {
+ s1 = Automerge.change(s1, doc => doc.list = ['two'])
+ s2 = Automerge.merge(s2, s1)
+ s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, 'one'))
+ assert.deepStrictEqual(s2.list, ['one', 'two'])
+ })
+
+ it('should make insertion order consistent with causality', () => {
+ s1 = Automerge.change(s1, doc => doc.list = ['four'])
+ s2 = Automerge.merge(s2, s1)
+ s2 = Automerge.change(s2, doc => doc.list.unshift('three'))
+ s1 = Automerge.merge(s1, s2)
+ s1 = Automerge.change(s1, doc => doc.list.unshift('two'))
+ s2 = Automerge.merge(s2, s1)
+ s2 = Automerge.change(s2, doc => doc.list.unshift('one'))
+ assert.deepStrictEqual(s2.list, ['one', 'two', 'three', 'four'])
+ })
+ })
+ })
+
+ describe('saving and loading', () => {
+ it('should save and restore an empty document', () => {
+ let s = Automerge.load(Automerge.save(Automerge.init()))
+ assert.deepStrictEqual(s, {})
+ })
+
+ it('should generate a new random actor ID', () => {
+ let s1 = Automerge.init()
+ let s2 = Automerge.load(Automerge.save(s1))
+ assert.strictEqual(UUID_PATTERN.test(Automerge.getActorId(s1).toString()), true)
+ assert.strictEqual(UUID_PATTERN.test(Automerge.getActorId(s2).toString()), true)
+ assert.notEqual(Automerge.getActorId(s1), Automerge.getActorId(s2))
+ })
+
+ it('should allow a custom actor ID to be set', () => {
+ let s = Automerge.load(Automerge.save(Automerge.init()), '333333')
+ assert.strictEqual(Automerge.getActorId(s), '333333')
+ })
+
+ it('should reconstitute complex datatypes', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => doc.todos = [{title: 'water plants', done: false}])
+ let s2 = Automerge.load(Automerge.save(s1))
+ assert.deepStrictEqual(s2, {todos: [{title: 'water plants', done: false}]})
+ })
+
+ it('should save and load maps with @ symbols in the keys', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => doc["123@4567"] = "hello")
+ let s2 = Automerge.load(Automerge.save(s1))
+ assert.deepStrictEqual(s2, { "123@4567": "hello" })
+ })
+
+ it('should reconstitute conflicts', () => {
+ let s1 = Automerge.change(Automerge.init('111111'), doc => doc.x = 3)
+ let s2 = Automerge.change(Automerge.init('222222'), doc => doc.x = 5)
+ s1 = Automerge.merge(s1, s2)
+ let s3 = Automerge.load(Automerge.save(s1))
+ assert.strictEqual(s1.x, 5)
+ assert.strictEqual(s3.x, 5)
+ assert.deepStrictEqual(Automerge.getConflicts(s1, 'x'), {'1@111111': 3, '1@222222': 5})
+ assert.deepStrictEqual(Automerge.getConflicts(s3, 'x'), {'1@111111': 3, '1@222222': 5})
+ })
+
+ it('should reconstitute element ID counters', () => {
+ const s1 = Automerge.init('01234567')
+ const s2 = Automerge.change(s1, doc => doc.list = ['a'])
+ const listId = Automerge.getObjectId(s2.list)
+ const changes12 = Automerge.getAllChanges(s2).map(decodeChange)
+ assert.deepStrictEqual(changes12, [{
+ hash: changes12[0].hash, actor: '01234567', seq: 1, startOp: 1,
+ time: changes12[0].time, message: '', deps: [], ops: [
+ {obj: '_root', action: 'makeList', key: 'list', insert: false, pred: []},
+ {obj: listId, action: 'set', elemId: '_head', insert: true, value: 'a', pred: []}
+ ]
+ }])
+ const s3 = Automerge.change(s2, doc => doc.list.deleteAt(0))
+ const s4 = Automerge.load(Automerge.save(s3), '01234567')
+ const s5 = Automerge.change(s4, doc => doc.list.push('b'))
+ const changes45 = Automerge.getAllChanges(s5).map(decodeChange)
+ assert.deepStrictEqual(s5, {list: ['b']})
+ assert.deepStrictEqual(changes45[2], {
+ hash: changes45[2].hash, actor: '01234567', seq: 3, startOp: 4,
+ time: changes45[2].time, message: '', deps: [changes45[1].hash], ops: [
+ {obj: listId, action: 'set', elemId: '_head', insert: true, value: 'b', pred: []}
+ ]
+ })
+ })
+
+ it('should allow a reloaded list to be mutated', () => {
+ let doc = Automerge.change(Automerge.init(), doc => doc.foo = [])
+ doc = Automerge.load(Automerge.save(doc))
+ doc = Automerge.change(doc, 'add', doc => doc.foo.push(1))
+ doc = Automerge.load(Automerge.save(doc))
+ assert.deepStrictEqual(doc.foo, [1])
+ })
+
+ it('should reload a document containing deflated columns', () => {
+ // In this test, the keyCtr column is long enough for deflate compression to kick in, but the
+ // keyStr column is short. Thus, the deflate bit gets set for keyCtr but not for keyStr.
+ // When checking whether the columns appear in ascending order, we must ignore the deflate bit.
+ let doc = Automerge.change(Automerge.init(), doc => {
+ doc.list = []
+ for (let i = 0; i < 200; i++) doc.list.insertAt(Math.floor(Math.random() * i), 'a')
+ })
+ Automerge.load(Automerge.save(doc))
+ let expected = []
+ for (let i = 0; i < 200; i++) expected.push('a')
+ assert.deepStrictEqual(doc, {list: expected})
+ })
+
+ /*
+ it.skip('should call patchCallback if supplied', () => {
+ const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch'])
+ const s2 = Automerge.change(s1, doc => doc.birds.push('Chaffinch'))
+ const callbacks = [], actor = Automerge.getActorId(s1)
+ const reloaded = Automerge.load(Automerge.save(s2), {
+ patchCallback(patch, before, after, local) {
+ callbacks.push({patch, before, after, local})
+ }
+ })
+ assert.strictEqual(callbacks.length, 1)
+ assert.deepStrictEqual(callbacks[0].patch, {
+ maxOp: 3, deps: [decodeChange(Automerge.getAllChanges(s2)[1]).hash], clock: {[actor]: 2}, pendingChanges: 0,
+ diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: {
+ objectId: `1@${actor}`, type: 'list', edits: [
+ {action: 'multi-insert', index: 0, elemId: `2@${actor}`, values: ['Goldfinch', 'Chaffinch']}
+ ]
+ }}}}
+ })
+ assert.deepStrictEqual(callbacks[0].before, {})
+ assert.strictEqual(callbacks[0].after, reloaded)
+ assert.strictEqual(callbacks[0].local, false)
+ })
+ */
+ })
+
+ describe('history API', () => {
+ it('should return an empty history for an empty document', () => {
+ assert.deepStrictEqual(Automerge.getHistory(Automerge.init()), [])
+ })
+
+ it('should make past document states accessible', () => {
+ let s = Automerge.init()
+ s = Automerge.change(s, doc => doc.config = {background: 'blue'})
+ s = Automerge.change(s, doc => doc.birds = ['mallard'])
+ s = Automerge.change(s, doc => doc.birds.unshift('oystercatcher'))
+ assert.deepStrictEqual(Automerge.getHistory(s).map(state => state.snapshot), [
+ {config: {background: 'blue'}},
+ {config: {background: 'blue'}, birds: ['mallard']},
+ {config: {background: 'blue'}, birds: ['oystercatcher', 'mallard']}
+ ])
+ })
+
+ it('should make change messages accessible', () => {
+ let s = Automerge.init()
+ s = Automerge.change(s, 'Empty Bookshelf', doc => doc.books = [])
+ s = Automerge.change(s, 'Add Orwell', doc => doc.books.push('Nineteen Eighty-Four'))
+ s = Automerge.change(s, 'Add Huxley', doc => doc.books.push('Brave New World'))
+ assert.deepStrictEqual(s.books, ['Nineteen Eighty-Four', 'Brave New World'])
+ assert.deepStrictEqual(Automerge.getHistory(s).map(state => state.change.message),
+ ['Empty Bookshelf', 'Add Orwell', 'Add Huxley'])
+ })
+ })
+
+ describe('changes API', () => {
+ it('should return an empty list on an empty document', () => {
+ let changes = Automerge.getAllChanges(Automerge.init())
+ assert.deepStrictEqual(changes, [])
+ })
+
+ it('should return an empty list when nothing changed', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch'])
+ assert.deepStrictEqual(Automerge.getChanges(s1, s1), [])
+ })
+
+ it('should do nothing when applying an empty list of changes', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch'])
+ assert.deepStrictEqual(Automerge.applyChanges(s1, [])[0], s1)
+ })
+
+ it('should return all changes when compared to an empty document', () => {
+ let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch'])
+ let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch'))
+ let changes = Automerge.getChanges(Automerge.init(), s2)
+ assert.strictEqual(changes.length, 2)
+ })
+
+ it('should allow a document copy to be reconstructed from scratch', () => {
+ let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch'])
+ let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch'))
+ let changes = Automerge.getAllChanges(s2)
+ let [s3] = Automerge.applyChanges(Automerge.init(), changes)
+ assert.deepStrictEqual(s3.birds, ['Chaffinch', 'Bullfinch'])
+ })
+
+ it('should return changes since the last given version', () => {
+ let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch'])
+ let changes1 = Automerge.getAllChanges(s1)
+ let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch'))
+ let changes2 = Automerge.getChanges(s1, s2)
+ assert.strictEqual(changes1.length, 1) // Add Chaffinch
+ assert.strictEqual(changes2.length, 1) // Add Bullfinch
+ })
+
+ it('should incrementally apply changes since the last given version', () => {
+ let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch'])
+ let changes1 = Automerge.getAllChanges(s1)
+ let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch'))
+ let changes2 = Automerge.getChanges(s1, s2)
+ let [s3] = Automerge.applyChanges(Automerge.init(), changes1)
+ let [s4] = Automerge.applyChanges(s3, changes2)
+ assert.deepStrictEqual(s3.birds, ['Chaffinch'])
+ assert.deepStrictEqual(s4.birds, ['Chaffinch', 'Bullfinch'])
+ })
+
+ it('should handle updates to a list element', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch', 'Bullfinch'])
+ let s2 = Automerge.change(s1, doc => doc.birds[0] = 'Goldfinch')
+ let [s3] = Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2))
+ assert.deepStrictEqual(s3.birds, ['Goldfinch', 'Bullfinch'])
+ assert.strictEqual(Automerge.getConflicts(s3.birds, 0), undefined)
+ })
+
+ // TEXT
+ it('should handle updates to a text object', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text('ab'))
+ let s2 = Automerge.change(s1, doc => doc.text.set(0, 'A'))
+ let [s3] = Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2))
+ assert.deepStrictEqual([...s3.text], ['A', 'b'])
+ })
+
+ /*
+ it.skip('should report missing dependencies', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch'])
+ let s2 = Automerge.merge(Automerge.init(), s1)
+ s2 = Automerge.change(s2, doc => doc.birds.push('Bullfinch'))
+ let changes = Automerge.getAllChanges(s2)
+ let [s3, patch] = Automerge.applyChanges(Automerge.init(), [changes[1]])
+ assert.deepStrictEqual(s3, {})
+ assert.deepStrictEqual(Automerge.Backend.getMissingDeps(Automerge.Frontend.getBackendState(s3)),
+ decodeChange(changes[1]).deps)
+ assert.strictEqual(patch.pendingChanges, 1)
+ ;[s3, patch] = Automerge.applyChanges(s3, [changes[0]])
+ assert.deepStrictEqual(s3.birds, ['Chaffinch', 'Bullfinch'])
+ assert.deepStrictEqual(Automerge.Backend.getMissingDeps(Automerge.Frontend.getBackendState(s3)), [])
+ assert.strictEqual(patch.pendingChanges, 0)
+ })
+ */
+
+ it('should report missing dependencies with out-of-order applyChanges', () => {
+ let s0 = Automerge.init()
+ let s1 = Automerge.change(s0, doc => doc.test = ['a'])
+ let changes01 = Automerge.getAllChanges(s1)
+ let s2 = Automerge.change(s1, doc => doc.test = ['b'])
+ let changes12 = Automerge.getChanges(s1, s2)
+ let s3 = Automerge.change(s2, doc => doc.test = ['c'])
+ let changes23 = Automerge.getChanges(s2, s3)
+ let s4 = Automerge.init()
+ let [s5] = Automerge.applyChanges(s4, changes23)
+ let [s6] = Automerge.applyChanges(s5, changes12)
+// assert.deepStrictEqual(Automerge.Backend.getMissingDeps(Automerge.Frontend.getBackendState(s6)), [decodeChange(changes01[0]).hash])
+ assert.deepStrictEqual(Automerge.getMissingDeps(s6), [decodeChange(changes01[0]).hash])
+ })
+
+ /*
+ it.skip('should call patchCallback if supplied when applying changes', () => {
+ const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch'])
+ const callbacks = [], actor = Automerge.getActorId(s1)
+ const before = Automerge.init()
+ const [after, patch] = Automerge.applyChanges(before, Automerge.getAllChanges(s1), {
+ patchCallback(patch, before, after, local) {
+ callbacks.push({patch, before, after, local})
+ }
+ })
+ assert.strictEqual(callbacks.length, 1)
+ assert.deepStrictEqual(callbacks[0].patch, {
+ maxOp: 2, deps: [decodeChange(Automerge.getAllChanges(s1)[0]).hash], clock: {[actor]: 1}, pendingChanges: 0,
+ diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: {
+ objectId: `1@${actor}`, type: 'list', edits: [
+ {action: 'insert', index: 0, elemId: `2@${actor}`, opId: `2@${actor}`, value: {type: 'value', value: 'Goldfinch'}}
+ ]
+ }}}}
+ })
+ assert.strictEqual(callbacks[0].patch, patch)
+ assert.strictEqual(callbacks[0].before, before)
+ assert.strictEqual(callbacks[0].after, after)
+ assert.strictEqual(callbacks[0].local, false)
+ })
+ */
+
+ /*
+ it.skip('should merge multiple applied changes into one patch', () => {
+ const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch'])
+ const s2 = Automerge.change(s1, doc => doc.birds.push('Chaffinch'))
+ const patches = [], actor = Automerge.getActorId(s2)
+ Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2),
+ {patchCallback: p => patches.push(p)})
+ assert.deepStrictEqual(patches, [{
+ maxOp: 3, deps: [decodeChange(Automerge.getAllChanges(s2)[1]).hash], clock: {[actor]: 2}, pendingChanges: 0,
+ diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: {
+ objectId: `1@${actor}`, type: 'list', edits: [
+ {action: 'multi-insert', index: 0, elemId: `2@${actor}`, values: ['Goldfinch', 'Chaffinch']}
+ ]
+ }}}}
+ }])
+ })
+ */
+
+ /*
+ it.skip('should call a patchCallback registered on doc initialisation', () => {
+ const s1 = Automerge.change(Automerge.init(), doc => doc.bird = 'Goldfinch')
+ const patches = [], actor = Automerge.getActorId(s1)
+ const before = Automerge.init({patchCallback: p => patches.push(p)})
+ Automerge.applyChanges(before, Automerge.getAllChanges(s1))
+ assert.deepStrictEqual(patches, [{
+ maxOp: 1, deps: [decodeChange(Automerge.getAllChanges(s1)[0]).hash], clock: {[actor]: 1}, pendingChanges: 0,
+ diffs: {objectId: '_root', type: 'map', props: {bird: {[`1@${actor}`]: {type: 'value', value: 'Goldfinch'}}}}
+ }])
+ })
+ */
+ })
+})
diff --git a/javascript/test/sync_test.ts b/automerge-js/test/sync_test.js
similarity index 54%
rename from javascript/test/sync_test.ts
rename to automerge-js/test/sync_test.js
index 5724985c..86c3b3fd 100644
--- a/javascript/test/sync_test.ts
+++ b/automerge-js/test/sync_test.js
@@ -1,57 +1,48 @@
-import * as assert from "assert"
-import * as Automerge from "../src"
-import { BloomFilter } from "./legacy/sync"
-import {
- decodeSyncMessage,
- encodeSyncMessage,
- decodeSyncState,
- encodeSyncState,
- initSyncState,
-} from "../src"
+const assert = require('assert')
+const Automerge = require('..');
+const { BloomFilter } = require('../src/sync')
+const { decodeChangeMeta } = require('../src/columnar')
+const { decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState, initSyncState } = Automerge
+
+function inspect(a) {
+ const util = require("util");
+ return util.inspect(a,false,null,true)
+}
function getHeads(doc) {
return Automerge.getHeads(doc)
}
function getMissingDeps(doc) {
- return Automerge.getMissingDeps(doc, [])
+ return Automerge.getMissingDeps(doc)
}
-function sync(
- a,
- b,
- aSyncState = initSyncState(),
- bSyncState = initSyncState()
-) {
+function sync(a, b, aSyncState = initSyncState(), bSyncState = initSyncState()) {
const MAX_ITER = 10
- let aToBmsg: Automerge.SyncMessage | null = null,
- bToAmsg: Automerge.SyncMessage | null = null,
- i = 0
+ let aToBmsg = null, bToAmsg = null, i = 0
do {
- ;[aSyncState, aToBmsg] = Automerge.generateSyncMessage(a, aSyncState)
+ [aSyncState, aToBmsg] = Automerge.generateSyncMessage(a, aSyncState)
;[bSyncState, bToAmsg] = Automerge.generateSyncMessage(b, bSyncState)
if (aToBmsg) {
- ;[b, bSyncState] = Automerge.receiveSyncMessage(b, bSyncState, aToBmsg)
+ [b, bSyncState] = Automerge.receiveSyncMessage(b, bSyncState, aToBmsg)
}
if (bToAmsg) {
- ;[a, aSyncState] = Automerge.receiveSyncMessage(a, aSyncState, bToAmsg)
+ [a, aSyncState] = Automerge.receiveSyncMessage(a, aSyncState, bToAmsg)
}
if (i++ > MAX_ITER) {
- throw new Error(
- `Did not synchronize within ${MAX_ITER} iterations. Do you have a bug causing an infinite loop?`
- )
+ throw new Error(`Did not synchronize within ${MAX_ITER} iterations. Do you have a bug causing an infinite loop?`)
}
} while (aToBmsg || bToAmsg)
return [a, b, aSyncState, bSyncState]
}
-describe("Data sync protocol", () => {
- describe("with docs already in sync", () => {
- describe("an empty local doc", () => {
- it("should send a sync message implying no local data", () => {
+describe('Data sync protocol', () => {
+ describe('with docs already in sync', () => {
+ describe('an empty local doc', () => {
+ it('should send a sync message implying no local data', () => {
let n1 = Automerge.init()
let s1 = initSyncState()
let m1
@@ -65,35 +56,26 @@ describe("Data sync protocol", () => {
assert.deepStrictEqual(message.changes, [])
})
- it("should not reply if we have no data as well", () => {
- let n1 = Automerge.init(),
- n2 = Automerge.init()
- let s1 = initSyncState(),
- s2 = initSyncState()
- let m1: Automerge.SyncMessage | null = null,
- m2: Automerge.SyncMessage | null = null
+ it('should not reply if we have no data as well', () => {
+ let n1 = Automerge.init(), n2 = Automerge.init()
+ let s1 = initSyncState(), s2 = initSyncState()
+ let m1 = null, m2 = null
;[s1, m1] = Automerge.generateSyncMessage(n1, s1)
- if (m1 != null) {
- ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, m1)
- }
+ ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, m1)
;[s2, m2] = Automerge.generateSyncMessage(n2, s2)
assert.deepStrictEqual(m2, null)
})
})
- describe("documents with data", () => {
- it("repos with equal heads do not need a reply message", () => {
- let n1 = Automerge.init(),
- n2 = Automerge.init()
- let s1 = initSyncState(),
- s2 = initSyncState()
- let m1: Automerge.SyncMessage | null = null,
- m2: Automerge.SyncMessage | null = null
+ describe('documents with data', () => {
+ it('repos with equal heads do not need a reply message', () => {
+ let n1 = Automerge.init(), n2 = Automerge.init()
+ let s1 = initSyncState(), s2 = initSyncState()
+ let m1 = null, m2 = null
// make two nodes with the same changes
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.n = []))
- for (let i = 0; i < 10; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => doc.n.push(i))
+ n1 = Automerge.change(n1, {time: 0}, doc => doc.n = [])
+ for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.n.push(i))
;[n2] = Automerge.applyChanges(n2, Automerge.getAllChanges(n1))
assert.deepStrictEqual(n1, n2)
@@ -102,96 +84,82 @@ describe("Data sync protocol", () => {
assert.deepStrictEqual(s1.lastSentHeads, getHeads(n1))
// heads are equal so this message should be null
- if (m1 != null) {
- ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, m1)
- }
+ ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, m1)
;[s2, m2] = Automerge.generateSyncMessage(n2, s2)
assert.strictEqual(m2, null)
})
- it("n1 should offer all changes to n2 when starting from nothing", () => {
- let n1 = Automerge.init(),
- n2 = Automerge.init()
+ it('n1 should offer all changes to n2 when starting from nothing', () => {
+ let n1 = Automerge.init(), n2 = Automerge.init()
// make changes for n1 that n2 should request
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.n = []))
- for (let i = 0; i < 10; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => doc.n.push(i))
+ n1 = Automerge.change(n1, {time: 0}, doc => doc.n = [])
+ for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.n.push(i))
assert.notDeepStrictEqual(n1, n2)
const [after1, after2] = sync(n1, n2)
assert.deepStrictEqual(after1, after2)
})
- it("should sync peers where one has commits the other does not", () => {
- let n1 = Automerge.init(),
- n2 = Automerge.init()
+ it('should sync peers where one has commits the other does not', () => {
+ let n1 = Automerge.init(), n2 = Automerge.init()
// make changes for n1 that n2 should request
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.n = []))
- for (let i = 0; i < 10; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => doc.n.push(i))
+ n1 = Automerge.change(n1, {time: 0}, doc => doc.n = [])
+ for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.n.push(i))
assert.notDeepStrictEqual(n1, n2)
;[n1, n2] = sync(n1, n2)
assert.deepStrictEqual(n1, n2)
})
- it("should work with prior sync state", () => {
+ it('should work with prior sync state', () => {
// create & synchronize two nodes
- let n1 = Automerge.init(),
- n2 = Automerge.init()
- let s1 = initSyncState(),
- s2 = initSyncState()
+ let n1 = Automerge.init(), n2 = Automerge.init()
+ let s1 = initSyncState(), s2 = initSyncState()
- for (let i = 0; i < 5; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
+ for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
;[n1, n2, s1, s2] = sync(n1, n2)
// modify the first node further
- for (let i = 5; i < 10; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
+ for (let i = 5; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
assert.notDeepStrictEqual(n1, n2)
;[n1, n2, s1, s2] = sync(n1, n2, s1, s2)
assert.deepStrictEqual(n1, n2)
})
- it("should not generate messages once synced", () => {
+ it('should not generate messages once synced', () => {
// create & synchronize two nodes
- let n1 = Automerge.init("abc123"),
- n2 = Automerge.init("def456")
- let s1 = initSyncState(),
- s2 = initSyncState()
+ let n1 = Automerge.init('abc123'), n2 = Automerge.init('def456')
+ let s1 = initSyncState(), s2 = initSyncState()
- let message
- for (let i = 0; i < 5; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
- for (let i = 0; i < 5; i++)
- n2 = Automerge.change(n2, { time: 0 }, doc => (doc.y = i))
+ let message, patch
+ for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
+ for (let i = 0; i < 5; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.y = i)
- // n1 reports what it has
- ;[s1, message] = Automerge.generateSyncMessage(n1, s1)
+ // n1 reports what it has
+ ;[s1, message] = Automerge.generateSyncMessage(n1, s1, n1)
// n2 receives that message and sends changes along with what it has
- ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, message)
+ ;[n2, s2, patch] = Automerge.receiveSyncMessage(n2, s2, message)
;[s2, message] = Automerge.generateSyncMessage(n2, s2)
assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5)
//assert.deepStrictEqual(patch, null) // no changes arrived
// n1 receives the changes and replies with the changes it now knows n2 needs
- ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, message)
+ ;[n1, s1, patch] = Automerge.receiveSyncMessage(n1, s1, message)
;[s1, message] = Automerge.generateSyncMessage(n1, s1)
assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5)
//assert.deepStrictEqual(patch.diffs.props, {y: {'5@def456': {type: 'value', value: 4, datatype: 'int'}}}) // changes arrived
// n2 applies the changes and sends confirmation ending the exchange
- ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, message)
+ ;[n2, s2, patch] = Automerge.receiveSyncMessage(n2, s2, message)
;[s2, message] = Automerge.generateSyncMessage(n2, s2)
//assert.deepStrictEqual(patch.diffs.props, {x: {'5@abc123': {type: 'value', value: 4, datatype: 'int'}}}) // changes arrived
// n1 receives the message and has nothing more to say
- ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, message)
+ ;[n1, s1, patch] = Automerge.receiveSyncMessage(n1, s1, message)
;[s1, message] = Automerge.generateSyncMessage(n1, s1)
assert.deepStrictEqual(message, null)
//assert.deepStrictEqual(patch, null) // no changes arrived
@@ -201,38 +169,28 @@ describe("Data sync protocol", () => {
assert.deepStrictEqual(message, null)
})
- it("should allow simultaneous messages during synchronization", () => {
+ it('should allow simultaneous messages during synchronization', () => {
// create & synchronize two nodes
- let n1 = Automerge.init("abc123"),
- n2 = Automerge.init("def456")
- let s1 = initSyncState(),
- s2 = initSyncState()
- for (let i = 0; i < 5; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
- for (let i = 0; i < 5; i++)
- n2 = Automerge.change(n2, { time: 0 }, doc => (doc.y = i))
- const head1 = getHeads(n1)[0],
- head2 = getHeads(n2)[0]
+ let n1 = Automerge.init('abc123'), n2 = Automerge.init('def456')
+ let s1 = initSyncState(), s2 = initSyncState()
+ for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
+ for (let i = 0; i < 5; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.y = i)
+ const head1 = getHeads(n1)[0], head2 = getHeads(n2)[0]
// both sides report what they have but have no shared peer state
let msg1to2, msg2to1
;[s1, msg1to2] = Automerge.generateSyncMessage(n1, s1)
;[s2, msg2to1] = Automerge.generateSyncMessage(n2, s2)
assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0)
- assert.deepStrictEqual(
- decodeSyncMessage(msg1to2).have[0].lastSync.length,
- 0
- )
+ assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync.length, 0)
assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0)
- assert.deepStrictEqual(
- decodeSyncMessage(msg2to1).have[0].lastSync.length,
- 0
- )
+ assert.deepStrictEqual(decodeSyncMessage(msg2to1).have[0].lastSync.length, 0)
// n1 and n2 receives that message and update sync state but make no patch
- ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, msg2to1)
+ let patch1, patch2
+ ;[n1, s1, patch1] = Automerge.receiveSyncMessage(n1, s1, msg2to1)
//assert.deepStrictEqual(patch1, null) // no changes arrived, so no patch
- ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, msg1to2)
+ ;[n2, s2, patch2] = Automerge.receiveSyncMessage(n2, s2, msg1to2)
//assert.deepStrictEqual(patch2, null) // no changes arrived, so no patch
// now both reply with their local changes the other lacks
@@ -243,14 +201,15 @@ describe("Data sync protocol", () => {
assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 5)
// both should now apply the changes and update the frontend
- ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, msg2to1)
+ ;[n1, s1, patch1] = Automerge.receiveSyncMessage(n1, s1, msg2to1)
assert.deepStrictEqual(getMissingDeps(n1), [])
//assert.notDeepStrictEqual(patch1, null)
- assert.deepStrictEqual(n1, { x: 4, y: 4 })
- ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, msg1to2)
+ assert.deepStrictEqual(n1, {x: 4, y: 4})
+
+ ;[n2, s2, patch2] = Automerge.receiveSyncMessage(n2, s2, msg1to2)
assert.deepStrictEqual(getMissingDeps(n2), [])
//assert.notDeepStrictEqual(patch2, null)
- assert.deepStrictEqual(n2, { x: 4, y: 4 })
+ assert.deepStrictEqual(n2, {x: 4, y: 4})
// The response acknowledges the changes received, and sends no further changes
;[s1, msg1to2] = Automerge.generateSyncMessage(n1, s1)
@@ -259,8 +218,8 @@ describe("Data sync protocol", () => {
assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0)
// After receiving acknowledgements, their shared heads should be equal
- ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, msg2to1)
- ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, msg1to2)
+ ;[n1, s1, patch1] = Automerge.receiveSyncMessage(n1, s1, msg2to1)
+ ;[n2, s2, patch2] = Automerge.receiveSyncMessage(n2, s2, msg1to2)
assert.deepStrictEqual(s1.sharedHeads, [head1, head2].sort())
assert.deepStrictEqual(s2.sharedHeads, [head1, head2].sort())
//assert.deepStrictEqual(patch1, null)
@@ -273,56 +232,41 @@ describe("Data sync protocol", () => {
assert.deepStrictEqual(msg2to1, null)
// If we make one more change, and start another sync, its lastSync should be updated
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 5))
+ n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 5)
;[s1, msg1to2] = Automerge.generateSyncMessage(n1, s1)
- assert.deepStrictEqual(
- decodeSyncMessage(msg1to2).have[0].lastSync,
- [head1, head2].sort()
- )
+ assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync, [head1, head2].sort())
})
- it("should assume sent changes were recieved until we hear otherwise", () => {
- let n1 = Automerge.init("01234567"),
- n2 = Automerge.init("89abcdef")
- let s1 = initSyncState(),
- message: Automerge.SyncMessage | null = null
+ it('should assume sent changes were recieved until we hear otherwise', () => {
+ let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef')
+ let s1 = initSyncState(), message = null
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.items = []))
- ;[n1, n2, s1] = sync(n1, n2)
+ n1 = Automerge.change(n1, {time: 0}, doc => doc.items = [])
+ ;[n1, n2, s1, s2 ] = sync(n1, n2)
- n1 = Automerge.change(n1, { time: 0 }, doc => doc.items.push("x"))
+ n1 = Automerge.change(n1, {time: 0}, doc => doc.items.push('x'))
;[s1, message] = Automerge.generateSyncMessage(n1, s1)
- if (message != null) {
- assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1)
- }
+ assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1)
- n1 = Automerge.change(n1, { time: 0 }, doc => doc.items.push("y"))
+ n1 = Automerge.change(n1, {time: 0}, doc => doc.items.push('y'))
;[s1, message] = Automerge.generateSyncMessage(n1, s1)
- if (message != null) {
- assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1)
- }
+ assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1)
- n1 = Automerge.change(n1, { time: 0 }, doc => doc.items.push("z"))
+ n1 = Automerge.change(n1, {time: 0}, doc => doc.items.push('z'))
;[s1, message] = Automerge.generateSyncMessage(n1, s1)
- if (message != null) {
- assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1)
- }
+ assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1)
})
- it("should work regardless of who initiates the exchange", () => {
+ it('should work regardless of who initiates the exchange', () => {
// create & synchronize two nodes
- let n1 = Automerge.init(),
- n2 = Automerge.init()
- let s1 = initSyncState(),
- s2 = initSyncState()
+ let n1 = Automerge.init(), n2 = Automerge.init()
+ let s1 = initSyncState(), s2 = initSyncState()
- for (let i = 0; i < 5; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
+ for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
;[n1, n2, s1, s2] = sync(n1, n2, s1, s2)
// modify the first node further
- for (let i = 5; i < 10; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
+ for (let i = 5; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
assert.notDeepStrictEqual(n1, n2)
;[n1, n2, s1, s2] = sync(n1, n2, s1, s2)
@@ -331,24 +275,21 @@ describe("Data sync protocol", () => {
})
})
- describe("with diverged documents", () => {
- it("should work without prior sync state", () => {
+ describe('with diverged documents', () => {
+ it('should work without prior sync state', () => {
// Scenario: ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14
// c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+
// `-- c15 <-- c16 <-- c17
// lastSync is undefined.
// create two peers both with divergent commits
- let n1 = Automerge.init("01234567"),
- n2 = Automerge.init("89abcdef")
- for (let i = 0; i < 10; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
+ let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef')
+ for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
+
;[n1, n2] = sync(n1, n2)
- for (let i = 10; i < 15; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
- for (let i = 15; i < 18; i++)
- n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = i))
+ for (let i = 10; i < 15; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
+ for (let i = 15; i < 18; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.x = i)
assert.notDeepStrictEqual(n1, n2)
;[n1, n2] = sync(n1, n2)
@@ -356,26 +297,21 @@ describe("Data sync protocol", () => {
assert.deepStrictEqual(n1, n2)
})
- it("should work with prior sync state", () => {
+ it('should work with prior sync state', () => {
// Scenario: ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14
// c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+
// `-- c15 <-- c16 <-- c17
// lastSync is c9.
// create two peers both with divergent commits
- let n1 = Automerge.init("01234567"),
- n2 = Automerge.init("89abcdef")
- let s1 = initSyncState(),
- s2 = initSyncState()
+ let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef')
+ let s1 = initSyncState(), s2 = initSyncState()
- for (let i = 0; i < 10; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
+ for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
;[n1, n2, s1, s2] = sync(n1, n2, s1, s2)
- for (let i = 10; i < 15; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
- for (let i = 15; i < 18; i++)
- n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = i))
+ for (let i = 10; i < 15; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
+ for (let i = 15; i < 18; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.x = i)
s1 = decodeSyncState(encodeSyncState(s1))
s2 = decodeSyncState(encodeSyncState(s2))
@@ -385,33 +321,27 @@ describe("Data sync protocol", () => {
assert.deepStrictEqual(n1, n2)
})
- it("should ensure non-empty state after sync", () => {
- let n1 = Automerge.init("01234567"),
- n2 = Automerge.init("89abcdef")
- let s1 = initSyncState(),
- s2 = initSyncState()
+ it('should ensure non-empty state after sync', () => {
+ let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef')
+ let s1 = initSyncState(), s2 = initSyncState()
- for (let i = 0; i < 3; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
+ for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
;[n1, n2, s1, s2] = sync(n1, n2, s1, s2)
assert.deepStrictEqual(s1.sharedHeads, getHeads(n1))
assert.deepStrictEqual(s2.sharedHeads, getHeads(n1))
})
- it("should re-sync after one node crashed with data loss", () => {
+ it('should re-sync after one node crashed with data loss', () => {
// Scenario: (r) (n2) (n1)
// c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8
// n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync is c2.
// we want to successfully sync (n1) with (r), even though (n1) believes it's talking to (n2)
- let n1 = Automerge.init("01234567"),
- n2 = Automerge.init("89abcdef")
- let s1 = initSyncState(),
- s2 = initSyncState()
+ let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef')
+ let s1 = initSyncState(), s2 = initSyncState()
// n1 makes three changes, which we sync to n2
- for (let i = 0; i < 3; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
+ for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
;[n1, n2, s1, s2] = sync(n1, n2, s1, s2)
// save a copy of n2 as "r" to simulate recovering from crash
@@ -419,43 +349,38 @@ describe("Data sync protocol", () => {
;[r, rSyncState] = [Automerge.clone(n2), s2]
// sync another few commits
- for (let i = 3; i < 6; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
+ for (let i = 3; i < 6; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
;[n1, n2, s1, s2] = sync(n1, n2, s1, s2)
// everyone should be on the same page here
assert.deepStrictEqual(getHeads(n1), getHeads(n2))
assert.deepStrictEqual(n1, n2)
// now make a few more changes, then attempt to sync the fully-up-to-date n1 with the confused r
- for (let i = 6; i < 9; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
+ for (let i = 6; i < 9; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
s1 = decodeSyncState(encodeSyncState(s1))
rSyncState = decodeSyncState(encodeSyncState(rSyncState))
assert.notDeepStrictEqual(getHeads(n1), getHeads(r))
assert.notDeepStrictEqual(n1, r)
- assert.deepStrictEqual(n1, { x: 8 })
- assert.deepStrictEqual(r, { x: 2 })
+ assert.deepStrictEqual(n1, {x: 8})
+ assert.deepStrictEqual(r, {x: 2})
;[n1, r, s1, rSyncState] = sync(n1, r, s1, rSyncState)
assert.deepStrictEqual(getHeads(n1), getHeads(r))
assert.deepStrictEqual(n1, r)
})
- it("should resync after one node experiences data loss without disconnecting", () => {
- let n1 = Automerge.init("01234567"),
- n2 = Automerge.init("89abcdef")
- let s1 = initSyncState(),
- s2 = initSyncState()
+ it('should resync after one node experiences data loss without disconnecting', () => {
+ let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef')
+ let s1 = initSyncState(), s2 = initSyncState()
// n1 makes three changes, which we sync to n2
- for (let i = 0; i < 3; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
+ for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
;[n1, n2, s1, s2] = sync(n1, n2, s1, s2)
assert.deepStrictEqual(getHeads(n1), getHeads(n2))
assert.deepStrictEqual(n1, n2)
- let n2AfterDataLoss = Automerge.init("89abcdef")
+ let n2AfterDataLoss = Automerge.init('89abcdef')
// "n2" now has no data, but n1 still thinks it does. Note we don't do
// decodeSyncState(encodeSyncState(s1)) in order to simulate data loss without disconnecting
@@ -464,35 +389,29 @@ describe("Data sync protocol", () => {
assert.deepStrictEqual(n1, n2)
})
- it("should handle changes concurrent to the last sync heads", () => {
- let n1 = Automerge.init("01234567"),
- n2 = Automerge.init("89abcdef"),
- n3 = Automerge.init("fedcba98")
- let s12 = initSyncState(),
- s21 = initSyncState(),
- s23 = initSyncState(),
- s32 = initSyncState()
+ it('should handle changes concurrent to the last sync heads', () => {
+ let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('fedcba98')
+ let s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState()
// Change 1 is known to all three nodes
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 1))
+ n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 1)
;[n1, n2, s12, s21] = sync(n1, n2, s12, s21)
;[n2, n3, s23, s32] = sync(n2, n3, s23, s32)
// Change 2 is known to n1 and n2
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 2))
+ n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 2)
;[n1, n2, s12, s21] = sync(n1, n2, s12, s21)
// Each of the three nodes makes one change (changes 3, 4, 5)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 3))
- n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = 4))
- n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = 5))
+ n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 3)
+ n2 = Automerge.change(n2, {time: 0}, doc => doc.x = 4)
+ n3 = Automerge.change(n3, {time: 0}, doc => doc.x = 5)
// Apply n3's latest change to n2. If running in Node, turn the Uint8Array into a Buffer, to
// simulate transmission over a network (see https://github.com/automerge/automerge/pull/362)
let change = Automerge.getLastLocalChange(n3)
- if (typeof Buffer === "function" && change != null)
- change = Buffer.from(change)
- ;[n2] = (change && Automerge.applyChanges(n2, [change])) || [n2]
+ if (typeof Buffer === 'function') change = Buffer.from(change)
+ ;[n2] = Automerge.applyChanges(n2, [change])
// Now sync n1 and n2. n3's change is concurrent to n1 and n2's last sync heads
;[n1, n2, s12, s21] = sync(n1, n2, s12, s21)
@@ -500,14 +419,12 @@ describe("Data sync protocol", () => {
assert.deepStrictEqual(n1, n2)
})
- it("should handle histories with lots of branching and merging", () => {
- let n1 = Automerge.init("01234567"),
- n2 = Automerge.init("89abcdef"),
- n3 = Automerge.init("fedcba98")
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 0))
- ;[n2] = Automerge.applyChanges(n2, [Automerge.getLastLocalChange(n1)!])
- ;[n3] = Automerge.applyChanges(n3, [Automerge.getLastLocalChange(n1)!])
- n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = 1))
+ it('should handle histories with lots of branching and merging', () => {
+ let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('fedcba98')
+ n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 0)
+ ;[n2] = Automerge.applyChanges(n2, [Automerge.getLastLocalChange(n1)])
+ ;[n3] = Automerge.applyChanges(n3, [Automerge.getLastLocalChange(n1)])
+ n3 = Automerge.change(n3, {time: 0}, doc => doc.x = 1)
// - n1c1 <------ n1c2 <------ n1c3 <-- etc. <-- n1c20 <------ n1c21
// / \/ \/ \/
@@ -516,29 +433,29 @@ describe("Data sync protocol", () => {
// \ /
// ---------------------------------------------- n3c1 <-----
for (let i = 1; i < 20; i++) {
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.n1 = i))
- n2 = Automerge.change(n2, { time: 0 }, doc => (doc.n2 = i))
+ n1 = Automerge.change(n1, {time: 0}, doc => doc.n1 = i)
+ n2 = Automerge.change(n2, {time: 0}, doc => doc.n2 = i)
const change1 = Automerge.getLastLocalChange(n1)
const change2 = Automerge.getLastLocalChange(n2)
- ;[n1] = Automerge.applyChanges(n1, [change2!])
- ;[n2] = Automerge.applyChanges(n2, [change1!])
+ ;[n1] = Automerge.applyChanges(n1, [change2])
+ ;[n2] = Automerge.applyChanges(n2, [change1])
}
- let s1 = initSyncState(),
- s2 = initSyncState()
+ let s1 = initSyncState(), s2 = initSyncState()
;[n1, n2, s1, s2] = sync(n1, n2, s1, s2)
// Having n3's last change concurrent to the last sync heads forces us into the slower code path
- ;[n2] = Automerge.applyChanges(n2, [Automerge.getLastLocalChange(n3)!])
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.n1 = "final"))
- n2 = Automerge.change(n2, { time: 0 }, doc => (doc.n2 = "final"))
+ ;[n2] = Automerge.applyChanges(n2, [Automerge.getLastLocalChange(n3)])
+ n1 = Automerge.change(n1, {time: 0}, doc => doc.n1 = 'final')
+ n2 = Automerge.change(n2, {time: 0}, doc => doc.n2 = 'final')
+
;[n1, n2, s1, s2] = sync(n1, n2, s1, s2)
assert.deepStrictEqual(getHeads(n1), getHeads(n2))
assert.deepStrictEqual(n1, n2)
})
})
- describe("with false positives", () => {
+ describe('with false positives', () => {
// NOTE: the following tests use brute force to search for Bloom filter false positives. The
// tests make change hashes deterministic by fixing the actorId and change timestamp to be
// constants. The loop that searches for false positives is then initialised such that it finds
@@ -547,36 +464,22 @@ describe("Data sync protocol", () => {
// then the false positive will no longer be the first loop iteration. The tests should still
// pass because the loop will run until a false positive is found, but they will be slower.
- it("should handle a false-positive head", () => {
+ it('should handle a false-positive head', () => {
// Scenario: ,-- n1
// c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+
// `-- n2
// where n2 is a false positive in the Bloom filter containing {n1}.
// lastSync is c9.
- let n1 = Automerge.init("01234567"),
- n2 = Automerge.init("89abcdef")
- let s1 = initSyncState(),
- s2 = initSyncState()
+ let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef')
+ let s1 = initSyncState(), s2 = initSyncState()
- for (let i = 0; i < 10; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
+ for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
;[n1, n2, s1, s2] = sync(n1, n2)
- for (let i = 1; ; i++) {
- // search for false positive; see comment above
- const n1up = Automerge.change(
- Automerge.clone(n1, { actor: "01234567" }),
- { time: 0 },
- doc => (doc.x = `${i} @ n1`)
- )
- const n2up = Automerge.change(
- Automerge.clone(n2, { actor: "89abcdef" }),
- { time: 0 },
- doc => (doc.x = `${i} @ n2`)
- )
+ for (let i = 1; ; i++) { // search for false positive; see comment above
+ const n1up = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`)
+ const n2up = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`)
if (new BloomFilter(getHeads(n1up)).containsHash(getHeads(n2up)[0])) {
- n1 = n1up
- n2 = n2up
- break
+ n1 = n1up; n2 = n2up; break
}
}
const allHeads = [...getHeads(n1), ...getHeads(n2)].sort()
@@ -587,7 +490,7 @@ describe("Data sync protocol", () => {
assert.deepStrictEqual(getHeads(n2), allHeads)
})
- describe("with a false-positive dependency", () => {
+ describe('with a false-positive dependency', () => {
let n1, n2, s1, s2, n1hash2, n2hash2
beforeEach(() => {
@@ -596,57 +499,34 @@ describe("Data sync protocol", () => {
// `-- n2c1 <-- n2c2
// where n2c1 is a false positive in the Bloom filter containing {n1c1, n1c2}.
// lastSync is c9.
- n1 = Automerge.init("01234567")
- n2 = Automerge.init("89abcdef")
+ n1 = Automerge.init('01234567')
+ n2 = Automerge.init('89abcdef')
s1 = initSyncState()
s2 = initSyncState()
- for (let i = 0; i < 10; i++)
- n1 = Automerge.change(n1, { time: 0 }, (doc: any) => (doc.x = i))
+ for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
;[n1, n2, s1, s2] = sync(n1, n2)
let n1hash1, n2hash1
- for (let i = 29; ; i++) {
- // search for false positive; see comment above
- const n1us1 = Automerge.change(
- Automerge.clone(n1, { actor: "01234567" }),
- { time: 0 },
- (doc: any) => (doc.x = `${i} @ n1`)
- )
- const n2us1 = Automerge.change(
- Automerge.clone(n2, { actor: "89abcdef" }),
- { time: 0 },
- (doc: any) => (doc.x = `${i} @ n2`)
- )
- n1hash1 = getHeads(n1us1)[0]
- n2hash1 = getHeads(n2us1)[0]
- const n1us2 = Automerge.change(
- n1us1,
- { time: 0 },
- (doc: any) => (doc.x = "final @ n1")
- )
- const n2us2 = Automerge.change(
- n2us1,
- { time: 0 },
- (doc: any) => (doc.x = "final @ n2")
- )
- n1hash2 = getHeads(n1us2)[0]
- n2hash2 = getHeads(n2us2)[0]
+ for (let i = 29; ; i++) { // search for false positive; see comment above
+ const n1us1 = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`)
+ const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`)
+ n1hash1 = getHeads(n1us1)[0]; n2hash1 = getHeads(n2us1)[0]
+ const n1us2 = Automerge.change(n1us1, {time: 0}, doc => doc.x = 'final @ n1')
+ const n2us2 = Automerge.change(n2us1, {time: 0}, doc => doc.x = 'final @ n2')
+ n1hash2 = getHeads(n1us2)[0]; n2hash2 = getHeads(n2us2)[0]
if (new BloomFilter([n1hash1, n1hash2]).containsHash(n2hash1)) {
- n1 = n1us2
- n2 = n2us2
- break
+ n1 = n1us2; n2 = n2us2; break
}
}
})
- it("should sync two nodes without connection reset", () => {
- ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2)
+ it('should sync two nodes without connection reset', () => {
+ [n1, n2, s1, s2] = sync(n1, n2, s1, s2)
assert.deepStrictEqual(getHeads(n1), [n1hash2, n2hash2].sort())
assert.deepStrictEqual(getHeads(n2), [n1hash2, n2hash2].sort())
})
- // FIXME - this has a periodic failure
- it("should sync two nodes with connection reset", () => {
+ it('should sync two nodes with connection reset', () => {
s1 = decodeSyncState(encodeSyncState(s1))
s2 = decodeSyncState(encodeSyncState(s2))
;[n1, n2, s1, s2] = sync(n1, n2, s1, s2)
@@ -654,7 +534,7 @@ describe("Data sync protocol", () => {
assert.deepStrictEqual(getHeads(n2), [n1hash2, n2hash2].sort())
})
- it.skip("should sync three nodes", () => {
+ it('should sync three nodes', () => {
s1 = decodeSyncState(encodeSyncState(s1))
s2 = decodeSyncState(encodeSyncState(s2))
@@ -674,73 +554,37 @@ describe("Data sync protocol", () => {
assert.strictEqual(decodeSyncMessage(m2).changes.length, 1) // only n2c2; change n2c1 is not sent
// n3 is a node that doesn't have the missing change. Nevertheless n1 is going to ask n3 for it
- let n3 = Automerge.init("fedcba98"),
- s13 = initSyncState(),
- s31 = initSyncState()
+ let n3 = Automerge.init('fedcba98'), s13 = initSyncState(), s31 = initSyncState()
;[n1, n3, s13, s31] = sync(n1, n3, s13, s31)
assert.deepStrictEqual(getHeads(n1), [n1hash2])
assert.deepStrictEqual(getHeads(n3), [n1hash2])
})
})
- it("should not require an additional request when a false-positive depends on a true-negative", () => {
+ it('should not require an additional request when a false-positive depends on a true-negative', () => {
// Scenario: ,-- n1c1 <-- n1c2 <-- n1c3
// c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+
// `-- n2c1 <-- n2c2 <-- n2c3
// where n2c2 is a false positive in the Bloom filter containing {n1c1, n1c2, n1c3}.
// lastSync is c4.
- let n1 = Automerge.init("01234567"),
- n2 = Automerge.init("89abcdef")
- let s1 = initSyncState(),
- s2 = initSyncState()
+ let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef')
+ let s1 = initSyncState(), s2 = initSyncState()
let n1hash3, n2hash3
- for (let i = 0; i < 5; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
+ for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
;[n1, n2, s1, s2] = sync(n1, n2)
- for (let i = 86; ; i++) {
- // search for false positive; see comment above
- const n1us1 = Automerge.change(
- Automerge.clone(n1, { actor: "01234567" }),
- { time: 0 },
- doc => (doc.x = `${i} @ n1`)
- )
- const n2us1 = Automerge.change(
- Automerge.clone(n2, { actor: "89abcdef" }),
- { time: 0 },
- doc => (doc.x = `${i} @ n2`)
- )
+ for (let i = 86; ; i++) { // search for false positive; see comment above
+ const n1us1 = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`)
+ const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`)
const n1hash1 = getHeads(n1us1)[0]
- const n1us2 = Automerge.change(
- n1us1,
- { time: 0 },
- doc => (doc.x = `${i + 1} @ n1`)
- )
- const n2us2 = Automerge.change(
- n2us1,
- { time: 0 },
- doc => (doc.x = `${i + 1} @ n2`)
- )
- const n1hash2 = getHeads(n1us2)[0],
- n2hash2 = getHeads(n2us2)[0]
- const n1up3 = Automerge.change(
- n1us2,
- { time: 0 },
- doc => (doc.x = "final @ n1")
- )
- const n2up3 = Automerge.change(
- n2us2,
- { time: 0 },
- doc => (doc.x = "final @ n2")
- )
- n1hash3 = getHeads(n1up3)[0]
- n2hash3 = getHeads(n2up3)[0]
- if (
- new BloomFilter([n1hash1, n1hash2, n1hash3]).containsHash(n2hash2)
- ) {
- n1 = n1up3
- n2 = n2up3
- break
+ const n1us2 = Automerge.change(n1us1, {time: 0}, doc => doc.x = `${i + 1} @ n1`)
+ const n2us2 = Automerge.change(n2us1, {time: 0}, doc => doc.x = `${i + 1} @ n2`)
+ const n1hash2 = getHeads(n1us2)[0], n2hash2 = getHeads(n2us2)[0]
+ const n1up3 = Automerge.change(n1us2, {time: 0}, doc => doc.x = 'final @ n1')
+ const n2up3 = Automerge.change(n2us2, {time: 0}, doc => doc.x = 'final @ n2')
+ n1hash3 = getHeads(n1up3)[0]; n2hash3 = getHeads(n2up3)[0]
+ if (new BloomFilter([n1hash1, n1hash2, n1hash3]).containsHash(n2hash2)) {
+ n1 = n1up3; n2 = n2up3; break
}
}
const bothHeads = [n1hash3, n2hash3].sort()
@@ -751,46 +595,31 @@ describe("Data sync protocol", () => {
assert.deepStrictEqual(getHeads(n2), bothHeads)
})
- it("should handle chains of false-positives", () => {
+ it('should handle chains of false-positives', () => {
// Scenario: ,-- c5
// c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+
// `-- n2c1 <-- n2c2 <-- n2c3
// where n2c1 and n2c2 are both false positives in the Bloom filter containing {c5}.
// lastSync is c4.
- let n1 = Automerge.init("01234567"),
- n2 = Automerge.init("89abcdef")
- let s1 = initSyncState(),
- s2 = initSyncState()
+ let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef')
+ let s1 = initSyncState(), s2 = initSyncState()
- for (let i = 0; i < 5; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
+ for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
;[n1, n2, s1, s2] = sync(n1, n2, s1, s2)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 5))
- for (let i = 2; ; i++) {
- // search for false positive; see comment above
- const n2us1 = Automerge.change(
- Automerge.clone(n2, { actor: "89abcdef" }),
- { time: 0 },
- doc => (doc.x = `${i} @ n2`)
- )
+ n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 5)
+ for (let i = 2; ; i++) { // search for false positive; see comment above
+ const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`)
if (new BloomFilter(getHeads(n1)).containsHash(getHeads(n2us1)[0])) {
- n2 = n2us1
- break
+ n2 = n2us1; break
}
}
- for (let i = 141; ; i++) {
- // search for false positive; see comment above
- const n2us2 = Automerge.change(
- Automerge.clone(n2, { actor: "89abcdef" }),
- { time: 0 },
- doc => (doc.x = `${i} again`)
- )
+ for (let i = 141; ; i++) { // search for false positive; see comment above
+ const n2us2 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} again`)
if (new BloomFilter(getHeads(n1)).containsHash(getHeads(n2us2)[0])) {
- n2 = n2us2
- break
+ n2 = n2us2; break
}
}
- n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = "final @ n2"))
+ n2 = Automerge.change(n2, {time: 0}, doc => doc.x = 'final @ n2')
const allHeads = [...getHeads(n1), ...getHeads(n2)].sort()
s1 = decodeSyncState(encodeSyncState(s1))
@@ -800,46 +629,32 @@ describe("Data sync protocol", () => {
assert.deepStrictEqual(getHeads(n2), allHeads)
})
- it("should allow the false-positive hash to be explicitly requested", () => {
+ it('should allow the false-positive hash to be explicitly requested', () => {
// Scenario: ,-- n1
// c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+
// `-- n2
// where n2 causes a false positive in the Bloom filter containing {n1}.
- let n1 = Automerge.init("01234567"),
- n2 = Automerge.init("89abcdef")
- let s1 = initSyncState(),
- s2 = initSyncState()
+ let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef')
+ let s1 = initSyncState(), s2 = initSyncState()
let message
- for (let i = 0; i < 10; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
+ for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
;[n1, n2, s1, s2] = sync(n1, n2)
s1 = decodeSyncState(encodeSyncState(s1))
s2 = decodeSyncState(encodeSyncState(s2))
- for (let i = 1; ; i++) {
- // brute-force search for false positive; see comment above
- const n1up = Automerge.change(
- Automerge.clone(n1, { actor: "01234567" }),
- { time: 0 },
- doc => (doc.x = `${i} @ n1`)
- )
- const n2up = Automerge.change(
- Automerge.clone(n2, { actor: "89abcdef" }),
- { time: 0 },
- doc => (doc.x = `${i} @ n2`)
- )
+ for (let i = 1; ; i++) { // brute-force search for false positive; see comment above
+ const n1up = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`)
+ const n2up = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`)
// check if the bloom filter on n2 will believe n1 already has a particular hash
// this will mean n2 won't offer that data to n2 by receiving a sync message from n1
if (new BloomFilter(getHeads(n1up)).containsHash(getHeads(n2up)[0])) {
- n1 = n1up
- n2 = n2up
- break
+ n1 = n1up; n2 = n2up; break
}
}
// n1 creates a sync message for n2 with an ill-fated bloom
- ;[s1, message] = Automerge.generateSyncMessage(n1, s1)
+ [s1, message] = Automerge.generateSyncMessage(n1, s1)
assert.strictEqual(decodeSyncMessage(message).changes.length, 0)
// n2 receives it and DOESN'T send a change back
@@ -863,42 +678,32 @@ describe("Data sync protocol", () => {
})
})
- describe("protocol features", () => {
- it("should allow multiple Bloom filters", () => {
+ describe('protocol features', () => {
+ it('should allow multiple Bloom filters', () => {
// Scenario: ,-- n1c1 <-- n1c2 <-- n1c3
// c0 <-- c1 <-- c2 <-+--- n2c1 <-- n2c2 <-- n2c3
// `-- n3c1 <-- n3c2 <-- n3c3
// n1 has {c0, c1, c2, n1c1, n1c2, n1c3, n2c1, n2c2};
// n2 has {c0, c1, c2, n1c1, n1c2, n2c1, n2c2, n2c3};
// n3 has {c0, c1, c2, n3c1, n3c2, n3c3}.
- let n1 = Automerge.init("01234567"),
- n2 = Automerge.init("89abcdef"),
- n3 = Automerge.init("76543210")
- let s13 = initSyncState()
- let s32 = initSyncState(),
- s31 = initSyncState(),
- s23 = initSyncState()
+ let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('76543210')
+ let s13 = initSyncState(), s12 = initSyncState(), s21 = initSyncState()
+ let s32 = initSyncState(), s31 = initSyncState(), s23 = initSyncState()
let message1, message2, message3
- for (let i = 0; i < 3; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
- // sync all 3 nodes
- ;[n1, n2, ,] = sync(n1, n2) // eslint-disable-line no-unused-vars -- kept for consistency
+ for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
+ // sync all 3 nodes
+ ;[n1, n2, s12, s21] = sync(n1, n2) // eslint-disable-line no-unused-vars -- kept for consistency
;[n1, n3, s13, s31] = sync(n1, n3)
;[n3, n2, s32, s23] = sync(n3, n2)
- for (let i = 0; i < 2; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = `${i} @ n1`))
- for (let i = 0; i < 2; i++)
- n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = `${i} @ n2`))
+ for (let i = 0; i < 2; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = `${i} @ n1`)
+ for (let i = 0; i < 2; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.x = `${i} @ n2`)
;[n1] = Automerge.applyChanges(n1, Automerge.getAllChanges(n2))
;[n2] = Automerge.applyChanges(n2, Automerge.getAllChanges(n1))
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = `3 @ n1`))
- n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = `3 @ n2`))
- for (let i = 0; i < 3; i++)
- n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = `${i} @ n3`))
- const n1c3 = getHeads(n1)[0],
- n2c3 = getHeads(n2)[0],
- n3c3 = getHeads(n3)[0]
+ n1 = Automerge.change(n1, {time: 0}, doc => doc.x = `3 @ n1`)
+ n2 = Automerge.change(n2, {time: 0}, doc => doc.x = `3 @ n2`)
+ for (let i = 0; i < 3; i++) n3 = Automerge.change(n3, {time: 0}, doc => doc.x = `${i} @ n3`)
+ const n1c3 = getHeads(n1)[0], n2c3 = getHeads(n2)[0], n3c3 = getHeads(n3)[0]
s13 = decodeSyncState(encodeSyncState(s13))
s31 = decodeSyncState(encodeSyncState(s31))
s23 = decodeSyncState(encodeSyncState(s23))
@@ -920,11 +725,7 @@ describe("Data sync protocol", () => {
const modifiedMessage = decodeSyncMessage(message3)
modifiedMessage.have.push(decodeSyncMessage(message1).have[0])
assert.strictEqual(modifiedMessage.changes.length, 0)
- ;[n2, s23] = Automerge.receiveSyncMessage(
- n2,
- s23,
- encodeSyncMessage(modifiedMessage)
- )
+ ;[n2, s23] = Automerge.receiveSyncMessage(n2, s23, encodeSyncMessage(modifiedMessage))
// n2 replies to n3, sending only n2c3 (the one change that n2 has but n1 doesn't)
;[s23, message2] = Automerge.generateSyncMessage(n2, s23)
@@ -938,76 +739,53 @@ describe("Data sync protocol", () => {
assert.deepStrictEqual(getHeads(n3), [n1c3, n2c3, n3c3].sort())
})
- it("should allow any change to be requested", () => {
- let n1 = Automerge.init("01234567"),
- n2 = Automerge.init("89abcdef")
- let s1 = initSyncState(),
- s2 = initSyncState()
- let message: Automerge.SyncMessage | null = null
+ it('should allow any change to be requested', () => {
+ let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef')
+ let s1 = initSyncState(), s2 = initSyncState()
+ let message = null
- for (let i = 0; i < 3; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
+ for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
const lastSync = getHeads(n1)
- for (let i = 3; i < 6; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
+ for (let i = 3; i < 6; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
+
;[n1, n2, s1, s2] = sync(n1, n2)
s1.lastSentHeads = [] // force generateSyncMessage to return a message even though nothing changed
;[s1, message] = Automerge.generateSyncMessage(n1, s1)
- const modMsg = decodeSyncMessage(message!)
+ const modMsg = decodeSyncMessage(message)
modMsg.need = lastSync // re-request change 2
- ;[n2, s2] = Automerge.receiveSyncMessage(
- n2,
- s2,
- encodeSyncMessage(modMsg)
- )
+ ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, encodeSyncMessage(modMsg))
;[s1, message] = Automerge.generateSyncMessage(n2, s2)
- assert.strictEqual(decodeSyncMessage(message!).changes.length, 1)
- assert.strictEqual(
- Automerge.decodeChange(decodeSyncMessage(message!).changes[0]).hash,
- lastSync[0]
- )
+ assert.strictEqual(decodeSyncMessage(message).changes.length, 1)
+ assert.strictEqual(Automerge.decodeChange(decodeSyncMessage(message).changes[0]).hash, lastSync[0])
})
- it("should ignore requests for a nonexistent change", () => {
- let n1 = Automerge.init("01234567"),
- n2 = Automerge.init("89abcdef")
- let s1 = initSyncState(),
- s2 = initSyncState()
- let message: Automerge.SyncMessage | null = null
+ it('should ignore requests for a nonexistent change', () => {
+ let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef')
+ let s1 = initSyncState(), s2 = initSyncState()
+ let message = null
- for (let i = 0; i < 3; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i))
+ for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i)
;[n2] = Automerge.applyChanges(n2, Automerge.getAllChanges(n1))
;[s1, message] = Automerge.generateSyncMessage(n1, s1)
- const decoded = Automerge.decodeSyncMessage(message!)
- decoded.need = [
- "0000000000000000000000000000000000000000000000000000000000000000",
- ]
- message = Automerge.encodeSyncMessage(decoded)
- ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, message!)
+ message.need = ['0000000000000000000000000000000000000000000000000000000000000000']
+ ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, message)
;[s2, message] = Automerge.generateSyncMessage(n2, s2)
assert.strictEqual(message, null)
})
- it("should allow a subset of changes to be sent", () => {
+ it('should allow a subset of changes to be sent', () => {
// ,-- c1 <-- c2
// c0 <-+
// `-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8
- let n1 = Automerge.init("01234567"),
- n2 = Automerge.init("89abcdef"),
- n3 = Automerge.init("76543210")
- let s1 = initSyncState(),
- s2 = initSyncState()
+ let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('76543210')
+ let s1 = initSyncState(), s2 = initSyncState()
let msg, decodedMsg
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 0))
+ n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 0)
n3 = Automerge.merge(n3, n1)
- for (let i = 1; i <= 2; i++)
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) // n1 has {c0, c1, c2}
- for (let i = 3; i <= 4; i++)
- n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = i)) // n3 has {c0, c3, c4}
- const c2 = getHeads(n1)[0],
- c4 = getHeads(n3)[0]
+ for (let i = 1; i <= 2; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) // n1 has {c0, c1, c2}
+ for (let i = 3; i <= 4; i++) n3 = Automerge.change(n3, {time: 0}, doc => doc.x = i) // n3 has {c0, c3, c4}
+ const c2 = getHeads(n1)[0], c4 = getHeads(n3)[0]
n2 = Automerge.merge(n2, n3) // n2 has {c0, c3, c4}
// Sync n1 and n2, so their shared heads are {c2, c4}
@@ -1018,13 +796,11 @@ describe("Data sync protocol", () => {
assert.deepStrictEqual(s2.sharedHeads, [c2, c4].sort())
// n2 and n3 apply {c5, c6, c7, c8}
- n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = 5))
+ n3 = Automerge.change(n3, {time: 0}, doc => doc.x = 5)
const change5 = Automerge.getLastLocalChange(n3)
- n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = 6))
- const change6 = Automerge.getLastLocalChange(n3),
- c6 = getHeads(n3)[0]
- for (let i = 7; i <= 8; i++)
- n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = i))
+ n3 = Automerge.change(n3, {time: 0}, doc => doc.x = 6)
+ const change6 = Automerge.getLastLocalChange(n3), c6 = getHeads(n3)[0]
+ for (let i = 7; i <= 8; i++) n3 = Automerge.change(n3, {time: 0}, doc => doc.x = i)
const c8 = getHeads(n3)[0]
n2 = Automerge.merge(n2, n3)
@@ -1035,10 +811,9 @@ describe("Data sync protocol", () => {
decodedMsg = decodeSyncMessage(msg)
decodedMsg.changes = [change5, change6]
msg = encodeSyncMessage(decodedMsg)
- const sentHashes = [
- Automerge.decodeChange(change5!).hash,
- Automerge.decodeChange(change6!).hash,
- ]
+ const sentHashes = {}
+ sentHashes[decodeChangeMeta(change5, true).hash] = true
+ sentHashes[decodeChangeMeta(change6, true).hash] = true
s2.sentHashes = sentHashes
;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, msg)
assert.deepStrictEqual(s1.sharedHeads, [c2, c6].sort())
@@ -1047,10 +822,7 @@ describe("Data sync protocol", () => {
;[s1, msg] = Automerge.generateSyncMessage(n1, s1)
;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, msg)
assert.deepStrictEqual(decodeSyncMessage(msg).need, [c8])
- assert.deepStrictEqual(
- decodeSyncMessage(msg).have[0].lastSync,
- [c2, c6].sort()
- )
+ assert.deepStrictEqual(decodeSyncMessage(msg).have[0].lastSync, [c2, c6].sort())
assert.deepStrictEqual(s1.sharedHeads, [c2, c6].sort())
assert.deepStrictEqual(s2.sharedHeads, [c2, c6].sort())
diff --git a/automerge-js/test/text_test.js b/automerge-js/test/text_test.js
new file mode 100644
index 00000000..57e8884e
--- /dev/null
+++ b/automerge-js/test/text_test.js
@@ -0,0 +1,697 @@
+const assert = require('assert')
+const Automerge = require('..')
+const { assertEqualsOneOf } = require('./helpers')
+
+function attributeStateToAttributes(accumulatedAttributes) {
+ const attributes = {}
+ Object.entries(accumulatedAttributes).forEach(([key, values]) => {
+ if (values.length && values[0] !== null) {
+ attributes[key] = values[0]
+ }
+ })
+ return attributes
+}
+
+function isEquivalent(a, b) {
+ const aProps = Object.getOwnPropertyNames(a)
+ const bProps = Object.getOwnPropertyNames(b)
+
+ if (aProps.length != bProps.length) {
+ return false
+ }
+
+ for (let i = 0; i < aProps.length; i++) {
+ const propName = aProps[i]
+ if (a[propName] !== b[propName]) {
+ return false
+ }
+ }
+
+ return true
+}
+
+function isControlMarker(pseudoCharacter) {
+ return typeof pseudoCharacter === 'object' && pseudoCharacter.attributes
+}
+
+function opFrom(text, attributes) {
+ let op = { insert: text }
+ if (Object.keys(attributes).length > 0) {
+ op.attributes = attributes
+ }
+ return op
+}
+
+function accumulateAttributes(span, accumulatedAttributes) {
+ Object.entries(span).forEach(([key, value]) => {
+ if (!accumulatedAttributes[key]) {
+ accumulatedAttributes[key] = []
+ }
+ if (value === null) {
+ if (accumulatedAttributes[key].length === 0 || accumulatedAttributes[key] === null) {
+ accumulatedAttributes[key].unshift(null)
+ } else {
+ accumulatedAttributes[key].shift()
+ }
+ } else {
+ if (accumulatedAttributes[key][0] === null) {
+ accumulatedAttributes[key].shift()
+ } else {
+ accumulatedAttributes[key].unshift(value)
+ }
+ }
+ })
+ return accumulatedAttributes
+}
+
+function automergeTextToDeltaDoc(text) {
+ let ops = []
+ let controlState = {}
+ let currentString = ""
+ let attributes = {}
+ text.toSpans().forEach((span) => {
+ if (isControlMarker(span)) {
+ controlState = accumulateAttributes(span.attributes, controlState)
+ } else {
+ let next = attributeStateToAttributes(controlState)
+
+ // if the next span has the same calculated attributes as the current span
+ // don't bother outputting it as a separate span, just let it ride
+ if (typeof span === 'string' && isEquivalent(next, attributes)) {
+ currentString = currentString + span
+ return
+ }
+
+ if (currentString) {
+ ops.push(opFrom(currentString, attributes))
+ }
+
+ // If we've got a string, we might be able to concatenate it to another
+ // same-attributed-string, so remember it and go to the next iteration.
+ if (typeof span === 'string') {
+ currentString = span
+ attributes = next
+ } else {
+ // otherwise we have an embed "character" and should output it immediately.
+ // embeds are always one-"character" in length.
+ ops.push(opFrom(span, next))
+ currentString = ''
+ attributes = {}
+ }
+ }
+ })
+
+ // at the end, flush any accumulated string out
+ if (currentString) {
+ ops.push(opFrom(currentString, attributes))
+ }
+
+ return ops
+}
+
+function inverseAttributes(attributes) {
+ let invertedAttributes = {}
+ Object.keys(attributes).forEach((key) => {
+ invertedAttributes[key] = null
+ })
+ return invertedAttributes
+}
+
+function applyDeleteOp(text, offset, op) {
+ let length = op.delete
+ while (length > 0) {
+ if (isControlMarker(text.get(offset))) {
+ offset += 1
+ } else {
+ // we need to not delete control characters, but we do delete embed characters
+ text.deleteAt(offset, 1)
+ length -= 1
+ }
+ }
+ return [text, offset]
+}
+
+function applyRetainOp(text, offset, op) {
+ let length = op.retain
+
+ if (op.attributes) {
+ text.insertAt(offset, { attributes: op.attributes })
+ offset += 1
+ }
+
+ while (length > 0) {
+ const char = text.get(offset)
+ offset += 1
+ if (!isControlMarker(char)) {
+ length -= 1
+ }
+ }
+
+ if (op.attributes) {
+ text.insertAt(offset, { attributes: inverseAttributes(op.attributes) })
+ offset += 1
+ }
+
+ return [text, offset]
+}
+
+
+function applyInsertOp(text, offset, op) {
+ let originalOffset = offset
+
+ if (typeof op.insert === 'string') {
+ text.insertAt(offset, ...op.insert.split(''))
+ offset += op.insert.length
+ } else {
+ // we have an embed or something similar
+ text.insertAt(offset, op.insert)
+ offset += 1
+ }
+
+ if (op.attributes) {
+ text.insertAt(originalOffset, { attributes: op.attributes })
+ offset += 1
+ }
+ if (op.attributes) {
+ text.insertAt(offset, { attributes: inverseAttributes(op.attributes) })
+ offset += 1
+ }
+ return [text, offset]
+}
+
+// XXX: uhhhhh, why can't I pass in text?
+function applyDeltaDocToAutomergeText(delta, doc) {
+ let offset = 0
+
+ delta.forEach(op => {
+ if (op.retain) {
+ [, offset] = applyRetainOp(doc.text, offset, op)
+ } else if (op.delete) {
+ [, offset] = applyDeleteOp(doc.text, offset, op)
+ } else if (op.insert) {
+ [, offset] = applyInsertOp(doc.text, offset, op)
+ }
+ })
+}
+
+describe('Automerge.Text', () => {
+ let s1, s2
+ beforeEach(() => {
+ s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text())
+ s2 = Automerge.merge(Automerge.init(), s1)
+ })
+
+ it('should support insertion', () => {
+ s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a'))
+ assert.strictEqual(s1.text.length, 1)
+ assert.strictEqual(s1.text.get(0), 'a')
+ assert.strictEqual(s1.text.toString(), 'a')
+ //assert.strictEqual(s1.text.getElemId(0), `2@${Automerge.getActorId(s1)}`)
+ })
+
+ it('should support deletion', () => {
+ s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', 'b', 'c'))
+ s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 1))
+ assert.strictEqual(s1.text.length, 2)
+ assert.strictEqual(s1.text.get(0), 'a')
+ assert.strictEqual(s1.text.get(1), 'c')
+ assert.strictEqual(s1.text.toString(), 'ac')
+ })
+
+ it("should support implicit and explicit deletion", () => {
+ s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a", "b", "c"))
+ s1 = Automerge.change(s1, doc => doc.text.deleteAt(1))
+ s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 0))
+ assert.strictEqual(s1.text.length, 2)
+ assert.strictEqual(s1.text.get(0), "a")
+ assert.strictEqual(s1.text.get(1), "c")
+ assert.strictEqual(s1.text.toString(), "ac")
+ })
+
+ it('should handle concurrent insertion', () => {
+ s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', 'b', 'c'))
+ s2 = Automerge.change(s2, doc => doc.text.insertAt(0, 'x', 'y', 'z'))
+ s1 = Automerge.merge(s1, s2)
+ assert.strictEqual(s1.text.length, 6)
+ assertEqualsOneOf(s1.text.toString(), 'abcxyz', 'xyzabc')
+ assertEqualsOneOf(s1.text.join(''), 'abcxyz', 'xyzabc')
+ })
+
+ it('should handle text and other ops in the same change', () => {
+ s1 = Automerge.change(s1, doc => {
+ doc.foo = 'bar'
+ doc.text.insertAt(0, 'a')
+ })
+ assert.strictEqual(s1.foo, 'bar')
+ assert.strictEqual(s1.text.toString(), 'a')
+ assert.strictEqual(s1.text.join(''), 'a')
+ })
+
+ it('should serialize to JSON as a simple string', () => {
+ s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', '"', 'b'))
+ assert.strictEqual(JSON.stringify(s1), '{"text":"a\\"b"}')
+ })
+
+ it('should allow modification before an object is assigned to a document', () => {
+ s1 = Automerge.change(Automerge.init(), doc => {
+ const text = new Automerge.Text()
+ text.insertAt(0, 'a', 'b', 'c', 'd')
+ text.deleteAt(2)
+ doc.text = text
+ assert.strictEqual(doc.text.toString(), 'abd')
+ assert.strictEqual(doc.text.join(''), 'abd')
+ })
+ assert.strictEqual(s1.text.toString(), 'abd')
+ assert.strictEqual(s1.text.join(''), 'abd')
+ })
+
+ it('should allow modification after an object is assigned to a document', () => {
+ s1 = Automerge.change(Automerge.init(), doc => {
+ const text = new Automerge.Text()
+ doc.text = text
+ doc.text.insertAt(0, 'a', 'b', 'c', 'd')
+ doc.text.deleteAt(2)
+ assert.strictEqual(doc.text.toString(), 'abd')
+ assert.strictEqual(doc.text.join(''), 'abd')
+ })
+ assert.strictEqual(s1.text.join(''), 'abd')
+ })
+
+ it('should not allow modification outside of a change callback', () => {
+ assert.throws(() => s1.text.insertAt(0, 'a'), /object cannot be modified outside of a change block/)
+ })
+
+ describe('with initial value', () => {
+ it('should accept a string as initial value', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text('init'))
+ assert.strictEqual(s1.text.length, 4)
+ assert.strictEqual(s1.text.get(0), 'i')
+ assert.strictEqual(s1.text.get(1), 'n')
+ assert.strictEqual(s1.text.get(2), 'i')
+ assert.strictEqual(s1.text.get(3), 't')
+ assert.strictEqual(s1.text.toString(), 'init')
+ })
+
+ it('should accept an array as initial value', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text(['i', 'n', 'i', 't']))
+ assert.strictEqual(s1.text.length, 4)
+ assert.strictEqual(s1.text.get(0), 'i')
+ assert.strictEqual(s1.text.get(1), 'n')
+ assert.strictEqual(s1.text.get(2), 'i')
+ assert.strictEqual(s1.text.get(3), 't')
+ assert.strictEqual(s1.text.toString(), 'init')
+ })
+
+ it('should initialize text in Automerge.from()', () => {
+ let s1 = Automerge.from({text: new Automerge.Text('init')})
+ assert.strictEqual(s1.text.length, 4)
+ assert.strictEqual(s1.text.get(0), 'i')
+ assert.strictEqual(s1.text.get(1), 'n')
+ assert.strictEqual(s1.text.get(2), 'i')
+ assert.strictEqual(s1.text.get(3), 't')
+ assert.strictEqual(s1.text.toString(), 'init')
+ })
+
+ it('should encode the initial value as a change', () => {
+ const s1 = Automerge.from({text: new Automerge.Text('init')})
+ const changes = Automerge.getAllChanges(s1)
+ assert.strictEqual(changes.length, 1)
+ const [s2] = Automerge.applyChanges(Automerge.init(), changes)
+ assert.strictEqual(s2.text instanceof Automerge.Text, true)
+ assert.strictEqual(s2.text.toString(), 'init')
+ assert.strictEqual(s2.text.join(''), 'init')
+ })
+
+ it('should allow immediate access to the value', () => {
+ Automerge.change(Automerge.init(), doc => {
+ const text = new Automerge.Text('init')
+ assert.strictEqual(text.length, 4)
+ assert.strictEqual(text.get(0), 'i')
+ assert.strictEqual(text.toString(), 'init')
+ doc.text = text
+ assert.strictEqual(doc.text.length, 4)
+ assert.strictEqual(doc.text.get(0), 'i')
+ assert.strictEqual(doc.text.toString(), 'init')
+ })
+ })
+
+ it('should allow pre-assignment modification of the initial value', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => {
+ const text = new Automerge.Text('init')
+ text.deleteAt(3)
+ assert.strictEqual(text.join(''), 'ini')
+ doc.text = text
+ assert.strictEqual(doc.text.join(''), 'ini')
+ assert.strictEqual(doc.text.toString(), 'ini')
+ })
+ assert.strictEqual(s1.text.toString(), 'ini')
+ assert.strictEqual(s1.text.join(''), 'ini')
+ })
+
+ it('should allow post-assignment modification of the initial value', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => {
+ const text = new Automerge.Text('init')
+ doc.text = text
+ doc.text.deleteAt(0)
+ doc.text.insertAt(0, 'I')
+ assert.strictEqual(doc.text.join(''), 'Init')
+ assert.strictEqual(doc.text.toString(), 'Init')
+ })
+ assert.strictEqual(s1.text.join(''), 'Init')
+ assert.strictEqual(s1.text.toString(), 'Init')
+ })
+ })
+
+ describe('non-textual control characters', () => {
+ let s1
+ beforeEach(() => {
+ s1 = Automerge.change(Automerge.init(), doc => {
+ doc.text = new Automerge.Text()
+ doc.text.insertAt(0, 'a')
+ doc.text.insertAt(1, { attribute: 'bold' })
+ })
+ })
+
+ it('should allow fetching non-textual characters', () => {
+ assert.deepEqual(s1.text.get(1), { attribute: 'bold' })
+ //assert.strictEqual(s1.text.getElemId(1), `3@${Automerge.getActorId(s1)}`)
+ })
+
+ it('should include control characters in string length', () => {
+ assert.strictEqual(s1.text.length, 2)
+ assert.strictEqual(s1.text.get(0), 'a')
+ })
+
+ it('should exclude control characters from toString()', () => {
+ assert.strictEqual(s1.text.toString(), 'a')
+ })
+
+ it('should allow control characters to be updated', () => {
+ const s2 = Automerge.change(s1, doc => doc.text.get(1).attribute = 'italic')
+ const s3 = Automerge.load(Automerge.save(s2))
+ assert.strictEqual(s1.text.get(1).attribute, 'bold')
+ assert.strictEqual(s2.text.get(1).attribute, 'italic')
+ assert.strictEqual(s3.text.get(1).attribute, 'italic')
+ })
+
+ describe('spans interface to Text', () => {
+ it('should return a simple string as a single span', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => {
+ doc.text = new Automerge.Text('hello world')
+ })
+ assert.deepEqual(s1.text.toSpans(), ['hello world'])
+ })
+ it('should return an empty string as an empty array', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => {
+ doc.text = new Automerge.Text()
+ })
+ assert.deepEqual(s1.text.toSpans(), [])
+ })
+ it('should split a span at a control character', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => {
+ doc.text = new Automerge.Text('hello world')
+ doc.text.insertAt(5, { attributes: { bold: true } })
+ })
+ assert.deepEqual(s1.text.toSpans(),
+ ['hello', { attributes: { bold: true } }, ' world'])
+ })
+ it('should allow consecutive control characters', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => {
+ doc.text = new Automerge.Text('hello world')
+ doc.text.insertAt(5, { attributes: { bold: true } })
+ doc.text.insertAt(6, { attributes: { italic: true } })
+ })
+ assert.deepEqual(s1.text.toSpans(),
+ ['hello',
+ { attributes: { bold: true } },
+ { attributes: { italic: true } },
+ ' world'
+ ])
+ })
+ it('should allow non-consecutive control characters', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => {
+ doc.text = new Automerge.Text('hello world')
+ doc.text.insertAt(5, { attributes: { bold: true } })
+ doc.text.insertAt(12, { attributes: { italic: true } })
+ })
+ assert.deepEqual(s1.text.toSpans(),
+ ['hello',
+ { attributes: { bold: true } },
+ ' world',
+ { attributes: { italic: true } }
+ ])
+ })
+
+ it('should be convertable into a Quill delta', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => {
+ doc.text = new Automerge.Text('Gandalf the Grey')
+ doc.text.insertAt(0, { attributes: { bold: true } })
+ doc.text.insertAt(7 + 1, { attributes: { bold: null } })
+ doc.text.insertAt(12 + 2, { attributes: { color: '#cccccc' } })
+ })
+
+ let deltaDoc = automergeTextToDeltaDoc(s1.text)
+
+ // From https://quilljs.com/docs/delta/
+ let expectedDoc = [
+ { insert: 'Gandalf', attributes: { bold: true } },
+ { insert: ' the ' },
+ { insert: 'Grey', attributes: { color: '#cccccc' } }
+ ]
+
+ assert.deepEqual(deltaDoc, expectedDoc)
+ })
+
+ it('should support embeds', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => {
+ doc.text = new Automerge.Text('')
+ doc.text.insertAt(0, { attributes: { link: 'https://quilljs.com' } })
+ doc.text.insertAt(1, {
+ image: 'https://quilljs.com/assets/images/icon.png'
+ })
+ doc.text.insertAt(2, { attributes: { link: null } })
+ })
+
+ let deltaDoc = automergeTextToDeltaDoc(s1.text)
+
+ // From https://quilljs.com/docs/delta/
+ let expectedDoc = [{
+ // An image link
+ insert: {
+ image: 'https://quilljs.com/assets/images/icon.png'
+ },
+ attributes: {
+ link: 'https://quilljs.com'
+ }
+ }]
+
+ assert.deepEqual(deltaDoc, expectedDoc)
+ })
+
+ it('should handle concurrent overlapping spans', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => {
+ doc.text = new Automerge.Text('Gandalf the Grey')
+ })
+
+ let s2 = Automerge.merge(Automerge.init(), s1)
+
+ let s3 = Automerge.change(s1, doc => {
+ doc.text.insertAt(8, { attributes: { bold: true } })
+ doc.text.insertAt(16 + 1, { attributes: { bold: null } })
+ })
+
+ let s4 = Automerge.change(s2, doc => {
+ doc.text.insertAt(0, { attributes: { bold: true } })
+ doc.text.insertAt(11 + 1, { attributes: { bold: null } })
+ })
+
+ let merged = Automerge.merge(s3, s4)
+
+ let deltaDoc = automergeTextToDeltaDoc(merged.text)
+
+ // From https://quilljs.com/docs/delta/
+ let expectedDoc = [
+ { insert: 'Gandalf the Grey', attributes: { bold: true } },
+ ]
+
+ assert.deepEqual(deltaDoc, expectedDoc)
+ })
+
+ it('should handle debolding spans', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => {
+ doc.text = new Automerge.Text('Gandalf the Grey')
+ })
+
+ let s2 = Automerge.merge(Automerge.init(), s1)
+
+ let s3 = Automerge.change(s1, doc => {
+ doc.text.insertAt(0, { attributes: { bold: true } })
+ doc.text.insertAt(16 + 1, { attributes: { bold: null } })
+ })
+
+ let s4 = Automerge.change(s2, doc => {
+ doc.text.insertAt(8, { attributes: { bold: null } })
+ doc.text.insertAt(11 + 1, { attributes: { bold: true } })
+ })
+
+
+ let merged = Automerge.merge(s3, s4)
+
+ let deltaDoc = automergeTextToDeltaDoc(merged.text)
+
+ // From https://quilljs.com/docs/delta/
+ let expectedDoc = [
+ { insert: 'Gandalf ', attributes: { bold: true } },
+ { insert: 'the' },
+ { insert: ' Grey', attributes: { bold: true } },
+ ]
+
+ assert.deepEqual(deltaDoc, expectedDoc)
+ })
+
+ // xxx: how would this work for colors?
+ it('should handle destyling across destyled spans', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => {
+ doc.text = new Automerge.Text('Gandalf the Grey')
+ })
+
+ let s2 = Automerge.merge(Automerge.init(), s1)
+
+ let s3 = Automerge.change(s1, doc => {
+ doc.text.insertAt(0, { attributes: { bold: true } })
+ doc.text.insertAt(16 + 1, { attributes: { bold: null } })
+ })
+
+ let s4 = Automerge.change(s2, doc => {
+ doc.text.insertAt(8, { attributes: { bold: null } })
+ doc.text.insertAt(11 + 1, { attributes: { bold: true } })
+ })
+
+ let merged = Automerge.merge(s3, s4)
+
+ let final = Automerge.change(merged, doc => {
+ doc.text.insertAt(3 + 1, { attributes: { bold: null } })
+ doc.text.insertAt(doc.text.length, { attributes: { bold: true } })
+ })
+
+ let deltaDoc = automergeTextToDeltaDoc(final.text)
+
+ // From https://quilljs.com/docs/delta/
+ let expectedDoc = [
+ { insert: 'Gan', attributes: { bold: true } },
+ { insert: 'dalf the Grey' },
+ ]
+
+ assert.deepEqual(deltaDoc, expectedDoc)
+ })
+
+ it('should apply an insert', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => {
+ doc.text = new Automerge.Text('Hello world')
+ })
+
+ const delta = [
+ { retain: 6 },
+ { insert: 'reader' },
+ { delete: 5 }
+ ]
+
+ let s2 = Automerge.change(s1, doc => {
+ applyDeltaDocToAutomergeText(delta, doc)
+ })
+
+ assert.strictEqual(s2.text.join(''), 'Hello reader')
+ })
+
+ it('should apply an insert with control characters', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => {
+ doc.text = new Automerge.Text('Hello world')
+ })
+
+ const delta = [
+ { retain: 6 },
+ { insert: 'reader', attributes: { bold: true } },
+ { delete: 5 },
+ { insert: '!' }
+ ]
+
+ let s2 = Automerge.change(s1, doc => {
+ applyDeltaDocToAutomergeText(delta, doc)
+ })
+
+ assert.strictEqual(s2.text.toString(), 'Hello reader!')
+ assert.deepEqual(s2.text.toSpans(), [
+ "Hello ",
+ { attributes: { bold: true } },
+ "reader",
+ { attributes: { bold: null } },
+ "!"
+ ])
+ })
+
+ it('should account for control characters in retain/delete lengths', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => {
+ doc.text = new Automerge.Text('Hello world')
+ doc.text.insertAt(4, { attributes: { color: '#ccc' } })
+ doc.text.insertAt(10, { attributes: { color: '#f00' } })
+ })
+
+ const delta = [
+ { retain: 6 },
+ { insert: 'reader', attributes: { bold: true } },
+ { delete: 5 },
+ { insert: '!' }
+ ]
+
+ let s2 = Automerge.change(s1, doc => {
+ applyDeltaDocToAutomergeText(delta, doc)
+ })
+
+ assert.strictEqual(s2.text.toString(), 'Hello reader!')
+ assert.deepEqual(s2.text.toSpans(), [
+ "Hell",
+ { attributes: { color: '#ccc'} },
+ "o ",
+ { attributes: { bold: true } },
+ "reader",
+ { attributes: { bold: null } },
+ { attributes: { color: '#f00'} },
+ "!"
+ ])
+ })
+
+ it('should support embeds', () => {
+ let s1 = Automerge.change(Automerge.init(), doc => {
+ doc.text = new Automerge.Text('')
+ })
+
+ let deltaDoc = [{
+ // An image link
+ insert: {
+ image: 'https://quilljs.com/assets/images/icon.png'
+ },
+ attributes: {
+ link: 'https://quilljs.com'
+ }
+ }]
+
+ let s2 = Automerge.change(s1, doc => {
+ applyDeltaDocToAutomergeText(deltaDoc, doc)
+ })
+
+ assert.deepEqual(s2.text.toSpans(), [
+ { attributes: { link: 'https://quilljs.com' } },
+ { image: 'https://quilljs.com/assets/images/icon.png'},
+ { attributes: { link: null } },
+ ])
+ })
+ })
+ })
+
+ it('should support unicode when creating text', () => {
+ s1 = Automerge.from({
+ text: new Automerge.Text('🐦')
+ })
+ assert.strictEqual(s1.text.get(0), '🐦')
+ })
+})
diff --git a/automerge-js/test/uuid_test.js b/automerge-js/test/uuid_test.js
new file mode 100644
index 00000000..a0f83df1
--- /dev/null
+++ b/automerge-js/test/uuid_test.js
@@ -0,0 +1,32 @@
+const assert = require('assert')
+const Automerge = require('..')
+
+const uuid = Automerge.uuid
+
+describe('uuid', () => {
+ afterEach(() => {
+ uuid.reset()
+ })
+
+ describe('default implementation', () => {
+ it('generates unique values', () => {
+ assert.notEqual(uuid(), uuid())
+ })
+ })
+
+ describe('custom implementation', () => {
+ let counter
+
+ function customUuid() {
+ return `custom-uuid-${counter++}`
+ }
+
+ before(() => uuid.setFactory(customUuid))
+ beforeEach(() => counter = 0)
+
+ it('invokes the custom factory', () => {
+ assert.equal(uuid(), 'custom-uuid-0')
+ assert.equal(uuid(), 'custom-uuid-1')
+ })
+ })
+})
diff --git a/rust/automerge-wasm/.gitignore b/automerge-wasm/.gitignore
similarity index 59%
rename from rust/automerge-wasm/.gitignore
rename to automerge-wasm/.gitignore
index 77c11e08..90f5b649 100644
--- a/rust/automerge-wasm/.gitignore
+++ b/automerge-wasm/.gitignore
@@ -1,6 +1,7 @@
/node_modules
-/bundler
-/nodejs
-/deno
+/dev
+/node
+/web
+/target
Cargo.lock
yarn.lock
diff --git a/rust/automerge-wasm/Cargo.toml b/automerge-wasm/Cargo.toml
similarity index 78%
rename from rust/automerge-wasm/Cargo.toml
rename to automerge-wasm/Cargo.toml
index b6055a7d..2ee2b44e 100644
--- a/rust/automerge-wasm/Cargo.toml
+++ b/automerge-wasm/Cargo.toml
@@ -2,14 +2,13 @@
[package]
name = "automerge-wasm"
description = "An js/wasm wrapper for the rust implementation of automerge-backend"
-repository = "https://github.com/automerge/automerge-rs"
-version = "0.1.0"
+# repository = "https://github.com/automerge/automerge-rs"
+version = "0.0.4"
authors = ["Alex Good ","Orion Henry ", "Martin Kleppmann"]
categories = ["wasm"]
readme = "README.md"
edition = "2021"
license = "MIT"
-rust-version = "1.57.0"
[lib]
crate-type = ["cdylib","rlib"]
@@ -28,24 +27,23 @@ serde = "^1.0"
serde_json = "^1.0"
rand = { version = "^0.8.4" }
getrandom = { version = "^0.2.2", features=["js"] }
-uuid = { version = "^1.2.1", features=["v4", "js", "serde"] }
-serde-wasm-bindgen = "0.4.3"
+uuid = { version = "^0.8.2", features=["v4", "wasm-bindgen", "serde"] }
+serde-wasm-bindgen = "0.1.3"
serde_bytes = "0.11.5"
+unicode-segmentation = "1.7.1"
hex = "^0.4.3"
regex = "^1.5"
-itertools = "^0.10.3"
-thiserror = "^1.0.16"
[dependencies.wasm-bindgen]
-version = "^0.2.83"
+version = "^0.2"
#features = ["std"]
features = ["serde-serialize", "std"]
[package.metadata.wasm-pack.profile.release]
-# wasm-opt = false
+wasm-opt = true
[package.metadata.wasm-pack.profile.profiling]
-wasm-opt = false
+wasm-opt = true
# The `web-sys` crate allows you to interact with the various browser APIs,
# like the DOM.
@@ -57,6 +55,5 @@ features = ["console"]
[dev-dependencies]
futures = "^0.1"
-proptest = { version = "^1.0.0", default-features = false, features = ["std"] }
wasm-bindgen-futures = "^0.4"
wasm-bindgen-test = "^0.3"
diff --git a/automerge-wasm/README.md b/automerge-wasm/README.md
new file mode 100644
index 00000000..80f8f1fa
--- /dev/null
+++ b/automerge-wasm/README.md
@@ -0,0 +1,4 @@
+## Automerge WASM Low Level Interface
+
+This is a low level automerge library written in rust exporting a javascript API via WASM. This low level api is the underpinning to the `automerge-js` library that reimplements the Automerge API via these low level functions.
+
diff --git a/automerge-wasm/attr_bug.js b/automerge-wasm/attr_bug.js
new file mode 100644
index 00000000..324fba33
--- /dev/null
+++ b/automerge-wasm/attr_bug.js
@@ -0,0 +1,15 @@
+let Automerge = require(".")
+let util = require('util')
+
+let heads = ['d138235e8123c407852968a976bb3d05bb30b9f7639854e64cb4adee98a407a6']
+let newHeads = ['d2a0500dad1b4ef1ca0f66015ae24f5cd7bec8316aa8e1115640a665e188147e']
+let text = '10@e1761c3ec92a87d3620d1bc007bdf83a000015ca0b60684edfd007672a0f00113ba1'
+let data = '133,111,74,131,126,182,225,217,0,130,22,22,34,0,174,8,20,12,38,118,140,95,76,123,139,6,212,187,22,0,0,45,11,84,68,75,148,168,76,245,27,147,189,91,99,157,102,34,0,174,8,20,12,38,118,140,95,76,123,139,6,212,187,22,0,0,60,72,31,34,255,16,190,226,176,124,232,19,117,181,152,202,34,0,174,8,20,12,38,118,140,95,76,123,139,6,212,187,22,0,0,173,17,57,82,13,196,120,217,253,4,117,222,120,203,127,31,34,0,174,8,20,12,38,118,140,95,76,123,139,6,212,187,22,0,0,195,238,208,1,215,183,150,181,230,202,10,131,10,53,212,98,16,118,64,44,216,205,38,70,50,172,104,141,96,213,70,225,153,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,7,90,18,166,242,242,169,181,172,173,95,218,197,230,53,171,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,20,123,52,22,113,155,106,167,61,96,211,220,13,176,202,18,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,21,202,11,96,104,78,223,208,7,103,42,15,0,17,59,161,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,49,157,99,144,176,89,107,142,238,50,16,33,198,172,12,98,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,49,160,189,244,223,205,155,34,245,110,74,38,170,63,47,165,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,101,43,36,88,127,139,248,176,98,81,75,151,178,155,65,235,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,104,72,125,26,22,39,88,236,174,2,180,0,186,44,23,100,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,106,192,146,37,220,38,124,176,133,96,99,183,52,146,51,32,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,137,185,129,79,171,192,93,254,162,191,198,11,166,169,184,231,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,183,221,99,120,31,214,103,85,152,145,225,205,226,10,71,148,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,204,247,249,8,135,23,98,57,29,144,111,93,62,1,176,68,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,243,90,241,176,57,235,58,247,98,38,71,96,245,193,178,11
9,34,229,150,245,136,76,151,59,113,93,112,149,234,7,68,20,213,0,0,23,61,123,236,184,3,106,194,171,46,241,84,223,211,110,241,34,229,150,245,136,76,151,59,113,93,112,149,234,7,68,20,213,0,0,32,181,113,40,11,161,118,67,217,36,93,201,189,221,55,174,34,229,150,245,136,76,151,59,113,93,112,149,234,7,68,20,213,0,0,97,188,15,173,96,163,123,87,228,32,227,245,56,237,53,228,34,229,150,245,136,76,151,59,113,93,112,149,234,7,68,20,213,0,0,97,221,248,228,210,133,45,170,105,131,177,2,9,124,254,61,16,255,46,217,125,15,181,79,74,181,101,95,13,121,190,236,160,1,210,160,80,13,173,27,78,241,202,15,102,1,90,226,79,92,215,190,200,49,106,168,225,17,86,64,166,101,225,136,20,126,8,1,48,3,78,19,38,35,3,53,77,64,17,67,20,86,3,14,1,11,2,17,17,165,1,27,242,1,21,202,1,33,238,1,43,187,2,52,4,66,51,86,78,95,177,2,128,1,45,129,1,15,131,1,23,127,7,21,4,24,8,12,15,3,14,15,6,193,0,12,8,13,3,10,127,9,6,11,127,16,3,2,20,3,10,5,33,3,126,5,17,17,1,11,17,24,21,12,0,125,18,20,19,126,1,0,20,1,127,108,23,1,127,105,11,1,127,117,2,1,127,126,14,1,127,114,192,0,1,127,64,7,1,127,121,2,1,126,126,0,5,1,126,123,0,2,1,127,126,19,1,127,109,9,1,127,11,32,1,125,86,118,0,16,1,127,113,10,1,127,117,23,1,127,105,11,1,127,117,2,0,127,14,2,3,16,1,127,3,2,1,127,2,9,1,2,2,14,1,121,2,1,2,1,2,1,2,171,1,1,127,2,28,1,126,0,4,38,1,167,2,0,255,1,0,127,70,123,34,97,117,116,104,111,114,73,100,34,58,34,101,49,55,54,49,99,51,101,99,57,50,97,56,55,100,51,54,50,48,100,49,98,99,48,48,55,98,100,102,56,51,97,34,44,34,109,101,115,115,97,103,101,34,58,34,74,97,102,102,97,32,67,97,107,101,34,125,39,0,127,0,192,1,1,127,2,32,1,127,2,18,1,127,2,49,1,127,0,191,1,1,126,119,10,33,1,126,95,34,17,1,126,112,17,49,1,167,2,7,0,17,161,2,7,127,4,3,8,4,15,0,17,3,9,157,2,10,119,12,19,42,53,55,71,74,77,80,0,21,5,7,124,6,7,15,7,3,4,127,3,4,4,127,6,7,4,124,6,12,3,4,2,15,127,3,2,15,127,6,15,3,127,6,14,3,11,21,2,3,127,6,11,4,126,6,12,15,1,5,4,126,12,3,13,4,127,14,3,4,2,14,124,6,12,3,4,3,8,127,10,9,17,126,8,12,10,21,126,8,6,5,8,127,3,3,8,127,12,8,13,3,8,127,6,6,8,2,1
4,124,6,12,3,8,4,15,127,6,2,8,123,15,12,3,15,6,2,8,127,12,2,2,123,8,6,12,3,8,4,15,127,6,6,8,125,12,10,3,5,8,127,6,2,8,11,0,16,8,127,6,2,12,127,9,6,11,126,16,3,2,4,0,10,69,142,189,75,66,97,28,133,207,121,223,50,149,140,140,43,213,32,45,125,64,208,93,226,6,81,75,83,67,4,145,91,91,67,91,229,216,7,250,187,16,33,53,93,250,35,154,171,185,177,32,130,162,156,36,18,84,156,28,85,80,55,125,95,21,228,89,14,156,7,206,129,35,152,28,195,113,30,79,254,230,110,8,250,133,63,2,226,23,207,62,97,121,67,162,128,188,184,184,103,91,48,27,112,138,82,149,46,98,244,189,40,211,59,108,202,235,80,181,124,192,185,179,74,32,84,56,225,63,62,81,148,65,165,161,76,114,15,21,214,48,190,250,142,178,85,36,199,10,194,204,62,72,17,143,140,48,211,202,122,123,74,243,58,96,221,28,249,195,66,136,87,184,49,11,235,251,70,191,32,22,161,189,173,154,36,53,206,166,83,42,254,5,55,231,39,15,88,198,10,59,178,180,189,81,147,121,83,57,41,104,5,150,48,23,239,244,247,151,143,194,13,70,121,122,43,151,163,183,150,196,55,24,155,96,102,166,32,233,115,68,122,127,8,97,114,99,104,105,118,101,100,2,6,97,117,116,104,111,114,126,8,99,111,109,109,101,110,116,115,12,99,111,110,116,114,105,98,117,116,111,114,115,3,7,109,101,115,115,97,103,101,2,9,112,97,114,101,110,116,95,105,100,125,6,112,105,110,110,101,100,6,115,104,97,114,101,100,4,116,101,120,116,3,4,116,105,109,101,124,5,116,105,116,108,101,32,48,48,97,101,48,56,49,52,48,99,50,54,55,54,56,99,53,102,52,99,55,98,56,98,48,54,100,52,98,98,49,54,32,101,49,55,54,49,99,51,101,99,57,50,97,56,55,100,51,54,50,48,100,49,98,99,48,48,55,98,100,102,56,51,97,32,101,53,57,54,102,53,56,56,52,99,57,55,51,98,55,49,53,100,55,48,57,53,101,97,48,55,52,52,49,52,100,53,0,157,2,9,4,116,121,112,101,2,7,127,21,3,7,124,4,21,4,21,4,7,119,4,21,7,1,7,17,7,5,3,2,12,121,6,12,15,12,4,12,4,2,3,111,12,4,12,6,12,4,12,4,12,4,12,6,12,3,5,15,5,2,3,126,12,6,30,3,12,21,2,3,115,12,4,12,4,12,4,12,4,12,4,12,6,12,16,1,123,4,10,12,4,12,2,3,102,4,12,4,12,4,12,4,12,4,12,4,12,14,12,4,12,14,12,6,12,3,5,8,5,3,10,10,17,127,12,11,21
,126,6,12,4,8,2,3,125,12,8,12,7,13,102,5,13,8,12,6,12,8,5,12,8,12,14,12,6,12,3,5,15,5,3,12,6,12,8,15,12,2,3,124,6,12,8,12,3,2,117,6,12,3,5,15,5,3,12,6,12,15,2,8,124,12,8,12,10,2,3,121,8,12,8,12,6,12,8,12,0,112,12,8,12,8,12,8,12,8,12,8,12,8,12,8,12,6,2,12,127,9,6,11,125,16,3,5,2,4,2,7,127,4,3,8,4,15,117,143,207,43,131,113,28,199,63,239,103,101,241,236,49,19,113,115,81,147,178,56,108,10,7,92,108,56,40,162,20,218,193,109,139,210,52,179,231,251,41,23,148,103,59,40,7,57,176,54,155,205,197,143,63,65,106,53,57,49,108,53,187,56,104,23,66,118,179,135,92,148,195,235,211,235,240,254,244,249,188,157,85,193,93,169,164,194,95,187,37,125,168,154,244,164,178,113,85,214,164,103,181,140,144,120,129,38,46,193,147,253,203,242,76,235,17,223,81,138,79,128,40,167,248,150,2,198,36,103,233,23,79,99,150,228,55,161,33,104,218,97,79,24,102,176,91,1,219,101,44,14,96,157,227,252,64,127,241,54,108,84,98,179,97,177,13,231,33,231,40,77,200,139,28,233,170,147,224,123,210,79,234,209,137,14,125,142,219,166,218,47,200,116,35,142,177,226,46,82,53,68,73,196,80,3,53,41,218,98,108,64,32,12,184,244,181,150,42,204,93,211,41,175,85,124,218,26,231,12,66,223,31,12,119,143,218,123,149,178,216,132,111,112,172,43,202,150,12,217,16,225,206,3,126,36,171,132,249,61,238,115,40,239,149,18,75,174,17,199,25,123,165,2,69,184,64,205,22,124,138,31,29,234,73,240,43,100,120,243,98,159,139,244,31,231,124,69,80,140,240,213,155,211,98,193,47,25,155,100,169,206,96,248,2,20,157,2,9,3,1,2,0,7,1,127,4,7,1,127,0,28,1,127,0,235,0,1,127,0,43,1,127,7,6,1,127,9,9,1,127,0,5,1,127,0,15,1,127,0,5,1,2,0,55,1,127,0,10,1,127,1,2,134,4,2,0,125,230,1,166,1,182,1,2,214,2,2,1,126,0,105,2,100,127,6,3,2,127,0,28,22,127,0,30,22,127,38,204,0,22,127,0,43,22,125,102,2,6,6,22,127,2,9,22,127,0,5,22,127,0,15,22,127,0,5,22,2,0,54,22,125,38,0,22,9,150,1,173,80,75,78,195,48,16,189,74,110,208,184,78,234,100,87,17,85,21,16,169,5,129,248,108,208,56,246,164,85,211,16,98,47,210,172,145,216,245,2,108,122,150,46,144,224,74,116,193,36,161,233,5,106,217,158,55,243,230
,243,108,205,196,136,37,92,39,225,16,2,161,248,104,232,42,38,19,215,21,82,97,192,65,251,225,8,253,32,240,146,80,112,41,152,175,132,27,250,26,92,225,121,204,83,254,253,252,97,18,199,47,183,179,217,221,120,60,190,2,68,112,34,88,233,233,50,79,117,41,75,13,10,163,236,102,184,201,46,39,69,52,227,79,38,136,179,231,183,245,133,143,108,90,101,139,98,190,102,122,163,217,99,140,215,209,251,215,246,251,119,240,209,222,135,159,207,237,97,32,107,107,43,203,44,88,131,96,149,229,22,27,76,177,90,202,26,201,46,150,198,161,13,14,161,60,117,122,127,191,107,2,70,33,28,15,145,170,45,111,110,110,13,41,51,10,254,121,84,176,34,138,16,145,202,58,132,129,186,55,147,186,89,85,223,135,90,244,80,85,173,131,77,54,118,203,216,242,53,79,201,240,118,208,177,158,140,75,237,121,151,216,199,154,199,164,8,157,164,147,86,210,211,82,39,193,21,125,4,45,206,121,85,203,253,206,41,160,132,180,132,98,113,46,240,7,126,0,1,3,0,2,1,126,0,1,4,0,2,1,51,0,127,1,10,0,127,1,219,0,0,127,1,15,0,3,1,36,0,127,1,21,0,2,1,54,0,127,1,11,0,126,21,4,2,21,123,4,21,18,19,20,6,15,127,4,118,156,2,243,125,140,2,3,242,125,141,2,37,2,127,145,126,2,127,3,125,127,92'
+
+let doc = Automerge.loadDoc(new Uint8Array(data.toString().split(",").map((n) => parseInt(n))))
+
+console.log(doc.text(text,heads))
+console.log(doc.text(text,newHeads))
+console.log(doc.text(text))
+console.log(util.inspect(doc.attribute(text,heads,[newHeads]), false, null, false))
+
diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts
new file mode 100644
index 00000000..04373f11
--- /dev/null
+++ b/automerge-wasm/index.d.ts
@@ -0,0 +1,249 @@
+
+export type Actor = string;
+export type ObjID = string;
+export type Change = Uint8Array;
+export type SyncMessage = Uint8Array;
+export type Prop = string | number;
+export type Hash = string;
+export type Heads = Hash[];
+export type Value = string | number | boolean | null | Date | Uint8Array
+export type ObjType = string | Array | Object
+export type FullValue =
+ ["str", string] |
+ ["int", number] |
+ ["uint", number] |
+ ["f64", number] |
+ ["boolean", boolean] |
+ ["timestamp", Date] |
+ ["counter", number] |
+ ["bytes", Uint8Array] |
+ ["null", Uint8Array] |
+ ["map", ObjID] |
+ ["list", ObjID] |
+ ["text", ObjID] |
+ ["table", ObjID]
+
+export enum ObjTypeName {
+ list = "list",
+ map = "map",
+ table = "table",
+ text = "text",
+}
+
+export type Datatype =
+ "boolean" |
+ "str" |
+ "int" |
+ "uint" |
+ "f64" |
+ "null" |
+ "timestamp" |
+ "counter" |
+ "bytes" |
+ "map" |
+ "text" |
+ "list";
+
+export type DecodedSyncMessage = {
+ heads: Heads,
+ need: Heads,
+ have: any[]
+ changes: Change[]
+}
+
+export type DecodedChange = {
+ actor: Actor,
+ seq: number
+ startOp: number,
+ time: number,
+ message: string | null,
+ deps: Heads,
+ hash: Hash,
+ ops: Op[]
+}
+
+export type ChangeSetAddition = {
+ actor: string,
+ start: number,
+ end: number,
+}
+
+export type ChangeSetDeletion = {
+ actor: string,
+ pos: number,
+ val: string
+}
+
+export type ChangeSet = {
+ add: ChangeSetAddition[],
+ del: ChangeSetDeletion[]
+}
+
+export type Op = {
+ action: string,
+ obj: ObjID,
+ key: string,
+ value?: string | number | boolean,
+ datatype?: string,
+ pred: string[],
+}
+
+export function create(actor?: Actor): Automerge;
+export function loadDoc(data: Uint8Array, actor?: Actor): Automerge;
+export function encodeChange(change: DecodedChange): Change;
+export function decodeChange(change: Change): DecodedChange;
+export function initSyncState(): SyncState;
+export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage;
+export function decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage;
+export function encodeSyncState(state: SyncState): Uint8Array;
+export function decodeSyncState(data: Uint8Array): SyncState;
+
+export class Automerge {
+ // change state
+ set(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): undefined;
+ set_object(obj: ObjID, prop: Prop, value: ObjType): ObjID;
+ insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): undefined;
+ insert_object(obj: ObjID, index: number, value: ObjType): ObjID;
+ push(obj: ObjID, value: Value, datatype?: Datatype): undefined;
+ push_object(obj: ObjID, value: ObjType): ObjID;
+ splice(obj: ObjID, start: number, delete_count: number, text?: string | Array): ObjID[] | undefined;
+ inc(obj: ObjID, prop: Prop, value: number): void;
+ del(obj: ObjID, prop: Prop): void;
+
+ // returns a single value - if there is a conflict return the winner
+ value(obj: ObjID, prop: any, heads?: Heads): FullValue | null;
+ // return all values in case of a conflict
+ values(obj: ObjID, arg: any, heads?: Heads): FullValue[];
+ keys(obj: ObjID, heads?: Heads): string[];
+ text(obj: ObjID, heads?: Heads): string;
+ length(obj: ObjID, heads?: Heads): number;
+ materialize(obj?: ObjID): any;
+
+ // experimental spans api - unstable!
+ mark(obj: ObjID, name: string, range: string, value: Value, datatype?: Datatype): void;
+ unmark(obj: ObjID, mark: ObjID): void;
+ spans(obj: ObjID): any;
+ raw_spans(obj: ObjID): any;
+ blame(obj: ObjID, baseline: Heads, changeset: Heads[]): ChangeSet[];
+ attribute(obj: ObjID, baseline: Heads, changeset: Heads[]): ChangeSet[];
+ attribute2(obj: ObjID, baseline: Heads, changeset: Heads[]): ChangeSet[];
+
+ // transactions
+ commit(message?: string, time?: number): Heads;
+ merge(other: Automerge): ObjID[];
+ getActorId(): Actor;
+ pendingOps(): number;
+ rollback(): number;
+
+ // save and load to local store
+ save(): Uint8Array;
+ saveIncremental(): Uint8Array;
+ loadIncremental(data: Uint8Array): ObjID[];
+
+ // sync over network
+ receiveSyncMessage(state: SyncState, message: SyncMessage): ObjID[];
+ generateSyncMessage(state: SyncState): SyncMessage | null;
+
+ // low level change functions
+ applyChanges(changes: Change[]): ObjID[];
+ getChanges(have_deps: Heads): Change[];
+ getChangeByHash(hash: Hash): Change | null;
+ getChangesAdded(other: Automerge): Change[];
+ getHeads(): Heads;
+ getLastLocalChange(): Change;
+ getMissingDeps(heads?: Heads): Heads;
+
+ // memory management
+ free(): void;
+ clone(actor?: string): Automerge;
+ fork(actor?: string): Automerge;
+
+ // dump internal state to console.log
+ dump(): void;
+
+ // dump internal state to a JS object
+ toJS(): any;
+}
+
+export class SyncState {
+ free(): void;
+ clone(): SyncState;
+ lastSentHeads: any;
+ sentHashes: any;
+ readonly sharedHeads: any;
+}
+
+export type InitInput = RequestInfo | URL | Response | BufferSource | WebAssembly.Module;
+
+export interface InitOutput {
+ readonly memory: WebAssembly.Memory;
+ readonly __wbg_automerge_free: (a: number) => void;
+ readonly automerge_new: (a: number, b: number, c: number) => void;
+ readonly automerge_clone: (a: number, b: number, c: number, d: number) => void;
+ readonly automerge_free: (a: number) => void;
+ readonly automerge_pendingOps: (a: number) => number;
+ readonly automerge_commit: (a: number, b: number, c: number, d: number, e: number) => number;
+ readonly automerge_rollback: (a: number) => number;
+ readonly automerge_keys: (a: number, b: number, c: number, d: number, e: number) => void;
+ readonly automerge_text: (a: number, b: number, c: number, d: number, e: number) => void;
+ readonly automerge_splice: (a: number, b: number, c: number, d: number, e: number, f: number, g: number) => void;
+ readonly automerge_push: (a: number, b: number, c: number, d: number, e: number, f: number, g: number) => void;
+ readonly automerge_insert: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number) => void;
+ readonly automerge_set: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number) => void;
+ readonly automerge_inc: (a: number, b: number, c: number, d: number, e: number, f: number) => void;
+ readonly automerge_value: (a: number, b: number, c: number, d: number, e: number, f: number) => void;
+ readonly automerge_values: (a: number, b: number, c: number, d: number, e: number, f: number) => void;
+ readonly automerge_length: (a: number, b: number, c: number, d: number, e: number) => void;
+ readonly automerge_del: (a: number, b: number, c: number, d: number, e: number) => void;
+ readonly automerge_save: (a: number, b: number) => void;
+ readonly automerge_saveIncremental: (a: number) => number;
+ readonly automerge_loadIncremental: (a: number, b: number, c: number) => void;
+ readonly automerge_applyChanges: (a: number, b: number, c: number) => void;
+ readonly automerge_getChanges: (a: number, b: number, c: number) => void;
+ readonly automerge_getChangesAdded: (a: number, b: number, c: number) => void;
+ readonly automerge_getHeads: (a: number) => number;
+ readonly automerge_getActorId: (a: number, b: number) => void;
+ readonly automerge_getLastLocalChange: (a: number, b: number) => void;
+ readonly automerge_dump: (a: number) => void;
+ readonly automerge_getMissingDeps: (a: number, b: number, c: number) => void;
+ readonly automerge_receiveSyncMessage: (a: number, b: number, c: number, d: number) => void;
+ readonly automerge_generateSyncMessage: (a: number, b: number, c: number) => void;
+ readonly automerge_toJS: (a: number) => number;
+ readonly create: (a: number, b: number, c: number) => void;
+ readonly loadDoc: (a: number, b: number, c: number, d: number) => void;
+ readonly encodeChange: (a: number, b: number) => void;
+ readonly decodeChange: (a: number, b: number) => void;
+ readonly initSyncState: () => number;
+ readonly importSyncState: (a: number, b: number) => void;
+ readonly exportSyncState: (a: number) => number;
+ readonly encodeSyncMessage: (a: number, b: number) => void;
+ readonly decodeSyncMessage: (a: number, b: number) => void;
+ readonly encodeSyncState: (a: number, b: number) => void;
+ readonly decodeSyncState: (a: number, b: number) => void;
+ readonly __wbg_list_free: (a: number) => void;
+ readonly __wbg_map_free: (a: number) => void;
+ readonly __wbg_text_free: (a: number) => void;
+ readonly __wbg_table_free: (a: number) => void;
+ readonly __wbg_syncstate_free: (a: number) => void;
+ readonly syncstate_sharedHeads: (a: number) => number;
+ readonly syncstate_lastSentHeads: (a: number) => number;
+ readonly syncstate_set_lastSentHeads: (a: number, b: number, c: number) => void;
+ readonly syncstate_set_sentHashes: (a: number, b: number, c: number) => void;
+ readonly syncstate_clone: (a: number) => number;
+ readonly __wbindgen_malloc: (a: number) => number;
+ readonly __wbindgen_realloc: (a: number, b: number, c: number) => number;
+ readonly __wbindgen_add_to_stack_pointer: (a: number) => number;
+ readonly __wbindgen_free: (a: number, b: number) => void;
+ readonly __wbindgen_exn_store: (a: number) => void;
+}
+
+/**
+* If `module_or_path` is {RequestInfo} or {URL}, makes a request and
+* for everything else, calls `WebAssembly.instantiate` directly.
+*
+* @param {InitInput | Promise} module_or_path
+*
+* @returns {Promise}
+*/
+
+export default function init (module_or_path?: InitInput | Promise): Promise;
diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json
new file mode 100644
index 00000000..336f78f6
--- /dev/null
+++ b/automerge-wasm/package.json
@@ -0,0 +1,43 @@
+{
+ "collaborators": [
+ "Orion Henry ",
+ "Alex Good ",
+ "Martin Kleppmann"
+ ],
+ "name": "automerge-wasm-pack",
+ "description": "wasm-bindgen bindings to the automerge rust implementation",
+ "version": "0.0.23",
+ "license": "MIT",
+ "files": [
+ "README.md",
+ "package.json",
+ "index.d.ts",
+ "node/index.js",
+ "node/index_bg.wasm",
+ "web/index.js",
+ "web/index_bg.wasm"
+ ],
+ "types": "index.d.ts",
+ "module": "./web/index.js",
+ "main": "./node/index.js",
+ "scripts": {
+ "build": "rimraf ./node && wasm-pack build --target nodejs --dev --out-name index -d node && cp index.d.ts node",
+ "release-w": "rimraf ./web && wasm-pack build --target web --release --out-name index -d web && cp index.d.ts web",
+ "release-n": "rimraf ./node && wasm-pack build --target nodejs --release --out-name index -d node && cp index.d.ts node",
+ "release": "yarn release-w && yarn release-n",
+ "test": "yarn build && ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts"
+ },
+ "dependencies": {},
+ "devDependencies": {
+ "@types/expect": "^24.3.0",
+ "@types/jest": "^27.4.0",
+ "@types/mocha": "^9.1.0",
+ "@types/node": "^17.0.13",
+ "fast-sha256": "^1.3.0",
+ "mocha": "^9.1.3",
+ "pako": "^2.0.4",
+ "rimraf": "^3.0.2",
+ "ts-mocha": "^9.0.2",
+ "typescript": "^4.5.5"
+ }
+}
diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs
new file mode 100644
index 00000000..4fec2359
--- /dev/null
+++ b/automerge-wasm/src/interop.rs
@@ -0,0 +1,388 @@
+use automerge as am;
+use automerge::transaction::Transactable;
+use automerge::{Change, ChangeHash, Prop};
+use js_sys::{Array, Object, Reflect, Uint8Array};
+use std::collections::HashSet;
+use std::fmt::Display;
+use unicode_segmentation::UnicodeSegmentation;
+use wasm_bindgen::prelude::*;
+use wasm_bindgen::JsCast;
+
+use crate::{ObjId, ScalarValue, Value};
+
+pub(crate) struct JS(pub JsValue);
+pub(crate) struct AR(pub Array);
+
+impl From for JsValue {
+ fn from(ar: AR) -> Self {
+ ar.0.into()
+ }
+}
+
+impl From for JsValue {
+ fn from(js: JS) -> Self {
+ js.0
+ }
+}
+
+impl From for JS {
+ fn from(state: am::sync::State) -> Self {
+ let shared_heads: JS = state.shared_heads.into();
+ let last_sent_heads: JS = state.last_sent_heads.into();
+ let their_heads: JS = state.their_heads.into();
+ let their_need: JS = state.their_need.into();
+ let sent_hashes: JS = state.sent_hashes.into();
+ let their_have = if let Some(have) = &state.their_have {
+ JsValue::from(AR::from(have.as_slice()).0)
+ } else {
+ JsValue::null()
+ };
+ let result: JsValue = Object::new().into();
+ // we can unwrap here b/c we made the object and know its not frozen
+ Reflect::set(&result, &"sharedHeads".into(), &shared_heads.0).unwrap();
+ Reflect::set(&result, &"lastSentHeads".into(), &last_sent_heads.0).unwrap();
+ Reflect::set(&result, &"theirHeads".into(), &their_heads.0).unwrap();
+ Reflect::set(&result, &"theirNeed".into(), &their_need.0).unwrap();
+ Reflect::set(&result, &"theirHave".into(), &their_have).unwrap();
+ Reflect::set(&result, &"sentHashes".into(), &sent_hashes.0).unwrap();
+ JS(result)
+ }
+}
+
+impl From> for JS {
+ fn from(heads: Vec) -> Self {
+ let heads: Array = heads
+ .iter()
+ .map(|h| JsValue::from_str(&h.to_string()))
+ .collect();
+ JS(heads.into())
+ }
+}
+
+impl From> for JS {
+ fn from(heads: HashSet) -> Self {
+ let result: JsValue = Object::new().into();
+ for key in &heads {
+ Reflect::set(&result, &key.to_string().into(), &true.into()).unwrap();
+ }
+ JS(result)
+ }
+}
+
+impl From