diff --git a/.envrc b/.envrc
deleted file mode 100644
index 3550a30f..00000000
--- a/.envrc
+++ /dev/null
@@ -1 +0,0 @@
-use flake
diff --git a/.github/workflows/advisory-cron.yaml b/.github/workflows/advisory-cron.yaml
deleted file mode 100644
index 31bac5a3..00000000
--- a/.github/workflows/advisory-cron.yaml
+++ /dev/null
@@ -1,17 +0,0 @@
-name: Advisories
-on:
- schedule:
- - cron: '0 18 * * *'
-jobs:
- cargo-deny:
- runs-on: ubuntu-latest
- strategy:
- matrix:
- checks:
- - advisories
- - bans licenses sources
- steps:
- - uses: actions/checkout@v2
- - uses: EmbarkStudios/cargo-deny-action@v1
- with:
- command: check ${{ matrix.checks }}
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
deleted file mode 100644
index 8519ac5e..00000000
--- a/.github/workflows/ci.yaml
+++ /dev/null
@@ -1,177 +0,0 @@
-name: CI
-on:
- push:
- branches:
- - main
- pull_request:
- branches:
- - main
-jobs:
- fmt:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - uses: actions-rs/toolchain@v1
- with:
- profile: minimal
- toolchain: 1.67.0
- default: true
- components: rustfmt
- - uses: Swatinem/rust-cache@v1
- - run: ./scripts/ci/fmt
- shell: bash
-
- lint:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - uses: actions-rs/toolchain@v1
- with:
- profile: minimal
- toolchain: 1.67.0
- default: true
- components: clippy
- - uses: Swatinem/rust-cache@v1
- - run: ./scripts/ci/lint
- shell: bash
-
- docs:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - uses: actions-rs/toolchain@v1
- with:
- profile: minimal
- toolchain: 1.67.0
- default: true
- - uses: Swatinem/rust-cache@v1
- - name: Build rust docs
- run: ./scripts/ci/rust-docs
- shell: bash
- - name: Install doxygen
- run: sudo apt-get install -y doxygen
- shell: bash
-
- cargo-deny:
- runs-on: ubuntu-latest
- strategy:
- matrix:
- checks:
- - advisories
- - bans licenses sources
- continue-on-error: ${{ matrix.checks == 'advisories' }}
- steps:
- - uses: actions/checkout@v2
- - uses: EmbarkStudios/cargo-deny-action@v1
- with:
- arguments: '--manifest-path ./rust/Cargo.toml'
- command: check ${{ matrix.checks }}
-
- wasm_tests:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - name: Install wasm-bindgen-cli
- run: cargo install wasm-bindgen-cli wasm-opt
- - name: Install wasm32 target
- run: rustup target add wasm32-unknown-unknown
- - name: run tests
- run: ./scripts/ci/wasm_tests
- deno_tests:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - uses: denoland/setup-deno@v1
- with:
- deno-version: v1.x
- - name: Install wasm-bindgen-cli
- run: cargo install wasm-bindgen-cli wasm-opt
- - name: Install wasm32 target
- run: rustup target add wasm32-unknown-unknown
- - name: run tests
- run: ./scripts/ci/deno_tests
-
- js_fmt:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - name: install
- run: yarn global add prettier
- - name: format
- run: prettier -c javascript/.prettierrc javascript
-
- js_tests:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - name: Install wasm-bindgen-cli
- run: cargo install wasm-bindgen-cli wasm-opt
- - name: Install wasm32 target
- run: rustup target add wasm32-unknown-unknown
- - name: run tests
- run: ./scripts/ci/js_tests
-
- cmake_build:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - uses: actions-rs/toolchain@v1
- with:
- profile: minimal
- toolchain: nightly-2023-01-26
- default: true
- - uses: Swatinem/rust-cache@v1
- - name: Install CMocka
- run: sudo apt-get install -y libcmocka-dev
- - name: Install/update CMake
- uses: jwlawson/actions-setup-cmake@v1.12
- with:
- cmake-version: latest
- - name: Install rust-src
- run: rustup component add rust-src
- - name: Build and test C bindings
- run: ./scripts/ci/cmake-build Release Static
- shell: bash
-
- linux:
- runs-on: ubuntu-latest
- strategy:
- matrix:
- toolchain:
- - 1.67.0
- steps:
- - uses: actions/checkout@v2
- - uses: actions-rs/toolchain@v1
- with:
- profile: minimal
- toolchain: ${{ matrix.toolchain }}
- default: true
- - uses: Swatinem/rust-cache@v1
- - run: ./scripts/ci/build-test
- shell: bash
-
- macos:
- runs-on: macos-latest
- steps:
- - uses: actions/checkout@v2
- - uses: actions-rs/toolchain@v1
- with:
- profile: minimal
- toolchain: 1.67.0
- default: true
- - uses: Swatinem/rust-cache@v1
- - run: ./scripts/ci/build-test
- shell: bash
-
- windows:
- runs-on: windows-latest
- steps:
- - uses: actions/checkout@v2
- - uses: actions-rs/toolchain@v1
- with:
- profile: minimal
- toolchain: 1.67.0
- default: true
- - uses: Swatinem/rust-cache@v1
- - run: ./scripts/ci/build-test
- shell: bash
diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml
deleted file mode 100644
index b501d526..00000000
--- a/.github/workflows/docs.yaml
+++ /dev/null
@@ -1,52 +0,0 @@
-on:
- push:
- branches:
- - main
-
-name: Documentation
-
-jobs:
- deploy-docs:
- concurrency: deploy-docs
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
-
- - name: Toolchain
- uses: actions-rs/toolchain@v1
- with:
- profile: minimal
- toolchain: stable
- override: true
-
- - name: Cache
- uses: Swatinem/rust-cache@v1
-
- - name: Clean docs dir
- run: rm -rf docs
- shell: bash
-
- - name: Clean Rust docs dir
- uses: actions-rs/cargo@v1
- with:
- command: clean
- args: --manifest-path ./rust/Cargo.toml --doc
-
- - name: Build Rust docs
- uses: actions-rs/cargo@v1
- with:
- command: doc
- args: --manifest-path ./rust/Cargo.toml --workspace --all-features --no-deps
-
- - name: Move Rust docs
- run: mkdir -p docs && mv rust/target/doc/* docs/.
- shell: bash
-
- - name: Configure root page
- run: echo '' > docs/index.html
-
- - name: Deploy docs
- uses: peaceiris/actions-gh-pages@v3
- with:
- github_token: ${{ secrets.GITHUB_TOKEN }}
- publish_dir: ./docs
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
deleted file mode 100644
index 762671ff..00000000
--- a/.github/workflows/release.yaml
+++ /dev/null
@@ -1,214 +0,0 @@
-name: Release
-on:
- push:
- branches:
- - main
-
-jobs:
- check_if_wasm_version_upgraded:
- name: Check if WASM version has been upgraded
- runs-on: ubuntu-latest
- outputs:
- wasm_version: ${{ steps.version-updated.outputs.current-package-version }}
- wasm_has_updated: ${{ steps.version-updated.outputs.has-updated }}
- steps:
- - uses: JiPaix/package-json-updated-action@v1.0.5
- id: version-updated
- with:
- path: rust/automerge-wasm/package.json
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- publish-wasm:
- name: Publish WASM package
- runs-on: ubuntu-latest
- needs:
- - check_if_wasm_version_upgraded
- # We create release only if the version in the package.json has been upgraded
- if: needs.check_if_wasm_version_upgraded.outputs.wasm_has_updated == 'true'
- steps:
- - uses: actions/setup-node@v3
- with:
- node-version: '16.x'
- registry-url: 'https://registry.npmjs.org'
- - uses: denoland/setup-deno@v1
- - uses: actions/checkout@v3
- with:
- fetch-depth: 0
- ref: ${{ github.ref }}
- - name: Get rid of local github workflows
- run: rm -r .github/workflows
- - name: Remove tmp_branch if it exists
- run: git push origin :tmp_branch || true
- - run: git checkout -b tmp_branch
- - name: Install wasm-bindgen-cli
- run: cargo install wasm-bindgen-cli wasm-opt
- - name: Install wasm32 target
- run: rustup target add wasm32-unknown-unknown
- - name: run wasm js tests
- id: wasm_js_tests
- run: ./scripts/ci/wasm_tests
- - name: run wasm deno tests
- id: wasm_deno_tests
- run: ./scripts/ci/deno_tests
- - name: build release
- id: build_release
- run: |
- npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm run release
- - name: Collate deno release files
- if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
- run: |
- mkdir $GITHUB_WORKSPACE/deno_wasm_dist
- cp $GITHUB_WORKSPACE/rust/automerge-wasm/deno/* $GITHUB_WORKSPACE/deno_wasm_dist
- cp $GITHUB_WORKSPACE/rust/automerge-wasm/index.d.ts $GITHUB_WORKSPACE/deno_wasm_dist
- cp $GITHUB_WORKSPACE/rust/automerge-wasm/README.md $GITHUB_WORKSPACE/deno_wasm_dist
- cp $GITHUB_WORKSPACE/rust/automerge-wasm/LICENSE $GITHUB_WORKSPACE/deno_wasm_dist
- sed -i '1i /// ' $GITHUB_WORKSPACE/deno_wasm_dist/automerge_wasm.js
- - name: Create npm release
- if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
- run: |
- if [ "$(npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm show . version)" = "$VERSION" ]; then
- echo "This version is already published"
- exit 0
- fi
- EXTRA_ARGS="--access public"
- if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then
- echo "Is pre-release version"
- EXTRA_ARGS="$EXTRA_ARGS --tag next"
- fi
- if [ "$NODE_AUTH_TOKEN" = "" ]; then
- echo "Can't publish on NPM, You need a NPM_TOKEN secret."
- false
- fi
- npm publish $GITHUB_WORKSPACE/rust/automerge-wasm $EXTRA_ARGS
- env:
- NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
- VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
- - name: Commit wasm deno release files
- run: |
- git config --global user.name "actions"
- git config --global user.email actions@github.com
- git add $GITHUB_WORKSPACE/deno_wasm_dist
- git commit -am "Add deno release files"
- git push origin tmp_branch
- - name: Tag wasm release
- if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
- uses: softprops/action-gh-release@v1
- with:
- name: Automerge Wasm v${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
- tag_name: js/automerge-wasm-${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
- target_commitish: tmp_branch
- generate_release_notes: false
- draft: false
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- - name: Remove tmp_branch
- run: git push origin :tmp_branch
- check_if_js_version_upgraded:
- name: Check if JS version has been upgraded
- runs-on: ubuntu-latest
- outputs:
- js_version: ${{ steps.version-updated.outputs.current-package-version }}
- js_has_updated: ${{ steps.version-updated.outputs.has-updated }}
- steps:
- - uses: JiPaix/package-json-updated-action@v1.0.5
- id: version-updated
- with:
- path: javascript/package.json
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- publish-js:
- name: Publish JS package
- runs-on: ubuntu-latest
- needs:
- - check_if_js_version_upgraded
- - check_if_wasm_version_upgraded
- - publish-wasm
- # We create release only if the version in the package.json has been upgraded and after the WASM release
- if: |
- (always() && ! cancelled()) &&
- (needs.publish-wasm.result == 'success' || needs.publish-wasm.result == 'skipped') &&
- needs.check_if_js_version_upgraded.outputs.js_has_updated == 'true'
- steps:
- - uses: actions/setup-node@v3
- with:
- node-version: '16.x'
- registry-url: 'https://registry.npmjs.org'
- - uses: denoland/setup-deno@v1
- - uses: actions/checkout@v3
- with:
- fetch-depth: 0
- ref: ${{ github.ref }}
- - name: Get rid of local github workflows
- run: rm -r .github/workflows
- - name: Remove js_tmp_branch if it exists
- run: git push origin :js_tmp_branch || true
- - run: git checkout -b js_tmp_branch
- - name: check js formatting
- run: |
- yarn global add prettier
- prettier -c javascript/.prettierrc javascript
- - name: run js tests
- id: js_tests
- run: |
- cargo install wasm-bindgen-cli wasm-opt
- rustup target add wasm32-unknown-unknown
- ./scripts/ci/js_tests
- - name: build js release
- id: build_release
- run: |
- npm --prefix $GITHUB_WORKSPACE/javascript run build
- - name: build js deno release
- id: build_deno_release
- run: |
- VERSION=$WASM_VERSION npm --prefix $GITHUB_WORKSPACE/javascript run deno:build
- env:
- WASM_VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
- - name: run deno tests
- id: deno_tests
- run: |
- npm --prefix $GITHUB_WORKSPACE/javascript run deno:test
- - name: Collate deno release files
- if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
- run: |
- mkdir $GITHUB_WORKSPACE/deno_js_dist
- cp $GITHUB_WORKSPACE/javascript/deno_dist/* $GITHUB_WORKSPACE/deno_js_dist
- - name: Create npm release
- if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
- run: |
- if [ "$(npm --prefix $GITHUB_WORKSPACE/javascript show . version)" = "$VERSION" ]; then
- echo "This version is already published"
- exit 0
- fi
- EXTRA_ARGS="--access public"
- if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then
- echo "Is pre-release version"
- EXTRA_ARGS="$EXTRA_ARGS --tag next"
- fi
- if [ "$NODE_AUTH_TOKEN" = "" ]; then
- echo "Can't publish on NPM, You need a NPM_TOKEN secret."
- false
- fi
- npm publish $GITHUB_WORKSPACE/javascript $EXTRA_ARGS
- env:
- NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
- VERSION: ${{ needs.check_if_js_version_upgraded.outputs.js_version }}
- - name: Commit js deno release files
- run: |
- git config --global user.name "actions"
- git config --global user.email actions@github.com
- git add $GITHUB_WORKSPACE/deno_js_dist
- git commit -am "Add deno js release files"
- git push origin js_tmp_branch
- - name: Tag JS release
- if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
- uses: softprops/action-gh-release@v1
- with:
- name: Automerge v${{ needs.check_if_js_version_upgraded.outputs.js_version }}
- tag_name: js/automerge-${{ needs.check_if_js_version_upgraded.outputs.js_version }}
- target_commitish: js_tmp_branch
- generate_release_notes: false
- draft: false
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- - name: Remove js_tmp_branch
- run: git push origin :js_tmp_branch
diff --git a/.gitignore b/.gitignore
index f77865d0..9f9a9231 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,6 +1,4 @@
-/.direnv
-perf.*
-/Cargo.lock
-build/
-.vim/*
/target
+**/*.rs.bk
+Cargo.lock
+libtest.rmeta
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 00000000..a3b8921c
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,25 @@
+language: rust
+rust:
+- stable
+- beta
+cache: cargo
+before_script:
+- rustup self update
+- rustup component add clippy
+- rustup component add rustfmt
+script:
+- cargo fmt --all -- --check
+- cargo clippy --all-targets --all-features -- -D warnings
+- cargo build --verbose --all
+- cargo test --verbose --all
+jobs:
+ allow_failures:
+ - rust: nightly
+ fast_finish: true
+deploy:
+ provider: cargo
+ on:
+ tags: true
+ condition: "$TRAVIS_RUST_VERSION = stable"
+ token:
+ secure: FWmUT2NJTcy3ccw8B1RYgvlg5SxnkEAeBU2hxXeKLmEBAjzhVPVHjwaQ5RktMRHsyKYJEfDpLD0EHUZknhyDxzCuUKzKYlGgRmtlnsCKS+gDM4j88e/OEnDvxZ2d8ag3Jp8+3GCvv2yjUHFs2JpclqR4ib8LmL6d6x+1+1uxaMOgaDhxQCDLV0eZwX5mTdGAWJl/CpxziFXHYN8/j+e58dJgWN6TUO6BBZeZmkp4xQ6iggEUgIKLLYynG5cM2XtS/j/qbL2ObloamIv9p0SNtj8wTQupJZW3JPBc77gimfeXVQd2+4B/31lJ3GW1310gVBZ9EA7BTbC3M3AkHJFPUIgfEn803zrZhm4WxGg2B+2kENWPpSRUMjhxaPuxAVStHOBl2WSsQTmTRrSUf1nvZUdixTARr6BkKakiNPqts7X/HbxE0cxkk5gtobTyNb4HFbaM/8449U8+KbX7mDXv50FGmRrKxkepOzfRdoEz4h9LnCFWweyle2bpFCQlnro+1SnBRSVmH+c1YUZbIl+He53GUEAwObcHGk+TlhVCGMtmGj/g1THOf4VcWh8C3XoO2yWIu9FoJKvJbd7qm0+dOv+QY8fxgrs4JRSSnt8rXBXhxLKe/ZXl5fHOmLca8T6i/PRfbQ9AzFSCPcz8o4hNO/lVQPSrNrkvxSF39buuYGU=
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644
index 00000000..5d1662b6
--- /dev/null
+++ b/Cargo.toml
@@ -0,0 +1,10 @@
+[workspace]
+
+members = [
+ "automerge",
+ "automerge-backend",
+ "automerge-backend-wasm",
+]
+
+[profile.release]
+lto = true
diff --git a/LICENSE b/LICENSE
index 18812465..dfa2838f 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,19 +1,7 @@
-Copyright (c) 2019-2021 the Automerge contributors
+Copyright 2019 Alex Good
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/README.md b/README.md
index ad174da4..7e5400fc 100644
--- a/README.md
+++ b/README.md
@@ -1,147 +1,75 @@
# Automerge
-
+[](https://docs.rs/automerge)
+[](https://crates.io/crates/automerge)
+[](https://travis-ci.org/alexjg/automerge-rs)
-[](https://automerge.org/)
-[](https://automerge.org/automerge-rs/automerge/)
-[](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml)
-[](https://github.com/automerge/automerge-rs/actions/workflows/docs.yaml)
-Automerge is a library which provides fast implementations of several different
-CRDTs, a compact compression format for these CRDTs, and a sync protocol for
-efficiently transmitting those changes over the network. The objective of the
-project is to support [local-first](https://www.inkandswitch.com/local-first/) applications in the same way that relational
-databases support server applications - by providing mechanisms for persistence
-which allow application developers to avoid thinking about hard distributed
-computing problems. Automerge aims to be PostgreSQL for your local-first app.
+This is a very early, very much work in progress implementation of [automerge](https://github.com/automerge/automerge) in rust. At the moment it implements a simple interface for reading the state of an OpSet, and a really horrendous interface for generating new changes to the Opset.
-If you're looking for documentation on the JavaScript implementation take a look
-at https://automerge.org/docs/hello/. There are other implementations in both
-Rust and C, but they are earlier and don't have documentation yet. You can find
-them in `rust/automerge` and `rust/automerge-c` if you are comfortable
-reading the code and tests to figure out how to use them.
+## Plans
-If you're familiar with CRDTs and interested in the design of Automerge in
-particular take a look at https://automerge.org/docs/how-it-works/backend/
+We're tentatively working on a plan to write a backend for the current javascript implementation of Automerge in Rust. The javascript Automerge library is split into two parts, a "frontend" and a "backend". The "backend" contains a lot of the more complex logic of the CRDT and also has a fairly small API. Given these facts we think we might be able to write a rust implementation of the backend, which compiles to WASM and can be used as a drop in replacement for the current backend. This same rust implementation could also be used via FFI on a lot of other platforms, which would make language interop much easier. This is all early days but it's very exciting.
-Finally, if you want to talk to us about this project please [join the
-Slack](https://join.slack.com/t/automerge/shared_invite/zt-e4p3760n-kKh7r3KRH1YwwNfiZM8ktw)
+For now though, it's a mostly broken pure rust implementation
-## Status
+## How to use
-This project is formed of a core Rust implementation which is exposed via FFI in
-javascript+WASM, C, and soon other languages. Alex
-([@alexjg](https://github.com/alexjg/)]) is working full time on maintaining
-automerge, other members of Ink and Switch are also contributing time and there
-are several other maintainers. The focus is currently on shipping the new JS
-package. We expect to be iterating the API and adding new features over the next
-six months so there will likely be several major version bumps in all packages
-in that time.
-
-In general we try and respect semver.
-
-### JavaScript
-
-A stable release of the javascript package is currently available as
-`@automerge/automerge@2.0.0` where. pre-release verisions of the `2.0.1` are
-available as `2.0.1-alpha.n`. `2.0.1*` packages are also available for Deno at
-https://deno.land/x/automerge
-
-### Rust
-
-The rust codebase is currently oriented around producing a performant backend
-for the Javascript wrapper and as such the API for Rust code is low level and
-not well documented. We will be returning to this over the next few months but
-for now you will need to be comfortable reading the tests and asking questions
-to figure out how to use it. If you are looking to build rust applications which
-use automerge you may want to look into
-[autosurgeon](https://github.com/alexjg/autosurgeon)
-
-## Repository Organisation
-
-- `./rust` - the rust rust implementation and also the Rust components of
- platform specific wrappers (e.g. `automerge-wasm` for the WASM API or
- `automerge-c` for the C FFI bindings)
-- `./javascript` - The javascript library which uses `automerge-wasm`
- internally but presents a more idiomatic javascript interface
-- `./scripts` - scripts which are useful to maintenance of the repository.
- This includes the scripts which are run in CI.
-- `./img` - static assets for use in `.md` files
-
-## Building
-
-To build this codebase you will need:
-
-- `rust`
-- `node`
-- `yarn`
-- `cmake`
-- `cmocka`
-
-You will also need to install the following with `cargo install`
-
-- `wasm-bindgen-cli`
-- `wasm-opt`
-- `cargo-deny`
-
-And ensure you have added the `wasm32-unknown-unknown` target for rust cross-compilation.
-
-The various subprojects (the rust code, the wrapper projects) have their own
-build instructions, but to run the tests that will be run in CI you can run
-`./scripts/ci/run`.
-
-### For macOS
-
-These instructions worked to build locally on macOS 13.1 (arm64) as of
-Nov 29th 2022.
-
-```bash
-# clone the repo
-git clone https://github.com/automerge/automerge-rs
-cd automerge-rs
-
-# install rustup
-curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
-
-# install homebrew
-/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
-
-# install cmake, node, cmocka
-brew install cmake node cmocka
-
-# install yarn
-npm install --global yarn
-
-# install javascript dependencies
-yarn --cwd ./javascript
-
-# install rust dependencies
-cargo install wasm-bindgen-cli wasm-opt cargo-deny
-
-# get nightly rust to produce optimized automerge-c builds
-rustup toolchain install nightly
-rustup component add rust-src --toolchain nightly
-
-# add wasm target in addition to current architecture
-rustup target add wasm32-unknown-unknown
-
-# Run ci script
-./scripts/ci/run
-```
-
-If your build fails to find `cmocka.h` you may need to teach it about homebrew's
-installation location:
+Add this to your dependencies
```
-export CPATH=/opt/homebrew/include
-export LIBRARY_PATH=/opt/homebrew/lib
-./scripts/ci/run
+automerge = "0.0.2"
```
-## Contributing
+You'll need to export changes from automerge as JSON rather than using the encoding that `Automerge.save` uses. So first do this (in javascript):
-Please try and split your changes up into relatively independent commits which
-change one subsystem at a time and add good commit messages which describe what
-the change is and why you're making it (err on the side of longer commit
-messages). `git blame` should give future maintainers a good idea of why
-something is the way it is.
+```javascript
+const doc =
+const changes = Automerge.getHistory(doc).map(h => h.change)
+console.log(JSON.stringify(changes, null, 4))
+```
+
+Now you can load these changes into automerge like so:
+
+
+```rust,no_run
+extern crate automerge;
+
+fn main() {
+ let changes: Vec = serde_json::from_str("").unwrap();
+ let document = automerge::Document::load(changes).unwrap();
+ let state: serde_json::Value = document.state().unwrap();
+ println!("{:?}", state);
+}
+```
+
+You can create new changes to the document by doing things like this:
+
+```rust,no_run
+extern crate automerge;
+
+fn main() {
+ let mut doc = Document::init();
+ let json_value: serde_json::Value = serde_json::from_str(
+ r#"
+ {
+ "cards_by_id": {},
+ "size_of_cards": 12.0,
+ "numRounds": 11.0,
+ "cards": [1.0, false]
+ }
+ "#,
+ )
+ .unwrap();
+ doc.create_and_apply_change(
+ Some("Some change".to_string()),
+ vec![ChangeRequest::Set {
+ path: Path::root().key("the-state".to_string()),
+ value: Value::from_json(&json_value),
+ }],
+ )
+ .unwrap();
+}
+```
+
+Check the docs on `ChangeRequest` for more information on what you can do.
diff --git a/automerge-backend-wasm/.gitignore b/automerge-backend-wasm/.gitignore
new file mode 100644
index 00000000..33a3be33
--- /dev/null
+++ b/automerge-backend-wasm/.gitignore
@@ -0,0 +1,5 @@
+node_modules
+/dist
+/target
+/pkg
+/wasm-pack.log
diff --git a/automerge-backend-wasm/Cargo.toml b/automerge-backend-wasm/Cargo.toml
new file mode 100644
index 00000000..6568f989
--- /dev/null
+++ b/automerge-backend-wasm/Cargo.toml
@@ -0,0 +1,53 @@
+# You must change these to your own details.
+[package]
+name = "automerge-backend-wasm"
+description = ""
+version = "0.1.0"
+authors = ["Alex Good ","Orion Henry "]
+categories = ["wasm"]
+readme = "README.md"
+edition = "2018"
+
+[lib]
+crate-type = ["cdylib","rlib"]
+
+
+[features]
+# If you uncomment this line, it will enable `wee_alloc`:
+#default = ["wee_alloc"]
+
+[dependencies]
+# The `wasm-bindgen` crate provides the bare minimum functionality needed
+# to interact with JavaScript.
+automerge-backend = { path = "../automerge-backend" }
+js-sys = "^0.3"
+serde = "^1.0"
+serde_json = "^1.0"
+
+# `wee_alloc` is a tiny allocator for wasm that is only ~1K in code size
+# compared to the default allocator's ~10K. However, it is slower than the default
+# allocator, so it's not enabled by default.
+wee_alloc = { version = "0.4.2", optional = true }
+
+[dependencies.wasm-bindgen]
+version = "^0.2"
+features = ["serde-serialize"]
+
+# The `web-sys` crate allows you to interact with the various browser APIs,
+# like the DOM.
+[dependencies.web-sys]
+version = "0.3.22"
+features = ["console"]
+
+# The `console_error_panic_hook` crate provides better debugging of panics by
+# logging them with `console.error`. This is great for development, but requires
+# all the `std::fmt` and `std::panicking` infrastructure, so it's only enabled
+# in debug mode.
+[target."cfg(debug_assertions)".dependencies]
+console_error_panic_hook = "0.1.5"
+
+# These crates are used for running unit tests.
+[dev-dependencies]
+futures = "^0.1"
+wasm-bindgen-futures = "^0.3"
+wasm-bindgen-test = "^0.3"
diff --git a/automerge-backend-wasm/README.md b/automerge-backend-wasm/README.md
new file mode 100644
index 00000000..680f386c
--- /dev/null
+++ b/automerge-backend-wasm/README.md
@@ -0,0 +1,48 @@
+## How to install
+
+```sh
+npm install
+```
+
+## How to run in debug mode
+
+```sh
+# Builds the project and opens it in a new browser tab. Auto-reloads when the project changes.
+npm start
+```
+
+## How to build in release mode
+
+```sh
+# Builds the project and places it into the `dist` folder.
+npm run build
+```
+
+## How to run unit tests
+
+```sh
+# Runs tests in Firefox
+npm test -- --firefox
+
+# Runs tests in Chrome
+npm test -- --chrome
+
+# Runs tests in Safari
+npm test -- --safari
+```
+
+## What does each file do?
+
+* `Cargo.toml` contains the standard Rust metadata. You put your Rust dependencies in here. You must change this file with your details (name, description, version, authors, categories)
+
+* `package.json` contains the standard npm metadata. You put your JavaScript dependencies in here. You must change this file with your details (author, name, version)
+
+* `webpack.config.js` contains the Webpack configuration. You shouldn't need to change this, unless you have very special needs.
+
+* The `js` folder contains your JavaScript code (`index.js` is used to hook everything into Webpack, you don't need to change it).
+
+* The `src` folder contains your Rust code.
+
+* The `static` folder contains any files that you want copied as-is into the final build. It contains an `index.html` file which loads the `index.js` file.
+
+* The `tests` folder contains your Rust unit tests.
diff --git a/automerge-backend-wasm/index.js b/automerge-backend-wasm/index.js
new file mode 100644
index 00000000..e59ba728
--- /dev/null
+++ b/automerge-backend-wasm/index.js
@@ -0,0 +1,79 @@
+let Backend = require("./pkg")
+let { fromJS, List } = require('immutable')
+
+function toJS(obj) {
+ if (List.isList(obj)) {
+ return obj.toJS()
+ }
+ return obj
+}
+
+let init = () => {
+ return { state: Backend.State.new(), clock: {}, frozen: false };
+}
+
+let clean = (backend) => {
+ if (backend.frozen) {
+ let state = backend.state.forkAt(backend.clock)
+ backend.state = state
+ backend.clock = state.getClock()
+ backend.frozen = false
+ }
+ return backend.state
+}
+
+let mutate = (oldBackend,fn) => {
+ let state = clean(oldBackend)
+ let result = fn(state)
+ oldBackend.frozen = true
+ let newBackend = { state, clock: state.getClock(), frozen: false };
+ return [ newBackend, result ]
+}
+
+let applyChanges = (backend,changes) => {
+ return mutate(backend, (b) => b.applyChanges(toJS(changes)));
+}
+
+let applyLocalChange = (backend,change) => {
+ return mutate(backend, (b) => b.applyLocalChange(toJS(change)));
+}
+
+let merge = (backend1,backend2) => {
+// let changes = backend2.getMissingChanges(backend1.clock)
+// backend1.applyChanges(changes)
+// let missing_changes = remote.get_missing_changes(self.op_set.clock.clone());
+// self.apply_changes(missing_changes)
+ return mutate(backend1, (b) => b.merge(clean(backend2)));
+}
+
+let getClock = (backend) => {
+ return fromJS(backend.clock);
+}
+
+let getHistory = (backend) => {
+ // TODO: I cant fromJS here b/c transit screws it up
+ let history = clean(backend).getHistory();
+ return history
+}
+
+let getUndoStack = (backend) => {
+ let stack = clean(backend).getUndoStack();
+ return fromJS(stack)
+}
+
+let getRedoStack = (backend) => {
+ let stack = clean(backend).getRedoStack();
+ return fromJS(stack)
+}
+
+let getPatch = (backend) => clean(backend).getPatch()
+let getChanges = (backend,other) => clean(backend).getChanges(clean(other))
+let getChangesForActor = (backend,actor) => clean(backend).getChangesForActor(actor)
+let getMissingChanges = (backend,clock) => clean(backend).getMissingChanges(clock)
+let getMissingDeps = (backend) => clean(backend).getMissingDeps()
+
+module.exports = {
+ init, applyChanges, applyLocalChange, getPatch,
+ getChanges, getChangesForActor, getMissingChanges, getMissingDeps, merge, getClock,
+ getHistory, getUndoStack, getRedoStack
+}
diff --git a/automerge-backend-wasm/package.json b/automerge-backend-wasm/package.json
new file mode 100644
index 00000000..01fbe76e
--- /dev/null
+++ b/automerge-backend-wasm/package.json
@@ -0,0 +1,22 @@
+{
+ "author": "Orion Henry ",
+ "name": "automerge-backend-wasm",
+ "description": "wasm-bindgen bindings to the automerge-backend rust implementation",
+ "version": "0.1.0",
+ "license": "MIT",
+ "main": "./index.js",
+ "scripts": {
+ "build": "rimraf pkg && wasm-pack build --target nodejs --out-name index",
+ "release": "rimraf pkg && wasm-pack build --target nodejs --out-name index --release",
+ "mocha": "yarn build && mocha --bail --full-trace",
+ "test": "cargo test && wasm-pack test --node"
+ },
+ "dependencies": {
+ "immutable": "^3.8.2"
+ },
+ "devDependencies": {
+ "mocha": "^6.2.0",
+ "automerge":"^0.12.1",
+ "rimraf": "^2.6.3"
+ }
+}
diff --git a/automerge-backend-wasm/src/lib.rs b/automerge-backend-wasm/src/lib.rs
new file mode 100644
index 00000000..a79f5e65
--- /dev/null
+++ b/automerge-backend-wasm/src/lib.rs
@@ -0,0 +1,177 @@
+use automerge_backend::{ActorID, AutomergeError, Backend, Change, ChangeRequest, Clock};
+use serde::de::DeserializeOwned;
+use serde::Serialize;
+use wasm_bindgen::prelude::*;
+
+extern crate web_sys;
+#[allow(unused_macros)]
+macro_rules! log { // debug logging hook; the body is commented out so the macro expands to nothing
+ ( $( $t:tt )* ) => {
+ // web_sys::console::log_1(&format!( $( $t )* ).into());
+ };
+}
+
+#[cfg(feature = "wee_alloc")]
+#[global_allocator]
+static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
+
+fn js_to_rust(value: JsValue) -> Result {
+ value.into_serde().map_err(json_error_to_js)
+}
+
+fn rust_to_js<T: Serialize>(value: T) -> Result<JsValue, JsValue> { // serialize any Serialize type into a JsValue
+    JsValue::from_serde(&value).map_err(json_error_to_js)
+}
+
+#[wasm_bindgen]
+#[derive(PartialEq, Debug, Clone)]
+pub struct State { // wasm-exposed wrapper owning the Rust automerge Backend
+ backend: Backend,
+}
+
+#[allow(clippy::new_without_default)]
+#[wasm_bindgen]
+impl State {
+    #[wasm_bindgen(js_name = applyChanges)]
+    pub fn apply_changes(&mut self, changes: JsValue) -> Result<JsValue, JsValue> { // apply remote changes, return the resulting patch
+        log!("apply_changes {:?}", changes);
+        let c: Vec<Change> = js_to_rust(changes)?;
+        let patch = self
+            .backend
+            .apply_changes(c)
+            .map_err(automerge_error_to_js)?;
+        rust_to_js(&patch)
+    }
+
+    #[wasm_bindgen(js_name = applyLocalChange)]
+    pub fn apply_local_change(&mut self, change: JsValue) -> Result<JsValue, JsValue> { // apply one local change request, return the patch
+        log!("apply_local_changes {:?}", change);
+        let c: ChangeRequest = js_to_rust(change)?;
+        let patch = self
+            .backend
+            .apply_local_change(c)
+            .map_err(automerge_error_to_js)?;
+        rust_to_js(&patch)
+    }
+
+    #[wasm_bindgen(js_name = getPatch)]
+    pub fn get_patch(&self) -> Result<JsValue, JsValue> { // patch describing the current document state
+        log!("get_patch");
+        let patch = self.backend.get_patch();
+        rust_to_js(&patch)
+    }
+
+    #[wasm_bindgen(js_name = getChanges)]
+    pub fn get_changes(&self, state: &State) -> Result<JsValue, JsValue> { // changes present here but missing from `state`
+        log!("get_changes");
+        let changes = self
+            .backend
+            .get_changes(&state.backend)
+            .map_err(automerge_error_to_js)?;
+        rust_to_js(&changes)
+    }
+
+    #[wasm_bindgen(js_name = getChangesForActor)]
+    pub fn get_changes_for_actorid(&self, actorid: JsValue) -> Result<JsValue, JsValue> { // all changes authored by one actor
+        log!("get_changes_for_actorid");
+        let a: ActorID = js_to_rust(actorid)?;
+        let changes = self.backend.get_changes_for_actor_id(&a);
+        rust_to_js(&changes)
+    }
+
+    #[wasm_bindgen(js_name = getMissingChanges)]
+    pub fn get_missing_changes(&self, clock: JsValue) -> Result<JsValue, JsValue> { // changes newer than the given clock
+        log!("get_missing_changes");
+        let c: Clock = js_to_rust(clock)?;
+        let changes = self.backend.get_missing_changes(c);
+        rust_to_js(&changes)
+    }
+
+    #[wasm_bindgen(js_name = getMissingDeps)]
+    pub fn get_missing_deps(&self) -> Result<JsValue, JsValue> { // clock of dependencies we have not yet received
+        log!("get_missing_deps");
+        let clock = self.backend.get_missing_deps();
+        rust_to_js(&clock)
+    }
+
+    #[wasm_bindgen(js_name = getClock)]
+    pub fn get_clock(&self) -> Result<JsValue, JsValue> { // this backend's vector clock
+        log!("get_clock");
+        let clock = self.backend.clock();
+        rust_to_js(&clock)
+    }
+
+    #[wasm_bindgen(js_name = getHistory)]
+    pub fn get_history(&self) -> Result<JsValue, JsValue> { // all changes applied so far
+        log!("get_history");
+        let history = self.backend.history();
+        rust_to_js(&history)
+    }
+
+    #[wasm_bindgen(js_name = getUndoStack)]
+    pub fn get_undo_stack(&self) -> Result<JsValue, JsValue> { // pending undo operations
+        log!("get_undo_stack");
+        let stack = self.backend.undo_stack();
+        rust_to_js(&stack)
+    }
+
+    #[wasm_bindgen(js_name = getRedoStack)]
+    pub fn get_redo_stack(&self) -> Result<JsValue, JsValue> { // pending redo operations
+        log!("get_redo_stack");
+        let stack = self.backend.redo_stack();
+        rust_to_js(&stack)
+    }
+
+    #[wasm_bindgen]
+    pub fn merge(&mut self, remote: &State) -> Result<JsValue, JsValue> { // pull remote's changes into self, return the patch
+        log!("merge");
+        let patch = self
+            .backend
+            .merge(&remote.backend)
+            .map_err(automerge_error_to_js)?;
+        rust_to_js(&patch)
+    }
+
+    #[wasm_bindgen]
+    pub fn fork(&self) -> State { // deep copy of the whole backend
+        log!("fork");
+        self.clone()
+    }
+
+    #[wasm_bindgen]
+    #[wasm_bindgen(js_name = forkAt)]
+    pub fn fork_at(&self, _clock: JsValue) -> Result<State, JsValue> { // fork containing only changes covered by `_clock`
+        log!("fork_at");
+        let clock: Clock = js_to_rust(_clock)?;
+        let changes = self
+            .backend
+            .history()
+            .iter()
+            .filter(|change| clock.get(&change.actor_id) >= change.seq) // keep changes at or below the clock entry
+            .map(|&c| c.clone()) // NOTE(review): assumes history() yields references — confirm element type
+            .collect();
+        let mut fork = State {
+            backend: Backend::init(),
+        };
+        let _patch = fork
+            .backend
+            .apply_changes(changes)
+            .map_err(automerge_error_to_js)?;
+        Ok(fork)
+    }
+
+    #[wasm_bindgen]
+    pub fn new() -> State { // fresh, empty backend
+        State {
+            backend: Backend::init(),
+        }
+    }
+}
+
+fn automerge_error_to_js(err: AutomergeError) -> JsValue {
+ JsValue::from(std::format!("Automerge error: {}", err))
+}
+
+fn json_error_to_js(err: serde_json::Error) -> JsValue {
+ JsValue::from(std::format!("serde_json error: {}", err))
+}
diff --git a/automerge-backend-wasm/test/backend_test.js b/automerge-backend-wasm/test/backend_test.js
new file mode 100644
index 00000000..c1e2736b
--- /dev/null
+++ b/automerge-backend-wasm/test/backend_test.js
@@ -0,0 +1,446 @@
+const assert = require('assert')
+const { List } = require('immutable')
+const Backend = require('..')
+const uuid = require('automerge/src/uuid')
+const ROOT_ID = '00000000-0000-0000-0000-000000000000'
+
+describe('Automerge.Backend', () => {
+ describe('incremental diffs', () => {
+ it('should assign to a key in a map', () => {
+ const actor = uuid()
+ const change1 = {actor, seq: 1, deps: {}, ops: [
+ {action: 'set', obj: ROOT_ID, key: 'bird', value: 'magpie'}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch1] = Backend.applyChanges(s0, [change1])
+ assert.deepEqual(patch1, {
+ canUndo: false, canRedo: false, clock: {[actor]: 1}, deps: {[actor]: 1},
+ diffs: [{action: 'set', obj: ROOT_ID, type: 'map', key: 'bird', value: 'magpie'}]
+ })
+ })
+
+ it('should increment a key in a map', () => {
+ const actor = uuid()
+ const change1 = {actor, seq: 1, deps: {}, ops: [
+ {action: 'set', obj: ROOT_ID, key: 'counter', value: 1, datatype: 'counter'}
+ ]}
+ const change2 = {actor, seq: 2, deps: {}, ops: [
+ {action: 'inc', obj: ROOT_ID, key: 'counter', value: 2}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch1] = Backend.applyChanges(s0, [change1])
+ const [s2, patch2] = Backend.applyChanges(s1, [change2])
+ assert.deepEqual(patch2, {
+ canUndo: false, canRedo: false, clock: {[actor]: 2}, deps: {[actor]: 2},
+ diffs: [{action: 'set', obj: ROOT_ID, type: 'map', key: 'counter', value: 3, datatype: 'counter'}]
+ })
+ })
+
+ it('should make a conflict on assignment to the same key', () => {
+ const change1 = {actor: 'actor1', seq: 1, deps: {}, ops: [
+ {action: 'set', obj: ROOT_ID, key: 'bird', value: 'magpie'}
+ ]}
+ const change2 = {actor: 'actor2', seq: 1, deps: {}, ops: [
+ {action: 'set', obj: ROOT_ID, key: 'bird', value: 'blackbird'}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch1] = Backend.applyChanges(s0, [change1])
+ const [s2, patch2] = Backend.applyChanges(s1, [change2])
+ assert.deepEqual(patch2, {
+ canUndo: false, canRedo: false, clock: {actor1: 1, actor2: 1}, deps: {actor1: 1, actor2: 1},
+ diffs: [{action: 'set', obj: ROOT_ID, type: 'map', key: 'bird', value: 'blackbird',
+ conflicts: [{actor: 'actor1', value: 'magpie'}]}
+ ]})
+ })
+
+ it('should delete a key from a map', () => {
+ const actor = uuid()
+ const change1 = {actor, seq: 1, deps: {}, ops: [
+ {action: 'set', obj: ROOT_ID, key: 'bird', value: 'magpie'}
+ ]}
+ const change2 = {actor, seq: 2, deps: {}, ops: [
+ {action: 'del', obj: ROOT_ID, key: 'bird'}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch1] = Backend.applyChanges(s0, [change1])
+ const [s2, patch2] = Backend.applyChanges(s1, [change2])
+ assert.deepEqual(patch2, {
+ canUndo: false, canRedo: false, clock: {[actor]: 2}, deps: {[actor]: 2},
+ diffs: [{action: 'remove', obj: ROOT_ID, type: 'map', key: 'bird'}]
+ })
+ })
+
+ it('should create nested maps', () => {
+ const birds = uuid(), actor = uuid()
+ const change1 = {actor, seq: 1, deps: {}, ops: [
+ {action: 'makeMap', obj: birds},
+ {action: 'set', obj: birds, key: 'wrens', value: 3},
+ {action: 'link', obj: ROOT_ID, key: 'birds', value: birds}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch1] = Backend.applyChanges(s0, [change1])
+ assert.deepEqual(patch1, {
+ canUndo: false, canRedo: false, clock: {[actor]: 1}, deps: {[actor]: 1},
+ diffs: [
+ {action: 'create', obj: birds, type: 'map'},
+ {action: 'set', obj: birds, type: 'map', key: 'wrens', value: 3},
+ {action: 'set', obj: ROOT_ID, type: 'map', key: 'birds', value: birds, link: true}
+ ]
+ })
+ })
+
+ it('should assign to keys in nested maps', () => {
+ const birds = uuid(), actor = uuid()
+ const change1 = {actor, seq: 1, deps: {}, ops: [
+ {action: 'makeMap', obj: birds},
+ {action: 'set', obj: birds, key: 'wrens', value: 3},
+ {action: 'link', obj: ROOT_ID, key: 'birds', value: birds}
+ ]}
+ const change2 = {actor, seq: 2, deps: {}, ops: [
+ {action: 'set', obj: birds, key: 'sparrows', value: 15}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch1] = Backend.applyChanges(s0, [change1])
+ const [s2, patch2] = Backend.applyChanges(s1, [change2])
+ assert.deepEqual(patch2, {
+ canUndo: false, canRedo: false, clock: {[actor]: 2}, deps: {[actor]: 2},
+ diffs: [{action: 'set', obj: birds, type: 'map', key: 'sparrows', value: 15}]
+ })
+ })
+
+ it('should create lists', () => {
+ const birds = uuid(), actor = uuid()
+ const change1 = {actor, seq: 1, deps: {}, ops: [
+ {action: 'makeList', obj: birds},
+ {action: 'ins', obj: birds, key: '_head', elem: 1},
+ {action: 'set', obj: birds, key: `${actor}:1`, value: 'chaffinch'},
+ {action: 'link', obj: ROOT_ID, key: 'birds', value: birds}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch1] = Backend.applyChanges(s0, [change1])
+ assert.deepEqual(patch1, {
+ canUndo: false, canRedo: false, clock: {[actor]: 1}, deps: {[actor]: 1},
+ diffs: [
+ {action: 'create', obj: birds, type: 'list'},
+ {action: 'insert', obj: birds, type: 'list', index: 0, value: 'chaffinch', elemId: `${actor}:1`},
+ {action: 'set', obj: ROOT_ID, type: 'map', key: 'birds', value: birds, link: true}
+ ]
+ })
+ })
+
+ it('should apply updates inside lists', () => {
+ const birds = uuid(), actor = uuid()
+ const change1 = {actor, seq: 1, deps: {}, ops: [
+ {action: 'makeList', obj: birds},
+ {action: 'ins', obj: birds, key: '_head', elem: 1},
+ {action: 'set', obj: birds, key: `${actor}:1`, value: 'chaffinch'},
+ {action: 'link', obj: ROOT_ID, key: 'birds', value: birds}
+ ]}
+ const change2 = {actor, seq: 2, deps: {}, ops: [
+ {action: 'set', obj: birds, key: `${actor}:1`, value: 'greenfinch'}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch1] = Backend.applyChanges(s0, [change1])
+ const [s2, patch2] = Backend.applyChanges(s1, [change2])
+ assert.deepEqual(patch2, {
+ canUndo: false, canRedo: false, clock: {[actor]: 2}, deps: {[actor]: 2},
+ diffs: [{action: 'set', obj: birds, type: 'list', index: 0, value: 'greenfinch'}]
+ })
+ })
+
+ it('should delete list elements', () => {
+ const birds = uuid(), actor = uuid()
+ const change1 = {actor, seq: 1, deps: {}, ops: [
+ {action: 'makeList', obj: birds},
+ {action: 'ins', obj: birds, key: '_head', elem: 1},
+ {action: 'set', obj: birds, key: `${actor}:1`, value: 'chaffinch'},
+ {action: 'link', obj: ROOT_ID, key: 'birds', value: birds}
+ ]}
+ const change2 = {actor, seq: 2, deps: {}, ops: [
+ {action: 'del', obj: birds, key: `${actor}:1`}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch1] = Backend.applyChanges(s0, [change1])
+ const [s2, patch2] = Backend.applyChanges(s1, [change2])
+ assert.deepEqual(patch2, {
+ canUndo: false, canRedo: false, clock: {[actor]: 2}, deps: {[actor]: 2},
+ diffs: [{action: 'remove', obj: birds, type: 'list', index: 0}]
+ })
+ })
+
+ it('should handle list element insertion and deletion in the same change', () => {
+ const birds = uuid(), actor = uuid()
+ const change1 = {actor, seq: 1, deps: {}, ops: [
+ {action: 'makeList', obj: birds},
+ {action: 'link', obj: ROOT_ID, key: 'birds', value: birds}
+ ]}
+ const change2 = {actor, seq: 2, deps: {}, ops: [
+ {action: 'ins', obj: birds, key: '_head', elem: 1},
+ {action: 'del', obj: birds, key: `${actor}:1`}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch1] = Backend.applyChanges(s0, [change1])
+ const [s2, patch2] = Backend.applyChanges(s1, [change2])
+ assert.deepEqual(patch2, {
+ canUndo: false, canRedo: false, clock: {[actor]: 2}, deps: {[actor]: 2},
+ diffs: [{action: 'maxElem', obj: birds, value: 1, type: 'list'}]
+ })
+ })
+
+ it('should support Date objects at the root', () => {
+ const now = new Date()
+ const actor = uuid(), change = {actor, seq: 1, deps: {}, ops: [
+ {action: 'set', obj: ROOT_ID, key: 'now', value: now.getTime(), datatype: 'timestamp'}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch] = Backend.applyChanges(s0, [change])
+ assert.deepEqual(patch, {
+ canUndo: false, canRedo: false, clock: {[actor]: 1}, deps: {[actor]: 1},
+ diffs: [{action: 'set', obj: ROOT_ID, type: 'map', key: 'now', value: now.getTime(), datatype: 'timestamp'}]
+ })
+ })
+
+ it('should support Date objects in a list', () => {
+ const now = new Date(), list = uuid(), actor = uuid()
+ const change = {actor, seq: 1, deps: {}, ops: [
+ {action: 'makeList', obj: list},
+ {action: 'ins', obj: list, key: '_head', elem: 1},
+ {action: 'set', obj: list, key: `${actor}:1`, value: now.getTime(), datatype: 'timestamp'},
+ {action: 'link', obj: ROOT_ID, key: 'list', value: list}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch] = Backend.applyChanges(s0, [change])
+ assert.deepEqual(patch, {
+ canUndo: false, canRedo: false, clock: {[actor]: 1}, deps: {[actor]: 1},
+ diffs: [
+ {action: 'create', obj: list, type: 'list'},
+ {action: 'insert', obj: list, type: 'list', index: 0,
+ value: now.getTime(), elemId: `${actor}:1`, datatype: 'timestamp'},
+ {action: 'set', obj: ROOT_ID, type: 'map', key: 'list', value: list, link: true}
+ ]
+ })
+ })
+ })
+
+ describe('applyLocalChange()', () => {
+ it('should apply change requests', () => {
+ const actor = uuid()
+ const change1 = {requestType: 'change', actor, seq: 1, deps: {}, ops: [
+ {action: 'set', obj: ROOT_ID, key: 'bird', value: 'magpie'}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch1] = Backend.applyLocalChange(s0, change1)
+ assert.deepEqual(patch1, {
+ actor, seq: 1, canUndo: true, canRedo: false, clock: {[actor]: 1}, deps: {[actor]: 1},
+ diffs: [{action: 'set', obj: ROOT_ID, type: 'map', key: 'bird', value: 'magpie'}]
+ })
+ })
+
+ it('should throw an exception on duplicate requests', () => {
+ const actor = uuid()
+ const change1 = {requestType: 'change', actor, seq: 1, deps: {}, ops: [
+ {action: 'set', obj: ROOT_ID, key: 'bird', value: 'magpie'}
+ ]}
+ const change2 = {requestType: 'change', actor, seq: 2, deps: {}, ops: [
+ {action: 'set', obj: ROOT_ID, key: 'bird', value: 'jay'}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch1] = Backend.applyLocalChange(s0, change1)
+ const [s2, patch2] = Backend.applyLocalChange(s1, change2)
+// assert.throws(() => Backend.applyLocalChange(s2, change1), /Change request has already been applied/)
+ assert.throws(() => Backend.applyLocalChange(s2, change2), /Change request has already been applied/)
+ })
+ })
+
+ describe('getPatch()', () => {
+ it('should include the most recent value for a key', () => {
+ const actor = uuid()
+ const change1 = {actor, seq: 1, deps: {}, ops: [
+ {action: 'set', obj: ROOT_ID, key: 'bird', value: 'magpie'}
+ ]}
+ const change2 = {actor, seq: 2, deps: {}, ops: [
+ {action: 'set', obj: ROOT_ID, key: 'bird', value: 'blackbird'}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch] = Backend.applyChanges(s0, [change1, change2])
+ assert.deepEqual(Backend.getPatch(s1), {
+ canUndo: false, canRedo: false, clock: {[actor]: 2}, deps: {[actor]: 2},
+ diffs: [{action: 'set', obj: ROOT_ID, type: 'map', key: 'bird', value: 'blackbird'}]
+ })
+ })
+
+ it('should include conflicting values for a key', () => {
+ const change1 = {actor: 'actor1', seq: 1, deps: {}, ops: [
+ {action: 'set', obj: ROOT_ID, key: 'bird', value: 'magpie'}
+ ]}
+ const change2 = {actor: 'actor2', seq: 1, deps: {}, ops: [
+ {action: 'set', obj: ROOT_ID, key: 'bird', value: 'blackbird'}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch] = Backend.applyChanges(s0, [change1, change2])
+ assert.deepEqual(Backend.getPatch(s1), {
+ canUndo: false, canRedo: false, clock: {actor1: 1, actor2: 1}, deps: {actor1: 1, actor2: 1},
+ diffs: [{action: 'set', obj: ROOT_ID, type: 'map', key: 'bird', value: 'blackbird',
+ conflicts: [{actor: 'actor1', value: 'magpie'}]}
+ ]})
+ })
+
+ it('should handle increments for a key in a map', () => {
+ const actor = uuid()
+ const change1 = {actor, seq: 1, deps: {}, ops: [
+ {action: 'set', obj: ROOT_ID, key: 'counter', value: 1, datatype: 'counter'}
+ ]}
+ const change2 = {actor, seq: 2, deps: {}, ops: [
+ {action: 'inc', obj: ROOT_ID, key: 'counter', value: 2}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch] = Backend.applyChanges(s0, [change1, change2])
+ assert.deepEqual(Backend.getPatch(s1), {
+ canUndo: false, canRedo: false, clock: {[actor]: 2}, deps: {[actor]: 2},
+ diffs: [{action: 'set', obj: ROOT_ID, type: 'map', key: 'counter', value: 3, datatype: 'counter'}]
+ })
+ })
+
+ it('should create nested maps', () => {
+ const birds = uuid(), actor = uuid()
+ const change1 = {actor, seq: 1, deps: {}, ops: [
+ {action: 'makeMap', obj: birds},
+ {action: 'set', obj: birds, key: 'wrens', value: 3},
+ {action: 'link', obj: ROOT_ID, key: 'birds', value: birds}
+ ]}
+ const change2 = {actor, seq: 2, deps: {}, ops: [
+ {action: 'del', obj: birds, key: 'wrens'},
+ {action: 'set', obj: birds, key: 'sparrows', value: 15}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch] = Backend.applyChanges(s0, [change1, change2])
+ assert.deepEqual(Backend.getPatch(s1), {
+ canUndo: false, canRedo: false, clock: {[actor]: 2}, deps: {[actor]: 2},
+ diffs: [
+ {action: 'create', obj: birds, type: 'map'},
+ {action: 'set', obj: birds, type: 'map', key: 'sparrows', value: 15},
+ {action: 'set', obj: ROOT_ID, type: 'map', key: 'birds', value: birds, link: true}
+ ]
+ })
+ })
+
+ it('should create lists', () => {
+ const birds = uuid(), actor = uuid()
+ const change1 = {actor, seq: 1, deps: {}, ops: [
+ {action: 'makeList', obj: birds},
+ {action: 'ins', obj: birds, key: '_head', elem: 1},
+ {action: 'set', obj: birds, key: `${actor}:1`, value: 'chaffinch'},
+ {action: 'link', obj: ROOT_ID, key: 'birds', value: birds}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch] = Backend.applyChanges(s0, [change1])
+ assert.deepEqual(Backend.getPatch(s1), {
+ canUndo: false, canRedo: false, clock: {[actor]: 1}, deps: {[actor]: 1},
+ diffs: [
+ {action: 'create', obj: birds, type: 'list'},
+ {action: 'insert', obj: birds, type: 'list', index: 0, value: 'chaffinch', elemId: `${actor}:1`},
+ {action: 'maxElem', obj: birds, type: 'list', value: 1},
+ {action: 'set', obj: ROOT_ID, type: 'map', key: 'birds', value: birds, link: true}
+ ]
+ })
+ })
+
+ it('should include the latest state of a list', () => {
+ const birds = uuid(), actor = uuid()
+ const change1 = {actor, seq: 1, deps: {}, ops: [
+ {action: 'makeList', obj: birds},
+ {action: 'ins', obj: birds, key: '_head', elem: 1},
+ {action: 'set', obj: birds, key: `${actor}:1`, value: 'chaffinch'},
+ {action: 'ins', obj: birds, key: `${actor}:1`, elem: 2},
+ {action: 'set', obj: birds, key: `${actor}:2`, value: 'goldfinch'},
+ {action: 'link', obj: ROOT_ID, key: 'birds', value: birds}
+ ]}
+ const change2 = {actor, seq: 2, deps: {}, ops: [
+ {action: 'del', obj: birds, key: `${actor}:1`},
+ {action: 'ins', obj: birds, key: `${actor}:1`, elem: 3},
+ {action: 'set', obj: birds, key: `${actor}:3`, value: 'greenfinch'},
+ {action: 'set', obj: birds, key: `${actor}:2`, value: 'goldfinches!!'}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch] = Backend.applyChanges(s0, [change1, change2])
+ assert.deepEqual(Backend.getPatch(s1), {
+ canUndo: false, canRedo: false, clock: {[actor]: 2}, deps: {[actor]: 2},
+ diffs: [
+ {action: 'create', obj: birds, type: 'list'},
+ {action: 'insert', obj: birds, type: 'list', index: 0, value: 'greenfinch', elemId: `${actor}:3`},
+ {action: 'insert', obj: birds, type: 'list', index: 1, value: 'goldfinches!!', elemId: `${actor}:2`},
+ {action: 'maxElem', obj: birds, type: 'list', value: 3},
+ {action: 'set', obj: ROOT_ID, type: 'map', key: 'birds', value: birds, link: true}
+ ]
+ })
+ })
+
+ it('should handle nested maps in lists', () => {
+ const todos = uuid(), item = uuid(), actor = uuid()
+ const change = {actor, seq: 1, deps: {}, ops: [
+ {action: 'makeList', obj: todos},
+ {action: 'ins', obj: todos, key: '_head', elem: 1},
+ {action: 'makeMap', obj: item},
+ {action: 'set', obj: item, key: 'done', value: false},
+ {action: 'set', obj: item, key: 'title', value: 'water plants'},
+ {action: 'link', obj: todos, key:`${actor}:1`, value: item},
+ {action: 'link', obj: ROOT_ID, key: 'todos', value: todos}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch] = Backend.applyChanges(s0, [change])
+ assert.deepEqual(Backend.getPatch(s1), {
+ canUndo: false, canRedo: false, clock: {[actor]: 1}, deps: {[actor]: 1},
+ diffs: [
+ {action: 'create', obj: item, type: 'map'},
+ {action: 'set', obj: item, type: 'map', key: 'done', value: false},
+ {action: 'set', obj: item, type: 'map', key: 'title', value: 'water plants'},
+ {action: 'create', obj: todos, type: 'list'},
+ {action: 'insert', obj: todos, type: 'list', index: 0, value: item, link: true, elemId: `${actor}:1`},
+ {action: 'maxElem', obj: todos, type: 'list', value: 1},
+ {action: 'set', obj: ROOT_ID, type: 'map', key: 'todos', value: todos, link: true}
+ ]
+ })
+ })
+
+ it('should include Date objects at the root', () => {
+ const now = new Date()
+ const actor = uuid(), change = {actor, seq: 1, deps: {}, ops: [
+ {action: 'set', obj: ROOT_ID, key: 'now', value: now.getTime(), datatype: 'timestamp'}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch] = Backend.applyChanges(s0, [change])
+ assert.deepEqual(Backend.getPatch(s1), {
+ canUndo: false, canRedo: false, clock: {[actor]: 1}, deps: {[actor]: 1},
+ diffs: [{action: 'set', obj: ROOT_ID, type: 'map', key: 'now', value: now.getTime(), datatype: 'timestamp'}]
+ })
+ })
+
+ it('should include Date objects in a list', () => {
+ const now = new Date(), list = uuid(), actor = uuid()
+ const change = {actor, seq: 1, deps: {}, ops: [
+ {action: 'makeList', obj: list},
+ {action: 'ins', obj: list, key: '_head', elem: 1},
+ {action: 'set', obj: list, key: `${actor}:1`, value: now.getTime(), datatype: 'timestamp'},
+ {action: 'link', obj: ROOT_ID, key: 'list', value: list}
+ ]}
+ const s0 = Backend.init()
+ const [s1, patch] = Backend.applyChanges(s0, [change])
+ assert.deepEqual(Backend.getPatch(s1), {
+ canUndo: false, canRedo: false, clock: {[actor]: 1}, deps: {[actor]: 1},
+ diffs: [
+ {action: 'create', obj: list, type: 'list'},
+ {action: 'insert', obj: list, type: 'list', index: 0, value: now.getTime(), elemId: `${actor}:1`, datatype: 'timestamp'},
+ {action: 'maxElem', obj: list, type: 'list', value: 1},
+ {action: 'set', obj: ROOT_ID, type: 'map', key: 'list', value: list, link: true}
+ ]
+ })
+ })
+ })
+ describe('getHistory()', () => {
+ it('should start with no history ', () => { // a freshly initialized backend has an empty change log
+ const s0 = Backend.init()
+ const history = Backend.getHistory(s0)
+ assert.deepEqual(history,[])
+ })
+ })
+})
diff --git a/automerge-backend-wasm/test/mocha.opts b/automerge-backend-wasm/test/mocha.opts
new file mode 100644
index 00000000..feb9721d
--- /dev/null
+++ b/automerge-backend-wasm/test/mocha.opts
@@ -0,0 +1,3 @@
+--use_strict
+--watch-extensions js
+test/*test*.js
diff --git a/automerge-backend-wasm/tests/app.rs b/automerge-backend-wasm/tests/app.rs
new file mode 100644
index 00000000..fccf4c9f
--- /dev/null
+++ b/automerge-backend-wasm/tests/app.rs
@@ -0,0 +1,22 @@
+#![cfg(target_arch = "wasm32")]
+
+extern crate automerge_backend_wasm;
+
+use automerge_backend::{Key, ObjectID, Operation, PrimitiveValue};
+use wasm_bindgen::JsValue;
+use wasm_bindgen_test::wasm_bindgen_test;
+
+#[wasm_bindgen_test]
+fn test_wasm() { // round-trip an Operation through JsValue serde and assert equality
+ let op1: Operation = Operation::Set {
+ object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string()),
+ key: Key("somekeyid".to_string()),
+ value: PrimitiveValue::Boolean(true),
+ datatype: None,
+ };
+
+ let js_value = JsValue::from_serde(&op1).unwrap();
+ let op2: Operation = js_value.into_serde().unwrap();
+
+ assert_eq!(op1, op2);
+}
diff --git a/automerge-backend-wasm/yarn.lock b/automerge-backend-wasm/yarn.lock
new file mode 100644
index 00000000..655c1688
--- /dev/null
+++ b/automerge-backend-wasm/yarn.lock
@@ -0,0 +1,644 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1
+
+
+ansi-colors@3.2.3:
+ version "3.2.3"
+ resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.3.tgz#57d35b8686e851e2cc04c403f1c00203976a1813"
+ integrity sha512-LEHHyuhlPY3TmuUYMh2oz89lTShfvgbmzaBcxve9t/9Wuy7Dwf4yoAKcND7KFT1HAQfqZ12qtc+DUrBMeKF9nw==
+
+ansi-regex@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998"
+ integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=
+
+ansi-regex@^4.1.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997"
+ integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==
+
+ansi-styles@^3.2.0, ansi-styles@^3.2.1:
+ version "3.2.1"
+ resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d"
+ integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==
+ dependencies:
+ color-convert "^1.9.0"
+
+argparse@^1.0.7:
+ version "1.0.10"
+ resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911"
+ integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==
+ dependencies:
+ sprintf-js "~1.0.2"
+
+automerge@^0.12.1:
+ version "0.12.1"
+ resolved "https://registry.yarnpkg.com/automerge/-/automerge-0.12.1.tgz#8e8ca23affa888c6376ee19068eab573cfa8ba09"
+ integrity sha512-7JOiRk4b6EP/Uj0AjmZTeYICXJmBRHFkL0U3mlTNXuDlUr3c4v/Wb8v0RXiX4UuVgGjkovcjOdiBMkVmzdu2KQ==
+ dependencies:
+ immutable "^3.8.2"
+ transit-immutable-js "^0.7.0"
+ transit-js "^0.8.861"
+ uuid "3.3.2"
+
+balanced-match@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767"
+ integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c=
+
+brace-expansion@^1.1.7:
+ version "1.1.11"
+ resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
+ integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==
+ dependencies:
+ balanced-match "^1.0.0"
+ concat-map "0.0.1"
+
+browser-stdout@1.3.1:
+ version "1.3.1"
+ resolved "https://registry.yarnpkg.com/browser-stdout/-/browser-stdout-1.3.1.tgz#baa559ee14ced73452229bad7326467c61fabd60"
+ integrity sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==
+
+camelcase@^5.0.0:
+ version "5.3.1"
+ resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320"
+ integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==
+
+chalk@^2.0.1:
+ version "2.4.2"
+ resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
+ integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==
+ dependencies:
+ ansi-styles "^3.2.1"
+ escape-string-regexp "^1.0.5"
+ supports-color "^5.3.0"
+
+cliui@^5.0.0:
+ version "5.0.0"
+ resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5"
+ integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==
+ dependencies:
+ string-width "^3.1.0"
+ strip-ansi "^5.2.0"
+ wrap-ansi "^5.1.0"
+
+color-convert@^1.9.0:
+ version "1.9.3"
+ resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8"
+ integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==
+ dependencies:
+ color-name "1.1.3"
+
+color-name@1.1.3:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
+ integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=
+
+concat-map@0.0.1:
+ version "0.0.1"
+ resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
+ integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=
+
+debug@3.2.6:
+ version "3.2.6"
+ resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b"
+ integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==
+ dependencies:
+ ms "^2.1.1"
+
+decamelize@^1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290"
+ integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=
+
+define-properties@^1.1.2, define-properties@^1.1.3:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1"
+ integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==
+ dependencies:
+ object-keys "^1.0.12"
+
+diff@3.5.0:
+ version "3.5.0"
+ resolved "https://registry.yarnpkg.com/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12"
+ integrity sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==
+
+emoji-regex@^7.0.1:
+ version "7.0.3"
+ resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156"
+ integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==
+
+es-abstract@^1.17.0-next.1:
+ version "1.17.4"
+ resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.17.4.tgz#e3aedf19706b20e7c2594c35fc0d57605a79e184"
+ integrity sha512-Ae3um/gb8F0mui/jPL+QiqmglkUsaQf7FwBEHYIFkztkneosu9imhqHpBzQ3h1vit8t5iQ74t6PEVvphBZiuiQ==
+ dependencies:
+ es-to-primitive "^1.2.1"
+ function-bind "^1.1.1"
+ has "^1.0.3"
+ has-symbols "^1.0.1"
+ is-callable "^1.1.5"
+ is-regex "^1.0.5"
+ object-inspect "^1.7.0"
+ object-keys "^1.1.1"
+ object.assign "^4.1.0"
+ string.prototype.trimleft "^2.1.1"
+ string.prototype.trimright "^2.1.1"
+
+es-to-primitive@^1.2.1:
+ version "1.2.1"
+ resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a"
+ integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==
+ dependencies:
+ is-callable "^1.1.4"
+ is-date-object "^1.0.1"
+ is-symbol "^1.0.2"
+
+escape-string-regexp@1.0.5, escape-string-regexp@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
+ integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=
+
+esprima@^4.0.0:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71"
+ integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==
+
+find-up@3.0.0, find-up@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73"
+ integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==
+ dependencies:
+ locate-path "^3.0.0"
+
+flat@^4.1.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/flat/-/flat-4.1.0.tgz#090bec8b05e39cba309747f1d588f04dbaf98db2"
+ integrity sha512-Px/TiLIznH7gEDlPXcUD4KnBusa6kR6ayRUVcnEAbreRIuhkqow/mun59BuRXwoYk7ZQOLW1ZM05ilIvK38hFw==
+ dependencies:
+ is-buffer "~2.0.3"
+
+fs.realpath@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
+ integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8=
+
+function-bind@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
+ integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==
+
+get-caller-file@^2.0.1:
+ version "2.0.5"
+ resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e"
+ integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==
+
+glob@7.1.3:
+ version "7.1.3"
+ resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.3.tgz#3960832d3f1574108342dafd3a67b332c0969df1"
+ integrity sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==
+ dependencies:
+ fs.realpath "^1.0.0"
+ inflight "^1.0.4"
+ inherits "2"
+ minimatch "^3.0.4"
+ once "^1.3.0"
+ path-is-absolute "^1.0.0"
+
+glob@^7.1.3:
+ version "7.1.4"
+ resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.4.tgz#aa608a2f6c577ad357e1ae5a5c26d9a8d1969255"
+ integrity sha512-hkLPepehmnKk41pUGm3sYxoFs/umurYfYJCerbXEyFIWcAzvpipAgVkBqqT9RBKMGjnq6kMuyYwha6csxbiM1A==
+ dependencies:
+ fs.realpath "^1.0.0"
+ inflight "^1.0.4"
+ inherits "2"
+ minimatch "^3.0.4"
+ once "^1.3.0"
+ path-is-absolute "^1.0.0"
+
+growl@1.10.5:
+ version "1.10.5"
+ resolved "https://registry.yarnpkg.com/growl/-/growl-1.10.5.tgz#f2735dc2283674fa67478b10181059355c369e5e"
+ integrity sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==
+
+has-flag@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
+ integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0=
+
+has-symbols@^1.0.0, has-symbols@^1.0.1:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8"
+ integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==
+
+has@^1.0.3:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796"
+ integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==
+ dependencies:
+ function-bind "^1.1.1"
+
+he@1.2.0:
+ version "1.2.0"
+ resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f"
+ integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==
+
+immutable@^3.8.2:
+ version "3.8.2"
+ resolved "https://registry.yarnpkg.com/immutable/-/immutable-3.8.2.tgz#c2439951455bb39913daf281376f1530e104adf3"
+ integrity sha1-wkOZUUVbs5kT2vKBN28VMOEErfM=
+
+inflight@^1.0.4:
+ version "1.0.6"
+ resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
+ integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=
+ dependencies:
+ once "^1.3.0"
+ wrappy "1"
+
+inherits@2:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
+ integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
+
+is-buffer@~2.0.3:
+ version "2.0.4"
+ resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.4.tgz#3e572f23c8411a5cfd9557c849e3665e0b290623"
+ integrity sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A==
+
+is-callable@^1.1.4, is-callable@^1.1.5:
+ version "1.1.5"
+ resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.5.tgz#f7e46b596890456db74e7f6e976cb3273d06faab"
+ integrity sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==
+
+is-date-object@^1.0.1:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.2.tgz#bda736f2cd8fd06d32844e7743bfa7494c3bfd7e"
+ integrity sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==
+
+is-fullwidth-code-point@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f"
+ integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=
+
+is-regex@^1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.5.tgz#39d589a358bf18967f726967120b8fc1aed74eae"
+ integrity sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==
+ dependencies:
+ has "^1.0.3"
+
+is-symbol@^1.0.2:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937"
+ integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==
+ dependencies:
+ has-symbols "^1.0.1"
+
+isexe@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
+ integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=
+
+js-yaml@3.13.1:
+ version "3.13.1"
+ resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847"
+ integrity sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==
+ dependencies:
+ argparse "^1.0.7"
+ esprima "^4.0.0"
+
+locate-path@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e"
+ integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==
+ dependencies:
+ p-locate "^3.0.0"
+ path-exists "^3.0.0"
+
+lodash@^4.17.15:
+ version "4.17.15"
+ resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548"
+ integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==
+
+log-symbols@2.2.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a"
+ integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg==
+ dependencies:
+ chalk "^2.0.1"
+
+minimatch@3.0.4, minimatch@^3.0.4:
+ version "3.0.4"
+ resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083"
+ integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==
+ dependencies:
+ brace-expansion "^1.1.7"
+
+minimist@0.0.8:
+ version "0.0.8"
+ resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d"
+ integrity sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=
+
+mkdirp@0.5.1:
+ version "0.5.1"
+ resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903"
+ integrity sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=
+ dependencies:
+ minimist "0.0.8"
+
+mocha@^6.2.0:
+ version "6.2.2"
+ resolved "https://registry.yarnpkg.com/mocha/-/mocha-6.2.2.tgz#5d8987e28940caf8957a7d7664b910dc5b2fea20"
+ integrity sha512-FgDS9Re79yU1xz5d+C4rv1G7QagNGHZ+iXF81hO8zY35YZZcLEsJVfFolfsqKFWunATEvNzMK0r/CwWd/szO9A==
+ dependencies:
+ ansi-colors "3.2.3"
+ browser-stdout "1.3.1"
+ debug "3.2.6"
+ diff "3.5.0"
+ escape-string-regexp "1.0.5"
+ find-up "3.0.0"
+ glob "7.1.3"
+ growl "1.10.5"
+ he "1.2.0"
+ js-yaml "3.13.1"
+ log-symbols "2.2.0"
+ minimatch "3.0.4"
+ mkdirp "0.5.1"
+ ms "2.1.1"
+ node-environment-flags "1.0.5"
+ object.assign "4.1.0"
+ strip-json-comments "2.0.1"
+ supports-color "6.0.0"
+ which "1.3.1"
+ wide-align "1.1.3"
+ yargs "13.3.0"
+ yargs-parser "13.1.1"
+ yargs-unparser "1.6.0"
+
+ms@2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a"
+ integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==
+
+ms@^2.1.1:
+ version "2.1.2"
+ resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
+ integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
+
+node-environment-flags@1.0.5:
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/node-environment-flags/-/node-environment-flags-1.0.5.tgz#fa930275f5bf5dae188d6192b24b4c8bbac3d76a"
+ integrity sha512-VNYPRfGfmZLx0Ye20jWzHUjyTW/c+6Wq+iLhDzUI4XmhrDd9l/FozXV3F2xOaXjvp0co0+v1YSR3CMP6g+VvLQ==
+ dependencies:
+ object.getownpropertydescriptors "^2.0.3"
+ semver "^5.7.0"
+
+object-inspect@^1.7.0:
+ version "1.7.0"
+ resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.7.0.tgz#f4f6bd181ad77f006b5ece60bd0b6f398ff74a67"
+ integrity sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==
+
+object-keys@^1.0.11, object-keys@^1.0.12, object-keys@^1.1.1:
+ version "1.1.1"
+ resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e"
+ integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==
+
+object.assign@4.1.0, object.assign@^4.1.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da"
+ integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==
+ dependencies:
+ define-properties "^1.1.2"
+ function-bind "^1.1.1"
+ has-symbols "^1.0.0"
+ object-keys "^1.0.11"
+
+object.getownpropertydescriptors@^2.0.3:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.0.tgz#369bf1f9592d8ab89d712dced5cb81c7c5352649"
+ integrity sha512-Z53Oah9A3TdLoblT7VKJaTDdXdT+lQO+cNpKVnya5JDe9uLvzu1YyY1yFDFrcxrlRgWrEFH0jJtD/IbuwjcEVg==
+ dependencies:
+ define-properties "^1.1.3"
+ es-abstract "^1.17.0-next.1"
+
+once@^1.3.0:
+ version "1.4.0"
+ resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
+ integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
+ dependencies:
+ wrappy "1"
+
+p-limit@^2.0.0:
+ version "2.2.2"
+ resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.2.2.tgz#61279b67721f5287aa1c13a9a7fbbc48c9291b1e"
+ integrity sha512-WGR+xHecKTr7EbUEhyLSh5Dube9JtdiG78ufaeLxTgpudf/20KqyMioIUZJAezlTIi6evxuoUs9YXc11cU+yzQ==
+ dependencies:
+ p-try "^2.0.0"
+
+p-locate@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4"
+ integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==
+ dependencies:
+ p-limit "^2.0.0"
+
+p-try@^2.0.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6"
+ integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==
+
+path-exists@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515"
+ integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=
+
+path-is-absolute@^1.0.0:
+ version "1.0.1"
+ resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
+ integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18=
+
+require-directory@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42"
+ integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I=
+
+require-main-filename@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b"
+ integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==
+
+rimraf@^2.6.3:
+ version "2.7.1"
+ resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec"
+ integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==
+ dependencies:
+ glob "^7.1.3"
+
+semver@^5.7.0:
+ version "5.7.1"
+ resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
+ integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==
+
+set-blocking@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7"
+ integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc=
+
+sprintf-js@~1.0.2:
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
+ integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=
+
+"string-width@^1.0.2 || 2":
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e"
+ integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==
+ dependencies:
+ is-fullwidth-code-point "^2.0.0"
+ strip-ansi "^4.0.0"
+
+string-width@^3.0.0, string-width@^3.1.0:
+ version "3.1.0"
+ resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961"
+ integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==
+ dependencies:
+ emoji-regex "^7.0.1"
+ is-fullwidth-code-point "^2.0.0"
+ strip-ansi "^5.1.0"
+
+string.prototype.trimleft@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/string.prototype.trimleft/-/string.prototype.trimleft-2.1.1.tgz#9bdb8ac6abd6d602b17a4ed321870d2f8dcefc74"
+ integrity sha512-iu2AGd3PuP5Rp7x2kEZCrB2Nf41ehzh+goo8TV7z8/XDBbsvc6HQIlUl9RjkZ4oyrW1XM5UwlGl1oVEaDjg6Ag==
+ dependencies:
+ define-properties "^1.1.3"
+ function-bind "^1.1.1"
+
+string.prototype.trimright@^2.1.1:
+ version "2.1.1"
+ resolved "https://registry.yarnpkg.com/string.prototype.trimright/-/string.prototype.trimright-2.1.1.tgz#440314b15996c866ce8a0341894d45186200c5d9"
+ integrity sha512-qFvWL3/+QIgZXVmJBfpHmxLB7xsUXz6HsUmP8+5dRaC3Q7oKUv9Vo6aMCRZC1smrtyECFsIT30PqBJ1gTjAs+g==
+ dependencies:
+ define-properties "^1.1.3"
+ function-bind "^1.1.1"
+
+strip-ansi@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f"
+ integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8=
+ dependencies:
+ ansi-regex "^3.0.0"
+
+strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0:
+ version "5.2.0"
+ resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae"
+ integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==
+ dependencies:
+ ansi-regex "^4.1.0"
+
+strip-json-comments@2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
+ integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo=
+
+supports-color@6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.0.0.tgz#76cfe742cf1f41bb9b1c29ad03068c05b4c0e40a"
+ integrity sha512-on9Kwidc1IUQo+bQdhi8+Tijpo0e1SS6RoGo2guUwn5vdaxw8RXOF9Vb2ws+ihWOmh4JnCJOvaziZWP1VABaLg==
+ dependencies:
+ has-flag "^3.0.0"
+
+supports-color@^5.3.0:
+ version "5.5.0"
+ resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f"
+ integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==
+ dependencies:
+ has-flag "^3.0.0"
+
+transit-immutable-js@^0.7.0:
+ version "0.7.0"
+ resolved "https://registry.yarnpkg.com/transit-immutable-js/-/transit-immutable-js-0.7.0.tgz#993e25089b6311ff402140f556276d6d253005d9"
+ integrity sha1-mT4lCJtjEf9AIUD1VidtbSUwBdk=
+
+transit-js@^0.8.861:
+ version "0.8.861"
+ resolved "https://registry.yarnpkg.com/transit-js/-/transit-js-0.8.861.tgz#829e516b80349a41fff5d59f5e6993b5473f72c9"
+ integrity sha512-4O9OrYPZw6C0M5gMTvaeOp+xYz6EF79JsyxIvqXHlt+pisSrioJWFOE80N8aCPoJLcNaXF442RZrVtdmd4wkDQ==
+
+uuid@3.3.2:
+ version "3.3.2"
+ resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.2.tgz#1b4af4955eb3077c501c23872fc6513811587131"
+ integrity sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==
+
+which-module@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a"
+ integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=
+
+which@1.3.1:
+ version "1.3.1"
+ resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
+ integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==
+ dependencies:
+ isexe "^2.0.0"
+
+wide-align@1.1.3:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457"
+ integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==
+ dependencies:
+ string-width "^1.0.2 || 2"
+
+wrap-ansi@^5.1.0:
+ version "5.1.0"
+ resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09"
+ integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==
+ dependencies:
+ ansi-styles "^3.2.0"
+ string-width "^3.0.0"
+ strip-ansi "^5.0.0"
+
+wrappy@1:
+ version "1.0.2"
+ resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
+ integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
+
+y18n@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.0.tgz#95ef94f85ecc81d007c264e190a120f0a3c8566b"
+ integrity sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==
+
+yargs-parser@13.1.1, yargs-parser@^13.1.1:
+ version "13.1.1"
+ resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.1.tgz#d26058532aa06d365fe091f6a1fc06b2f7e5eca0"
+ integrity sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ==
+ dependencies:
+ camelcase "^5.0.0"
+ decamelize "^1.2.0"
+
+yargs-unparser@1.6.0:
+ version "1.6.0"
+ resolved "https://registry.yarnpkg.com/yargs-unparser/-/yargs-unparser-1.6.0.tgz#ef25c2c769ff6bd09e4b0f9d7c605fb27846ea9f"
+ integrity sha512-W9tKgmSn0DpSatfri0nx52Joq5hVXgeLiqR/5G0sZNDoLZFOr/xjBUDcShCOGNsBnEMNo1KAMBkTej1Hm62HTw==
+ dependencies:
+ flat "^4.1.0"
+ lodash "^4.17.15"
+ yargs "^13.3.0"
+
+yargs@13.3.0, yargs@^13.3.0:
+ version "13.3.0"
+ resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.0.tgz#4c657a55e07e5f2cf947f8a366567c04a0dedc83"
+ integrity sha512-2eehun/8ALW8TLoIl7MVaRUrg+yCnenu8B4kBlRxj3GJGDKU1Og7sMXPNm1BYyM1DOJmTZ4YeN/Nwxv+8XJsUA==
+ dependencies:
+ cliui "^5.0.0"
+ find-up "^3.0.0"
+ get-caller-file "^2.0.1"
+ require-directory "^2.1.1"
+ require-main-filename "^2.0.0"
+ set-blocking "^2.0.0"
+ string-width "^3.0.0"
+ which-module "^2.0.0"
+ y18n "^4.0.0"
+ yargs-parser "^13.1.1"
diff --git a/automerge-backend/Cargo.toml b/automerge-backend/Cargo.toml
new file mode 100644
index 00000000..cdde013a
--- /dev/null
+++ b/automerge-backend/Cargo.toml
@@ -0,0 +1,18 @@
+[package]
+name = "automerge-backend"
+version = "0.0.1"
+authors = ["Alex Good "]
+edition = "2018"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+serde = { version = "^1.0", features=["derive"] }
+serde_json = "^1.0"
+uuid = { version = "^0.5.1", features=["v4"] }
+
+[dependencies.web-sys]
+version = "0.3"
+features = [
+ "console",
+]
diff --git a/automerge-backend/src/actor_states.rs b/automerge-backend/src/actor_states.rs
new file mode 100644
index 00000000..70afb1d0
--- /dev/null
+++ b/automerge-backend/src/actor_states.rs
@@ -0,0 +1,117 @@
+use crate::error::AutomergeError;
+use crate::operation_with_metadata::OperationWithMetadata;
+use crate::protocol::{ActorID, Change, Clock};
+use std::collections::HashMap;
+use std::rc::Rc;
+
+// ActorStates manages
+// `change_by_actor` - a seq ordered vec of changes per actor
+// `deps_by_actor` - a seq ordered vec of transitive deps per actor
+// `history` - a list of all changes received in order
+// this struct is used for telling if two ops are concurrent or referencing
+// historic changes
+
+#[derive(Debug, PartialEq, Clone)]
+pub struct ActorStates {
+ pub history: Vec>,
+ change_by_actor: HashMap>>,
+ deps_by_actor: HashMap>,
+ // this lets me return a reference to an empty clock when needed
+ // without having to do any extra allocations or copies
+ // in the default path
+ empty_clock: Clock,
+}
+
+impl ActorStates {
+ pub(crate) fn new() -> ActorStates {
+ ActorStates {
+ change_by_actor: HashMap::new(),
+ deps_by_actor: HashMap::new(),
+ empty_clock: Clock::empty(),
+ history: Vec::new(),
+ }
+ }
+
+ pub fn is_concurrent(&self, op1: &OperationWithMetadata, op2: &OperationWithMetadata) -> bool {
+ let clock1 = self.get_deps(&op1.actor_id, op1.sequence);
+ let clock2 = self.get_deps(&op2.actor_id, op2.sequence);
+ clock1.get(&op2.actor_id) < op2.sequence && clock2.get(&op1.actor_id) < op1.sequence
+ }
+
+ pub fn get(&self, actor_id: &ActorID) -> Vec<&Change> {
+ self.change_by_actor
+ .get(actor_id)
+ .map(|vec| vec.iter().map(|c| c.as_ref()).collect() )
+ .unwrap_or_default()
+ }
+
+ fn get_change(&self, actor_id: &ActorID, seq: u32) -> Option<&Rc> {
+ self.change_by_actor
+ .get(actor_id)
+ .and_then(|v| v.get((seq as usize) - 1))
+ }
+
+ fn get_deps(&self, actor_id: &ActorID, seq: u32) -> &Clock {
+ self.get_deps_option(actor_id, seq)
+ .unwrap_or(&self.empty_clock)
+ }
+
+ fn get_deps_option(&self, actor_id: &ActorID, seq: u32) -> Option<&Clock> {
+ self.deps_by_actor
+ .get(actor_id)
+ .and_then(|v| v.get((seq as usize) - 1))
+ }
+
+ fn transitive_deps(&self, clock: &Clock) -> Clock {
+ let mut all_deps = clock.clone();
+ clock
+ .into_iter()
+ .filter_map(|(actor_id, seq)| self.get_deps_option(actor_id, *seq))
+ .for_each(|deps| all_deps.merge(deps));
+ all_deps
+ }
+
+ // if the change is new - return Ok(true)
+ // if the change is a duplicate - dont insert and return Ok(false)
+ // if the change has a dup actor:seq but is different error
+ pub(crate) fn add_change(&mut self, change: Change) -> Result {
+ if let Some(c) = self.get_change(&change.actor_id, change.seq) {
+ if &change == c.as_ref() {
+ return Ok(false);
+ } else {
+ return Err(AutomergeError::InvalidChange(
+ "Invalid reuse of sequence number for actor".to_string(),
+ ));
+ }
+ }
+
+ let deps = change.dependencies.with(&change.actor_id, change.seq - 1);
+ let all_deps = self.transitive_deps(&deps);
+ let actor_id = change.actor_id.clone();
+
+ let rc = Rc::new(change);
+ self.history.push(rc.clone());
+
+ let actor_changes = self
+ .change_by_actor
+ .entry(actor_id.clone())
+ .or_insert_with(Vec::new);
+
+ if (rc.seq as usize) - 1 != actor_changes.len() {
+ panic!(
+ "cant push c={:?}:{:?} at ${:?}",
+ rc.actor_id,
+ rc.seq,
+ actor_changes.len()
+ );
+ }
+
+ actor_changes.push(rc);
+
+ let actor_deps = self.deps_by_actor.entry(actor_id).or_insert_with(Vec::new);
+
+ actor_deps.push(all_deps);
+
+ Ok(true)
+ }
+}
diff --git a/automerge-backend/src/backend.rs b/automerge-backend/src/backend.rs
new file mode 100644
index 00000000..e0cc5b07
--- /dev/null
+++ b/automerge-backend/src/backend.rs
@@ -0,0 +1,1166 @@
+use crate::{
+ ActorID, AutomergeError, Change, ChangeRequest, ChangeRequestType, Clock, Diff, OpSet,
+ Operation, Patch,
+};
+
+/// The automerge backend: owns the CRDT state (`OpSet`) and turns incoming
+/// changes / local change requests into patches for a frontend to apply.
+#[derive(Debug, PartialEq, Clone)]
+pub struct Backend {
+ op_set: OpSet,
+}
+
+impl Backend {
+    /// Create a backend holding an empty document.
+    pub fn init() -> Backend {
+        Backend {
+            op_set: OpSet::init(),
+        }
+    }
+
+    /// Apply remote changes and return a patch describing the resulting
+    /// document modifications. (Generics restored: the patch text had lost
+    /// `Vec<Change>`, the return type and the `collect` turbofish.)
+    pub fn apply_changes(&mut self, changes: Vec<Change>) -> Result<Patch, AutomergeError> {
+        let nested_diffs = changes
+            .into_iter()
+            .map(|c| self.op_set.apply_change(c, false))
+            .collect::<Result<Vec<Vec<Diff>>, AutomergeError>>()?;
+        let diffs = nested_diffs.into_iter().flatten().collect();
+        Ok(Patch {
+            actor: None,
+            can_undo: self.op_set.can_undo(),
+            can_redo: self.op_set.can_redo(),
+            clock: self.op_set.clock.clone(),
+            deps: self.op_set.clock.clone(),
+            diffs,
+            seq: None,
+        })
+    }
+
+    /// Apply a change requested by the local frontend (an edit, undo or
+    /// redo) and return the corresponding patch.
+    ///
+    /// # Errors
+    /// `DuplicateChange` if a change with this actor/seq was already applied.
+    pub fn apply_local_change(&mut self, change: ChangeRequest) -> Result<Patch, AutomergeError> {
+        let actor_id = change.actor_id.clone();
+        let seq = change.seq;
+        if self.op_set.clock.get(&actor_id) >= seq {
+            return Err(AutomergeError::DuplicateChange(format!(
+                "Change request has already been applied {} {}",
+                actor_id.0, seq
+            )));
+        }
+        // The three request types only differ in how the diffs are
+        // produced; the patch construction is shared below (the original
+        // triplicated the Patch literal in every arm).
+        let diffs = match change.request_type {
+            ChangeRequestType::Change(ops) => self.op_set.apply_change(
+                Change {
+                    actor_id: change.actor_id,
+                    operations: ops,
+                    seq,
+                    message: change.message,
+                    dependencies: change.dependencies,
+                },
+                change.undoable.unwrap_or(true),
+            )?,
+            ChangeRequestType::Undo => self.op_set.do_undo(
+                change.actor_id.clone(),
+                change.seq,
+                change.message,
+                change.dependencies,
+            )?,
+            ChangeRequestType::Redo => self.op_set.do_redo(
+                change.actor_id.clone(),
+                change.seq,
+                change.message,
+                change.dependencies,
+            )?,
+        };
+        Ok(Patch {
+            actor: Some(actor_id),
+            can_undo: self.op_set.can_undo(),
+            can_redo: self.op_set.can_redo(),
+            clock: self.op_set.clock.clone(),
+            deps: self.op_set.clock.clone(),
+            diffs,
+            seq: Some(seq),
+        })
+    }
+
+    /// Operation groups that an undo request would invert.
+    pub fn undo_stack(&self) -> &Vec<Vec<Operation>> {
+        &self.op_set.undo_stack
+    }
+
+    /// Operation groups that a redo request would replay.
+    pub fn redo_stack(&self) -> &Vec<Vec<Operation>> {
+        &self.op_set.redo_stack
+    }
+
+    /// All changes ever applied, in application order.
+    pub fn history(&self) -> Vec<&Change> {
+        self.op_set.states.history.iter().map(|rc| rc.as_ref()).collect()
+    }
+
+    /// A patch which recreates the entire current document from scratch.
+    pub fn get_patch(&self) -> Patch {
+        Patch {
+            can_undo: false,
+            can_redo: false,
+            clock: self.op_set.clock.clone(),
+            deps: self.op_set.clock.clone(),
+            diffs: self.op_set.object_store.generate_diffs(),
+            actor: None,
+            seq: None,
+        }
+    }
+
+    /// Get changes which are in `other` but not in this backend.
+    ///
+    /// # Errors
+    /// `DivergedState` if the two backends have diverged (each holds
+    /// changes the other lacks), since a one-way diff is then meaningless.
+    pub fn get_changes<'a>(&self, other: &'a Backend) -> Result<Vec<&'a Change>, AutomergeError> {
+        if self.clock().divergent(&other.clock()) {
+            return Err(AutomergeError::DivergedState(
+                "Cannot diff two states that have diverged".to_string(),
+            ));
+        }
+        Ok(other.op_set.get_missing_changes(&self.op_set.clock))
+    }
+
+    /// All changes made by a single actor, in sequence order.
+    pub fn get_changes_for_actor_id(&self, actor_id: &ActorID) -> Vec<&Change> {
+        self.op_set.states.get(actor_id)
+    }
+
+    /// Changes this backend holds which are not covered by `clock`.
+    pub fn get_missing_changes(&self, clock: Clock) -> Vec<&Change> {
+        self.op_set.get_missing_changes(&clock)
+    }
+
+    /// Dependencies referenced by received changes that we have not seen.
+    pub fn get_missing_deps(&self) -> Clock {
+        self.op_set.get_missing_deps()
+    }
+
+    /// Pull any changes present in `remote` but missing here, apply them,
+    /// and return the resulting patch.
+    pub fn merge(&mut self, remote: &Backend) -> Result<Patch, AutomergeError> {
+        let missing_changes = remote
+            .get_missing_changes(self.op_set.clock.clone())
+            .iter()
+            .cloned()
+            .cloned()
+            .collect();
+        self.apply_changes(missing_changes)
+    }
+
+    /// The vector clock of changes this backend has applied.
+    pub fn clock(&self) -> &Clock {
+        &self.op_set.clock
+    }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{
+ ActorID, Backend, Change, ChangeRequest, ChangeRequestType, Clock, Conflict, DataType,
+ Diff, DiffAction, ElementID, ElementValue, Key, MapType, ObjectID, Operation, Patch,
+ PrimitiveValue, SequenceType,
+ };
+
+    /// One `apply_changes` scenario: the changes to feed in and the patch
+    /// the final application must produce. (Restored `Vec<Change>`, which
+    /// the patch text had stripped to a bare `Vec`.)
+    struct ApplyChangeTestCase {
+        name: &'static str,
+        changes: Vec<Change>,
+        expected_patch: Patch,
+    }
+
+ /// Table-driven test: each case applies its changes one at a time and
+ /// asserts that the patch from the *last* application equals
+ /// `expected_patch`. Cases cover map set/delete, counters, conflicts,
+ /// nested maps, and list insert/update/delete.
+ #[test]
+ fn test_diffs() {
+ let actor1 = ActorID::from_string("actor1".to_string());
+ let actor2 = ActorID::from_string("actor2".to_string());
+ let testcases = vec![
+ ApplyChangeTestCase {
+ name: "Assign to key in map",
+ changes: vec![Change {
+ actor_id: actor1.clone(),
+ seq: 1,
+ dependencies: Clock::empty(),
+ message: None,
+ operations: vec![Operation::Set {
+ object_id: ObjectID::Root,
+ key: Key("bird".to_string()),
+ value: PrimitiveValue::Str("magpie".to_string()),
+ datatype: None,
+ }],
+ }],
+ expected_patch: Patch {
+ can_undo: false,
+ can_redo: false,
+ clock: Clock::empty().with(&actor1, 1),
+ deps: Clock::empty().with(&actor1, 1),
+ diffs: vec![Diff {
+ action: DiffAction::SetMapKey(
+ ObjectID::Root,
+ MapType::Map,
+ Key("bird".to_string()),
+ ElementValue::Primitive(PrimitiveValue::Str("magpie".to_string())),
+ None,
+ ),
+ conflicts: Vec::new(),
+ }],
+ seq: None,
+ actor: None,
+ },
+ },
+ ApplyChangeTestCase {
+ name: "Increment a key in a map",
+ changes: vec![
+ Change {
+ actor_id: actor1.clone(),
+ seq: 1,
+ dependencies: Clock::empty(),
+ message: None,
+ operations: vec![Operation::Set {
+ object_id: ObjectID::Root,
+ key: Key("counter".to_string()),
+ value: PrimitiveValue::Number(1.0),
+ datatype: Some(DataType::Counter),
+ }],
+ },
+ Change {
+ actor_id: actor1.clone(),
+ seq: 2,
+ dependencies: Clock::empty(),
+ message: None,
+ operations: vec![Operation::Increment {
+ object_id: ObjectID::Root,
+ key: Key("counter".to_string()),
+ value: 2.0,
+ }],
+ },
+ ],
+ expected_patch: Patch {
+ can_undo: false,
+ can_redo: false,
+ clock: Clock::empty().with(&actor1, 2),
+ deps: Clock::empty().with(&actor1, 2),
+ diffs: vec![Diff {
+ action: DiffAction::SetMapKey(
+ ObjectID::Root,
+ MapType::Map,
+ Key("counter".to_string()),
+ ElementValue::Primitive(PrimitiveValue::Number(3.0)),
+ Some(DataType::Counter),
+ ),
+ conflicts: Vec::new(),
+ }],
+ seq: None,
+ actor: None,
+ },
+ },
+ ApplyChangeTestCase {
+ name: "should make a conflict on assignment to the same key",
+ changes: vec![
+ Change {
+ actor_id: ActorID::from_string("actor1".to_string()),
+ seq: 1,
+ dependencies: Clock::empty(),
+ message: None,
+ operations: vec![Operation::Set {
+ object_id: ObjectID::Root,
+ key: Key("bird".to_string()),
+ value: PrimitiveValue::Str("magpie".to_string()),
+ datatype: None,
+ }],
+ },
+ Change {
+ actor_id: ActorID::from_string("actor2".to_string()),
+ seq: 1,
+ dependencies: Clock::empty(),
+ message: None,
+ operations: vec![Operation::Set {
+ object_id: ObjectID::Root,
+ key: Key("bird".to_string()),
+ value: PrimitiveValue::Str("blackbird".to_string()),
+ datatype: None,
+ }],
+ },
+ ],
+ expected_patch: Patch {
+ can_undo: false,
+ can_redo: false,
+ clock: Clock::empty().with(&actor1, 1).with(&actor2, 1),
+ deps: Clock::empty().with(&actor1, 1).with(&actor2, 1),
+ diffs: vec![Diff {
+ action: DiffAction::SetMapKey(
+ ObjectID::Root,
+ MapType::Map,
+ Key("bird".to_string()),
+ ElementValue::Primitive(PrimitiveValue::Str("blackbird".to_string())),
+ None,
+ ),
+ conflicts: vec![Conflict {
+ actor: actor1.clone(),
+ value: ElementValue::Primitive(PrimitiveValue::Str(
+ "magpie".to_string(),
+ )),
+ datatype: None,
+ }],
+ }],
+ seq: None,
+ actor: None,
+ },
+ },
+ ApplyChangeTestCase {
+ name: "delete a key from a map",
+ changes: vec![
+ Change {
+ actor_id: actor1.clone(),
+ seq: 1,
+ dependencies: Clock::empty(),
+ message: None,
+ operations: vec![Operation::Set {
+ object_id: ObjectID::Root,
+ key: Key("bird".to_string()),
+ value: PrimitiveValue::Str("magpie".to_string()),
+ datatype: None,
+ }],
+ },
+ Change {
+ actor_id: actor1.clone(),
+ seq: 2,
+ dependencies: Clock::empty(),
+ message: None,
+ operations: vec![Operation::Delete {
+ object_id: ObjectID::Root,
+ key: Key("bird".to_string()),
+ }],
+ },
+ ],
+ expected_patch: Patch {
+ can_undo: false,
+ can_redo: false,
+ clock: Clock::empty().with(&actor1, 2),
+ deps: Clock::empty().with(&actor1, 2),
+ diffs: vec![Diff {
+ action: DiffAction::RemoveMapKey(
+ ObjectID::Root,
+ MapType::Map,
+ Key("bird".to_string()),
+ ),
+ conflicts: Vec::new(),
+ }],
+ seq: None,
+ actor: None,
+ },
+ },
+ ApplyChangeTestCase {
+ name: "create nested maps",
+ changes: vec![Change {
+ actor_id: actor1.clone(),
+ seq: 1,
+ dependencies: Clock::empty(),
+ message: None,
+ operations: vec![
+ Operation::MakeMap {
+ object_id: ObjectID::ID("birds".to_string()),
+ },
+ Operation::Set {
+ object_id: ObjectID::ID("birds".to_string()),
+ key: Key("wrens".to_string()),
+ value: PrimitiveValue::Number(3.0),
+ datatype: None,
+ },
+ Operation::Link {
+ object_id: ObjectID::Root,
+ key: Key("birds".to_string()),
+ value: ObjectID::ID("birds".to_string()),
+ },
+ ],
+ }],
+ expected_patch: Patch {
+ can_undo: false,
+ can_redo: false,
+ clock: Clock::empty().with(&actor1, 1),
+ deps: Clock::empty().with(&actor1, 1),
+ diffs: vec![
+ Diff {
+ action: DiffAction::CreateMap(
+ ObjectID::ID("birds".to_string()),
+ MapType::Map,
+ ),
+ conflicts: Vec::new(),
+ },
+ Diff {
+ action: DiffAction::SetMapKey(
+ ObjectID::ID("birds".to_string()),
+ MapType::Map,
+ Key("wrens".to_string()),
+ ElementValue::Primitive(PrimitiveValue::Number(3.0)),
+ None,
+ ),
+ conflicts: Vec::new(),
+ },
+ Diff {
+ action: DiffAction::SetMapKey(
+ ObjectID::Root,
+ MapType::Map,
+ Key("birds".to_string()),
+ ElementValue::Link(ObjectID::ID("birds".to_string())),
+ None,
+ ),
+ conflicts: Vec::new(),
+ },
+ ],
+ seq: None,
+ actor: None,
+ },
+ },
+ ApplyChangeTestCase {
+ name: "create lists",
+ changes: vec![Change {
+ actor_id: actor1.clone(),
+ seq: 1,
+ dependencies: Clock::empty(),
+ message: None,
+ operations: vec![
+ Operation::MakeList {
+ object_id: ObjectID::ID("birds".to_string()),
+ },
+ Operation::Insert {
+ list_id: ObjectID::ID("birds".to_string()),
+ key: ElementID::Head,
+ elem: 1,
+ },
+ Operation::Set {
+ object_id: ObjectID::ID("birds".to_string()),
+ key: ElementID::from_actor_and_elem(actor1.clone(), 1).as_key(),
+ value: PrimitiveValue::Str("chaffinch".to_string()),
+ datatype: None,
+ },
+ Operation::Link {
+ object_id: ObjectID::Root,
+ key: Key("birds".to_string()),
+ value: ObjectID::ID("birds".to_string()),
+ },
+ ],
+ }],
+ expected_patch: Patch {
+ can_undo: false,
+ can_redo: false,
+ clock: Clock::empty().with(&actor1, 1),
+ deps: Clock::empty().with(&actor1, 1),
+ diffs: vec![
+ Diff {
+ action: DiffAction::CreateList(
+ ObjectID::ID("birds".to_string()),
+ SequenceType::List,
+ ),
+ conflicts: Vec::new(),
+ },
+ Diff {
+ action: DiffAction::InsertSequenceElement(
+ ObjectID::ID("birds".to_string()),
+ SequenceType::List,
+ 0,
+ ElementValue::Primitive(PrimitiveValue::Str(
+ "chaffinch".to_string(),
+ )),
+ None,
+ ElementID::from_actor_and_elem(actor1.clone(), 1),
+ ),
+ conflicts: Vec::new(),
+ },
+ Diff {
+ action: DiffAction::SetMapKey(
+ ObjectID::Root,
+ MapType::Map,
+ Key("birds".to_string()),
+ ElementValue::Link(ObjectID::ID("birds".to_string())),
+ None,
+ ),
+ conflicts: Vec::new(),
+ },
+ ],
+ seq: None,
+ actor: None,
+ },
+ },
+ ApplyChangeTestCase {
+ name: "apply update inside lists",
+ changes: vec![
+ Change {
+ actor_id: actor1.clone(),
+ seq: 1,
+ dependencies: Clock::empty(),
+ message: None,
+ operations: vec![
+ Operation::MakeList {
+ object_id: ObjectID::ID("birds".to_string()),
+ },
+ Operation::Insert {
+ list_id: ObjectID::ID("birds".to_string()),
+ key: ElementID::Head,
+ elem: 1,
+ },
+ Operation::Set {
+ object_id: ObjectID::ID("birds".to_string()),
+ key: Key("actor1:1".to_string()),
+ value: PrimitiveValue::Str("chaffinch".to_string()),
+ datatype: None,
+ },
+ Operation::Link {
+ object_id: ObjectID::Root,
+ key: Key("birds".to_string()),
+ value: ObjectID::ID("birds".to_string()),
+ },
+ ],
+ },
+ Change {
+ actor_id: actor1.clone(),
+ seq: 2,
+ dependencies: Clock::empty(),
+ message: None,
+ operations: vec![Operation::Set {
+ object_id: ObjectID::ID("birds".to_string()),
+ key: Key("actor1:1".to_string()),
+ value: PrimitiveValue::Str("greenfinch".to_string()),
+ datatype: None,
+ }],
+ },
+ ],
+ expected_patch: Patch {
+ can_undo: false,
+ can_redo: false,
+ clock: Clock::empty().with(&actor1, 2),
+ deps: Clock::empty().with(&actor1, 2),
+ diffs: vec![Diff {
+ action: DiffAction::SetSequenceElement(
+ ObjectID::ID("birds".to_string()),
+ SequenceType::List,
+ 0,
+ ElementValue::Primitive(PrimitiveValue::Str("greenfinch".to_string())),
+ None,
+ ),
+ conflicts: Vec::new(),
+ }],
+ seq: None,
+ actor: None,
+ },
+ },
+ ApplyChangeTestCase {
+ name: "delete list elements",
+ changes: vec![
+ Change {
+ actor_id: actor1.clone(),
+ seq: 1,
+ dependencies: Clock::empty(),
+ message: None,
+ operations: vec![
+ Operation::MakeList {
+ object_id: ObjectID::ID("birds".to_string()),
+ },
+ Operation::Insert {
+ list_id: ObjectID::ID("birds".to_string()),
+ key: ElementID::Head,
+ elem: 1,
+ },
+ Operation::Set {
+ object_id: ObjectID::ID("birds".to_string()),
+ key: Key("actor1:1".to_string()),
+ value: PrimitiveValue::Str("chaffinch".to_string()),
+ datatype: None,
+ },
+ Operation::Link {
+ object_id: ObjectID::Root,
+ key: Key("birds".to_string()),
+ value: ObjectID::ID("birds".to_string()),
+ },
+ ],
+ },
+ Change {
+ actor_id: actor1.clone(),
+ seq: 2,
+ dependencies: Clock::empty(),
+ message: None,
+ operations: vec![Operation::Delete {
+ object_id: ObjectID::ID("birds".to_string()),
+ key: Key("actor1:1".to_string()),
+ }],
+ },
+ ],
+ expected_patch: Patch {
+ can_undo: false,
+ can_redo: false,
+ clock: Clock::empty().with(&actor1, 2),
+ deps: Clock::empty().with(&actor1, 2),
+ diffs: vec![Diff {
+ action: DiffAction::RemoveSequenceElement(
+ ObjectID::ID("birds".to_string()),
+ SequenceType::List,
+ 0,
+ ),
+ conflicts: Vec::new(),
+ }],
+ seq: None,
+ actor: None,
+ },
+ },
+ ApplyChangeTestCase {
+ name: "Handle list element insertion and deletion in the same change",
+ changes: vec![
+ Change {
+ actor_id: actor1.clone(),
+ seq: 1,
+ dependencies: Clock::empty(),
+ message: None,
+ operations: vec![
+ Operation::MakeList {
+ object_id: ObjectID::ID("birds".to_string()),
+ },
+ Operation::Link {
+ object_id: ObjectID::Root,
+ key: Key("birds".to_string()),
+ value: ObjectID::ID("birds".to_string()),
+ },
+ ],
+ },
+ Change {
+ actor_id: actor1.clone(),
+ seq: 2,
+ dependencies: Clock::empty(),
+ message: None,
+ operations: vec![
+ Operation::Insert {
+ list_id: ObjectID::ID("birds".to_string()),
+ key: ElementID::Head,
+ elem: 1,
+ },
+ Operation::Delete {
+ object_id: ObjectID::ID("birds".to_string()),
+ // NOTE(review): "actor:1" (no digit) differs from the
+ // "actor1:1" keys used by the other cases — confirm
+ // this is intentional for the insert+delete case.
+ key: Key("actor:1".to_string()),
+ },
+ ],
+ },
+ ],
+ expected_patch: Patch {
+ can_undo: false,
+ can_redo: false,
+ clock: Clock::empty().with(&actor1, 2),
+ deps: Clock::empty().with(&actor1, 2),
+ diffs: vec![Diff {
+ action: DiffAction::MaxElem(
+ ObjectID::ID("birds".to_string()),
+ 1,
+ SequenceType::List,
+ ),
+ conflicts: Vec::new(),
+ }],
+ seq: None,
+ actor: None,
+ },
+ },
+ ];
+
+ for testcase in testcases {
+ let mut backend = Backend::init();
+ // The map is lazy; `.last()` below drives every application.
+ let patches = testcase
+ .changes
+ .into_iter()
+ .map(|c| backend.apply_changes(vec![c]).unwrap());
+ assert_eq!(
+ testcase.expected_patch,
+ patches.last().unwrap(),
+ "Patches not equal for {}",
+ testcase.name
+ );
+ }
+ }
+
+    /// One `apply_local_change` scenario: requests applied in order, each
+    /// resulting patch compared against the expectation at the same index.
+    /// (Restored `Vec<ChangeRequest>` / `Vec<Patch>`, which the patch text
+    /// had stripped to bare `Vec`.)
+    struct ApplyLocalChangeTestCase {
+        name: &'static str,
+        change_requests: Vec<ChangeRequest>,
+        expected_patches: Vec<Patch>,
+    }
+
+    /// Exercises the undo/redo request types end to end. Fixes in this
+    /// revision: assertion-message typo ("Pathes no equal"), consistent
+    /// `ActorID::from_string` construction, and an explicit patch-count
+    /// check so a truncating `zip` cannot hide missing patches.
+    #[test]
+    fn test_apply_local_change() {
+        let actor1 = ActorID::from_string("actor1".to_string());
+        let birds = ObjectID::ID("birds".to_string());
+        let testcases = vec![
+            ApplyLocalChangeTestCase {
+                name: "Should undo",
+                change_requests: vec![
+                    ChangeRequest {
+                        actor_id: actor1.clone(),
+                        seq: 1,
+                        message: None,
+                        dependencies: Clock::empty(),
+                        undoable: None,
+                        request_type: ChangeRequestType::Change(vec![
+                            Operation::MakeMap {
+                                object_id: birds.clone(),
+                            },
+                            Operation::Link {
+                                object_id: ObjectID::Root,
+                                key: Key("birds".to_string()),
+                                value: birds.clone(),
+                            },
+                            Operation::Set {
+                                object_id: birds.clone(),
+                                key: Key("chaffinch".to_string()),
+                                value: PrimitiveValue::Boolean(true),
+                                datatype: None,
+                            },
+                        ]),
+                    },
+                    ChangeRequest {
+                        actor_id: actor1.clone(),
+                        seq: 2,
+                        message: None,
+                        undoable: None,
+                        dependencies: Clock::empty().with(&actor1, 1),
+                        request_type: ChangeRequestType::Undo,
+                    },
+                ],
+                expected_patches: vec![
+                    Patch {
+                        actor: Some(actor1.clone()),
+                        can_redo: false,
+                        can_undo: true,
+                        seq: Some(1),
+                        clock: Clock::empty().with(&actor1, 1),
+                        deps: Clock::empty().with(&actor1, 1),
+                        diffs: vec![
+                            Diff {
+                                action: DiffAction::CreateMap(birds.clone(), MapType::Map),
+                                conflicts: Vec::new(),
+                            },
+                            Diff {
+                                action: DiffAction::SetMapKey(
+                                    ObjectID::Root,
+                                    MapType::Map,
+                                    Key("birds".to_string()),
+                                    ElementValue::Link(birds.clone()),
+                                    None,
+                                ),
+                                conflicts: Vec::new(),
+                            },
+                            Diff {
+                                action: DiffAction::SetMapKey(
+                                    birds.clone(),
+                                    MapType::Map,
+                                    Key("chaffinch".to_string()),
+                                    ElementValue::Primitive(PrimitiveValue::Boolean(true)),
+                                    None,
+                                ),
+                                conflicts: Vec::new(),
+                            },
+                        ],
+                    },
+                    Patch {
+                        actor: Some(actor1.clone()),
+                        can_redo: true,
+                        can_undo: false,
+                        seq: Some(2),
+                        clock: Clock::empty().with(&actor1, 2),
+                        deps: Clock::empty().with(&actor1, 2),
+                        diffs: vec![Diff {
+                            action: DiffAction::RemoveMapKey(
+                                ObjectID::Root,
+                                MapType::Map,
+                                Key("birds".to_string()),
+                            ),
+                            conflicts: Vec::new(),
+                        }],
+                    },
+                ],
+            },
+            ApplyLocalChangeTestCase {
+                name: "Should redo",
+                change_requests: vec![
+                    ChangeRequest {
+                        actor_id: actor1.clone(),
+                        seq: 1,
+                        message: None,
+                        dependencies: Clock::empty(),
+                        undoable: None,
+                        request_type: ChangeRequestType::Change(vec![
+                            Operation::MakeMap {
+                                object_id: birds.clone(),
+                            },
+                            Operation::Link {
+                                object_id: ObjectID::Root,
+                                key: Key("birds".to_string()),
+                                value: birds.clone(),
+                            },
+                            Operation::Set {
+                                object_id: birds.clone(),
+                                key: Key("chaffinch".to_string()),
+                                value: PrimitiveValue::Boolean(true),
+                                datatype: None,
+                            },
+                        ]),
+                    },
+                    ChangeRequest {
+                        actor_id: actor1.clone(),
+                        seq: 2,
+                        message: None,
+                        undoable: None,
+                        dependencies: Clock::empty().with(&actor1, 1),
+                        request_type: ChangeRequestType::Undo,
+                    },
+                    ChangeRequest {
+                        actor_id: actor1.clone(),
+                        seq: 3,
+                        message: None,
+                        undoable: None,
+                        dependencies: Clock::empty().with(&actor1, 2),
+                        request_type: ChangeRequestType::Redo,
+                    },
+                    ChangeRequest {
+                        actor_id: actor1.clone(),
+                        seq: 4,
+                        message: None,
+                        undoable: None,
+                        dependencies: Clock::empty().with(&actor1, 3),
+                        request_type: ChangeRequestType::Undo,
+                    },
+                    ChangeRequest {
+                        actor_id: actor1.clone(),
+                        seq: 5,
+                        message: None,
+                        undoable: None,
+                        dependencies: Clock::empty().with(&actor1, 4),
+                        request_type: ChangeRequestType::Redo,
+                    },
+                ],
+                expected_patches: vec![
+                    Patch {
+                        actor: Some(actor1.clone()),
+                        can_redo: false,
+                        can_undo: true,
+                        seq: Some(1),
+                        clock: Clock::empty().with(&actor1, 1),
+                        deps: Clock::empty().with(&actor1, 1),
+                        diffs: vec![
+                            Diff {
+                                action: DiffAction::CreateMap(birds.clone(), MapType::Map),
+                                conflicts: Vec::new(),
+                            },
+                            Diff {
+                                action: DiffAction::SetMapKey(
+                                    ObjectID::Root,
+                                    MapType::Map,
+                                    Key("birds".to_string()),
+                                    ElementValue::Link(birds.clone()),
+                                    None,
+                                ),
+                                conflicts: Vec::new(),
+                            },
+                            Diff {
+                                action: DiffAction::SetMapKey(
+                                    birds.clone(),
+                                    MapType::Map,
+                                    Key("chaffinch".to_string()),
+                                    ElementValue::Primitive(PrimitiveValue::Boolean(true)),
+                                    None,
+                                ),
+                                conflicts: Vec::new(),
+                            },
+                        ],
+                    },
+                    Patch {
+                        actor: Some(actor1.clone()),
+                        can_redo: true,
+                        can_undo: false,
+                        seq: Some(2),
+                        clock: Clock::empty().with(&actor1, 2),
+                        deps: Clock::empty().with(&actor1, 2),
+                        diffs: vec![Diff {
+                            action: DiffAction::RemoveMapKey(
+                                ObjectID::Root,
+                                MapType::Map,
+                                Key("birds".to_string()),
+                            ),
+                            conflicts: Vec::new(),
+                        }],
+                    },
+                    Patch {
+                        actor: Some(actor1.clone()),
+                        can_redo: false,
+                        can_undo: true,
+                        seq: Some(3),
+                        clock: Clock::empty().with(&actor1, 3),
+                        deps: Clock::empty().with(&actor1, 3),
+                        diffs: vec![Diff {
+                            action: DiffAction::SetMapKey(
+                                ObjectID::Root,
+                                MapType::Map,
+                                Key("birds".to_string()),
+                                ElementValue::Link(birds.clone()),
+                                None,
+                            ),
+                            conflicts: Vec::new(),
+                        }],
+                    },
+                    Patch {
+                        actor: Some(actor1.clone()),
+                        can_redo: true,
+                        can_undo: false,
+                        seq: Some(4),
+                        clock: Clock::empty().with(&actor1, 4),
+                        deps: Clock::empty().with(&actor1, 4),
+                        diffs: vec![Diff {
+                            action: DiffAction::RemoveMapKey(
+                                ObjectID::Root,
+                                MapType::Map,
+                                Key("birds".to_string()),
+                            ),
+                            conflicts: Vec::new(),
+                        }],
+                    },
+                    Patch {
+                        actor: Some(actor1.clone()),
+                        can_redo: false,
+                        can_undo: true,
+                        seq: Some(5),
+                        clock: Clock::empty().with(&actor1, 5),
+                        deps: Clock::empty().with(&actor1, 5),
+                        diffs: vec![Diff {
+                            action: DiffAction::SetMapKey(
+                                ObjectID::Root,
+                                MapType::Map,
+                                Key("birds".to_string()),
+                                ElementValue::Link(birds),
+                                None,
+                            ),
+                            conflicts: Vec::new(),
+                        }],
+                    },
+                ],
+            },
+        ];
+
+        for testcase in testcases {
+            let mut backend = Backend::init();
+            let patches: Vec<Patch> = testcase
+                .change_requests
+                .into_iter()
+                .map(|c| backend.apply_local_change(c).unwrap())
+                .collect();
+            // Previously a lazy iterator was zipped directly against the
+            // expectations; if fewer patches were produced than expected,
+            // zip silently truncated and the test passed vacuously.
+            assert_eq!(
+                patches.len(),
+                testcase.expected_patches.len(),
+                "Patch count mismatch for testcase: {}",
+                testcase.name
+            );
+            for (index, (patch, expected_patch)) in patches
+                .into_iter()
+                .zip(testcase.expected_patches)
+                .enumerate()
+            {
+                assert_eq!(
+                    patch, expected_patch,
+                    "Patches not equal for testcase: {}, patch: {}",
+                    testcase.name, index
+                );
+            }
+        }
+    }
+
+ /// After two assignments to the same key by one actor, `get_patch`
+ /// should describe only the final state (the second value), not the
+ /// intermediate one.
+ #[test]
+ fn test_get_patch() {
+ let mut backend = Backend::init();
+ let actor = ActorID::from_string("actor1".to_string());
+ let change1 = Change {
+ actor_id: actor.clone(),
+ seq: 1,
+ dependencies: Clock::empty(),
+ message: None,
+ operations: vec![Operation::Set {
+ object_id: ObjectID::Root,
+ key: Key("bird".to_string()),
+ value: PrimitiveValue::Str("magpie".to_string()),
+ datatype: None,
+ }],
+ };
+ let change2 = Change {
+ actor_id: actor.clone(),
+ seq: 2,
+ dependencies: Clock::empty(),
+ message: None,
+ operations: vec![Operation::Set {
+ object_id: ObjectID::Root,
+ key: Key("bird".to_string()),
+ value: PrimitiveValue::Str("blackbird".to_string()),
+ datatype: None,
+ }],
+ };
+ let _patch1 = backend.apply_changes(vec![change1, change2]).unwrap();
+ let patch2 = backend.get_patch();
+ let patch3 = Patch {
+ can_undo: false,
+ can_redo: false,
+ clock: Clock::empty().with(&actor, 2),
+ deps: Clock::empty().with(&actor, 2),
+ seq: None,
+ actor: None,
+ diffs: vec![Diff {
+ action: DiffAction::SetMapKey(
+ ObjectID::Root,
+ MapType::Map,
+ Key("bird".to_string()),
+ ElementValue::Primitive(PrimitiveValue::Str("blackbird".to_string())),
+ None,
+ ),
+ conflicts: Vec::new(),
+ }],
+ };
+ assert_eq!(patch2, patch3, "Patches not equal test_get_patch");
+ }
+
+    /// A clock that already covers the backend's only change must yield no
+    /// missing changes.
+    #[test]
+    fn test_get_missing_changes() {
+        let actor = ActorID::from_string("actor1".to_string());
+        let only_change = Change {
+            actor_id: actor.clone(),
+            seq: 1,
+            dependencies: Clock::empty(),
+            message: None,
+            operations: vec![Operation::Set {
+                object_id: ObjectID::Root,
+                key: Key("bird".to_string()),
+                value: PrimitiveValue::Str("magpie".to_string()),
+                datatype: None,
+            }],
+        };
+        let mut backend = Backend::init();
+        backend.apply_changes(vec![only_change]).unwrap();
+        let up_to_date = Clock::empty().with(&actor, 1);
+        assert!(backend.get_missing_changes(up_to_date).is_empty())
+    }
+
+ /// `get_patch` over a list that has seen inserts, an update, and a
+ /// delete should reconstruct only the surviving elements (in order),
+ /// plus the MaxElem marker and the root link.
+ #[test]
+ fn test_get_patch_list_state() {
+ let birds = ObjectID::ID("birds".into());
+ let actor = ActorID("actor1".into());
+ let changes = vec![
+ Change {
+ actor_id: actor.clone(),
+ seq: 1,
+ dependencies: Clock::empty(),
+ message: None,
+ operations: vec![
+ Operation::MakeList {
+ object_id: birds.clone(),
+ },
+ Operation::Insert {
+ list_id: birds.clone(),
+ key: ElementID::Head,
+ elem: 1,
+ },
+ Operation::Set {
+ object_id: birds.clone(),
+ key: Key("actor1:1".into()),
+ value: PrimitiveValue::Str("chaffinch".into()),
+ datatype: None,
+ },
+ Operation::Insert {
+ list_id: birds.clone(),
+ key: ElementID::from_actor_and_elem(actor.clone(), 1),
+ elem: 2,
+ },
+ Operation::Set {
+ object_id: birds.clone(),
+ key: Key("actor1:2".into()),
+ value: PrimitiveValue::Str("goldfinch".into()),
+ datatype: None,
+ },
+ Operation::Link {
+ object_id: ObjectID::Root,
+ key: Key("birds".into()),
+ value: birds.clone(),
+ },
+ ],
+ },
+ Change {
+ actor_id: actor.clone(),
+ seq: 2,
+ dependencies: Clock::empty(),
+ message: None,
+ operations: vec![
+ Operation::Delete {
+ object_id: birds.clone(),
+ key: Key("actor1:1".into()),
+ },
+ Operation::Insert {
+ list_id: birds.clone(),
+ key: ElementID::from_actor_and_elem(actor.clone(), 1),
+ elem: 3,
+ },
+ Operation::Set {
+ object_id: birds.clone(),
+ key: Key("actor1:3".into()),
+ value: PrimitiveValue::Str("greenfinch".into()),
+ datatype: None,
+ },
+ Operation::Set {
+ object_id: birds.clone(),
+ key: Key("actor1:2".into()),
+ value: PrimitiveValue::Str("goldfinches!!".into()),
+ datatype: None,
+ },
+ ],
+ },
+ ];
+ let expected_patch = Patch {
+ actor: None,
+ can_undo: false,
+ can_redo: false,
+ clock: Clock::empty().with(&actor, 2),
+ deps: Clock::empty().with(&actor, 2),
+ seq: None,
+ diffs: vec![
+ Diff {
+ action: DiffAction::CreateList(birds.clone(), SequenceType::List),
+ conflicts: Vec::new(),
+ },
+ Diff {
+ action: DiffAction::InsertSequenceElement(
+ birds.clone(),
+ SequenceType::List,
+ 0,
+ ElementValue::Primitive(PrimitiveValue::Str("greenfinch".into())),
+ None,
+ ElementID::from_actor_and_elem(actor.clone(), 3),
+ ),
+ conflicts: Vec::new(),
+ },
+ Diff {
+ action: DiffAction::InsertSequenceElement(
+ birds.clone(),
+ SequenceType::List,
+ 1,
+ ElementValue::Primitive(PrimitiveValue::Str("goldfinches!!".into())),
+ None,
+ ElementID::from_actor_and_elem(actor.clone(), 2),
+ ),
+ conflicts: Vec::new(),
+ },
+ Diff {
+ action: DiffAction::MaxElem(birds.clone(), 3, SequenceType::List),
+ conflicts: Vec::new(),
+ },
+ Diff {
+ action: DiffAction::SetMapKey(
+ ObjectID::Root,
+ MapType::Map,
+ Key("birds".into()),
+ ElementValue::Link(birds),
+ None,
+ ),
+ conflicts: Vec::new(),
+ },
+ ],
+ };
+ let mut backend = Backend::init();
+ backend.apply_changes(changes).unwrap();
+ assert_eq!(expected_patch, backend.get_patch());
+ }
+}
diff --git a/automerge-backend/src/concurrent_operations.rs b/automerge-backend/src/concurrent_operations.rs
new file mode 100644
index 00000000..ee030957
--- /dev/null
+++ b/automerge-backend/src/concurrent_operations.rs
@@ -0,0 +1,132 @@
+use crate::actor_states::ActorStates;
+use crate::error::AutomergeError;
+use crate::operation_with_metadata::OperationWithMetadata;
+use crate::patch::{Conflict, ElementValue};
+use crate::{DataType, Operation, PrimitiveValue};
+use std::cmp::PartialOrd;
+
+/// Represents a set of operations which are relevant to either an element ID
+/// or object ID and which occurred without knowledge of each other
+/// (Restored `Vec<OperationWithMetadata>`, stripped to `Vec` in the patch
+/// text.)
+#[derive(Debug, Clone, PartialEq)]
+pub struct ConcurrentOperations {
+    operations: Vec<OperationWithMetadata>,
+}
+
+impl ConcurrentOperations {
+    /// An empty set of concurrent operations.
+    pub(crate) fn new() -> ConcurrentOperations {
+        ConcurrentOperations {
+            operations: Vec::new(),
+        }
+    }
+
+    /// The operation that currently "wins" for this key/element.
+    pub fn active_op(&self) -> Option<&OperationWithMetadata> {
+        // operations are sorted in incorporate_new_op, so the first op is the
+        // active one
+        self.operations.first()
+    }
+
+    /// All non-winning concurrent operations, rendered as `Conflict`s.
+    /// Panics if a non-Set/Link op ended up in the concurrent list, which
+    /// `incorporate_new_op` prevents. (Return generic `Vec<Conflict>`
+    /// restored.)
+    pub fn conflicts(&self) -> Vec<Conflict> {
+        self.operations
+            .split_first()
+            .map(|(_, tail)| {
+                tail.iter()
+                    .map(|op| match &op.operation {
+                        Operation::Set {
+                            value, datatype, ..
+                        } => Conflict {
+                            actor: op.actor_id.clone(),
+                            value: ElementValue::Primitive(value.clone()),
+                            datatype: datatype.clone(),
+                        },
+                        Operation::Link { value, .. } => Conflict {
+                            actor: op.actor_id.clone(),
+                            value: ElementValue::Link(value.clone()),
+                            datatype: None,
+                        },
+                        _ => panic!("Invalid operation in concurrent ops"),
+                    })
+                    .collect()
+            })
+            .unwrap_or_default()
+    }
+
+    /// Updates this set of operations based on a new operation.
+    ///
+    /// Returns the previous operations (multiple if concurrent) that this op
+    /// replaces. (Return generic `Result<Vec<Operation>, _>` and the local
+    /// `Vec<OperationWithMetadata>` annotation restored.)
+    pub(crate) fn incorporate_new_op(
+        &mut self,
+        new_op: OperationWithMetadata,
+        actor_states: &ActorStates,
+    ) -> Result<Vec<Operation>, AutomergeError> {
+        let previous = self
+            .operations
+            .clone()
+            .into_iter()
+            .map(|o| o.operation)
+            .collect();
+        let mut concurrent: Vec<OperationWithMetadata> = match new_op.operation {
+            // If the operation is an increment op, then we are going to modify
+            // any Set operations to reflect the increment ops in the next
+            // part of this function
+            Operation::Increment { .. } => self.operations.clone(),
+            // Otherwise we filter out any operations that are not concurrent
+            // with the new one (i.e ones which causally precede the new one)
+            _ => self
+                .operations
+                .iter()
+                .filter(|op| actor_states.is_concurrent(&op, &new_op))
+                .cloned()
+                .collect(),
+        };
+        let this_op = new_op.clone();
+        match &new_op.operation {
+            // For Set or Link ops, we add them to the concurrent ops list, to
+            // be interpreted later as part of the document::walk
+            // implementation
+            Operation::Set { .. } | Operation::Link { .. } => {
+                concurrent.push(this_op);
+            }
+            // Increment ops are not stored in the op set, instead we update
+            // any Set operations which are a counter containing a number to
+            // reflect the increment operation
+            Operation::Increment {
+                value: inc_value, ..
+            } => concurrent.iter_mut().for_each(|op| {
+                let op_clone = op.clone();
+                if let Operation::Set {
+                    value: PrimitiveValue::Number(ref mut n),
+                    datatype: Some(DataType::Counter),
+                    ..
+                } = op.operation
+                {
+                    // Only apply the increment to sets it causally follows
+                    if !(actor_states.is_concurrent(&new_op, &op_clone)) {
+                        *n += inc_value
+                    }
+                }
+            }),
+            // All other operations are not relevant (e.g a concurrent
+            // operation set containing just a delete operation actually is an
+            // empty set, in document::walk we interpret this into a
+            // nonexistent part of the state)
+            _ => {}
+        }
+        // the partial_cmp implementation for `OperationWithMetadata` ensures
+        // that the operations are in the deterministic order required by
+        // automerge.
+        //
+        // Note we can unwrap because the partial_cmp definition never returns
+        // None
+        concurrent.sort_by(|a, b| a.partial_cmp(b).unwrap());
+        concurrent.reverse();
+        self.operations = concurrent;
+        Ok(previous)
+    }
+
+    /// The stored operations with their metadata stripped.
+    /// (Return generic `Vec<Operation>` restored.)
+    pub fn pure_operations(&self) -> Vec<Operation> {
+        self.operations
+            .iter()
+            .map(|o| o.operation.clone())
+            .collect()
+    }
+}
diff --git a/automerge-backend/src/error.rs b/automerge-backend/src/error.rs
new file mode 100644
index 00000000..7acbf72a
--- /dev/null
+++ b/automerge-backend/src/error.rs
@@ -0,0 +1,45 @@
+use crate::protocol::ObjectID;
+use std::error::Error;
+use std::fmt;
+
+/// Errors produced while applying changes to the backend. Most variants
+/// carry a human-readable detail string.
+#[derive(Debug)]
+pub enum AutomergeError {
+ DuplicateObjectError,
+ MissingObjectError(ObjectID),
+ InvalidObjectType(String),
+ InvalidLinkTarget,
+ DuplicateChange(String),
+ NotImplemented(String),
+ InvalidChange(String),
+ DivergedState(String),
+}
+
+// Display just reuses the Debug representation; these messages are for
+// diagnostics, not end users.
+impl fmt::Display for AutomergeError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "{:?}", self)
+ }
+}
+
+impl Error for AutomergeError {}
+
+/// Error returned when parsing a string that is not a valid element ID;
+/// the payload is the offending input.
+#[derive(Debug)]
+pub struct InvalidElementID(pub String);
+
+impl fmt::Display for InvalidElementID {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "{:?}", self)
+ }
+}
+
+/// Error returned when a frontend change request is malformed; the payload
+/// describes what was wrong.
+#[derive(Debug)]
+pub struct InvalidChangeRequest(pub String);
+
+impl Error for InvalidChangeRequest {}
+
+impl fmt::Display for InvalidChangeRequest {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "{:?}", self)
+ }
+}
diff --git a/automerge-backend/src/lib.rs b/automerge-backend/src/lib.rs
new file mode 100644
index 00000000..db90612a
--- /dev/null
+++ b/automerge-backend/src/lib.rs
@@ -0,0 +1,35 @@
+extern crate web_sys;
+
+// Debug logging to the browser console via web-sys; formats like
+// `println!`. Only meaningful on wasm targets.
+#[allow(unused_macros)]
+macro_rules! log {
+ ( $( $t:tt )* ) => {
+ web_sys::console::log_1(&format!( $( $t )* ).into());
+ }
+}
+
+mod actor_states;
+mod backend;
+mod concurrent_operations;
+mod error;
+mod object_store;
+mod op_set;
+mod operation_with_metadata;
+mod patch;
+mod patch_serialization;
+mod protocol;
+mod protocol_serialization;
+mod value;
+
+pub use crate::protocol::{
+ ActorID, Change, ChangeRequest, ChangeRequestType, Clock, DataType, ElementID, Key, ObjectID,
+ Operation, PrimitiveValue,
+};
+pub use actor_states::ActorStates;
+pub use backend::Backend;
+pub use concurrent_operations::ConcurrentOperations;
+pub use error::AutomergeError;
+pub use object_store::{ListState, MapState, ObjectState, ObjectStore};
+pub use op_set::{list_ops_in_order, OpSet};
+pub use operation_with_metadata::OperationWithMetadata;
+pub use patch::{Conflict, Diff, DiffAction, ElementValue, MapType, Patch, SequenceType};
+pub use value::Value;
diff --git a/automerge-backend/src/object_store.rs b/automerge-backend/src/object_store.rs
new file mode 100644
index 00000000..8aeca668
--- /dev/null
+++ b/automerge-backend/src/object_store.rs
@@ -0,0 +1,622 @@
+use crate::actor_states::ActorStates;
+use crate::concurrent_operations::ConcurrentOperations;
+use crate::error::AutomergeError;
+use crate::operation_with_metadata::OperationWithMetadata;
+use crate::protocol::ActorID;
+use crate::{
+ list_ops_in_order, DataType, Diff, DiffAction, ElementID, ElementValue, Key, MapType, ObjectID,
+ Operation, SequenceType,
+};
+use std::collections::{HashMap, HashSet};
+
+/// ObjectHistory is what the OpSet uses to store operations for a particular
+/// key, they represent the two possible container types in automerge, a map or
+/// a sequence (tables and text are effectively the maps and sequences
+/// respectively).
+#[derive(Debug, Clone, PartialEq)]
+pub enum ObjectState {
+ Map(MapState),
+ List(ListState),
+}
+
+impl ObjectState {
+ fn new_map(map_type: MapType, object_id: ObjectID) -> ObjectState {
+ ObjectState::Map(MapState::new(map_type, object_id))
+ }
+
+ fn new_sequence(sequence_type: SequenceType, object_id: ObjectID) -> ObjectState {
+ ObjectState::List(ListState::new(sequence_type, object_id))
+ }
+
+ // this feels like we should have a trait or something
+ fn generate_diffs(&self) -> Vec<Diff> {
+ match self {
+ ObjectState::Map(map_state) => map_state.generate_diffs(),
+ ObjectState::List(list_state) => list_state.generate_diffs(),
+ }
+ }
+
+ fn handle_assign_op(
+ &mut self,
+ op_with_metadata: OperationWithMetadata,
+ actor_states: &ActorStates,
+ key: &Key,
+ ) -> Result<(Option<Diff>, Vec<Operation>), AutomergeError> {
+ let (diff, mut undo_ops) = match self {
+ ObjectState::Map(mapstate) => {
+ mapstate.handle_assign_op(op_with_metadata.clone(), actor_states, key)
+ }
+ ObjectState::List(liststate) => {
+ liststate.handle_assign_op(op_with_metadata.clone(), actor_states, key)
+ }
+ }?;
+
+ if let Operation::Increment {
+ object_id,
+ key,
+ value,
+ } = &op_with_metadata.operation
+ {
+ undo_ops = vec![Operation::Increment {
+ object_id: object_id.clone(),
+ key: key.clone(),
+ value: -value,
+ }]
+ };
+
+ if undo_ops.is_empty() {
+ undo_ops.push(Operation::Delete {
+ object_id: op_with_metadata.operation.object_id().clone(),
+ key: key.clone(),
+ })
+ }
+
+ Ok((diff, undo_ops))
+ }
+}
+
+/// Stores operations on list objects
+#[derive(Debug, Clone, PartialEq)]
+pub struct ListState {
+ pub operations_by_elemid: HashMap<ElementID, ConcurrentOperations>,
+ pub insertions: HashMap<ElementID, ElementID>,
+ pub following: HashMap<ElementID, Vec<ElementID>>,
+ pub max_elem: u32,
+ pub sequence_type: SequenceType,
+ pub object_id: ObjectID,
+}
+
+impl ListState {
+ fn new(sequence_type: SequenceType, object_id: ObjectID) -> ListState {
+ ListState {
+ operations_by_elemid: HashMap::new(),
+ following: HashMap::new(),
+ insertions: HashMap::new(),
+ max_elem: 0,
+ sequence_type,
+ object_id,
+ }
+ }
+
+ fn generate_diffs(&self) -> Vec<Diff> {
+ let mut diffs = Vec::new();
+
+ let head = Diff {
+ action: DiffAction::CreateList(self.object_id.clone(), self.sequence_type.clone()),
+ conflicts: Vec::new(),
+ };
+
+ let ops_in_order = list_ops_in_order(&self.operations_by_elemid, &self.following)
+ .ok()
+ .unwrap_or_default();
+
+ let inserts = ops_in_order
+ .iter()
+ .filter_map(|(_, ops)| {
+ ops.active_op()
+ .map(|active_op| (active_op, ops.conflicts()))
+ })
+ .enumerate()
+ .map(|(after, (active_op, conflicts))| Diff {
+ action: list_op_to_assign_diff(
+ &active_op.operation,
+ &self.sequence_type,
+ after as u32,
+ )
+ .unwrap(),
+ conflicts,
+ });
+
+ let tail = Diff {
+ action: DiffAction::MaxElem(
+ self.object_id.clone(),
+ self.max_elem,
+ self.sequence_type.clone(),
+ ),
+ conflicts: Vec::new(),
+ };
+
+ diffs.push(head);
+ diffs.extend(inserts);
+ diffs.push(tail);
+
+ diffs
+ }
+
+ fn handle_assign_op(
+ &mut self,
+ op: OperationWithMetadata,
+ actor_states: &ActorStates,
+ key: &Key,
+ ) -> Result<(Option<Diff>, Vec<Operation>), AutomergeError> {
+ let elem_id = key.as_element_id().map_err(|_| AutomergeError::InvalidChange(format!("Attempted to link, set, delete, or increment an object in a list with invalid element ID {:?}", key.0)))?;
+
+ // We have to clone this here in order to avoid holding a reference to
+ // self which makes the borrow checker choke when adding an op to the
+ // operations_by_elemid map later
+ let ops_clone = self.operations_by_elemid.clone();
+ let ops_in_order_before_this_op = list_ops_in_order(&ops_clone, &self.following)?;
+
+ // This is a hack to avoid holding on to a mutable reference to self
+ // when adding a new operation
+ let (undo_ops, ops) = {
+ let mutable_ops = self
+ .operations_by_elemid
+ .entry(elem_id.clone())
+ .or_insert_with(ConcurrentOperations::new);
+ let undo_ops = mutable_ops.incorporate_new_op(op, actor_states)?;
+ (undo_ops, mutable_ops.clone())
+ };
+
+ let ops_in_order_after_this_op =
+ list_ops_in_order(&self.operations_by_elemid, &self.following)?;
+
+ let index_before_op = ops_in_order_before_this_op
+ .iter()
+ .filter_map(|(elem_id, ops)| ops.active_op().map(|_| elem_id))
+ .enumerate()
+ .find(|(_, op_elem_id)| &&elem_id == op_elem_id)
+ .map(|(index, _)| index as u32);
+
+ let index_and_value_after_op: Option<(u32, ElementValue, Option<DataType>)> =
+ ops_in_order_after_this_op
+ .iter()
+ .filter_map(|(elem_id, ops)| ops.active_op().map(|op| (op, elem_id)))
+ .enumerate()
+ .find(|(_, (_, op_elem_id))| &&elem_id == op_elem_id)
+ .map(|(index, (op, _))| {
+ let (value, datatype) = match &op.operation {
+ Operation::Set {
+ ref value,
+ ref datatype,
+ ..
+ } => (ElementValue::Primitive(value.clone()), datatype),
+ Operation::Link { value, .. } => (ElementValue::Link(value.clone()), &None),
+ _ => panic!("Should not happen"),
+ };
+ (index as u32, value, datatype.clone())
+ });
+
+ let action: Option<DiffAction> = match (index_before_op, index_and_value_after_op) {
+ (Some(_), Some((after, value, datatype))) => Some(DiffAction::SetSequenceElement(
+ self.object_id.clone(),
+ self.sequence_type.clone(),
+ after,
+ value,
+ datatype,
+ )),
+ (Some(before), None) => Some(DiffAction::RemoveSequenceElement(
+ self.object_id.clone(),
+ self.sequence_type.clone(),
+ before,
+ )),
+ (None, Some((after, value, datatype))) => Some(DiffAction::InsertSequenceElement(
+ self.object_id.clone(),
+ self.sequence_type.clone(),
+ after,
+ value,
+ datatype,
+ elem_id,
+ )),
+ (None, None) => None,
+ };
+ Ok((
+ action.map(|action| Diff {
+ action,
+ conflicts: ops.conflicts(),
+ }),
+ undo_ops,
+ ))
+ }
+
+ fn add_insertion(
+ &mut self,
+ actor_id: &ActorID,
+ elem_id: &ElementID,
+ elem: u32,
+ ) -> Result<Diff, AutomergeError> {
+ let inserted_elemid = ElementID::SpecificElementID(actor_id.clone(), elem);
+ if self.insertions.contains_key(&inserted_elemid) {
+ return Err(AutomergeError::InvalidChange(format!(
+ "Received an insertion for already present key: {:?}",
+ inserted_elemid
+ )));
+ }
+ self.insertions
+ .insert(inserted_elemid.clone(), inserted_elemid.clone());
+ let following_ops = self
+ .following
+ .entry(elem_id.clone())
+ .or_insert_with(Vec::new);
+ following_ops.push(inserted_elemid.clone());
+
+ let ops = self
+ .operations_by_elemid
+ .entry(inserted_elemid)
+ .or_insert_with(ConcurrentOperations::new);
+ self.max_elem = std::cmp::max(self.max_elem, elem);
+ Ok(Diff {
+ action: DiffAction::MaxElem(
+ self.object_id.clone(),
+ self.max_elem,
+ self.sequence_type.clone(),
+ ),
+ conflicts: ops.conflicts(),
+ })
+ }
+}
+
+/// Stores operations on map objects
+#[derive(Debug, Clone, PartialEq)]
+pub struct MapState {
+ pub operations_by_key: HashMap<Key, ConcurrentOperations>,
+ pub map_type: MapType,
+ pub object_id: ObjectID,
+}
+
+impl MapState {
+ fn new(map_type: MapType, object_id: ObjectID) -> MapState {
+ MapState {
+ operations_by_key: HashMap::new(),
+ map_type,
+ object_id,
+ }
+ }
+
+ fn generate_diffs(&self) -> Vec<Diff> {
+ let mut diffs = Vec::new();
+ if self.object_id != ObjectID::Root {
+ diffs.push(Diff {
+ action: DiffAction::CreateMap(self.object_id.clone(), self.map_type.clone()),
+ conflicts: Vec::new(),
+ })
+ }
+ diffs.extend(self.operations_by_key.iter().filter_map(|(_, ops)| {
+ ops.active_op()
+ .and_then(|op| map_op_to_assign_diff(&op.operation, &self.map_type))
+ .map(|action| Diff {
+ action,
+ conflicts: ops.conflicts(),
+ })
+ }));
+ diffs
+ }
+
+ fn handle_assign_op(
+ &mut self,
+ op_with_metadata: OperationWithMetadata,
+ actor_states: &ActorStates,
+ key: &Key,
+ ) -> Result<(Option<Diff>, Vec<Operation>), AutomergeError> {
+ //log!("NEW OP {:?}",op_with_metadata);
+ let (undo_ops, ops) = {
+ let mutable_ops = self
+ .operations_by_key
+ .entry(key.clone())
+ .or_insert_with(ConcurrentOperations::new);
+ let undo_ops = mutable_ops.incorporate_new_op(op_with_metadata, actor_states)?;
+ (undo_ops, mutable_ops.clone())
+ };
+ //log!("OPS {:?}",ops);
+ Ok((
+ Some(
+ ops.active_op()
+ .map(|op| {
+ let action = match &op.operation {
+ Operation::Set {
+ object_id,
+ key,
+ value,
+ datatype,
+ } => DiffAction::SetMapKey(
+ object_id.clone(),
+ self.map_type.clone(),
+ key.clone(),
+ ElementValue::Primitive(value.clone()),
+ datatype.clone(),
+ ),
+ Operation::Link {
+ object_id,
+ key,
+ value,
+ } => DiffAction::SetMapKey(
+ object_id.clone(),
+ self.map_type.clone(),
+ key.clone(),
+ ElementValue::Link(value.clone()),
+ None,
+ ),
+ _ => panic!("Should not happen for objects"),
+ };
+ Diff {
+ action,
+ conflicts: ops.conflicts(),
+ }
+ })
+ .unwrap_or_else(|| Diff {
+ action: DiffAction::RemoveMapKey(
+ self.object_id.clone(),
+ self.map_type.clone(),
+ key.clone(),
+ ),
+ conflicts: ops.conflicts(),
+ }),
+ ),
+ undo_ops,
+ ))
+ }
+}
+
+/// The ObjectStore is responsible for storing the concurrent operations seen
+/// for each object ID and for the logic of incorporating a new operation.
+#[derive(Debug, Clone, PartialEq)]
+pub struct ObjectStore {
+ operations_by_object_id: HashMap<ObjectID, ObjectState>,
+}
+
+impl ObjectStore {
+ pub(crate) fn new() -> ObjectStore {
+ let root = ObjectState::new_map(MapType::Map, ObjectID::Root);
+ let mut ops_by_id = HashMap::new();
+ ops_by_id.insert(ObjectID::Root, root);
+ ObjectStore {
+ operations_by_object_id: ops_by_id,
+ }
+ }
+
+ pub fn state_for_object_id(&self, object_id: &ObjectID) -> Option<&ObjectState> {
+ self.operations_by_object_id.get(object_id)
+ }
+
+ pub fn generate_diffs(&self) -> Vec<Diff> {
+ let mut diffs = Vec::new();
+ let mut seen = HashSet::new();
+ let mut next = vec![ObjectID::Root];
+
+ while !next.is_empty() {
+ let oid = next.pop().unwrap();
+ if let Some(object_state) = self.operations_by_object_id.get(&oid) {
+ let new_diffs = object_state.generate_diffs();
+ for diff in new_diffs.iter() {
+ for link in diff.links() {
+ if !seen.contains(&link) {
+ next.push(link)
+ }
+ }
+ }
+ diffs.push(new_diffs);
+ seen.insert(oid);
+ }
+ }
+
+ diffs.iter().rev().flatten().cloned().collect()
+ }
+
+ /// Get the ConcurrentOperations instance corresponding to a key in an
+ /// object. If the object is a list this function will attempt to convert
+ /// the key into an element ID
+ pub fn concurrent_operations_for_field(
+ &self,
+ object_id: &ObjectID,
+ key: &Key,
+ ) -> Option<ConcurrentOperations> {
+ self.operations_by_object_id
+ .get(object_id)
+ .and_then(|state| match state {
+ ObjectState::Map(mapstate) => mapstate.operations_by_key.get(&key),
+ ObjectState::List(liststate) => key
+ .as_element_id()
+ .ok()
+ .and_then(|elem_id| liststate.operations_by_elemid.get(&elem_id)),
+ })
+ .cloned()
+ }
+
+ /// Incorporates a new operation into the object store. The caller is
+ /// responsible for ensuring that all causal dependencies of the new
+ /// operation have already been applied.
+ ///
+ /// The return value is a tuple of a diff to send to the frontend, and
+ /// a (possibly empty) vector of operations which will undo the operation
+ /// later.
+ pub fn apply_operation(
+ &mut self,
+ actor_states: &ActorStates,
+ op_with_metadata: OperationWithMetadata,
+ ) -> Result<(Option<Diff>, Vec<Operation>), AutomergeError> {
+ let (diff, undo_ops) = match op_with_metadata.operation {
+ Operation::MakeMap { object_id } => {
+ let object = ObjectState::new_map(MapType::Map, object_id.clone());
+ self.operations_by_object_id
+ .insert(object_id.clone(), object);
+ (
+ Some(Diff {
+ action: DiffAction::CreateMap(object_id, MapType::Map),
+ conflicts: Vec::new(),
+ }),
+ Vec::new(),
+ )
+ }
+ Operation::MakeTable { object_id } => {
+ let object = ObjectState::new_map(MapType::Table, object_id.clone());
+ self.operations_by_object_id
+ .insert(object_id.clone(), object);
+ (
+ Some(Diff {
+ action: DiffAction::CreateMap(object_id, MapType::Table),
+ conflicts: Vec::new(),
+ }),
+ Vec::new(),
+ )
+ }
+ Operation::MakeList { object_id } => {
+ let object = ObjectState::new_sequence(SequenceType::List, object_id.clone());
+ self.operations_by_object_id
+ .insert(object_id.clone(), object);
+ (
+ Some(Diff {
+ action: DiffAction::CreateList(object_id, SequenceType::List),
+ conflicts: Vec::new(),
+ }),
+ Vec::new(),
+ )
+ }
+ Operation::MakeText { object_id } => {
+ let object = ObjectState::new_sequence(SequenceType::Text, object_id.clone());
+ self.operations_by_object_id
+ .insert(object_id.clone(), object);
+ (
+ Some(Diff {
+ action: DiffAction::CreateList(object_id, SequenceType::Text),
+ conflicts: Vec::new(),
+ }),
+ Vec::new(),
+ )
+ }
+ Operation::Link {
+ ref object_id,
+ ref key,
+ ..
+ }
+ | Operation::Set {
+ ref object_id,
+ ref key,
+ ..
+ }
+ | Operation::Delete {
+ ref object_id,
+ ref key,
+ }
+ | Operation::Increment {
+ ref object_id,
+ ref key,
+ ..
+ } => {
+ let object = self
+ .operations_by_object_id
+ .get_mut(&object_id)
+ .ok_or_else(|| AutomergeError::MissingObjectError(object_id.clone()))?;
+ object.handle_assign_op(op_with_metadata.clone(), actor_states, key)?
+ }
+ Operation::Insert {
+ ref list_id,
+ ref key,
+ ref elem,
+ } => {
+ let list = self
+ .operations_by_object_id
+ .get_mut(&list_id)
+ .ok_or_else(|| AutomergeError::MissingObjectError(list_id.clone()))?;
+ match list {
+ ObjectState::Map { .. } => {
+ return Err(AutomergeError::InvalidChange(format!(
+ "Insert operation received for object key (object ID: {:?}, key: {:?}",
+ list_id, key
+ )))
+ }
+ ObjectState::List(liststate) => (
+ Some(liststate.add_insertion(&op_with_metadata.actor_id, key, *elem)?),
+ Vec::new(),
+ ),
+ }
+ }
+ };
+ Ok((diff, undo_ops))
+ }
+}
+
+fn map_op_to_assign_diff(op: &Operation, map_type: &MapType) -> Option<DiffAction> {
+ match op {
+ Operation::Set {
+ object_id,
+ key,
+ value,
+ datatype,
+ } => Some(DiffAction::SetMapKey(
+ object_id.clone(),
+ map_type.clone(),
+ key.clone(),
+ ElementValue::Primitive(value.clone()),
+ datatype.clone(),
+ )),
+ Operation::Link {
+ object_id,
+ key,
+ value,
+ } => Some(DiffAction::SetMapKey(
+ object_id.clone(),
+ map_type.clone(),
+ key.clone(),
+ ElementValue::Link(value.clone()),
+ None,
+ )),
+ _ => None,
+ }
+}
+
+fn list_op_to_assign_diff(
+ op: &Operation,
+ sequence_type: &SequenceType,
+ after: u32,
+) -> Option<DiffAction> {
+ match op {
+ Operation::Set {
+ ref object_id,
+ ref key,
+ ref value,
+ ref datatype,
+ ..
+ } => key
+ .as_element_id()
+ .map(|eid| {
+ DiffAction::InsertSequenceElement(
+ object_id.clone(),
+ sequence_type.clone(),
+ after,
+ ElementValue::Primitive(value.clone()),
+ datatype.clone(),
+ eid,
+ )
+ })
+ .ok(),
+ Operation::Link {
+ value,
+ object_id,
+ key,
+ ..
+ } => key
+ .as_element_id()
+ .map(|eid| {
+ DiffAction::InsertSequenceElement(
+ object_id.clone(),
+ sequence_type.clone(),
+ after,
+ ElementValue::Link(value.clone()),
+ None,
+ eid,
+ )
+ })
+ .ok(),
+ _ => None,
+ }
+}
diff --git a/automerge-backend/src/op_set.rs b/automerge-backend/src/op_set.rs
new file mode 100644
index 00000000..66f5fdd8
--- /dev/null
+++ b/automerge-backend/src/op_set.rs
@@ -0,0 +1,340 @@
+//! The OpSet is where most of the interesting work is done in this library.
+//! It maintains a mapping from each object ID to a set of concurrent
+//! operations which have been seen for that object ID.
+//!
+//! When the client requests the value of the CRDT (via
+//! document::state) the implementation fetches the root object ID's history
+//! and then recursively walks through the tree of histories constructing the
+//! state. Obviously this is not very efficient.
+use crate::actor_states::ActorStates;
+use crate::concurrent_operations::ConcurrentOperations;
+use crate::error::AutomergeError;
+use crate::object_store::ObjectStore;
+use crate::operation_with_metadata::OperationWithMetadata;
+use crate::protocol::{Change, Clock, ElementID, ObjectID, Operation};
+use crate::{ActorID, Diff, DiffAction};
+use std::collections::HashMap;
+use std::collections::HashSet;
+use std::hash::BuildHasher;
+
+/// The OpSet manages an ObjectStore, and a queue of incoming changes in order
+/// to ensure that operations are delivered to the object store in causal order
+///
+/// Whenever a new change is received we iterate through any causally ready
+/// changes in the queue and apply them to the object store, then repeat until
+/// there are no causally ready changes left. The end result of this is that
+/// the object store will contain sets of concurrent operations for each object
+/// ID or element ID.
+///
+/// When we want to get the state of the CRDT we walk through the
+/// object store, starting with the root object ID and constructing the value
+/// at each node by examining the concurrent operations which are active for
+/// that node.
+#[derive(Debug, PartialEq, Clone)]
+pub struct OpSet {
+ pub object_store: ObjectStore,
+ queue: Vec<Change>,
+ pub clock: Clock,
+ undo_pos: usize,
+ pub undo_stack: Vec<Vec<Operation>>,
+ pub redo_stack: Vec<Vec<Operation>>,
+ pub states: ActorStates,
+}
+
+impl OpSet {
+ pub fn init() -> OpSet {
+ OpSet {
+ object_store: ObjectStore::new(),
+ queue: Vec::new(),
+ clock: Clock::empty(),
+ undo_pos: 0,
+ undo_stack: Vec::new(),
+ redo_stack: Vec::new(),
+ states: ActorStates::new(),
+ }
+ }
+
+ pub fn do_redo(
+ &mut self,
+ actor_id: ActorID,
+ seq: u32,
+ message: Option<String>,
+ dependencies: Clock,
+ ) -> Result<Vec<Diff>, AutomergeError> {
+ if let Some(redo_ops) = self.redo_stack.pop() {
+ let change = Change {
+ actor_id,
+ seq,
+ message,
+ dependencies,
+ operations: redo_ops,
+ };
+ self.undo_pos += 1;
+ self.apply_change(change, false)
+ } else {
+ Err(AutomergeError::InvalidChange("no redo ops".to_string()))
+ }
+ }
+
+ pub fn do_undo(
+ &mut self,
+ actor_id: ActorID,
+ seq: u32,
+ message: Option<String>,
+ dependencies: Clock,
+ ) -> Result<Vec<Diff>, AutomergeError> {
+ if let Some(undo_ops) = self.undo_stack.get(self.undo_pos - 1) {
+ let redo_ops = undo_ops
+ .iter()
+ .filter_map(|op| match &op {
+ Operation::Increment {
+ object_id: oid,
+ key,
+ value,
+ } => Some(vec![Operation::Increment {
+ object_id: oid.clone(),
+ key: key.clone(),
+ value: -value,
+ }]),
+ Operation::Set { object_id, key, .. }
+ | Operation::Link { object_id, key, .. }
+ | Operation::Delete { object_id, key } => self
+ .object_store
+ .concurrent_operations_for_field(object_id, key)
+ .map(|cops| {
+ if cops.active_op().is_some() {
+ cops.pure_operations()
+ } else {
+ vec![Operation::Delete {
+ object_id: object_id.clone(),
+ key: key.clone(),
+ }]
+ }
+ }),
+ _ => None,
+ })
+ .flatten()
+ .collect();
+ self.redo_stack.push(redo_ops);
+ let change = Change {
+ actor_id,
+ seq,
+ message,
+ dependencies,
+ operations: undo_ops.clone(),
+ };
+ self.undo_pos -= 1;
+ self.apply_change(change, false)
+ } else {
+ Err(AutomergeError::InvalidChange(
+ "No undo ops to execute".to_string(),
+ ))
+ }
+ }
+
+ /// Adds a change to the internal queue of operations, then iteratively
+ /// applies all causally ready changes until there are none remaining
+ ///
+ /// If `make_undoable` is true, the op set will store a set of operations
+ /// which can be used to undo this change.
+ pub fn apply_change(
+ &mut self,
+ change: Change,
+ make_undoable: bool,
+ ) -> Result<Vec<Diff>, AutomergeError> {
+ self.queue.push(change);
+ let diffs = self.apply_causally_ready_changes(make_undoable)?;
+ Ok(diffs)
+ }
+
+ fn apply_causally_ready_changes(
+ &mut self,
+ make_undoable: bool,
+ ) -> Result<Vec<Diff>, AutomergeError> {
+ let mut diffs = Vec::new();
+ while let Some(next_change) = self.pop_next_causally_ready_change() {
+ let change_diffs = self.apply_causally_ready_change(next_change, make_undoable)?;
+ diffs.extend(change_diffs);
+ }
+ Ok(diffs)
+ }
+
+ fn pop_next_causally_ready_change(&mut self) -> Option<Change> {
+ let mut index = 0;
+ while index < self.queue.len() {
+ let change = self.queue.get(index).unwrap();
+ let deps = change.dependencies.with(&change.actor_id, change.seq - 1);
+ if deps <= self.clock {
+ return Some(self.queue.remove(index));
+ }
+ index += 1
+ }
+ None
+ }
+
+ fn apply_causally_ready_change(
+ &mut self,
+ change: Change,
+ make_undoable: bool,
+ ) -> Result<Vec<Diff>, AutomergeError> {
+ // This method is a little more complicated than it intuitively should
+ // be due to the bookkeeping required for undo. If we're asked to make
+ // this operation undoable we have to store the undo operations for
+ // each operation and then add them to the undo stack at the end of the
+ // method. However, it's unnecessary to store undo operations for
+ // objects which are created by this change (e.g if there's an insert
+ // operation for a list which was created in this operation we only
+ // need the undo operation for the creation of the list to achieve
+ // the undo), so we track newly created objects and only store undo
+ // operations which don't operate on them.
+ let actor_id = change.actor_id.clone();
+ let seq = change.seq;
+ let operations = change.operations.clone();
+
+ if !self.states.add_change(change)? {
+ return Ok(Vec::new()); // its a duplicate - ignore
+ }
+
+ let mut diffs = Vec::new();
+ let mut undo_operations = Vec::new();
+ let mut new_object_ids: HashSet<ObjectID> = HashSet::new();
+ for operation in operations {
+ // Store newly created object IDs so we can decide whether we need
+ // undo ops later
+ match &operation {
+ Operation::MakeMap { object_id }
+ | Operation::MakeList { object_id }
+ | Operation::MakeText { object_id }
+ | Operation::MakeTable { object_id } => {
+ new_object_ids.insert(object_id.clone());
+ }
+ _ => {}
+ }
+ let op_with_metadata = OperationWithMetadata {
+ sequence: seq,
+ actor_id: actor_id.clone(),
+ operation: operation.clone(),
+ };
+ let (diff, undo_ops_for_this_op) = self
+ .object_store
+ .apply_operation(&self.states, op_with_metadata)?;
+
+ // If this object is not created in this change then we need to
+ // store the undo ops for it (if we're storing undo ops at all)
+ if make_undoable && !(new_object_ids.contains(operation.object_id())) {
+ undo_operations.extend(undo_ops_for_this_op);
+ }
+ if let Some(d) = diff {
+ diffs.push(d)
+ }
+ }
+ self.clock = self.clock.with(&actor_id, seq);
+ if make_undoable {
+ let (new_undo_stack_slice, _) = self.undo_stack.split_at(self.undo_pos);
+ let mut new_undo_stack: Vec<Vec<Operation>> = new_undo_stack_slice.to_vec();
+ new_undo_stack.push(undo_operations);
+ self.undo_stack = new_undo_stack;
+ self.undo_pos += 1;
+ };
+ Ok(Self::simplify_diffs(diffs))
+ }
+
+ /// Remove any redundant diffs
+ fn simplify_diffs(diffs: Vec<Diff>) -> Vec<Diff> {
+ let mut result = Vec::new();
+ let mut known_maxelems: HashMap<ObjectID, u32> = HashMap::new();
+
+ for diff in diffs.into_iter().rev() {
+ if let DiffAction::MaxElem(ref oid, max_elem, _) = diff.action {
+ let current_max = known_maxelems.get(oid).unwrap_or(&0);
+ if *current_max < max_elem {
+ known_maxelems.insert(oid.clone(), max_elem);
+ result.push(diff);
+ }
+ } else if let DiffAction::InsertSequenceElement(
+ ref oid,
+ _,
+ _,
+ _,
+ _,
+ ElementID::SpecificElementID(_, max_elem),
+ ) = diff.action
+ {
+ let current_max = known_maxelems.get(oid).unwrap_or(&0);
+ if *current_max < max_elem {
+ known_maxelems.insert(oid.clone(), max_elem);
+ }
+ result.push(diff);
+ } else {
+ result.push(diff);
+ }
+ }
+
+ result.reverse();
+ result
+ }
+
+ pub fn can_undo(&self) -> bool {
+ self.undo_pos > 0
+ }
+
+ pub fn can_redo(&self) -> bool {
+ !self.redo_stack.is_empty()
+ }
+
+ /// Get all the changes we have that are not in `since`
+ pub fn get_missing_changes(&self, since: &Clock) -> Vec<&Change> {
+ self.states
+ .history
+ .iter()
+ .map(|rc| rc.as_ref())
+ .filter(|change| change.seq > since.get(&change.actor_id))
+ .collect()
+ }
+
+ pub fn get_missing_deps(&self) -> Clock {
+ // TODO: there's a lot of internal copying going on in here for something kinda simple
+ self.queue.iter().fold(Clock::empty(), |clock, change| {
+ clock
+ .union(&change.dependencies)
+ .with(&change.actor_id, change.seq - 1)
+ })
+ }
+}
+
+pub fn list_ops_in_order<'a, S: BuildHasher>(
+ operations_by_elemid: &'a HashMap<ElementID, ConcurrentOperations, S>,
+ following: &HashMap<ElementID, Vec<ElementID>, S>,
+) -> Result<Vec<(ElementID, &'a ConcurrentOperations)>, AutomergeError> {
+ // First we construct a vector of operations to process in order based
+ // on the insertion orders of the operations we've received
+ let mut ops_in_order: Vec<(ElementID, &ConcurrentOperations)> = Vec::new();
+ // start with everything that was inserted after _head
+ let mut to_process: Vec<ElementID> = following
+ .get(&ElementID::Head)
+ .map(|heads| {
+ let mut sorted = heads.to_vec();
+ sorted.sort();
+ sorted
+ })
+ .unwrap_or_else(Vec::new);
+
+ // for each element ID, add the operation to the ops_in_order list,
+ // then find all the following element IDs, sort them and add them to
+ // the list of element IDs still to process.
+ while let Some(next_element_id) = to_process.pop() {
+ let ops = operations_by_elemid.get(&next_element_id).ok_or_else(|| {
+ AutomergeError::InvalidChange(format!(
+ "Missing element ID {:?} when interpreting list ops",
+ next_element_id
+ ))
+ })?;
+ ops_in_order.push((next_element_id.clone(), ops));
+ if let Some(followers) = following.get(&next_element_id) {
+ let mut sorted = followers.to_vec();
+ sorted.sort();
+ to_process.extend(sorted);
+ }
+ }
+ Ok(ops_in_order)
+}
diff --git a/automerge-backend/src/operation_with_metadata.rs b/automerge-backend/src/operation_with_metadata.rs
new file mode 100644
index 00000000..c9861a8d
--- /dev/null
+++ b/automerge-backend/src/operation_with_metadata.rs
@@ -0,0 +1,25 @@
+use crate::protocol::{ActorID, Operation};
+use std::cmp::{Ordering, PartialOrd};
+
+/// We deserialize individual operations as part of the `Change` structure, but
+/// we need access to the actor ID and sequence when applying each individual
+/// operation, so we copy the operation, actor ID, and sequence into this
+/// struct.
+#[derive(PartialEq, Debug, Clone)]
+pub struct OperationWithMetadata {
+ pub sequence: u32,
+ pub actor_id: ActorID,
+ pub operation: Operation,
+}
+
+/// Note, we can't implement Ord because the Operation contains floating point
+/// elements
+impl PartialOrd for OperationWithMetadata {
+ fn partial_cmp(&self, other: &OperationWithMetadata) -> Option {
+ if self.actor_id == other.actor_id {
+ Some(self.sequence.cmp(&other.sequence))
+ } else {
+ Some(self.actor_id.cmp(&other.actor_id))
+ }
+ }
+}
diff --git a/automerge-backend/src/patch.rs b/automerge-backend/src/patch.rs
new file mode 100644
index 00000000..7a43e836
--- /dev/null
+++ b/automerge-backend/src/patch.rs
@@ -0,0 +1,119 @@
+use crate::{ActorID, Clock, DataType, ElementID, Key, ObjectID, PrimitiveValue};
+use serde::Serialize;
+
+#[derive(Debug, PartialEq, Clone)]
+pub enum ElementValue {
+ Primitive(PrimitiveValue),
+ Link(ObjectID),
+}
+
+impl ElementValue {
+ pub fn object_id(&self) -> Option {
+ match self {
+ ElementValue::Link(object_id) => Some(object_id.clone()),
+ _ => None,
+ }
+ }
+}
+
+#[derive(Debug, PartialEq, Clone, Serialize)]
+pub enum SequenceType {
+ #[serde(rename = "list")]
+ List,
+ #[serde(rename = "text")]
+ Text,
+}
+
+#[derive(Debug, PartialEq, Clone, Serialize)]
+pub enum MapType {
+ #[serde(rename = "map")]
+ Map,
+ #[serde(rename = "table")]
+ Table,
+}
+
+#[derive(Debug, Clone, PartialEq)]
+pub enum DiffAction {
+ CreateMap(ObjectID, MapType),
+ CreateList(ObjectID, SequenceType),
+ MaxElem(ObjectID, u32, SequenceType),
+ RemoveMapKey(ObjectID, MapType, Key),
+ SetMapKey(ObjectID, MapType, Key, ElementValue, Option<DataType>),
+ RemoveSequenceElement(ObjectID, SequenceType, u32),
+ InsertSequenceElement(
+ ObjectID,
+ SequenceType,
+ u32,
+ ElementValue,
+ Option<DataType>,
+ ElementID,
+ ),
+ SetSequenceElement(ObjectID, SequenceType, u32, ElementValue, Option<DataType>),
+}
+
+impl DiffAction {
+ fn value(&self) -> Option<ElementValue> {
+ match self {
+ DiffAction::SetMapKey(_, _, _, value, _)
+ | DiffAction::InsertSequenceElement(_, _, _, value, _, _)
+ | DiffAction::SetSequenceElement(_, _, _, value, _) => Some(value.clone()),
+ _ => None,
+ }
+ }
+}
+
+#[derive(Debug, PartialEq, Clone)]
+pub struct Conflict {
+ pub actor: ActorID,
+ pub value: ElementValue,
+ pub datatype: Option<DataType>,
+}
+
+#[derive(Debug, PartialEq, Clone)]
+pub struct Diff {
+ pub action: DiffAction,
+ pub conflicts: Vec<Conflict>,
+}
+
+impl Diff {
+ pub fn links(&self) -> Vec {
+ let mut oids = Vec::new();
+ if let Some(oid) = self.action.value().and_then(|v| v.object_id()) {
+ oids.push(oid)
+ }
+ for c in self.conflicts.iter() {
+ if let Some(oid) = c.value.object_id() {
+ oids.push(oid)
+ }
+ }
+ oids
+ }
+}
+
+#[derive(Serialize, Debug, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub struct Patch {
+ #[serde(skip_serializing_if = "Option::is_none", default)]
+ pub actor: Option<ActorID>,
+ pub can_undo: bool,
+ pub can_redo: bool,
+ pub clock: Clock,
+ pub deps: Clock,
+ pub diffs: Vec<Diff>,
+ #[serde(skip_serializing_if = "Option::is_none", default)]
+ pub seq: Option<u32>,
+}
+
+impl Patch {
+ pub fn empty() -> Patch {
+ Patch {
+ actor: None,
+ can_undo: false,
+ can_redo: false,
+ clock: Clock::empty(),
+ deps: Clock::empty(),
+ diffs: Vec::new(),
+ seq: None,
+ }
+ }
+}
diff --git a/automerge-backend/src/patch_serialization.rs b/automerge-backend/src/patch_serialization.rs
new file mode 100644
index 00000000..784df7a3
--- /dev/null
+++ b/automerge-backend/src/patch_serialization.rs
@@ -0,0 +1,933 @@
+use crate::{
+ ActorID, Conflict, DataType, Diff, DiffAction, ElementID, ElementValue, Key, MapType, ObjectID,
+ PrimitiveValue, SequenceType,
+};
+use serde::de::{Error, MapAccess, Unexpected, Visitor};
+use serde::ser::SerializeMap;
+use serde::{Deserialize, Deserializer, Serialize, Serializer};
+use std::fmt;
+
+impl Serialize for Conflict {
+ fn serialize(&self, serializer: S) -> Result
+ where
+ S: Serializer,
+ {
+ let mut map_serializer = serializer.serialize_map(None)?;
+ map_serializer.serialize_entry("actor", &self.actor)?;
+ match &self.datatype {
+ Some(d) => map_serializer.serialize_entry("datatype", &d)?,
+ None => {}
+ };
+ match &self.value {
+ ElementValue::Primitive(v) => map_serializer.serialize_entry("value", &v)?,
+ ElementValue::Link(oid) => {
+ map_serializer.serialize_entry("value", &oid)?;
+ map_serializer.serialize_entry("link", &true)?;
+ }
+ };
+ map_serializer.end()
+ }
+}
+
+impl<'de> Deserialize<'de> for Conflict {
+ fn deserialize(deserializer: D) -> Result
+ where
+ D: Deserializer<'de>,
+ {
+ const FIELDS: &[&str] = &["actor", "value", "datatype", "link"];
+ struct ConflictVisitor;
+ impl<'de> Visitor<'de> for ConflictVisitor {
+ type Value = Conflict;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("A conflict object")
+ }
+
+ fn visit_map(self, mut map: V) -> Result
+ where
+ V: MapAccess<'de>,
+ {
+ let mut actor: Option = None;
+ let mut value_raw: Option = None;
+ let mut datatype: Option = None;
+ let mut link: Option = None;
+
+ while let Some(key) = map.next_key::()? {
+ match key.as_ref() {
+ "actor" => {
+ if actor.is_some() {
+ return Err(Error::duplicate_field("actor"));
+ }
+ actor = Some(map.next_value()?);
+ }
+ "datatype" => {
+ if datatype.is_some() {
+ return Err(Error::duplicate_field("datatype"));
+ }
+ datatype = Some(map.next_value()?);
+ }
+ "value" => {
+ if value_raw.is_some() {
+ return Err(Error::duplicate_field("value"));
+ }
+ value_raw = Some(map.next_value()?);
+ }
+ "link" => {
+ if link.is_some() {
+ return Err(Error::duplicate_field("link"));
+ }
+ link = Some(map.next_value()?);
+ }
+ _ => return Err(Error::unknown_field(&key, FIELDS)),
+ }
+ }
+
+ let actor = actor.ok_or_else(|| Error::missing_field("actor"))?;
+ let value_raw = value_raw.ok_or_else(|| Error::missing_field("value"))?;
+ let is_link = link.unwrap_or(false);
+ let value = match (is_link, value_raw) {
+ (true, PrimitiveValue::Str(s)) => {
+ let oid = match s.as_ref() {
+ "00000000-0000-0000-0000-000000000000" => ObjectID::Root,
+ id => ObjectID::ID(id.to_string()),
+ };
+ ElementValue::Link(oid)
+ }
+ (false, v) => ElementValue::Primitive(v),
+ _ => return Err(Error::custom(
+ "Received a conflict with `link` set to true but no string in 'value' key",
+ )),
+ };
+ Ok(Conflict {
+ actor,
+ value,
+ datatype,
+ })
+ }
+ }
+ deserializer.deserialize_struct("Conflict", FIELDS, ConflictVisitor)
+ }
+}
+
+impl Serialize for Diff {
+ fn serialize(&self, serializer: S) -> Result
+ where
+ S: Serializer,
+ {
+ let mut map_serializer = serializer.serialize_map(None)?;
+ if !self.conflicts.is_empty() {
+ map_serializer.serialize_entry("conflicts", &self.conflicts)?;
+ }
+ match &self.action {
+ DiffAction::CreateMap(oid, map_type) => {
+ map_serializer.serialize_entry("action", "create")?;
+ map_serializer.serialize_entry("obj", &oid)?;
+ map_serializer.serialize_entry("type", &map_type)?;
+ }
+ DiffAction::CreateList(oid, seq_type) => {
+ map_serializer.serialize_entry("action", "create")?;
+ map_serializer.serialize_entry("obj", &oid)?;
+ map_serializer.serialize_entry("type", &seq_type)?;
+ }
+ DiffAction::MaxElem(oid, max, seq_type) => {
+ map_serializer.serialize_entry("action", "maxElem")?;
+ map_serializer.serialize_entry("obj", &oid)?;
+ map_serializer.serialize_entry("value", &max)?;
+ map_serializer.serialize_entry("type", &seq_type)?;
+ }
+ DiffAction::RemoveMapKey(oid, map_type, key) => {
+ map_serializer.serialize_entry("action", "remove")?;
+ map_serializer.serialize_entry("type", &map_type)?;
+ map_serializer.serialize_entry("obj", &oid)?;
+ map_serializer.serialize_entry("key", &key)?;
+ }
+ DiffAction::SetMapKey(oid, map_type, key, value, datatype) => {
+ map_serializer.serialize_entry("action", "set")?;
+ map_serializer.serialize_entry("type", &map_type)?;
+ map_serializer.serialize_entry("obj", &oid)?;
+ map_serializer.serialize_entry("key", &key)?;
+ match datatype {
+ Some(dtype) => map_serializer.serialize_entry("datatype", &dtype)?,
+ None => {}
+ };
+ match value {
+ ElementValue::Primitive(v) => map_serializer.serialize_entry("value", &v)?,
+ ElementValue::Link(linked_oid) => {
+ map_serializer.serialize_entry("link", &true)?;
+ map_serializer.serialize_entry("value", &linked_oid)?;
+ }
+ };
+ }
+ DiffAction::RemoveSequenceElement(oid, seq_type, index) => {
+ map_serializer.serialize_entry("action", "remove")?;
+ map_serializer.serialize_entry("type", &seq_type)?;
+ map_serializer.serialize_entry("obj", &oid)?;
+ map_serializer.serialize_entry("index", &index)?;
+ }
+ DiffAction::InsertSequenceElement(
+ oid,
+ seq_type,
+ index,
+ value,
+ datatype,
+ element_id,
+ ) => {
+ map_serializer.serialize_entry("action", "insert")?;
+ map_serializer.serialize_entry("type", &seq_type)?;
+ map_serializer.serialize_entry("obj", &oid)?;
+ map_serializer.serialize_entry("index", &index)?;
+ map_serializer.serialize_entry("elemId", &element_id)?;
+ match value {
+ ElementValue::Primitive(v) => map_serializer.serialize_entry("value", &v)?,
+ ElementValue::Link(linked_oid) => {
+ map_serializer.serialize_entry("link", &true)?;
+ map_serializer.serialize_entry("value", &linked_oid)?;
+ }
+ };
+ match datatype {
+ Some(d) => map_serializer.serialize_entry("datatype", &d)?,
+ None => {}
+ };
+ }
+ DiffAction::SetSequenceElement(oid, seq_type, index, value, datatype) => {
+ map_serializer.serialize_entry("action", "set")?;
+ map_serializer.serialize_entry("type", &seq_type)?;
+ map_serializer.serialize_entry("obj", &oid)?;
+ map_serializer.serialize_entry("index", &index)?;
+ match value {
+ ElementValue::Primitive(v) => map_serializer.serialize_entry("value", &v)?,
+ ElementValue::Link(linked_oid) => {
+ map_serializer.serialize_entry("link", &true)?;
+ map_serializer.serialize_entry("value", &linked_oid)?;
+ }
+ };
+ match datatype {
+ Some(d) => map_serializer.serialize_entry("datatype", &d)?,
+ None => {}
+ };
+ }
+ }
+ map_serializer.end()
+ }
+}
+
+impl<'de> Deserialize<'de> for Diff {
+ fn deserialize(deserializer: D) -> Result
+ where
+ D: Deserializer<'de>,
+ {
+ const FIELDS: &[&str] = &["actor", "value", "datatype", "link"];
+ struct DiffVisitor;
+ impl<'de> Visitor<'de> for DiffVisitor {
+ type Value = Diff;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("A diff object")
+ }
+
+ fn visit_map(self, mut map: V) -> Result
+ where
+ V: MapAccess<'de>,
+ {
+ let mut object_id: Option = None;
+ let mut type_str: Option = None;
+ let mut seq: Option = None;
+ let mut action: Option = None;
+ let mut key: Option = None;
+ let mut value: Option = None;
+ let mut datatype: Option = None;
+ let mut conflicts: Option> = None;
+ let mut index: Option = None;
+ let mut is_link: Option = None;
+ let mut elem_id: Option = None;
+
+ while let Some(map_key) = map.next_key::()? {
+ match map_key.as_ref() {
+ "obj" => {
+ if object_id.is_some() {
+ return Err(Error::duplicate_field("obj"));
+ }
+ object_id = Some(map.next_value()?);
+ }
+ "type" => {
+ if type_str.is_some() {
+ return Err(Error::duplicate_field("type"));
+ }
+ type_str = Some(map.next_value()?);
+ }
+ "seq" => {
+ if seq.is_some() {
+ return Err(Error::duplicate_field("seq"));
+ }
+ seq = Some(map.next_value()?);
+ }
+ "action" => {
+ if action.is_some() {
+ return Err(Error::duplicate_field("action"));
+ }
+ action = Some(map.next_value()?);
+ }
+ "key" => {
+ if key.is_some() {
+ return Err(Error::duplicate_field("key"));
+ }
+ key = Some(map.next_value()?);
+ }
+ "value" => {
+ if value.is_some() {
+ return Err(Error::duplicate_field("value"));
+ }
+ value = Some(map.next_value()?);
+ }
+ "datatype" => {
+ if datatype.is_some() {
+ return Err(Error::duplicate_field("datatype"));
+ }
+ datatype = Some(map.next_value()?);
+ }
+ "conflicts" => {
+ if conflicts.is_some() {
+ return Err(Error::duplicate_field("conflicts"));
+ }
+ conflicts = Some(map.next_value()?);
+ }
+ "index" => {
+ if index.is_some() {
+ return Err(Error::duplicate_field("index"));
+ }
+ index = Some(map.next_value()?);
+ }
+ "link" => {
+ if is_link.is_some() {
+ return Err(Error::duplicate_field("link"));
+ }
+ is_link = Some(map.next_value()?);
+ }
+ "elemId" => {
+ if elem_id.is_some() {
+ return Err(Error::duplicate_field("elemId"));
+ }
+ elem_id = Some(map.next_value()?);
+ }
+ _ => return Err(Error::unknown_field(&map_key, FIELDS)),
+ }
+ }
+
+ let is_link = is_link.unwrap_or(false);
+ let value =
+ match (is_link, value) {
+ (true, Some(PrimitiveValue::Str(s))) => {
+ let oid = match s.as_ref() {
+ "00000000-0000-0000-0000-000000000000" => ObjectID::Root,
+ id => ObjectID::ID(id.to_string()),
+ };
+ Some(ElementValue::Link(oid))
+ }
+ (false, Some(v)) => Some(ElementValue::Primitive(v)),
+ (_, None) => None,
+ _ => return Err(Error::custom(
+ "Received a diff with `link` set to true but no string in 'value' key",
+ )),
+ };
+
+ let diff_action = match action {
+ Some(action_str) => match action_str.as_ref() {
+ "create" => {
+ let obj_id = object_id.ok_or_else(|| Error::missing_field("obj"))?;
+ let create_type =
+ type_str.ok_or_else(|| Error::missing_field("type"))?;
+ match create_type.as_ref() {
+ "map" => DiffAction::CreateMap(obj_id, MapType::Map),
+ "table" => DiffAction::CreateMap(obj_id, MapType::Table),
+ "list" => DiffAction::CreateList(obj_id, SequenceType::List),
+ "text" => DiffAction::CreateList(obj_id, SequenceType::Text),
+ _ => {
+ return Err(Error::invalid_value(
+ Unexpected::Str(&create_type),
+ &"A valid object type",
+ ))
+ }
+ }
+ }
+ "maxElem" => {
+ let obj_id = object_id.ok_or_else(|| Error::missing_field("obj"))?;
+ let value = value.ok_or_else(|| Error::missing_field("value"))?;
+ let seq_type_str =
+ type_str.ok_or_else(|| Error::missing_field("type"))?;
+ let seq_type = match seq_type_str.as_ref() {
+ "list" => SequenceType::List,
+ "text" => SequenceType::Text,
+ _ => {
+ return Err(Error::invalid_value(
+ Unexpected::Str(&seq_type_str),
+ &"A valid sequence type",
+ ))
+ }
+ };
+ let seq = match value {
+ ElementValue::Primitive(PrimitiveValue::Number(n)) => n as u32,
+ _ => return Err(Error::custom("Invalid value for maxElem.value")),
+ };
+ DiffAction::MaxElem(obj_id, seq, seq_type)
+ }
+ "remove" => {
+ let type_str = type_str.ok_or_else(|| Error::missing_field("type"))?;
+ let obj_id = object_id.ok_or_else(|| Error::missing_field("obj"))?;
+ match key {
+ Some(k) => {
+ let map_type = match type_str.as_ref() {
+ "map" => MapType::Map,
+ "table" => MapType::Table,
+ _ => {
+ return Err(Error::invalid_value(
+ Unexpected::Str(&type_str),
+ &"A valid map type",
+ ))
+ }
+ };
+ DiffAction::RemoveMapKey(obj_id, map_type, k)
+ }
+ None => {
+ let seq_type = match type_str.as_ref() {
+ "list" => SequenceType::List,
+ "text" => SequenceType::Text,
+ _ => {
+ return Err(Error::invalid_value(
+ Unexpected::Str(&type_str),
+ &"A valid sequence type",
+ ))
+ }
+ };
+ let index =
+ index.ok_or_else(|| Error::missing_field("index"))?;
+ DiffAction::RemoveSequenceElement(obj_id, seq_type, index)
+ }
+ }
+ }
+ "set" => {
+ let type_str = type_str.ok_or_else(|| Error::missing_field("type"))?;
+ let obj_id = object_id.ok_or_else(|| Error::missing_field("obj"))?;
+ let value = value.ok_or_else(|| Error::missing_field("value"))?;
+ match key {
+ Some(k) => {
+ let map_type = match type_str.as_ref() {
+ "map" => MapType::Map,
+ "table" => MapType::Table,
+ _ => {
+ return Err(Error::invalid_value(
+ Unexpected::Str(&type_str),
+ &"A valid map type",
+ ))
+ }
+ };
+ DiffAction::SetMapKey(obj_id, map_type, k, value, datatype)
+ }
+ None => {
+ let seq_type = match type_str.as_ref() {
+ "list" => SequenceType::List,
+ "text" => SequenceType::Text,
+ _ => {
+ return Err(Error::invalid_value(
+ Unexpected::Str(&type_str),
+ &"A valid sequence type",
+ ))
+ }
+ };
+ let index =
+ index.ok_or_else(|| Error::missing_field("index"))?;
+ DiffAction::SetSequenceElement(
+ obj_id, seq_type, index, value, datatype,
+ )
+ }
+ }
+ }
+ "insert" => {
+ let obj_id = object_id.ok_or_else(|| Error::missing_field("obj"))?;
+ let type_str = type_str.ok_or_else(|| Error::missing_field("type"))?;
+ let value = value.ok_or_else(|| Error::missing_field("value"))?;
+ let elem_id = elem_id.ok_or_else(|| Error::missing_field("elemId"))?;
+ let seq_type = match type_str.as_ref() {
+ "list" => SequenceType::List,
+ "text" => SequenceType::Text,
+ _ => {
+ return Err(Error::invalid_value(
+ Unexpected::Str(&type_str),
+ &"A valid sequence type",
+ ))
+ }
+ };
+ let index = index.ok_or_else(|| Error::missing_field("index"))?;
+ DiffAction::InsertSequenceElement(
+ obj_id, seq_type, index, value, datatype, elem_id,
+ )
+ }
+ _ => {
+ return Err(Error::invalid_value(
+ Unexpected::Str(&action_str),
+ &"A valid action string",
+ ))
+ }
+ },
+ None => return Err(Error::missing_field("action")),
+ };
+
+ let conflicts = conflicts.unwrap_or_default();
+
+ Ok(Diff {
+ action: diff_action,
+ conflicts,
+ })
+ }
+ }
+ deserializer.deserialize_struct("Conflict", FIELDS, DiffVisitor)
+ }
+}
+
#[cfg(test)]
mod tests {
    use crate::{
        ActorID, Conflict, DataType, Diff, DiffAction, ElementID, ElementValue, Key, MapType,
        ObjectID, PrimitiveValue, SequenceType,
    };
    use serde_json;

    /// Shorthand for a diff with no conflicts.
    fn diff(action: DiffAction) -> Diff {
        Diff {
            action,
            conflicts: Vec::new(),
        }
    }

    /// Asserts that `diff` serializes to exactly the JSON in `json_str`, and
    /// that the serialized form deserializes back to an equal `Diff`.
    // (The old version panicked via `panic!(std::format!(..))` — a non-literal
    // panic message, rejected in the 2021 edition — and reported "Failed to
    // deserialize" on the serialize path.)
    fn assert_diff_round_trip(name: &str, diff: Diff, json_str: &str) {
        let expected_json: serde_json::Value = serde_json::from_str(json_str)
            .unwrap_or_else(|e| panic!("Invalid JSON fixture for {}: {}", name, e));
        let serialized = serde_json::to_value(diff.clone())
            .unwrap_or_else(|e| panic!("Failed to serialize {}: {}", name, e));
        assert_eq!(expected_json, serialized, "TestCase {} did not match", name);
        let deserialized: Diff = serde_json::from_value(serialized)
            .unwrap_or_else(|e| panic!("Failed to deserialize for {}: {}", name, e));
        assert_eq!(diff, deserialized, "TestCase {} failed the round trip", name);
    }

    #[test]
    fn do_tests() {
        assert_diff_round_trip(
            "CreateMap",
            diff(DiffAction::CreateMap(
                ObjectID::ID("1234".to_string()),
                MapType::Map,
            )),
            r#"{"action": "create", "obj": "1234", "type": "map"}"#,
        );
        assert_diff_round_trip(
            "CreateMap (table)",
            diff(DiffAction::CreateMap(
                ObjectID::ID("1234".to_string()),
                MapType::Table,
            )),
            r#"{"action": "create", "obj": "1234", "type": "table"}"#,
        );
        assert_diff_round_trip(
            "CreateList",
            diff(DiffAction::CreateList(
                ObjectID::ID("1234".to_string()),
                SequenceType::List,
            )),
            r#"{"action": "create", "obj": "1234", "type": "list"}"#,
        );
        assert_diff_round_trip(
            "CreateText",
            diff(DiffAction::CreateList(
                ObjectID::ID("1234".to_string()),
                SequenceType::Text,
            )),
            r#"{"action": "create", "obj": "1234", "type": "text"}"#,
        );
        assert_diff_round_trip(
            "MaxElem(list)",
            diff(DiffAction::MaxElem(
                ObjectID::ID("1234".to_string()),
                4,
                SequenceType::List,
            )),
            r#"{"action": "maxElem", "obj": "1234", "type": "list", "value": 4}"#,
        );
        assert_diff_round_trip(
            "MaxElem(text)",
            diff(DiffAction::MaxElem(
                ObjectID::ID("1234".to_string()),
                4,
                SequenceType::Text,
            )),
            r#"{"action": "maxElem", "obj": "1234", "type": "text", "value": 4}"#,
        );
        assert_diff_round_trip(
            "RemoveMapKey(map)",
            diff(DiffAction::RemoveMapKey(
                ObjectID::ID("1234".to_string()),
                MapType::Map,
                Key("key".to_string()),
            )),
            r#"{"action": "remove", "obj": "1234", "type": "map", "key": "key"}"#,
        );
        assert_diff_round_trip(
            "RemoveMapKey(table)",
            diff(DiffAction::RemoveMapKey(
                ObjectID::ID("1234".to_string()),
                MapType::Table,
                Key("key".to_string()),
            )),
            r#"{"action": "remove", "obj": "1234", "type": "table", "key": "key"}"#,
        );
        assert_diff_round_trip(
            "SetMapKey(map)",
            diff(DiffAction::SetMapKey(
                ObjectID::ID("1234".to_string()),
                MapType::Map,
                Key("key".to_string()),
                ElementValue::Link(ObjectID::ID("5678".to_string())),
                None,
            )),
            r#"{"action": "set", "obj": "1234", "type": "map", "key": "key",
                "value": "5678", "link": true}"#,
        );
        assert_diff_round_trip(
            "SetMapKey(table) with link",
            diff(DiffAction::SetMapKey(
                ObjectID::ID("1234".to_string()),
                MapType::Table,
                Key("key".to_string()),
                ElementValue::Link(ObjectID::ID("5678".to_string())),
                Some(DataType::Counter),
            )),
            r#"{"action": "set", "obj": "1234", "type": "table", "key": "key",
                "value": "5678", "link": true, "datatype": "counter"}"#,
        );
        assert_diff_round_trip(
            "RemoveSequenceElement",
            diff(DiffAction::RemoveSequenceElement(
                ObjectID::ID("1234".to_string()),
                SequenceType::List,
                5,
            )),
            r#"{"action": "remove", "obj": "1234", "type": "list", "index": 5}"#,
        );
        assert_diff_round_trip(
            "RemoveSequenceElement(text)",
            diff(DiffAction::RemoveSequenceElement(
                ObjectID::ID("1234".to_string()),
                SequenceType::Text,
                5,
            )),
            r#"{"action": "remove", "obj": "1234", "type": "text", "index": 5}"#,
        );
        assert_diff_round_trip(
            "InsertSequenceElement",
            diff(DiffAction::InsertSequenceElement(
                ObjectID::ID("1234".to_string()),
                SequenceType::List,
                5,
                ElementValue::Primitive(PrimitiveValue::Str("hi".to_string())),
                None,
                ElementID::from_actor_and_elem(ActorID("someactor".to_string()), 1),
            )),
            r#"{"action": "insert", "obj": "1234", "type": "list", "index": 5,
                "value": "hi", "elemId": "someactor:1"}"#,
        );
        assert_diff_round_trip(
            "InsertSequenceElement(text with link and datatype)",
            diff(DiffAction::InsertSequenceElement(
                ObjectID::ID("1234".to_string()),
                SequenceType::Text,
                5,
                ElementValue::Link(ObjectID::ID("5678".to_string())),
                Some(DataType::Timestamp),
                ElementID::from_actor_and_elem(ActorID("someactor".to_string()), 1),
            )),
            r#"{"action": "insert", "obj": "1234", "type": "text", "index": 5,
                "value": "5678", "link": true, "datatype": "timestamp",
                "elemId": "someactor:1"}"#,
        );
        assert_diff_round_trip(
            "SetSequenceElement",
            diff(DiffAction::SetSequenceElement(
                ObjectID::ID("1234".to_string()),
                SequenceType::Text,
                5,
                ElementValue::Link(ObjectID::ID("5678".to_string())),
                None,
            )),
            r#"{"action": "set", "obj": "1234", "type": "text", "index": 5,
                "value": "5678", "link": true}"#,
        );
        assert_diff_round_trip(
            "SetSequenceElement(list with primitive and datatype)",
            diff(DiffAction::SetSequenceElement(
                ObjectID::ID("1234".to_string()),
                SequenceType::List,
                5,
                ElementValue::Primitive(PrimitiveValue::Str("hi".to_string())),
                Some(DataType::Counter),
            )),
            r#"{"action": "set", "obj": "1234", "type": "list", "index": 5,
                "value": "hi", "datatype": "counter"}"#,
        );
    }

    #[test]
    fn test_deserialize_conflict_link() {
        // A `link: true` conflict turns its string value into an object link.
        let json: serde_json::Value =
            serde_json::from_str(r#"{"actor": "1234", "value": "someid", "link": true}"#).unwrap();
        let expected = Conflict {
            actor: ActorID("1234".to_string()),
            value: ElementValue::Link(ObjectID::ID("someid".to_string())),
            datatype: None,
        };
        let actual: Conflict = serde_json::from_value(json).unwrap();
        assert_eq!(expected, actual);
    }

    #[test]
    fn test_deserialize_conflict_nolink() {
        // Without `link` the value stays primitive; the datatype is preserved.
        let json: serde_json::Value =
            serde_json::from_str(r#"{"actor": "1234", "value": 5, "datatype": "counter"}"#)
                .unwrap();
        let expected = Conflict {
            actor: ActorID("1234".to_string()),
            value: ElementValue::Primitive(PrimitiveValue::Number(5.0)),
            datatype: Some(DataType::Counter),
        };
        let actual: Conflict = serde_json::from_value(json).unwrap();
        assert_eq!(expected, actual);
    }
}
diff --git a/automerge-backend/src/protocol.rs b/automerge-backend/src/protocol.rs
new file mode 100644
index 00000000..cf0387ce
--- /dev/null
+++ b/automerge-backend/src/protocol.rs
@@ -0,0 +1,549 @@
+//! This module contains types which are deserialized from the changes which
+//! are produced by the automerge JS library. Given the following code
+//!
+//! ```javascript
+//! doc = ... // create and edit an automerge document
+//! let changes = Automerge.getHistory(doc).map(h => h.change)
+//! console.log(JSON.stringify(changes, null, 4))
+//! ```
+//!
+//! The output of this can then be deserialized like so
+//!
+//! ```rust,no_run
+//! # use automerge_backend::Change;
+//! let changes_str = "";
+//! let changes: Vec<Change> = serde_json::from_str(changes_str).unwrap();
+//! ```
+use core::cmp::max;
+use serde::de;
+use serde::{Deserialize, Deserializer, Serialize, Serializer};
+use std::cmp::{Ordering, PartialOrd};
+use std::collections::HashMap;
+use std::str::FromStr;
+
+use crate::error;
+
/// Identifier of an object within a document. The root object is named by a
/// well-known all-zero UUID and gets its own variant.
#[derive(Eq, PartialEq, Debug, Hash, Clone)]
pub enum ObjectID {
    /// Any non-root object, keyed by its ID string (presumably a UUID —
    /// TODO confirm; `parse` accepts any non-root string).
    ID(String),
    /// The document root object.
    Root,
}
+
+impl ObjectID {
+ fn parse(s: &str) -> ObjectID {
+ match s {
+ "00000000-0000-0000-0000-000000000000" => ObjectID::Root,
+ _ => ObjectID::ID(s.into()),
+ }
+ }
+}
+
+impl<'de> Deserialize<'de> for ObjectID {
+ fn deserialize(deserializer: D) -> Result
+ where
+ D: Deserializer<'de>,
+ {
+ let s = String::deserialize(deserializer)?;
+ Ok(ObjectID::parse(&s))
+ }
+}
+
+impl Serialize for ObjectID {
+ fn serialize(&self, serializer: S) -> Result
+ where
+ S: Serializer,
+ {
+ let id_str = match self {
+ ObjectID::Root => "00000000-0000-0000-0000-000000000000",
+ ObjectID::ID(id) => id,
+ };
+ serializer.serialize_str(id_str)
+ }
+}
+
/// A key inside an object: a map key, or the string form of an element ID
/// when addressing a sequence element (see `as_element_id`).
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Hash, Clone)]
pub struct Key(pub String);
+
+impl Key {
+ pub fn as_element_id(&self) -> Result {
+ ElementID::from_str(&self.0)
+ }
+}
+
/// Identifier of an actor (a device/process editing the document),
/// serialized as a plain string.
#[derive(Deserialize, Serialize, Eq, PartialEq, Hash, Debug, Clone, PartialOrd, Ord)]
pub struct ActorID(pub String);
+
+impl ActorID {
+ pub fn random() -> ActorID {
+ ActorID(uuid::Uuid::new_v4().to_string())
+ }
+
+ pub fn from_string(raw: String) -> ActorID {
+ ActorID(raw)
+ }
+}
+
+#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
+pub struct Clock(pub HashMap);
+
+impl Clock {
+ pub fn empty() -> Clock {
+ Clock(HashMap::new())
+ }
+
+ pub fn with(&self, actor_id: &ActorID, seq: u32) -> Clock {
+ let mut result = self.clone();
+ result.set(actor_id, max(seq, self.get(actor_id)));
+ result
+ }
+
+ pub fn merge(&mut self, other: &Clock) {
+ other.into_iter().for_each(|(actor_id, seq)| {
+ self.set(actor_id, max(*seq, self.get(actor_id)));
+ });
+ }
+
+ pub fn union(&self, other: &Clock) -> Clock {
+ let mut result = self.clone();
+ result.merge(other);
+ result
+ }
+
+ pub fn set(&mut self, actor_id: &ActorID, seq: u32) {
+ if seq == 0 {
+ self.0.remove(actor_id);
+ } else {
+ self.0.insert(actor_id.clone(), seq);
+ }
+ }
+
+ pub fn get(&self, actor_id: &ActorID) -> u32 {
+ *self.0.get(actor_id).unwrap_or(&0)
+ }
+
+ pub fn divergent(&self, other: &Clock) -> bool {
+ !self.less_or_equal(other)
+ }
+
+ fn less_or_equal(&self, other: &Clock) -> bool {
+ self.into_iter()
+ .all(|(actor_id, _)| self.get(actor_id) <= other.get(actor_id))
+ }
+}
+
+impl PartialOrd for Clock {
+ fn partial_cmp(&self, other: &Clock) -> Option {
+ let le1 = self.less_or_equal(other);
+ let le2 = other.less_or_equal(self);
+ match (le1, le2) {
+ (true, true) => Some(Ordering::Equal),
+ (true, false) => Some(Ordering::Less),
+ (false, true) => Some(Ordering::Greater),
+ (false, false) => None,
+ }
+ }
+}
+
/// Iterating a borrowed clock yields its `(actor, seq)` entries by reference.
impl<'a> IntoIterator for &'a Clock {
    type Item = (&'a ActorID, &'a u32);
    type IntoIter = ::std::collections::hash_map::Iter<'a, ActorID, u32>;

    fn into_iter(self) -> Self::IntoIter {
        self.0.iter()
    }
}
+
/// A scalar JSON value as used by operations. `untagged` means it is
/// (de)serialized directly as the bare JSON string/number/bool/null.
#[derive(Deserialize, Serialize, PartialEq, Debug, Clone)]
#[serde(untagged)]
pub enum PrimitiveValue {
    Str(String),
    Number(f64),
    Boolean(bool),
    Null,
}
+
/// Identifies a position in a sequence: either the sentinel before the first
/// element (`Head`, spelled `"_head"`) or a concrete element written as
/// `"actor:elem"`.
#[derive(PartialEq, Eq, Debug, Hash, Clone)]
pub enum ElementID {
    Head,
    SpecificElementID(ActorID, u32),
}
+
+impl ElementID {
+ pub fn as_key(&self) -> Key {
+ match self {
+ ElementID::Head => Key("_head".to_string()),
+ ElementID::SpecificElementID(actor_id, elem) => Key(format!("{}:{}", actor_id.0, elem)),
+ }
+ }
+
+ pub fn from_actor_and_elem(actor: ActorID, elem: u32) -> ElementID {
+ ElementID::SpecificElementID(actor, elem)
+ }
+}
+
+impl<'de> Deserialize<'de> for ElementID {
+ fn deserialize(deserializer: D) -> Result
+ where
+ D: Deserializer<'de>,
+ {
+ let s = String::deserialize(deserializer)?;
+ ElementID::from_str(&s).map_err(|_| de::Error::custom("invalid element ID"))
+ }
+}
+
+impl Serialize for ElementID {
+ fn serialize(&self, serializer: S) -> Result
+ where
+ S: Serializer,
+ {
+ match self {
+ ElementID::Head => serializer.serialize_str("_head"),
+ ElementID::SpecificElementID(actor_id, elem) => {
+ serializer.serialize_str(&format!("{}:{}", actor_id.0, elem))
+ }
+ }
+ }
+}
+
+impl FromStr for ElementID {
+ type Err = error::InvalidElementID;
+
+ fn from_str(s: &str) -> Result {
+ match s {
+ "_head" => Ok(ElementID::Head),
+ id => {
+ let components: Vec<&str> = id.split(':').collect();
+ match components.as_slice() {
+ [actor_id, elem_str] => {
+ let elem = u32::from_str(elem_str)
+ .map_err(|_| error::InvalidElementID(id.to_string()))?;
+ Ok(ElementID::SpecificElementID(
+ ActorID((*actor_id).to_string()),
+ elem,
+ ))
+ }
+ _ => Err(error::InvalidElementID(id.to_string())),
+ }
+ }
+ }
+ }
+}
+
+impl PartialOrd for ElementID {
+ fn partial_cmp(&self, other: &ElementID) -> Option {
+ Some(self.cmp(other))
+ }
+}
+
+impl Ord for ElementID {
+ fn cmp(&self, other: &ElementID) -> Ordering {
+ match (self, other) {
+ (ElementID::Head, ElementID::Head) => Ordering::Equal,
+ (ElementID::Head, _) => Ordering::Less,
+ (_, ElementID::Head) => Ordering::Greater,
+ (
+ ElementID::SpecificElementID(self_actor, self_elem),
+ ElementID::SpecificElementID(other_actor, other_elem),
+ ) => {
+ if self_elem == other_elem {
+ self_actor.cmp(other_actor)
+ } else {
+ self_elem.cmp(other_elem)
+ }
+ }
+ }
+ }
+}
+
/// Extra interpretation attached to a primitive value, serialized as the
/// lowercase strings `"counter"` / `"timestamp"`.
#[derive(Deserialize, Serialize, PartialEq, Debug, Clone)]
pub enum DataType {
    #[serde(rename = "counter")]
    Counter,
    #[serde(rename = "timestamp")]
    Timestamp,
}
+
+#[derive(Deserialize, Serialize, PartialEq, Debug, Clone)]
+#[serde(tag = "action")]
+pub enum Operation {
+ #[serde(rename = "makeMap")]
+ MakeMap {
+ #[serde(rename = "obj")]
+ object_id: ObjectID,
+ },
+ #[serde(rename = "makeList")]
+ MakeList {
+ #[serde(rename = "obj")]
+ object_id: ObjectID,
+ },
+ #[serde(rename = "makeText")]
+ MakeText {
+ #[serde(rename = "obj")]
+ object_id: ObjectID,
+ },
+ #[serde(rename = "makeTable")]
+ MakeTable {
+ #[serde(rename = "obj")]
+ object_id: ObjectID,
+ },
+ #[serde(rename = "ins")]
+ Insert {
+ #[serde(rename = "obj")]
+ list_id: ObjectID,
+ key: ElementID,
+ elem: u32,
+ },
+ #[serde(rename = "set")]
+ Set {
+ #[serde(rename = "obj")]
+ object_id: ObjectID,
+ key: Key,
+ value: PrimitiveValue,
+ #[serde(skip_serializing_if = "Option::is_none", default)]
+ datatype: Option,
+ },
+ #[serde(rename = "link")]
+ Link {
+ #[serde(rename = "obj")]
+ object_id: ObjectID,
+ key: Key,
+ value: ObjectID,
+ },
+ #[serde(rename = "del")]
+ Delete {
+ #[serde(rename = "obj")]
+ object_id: ObjectID,
+ key: Key,
+ },
+ #[serde(rename = "inc")]
+ Increment {
+ #[serde(rename = "obj")]
+ object_id: ObjectID,
+ key: Key,
+ value: f64,
+ },
+}
+
+impl Operation {
+ pub fn object_id(&self) -> &ObjectID {
+ match self {
+ Operation::MakeMap { object_id }
+ | Operation::MakeTable { object_id }
+ | Operation::MakeList { object_id }
+ | Operation::MakeText { object_id }
+ | Operation::Insert {
+ list_id: object_id, ..
+ }
+ | Operation::Set { object_id, .. }
+ | Operation::Link { object_id, .. }
+ | Operation::Delete { object_id, .. }
+ | Operation::Increment { object_id, .. } => object_id,
+ }
+ }
+}
+
+#[derive(Deserialize, Serialize, PartialEq, Debug, Clone)]
+pub struct Change {
+ #[serde(rename = "ops")]
+ pub operations: Vec,
+ #[serde(rename = "actor")]
+ pub actor_id: ActorID,
+ pub seq: u32,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub message: Option,
+ #[serde(rename = "deps")]
+ pub dependencies: Clock,
+}
+
+#[derive(PartialEq, Debug, Clone)]
+pub struct ChangeRequest {
+ pub actor_id: ActorID,
+ pub seq: u32,
+ pub message: Option,
+ pub dependencies: Clock,
+ pub undoable: Option,
+ pub request_type: ChangeRequestType,
+}
+
+#[derive(PartialEq, Debug, Clone)]
+pub enum ChangeRequestType {
+ Change(Vec),
+ Undo,
+ Redo,
+}
+
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json;
    use std::iter::FromIterator;

    // Deserializes a JS-produced change document covering every Operation
    // variant (including the "_head" element ID and the root-object UUID)
    // and checks each one maps onto the expected Rust value.
    #[test]
    fn test_deserializing_operations() {
        let json_str = r#"{
            "ops": [
                {
                    "action": "makeMap",
                    "obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945"
                },
                {
                    "action": "makeList",
                    "obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945"
                },
                {
                    "action": "makeText",
                    "obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945"
                },
                {
                    "action": "makeTable",
                    "obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945"
                },
                {
                    "action": "ins",
                    "obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945",
                    "key": "someactorid:6",
                    "elem": 5
                },
                {
                    "action": "ins",
                    "obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945",
                    "key": "_head",
                    "elem": 6
                },
                {
                    "action": "set",
                    "obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945",
                    "key": "sometimestamp",
                    "value": 123456,
                    "datatype": "timestamp"
                },
                {
                    "action": "set",
                    "obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945",
                    "key": "somekeyid",
                    "value": true
                },
                {
                    "action": "set",
                    "obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945",
                    "key": "somekeyid",
                    "value": 123
                },
                {
                    "action": "set",
                    "obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945",
                    "key": "somekeyid",
                    "value": null
                },
                {
                    "action": "link",
                    "obj": "00000000-0000-0000-0000-000000000000",
                    "key": "cards",
                    "value": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945"
                },
                {
                    "action": "del",
                    "obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945",
                    "key": "somekey"
                },
                {
                    "action": "inc",
                    "obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945",
                    "key": "somekey",
                    "value": 123
                }
            ],
            "actor": "741e7221-11cc-4ef8-86ee-4279011569fd",
            "seq": 1,
            "deps": {
                "someid": 0
            },
            "message": "Initialization"
        }"#;
        let change: Change = serde_json::from_str(&json_str).unwrap();
        assert_eq!(
            change,
            Change {
                actor_id: ActorID("741e7221-11cc-4ef8-86ee-4279011569fd".to_string()),
                operations: vec![
                    Operation::MakeMap {
                        object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string())
                    },
                    Operation::MakeList {
                        object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string())
                    },
                    Operation::MakeText {
                        object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string())
                    },
                    Operation::MakeTable {
                        object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string())
                    },
                    Operation::Insert {
                        list_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string()),
                        key: ElementID::SpecificElementID(ActorID("someactorid".to_string()), 6),
                        elem: 5,
                    },
                    Operation::Insert {
                        list_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string()),
                        key: ElementID::Head,
                        elem: 6,
                    },
                    Operation::Set {
                        object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string()),
                        key: Key("sometimestamp".to_string()),
                        value: PrimitiveValue::Number(123_456.0),
                        datatype: Some(DataType::Timestamp)
                    },
                    Operation::Set {
                        object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string()),
                        key: Key("somekeyid".to_string()),
                        value: PrimitiveValue::Boolean(true),
                        datatype: None
                    },
                    Operation::Set {
                        object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string()),
                        key: Key("somekeyid".to_string()),
                        value: PrimitiveValue::Number(123.0),
                        datatype: None,
                    },
                    Operation::Set {
                        object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string()),
                        key: Key("somekeyid".to_string()),
                        value: PrimitiveValue::Null,
                        datatype: None,
                    },
                    Operation::Link {
                        // The all-zero UUID deserializes to the Root variant.
                        object_id: ObjectID::Root,
                        key: Key("cards".to_string()),
                        value: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string())
                    },
                    Operation::Delete {
                        object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string()),
                        key: Key("somekey".to_string())
                    },
                    Operation::Increment {
                        object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string()),
                        key: Key("somekey".to_string()),
                        value: 123.0,
                    }
                ],
                seq: 1,
                message: Some("Initialization".to_string()),
                dependencies: Clock(HashMap::from_iter(vec![(ActorID("someid".to_string()), 0)]))
            }
        );
    }

    // "_head" parses to the head sentinel.
    #[test]
    fn test_deserialize_elementid() {
        let json_str = "\"_head\"";
        let elem: ElementID = serde_json::from_str(json_str).unwrap();
        assert_eq!(elem, ElementID::Head);
    }

    // The head sentinel serializes back to "_head".
    #[test]
    fn test_serialize_elementid() {
        let result = serde_json::to_value(ElementID::Head).unwrap();
        assert_eq!(result, serde_json::Value::String("_head".to_string()));
    }
}
diff --git a/automerge-backend/src/protocol_serialization.rs b/automerge-backend/src/protocol_serialization.rs
new file mode 100644
index 00000000..18b75cc7
--- /dev/null
+++ b/automerge-backend/src/protocol_serialization.rs
@@ -0,0 +1,256 @@
+use crate::{ActorID, ChangeRequest, ChangeRequestType, Clock, Operation};
+use serde::de::{Error, MapAccess, Unexpected, Visitor};
+use serde::ser::SerializeMap;
+use serde::{Deserialize, Deserializer, Serialize, Serializer};
+use std::fmt;
+
+impl Serialize for ChangeRequest {
+ fn serialize(&self, serializer: S) -> Result
+ where
+ S: Serializer,
+ {
+ let mut map_serializer = serializer.serialize_map(None)?;
+ map_serializer.serialize_entry("actor", &self.actor_id)?;
+ map_serializer.serialize_entry("deps", &self.dependencies)?;
+ map_serializer.serialize_entry("message", &self.message)?;
+ map_serializer.serialize_entry("seq", &self.seq)?;
+ match &self.request_type {
+ ChangeRequestType::Change(ops) => {
+ map_serializer.serialize_entry("requestType", "change")?;
+ map_serializer.serialize_entry("ops", &ops)?;
+ }
+ ChangeRequestType::Undo => map_serializer.serialize_entry("requestType", "undo")?,
+ ChangeRequestType::Redo => map_serializer.serialize_entry("requestType", "redo")?,
+ };
+ if let Some(undoable) = &self.undoable {
+ map_serializer.serialize_entry("undoable", undoable)?;
+ }
+ map_serializer.end()
+ }
+}
+
+impl<'de> Deserialize<'de> for ChangeRequest {
+ fn deserialize(deserializer: D) -> Result
+ where
+ D: Deserializer<'de>,
+ {
+ const FIELDS: &[&str] = &["ops", "deps", "message", "seq", "actor", "requestType"];
+ struct ChangeRequestVisitor;
+ impl<'de> Visitor<'de> for ChangeRequestVisitor {
+ type Value = ChangeRequest;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("A change request object")
+ }
+
+ fn visit_map(self, mut map: V) -> Result
+ where
+ V: MapAccess<'de>,
+ {
+ let mut actor: Option = None;
+ let mut deps: Option = None;
+ let mut message: Option