Compare commits

...

No commits in common. "main" and "fuzz-apply-local-change" have entirely different histories.

531 changed files with 28741 additions and 341418 deletions


@@ -1,17 +0,0 @@
name: Advisories
on:
  schedule:
    - cron: '0 18 * * *'
jobs:
  cargo-deny:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        checks:
          - advisories
          - bans licenses sources
    steps:
      - uses: actions/checkout@v2
      - uses: EmbarkStudios/cargo-deny-action@v1
        with:
          command: check ${{ matrix.checks }}
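The two matrix entries compose into plain cargo-deny invocations, so the same checks can be reproduced locally (a sketch, assuming `cargo install cargo-deny` has been run and you are in the directory containing the Cargo.toml):

```bash
cargo deny check advisories
cargo deny check bans licenses sources
```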


@@ -1,177 +0,0 @@
name: CI
on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main
jobs:
  fmt:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: 1.67.0
          default: true
          components: rustfmt
      - uses: Swatinem/rust-cache@v1
      - run: ./scripts/ci/fmt
        shell: bash
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: 1.67.0
          default: true
          components: clippy
      - uses: Swatinem/rust-cache@v1
      - run: ./scripts/ci/lint
        shell: bash
  docs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: 1.67.0
          default: true
      - uses: Swatinem/rust-cache@v1
      - name: Build rust docs
        run: ./scripts/ci/rust-docs
        shell: bash
      - name: Install doxygen
        run: sudo apt-get install -y doxygen
        shell: bash
  cargo-deny:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        checks:
          - advisories
          - bans licenses sources
    continue-on-error: ${{ matrix.checks == 'advisories' }}
    steps:
      - uses: actions/checkout@v2
      - uses: EmbarkStudios/cargo-deny-action@v1
        with:
          arguments: '--manifest-path ./rust/Cargo.toml'
          command: check ${{ matrix.checks }}
  wasm_tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Install wasm-bindgen-cli
        run: cargo install wasm-bindgen-cli wasm-opt
      - name: Install wasm32 target
        run: rustup target add wasm32-unknown-unknown
      - name: run tests
        run: ./scripts/ci/wasm_tests
  deno_tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: denoland/setup-deno@v1
        with:
          deno-version: v1.x
      - name: Install wasm-bindgen-cli
        run: cargo install wasm-bindgen-cli wasm-opt
      - name: Install wasm32 target
        run: rustup target add wasm32-unknown-unknown
      - name: run tests
        run: ./scripts/ci/deno_tests
  js_fmt:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: install
        run: yarn global add prettier
      - name: format
        run: prettier -c javascript/.prettierrc javascript
  js_tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Install wasm-bindgen-cli
        run: cargo install wasm-bindgen-cli wasm-opt
      - name: Install wasm32 target
        run: rustup target add wasm32-unknown-unknown
      - name: run tests
        run: ./scripts/ci/js_tests
  cmake_build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: nightly-2023-01-26
          default: true
      - uses: Swatinem/rust-cache@v1
      - name: Install CMocka
        run: sudo apt-get install -y libcmocka-dev
      - name: Install/update CMake
        uses: jwlawson/actions-setup-cmake@v1.12
        with:
          cmake-version: latest
      - name: Install rust-src
        run: rustup component add rust-src
      - name: Build and test C bindings
        run: ./scripts/ci/cmake-build Release Static
        shell: bash
  linux:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        toolchain:
          - 1.67.0
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: ${{ matrix.toolchain }}
          default: true
      - uses: Swatinem/rust-cache@v1
      - run: ./scripts/ci/build-test
        shell: bash
  macos:
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: 1.67.0
          default: true
      - uses: Swatinem/rust-cache@v1
      - run: ./scripts/ci/build-test
        shell: bash
  windows:
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: 1.67.0
          default: true
      - uses: Swatinem/rust-cache@v1
      - run: ./scripts/ci/build-test
        shell: bash


@@ -1,52 +0,0 @@
on:
  push:
    branches:
      - main
name: Documentation
jobs:
  deploy-docs:
    concurrency: deploy-docs
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Toolchain
        uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true
      - name: Cache
        uses: Swatinem/rust-cache@v1
      - name: Clean docs dir
        run: rm -rf docs
        shell: bash
      - name: Clean Rust docs dir
        uses: actions-rs/cargo@v1
        with:
          command: clean
          args: --manifest-path ./rust/Cargo.toml --doc
      - name: Build Rust docs
        uses: actions-rs/cargo@v1
        with:
          command: doc
          args: --manifest-path ./rust/Cargo.toml --workspace --all-features --no-deps
      - name: Move Rust docs
        run: mkdir -p docs && mv rust/target/doc/* docs/.
        shell: bash
      - name: Configure root page
        run: echo '<meta http-equiv="refresh" content="0; url=automerge">' > docs/index.html
      - name: Deploy docs
        uses: peaceiris/actions-gh-pages@v3
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./docs


@@ -1,214 +0,0 @@
name: Release
on:
  push:
    branches:
      - main
jobs:
  check_if_wasm_version_upgraded:
    name: Check if WASM version has been upgraded
    runs-on: ubuntu-latest
    outputs:
      wasm_version: ${{ steps.version-updated.outputs.current-package-version }}
      wasm_has_updated: ${{ steps.version-updated.outputs.has-updated }}
    steps:
      - uses: JiPaix/package-json-updated-action@v1.0.5
        id: version-updated
        with:
          path: rust/automerge-wasm/package.json
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  publish-wasm:
    name: Publish WASM package
    runs-on: ubuntu-latest
    needs:
      - check_if_wasm_version_upgraded
    # We create release only if the version in the package.json has been upgraded
    if: needs.check_if_wasm_version_upgraded.outputs.wasm_has_updated == 'true'
    steps:
      - uses: actions/setup-node@v3
        with:
          node-version: '16.x'
          registry-url: 'https://registry.npmjs.org'
      - uses: denoland/setup-deno@v1
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ github.ref }}
      - name: Get rid of local github workflows
        run: rm -r .github/workflows
      - name: Remove tmp_branch if it exists
        run: git push origin :tmp_branch || true
      - run: git checkout -b tmp_branch
      - name: Install wasm-bindgen-cli
        run: cargo install wasm-bindgen-cli wasm-opt
      - name: Install wasm32 target
        run: rustup target add wasm32-unknown-unknown
      - name: run wasm js tests
        id: wasm_js_tests
        run: ./scripts/ci/wasm_tests
      - name: run wasm deno tests
        id: wasm_deno_tests
        run: ./scripts/ci/deno_tests
      - name: build release
        id: build_release
        run: |
          npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm run release
      - name: Collate deno release files
        if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
        run: |
          mkdir $GITHUB_WORKSPACE/deno_wasm_dist
          cp $GITHUB_WORKSPACE/rust/automerge-wasm/deno/* $GITHUB_WORKSPACE/deno_wasm_dist
          cp $GITHUB_WORKSPACE/rust/automerge-wasm/index.d.ts $GITHUB_WORKSPACE/deno_wasm_dist
          cp $GITHUB_WORKSPACE/rust/automerge-wasm/README.md $GITHUB_WORKSPACE/deno_wasm_dist
          cp $GITHUB_WORKSPACE/rust/automerge-wasm/LICENSE $GITHUB_WORKSPACE/deno_wasm_dist
          sed -i '1i /// <reference types="./index.d.ts" />' $GITHUB_WORKSPACE/deno_wasm_dist/automerge_wasm.js
      - name: Create npm release
        if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
        run: |
          if [ "$(npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm show . version)" = "$VERSION" ]; then
            echo "This version is already published"
            exit 0
          fi
          EXTRA_ARGS="--access public"
          if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then
            echo "Is pre-release version"
            EXTRA_ARGS="$EXTRA_ARGS --tag next"
          fi
          if [ "$NODE_AUTH_TOKEN" = "" ]; then
            echo "Can't publish on NPM, You need a NPM_TOKEN secret."
            false
          fi
          npm publish $GITHUB_WORKSPACE/rust/automerge-wasm $EXTRA_ARGS
        env:
          NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
          VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
      - name: Commit wasm deno release files
        run: |
          git config --global user.name "actions"
          git config --global user.email actions@github.com
          git add $GITHUB_WORKSPACE/deno_wasm_dist
          git commit -am "Add deno release files"
          git push origin tmp_branch
      - name: Tag wasm release
        if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
        uses: softprops/action-gh-release@v1
        with:
          name: Automerge Wasm v${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
          tag_name: js/automerge-wasm-${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
          target_commitish: tmp_branch
          generate_release_notes: false
          draft: false
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Remove tmp_branch
        run: git push origin :tmp_branch
  check_if_js_version_upgraded:
    name: Check if JS version has been upgraded
    runs-on: ubuntu-latest
    outputs:
      js_version: ${{ steps.version-updated.outputs.current-package-version }}
      js_has_updated: ${{ steps.version-updated.outputs.has-updated }}
    steps:
      - uses: JiPaix/package-json-updated-action@v1.0.5
        id: version-updated
        with:
          path: javascript/package.json
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  publish-js:
    name: Publish JS package
    runs-on: ubuntu-latest
    needs:
      - check_if_js_version_upgraded
      - check_if_wasm_version_upgraded
      - publish-wasm
    # We create release only if the version in the package.json has been upgraded and after the WASM release
    if: |
      (always() && ! cancelled()) &&
      (needs.publish-wasm.result == 'success' || needs.publish-wasm.result == 'skipped') &&
      needs.check_if_js_version_upgraded.outputs.js_has_updated == 'true'
    steps:
      - uses: actions/setup-node@v3
        with:
          node-version: '16.x'
          registry-url: 'https://registry.npmjs.org'
      - uses: denoland/setup-deno@v1
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ github.ref }}
      - name: Get rid of local github workflows
        run: rm -r .github/workflows
      - name: Remove js_tmp_branch if it exists
        run: git push origin :js_tmp_branch || true
      - run: git checkout -b js_tmp_branch
      - name: check js formatting
        run: |
          yarn global add prettier
          prettier -c javascript/.prettierrc javascript
      - name: run js tests
        id: js_tests
        run: |
          cargo install wasm-bindgen-cli wasm-opt
          rustup target add wasm32-unknown-unknown
          ./scripts/ci/js_tests
      - name: build js release
        id: build_release
        run: |
          npm --prefix $GITHUB_WORKSPACE/javascript run build
      - name: build js deno release
        id: build_deno_release
        run: |
          VERSION=$WASM_VERSION npm --prefix $GITHUB_WORKSPACE/javascript run deno:build
        env:
          WASM_VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
      - name: run deno tests
        id: deno_tests
        run: |
          npm --prefix $GITHUB_WORKSPACE/javascript run deno:test
      - name: Collate deno release files
        if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
        run: |
          mkdir $GITHUB_WORKSPACE/deno_js_dist
          cp $GITHUB_WORKSPACE/javascript/deno_dist/* $GITHUB_WORKSPACE/deno_js_dist
      - name: Create npm release
        if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
        run: |
          if [ "$(npm --prefix $GITHUB_WORKSPACE/javascript show . version)" = "$VERSION" ]; then
            echo "This version is already published"
            exit 0
          fi
          EXTRA_ARGS="--access public"
          if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then
            echo "Is pre-release version"
            EXTRA_ARGS="$EXTRA_ARGS --tag next"
          fi
          if [ "$NODE_AUTH_TOKEN" = "" ]; then
            echo "Can't publish on NPM, You need a NPM_TOKEN secret."
            false
          fi
          npm publish $GITHUB_WORKSPACE/javascript $EXTRA_ARGS
        env:
          NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
          VERSION: ${{ needs.check_if_js_version_upgraded.outputs.js_version }}
      - name: Commit js deno release files
        run: |
          git config --global user.name "actions"
          git config --global user.email actions@github.com
          git add $GITHUB_WORKSPACE/deno_js_dist
          git commit -am "Add deno js release files"
          git push origin js_tmp_branch
      - name: Tag JS release
        if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
        uses: softprops/action-gh-release@v1
        with:
          name: Automerge v${{ needs.check_if_js_version_upgraded.outputs.js_version }}
          tag_name: js/automerge-${{ needs.check_if_js_version_upgraded.outputs.js_version }}
          target_commitish: js_tmp_branch
          generate_release_notes: false
          draft: false
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Remove js_tmp_branch
        run: git push origin :js_tmp_branch

.gitignore (vendored, 12 changes)

@@ -1,6 +1,8 @@
/.direnv
perf.*
/Cargo.lock
build/
.vim/*
/target
**/*.rs.bk
Cargo.lock
libtest.rmeta
.direnv/
result
result-lib

.rustfmt.toml (new file, 2 changes)

@@ -0,0 +1,2 @@
group_imports = "StdExternalCrate"
imports_granularity = "Crate"
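With these two settings rustfmt merges imports per crate and splits them into std, external, and crate-local groups; a small sketch of the resulting shape:

```rust
use std::collections::{HashMap, HashSet};

use serde::{Deserialize, Serialize};

use crate::types::BinaryChange;
```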

.travis.yml (new file, 71 changes)

@@ -0,0 +1,71 @@
os: linux
dist: xenial
language: rust
if: branch = main
install:
  - rustup self update
  - rustup component add clippy
  - rustup component add rustfmt
  - curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
  - nvm install 10
jobs:
  allow_failures:
    - rust: nightly
  fast_finish: true
  include:
    - name: Stable - Format and Clippy
      rust: stable
      script:
        - cargo fmt --all -- --check
        - cargo clippy --all-targets --all-features -- -D warnings
    - name: Stable - Build and Test
      rust: stable
      script:
        - cargo build --all-targets --workspace
        - cargo test --workspace
    - name: Stable - Wasm and Interop
      rust: stable
      script:
        - wasm-pack test automerge-frontend --node
        - cd automerge-backend-wasm
        - yarn release
        - yarn test:js
    - name: Beta - Format and Clippy
      rust: beta
      script:
        - cargo fmt --all -- --check
        - cargo clippy --all-targets --all-features -- -D warnings
    - name: Beta - Build and Test
      rust: beta
      script:
        - cargo build --all-targets --workspace
        - cargo test --workspace
    - name: Beta - Wasm and Interop
      rust: beta
      script:
        - wasm-pack test automerge-frontend --node
        - cd automerge-backend-wasm
        - yarn release
        - yarn test:js
    - name: Nightly - Format and Clippy
      rust: nightly
      script:
        - cargo fmt --all -- --check
        - cargo clippy --all-targets --all-features -- -D warnings
    - name: Nightly - Build and Test
      rust: nightly
      script:
        - cargo build --all-targets --workspace
        - cargo test --workspace
    - name: Nightly - Wasm and Interop
      rust: nightly
      script:
        - wasm-pack test automerge-frontend --node
        - cd automerge-backend-wasm
        - yarn release
        - yarn test:js

Cargo.nix (new file, 6345 changes)

File diff suppressed because it is too large

Cargo.toml (new file, 12 changes)

@@ -0,0 +1,12 @@
[workspace]
members = [
    "automerge",
    "automerge-c",
    "automerge-backend",
    "automerge-backend-wasm",
    "automerge-frontend",
    "automerge-cli",
    "automerge-protocol",
    "fuzz",
]
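With this workspace manifest, individual members can be built or tested from the repository root, for example (a sketch):

```bash
cargo test -p automerge-backend
```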

LICENSE (20 changes)

@@ -1,19 +1,7 @@
Copyright (c) 2019-2021 the Automerge contributors
Copyright 2019 Alex Good
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

README.md (176 changes)

@@ -1,147 +1,55 @@
# Automerge
<img src='./img/sign.svg' width='500' alt='Automerge logo' />
[![docs](https://docs.rs/automerge/badge.svg)](https://docs.rs/automerge)
[![crates](https://img.shields.io/crates/v/automerge.svg)](https://crates.io/crates/automerge)
[![Build Status](https://travis-ci.org/automerge/automerge-rs.svg?branch=main)](https://travis-ci.org/automerge/automerge-rs)
[![homepage](https://img.shields.io/badge/homepage-published-informational)](https://automerge.org/)
[![main docs](https://img.shields.io/badge/docs-main-informational)](https://automerge.org/automerge-rs/automerge/)
[![ci](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml/badge.svg)](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml)
[![docs](https://github.com/automerge/automerge-rs/actions/workflows/docs.yaml/badge.svg)](https://github.com/automerge/automerge-rs/actions/workflows/docs.yaml)
This is a rust implementation of
[automerge](https://github.com/automerge/automerge). Currently this repo
contains an implementation of the "backend" of the Automerge library, designed
to be used via FFI from many different platforms. Very soon there will also be
a frontend which will be designed for Rust application developers to use.
Automerge is a library which provides fast implementations of several different
CRDTs, a compact compression format for these CRDTs, and a sync protocol for
efficiently transmitting those changes over the network. The objective of the
project is to support [local-first](https://www.inkandswitch.com/local-first/) applications in the same way that relational
databases support server applications - by providing mechanisms for persistence
which allow application developers to avoid thinking about hard distributed
computing problems. Automerge aims to be PostgreSQL for your local-first app.
This project is tracking the `performance` branch of the JavaScript reference implementation of Automerge. The `performance` branch contains a lot of backwards incompatible changes and is intended to become a 1.0 release of the library; you can find more information about that [here](https://github.com/automerge/automerge/pull/253). Our goal is to release a pre-1.0 version of the rust library once the JavaScript library hits 1.0. As such we are keeping this project up to date with the frequent and often quite large changes in the `performance` branch of the JavaScript repo - that is to say, don't depend on anything in this repo to stay constant right now.
If you're looking for documentation on the JavaScript implementation take a look
at https://automerge.org/docs/hello/. There are other implementations in both
Rust and C, but they are earlier and don't have documentation yet. You can find
them in `rust/automerge` and `rust/automerge-c` if you are comfortable
reading the code and tests to figure out how to use them.
If you're familiar with CRDTs and interested in the design of Automerge in
particular take a look at https://automerge.org/docs/how-it-works/backend/
## Using automerge-backend-wasm with automerge
Finally, if you want to talk to us about this project please [join the
Slack](https://join.slack.com/t/automerge/shared_invite/zt-e4p3760n-kKh7r3KRH1YwwNfiZM8ktw)
This backend is tracking the [performance branch of automerge](https://github.com/automerge/automerge/tree/performance)
## Status
This project is formed of a core Rust implementation which is exposed via FFI in
javascript+WASM, C, and soon other languages. Alex
([@alexjg](https://github.com/alexjg/)) is working full time on maintaining
automerge, other members of Ink and Switch are also contributing time and there
are several other maintainers. The focus is currently on shipping the new JS
package. We expect to be iterating the API and adding new features over the next
six months so there will likely be several major version bumps in all packages
in that time.
In general we try and respect semver.
### JavaScript
A stable release of the javascript package is currently available as
`@automerge/automerge@2.0.0`. Pre-release versions of the `2.0.1` are
available as `2.0.1-alpha.n`. `2.0.1*` packages are also available for Deno at
https://deno.land/x/automerge
### Rust
The rust codebase is currently oriented around producing a performant backend
for the Javascript wrapper and as such the API for Rust code is low level and
not well documented. We will be returning to this over the next few months but
for now you will need to be comfortable reading the tests and asking questions
to figure out how to use it. If you are looking to build rust applications which
use automerge you may want to look into
[autosurgeon](https://github.com/alexjg/autosurgeon)
## Repository Organisation
- `./rust` - the rust implementation and also the Rust components of
platform specific wrappers (e.g. `automerge-wasm` for the WASM API or
`automerge-c` for the C FFI bindings)
- `./javascript` - The javascript library which uses `automerge-wasm`
internally but presents a more idiomatic javascript interface
- `./scripts` - scripts which are useful to maintenance of the repository.
This includes the scripts which are run in CI.
- `./img` - static assets for use in `.md` files
## Building
To build this codebase you will need:
- `rust`
- `node`
- `yarn`
- `cmake`
- `cmocka`
You will also need to install the following with `cargo install`
- `wasm-bindgen-cli`
- `wasm-opt`
- `cargo-deny`
And ensure you have added the `wasm32-unknown-unknown` target for rust cross-compilation.
The various subprojects (the rust code, the wrapper projects) have their own
build instructions, but to run the tests that will be run in CI you can run
`./scripts/ci/run`.
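For example, the cargo tools and wasm target from the list above can be installed like this (a minimal sketch; the macOS walkthrough below shows the full setup):

```bash
cargo install wasm-bindgen-cli wasm-opt cargo-deny
rustup target add wasm32-unknown-unknown
```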
### For macOS
These instructions worked to build locally on macOS 13.1 (arm64) as of
Nov 29th 2022.
```bash
# clone the repo
git clone https://github.com/automerge/automerge-rs
cd automerge-rs
# install rustup
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
# install homebrew
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
# install cmake, node, cmocka
brew install cmake node cmocka
# install yarn
npm install --global yarn
# install javascript dependencies
yarn --cwd ./javascript
# install rust dependencies
cargo install wasm-bindgen-cli wasm-opt cargo-deny
# get nightly rust to produce optimized automerge-c builds
rustup toolchain install nightly
rustup component add rust-src --toolchain nightly
# add wasm target in addition to current architecture
rustup target add wasm32-unknown-unknown
# Run ci script
./scripts/ci/run
```
If your build fails to find `cmocka.h` you may need to teach it about homebrew's
installation location:
To build the wasm backend you'll need to install [wasm-pack](https://rustwasm.github.io/wasm-pack/installer/). Then:
```
export CPATH=/opt/homebrew/include
export LIBRARY_PATH=/opt/homebrew/lib
./scripts/ci/run
$ cd automerge-backend-wasm
$ yarn release
```
## Contributing
Once it is built set the new default backend in your js application like this.
```js
const wasmBackend = require(path.resolve(WASM_BACKEND_PATH))
Automerge.setDefaultBackend(wasmBackend)
```
## Backend? Frontend?
Automerge is a JSON CRDT, in this sense it is just a data structure with a set
of rules about how to merge two different versions of that data structure.
However, in practice one often needs two separate roles when writing
applications which use the CRDT:
- A very low latency process, usually running on some kind of UI thread, which
records changes made by the user and reflects them in the UI
- A less latency sensitive process which executes the complex logic of merging changes
received from the UI and over the network and sends diffs to the frontend to apply
More details can be found [here](https://github.com/automerge/automerge/blob/performance/BINARY_FORMAT.md).
Note that the performance branch of automerge is under active development and is changing quickly.
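To make the split concrete, here is a hedged sketch using the pre-1.0 JS API (the exact return shapes of `Frontend.change` and `Backend.applyLocalChange` vary between releases, so treat this as pseudocode):

```js
const { Frontend, Backend } = require("automerge")

// Frontend: low latency, runs on the UI thread.
let [doc, changeRequest] = Frontend.change(Frontend.init(), d => { d.count = 1 })

// Backend: merges changes from the UI and the network.
const backend = Backend.init()
const [backend2, patch] = Backend.applyLocalChange(backend, changeRequest)

// The resulting diff (patch) flows back to the frontend to apply.
doc = Frontend.applyPatch(doc, patch)
```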
## Community
Development of automerge rust is currently being coordinated at our [slack channel](https://automerge.slack.com/archives/CTQARU3NZ). Come say hi. =)
Please try and split your changes up into relatively independent commits which
change one subsystem at a time and add good commit messages which describe what
the change is and why you're making it (err on the side of longer commit
messages). `git blame` should give future maintainers a good idea of why
something is the way it is.

automerge-backend-wasm/.gitignore (vendored, new file, 7 changes)

@@ -0,0 +1,7 @@
node_modules
/wasm-pack.log
/build
/dev
# Used for js-interop tests
/automerge-js-temp


@@ -1,15 +1,14 @@
# You must change these to your own details.
[package]
name = "automerge-wasm"
name = "automerge-backend-wasm"
description = "An js/wasm wrapper for the rust implementation of automerge-backend"
repository = "https://github.com/automerge/automerge-rs"
version = "0.1.0"
authors = ["Alex Good <alex@memoryandthought.me>","Orion Henry <orion@inkandswitch.com>", "Martin Kleppmann"]
categories = ["wasm"]
readme = "README.md"
edition = "2021"
edition = "2018"
license = "MIT"
rust-version = "1.57.0"
[lib]
crate-type = ["cdylib","rlib"]
@@ -22,30 +21,19 @@ default = ["console_error_panic_hook"]
[dependencies]
console_error_panic_hook = { version = "^0.1", optional = true }
# wee_alloc = { version = "^0.4", optional = true }
automerge = { path = "../automerge", features=["wasm"] }
automerge-backend = { path = "../automerge-backend" }
automerge-protocol = { path = "../automerge-protocol" }
js-sys = "^0.3"
serde = "^1.0"
serde_json = "^1.0"
rand = { version = "^0.8.4" }
getrandom = { version = "^0.2.2", features=["js"] }
uuid = { version = "^1.2.1", features=["v4", "js", "serde"] }
serde-wasm-bindgen = "0.4.3"
getrandom = { version = "0.2.2", features=["js"] }
uuid = { version = "^0.8.2", features=["v4", "wasm-bindgen", "serde"] }
serde-wasm-bindgen = "0.1.3"
serde_bytes = "0.11.5"
hex = "^0.4.3"
regex = "^1.5"
itertools = "^0.10.3"
thiserror = "^1.0.16"
[dependencies.wasm-bindgen]
version = "^0.2.83"
#features = ["std"]
features = ["serde-serialize", "std"]
[package.metadata.wasm-pack.profile.release]
# wasm-opt = false
[package.metadata.wasm-pack.profile.profiling]
wasm-opt = false
version = "^0.2"
features = ["serde-serialize"]
# The `web-sys` crate allows you to interact with the various browser APIs,
# like the DOM.
@@ -53,10 +41,7 @@ wasm-opt = false
version = "0.3.22"
features = ["console"]
[dev-dependencies]
futures = "^0.1"
proptest = { version = "^1.0.0", default-features = false, features = ["std"] }
wasm-bindgen-futures = "^0.4"
wasm-bindgen-futures = "^0.3"
wasm-bindgen-test = "^0.3"


@@ -0,0 +1,78 @@
# WASM Goals and Issues
We set out with this project to see if we could create a backend implementation
for Automerge that could serve as a basis for native ports to many different
languages but also replace the javascript backend of the current implementation
without any compromises.
We chose Rust as the basis of this project. It has the same performance
characteristics as C and C++, making it ideal for implementing a database-like
tool. It also has safety guarantees that C and C++ lack, which will protect us from
synchronization issues and data races that plague projects like this. Rust
also has a very mature suite of WASM integration tools.
Our goal was to create a zero compromise implementation of the backend. We
almost achieved this goal. Here are the details of the compromises we found.
## Problem: WASM memory and garbage collection
Memory allocated in WASM needs to be explicitly freed, and there is no feature
(yet) in javascript to alert you when an object has been collected by the
GC. This makes immutable APIs unworkable, since you need the GC to collect old
versions of objects.
Also this means that an Automerge backend would need to be explicitly freed at the
end of its life. Under normal circumstances a backend will live indefinitely, so this
would not require a change, but in situations where many small databases are being
created and thrown away this requires an API change.
## Solution
The performance branch of Automerge has made some small but important adjustments to
the Frontend/Backend API. These now assume backends to be long lived and possibly
mutable, and disallow creating divergent histories with old handles to the backend.
A `clone` function was added to allow this behavior when it is intentional, and a `free`
function that can do cleanup.
```js
let doc1 = Automerge.init();
let doc2 = Automerge.clone(doc1);
Automerge.free(doc1);
```
## Problem: WASM is fundamentally async - Automerge is sync
Dealing with WASM's love of all things async was surely the largest thorn in our side. It basically boils down to this...
1. ### Loading WASM requires IO - IO is async
WASM binaries are not js - loading them from JS is async (with the notable exception of node's `readFileSync()`)
2. ### WebAssembly.Module(buffer) has a 4k limit on the render thread in browsers
Even if you can synchronously load and compile the wasm, most browsers impose a 4k limit on synchronous (but not asynchronous) WASM compilation in the render thread. This is not an issue in node applications or in web workers.
## Solutions
1. ### Compile Rust to ASM.js - (no problems except it's big and slow)
Now it's javascript. All the strangeness of WASM goes away. Webpack will happily inline the code into a bundle. The only downside: 400k of WASM becomes 5M of js and it runs 3 times slower.
2. ### Inline the WASM as a base64 encoded string - (no problems except the render thread)
This is actually surprisingly effective. The size added to the js bundle is reasonable and the decode time is trivial. The only issue is that it still won't work in the render thread (see the sketch after this list).
3. ### Wait for top level await (no problems - someday)
There is a proposal for top level await support in js modules. This would allow us to insert an internal await into the backend module and hide the async load from users. Unfortunately it's not in JS yet...
4. ### Change Automerge.init to be async (no problems except a breaking api change)
All of the async strangeness can be boiled down to the `Automerge.init()` call. This would require introducing an api change that has no purpose in the JS-only implementation and represents a non-trivial compromise in adopting WASM.
```js
const doc = Automerge.init();
// becomes
const doc = await Automerge.init();
```
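As a rough illustration of option 2 above (the file and module names here are illustrative, not part of the package):

```js
// wasm-base64.js is generated at build time and contains something like:
//   module.exports = "AGFzbQEAAAA..."
const wasmBase64 = require("./wasm-base64")
const bytes = Buffer.from(wasmBase64, "base64")
// Synchronous compilation works in node and in web workers, but browsers
// cap synchronous WebAssembly.Module compilation at 4k on the render thread.
const module = new WebAssembly.Module(bytes)
const instance = new WebAssembly.Instance(module, { /* imports */ })
```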


@@ -0,0 +1,20 @@
Copyright (c) 2020 Ink & Switch LLC
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@@ -0,0 +1,22 @@
## automerge-backend-wasm
This is a wrapper for the rust implementation of [automerge-backend](https://github.com/automerge/automerge-rs/tree/master/automerge-backend) to be used with [Automerge](https://github.com/automerge/automerge).
### Using
You can require this synchronously as a CommonJS module or import it as an ES6 module
```js
let Automerge = require("automerge")
let Backend = require("automerge-backend-wasm")
Automerge.setDefaultBackend(Backend)
```
```js
import * as Automerge from "automerge"
import * as Backend from "automerge-backend-wasm"
Automerge.setDefaultBackend(Backend)
```
Note that the first uses a synchronous filesystem load of the wasm and will not be transferable to a browser bundle. The second uses ES6 wasm import statements, which should work in all modern browsers but require the '--experimental-wasm-modules' flag on nodejs (v13 on) unless you pack/bundle the code into a compatible format.
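For example, to load the ES6 module build under node with that flag (the entry file name here is illustrative):

```bash
node --experimental-wasm-modules app.mjs
```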


@@ -0,0 +1,35 @@
{
  "collaborators": [
    "Orion Henry <orion@inkandswitch.com>",
    "Alex Good <alex@memoryandthought.me>"
  ],
  "name": "automerge-backend-wasm",
  "description": "wasm-bindgen bindings to the automerge-backend rust implementation",
  "version": "0.1.0",
  "license": "MIT",
  "files": [
    "README.md",
    "pkg.js",
    "LICENSE",
    "package.json",
    "automerge_backend_wasm_bg.wasm",
    "automerge_backend_wasm.js"
  ],
  "main": "./dev/index.js",
  "scripts": {
    "cjs-release": "wasm-pack build --target nodejs --release --out-name index -d build/cjs && rm build/cjs/package.json",
    "mjs-release": "wasm-pack build --target bundler --release --out-name index -d build/mjs && cp package.mjs.json build/mjs/package.json",
    "prep-release": "rm build/*/.gitignore build/*/README.md build/*/LICENSE build/*/*.ts",
    "release": "yarn cjs-release && yarn mjs-release && yarn prep-release && cp package.pkg.json build/package.json && cp README.md LICENSE build",
    "build": "rm -rf dev && wasm-pack build --target nodejs --dev --out-name index -d dev",
    "profiling": "wasm-pack build --target nodejs --profiling --out-name index -d dev",
    "mocha": "yarn build && mocha --bail --full-trace",
    "webpack": "webpack",
    "test": "cargo test && wasm-pack test --node",
    "test:js": "./scripts/js_tests.sh"
  },
  "dependencies": {},
  "devDependencies": {
    "mocha": "^7.1.1"
  }
}


@@ -0,0 +1,3 @@
{
  "type": "module"
}


@@ -0,0 +1,35 @@
{
  "name": "automerge-backend-wasm",
  "collaborators": [
    "Alex Good <alex@memoryandthought.me>",
    "Orion Henry <orion@inkandswitch.com>",
    "Martin Kleppmann"
  ],
  "description": "A js/wasm wrapper for the rust implementation of automerge-backend",
  "version": "0.1.4",
  "license": "MIT",
  "repository": {
    "type": "git",
    "url": "https://github.com/automerge/automerge-rs"
  },
  "files": [
    "README.md",
    "LICENSE",
    "cjs/index_bg.wasm",
    "cjs/index.js",
    "mjs/package.json",
    "mjs/index_bg.wasm",
    "mjs/index_bg.js",
    "mjs/index.js"
  ],
  "type": "commonjs",
  "exports": {
    ".": {
      "require": "./cjs/index.js",
      "default": "./mjs/index.js"
    }
  },
  "main": "./cjs/index.js",
  "module": "./mjs/index.js",
  "sideEffects": false
}


@@ -0,0 +1,40 @@
#! /usr/bin/env bash
set -e

ORIGINAL_PWD=$PWD

if [[ -z $AUTOMERGE_JS_DIR ]]; then
  COMMIT_HASH=d8ec3b4c31f150d68d0f4798128c78607b4c663f
  AUTOMERGE_JS_DIR="./automerge-js-temp"
  echo "'AUTOMERGE_JS_DIR' var not set. Using temporary dir: $AUTOMERGE_JS_DIR & commit hash: $COMMIT_HASH"
  if [[ -d $AUTOMERGE_JS_DIR ]]; then
    echo "Dir found, skipping clone"
    cd $AUTOMERGE_JS_DIR
    if ! git cat-file -e $COMMIT_HASH; then
      echo "Commit hash: $COMMIT_HASH not found in $AUTOMERGE_JS_DIR"
      exit 1
    fi
  else
    git clone https://github.com/automerge/automerge.git $AUTOMERGE_JS_DIR
  fi
  cd $ORIGINAL_PWD
  cd $AUTOMERGE_JS_DIR
  git checkout $COMMIT_HASH
else
  # if the env var is set, assume the user is using an existing checkout of automerge
  echo "Using $AUTOMERGE_JS_DIR"
  if [[ ! -d $AUTOMERGE_JS_DIR ]]; then
    echo "$AUTOMERGE_JS_DIR dir not found."
    exit 1
  fi
fi

cd $ORIGINAL_PWD
cd $AUTOMERGE_JS_DIR

WASM_BACKEND_PATH="$ORIGINAL_PWD/build"
if [[ ! -d $WASM_BACKEND_PATH ]]; then
  echo "$WASM_BACKEND_PATH does not exist. Run 'yarn release' to build WASM backend"
  exit 1
fi

yarn install
WASM_BACKEND_PATH=$WASM_BACKEND_PATH yarn testwasm


@@ -0,0 +1,392 @@
//#![feature(set_stdio)]

mod types;

use std::{
    collections::{HashMap, HashSet},
    convert::TryFrom,
    fmt::Display,
};

use automerge_backend::{AutomergeError, Backend, Change, SyncMessage, SyncState};
use automerge_protocol::{ChangeHash, UncompressedChange};
use js_sys::Array;
use serde::{de::DeserializeOwned, Serialize};
use types::{BinaryChange, BinaryDocument, BinarySyncMessage, BinarySyncState, RawSyncMessage};
use wasm_bindgen::prelude::*;

extern crate web_sys;

#[allow(unused_macros)]
macro_rules! log {
    ( $( $t:tt )* ) => {
        web_sys::console::log_1(&format!( $( $t )* ).into());
    };
}

fn array<T: Serialize>(data: &[T]) -> Result<Array, JsValue> {
    let result = Array::new();
    for d in data {
        result.push(&rust_to_js(d)?);
    }
    Ok(result)
}

#[cfg(feature = "wee_alloc")]
#[global_allocator]
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;

fn js_to_rust<T: DeserializeOwned>(value: &JsValue) -> Result<T, JsValue> {
    value.into_serde().map_err(json_error_to_js)
}

fn rust_to_js<T: Serialize>(value: T) -> Result<JsValue, JsValue> {
    JsValue::from_serde(&value).map_err(json_error_to_js)
}

#[wasm_bindgen]
#[derive(Debug)]
struct State(Backend);

#[wasm_bindgen]
extern "C" {
    pub type Object;

    #[wasm_bindgen(constructor)]
    fn new() -> Object;

    #[wasm_bindgen(method, getter)]
    fn state(this: &Object) -> State;

    #[wasm_bindgen(method, setter)]
    fn set_state(this: &Object, state: State);

    #[wasm_bindgen(method, getter)]
    fn frozen(this: &Object) -> bool;

    #[wasm_bindgen(method, setter)]
    fn set_frozen(this: &Object, frozen: bool);

    #[wasm_bindgen(method, getter)]
    fn heads(this: &Object) -> Array;

    #[wasm_bindgen(method, setter)]
    fn set_heads(this: &Object, heads: Array);
}

#[wasm_bindgen]
#[derive(Clone)]
pub struct JsSyncState(SyncState);

#[wasm_bindgen]
impl JsSyncState {
    #[wasm_bindgen(getter, js_name = sharedHeads)]
    pub fn shared_heads(&self) -> JsValue {
        rust_to_js(&self.0.shared_heads).unwrap()
    }

    #[wasm_bindgen(getter, js_name = lastSentHeads)]
    pub fn last_sent_heads(&self) -> JsValue {
        rust_to_js(self.0.last_sent_heads.as_ref()).unwrap()
    }

    #[wasm_bindgen(setter, js_name = lastSentHeads)]
    pub fn set_last_sent_heads(&mut self, heads: JsValue) {
        let heads: Option<Vec<ChangeHash>> = js_to_rust(&heads).unwrap();
        self.0.last_sent_heads = heads
    }

    #[wasm_bindgen(setter, js_name = sentHashes)]
    pub fn set_sent_hashes(&mut self, hashes: JsValue) {
        let hashes_map: HashMap<ChangeHash, bool> = js_to_rust(&hashes).unwrap();
        let hashes_set: HashSet<ChangeHash> = hashes_map.keys().cloned().collect();
        self.0.sent_hashes = hashes_set
    }
}

#[wasm_bindgen]
pub fn init() -> Result<Object, JsValue> {
    Ok(wrapper(State(Backend::new()), false, Vec::new()))
}

#[wasm_bindgen(js_name = getHeads)]
pub fn get_heads(input: Object) -> Result<Array, JsValue> {
    Ok(input.heads())
}

#[wasm_bindgen(js_name = free)]
pub fn free(input: Object) -> Result<(), JsValue> {
    let state: State = get_state(&input)?;
    std::mem::drop(state);
    input.set_frozen(true);
    input.set_heads(Array::new());
    Ok(())
}

#[wasm_bindgen(js_name = applyLocalChange)]
pub fn apply_local_change(input: Object, change: JsValue) -> Result<JsValue, JsValue> {
    get_mut_input(input, |state| {
        // FIXME unwrap
        let change: UncompressedChange = js_to_rust(&change).unwrap();
        let (patch, change) = state.0.apply_local_change(change)?;
        let result = Array::new();
        let change_bytes = types::BinaryChange(change.raw_bytes().to_vec());
        // FIXME unwrap
        let p = rust_to_js(&patch).unwrap();
        result.push(&p);
        result.push(&serde_wasm_bindgen::to_value(&change_bytes).unwrap());
        Ok(result)
    })
}

#[wasm_bindgen(js_name = applyChanges)]
pub fn apply_changes(input: Object, changes: Array) -> Result<JsValue, JsValue> {
    get_mut_input(input, |state| {
        let ch = import_changes(&changes)?;
        let patch = state.0.apply_changes(ch)?;
        Ok(array(&vec![patch]).unwrap())
    })
}

#[wasm_bindgen(js_name = loadChanges)]
pub fn load_changes(input: Object, changes: Array) -> Result<JsValue, JsValue> {
    get_mut_input(input, |state| {
        let ch = import_changes(&changes)?;
        state.0.load_changes(ch)?;
        Ok(Array::new())
    })
}

#[wasm_bindgen(js_name = load)]
pub fn load(data: JsValue) -> Result<JsValue, JsValue> {
    let binary_document: BinaryDocument = serde_wasm_bindgen::from_value(data)?;
    let backend = Backend::load(binary_document.0).map_err(to_js_err)?;
    let heads = backend.get_heads();
    Ok(wrapper(State(backend), false, heads).into())
}

#[wasm_bindgen(js_name = getPatch)]
pub fn get_patch(input: Object) -> Result<JsValue, JsValue> {
    get_input(input, |state| {
        state.0.get_patch().map_err(to_js_err).and_then(rust_to_js)
    })
}

#[wasm_bindgen(js_name = clone)]
pub fn clone(input: Object) -> Result<Object, JsValue> {
    let old_state = get_state(&input)?;
    let state = State(old_state.0.clone());
    let heads = state.0.get_heads();
    input.set_state(old_state);
    Ok(wrapper(state, false, heads))
}

#[wasm_bindgen(js_name = save)]
pub fn save(input: Object) -> Result<JsValue, JsValue> {
    get_input(input, |state| {
        state
            .0
            .save()
            .map(BinaryDocument)
            .as_ref()
            .map_err(to_js_err)
            .and_then(|binary_document| Ok(serde_wasm_bindgen::to_value(binary_document)?))
    })
}

#[wasm_bindgen(js_name = getChanges)]
pub fn get_changes(input: Object, have_deps: JsValue) -> Result<JsValue, JsValue> {
    let deps: Vec<ChangeHash> = js_to_rust(&have_deps)?;
    get_input(input, |state| {
        Ok(export_changes(state.0.get_changes(&deps)).into())
    })
}

#[wasm_bindgen(js_name = getAllChanges)]
pub fn get_all_changes(input: Object) -> Result<JsValue, JsValue> {
    let deps: Vec<ChangeHash> = vec![];
    get_input(input, |state| {
        Ok(export_changes(state.0.get_changes(&deps)).into())
    })
}

#[wasm_bindgen(js_name = getMissingDeps)]
pub fn get_missing_deps(input: Object) -> Result<JsValue, JsValue> {
    get_input(input, |state| rust_to_js(state.0.get_missing_deps(&[])))
}

fn import_changes(changes: &Array) -> Result<Vec<Change>, AutomergeError> {
    let mut ch = Vec::with_capacity(changes.length() as usize);
    for c in changes.iter() {
        let change_bytes: types::BinaryChange = serde_wasm_bindgen::from_value(c).unwrap();
        ch.push(Change::from_bytes(change_bytes.0)?);
    }
    Ok(ch)
}

fn export_changes(changes: Vec<&Change>) -> Array {
    let result = Array::new();
    for c in changes {
        let change_bytes = BinaryChange(c.raw_bytes().to_vec());
        result.push(&serde_wasm_bindgen::to_value(&change_bytes).unwrap());
    }
    result
}

#[wasm_bindgen(js_name = generateSyncMessage)]
pub fn generate_sync_message(input: Object, sync_state: &JsSyncState) -> Result<JsValue, JsValue> {
    get_input(input, |state| {
        let mut sync_state = sync_state.clone();
        let message = state.0.generate_sync_message(&mut sync_state.0);
        let result = Array::new();
        result.push(&JsValue::from(sync_state));
        let message = if let Some(message) = message {
            serde_wasm_bindgen::to_value(&BinarySyncMessage(message.encode().map_err(to_js_err)?))?
        } else {
            JsValue::NULL
        };
        result.push(&message);
        Ok(result.into())
    })
}

#[wasm_bindgen(js_name = receiveSyncMessage)]
pub fn receive_sync_message(
    input: Object,
    sync_state: &JsSyncState,
    message: JsValue,
) -> Result<JsValue, JsValue> {
    let mut state: State = get_state(&input)?;
    let binary_message: BinarySyncMessage = serde_wasm_bindgen::from_value(message)?;
    let message = SyncMessage::decode(&binary_message.0).map_err(to_js_err)?;
    let mut sync_state = sync_state.clone();
    let patch = match state.0.receive_sync_message(&mut sync_state.0, message) {
        Ok(r) => r,
        Err(err) => {
            input.set_state(state);
            return Err(to_js_err(err));
        }
    };
    let result = Array::new();
    if patch.is_some() {
        let heads = state.0.get_heads();
        let new_state = wrapper(state, false, heads);
        // the receiveSyncMessage in automerge.js returns the original doc when there is no patch so we should only freeze it when there is a patch
        input.set_frozen(true);
        result.push(&new_state.into());
    } else {
        input.set_state(state);
        result.push(&input);
    }
    result.push(&JsValue::from(sync_state));
    let p = rust_to_js(&patch)?;
    result.push(&p);
    Ok(result.into())
}

#[wasm_bindgen(js_name = initSyncState)]
pub fn init_sync_state() -> Result<JsSyncState, JsValue> {
    Ok(JsSyncState(SyncState::default()))
}

#[wasm_bindgen(js_name = encodeSyncState)]
pub fn encode_sync_state(sync_state: &JsSyncState) -> Result<JsValue, JsValue> {
    let binary_sync_state = BinarySyncState(sync_state.0.clone().encode().map_err(to_js_err)?);
    Ok(serde_wasm_bindgen::to_value(&binary_sync_state)?)
}

#[wasm_bindgen(js_name = decodeSyncState)]
pub fn decode_sync_state(sync_state_bytes: JsValue) -> Result<JsSyncState, JsValue> {
    let bytes: BinarySyncState = serde_wasm_bindgen::from_value(sync_state_bytes)?;
    let sync_state = SyncState::decode(&bytes.0).map_err(to_js_err)?;
    Ok(JsSyncState(sync_state))
}

#[wasm_bindgen(js_name = encodeSyncMessage)]
pub fn encode_sync_message(sync_message: JsValue) -> Result<JsValue, JsValue> {
    let sync_message = SyncMessage::try_from(serde_wasm_bindgen::from_value::<RawSyncMessage>(
        sync_message,
    )?)
    .map_err(to_js_err)?;
    let binary_sync_message = BinarySyncMessage(sync_message.encode().map_err(to_js_err)?);
    Ok(serde_wasm_bindgen::to_value(&binary_sync_message)?)
}

#[wasm_bindgen(js_name = decodeSyncMessage)]
pub fn decode_sync_message(sync_message_bytes: JsValue) -> Result<JsValue, JsValue> {
    let bytes: BinarySyncMessage = serde_wasm_bindgen::from_value(sync_message_bytes)?;
    let sync_message = SyncMessage::decode(&bytes.0).map_err(to_js_err)?;
    serde_wasm_bindgen::to_value(&RawSyncMessage::try_from(sync_message).map_err(to_js_err)?)
        .map_err(to_js_err)
}

fn get_state(input: &Object) -> Result<State, JsValue> {
    if input.frozen() {
        Err(js_sys::Error::new("Attempting to use an outdated Automerge document that has already been updated. Please use the latest document state, or call Automerge.clone() if you really need to use this old document state.").into())
    } else {
        Ok(input.state())
    }
}

fn wrapper(state: State, frozen: bool, heads: Vec<ChangeHash>) -> Object {
    let heads_array = Array::new();
    for h in heads {
        heads_array.push(&rust_to_js(h).unwrap());
    }
    let wrapper = Object::new();
    wrapper.set_heads(heads_array);
    wrapper.set_frozen(frozen);
    wrapper.set_state(state);
    wrapper
}

fn get_input<F>(input: Object, action: F) -> Result<JsValue, JsValue>
where
    F: FnOnce(&State) -> Result<JsValue, JsValue>,
{
    let state: State = get_state(&input)?;
    let result = action(&state);
    input.set_state(state);
    result
}

fn get_mut_input<F>(input: Object, action: F) -> Result<JsValue, JsValue>
where
    F: Fn(&mut State) -> Result<Array, AutomergeError>,
{
    let mut state: State = get_state(&input)?;
    match action(&mut state) {
        Ok(result) => {
            let heads = state.0.get_heads();
            let new_state = wrapper(state, false, heads);
            input.set_frozen(true);
            if result.length() == 0 {
                Ok(new_state.into())
            } else {
                result.unshift(&new_state.into());
                Ok(result.into())
            }
        }
        Err(err) => {
            input.set_state(state);
            Err(to_js_err(err))
        }
    }
}

fn to_js_err<T: Display>(err: T) -> JsValue {
    js_sys::Error::new(&std::format!("Automerge error: {}", err)).into()
}

fn json_error_to_js(err: serde_json::Error) -> JsValue {
    js_sys::Error::new(&std::format!("serde_json error: {}", err)).into()
}


@@ -0,0 +1,102 @@
use std::convert::TryFrom;

use automerge_backend::{AutomergeError, BloomFilter, Change, SyncHave, SyncMessage};
use automerge_protocol::ChangeHash;
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
pub struct BinaryChange(#[serde(with = "serde_bytes")] pub Vec<u8>);

#[derive(Serialize, Deserialize)]
pub struct BinaryDocument(#[serde(with = "serde_bytes")] pub Vec<u8>);

#[derive(Serialize, Deserialize)]
pub struct BinarySyncState(#[serde(with = "serde_bytes")] pub Vec<u8>);

#[derive(Serialize, Deserialize)]
pub struct BinarySyncMessage(#[serde(with = "serde_bytes")] pub Vec<u8>);

#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RawSyncMessage {
    pub heads: Vec<ChangeHash>,
    pub need: Vec<ChangeHash>,
    pub have: Vec<RawSyncHave>,
    pub changes: Vec<BinaryChange>,
}

impl TryFrom<SyncMessage> for RawSyncMessage {
    type Error = AutomergeError;

    fn try_from(value: SyncMessage) -> Result<Self, Self::Error> {
        let have = value
            .have
            .into_iter()
            .map(RawSyncHave::try_from)
            .collect::<Result<_, _>>()?;
        let changes = value
            .changes
            .into_iter()
            .map(|c| BinaryChange(c.raw_bytes().to_vec()))
            .collect();
        Ok(Self {
            heads: value.heads,
            need: value.need,
            have,
            changes,
        })
    }
}

impl TryFrom<RawSyncMessage> for SyncMessage {
    type Error = AutomergeError;

    fn try_from(value: RawSyncMessage) -> Result<Self, Self::Error> {
        let have = value
            .have
            .into_iter()
            .map(SyncHave::try_from)
            .collect::<Result<_, _>>()?;
        let changes = value
            .changes
            .into_iter()
            .map(|b| Change::from_bytes(b.0))
            .collect::<Result<_, _>>()?;
        Ok(Self {
            heads: value.heads,
            need: value.need,
            have,
            changes,
        })
    }
}

#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RawSyncHave {
    pub last_sync: Vec<ChangeHash>,
    #[serde(with = "serde_bytes")]
    pub bloom: Vec<u8>,
}

impl TryFrom<SyncHave> for RawSyncHave {
    type Error = AutomergeError;

    fn try_from(value: SyncHave) -> Result<Self, Self::Error> {
        Ok(Self {
            last_sync: value.last_sync,
            bloom: value.bloom.into_bytes()?,
        })
    }
}

impl TryFrom<RawSyncHave> for SyncHave {
    type Error = AutomergeError;

    fn try_from(raw: RawSyncHave) -> Result<Self, Self::Error> {
        Ok(Self {
            last_sync: raw.last_sync,
            bloom: BloomFilter::try_from(raw.bloom.as_slice())?,
        })
    }
}


@@ -0,0 +1,52 @@
const assert = require('assert')
const Backend = require('..')

describe('Automerge.Backend', () => {
  describe('incremental diffs', () => {
    it('should assign to a key in a map', () => {
      const doc1 = Backend.init()
      const change = {
        actor: '55f250d0f76b4e15923600f98ebed8d7',
        seq: 1,
        startOp: 1,
        deps: [],
        time: 1609190674,
        message: '',
        ops: [
          {
            action: 'makeText',
            obj: '_root',
            key: 'text',
            insert: false,
            pred: []
          },
          {
            action: 'set',
            obj: '1@55f250d0f76b4e15923600f98ebed8d7',
            key: '_head',
            insert: true,
            pred: [],
            value: 'a'
          },
          {
            action: 'makeMap',
            obj: '1@55f250d0f76b4e15923600f98ebed8d7',
            key: '2@55f250d0f76b4e15923600f98ebed8d7',
            insert: true,
            pred: []
          },
          {
            action: 'set',
            obj: '3@55f250d0f76b4e15923600f98ebed8d7',
            key: 'attribute',
            insert: false,
            pred: [],
            value: 'bold'
          },
        ],
        extra_bytes: []
      }
      const doc2 = Backend.applyLocalChange(doc1, change)
    })
  })
})


@@ -0,0 +1,3 @@
--use_strict
--watch-extensions js
test/*test*.js
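These options pair with the `mocha` script in the package.json above, so the suite can be run locally with (a sketch, assuming wasm-pack and node are installed):

```bash
yarn mocha   # runs `yarn build` to produce the dev wasm package, then mocha --bail --full-trace
```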


@@ -0,0 +1,735 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
ansi-colors@3.2.3:
version "3.2.3"
resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.3.tgz#57d35b8686e851e2cc04c403f1c00203976a1813"
integrity sha512-LEHHyuhlPY3TmuUYMh2oz89lTShfvgbmzaBcxve9t/9Wuy7Dwf4yoAKcND7KFT1HAQfqZ12qtc+DUrBMeKF9nw==
ansi-regex@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998"
integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=
ansi-regex@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997"
integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==
ansi-styles@^3.2.0, ansi-styles@^3.2.1:
version "3.2.1"
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d"
integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==
dependencies:
color-convert "^1.9.0"
anymatch@~3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142"
integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==
dependencies:
normalize-path "^3.0.0"
picomatch "^2.0.4"
argparse@^1.0.7:
version "1.0.10"
resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911"
integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==
dependencies:
sprintf-js "~1.0.2"
balanced-match@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767"
integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c=
binary-extensions@^2.0.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d"
integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==
brace-expansion@^1.1.7:
version "1.1.11"
resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==
dependencies:
balanced-match "^1.0.0"
concat-map "0.0.1"
braces@~3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107"
integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==
dependencies:
fill-range "^7.0.1"
browser-stdout@1.3.1:
version "1.3.1"
resolved "https://registry.yarnpkg.com/browser-stdout/-/browser-stdout-1.3.1.tgz#baa559ee14ced73452229bad7326467c61fabd60"
integrity sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==
call-bind@^1.0.0, call-bind@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c"
integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==
dependencies:
function-bind "^1.1.1"
get-intrinsic "^1.0.2"
camelcase@^5.0.0:
version "5.3.1"
resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320"
integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==
chalk@^2.4.2:
version "2.4.2"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==
dependencies:
ansi-styles "^3.2.1"
escape-string-regexp "^1.0.5"
supports-color "^5.3.0"
chokidar@3.3.0:
version "3.3.0"
resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.3.0.tgz#12c0714668c55800f659e262d4962a97faf554a6"
integrity sha512-dGmKLDdT3Gdl7fBUe8XK+gAtGmzy5Fn0XkkWQuYxGIgWVPPse2CxFA5mtrlD0TOHaHjEUqkWNyP1XdHoJES/4A==
dependencies:
anymatch "~3.1.1"
braces "~3.0.2"
glob-parent "~5.1.0"
is-binary-path "~2.1.0"
is-glob "~4.0.1"
normalize-path "~3.0.0"
readdirp "~3.2.0"
optionalDependencies:
fsevents "~2.1.1"
cliui@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5"
integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==
dependencies:
string-width "^3.1.0"
strip-ansi "^5.2.0"
wrap-ansi "^5.1.0"
color-convert@^1.9.0:
version "1.9.3"
resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8"
integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==
dependencies:
color-name "1.1.3"
color-name@1.1.3:
version "1.1.3"
resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=
concat-map@0.0.1:
version "0.0.1"
resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=
debug@3.2.6:
version "3.2.6"
resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b"
integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==
dependencies:
ms "^2.1.1"
decamelize@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290"
integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=
define-properties@^1.1.2, define-properties@^1.1.3:
version "1.1.3"
resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1"
integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==
dependencies:
object-keys "^1.0.12"
diff@3.5.0:
version "3.5.0"
resolved "https://registry.yarnpkg.com/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12"
integrity sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==
emoji-regex@^7.0.1:
version "7.0.3"
resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156"
integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==
es-abstract@^1.18.0-next.1:
version "1.18.0-next.2"
resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.18.0-next.2.tgz#088101a55f0541f595e7e057199e27ddc8f3a5c2"
integrity sha512-Ih4ZMFHEtZupnUh6497zEL4y2+w8+1ljnCyaTa+adcoafI1GOvMwFlDjBLfWR7y9VLfrjRJe9ocuHY1PSR9jjw==
dependencies:
call-bind "^1.0.2"
es-to-primitive "^1.2.1"
function-bind "^1.1.1"
get-intrinsic "^1.0.2"
has "^1.0.3"
has-symbols "^1.0.1"
is-callable "^1.2.2"
is-negative-zero "^2.0.1"
is-regex "^1.1.1"
object-inspect "^1.9.0"
object-keys "^1.1.1"
object.assign "^4.1.2"
string.prototype.trimend "^1.0.3"
string.prototype.trimstart "^1.0.3"
es-to-primitive@^1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a"
integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==
dependencies:
is-callable "^1.1.4"
is-date-object "^1.0.1"
is-symbol "^1.0.2"
escape-string-regexp@1.0.5, escape-string-regexp@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=
esprima@^4.0.0:
version "4.0.1"
resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71"
integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==
fill-range@^7.0.1:
version "7.0.1"
resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40"
integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==
dependencies:
to-regex-range "^5.0.1"
find-up@3.0.0, find-up@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73"
integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==
dependencies:
locate-path "^3.0.0"
flat@^4.1.0:
version "4.1.1"
resolved "https://registry.yarnpkg.com/flat/-/flat-4.1.1.tgz#a392059cc382881ff98642f5da4dde0a959f309b"
integrity sha512-FmTtBsHskrU6FJ2VxCnsDb84wu9zhmO3cUX2kGFb5tuwhfXxGciiT0oRY+cck35QmG+NmGh5eLz6lLCpWTqwpA==
dependencies:
is-buffer "~2.0.3"
fs.realpath@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8=
fsevents@~2.1.1:
version "2.1.3"
resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.3.tgz#fb738703ae8d2f9fe900c33836ddebee8b97f23e"
integrity sha512-Auw9a4AxqWpa9GUfj370BMPzzyncfBABW8Mab7BGWBYDj4Isgq+cDKtx0i6u9jcX9pQDnswsaaOTgTmA5pEjuQ==
function-bind@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==
get-caller-file@^2.0.1:
version "2.0.5"
resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e"
integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==
get-intrinsic@^1.0.2:
version "1.1.1"
resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.1.tgz#15f59f376f855c446963948f0d24cd3637b4abc6"
integrity sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==
dependencies:
function-bind "^1.1.1"
has "^1.0.3"
has-symbols "^1.0.1"
glob-parent@~5.1.0:
version "5.1.1"
resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.1.tgz#b6c1ef417c4e5663ea498f1c45afac6916bbc229"
integrity sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==
dependencies:
is-glob "^4.0.1"
glob@7.1.3:
version "7.1.3"
resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.3.tgz#3960832d3f1574108342dafd3a67b332c0969df1"
integrity sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==
dependencies:
fs.realpath "^1.0.0"
inflight "^1.0.4"
inherits "2"
minimatch "^3.0.4"
once "^1.3.0"
path-is-absolute "^1.0.0"
growl@1.10.5:
version "1.10.5"
resolved "https://registry.yarnpkg.com/growl/-/growl-1.10.5.tgz#f2735dc2283674fa67478b10181059355c369e5e"
integrity sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==
has-flag@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0=
has-symbols@^1.0.0, has-symbols@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8"
integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==
has@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796"
integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==
dependencies:
function-bind "^1.1.1"
he@1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f"
integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==
inflight@^1.0.4:
version "1.0.6"
resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=
dependencies:
once "^1.3.0"
wrappy "1"
inherits@2:
version "2.0.4"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
is-binary-path@~2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09"
integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==
dependencies:
binary-extensions "^2.0.0"
is-buffer@~2.0.3:
version "2.0.5"
resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191"
integrity sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==
is-callable@^1.1.4, is-callable@^1.2.2:
version "1.2.3"
resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.3.tgz#8b1e0500b73a1d76c70487636f368e519de8db8e"
integrity sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ==
is-date-object@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.2.tgz#bda736f2cd8fd06d32844e7743bfa7494c3bfd7e"
integrity sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==
is-extglob@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2"
integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=
is-fullwidth-code-point@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f"
integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=
is-glob@^4.0.1, is-glob@~4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc"
integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==
dependencies:
is-extglob "^2.1.1"
is-negative-zero@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.1.tgz#3de746c18dda2319241a53675908d8f766f11c24"
integrity sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w==
is-number@^7.0.0:
version "7.0.0"
resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b"
integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==
is-regex@^1.1.1:
version "1.1.2"
resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.2.tgz#81c8ebde4db142f2cf1c53fc86d6a45788266251"
integrity sha512-axvdhb5pdhEVThqJzYXwMlVuZwC+FF2DpcOhTS+y/8jVq4trxyPgfcwIxIKiyeuLlSQYKkmUaPQJ8ZE4yNKXDg==
dependencies:
call-bind "^1.0.2"
has-symbols "^1.0.1"
is-symbol@^1.0.2:
version "1.0.3"
resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937"
integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==
dependencies:
has-symbols "^1.0.1"
isexe@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=
js-yaml@3.13.1:
version "3.13.1"
resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847"
integrity sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==
dependencies:
argparse "^1.0.7"
esprima "^4.0.0"
locate-path@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e"
integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==
dependencies:
p-locate "^3.0.0"
path-exists "^3.0.0"
lodash@^4.17.15:
version "4.17.21"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
log-symbols@3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-3.0.0.tgz#f3a08516a5dea893336a7dee14d18a1cfdab77c4"
integrity sha512-dSkNGuI7iG3mfvDzUuYZyvk5dD9ocYCYzNU6CYDE6+Xqd+gwme6Z00NS3dUh8mq/73HaEtT7m6W+yUPtU6BZnQ==
dependencies:
chalk "^2.4.2"
minimatch@3.0.4, minimatch@^3.0.4:
version "3.0.4"
resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083"
integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==
dependencies:
brace-expansion "^1.1.7"
minimist@^1.2.5:
version "1.2.5"
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602"
integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==
mkdirp@0.5.5:
version "0.5.5"
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def"
integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==
dependencies:
minimist "^1.2.5"
mocha@^7.1.1:
version "7.2.0"
resolved "https://registry.yarnpkg.com/mocha/-/mocha-7.2.0.tgz#01cc227b00d875ab1eed03a75106689cfed5a604"
integrity sha512-O9CIypScywTVpNaRrCAgoUnJgozpIofjKUYmJhiCIJMiuYnLI6otcb1/kpW9/n/tJODHGZ7i8aLQoDVsMtOKQQ==
dependencies:
ansi-colors "3.2.3"
browser-stdout "1.3.1"
chokidar "3.3.0"
debug "3.2.6"
diff "3.5.0"
escape-string-regexp "1.0.5"
find-up "3.0.0"
glob "7.1.3"
growl "1.10.5"
he "1.2.0"
js-yaml "3.13.1"
log-symbols "3.0.0"
minimatch "3.0.4"
mkdirp "0.5.5"
ms "2.1.1"
node-environment-flags "1.0.6"
object.assign "4.1.0"
strip-json-comments "2.0.1"
supports-color "6.0.0"
which "1.3.1"
wide-align "1.1.3"
yargs "13.3.2"
yargs-parser "13.1.2"
yargs-unparser "1.6.0"
ms@2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a"
integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==
ms@^2.1.1:
version "2.1.3"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
node-environment-flags@1.0.6:
version "1.0.6"
resolved "https://registry.yarnpkg.com/node-environment-flags/-/node-environment-flags-1.0.6.tgz#a30ac13621f6f7d674260a54dede048c3982c088"
integrity sha512-5Evy2epuL+6TM0lCQGpFIj6KwiEsGh1SrHUhTbNX+sLbBtjidPZFAnVK9y5yU1+h//RitLbRHTIMyxQPtxMdHw==
dependencies:
object.getownpropertydescriptors "^2.0.3"
semver "^5.7.0"
normalize-path@^3.0.0, normalize-path@~3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65"
integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==
object-inspect@^1.9.0:
version "1.9.0"
resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.9.0.tgz#c90521d74e1127b67266ded3394ad6116986533a"
integrity sha512-i3Bp9iTqwhaLZBxGkRfo5ZbE07BQRT7MGu8+nNgwW9ItGp1TzCTw2DLEoWwjClxBjOFI/hWljTAmYGCEwmtnOw==
object-keys@^1.0.11, object-keys@^1.0.12, object-keys@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e"
integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==
object.assign@4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da"
integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==
dependencies:
define-properties "^1.1.2"
function-bind "^1.1.1"
has-symbols "^1.0.0"
object-keys "^1.0.11"
object.assign@^4.1.2:
version "4.1.2"
resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940"
integrity sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==
dependencies:
call-bind "^1.0.0"
define-properties "^1.1.3"
has-symbols "^1.0.1"
object-keys "^1.1.1"
object.getownpropertydescriptors@^2.0.3:
version "2.1.1"
resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.1.tgz#0dfda8d108074d9c563e80490c883b6661091544"
integrity sha512-6DtXgZ/lIZ9hqx4GtZETobXLR/ZLaa0aqV0kzbn80Rf8Z2e/XFnhA0I7p07N2wH8bBBltr2xQPi6sbKWAY2Eng==
dependencies:
call-bind "^1.0.0"
define-properties "^1.1.3"
es-abstract "^1.18.0-next.1"
once@^1.3.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
dependencies:
wrappy "1"
p-limit@^2.0.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1"
integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==
dependencies:
p-try "^2.0.0"
p-locate@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4"
integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==
dependencies:
p-limit "^2.0.0"
p-try@^2.0.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6"
integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==
path-exists@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515"
integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=
path-is-absolute@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18=
picomatch@^2.0.4:
version "2.2.2"
resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad"
integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==
readdirp@~3.2.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.2.0.tgz#c30c33352b12c96dfb4b895421a49fd5a9593839"
integrity sha512-crk4Qu3pmXwgxdSgGhgA/eXiJAPQiX4GMOZZMXnqKxHX7TaoL+3gQVo/WeuAiogr07DpnfjIMpXXa+PAIvwPGQ==
dependencies:
picomatch "^2.0.4"
require-directory@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42"
integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I=
require-main-filename@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b"
integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==
semver@^5.7.0:
version "5.7.1"
resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==
set-blocking@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7"
integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc=
sprintf-js@~1.0.2:
version "1.0.3"
resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=
"string-width@^1.0.2 || 2":
version "2.1.1"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e"
integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==
dependencies:
is-fullwidth-code-point "^2.0.0"
strip-ansi "^4.0.0"
string-width@^3.0.0, string-width@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961"
integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==
dependencies:
emoji-regex "^7.0.1"
is-fullwidth-code-point "^2.0.0"
strip-ansi "^5.1.0"
string.prototype.trimend@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.3.tgz#a22bd53cca5c7cf44d7c9d5c732118873d6cd18b"
integrity sha512-ayH0pB+uf0U28CtjlLvL7NaohvR1amUvVZk+y3DYb0Ey2PUV5zPkkKy9+U1ndVEIXO8hNg18eIv9Jntbii+dKw==
dependencies:
call-bind "^1.0.0"
define-properties "^1.1.3"
string.prototype.trimstart@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.3.tgz#9b4cb590e123bb36564401d59824298de50fd5aa"
integrity sha512-oBIBUy5lea5tt0ovtOFiEQaBkoBBkyJhZXzJYrSmDo5IUUqbOPvVezuRs/agBIdZ2p2Eo1FD6bD9USyBLfl3xg==
dependencies:
call-bind "^1.0.0"
define-properties "^1.1.3"
strip-ansi@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f"
integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8=
dependencies:
ansi-regex "^3.0.0"
strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0:
version "5.2.0"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae"
integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==
dependencies:
ansi-regex "^4.1.0"
strip-json-comments@2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo=
supports-color@6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.0.0.tgz#76cfe742cf1f41bb9b1c29ad03068c05b4c0e40a"
integrity sha512-on9Kwidc1IUQo+bQdhi8+Tijpo0e1SS6RoGo2guUwn5vdaxw8RXOF9Vb2ws+ihWOmh4JnCJOvaziZWP1VABaLg==
dependencies:
has-flag "^3.0.0"
supports-color@^5.3.0:
version "5.5.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f"
integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==
dependencies:
has-flag "^3.0.0"
to-regex-range@^5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4"
integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==
dependencies:
is-number "^7.0.0"
which-module@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a"
integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=
which@1.3.1:
version "1.3.1"
resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==
dependencies:
isexe "^2.0.0"
wide-align@1.1.3:
version "1.1.3"
resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457"
integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==
dependencies:
string-width "^1.0.2 || 2"
wrap-ansi@^5.1.0:
version "5.1.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09"
integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==
dependencies:
ansi-styles "^3.2.0"
string-width "^3.0.0"
strip-ansi "^5.0.0"
wrappy@1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
y18n@^4.0.0:
version "4.0.1"
resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.1.tgz#8db2b83c31c5d75099bb890b23f3094891e247d4"
integrity sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==
yargs-parser@13.1.2, yargs-parser@^13.1.2:
version "13.1.2"
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.2.tgz#130f09702ebaeef2650d54ce6e3e5706f7a4fb38"
integrity sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==
dependencies:
camelcase "^5.0.0"
decamelize "^1.2.0"
yargs-unparser@1.6.0:
version "1.6.0"
resolved "https://registry.yarnpkg.com/yargs-unparser/-/yargs-unparser-1.6.0.tgz#ef25c2c769ff6bd09e4b0f9d7c605fb27846ea9f"
integrity sha512-W9tKgmSn0DpSatfri0nx52Joq5hVXgeLiqR/5G0sZNDoLZFOr/xjBUDcShCOGNsBnEMNo1KAMBkTej1Hm62HTw==
dependencies:
flat "^4.1.0"
lodash "^4.17.15"
yargs "^13.3.0"
yargs@13.3.2, yargs@^13.3.0:
version "13.3.2"
resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.2.tgz#ad7ffefec1aa59565ac915f82dccb38a9c31a2dd"
integrity sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==
dependencies:
cliui "^5.0.0"
find-up "^3.0.0"
get-caller-file "^2.0.1"
require-directory "^2.1.1"
require-main-filename "^2.0.0"
set-blocking "^2.0.0"
string-width "^3.0.0"
which-module "^2.0.0"
y18n "^4.0.0"
yargs-parser "^13.1.2"

View file

@ -0,0 +1,41 @@
[package]
name = "automerge-backend"
version = "0.0.1"
authors = ["Alex Good <alex@memoryandthought.me>"]
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
bench = false
[dependencies]
serde = { version = "^1.0", features=["derive"] }
serde_json = "^1.0"
wasm-bindgen = "^0.2"
js-sys = "^0.3"
hex = "^0.4.2"
rand = { version = "^0.7.3", features=["small_rng"] }
maplit = "^1.0.2"
sha2 = "^0.8.1"
leb128 = "^0.2.4"
automerge-protocol = { path = "../automerge-protocol" }
fxhash = "^0.2.1"
thiserror = "1.0.16"
itertools = "0.9.0"
tracing = { version = "0.1.25", features = ["log"] }
flate2 = "1.0.20"
arbitrary = { version = "1", features = ["derive"], optional = true }
[dependencies.web-sys]
version = "0.3"
features = [
"console",
]
[dev-dependencies]
test-env-log = "0.2.6"
env_logger = "*"
tracing-subscriber = {version = "0.2", features = ["chrono", "env-filter", "fmt"]}
[features]
derive-arbitrary = ["arbitrary"]

View file

@ -0,0 +1,9 @@
### TODO
1. Performance work
2. Multi-Change compression `save() / load()`
3. `Automerge.ack()`
4. `Automerge.getLastLocalChange()`

View file

@ -0,0 +1,134 @@
use std::cmp::Ordering;
use automerge_protocol as amp;
use crate::internal::{ActorId, ElementId, InternalOp, InternalOpType, Key, ObjectId, OpId};
#[derive(PartialEq, Debug, Clone, Default)]
pub(crate) struct ActorMap(Vec<amp::ActorId>);
impl ActorMap {
pub fn import_key(&mut self, key: &amp::Key) -> Key {
match key {
amp::Key::Map(string) => Key::Map(string.to_string()),
amp::Key::Seq(eid) => Key::Seq(self.import_element_id(eid)),
}
}
pub fn import_actor(&mut self, actor: &amp::ActorId) -> ActorId {
if let Some(idx) = self.0.iter().position(|a| a == actor) {
ActorId(idx)
} else {
self.0.push(actor.clone());
ActorId(self.0.len() - 1)
}
}
pub fn import_opid(&mut self, opid: &amp::OpId) -> OpId {
OpId(opid.0, self.import_actor(&opid.1))
}
pub fn import_obj(&mut self, obj: &amp::ObjectId) -> ObjectId {
match obj {
amp::ObjectId::Root => ObjectId::Root,
amp::ObjectId::Id(ref opid) => ObjectId::Id(self.import_opid(opid)),
}
}
pub fn import_element_id(&mut self, eid: &amp::ElementId) -> ElementId {
match eid {
amp::ElementId::Head => ElementId::Head,
amp::ElementId::Id(ref opid) => ElementId::Id(self.import_opid(opid)),
}
}
pub fn import_op(&mut self, op: amp::Op) -> InternalOp {
InternalOp {
action: Self::import_optype(&op.action),
obj: self.import_obj(&op.obj),
key: self.import_key(&op.key),
pred: op
.pred
.into_iter()
.map(|ref id| self.import_opid(id))
.collect(),
insert: op.insert,
}
}
pub fn import_optype(optype: &amp::OpType) -> InternalOpType {
match optype {
amp::OpType::Make(val) => InternalOpType::Make(*val),
amp::OpType::Del => InternalOpType::Del,
amp::OpType::Inc(val) => InternalOpType::Inc(*val),
amp::OpType::Set(val) => InternalOpType::Set(val.clone()),
}
}
pub fn export_actor(&self, actor: ActorId) -> amp::ActorId {
self.0[actor.0].clone()
}
pub fn export_opid(&self, opid: &OpId) -> amp::OpId {
amp::OpId(opid.0, self.export_actor(opid.1))
}
pub fn export_obj(&self, obj: &ObjectId) -> amp::ObjectId {
match obj {
ObjectId::Root => amp::ObjectId::Root,
ObjectId::Id(opid) => amp::ObjectId::Id(self.export_opid(opid)),
}
}
#[allow(dead_code)]
pub fn index_of(&mut self, actor: &amp::ActorId) -> usize {
if let Some(index) = self.0.iter().position(|a| a == actor) {
return index;
}
self.0.push(actor.clone());
self.0.len() - 1
}
#[allow(dead_code)]
pub fn actor_for(&self, index: usize) -> Option<&amp::ActorId> {
self.0.get(index)
}
pub fn cmp(&self, eid1: &ElementId, eid2: &ElementId) -> Ordering {
match (eid1, eid2) {
(ElementId::Head, ElementId::Head) => Ordering::Equal,
(ElementId::Head, _) => Ordering::Less,
(_, ElementId::Head) => Ordering::Greater,
(ElementId::Id(opid1), ElementId::Id(opid2)) => self.cmp_opid(opid1, opid2),
}
}
pub fn opid_to_string(&self, id: &OpId) -> String {
format!("{}@{}", id.0, self.export_actor(id.1).to_hex_string())
}
pub fn elementid_to_string(&self, eid: &ElementId) -> String {
match eid {
ElementId::Head => "_head".into(),
ElementId::Id(id) => self.opid_to_string(id),
}
}
pub fn key_to_string(&self, key: &Key) -> String {
match &key {
Key::Map(s) => s.clone(),
Key::Seq(eid) => self.elementid_to_string(eid),
}
}
fn cmp_opid(&self, op1: &OpId, op2: &OpId) -> Ordering {
if op1.0 == op2.0 {
let actor1 = &self.0[(op1.1).0];
let actor2 = &self.0[(op2.1).0];
actor1.cmp(actor2)
//op1.1.cmp(&op2.1)
} else {
op1.0.cmp(&op2.0)
}
}
}

View file

@ -0,0 +1,608 @@
use core::cmp::max;
use std::{
collections::{HashMap, HashSet, VecDeque},
fmt::Debug,
};
use amp::ChangeHash;
use automerge_protocol as amp;
use crate::{
actor_map::ActorMap,
change::encode_document,
error::AutomergeError,
event_handlers::{EventHandlerId, EventHandlers},
internal::ObjectId,
op_handle::OpHandle,
op_set::OpSet,
pending_diff::PendingDiff,
Change, EventHandler,
};
#[derive(Debug, Default, Clone)]
pub struct Backend {
queue: Vec<Change>,
op_set: OpSet,
states: HashMap<amp::ActorId, Vec<usize>>,
actors: ActorMap,
history: Vec<Change>,
history_index: HashMap<amp::ChangeHash, usize>,
event_handlers: EventHandlers,
}
impl Backend {
pub fn new() -> Self {
Self::default()
}
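/// Builds a patch describing the current state (a descriptive comment,
/// derived from the body below). When `actor_seq` is given for a locally
/// generated change, that actor's own last change hash is filtered out of
/// the reported `deps`.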
fn make_patch(
&self,
diffs: Option<amp::Diff>,
actor_seq: Option<(amp::ActorId, u64)>,
) -> Result<amp::Patch, AutomergeError> {
let mut deps: Vec<_> = if let Some((ref actor, ref seq)) = actor_seq {
let last_hash = self.get_hash(actor, *seq)?;
self.op_set
.deps
.iter()
.filter(|&dep| dep != &last_hash)
.copied()
.collect()
} else {
self.op_set.deps.iter().copied().collect()
};
deps.sort_unstable();
let pending_changes = self.get_missing_deps(&[]).len();
Ok(amp::Patch {
diffs,
deps,
max_op: self.op_set.max_op,
clock: self
.states
.iter()
.map(|(k, v)| (k.clone(), v.len() as u64))
.collect(),
actor: actor_seq.clone().map(|(actor, _)| actor),
seq: actor_seq.map(|(_, seq)| seq),
pending_changes,
})
}
pub fn load_changes(&mut self, changes: Vec<Change>) -> Result<(), AutomergeError> {
self.apply(changes, None)?;
Ok(())
}
pub fn apply_changes(&mut self, changes: Vec<Change>) -> Result<amp::Patch, AutomergeError> {
self.apply(changes, None)
}
pub fn get_heads(&self) -> Vec<amp::ChangeHash> {
self.op_set.heads()
}
fn apply(
&mut self,
changes: Vec<Change>,
actor: Option<(amp::ActorId, u64)>,
) -> Result<amp::Patch, AutomergeError> {
let mut pending_diffs = HashMap::new();
for change in changes {
self.add_change(change, actor.is_some(), &mut pending_diffs)?;
}
let op_set = &mut self.op_set;
let diffs = op_set.finalize_diffs(pending_diffs, &self.actors)?;
self.make_patch(diffs, actor)
}
fn get_hash(&self, actor: &amp::ActorId, seq: u64) -> Result<amp::ChangeHash, AutomergeError> {
self.states
.get(actor)
.and_then(|v| v.get(seq as usize - 1))
.and_then(|&i| self.history.get(i))
.map(|c| c.hash)
.ok_or(AutomergeError::InvalidSeq(seq))
}
pub fn apply_local_change(
&mut self,
mut change: amp::UncompressedChange,
) -> Result<(amp::Patch, Change), AutomergeError> {
self.check_for_duplicate(&change)?; // errors if the change has already been applied
let actor_seq = (change.actor_id.clone(), change.seq);
if change.seq > 1 {
let last_hash = self.get_hash(&change.actor_id, change.seq - 1)?;
if !change.deps.contains(&last_hash) {
change.deps.push(last_hash)
}
}
let bin_change: Change = change.into();
let patch: amp::Patch = self.apply(vec![bin_change.clone()], Some(actor_seq))?;
Ok((patch, bin_change))
}
fn check_for_duplicate(&self, change: &amp::UncompressedChange) -> Result<(), AutomergeError> {
if self
.states
.get(&change.actor_id)
.map_or(0, |v| v.len() as u64)
>= change.seq
{
return Err(AutomergeError::DuplicateChange(format!(
"Change request has already been applied {}:{}",
change.actor_id.to_hex_string(),
change.seq
)));
}
Ok(())
}
fn add_change(
&mut self,
change: Change,
local: bool,
diffs: &mut HashMap<ObjectId, Vec<PendingDiff>>,
) -> Result<(), AutomergeError> {
if local {
self.apply_change(change, diffs)
} else {
self.queue.push(change);
self.apply_queued_ops(diffs)
}
}
fn apply_queued_ops(
&mut self,
diffs: &mut HashMap<ObjectId, Vec<PendingDiff>>,
) -> Result<(), AutomergeError> {
while let Some(next_change) = self.pop_next_causally_ready_change() {
self.apply_change(next_change, diffs)?;
}
Ok(())
}
fn apply_change(
&mut self,
change: Change,
diffs: &mut HashMap<ObjectId, Vec<PendingDiff>>,
) -> Result<(), AutomergeError> {
if self.history_index.contains_key(&change.hash) {
return Ok(());
}
self.event_handlers.before_apply_change(&change);
let change_index = self.update_history(change);
// SAFETY: change_index is the index for the change we've just added so this can't (and
// shouldn't) panic. This is to get around the borrow checker.
let change = &self.history[change_index];
let op_set = &mut self.op_set;
let start_op = change.start_op;
op_set.update_deps(change);
let ops = OpHandle::extract(change, &mut self.actors);
op_set.max_op = max(
op_set.max_op,
(start_op
.checked_add(ops.len() as u64)
.ok_or(AutomergeError::Overflow)?)
.saturating_sub(1),
);
op_set.apply_ops(ops, diffs, &mut self.actors)?;
self.event_handlers.after_apply_change(change);
Ok(())
}
fn update_history(&mut self, change: Change) -> usize {
let history_index = self.history.len();
self.states
.entry(change.actor_id().clone())
.or_default()
.push(history_index);
self.history_index.insert(change.hash, history_index);
self.history.push(change);
history_index
}
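/// Removes and returns the first queued change whose dependencies have all
/// already been applied, if any (a descriptive comment, derived from the
/// body below).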
fn pop_next_causally_ready_change(&mut self) -> Option<Change> {
let mut index = 0;
while index < self.queue.len() {
let change = self.queue.get(index).unwrap();
if change
.deps
.iter()
.all(|d| self.history_index.contains_key(d))
{
return Some(self.queue.remove(index));
}
index += 1
}
None
}
pub fn get_patch(&self) -> Result<amp::Patch, AutomergeError> {
let diffs = self
.op_set
.construct_object(&ObjectId::Root, &self.actors)?;
self.make_patch(Some(diffs), None)
}
pub fn get_changes_for_actor_id(
&self,
actor_id: &amp::ActorId,
) -> Result<Vec<&Change>, AutomergeError> {
Ok(self
.states
.get(actor_id)
.map(|vec| vec.iter().filter_map(|&i| self.history.get(i)).collect())
.unwrap_or_default())
}
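/// Fast path for `get_changes` (a descriptive comment, derived from the
/// body below): scan history forward from just past the lowest index among
/// `have_deps`, returning `None` when concurrent changes make the linear
/// walk unsound and the slow graph traversal is needed instead.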
fn get_changes_fast(&self, have_deps: &[amp::ChangeHash]) -> Option<Vec<&Change>> {
if have_deps.is_empty() {
return Some(self.history.iter().collect());
}
let lowest_idx = have_deps
.iter()
.filter_map(|h| self.history_index.get(h))
.min()?
+ 1;
let mut missing_changes = vec![];
let mut has_seen: HashSet<_> = have_deps.iter().collect();
for change in &self.history[lowest_idx..] {
let deps_seen = change.deps.iter().filter(|h| has_seen.contains(h)).count();
if deps_seen > 0 {
if deps_seen != change.deps.len() {
// a future change depends on something we haven't seen - the fast path can't work
return None;
}
missing_changes.push(change);
has_seen.insert(&change.hash);
}
}
// if we get to the end and there is a head we haven't seen then the fast path can't work
if self.get_heads().iter().all(|h| has_seen.contains(h)) {
Some(missing_changes)
} else {
None
}
}
fn get_changes_slow(&self, have_deps: &[amp::ChangeHash]) -> Vec<&Change> {
let mut stack: Vec<_> = have_deps.iter().collect();
let mut has_seen = HashSet::new();
while let Some(hash) = stack.pop() {
if has_seen.contains(&hash) {
continue;
}
if let Some(change) = self
.history_index
.get(hash)
.and_then(|i| self.history.get(*i))
{
stack.extend(change.deps.iter());
}
has_seen.insert(hash);
}
self.history
.iter()
.filter(|change| !has_seen.contains(&change.hash))
.collect()
}
pub fn get_changes(&self, have_deps: &[amp::ChangeHash]) -> Vec<&Change> {
if let Some(changes) = self.get_changes_fast(have_deps) {
changes
} else {
self.get_changes_slow(have_deps)
}
}
pub fn save(&self) -> Result<Vec<u8>, AutomergeError> {
let changes: Vec<amp::UncompressedChange> = self.history.iter().map(|r| r.into()).collect();
Ok(encode_document(&changes)?)
}
// allow this for API reasons
#[allow(clippy::needless_pass_by_value)]
pub fn load(data: Vec<u8>) -> Result<Self, AutomergeError> {
let changes = Change::load_document(&data)?;
let mut backend = Self::new();
backend.load_changes(changes)?;
Ok(backend)
}
pub fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec<amp::ChangeHash> {
let in_queue: HashSet<_> = self.queue.iter().map(|change| change.hash).collect();
let mut missing = HashSet::new();
for head in self.queue.iter().flat_map(|change| &change.deps) {
if !self.history_index.contains_key(head) {
missing.insert(head);
}
}
for head in heads {
if !self.history_index.contains_key(head) {
missing.insert(head);
}
}
let mut missing = missing
.into_iter()
.filter(|hash| !in_queue.contains(hash))
.copied()
.collect::<Vec<_>>();
missing.sort();
missing
}
pub fn get_change_by_hash(&self, hash: &amp::ChangeHash) -> Option<&Change> {
self.history_index
.get(hash)
.and_then(|index| self.history.get(*index))
}
/// Filter the changes down to those that are not transitive dependencies of the heads.
///
/// Thus a graph with these heads has not seen the remaining changes.
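/// For example (an illustrative trace of the code below): with history
/// `a1 <- a2` plus a concurrent change `b1`, calling this with
/// `heads = [a2.hash]` and `changes = {a1.hash, b1.hash}` removes
/// `a1.hash` (a transitive dependency of the head) and leaves `b1.hash`.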
pub(crate) fn filter_changes(
&self,
heads: &[amp::ChangeHash],
changes: &mut HashSet<amp::ChangeHash>,
) {
// Reduce the working set to those hashes which we may actually be able to find.
// This filters out those hashes that are successors of or concurrent with all of the
// heads.
// This can help in avoiding traversing the entire graph back to the roots when we try to
// search for a hash we know won't be found there.
let max_head_index = heads
.iter()
.map(|h| self.history_index.get(h).unwrap_or(&0))
.max()
.unwrap_or(&0);
let mut may_find: HashSet<ChangeHash> = changes
.iter()
.filter(|hash| {
let change_index = self.history_index.get(hash).unwrap_or(&0);
change_index <= max_head_index
})
.copied()
.collect();
if may_find.is_empty() {
return;
}
let mut queue: VecDeque<_> = heads.iter().collect();
let mut seen = HashSet::new();
while let Some(hash) = queue.pop_front() {
if seen.contains(hash) {
continue;
}
seen.insert(hash);
let removed = may_find.remove(hash);
changes.remove(hash);
if may_find.is_empty() {
break;
}
for dep in self
.history_index
.get(hash)
.and_then(|i| self.history.get(*i))
.map(|c| c.deps.as_slice())
.unwrap_or_default()
{
// if we just removed something from our hashes then it is likely there is
// more to remove down here, so inspect the children straight away.
// When we don't remove anything it is less likely that there is something
// further down that chain, so delay it.
if removed {
queue.push_front(dep)
} else {
queue.push_back(dep)
}
}
}
}
/// Adds the event handler and returns the id of the handler.
pub fn add_event_handler(&mut self, handler: EventHandler) -> EventHandlerId {
self.event_handlers.add_handler(handler)
}
/// Remove the handler with the given id, returning whether it removed a handler or not.
pub fn remove_event_handler(&mut self, id: EventHandlerId) -> bool {
self.event_handlers.remove_handler(id)
}
}
#[cfg(test)]
mod tests {
use std::convert::TryInto;
use automerge_protocol::{ActorId, ObjectId, Op, OpType, UncompressedChange};
use super::*;
#[test]
fn test_get_changes_fast_behavior() {
let actor_a: ActorId = "7b7723afd9e6480397a4d467b7693156".try_into().unwrap();
let actor_b: ActorId = "37704788917a499cb0206fa8519ac4d9".try_into().unwrap();
let change_a1: Change = UncompressedChange {
actor_id: actor_a.clone(),
seq: 1,
start_op: 1,
time: 0,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![Op {
obj: ObjectId::Root,
action: OpType::Set("magpie".into()),
key: "bird".into(),
insert: false,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
let change_a2: Change = UncompressedChange {
actor_id: actor_a,
seq: 2,
start_op: 2,
time: 0,
message: None,
hash: None,
deps: vec![change_a1.hash],
operations: vec![Op {
obj: ObjectId::Root,
action: OpType::Set("ant".into()),
key: "bug".into(),
insert: false,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
let change_b1: Change = UncompressedChange {
actor_id: actor_b.clone(),
seq: 1,
start_op: 1,
time: 0,
message: None,
hash: None,
deps: vec![],
operations: vec![Op {
obj: ObjectId::Root,
action: OpType::Set("dove".into()),
key: "bird".into(),
insert: false,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
let change_b2: Change = UncompressedChange {
actor_id: actor_b.clone(),
seq: 2,
start_op: 2,
time: 0,
message: None,
hash: None,
deps: vec![change_b1.hash],
operations: vec![Op {
obj: ObjectId::Root,
action: OpType::Set("stag beetle".into()),
key: "bug".into(),
insert: false,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
let change_b3: Change = UncompressedChange {
actor_id: actor_b,
seq: 3,
start_op: 3,
time: 0,
message: None,
hash: None,
deps: vec![change_a2.hash, change_b2.hash],
operations: vec![Op {
obj: ObjectId::Root,
action: OpType::Set("bugs and birds".into()),
key: "title".into(),
insert: false,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
let mut backend = Backend::new();
backend
.apply_changes(vec![change_a1.clone(), change_a2.clone()])
.unwrap();
assert_eq!(
backend.get_changes_fast(&[]),
Some(vec![&change_a1, &change_a2])
);
assert_eq!(
backend.get_changes_fast(&[change_a1.hash]),
Some(vec![&change_a2])
);
assert_eq!(backend.get_heads(), vec![change_a2.hash]);
backend
.apply_changes(vec![change_b1.clone(), change_b2.clone()])
.unwrap();
assert_eq!(
backend.get_changes_fast(&[]),
Some(vec![&change_a1, &change_a2, &change_b1, &change_b2])
);
assert_eq!(backend.get_changes_fast(&[change_a1.hash]), None);
assert_eq!(backend.get_changes_fast(&[change_a2.hash]), None);
assert_eq!(
backend.get_changes_fast(&[change_a1.hash, change_b1.hash]),
Some(vec![&change_a2, &change_b2])
);
assert_eq!(
backend.get_changes_fast(&[change_a2.hash, change_b1.hash]),
Some(vec![&change_b2])
);
assert_eq!(backend.get_heads(), vec![change_b2.hash, change_a2.hash]);
backend.apply_changes(vec![change_b3.clone()]).unwrap();
assert_eq!(backend.get_heads(), vec![change_b3.hash]);
assert_eq!(
backend.get_changes_fast(&[]),
Some(vec![
&change_a1, &change_a2, &change_b1, &change_b2, &change_b3
])
);
assert_eq!(backend.get_changes_fast(&[change_a1.hash]), None);
assert_eq!(backend.get_changes_fast(&[change_a2.hash]), None);
assert_eq!(backend.get_changes_fast(&[change_b1.hash]), None);
assert_eq!(backend.get_changes_fast(&[change_b2.hash]), None);
assert_eq!(
backend.get_changes_fast(&[change_a1.hash, change_b1.hash]),
Some(vec![&change_a2, &change_b2, &change_b3])
);
assert_eq!(
backend.get_changes_fast(&[change_a2.hash, change_b1.hash]),
Some(vec![&change_b2, &change_b3])
);
assert_eq!(backend.get_changes_fast(&[change_b3.hash]), Some(vec![]));
}
}

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -0,0 +1,69 @@
use std::ops::Deref;
use crate::{error::AutomergeError, internal::InternalOpType, op_handle::OpHandle};
/// Represents a set of operations which are relevant to either an element ID
/// or object ID and which occurred without knowledge of each other
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct ConcurrentOperations {
pub ops: Vec<OpHandle>,
}
impl Deref for ConcurrentOperations {
type Target = Vec<OpHandle>;
fn deref(&self) -> &Self::Target {
&self.ops
}
}
impl Default for ConcurrentOperations {
fn default() -> Self {
Self::new()
}
}
impl ConcurrentOperations {
pub fn new() -> ConcurrentOperations {
ConcurrentOperations { ops: Vec::new() }
}
pub fn is_empty(&self) -> bool {
self.ops.is_empty()
}
/// Updates this set of operations based on a new operation.
///
/// Returns the previous operations that this op
/// replaces
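/// For example (an illustrative trace of the code below): if `ops` holds a
/// `Set` op `o1` and `new_op.pred` contains `o1.id`, then `o1` is removed
/// and returned as overwritten; an `Inc` op instead increments matching
/// counters in place and overwrites nothing.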
pub fn incorporate_new_op(
&mut self,
new_op: &OpHandle,
) -> Result<Vec<OpHandle>, AutomergeError> {
let mut overwritten_ops = Vec::new();
if new_op.is_inc() {
self.ops
.iter_mut()
.for_each(|other| other.maybe_increment(new_op))
} else {
let mut i = 0;
while i != self.ops.len() {
if new_op.pred.contains(&self.ops[i].id) {
overwritten_ops.push(self.ops.swap_remove(i));
} else {
i += 1;
}
}
}
match new_op.action {
InternalOpType::Set(_) | InternalOpType::Make(_) => {
self.ops.push(new_op.clone());
}
_ => {}
}
Ok(overwritten_ops)
}
}

View file

@ -1,13 +1,8 @@
use core::fmt::Debug;
use std::num::NonZeroU64;
use std::{borrow::Cow, io, io::Read, str};
use std::{borrow::Cow, convert::TryFrom, io, io::Read, str};
use crate::error;
use crate::legacy as amp;
use crate::ActorId;
use smol_str::SmolStr;
use automerge_protocol as amp;
/// The error type for decoding operations.
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error(
@ -23,7 +18,7 @@ pub enum Error {
)]
WrongType { expected_one_of: Vec<u8>, found: u8 },
#[error("Bad change format: {0}")]
BadChangeFormat(#[source] error::InvalidChangeHashSlice),
BadChangeFormat(#[source] amp::error::InvalidChangeHashSlice),
#[error("Not enough bytes")]
NotEnoughBytes,
#[error("Found the wrong magic bytes in the document")]
@ -44,68 +39,13 @@ pub enum Error {
NoDocChanges,
#[error("An overflow would have occurred, the data may be corrupt")]
Overflow,
#[error("Calculated heads differed from actual heads")]
MismatchedHeads,
#[error("Failed to read leb128 number {0}")]
Leb128(#[from] leb128::read::Error),
#[error(transparent)]
Io(#[from] io::Error),
}
impl PartialEq<Error> for Error {
fn eq(&self, other: &Error) -> bool {
match (self, other) {
(
Self::WrongType {
expected_one_of: l_expected_one_of,
found: l_found,
},
Self::WrongType {
expected_one_of: r_expected_one_of,
found: r_found,
},
) => l_expected_one_of == r_expected_one_of && l_found == r_found,
(Self::BadChangeFormat(l0), Self::BadChangeFormat(r0)) => l0 == r0,
(
Self::WrongByteLength {
expected: l_expected,
found: l_found,
},
Self::WrongByteLength {
expected: r_expected,
found: r_found,
},
) => l_expected == r_expected && l_found == r_found,
(
Self::ColumnsNotInAscendingOrder {
last: l_last,
found: l_found,
},
Self::ColumnsNotInAscendingOrder {
last: r_last,
found: r_found,
},
) => l_last == r_last && l_found == r_found,
(
Self::InvalidChecksum {
found: l_found,
calculated: l_calculated,
},
Self::InvalidChecksum {
found: r_found,
calculated: r_calculated,
},
) => l_found == r_found && l_calculated == r_calculated,
(Self::InvalidChange(l0), Self::InvalidChange(r0)) => l0 == r0,
(Self::ChangeDecompressFailed(l0), Self::ChangeDecompressFailed(r0)) => l0 == r0,
(Self::Leb128(_l0), Self::Leb128(_r0)) => true,
(Self::Io(l0), Self::Io(r0)) => l0.kind() == r0.kind(),
_ => core::mem::discriminant(self) == core::mem::discriminant(other),
}
}
}
#[derive(thiserror::Error, PartialEq, Debug)]
#[derive(thiserror::Error, Debug)]
pub enum InvalidChangeError {
#[error("Change contained an operation with action 'set' which did not have a 'value'")]
SetOpWithoutValue,
@ -114,24 +54,24 @@ pub enum InvalidChangeError {
#[error("Change contained an invalid object id: {}", source.0)]
InvalidObjectId {
#[from]
source: error::InvalidObjectId,
source: amp::error::InvalidObjectId,
},
#[error("Change contained an invalid hash: {:?}", source.0)]
InvalidChangeHash {
#[from]
source: error::InvalidChangeHashSlice,
source: amp::error::InvalidChangeHashSlice,
},
}
#[derive(Clone, Debug)]
pub(crate) struct Decoder<'a> {
pub(crate) offset: usize,
pub(crate) last_read: usize,
pub offset: usize,
pub last_read: usize,
data: Cow<'a, [u8]>,
}
impl<'a> Decoder<'a> {
pub(crate) fn new(data: Cow<'a, [u8]>) -> Self {
pub fn new(data: Cow<'a, [u8]>) -> Self {
Decoder {
offset: 0,
last_read: 0,
@ -139,7 +79,7 @@ impl<'a> Decoder<'a> {
}
}
pub(crate) fn read<T: Decodable + Debug>(&mut self) -> Result<T, Error> {
pub fn read<T: Decodable + Debug>(&mut self) -> Result<T, Error> {
let mut buf = &self.data[self.offset..];
let init_len = buf.len();
let val = T::decode::<&[u8]>(&mut buf).ok_or(Error::NoDecodedValue)?;
@ -153,7 +93,7 @@ impl<'a> Decoder<'a> {
}
}
pub(crate) fn read_bytes(&mut self, index: usize) -> Result<&[u8], Error> {
pub fn read_bytes(&mut self, index: usize) -> Result<&[u8], Error> {
if self.offset + index > self.data.len() {
Err(Error::TryingToReadPastEnd)
} else {
@ -164,12 +104,12 @@ impl<'a> Decoder<'a> {
}
}
pub(crate) fn done(&self) -> bool {
pub fn done(&self) -> bool {
self.offset >= self.data.len()
}
}
/// See discussion on [`crate::encoding::BooleanEncoder`] for the format the data is stored in.
/// See discussion on [`BooleanEncoder`] for the format the data is stored in.
pub(crate) struct BooleanDecoder<'a> {
decoder: Decoder<'a>,
last_value: bool,
@ -209,10 +149,10 @@ impl<'a> Iterator for BooleanDecoder<'a> {
}
}
/// See discussion on [`crate::encoding::RleEncoder`] for the format the data is stored in.
/// See discussion on [`RleEncoder`] for the format the data is stored in.
#[derive(Debug)]
pub(crate) struct RleDecoder<'a, T> {
pub(crate) decoder: Decoder<'a>,
pub decoder: Decoder<'a>,
last_value: Option<T>,
count: isize,
literal: bool,
@ -277,7 +217,7 @@ where
}
}
/// See discussion on [`crate::encoding::DeltaEncoder`] for the format the data is stored in.
/// See discussion on [`DeltaEncoder`] for the format the data is stored in.
pub(crate) struct DeltaDecoder<'a> {
rle: RleDecoder<'a, i64>,
absolute_val: u64,
@ -407,15 +347,6 @@ impl Decodable for u64 {
}
}
impl Decodable for NonZeroU64 {
fn decode<R>(bytes: &mut R) -> Option<Self>
where
R: Read,
{
NonZeroU64::new(leb128::read::unsigned(bytes).ok()?)
}
}
impl Decodable for Vec<u8> {
fn decode<R>(bytes: &mut R) -> Option<Self>
where
@ -430,17 +361,6 @@ impl Decodable for Vec<u8> {
Some(buffer)
}
}
impl Decodable for SmolStr {
fn decode<R>(bytes: &mut R) -> Option<SmolStr>
where
R: Read,
{
let buffer = Vec::decode(bytes)?;
str::from_utf8(&buffer).map(|t| t.into()).ok()
}
}
impl Decodable for String {
fn decode<R>(bytes: &mut R) -> Option<String>
where
@ -464,7 +384,7 @@ impl Decodable for Option<String> {
}
}
impl Decodable for ActorId {
impl Decodable for amp::ActorId {
fn decode<R>(bytes: &mut R) -> Option<Self>
where
R: Read,

View file

@ -0,0 +1,363 @@
use core::fmt::Debug;
use std::{
io,
io::{Read, Write},
mem,
};
use automerge_protocol as amp;
use flate2::{bufread::DeflateEncoder, Compression};
use crate::columnar::COLUMN_TYPE_DEFLATE;
pub(crate) const DEFLATE_MIN_SIZE: usize = 256;
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error(transparent)]
Io(#[from] io::Error),
}
/// Encodes booleans by storing the count of the same value.
///
/// The sequence of numbers describes the count of false values on even indices (0-indexed) and the
/// count of true values on odd indices (0-indexed).
///
/// Counts are encoded as usize.
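///
/// For example (worked from the rules above), the sequence
/// `[true, true, false]` is stored as the counts `[0, 2, 1]`: zero leading
/// `false`s, then two `true`s, then one `false`.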
pub(crate) struct BooleanEncoder {
buf: Vec<u8>,
last: bool,
count: usize,
}
impl BooleanEncoder {
pub fn new() -> BooleanEncoder {
BooleanEncoder {
buf: Vec::new(),
last: false,
count: 0,
}
}
pub fn append(&mut self, value: bool) {
if value == self.last {
self.count += 1;
} else {
self.count.encode(&mut self.buf).ok();
self.last = value;
self.count = 1;
}
}
pub fn finish(mut self, col: u32) -> ColData {
if self.count > 0 {
self.count.encode(&mut self.buf).ok();
}
ColData::new(col, self.buf)
}
}
/// Encodes integers as the change since the previous value.
///
/// The initial value is 0 encoded as u64. Deltas are encoded as i64.
///
/// Run length encoding is then applied to the resulting sequence.
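///
/// For example (worked from the rules above), the values `[10, 11, 11, 13]`
/// become the delta sequence `[10, 1, 0, 2]` before run length encoding is
/// applied.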
pub(crate) struct DeltaEncoder {
rle: RleEncoder<i64>,
absolute_value: u64,
}
impl DeltaEncoder {
pub fn new() -> DeltaEncoder {
DeltaEncoder {
rle: RleEncoder::new(),
absolute_value: 0,
}
}
pub fn append_value(&mut self, value: u64) {
self.rle
.append_value(value as i64 - self.absolute_value as i64);
self.absolute_value = value;
}
pub fn append_null(&mut self) {
self.rle.append_null();
}
pub fn finish(self, col: u32) -> ColData {
self.rle.finish(col)
}
}
enum RleState<T> {
Empty,
NullRun(usize),
LiteralRun(T, Vec<T>),
LoneVal(T),
Run(T, usize),
}
/// Encodes data in run length encoding format. This is very efficient for long repeats of data.
///
/// There are 3 types of 'run' in this encoder:
/// - a normal run (compresses repeated values)
/// - a null run (compresses repeated nulls)
/// - a literal run (no compression)
///
/// A normal run consists of the length of the run (encoded as an i64) followed by the encoded value that this run contains.
///
/// A null run consists of a zero value (encoded as an i64) followed by the length of the null run (encoded as a usize).
///
/// A literal run consists of the **negative** length of the run (encoded as an i64) followed by the values in the run.
///
/// Therefore all the types start with an encoded i64, the value of which determines the type of the following data.
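///
/// For example (worked from the rules above), the sequence
/// `5, 5, 5, null, null, 1, 2` is written as the normal run `3 5`, the null
/// run `0 2`, and the literal run `-2 1 2`.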
pub(crate) struct RleEncoder<T>
where
T: Encodable + PartialEq + Clone,
{
buf: Vec<u8>,
state: RleState<T>,
}
impl<T> RleEncoder<T>
where
T: Encodable + PartialEq + Clone,
{
pub fn new() -> RleEncoder<T> {
RleEncoder {
buf: Vec::new(),
state: RleState::Empty,
}
}
pub fn finish(mut self, col: u32) -> ColData {
match self.take_state() {
// this covers `only_nulls`
RleState::NullRun(size) => {
if !self.buf.is_empty() {
self.flush_null_run(size)
}
}
RleState::LoneVal(value) => self.flush_lit_run(vec![value]),
RleState::Run(value, len) => self.flush_run(&value, len),
RleState::LiteralRun(last, mut run) => {
run.push(last);
self.flush_lit_run(run);
}
RleState::Empty => {}
}
ColData::new(col, self.buf)
}
fn flush_run(&mut self, val: &T, len: usize) {
self.encode(&(len as i64));
self.encode(val);
}
fn flush_null_run(&mut self, len: usize) {
self.encode::<i64>(&0);
self.encode(&len);
}
fn flush_lit_run(&mut self, run: Vec<T>) {
self.encode(&-(run.len() as i64));
for val in run {
self.encode(&val);
}
}
fn take_state(&mut self) -> RleState<T> {
let mut state = RleState::Empty;
mem::swap(&mut self.state, &mut state);
state
}
pub fn append_null(&mut self) {
self.state = match self.take_state() {
RleState::Empty => RleState::NullRun(1),
RleState::NullRun(size) => RleState::NullRun(size + 1),
RleState::LoneVal(other) => {
self.flush_lit_run(vec![other]);
RleState::NullRun(1)
}
RleState::Run(other, len) => {
self.flush_run(&other, len);
RleState::NullRun(1)
}
RleState::LiteralRun(last, mut run) => {
run.push(last);
self.flush_lit_run(run);
RleState::NullRun(1)
}
}
}
pub fn append_value(&mut self, value: T) {
self.state = match self.take_state() {
RleState::Empty => RleState::LoneVal(value),
RleState::LoneVal(other) => {
if other == value {
RleState::Run(value, 2)
} else {
RleState::LiteralRun(value, vec![other])
}
}
RleState::Run(other, len) => {
if other == value {
RleState::Run(other, len + 1)
} else {
self.flush_run(&other, len);
RleState::LoneVal(value)
}
}
RleState::LiteralRun(last, mut run) => {
if last == value {
self.flush_lit_run(run);
RleState::Run(value, 2)
} else {
run.push(last);
RleState::LiteralRun(value, run)
}
}
RleState::NullRun(size) => {
self.flush_null_run(size);
RleState::LoneVal(value)
}
}
}
fn encode<V>(&mut self, val: &V)
where
V: Encodable,
{
val.encode(&mut self.buf).ok();
}
}
pub(crate) trait Encodable {
fn encode_with_actors_to_vec(&self, actors: &mut Vec<amp::ActorId>) -> io::Result<Vec<u8>> {
let mut buf = Vec::new();
self.encode_with_actors(&mut buf, actors)?;
Ok(buf)
}
fn encode_with_actors<R: Write>(
&self,
buf: &mut R,
_actors: &mut Vec<amp::ActorId>,
) -> io::Result<usize> {
self.encode(buf)
}
fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize>;
}
impl Encodable for String {
fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
let bytes = self.as_bytes();
let head = bytes.len().encode(buf)?;
buf.write_all(bytes)?;
Ok(head + bytes.len())
}
}
impl Encodable for Option<String> {
fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
if let Some(s) = self {
s.encode(buf)
} else {
0.encode(buf)
}
}
}
impl Encodable for u64 {
fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
leb128::write::unsigned(buf, *self)
}
}
impl Encodable for f64 {
fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
let bytes = self.to_le_bytes();
buf.write_all(&bytes)?;
Ok(bytes.len())
}
}
impl Encodable for f32 {
fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
let bytes = self.to_le_bytes();
buf.write_all(&bytes)?;
Ok(bytes.len())
}
}
impl Encodable for i64 {
fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
leb128::write::signed(buf, *self)
}
}
impl Encodable for usize {
fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
(*self as u64).encode(buf)
}
}
impl Encodable for u32 {
fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
u64::from(*self).encode(buf)
}
}
impl Encodable for i32 {
fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
i64::from(*self).encode(buf)
}
}
#[derive(Debug)]
pub(crate) struct ColData {
pub col: u32,
pub data: Vec<u8>,
#[cfg(debug_assertions)]
has_been_deflated: bool,
}
impl ColData {
pub fn new(col_id: u32, data: Vec<u8>) -> ColData {
ColData {
col: col_id,
data,
#[cfg(debug_assertions)]
has_been_deflated: false,
}
}
pub fn encode_col_len<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
let mut len = 0;
if !self.data.is_empty() {
len += self.col.encode(buf)?;
len += self.data.len().encode(buf)?;
}
Ok(len)
}
pub fn deflate(&mut self) {
#[cfg(debug_assertions)]
{
debug_assert!(!self.has_been_deflated);
self.has_been_deflated = true;
}
if self.data.len() > DEFLATE_MIN_SIZE {
let mut deflated = Vec::new();
let mut deflater = DeflateEncoder::new(&self.data[..], Compression::best());
// This unwrap should be okay as we're reading from and writing to in-memory buffers
deflater.read_to_end(&mut deflated).unwrap();
self.col |= COLUMN_TYPE_DEFLATE;
self.data = deflated;
}
}
}
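// A sketch of how the decode side might detect a deflated column (an
// assumption: that the reader checks the same COLUMN_TYPE_DEFLATE bit):
//
//     let is_deflated = col.col & COLUMN_TYPE_DEFLATE != 0;
//     let raw_col_id = col.col & !COLUMN_TYPE_DEFLATE;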


@ -0,0 +1,69 @@
//use std::error::Error;
use std::fmt::Debug;
use automerge_protocol as amp;
use thiserror::Error;
use crate::{decoding, encoding};
#[derive(Error, Debug)]
pub enum AutomergeError {
#[error("Missing object ID")]
MissingObjectError,
#[error("Missing index in op {0}")]
MissingIndex(amp::OpId),
#[error("Missing element ID: {0}")]
MissingElement(amp::ObjectId, amp::ElementId),
#[error("No path to object: {0}")]
NoPathToObject(amp::ObjectId),
#[error("Cant extract object: {0}")]
CantExtractObject(amp::ObjectId),
#[error("Skiplist error: {0}")]
SkipListError(String),
#[error("Index out of bounds: {0}")]
IndexOutOfBounds(usize),
#[error("Invalid op id: {0}")]
InvalidOpId(String),
#[error("Invalid object ID: {0}")]
InvalidObjectId(String),
#[error("Missing value")]
MissingValue,
#[error("Unknown error: {0}")]
GeneralError(String),
#[error("Missing number value")]
MissingNumberValue,
#[error("Unknown version: {0}")]
UnknownVersion(u64),
#[error("Duplicate change {0}")]
DuplicateChange(String),
#[error("Diverged state {0}")]
DivergedState(String),
#[error("Invalid seq {0}")]
InvalidSeq(u64),
#[error("Map key in seq")]
MapKeyInSeq,
#[error("Head to opid")]
HeadToOpId,
#[error("Doc format not implemented yet")]
DocFormatUnimplemented,
#[error("Divergent change {0}")]
DivergentChange(String),
#[error("Encode failed")]
EncodeFailed,
#[error("Decode failed")]
DecodeFailed,
#[error("Encoding error {0}")]
EncodingError(#[from] encoding::Error),
#[error("Decoding error {0}")]
DecodingError(#[from] decoding::Error),
#[error("Attempted to create a cursor for opid {opid} which was not an element in a sequence")]
InvalidCursor { opid: amp::OpId },
#[error("A compressed chunk could not be decompressed")]
BadCompressedChunk,
#[error("Overflow would have ocurred")]
Overflow,
}
#[derive(Error, Debug)]
#[error("Invalid element ID: {0}")]
pub struct InvalidElementId(pub String);


@ -0,0 +1,72 @@
use std::fmt::Debug;
use crate::Change;
#[derive(Clone, Copy)]
pub struct EventHandlerId(usize);
/// A sequence of event handlers.
///
/// This maintains the order of insertion so handlers will be called in a consistent order.
#[derive(Debug, Default)]
pub struct EventHandlers(Vec<EventHandler>);
impl Clone for EventHandlers {
fn clone(&self) -> Self {
EventHandlers(Vec::new())
}
}
impl EventHandlers {
pub(crate) fn before_apply_change(&mut self, change: &Change) {
for handler in &mut self.0 {
if let EventHandler::BeforeApplyChange(f) = handler {
f.0(change)
}
}
}
pub(crate) fn after_apply_change(&mut self, change: &Change) {
for handler in &mut self.0 {
if let EventHandler::AfterApplyChange(f) = handler {
f.0(change)
}
}
}
/// Adds the event handler and returns the id of the handler.
pub fn add_handler(&mut self, handler: EventHandler) -> EventHandlerId {
self.0.push(handler);
EventHandlerId(self.0.len() - 1)
}
/// Remove the handler with the given id, returning whether it removed a handler or not.
pub fn remove_handler(&mut self, id: EventHandlerId) -> bool {
if id.0 < self.0.len() {
self.0.remove(id.0);
true
} else {
false
}
}
}
/// A handler for changes.
pub struct ChangeEventHandler(pub Box<dyn FnMut(&Change) + Send>);
/// A general event handler.
pub enum EventHandler {
/// An event handler that gets called before a change is applied to the history.
BeforeApplyChange(ChangeEventHandler),
/// An event handler that gets called after a change has been applied to the history.
AfterApplyChange(ChangeEventHandler),
}
impl Debug for EventHandler {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
match self {
Self::BeforeApplyChange(_) => write!(f, "BeforeApplyChange"),
Self::AfterApplyChange(_) => write!(f, "AfterApplyChange"),
}
}
}
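// A minimal usage sketch (the closure body is illustrative; everything else
// is defined above):
//
//     let mut handlers = EventHandlers::default();
//     let id = handlers.add_handler(EventHandler::BeforeApplyChange(
//         ChangeEventHandler(Box::new(|change| {
//             println!("about to apply {:?}", change.hash);
//         })),
//     ));
//     assert!(handlers.remove_handler(id));
//
// Note that removal shifts the indices of later handlers, so an id handed
// out before a removal may afterwards refer to a different handler.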


@ -0,0 +1,89 @@
use automerge_protocol as amp;
#[derive(Eq, PartialEq, Hash, Debug, Clone, Copy)]
pub(crate) struct ActorId(pub usize);
#[derive(Eq, PartialEq, Debug, Hash, Clone, Copy)]
pub(crate) struct OpId(pub u64, pub ActorId);
#[derive(Eq, PartialEq, Debug, Hash, Clone, Copy)]
pub(crate) enum ObjectId {
Id(OpId),
Root,
}
#[derive(PartialEq, Eq, Debug, Hash, Clone, Copy)]
pub(crate) enum ElementId {
Head,
Id(OpId),
}
#[derive(PartialEq, Eq, Debug, Hash, Clone)]
pub(crate) enum Key {
Map(String),
Seq(ElementId),
}
#[derive(PartialEq, Debug, Clone)]
pub(crate) struct InternalOp {
pub action: InternalOpType,
pub obj: ObjectId,
pub key: Key,
pub pred: Vec<OpId>,
pub insert: bool,
}
impl InternalOp {
pub fn obj_type(&self) -> Option<amp::ObjType> {
match self.action {
InternalOpType::Make(objtype) => Some(objtype),
_ => None,
}
}
pub fn is_inc(&self) -> bool {
matches!(self.action, InternalOpType::Inc(_))
}
}
#[derive(PartialEq, Debug, Clone)]
pub(crate) enum InternalOpType {
Make(amp::ObjType),
Del,
Inc(i64),
Set(amp::ScalarValue),
}
impl Key {
pub fn as_element_id(&self) -> Option<ElementId> {
match self {
Key::Map(_) => None,
Key::Seq(eid) => Some(*eid),
}
}
pub fn to_opid(&self) -> Option<OpId> {
match self.as_element_id()? {
ElementId::Id(id) => Some(id),
ElementId::Head => None,
}
}
}
impl From<OpId> for ObjectId {
fn from(id: OpId) -> ObjectId {
ObjectId::Id(id)
}
}
impl From<OpId> for ElementId {
fn from(id: OpId) -> ElementId {
ElementId::Id(id)
}
}
impl From<OpId> for Key {
fn from(id: OpId) -> Key {
Key::Seq(ElementId::Id(id))
}
}


@ -0,0 +1,64 @@
#![warn(clippy::pedantic)]
#![warn(clippy::nursery)]
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::must_use_candidate)]
#![allow(clippy::option_if_let_else)]
#![allow(clippy::cast_sign_loss)]
#![allow(clippy::cast_possible_truncation)]
#![allow(clippy::cast_possible_wrap)]
#![allow(clippy::doc_markdown)]
#![allow(clippy::similar_names)]
#![allow(clippy::shadow_unrelated)]
#![allow(clippy::module_name_repetitions)]
#![allow(clippy::redundant_pub_crate)]
#![allow(clippy::missing_const_for_fn)]
#![allow(clippy::use_self)]
#![allow(clippy::too_many_lines)]
extern crate fxhash;
extern crate hex;
extern crate itertools;
extern crate maplit;
extern crate rand;
extern crate web_sys;
mod actor_map;
mod backend;
mod change;
mod columnar;
mod concurrent_operations;
mod decoding;
mod encoding;
mod error;
mod event_handlers;
mod internal;
mod object_store;
mod op_handle;
mod op_set;
mod ordered_set;
mod pending_diff;
mod sync;
pub use backend::Backend;
pub use change::Change;
pub use error::AutomergeError;
pub use event_handlers::{ChangeEventHandler, EventHandler, EventHandlerId};
pub use sync::{BloomFilter, SyncHave, SyncMessage, SyncState};
#[cfg(test)]
mod tests {
use std::{
sync::{Arc, Mutex},
thread,
};
#[test]
fn sync_and_send_backend() {
let b = crate::Backend::new();
let mb = Arc::new(Mutex::new(b));
thread::spawn(move || {
let b = mb.lock().unwrap();
b.get_changes(&[]);
});
}
}


@ -0,0 +1,107 @@
use std::collections::{HashMap, HashSet};
use automerge_protocol as amp;
use fxhash::FxBuildHasher;
use crate::{
actor_map::ActorMap,
concurrent_operations::ConcurrentOperations,
internal::{ElementId, Key, OpId},
op_handle::OpHandle,
ordered_set::{OrderedSet, SkipList},
};
/// `ObjState` is what the OpSet uses to store the operations seen for a
/// particular object. It represents the two possible container types in
/// automerge, a map or a sequence (tables and text are effectively maps and
/// sequences respectively).
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct ObjState {
pub props: HashMap<Key, ConcurrentOperations>,
pub obj_type: amp::ObjType,
pub inbound: HashSet<OpHandle, FxBuildHasher>,
pub following: HashMap<ElementId, Vec<ElementId>, FxBuildHasher>,
pub insertions: HashMap<ElementId, OpHandle, FxBuildHasher>,
pub seq: SkipList<OpId>,
}
impl ObjState {
pub fn new(obj_type: amp::ObjType) -> ObjState {
let mut following = HashMap::default();
following.insert(ElementId::Head, Vec::new());
ObjState {
props: HashMap::default(),
following,
insertions: HashMap::default(),
obj_type,
inbound: HashSet::default(),
seq: SkipList::new(),
}
}
pub fn is_seq(&self) -> bool {
matches!(self.obj_type, amp::ObjType::Sequence(_))
}
fn get_parent(&self, id: &ElementId) -> Option<ElementId> {
self.insertions.get(id).and_then(|i| i.key.as_element_id())
}
fn insertions_after(&self, parent: &ElementId) -> Vec<ElementId> {
self.following.get(parent).cloned().unwrap_or_default()
}
#[tracing::instrument(skip(self))]
pub fn index_of(&self, id: OpId) -> Option<usize> {
let mut prev_id = id.into();
let mut index = None;
// walk backwards through the following/insertions lists, looking for an element that is not deleted
while index.is_none() {
prev_id = match self.get_previous(&prev_id) {
Some(p) => p,
None => return None,
};
match prev_id {
ElementId::Id(id) => {
// FIXME maybe I can speed this up with self.props.get before looking for
index = self.seq.index_of(&id);
}
ElementId::Head => return None,
}
}
index.map(|i| i + 1)
}
fn get_previous(&self, element: &ElementId) -> Option<ElementId> {
let parent_id = match self.get_parent(element) {
Some(p) => p,
None => return None,
};
let children = self.insertions_after(&parent_id);
let pos = match children.iter().position(|k| k == element) {
Some(p) => p,
None => return None,
};
if pos == 0 {
Some(parent_id)
} else {
let mut prev_id = children[pos - 1]; // FIXME - use refs here
loop {
match self.insertions_after(&prev_id).last() {
Some(id) => prev_id = *id,
None => return Some(prev_id),
}
}
}
}
pub fn insert_after(&mut self, elem: ElementId, op: OpHandle, actors: &ActorMap) {
let eid = op.id.into();
self.insertions.insert(eid, op);
let following = self.following.entry(elem).or_default();
following.push(eid);
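// Concurrent insertions after the same element are kept in descending
// order of their op ids (as compared via the ActorMap) so that every
// replica converges on the same sequence order.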
following.sort_unstable_by(|a, b| actors.cmp(b, a));
}
}


@ -0,0 +1,105 @@
use std::{
fmt,
hash::{Hash, Hasher},
ops::Deref,
};
use automerge_protocol as amp;
use crate::{
actor_map::ActorMap,
internal::{InternalOp, InternalOpType, Key, ObjectId, OpId},
Change,
};
#[derive(Clone)]
pub(crate) struct OpHandle {
pub id: OpId,
pub op: InternalOp,
pub delta: i64,
}
impl OpHandle {
pub fn extract(change: &Change, actors: &mut ActorMap) -> Vec<OpHandle> {
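// Each op in a change gets a sequential id: the change's `start_op` plus
// the op's index, paired with the interned actor that authored the change.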
change
.iter_ops()
.enumerate()
.map(|(index, op)| {
let id = OpId(
change.start_op + (index as u64),
actors.import_actor(change.actor_id()),
);
let op = actors.import_op(op);
OpHandle { id, op, delta: 0 }
})
.collect()
}
pub fn adjusted_value(&self) -> amp::ScalarValue {
match &self.action {
InternalOpType::Set(amp::ScalarValue::Counter(a)) => {
amp::ScalarValue::Counter(a + self.delta)
}
InternalOpType::Set(val) => val.clone(),
_ => amp::ScalarValue::Null,
}
}
pub fn child(&self) -> Option<ObjectId> {
match &self.action {
InternalOpType::Make(_) => Some(self.id.into()),
_ => None,
}
}
pub fn operation_key(&self) -> Key {
if self.insert {
self.id.into()
} else {
self.key.clone()
}
}
pub fn maybe_increment(&mut self, inc: &OpHandle) {
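// An increment applies to the counter op it names as a predecessor: if
// this handle's id is in the inc's `pred` and this is a counter `Set`,
// fold the amount into `delta` so `adjusted_value` reports the new total.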
if let InternalOpType::Inc(amount) = inc.action {
if inc.pred.contains(&self.id) {
if let InternalOpType::Set(amp::ScalarValue::Counter(_)) = self.action {
self.delta += amount;
}
}
}
}
}
impl fmt::Debug for OpHandle {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("OpHandle")
.field("id", &self.id)
.field("action", &self.action)
.field("obj", &self.obj)
.field("key", &self.key)
.finish()
}
}
impl Hash for OpHandle {
fn hash<H: Hasher>(&self, state: &mut H) {
self.id.hash(state);
}
}
impl PartialEq for OpHandle {
fn eq(&self, other: &Self) -> bool {
self.id.eq(&other.id)
}
}
impl Eq for OpHandle {}
impl Deref for OpHandle {
type Target = InternalOp;
fn deref(&self) -> &Self::Target {
&self.op
}
}


@ -0,0 +1,537 @@
//! The OpSet is where most of the interesting work is done in this library.
//! It maintains a mapping from each object ID to a set of concurrent
//! operations which have been seen for that object ID.
//!
//! When the client requests the value of the CRDT (via
//! document::state) the implementation fetches the root object ID's history
//! and then recursively walks through the tree of histories constructing the
//! state. Obviously this is not very efficient.
use core::cmp::max;
use std::collections::{HashMap, HashSet};
use automerge_protocol as amp;
use fxhash::FxBuildHasher;
use tracing::instrument;
use crate::{
actor_map::ActorMap,
error::AutomergeError,
internal::{InternalOpType, ObjectId},
object_store::ObjState,
op_handle::OpHandle,
ordered_set::OrderedSet,
pending_diff::PendingDiff,
Change,
};
/// The OpSet manages an ObjectStore, and a queue of incoming changes in order
/// to ensure that operations are delivered to the object store in causal order
///
/// Whenever a new change is received we iterate through any causally ready
/// changes in the queue and apply them to the object store, then repeat until
/// there are no causally ready changes left. The end result of this is that
/// the object store will contain sets of concurrent operations for each object
/// ID or element ID.
///
/// When we want to get the state of the CRDT we walk through the
/// object store, starting with the root object ID and constructing the value
/// at each node by examining the concurrent operations which are active for
/// that node.
///
#[derive(Debug, PartialEq, Clone)]
pub(crate) struct OpSet {
pub objs: HashMap<ObjectId, ObjState, FxBuildHasher>,
pub deps: HashSet<amp::ChangeHash>,
pub max_op: u64,
cursors: HashMap<ObjectId, Vec<CursorState>>,
}
impl Default for OpSet {
fn default() -> Self {
Self::new()
}
}
impl OpSet {
pub fn new() -> OpSet {
let mut objs = HashMap::default();
objs.insert(ObjectId::Root, ObjState::new(amp::ObjType::map()));
OpSet {
objs,
max_op: 0,
deps: HashSet::default(),
cursors: HashMap::new(),
}
}
pub(crate) fn apply_ops(
&mut self,
mut ops: Vec<OpHandle>,
diffs: &mut HashMap<ObjectId, Vec<PendingDiff>>,
actors: &mut ActorMap,
) -> Result<(), AutomergeError> {
for op in ops.drain(..) {
let obj_id = op.obj;
let pending_diff = self.apply_op(op, actors)?;
if let Some(diff) = pending_diff {
diffs.entry(obj_id).or_default().push(diff);
}
}
Ok(())
}
pub fn heads(&self) -> Vec<amp::ChangeHash> {
let mut deps: Vec<_> = self.deps.iter().copied().collect();
deps.sort_unstable();
deps
}
#[instrument(skip(self))]
fn apply_op(
&mut self,
op: OpHandle,
actors: &mut ActorMap,
) -> Result<Option<PendingDiff>, AutomergeError> {
if let (Some(child), Some(obj_type)) = (op.child(), op.obj_type()) {
//let child = actors.import_obj(child);
self.objs.insert(child, ObjState::new(obj_type));
}
if let InternalOpType::Set(amp::ScalarValue::Cursor(ref oid)) = op.op.action {
tracing::debug!(referred_opid=?oid, "Adding cursor");
let internal_opid = actors.import_opid(oid);
let mut target_found = false;
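// Scan every object for the insertion this cursor points at; a cursor
// referring to an element that exists nowhere is an error.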
for (obj_id, obj) in &self.objs {
if obj.insertions.contains_key(&internal_opid.into()) {
target_found = true;
self.cursors.entry(*obj_id).or_default().push(CursorState {
referring_object_id: actors.export_obj(&op.obj),
internal_referring_object_id: op.obj,
key: op.key.clone(),
element_opid: oid.clone(),
internal_element_opid: internal_opid,
index: obj.index_of(internal_opid).unwrap_or(0),
referred_object_id: actors.export_obj(obj_id),
internal_referred_object_id: *obj_id,
});
}
}
if !target_found {
return Err(AutomergeError::InvalidCursor { opid: oid.clone() });
}
}
let object_id = &op.obj;
let object = self.get_obj_mut(object_id)?;
let (diff, overwritten) = if object.is_seq() {
if op.insert {
object.insert_after(
op.key.as_element_id().ok_or(AutomergeError::MapKeyInSeq)?,
op.clone(),
actors,
);
}
let ops = object.props.entry(op.operation_key()).or_default();
let before = !ops.is_empty();
let overwritten_ops = ops.incorporate_new_op(&op)?;
let after = !ops.is_empty();
let diff = match (before, after) {
(true, true) => {
tracing::debug!("updating existing element");
Some(PendingDiff::Set(op.clone()))
}
(true, false) => {
let opid = op
.operation_key()
.to_opid()
.ok_or(AutomergeError::HeadToOpId)?;
let index = object.seq.remove_key(&opid).unwrap();
tracing::debug!(opid=?opid, index=%index, "deleting element");
Some(PendingDiff::SeqRemove(op.clone(), index))
}
(false, true) => {
let id = op
.operation_key()
.to_opid()
.ok_or(AutomergeError::HeadToOpId)?;
let index = object.index_of(id).unwrap_or(0);
tracing::debug!(new_id=?id, index=%index, after=?op.operation_key(), "inserting new element");
object.seq.insert_index(index, id);
Some(PendingDiff::SeqInsert(op.clone(), index, op.id))
}
(false, false) => None,
};
self.unlink(&op, &overwritten_ops)?;
(diff, overwritten_ops)
} else {
let ops = object.props.entry(op.key.clone()).or_default();
let before = !ops.is_empty();
let overwritten_ops = ops.incorporate_new_op(&op)?;
let after = !ops.is_empty();
self.unlink(&op, &overwritten_ops)?;
if before || after {
tracing::debug!(overwritten_ops=?overwritten_ops, "setting new value");
(Some(PendingDiff::Set(op)), overwritten_ops)
} else {
tracing::debug!(overwritten_ops=?overwritten_ops, "deleting value");
(None, overwritten_ops)
}
};
for op in overwritten {
if let InternalOpType::Set(amp::ScalarValue::Cursor(ref oid)) = op.op.action {
if let Some(opids) = self.cursors.get_mut(&op.op.obj) {
opids.retain(|o| o.element_opid != *oid);
}
}
}
Ok(diff)
}
fn unlink(&mut self, op: &OpHandle, overwritten: &[OpHandle]) -> Result<(), AutomergeError> {
if let Some(child) = op.child() {
self.get_obj_mut(&child)?.inbound.insert(op.clone());
}
for old in overwritten.iter() {
if let Some(child) = old.child() {
self.get_obj_mut(&child)?.inbound.remove(old);
}
}
Ok(())
}
pub fn get_obj(&self, object_id: &ObjectId) -> Result<&ObjState, AutomergeError> {
self.objs
.get(object_id)
.ok_or(AutomergeError::MissingObjectError)
}
fn get_obj_mut(&mut self, object_id: &ObjectId) -> Result<&mut ObjState, AutomergeError> {
self.objs
.get_mut(object_id)
.ok_or(AutomergeError::MissingObjectError)
}
pub fn construct_map(
&self,
object_id: &ObjectId,
object: &ObjState,
actors: &ActorMap,
map_type: amp::MapType,
) -> Result<amp::Diff, AutomergeError> {
let mut props = HashMap::new();
for (key, ops) in &object.props {
if !ops.is_empty() {
let mut opid_to_value = HashMap::new();
for op in ops.iter() {
let amp_opid = actors.export_opid(&op.id);
if let Some(child_id) = op.child() {
opid_to_value.insert(amp_opid, self.construct_object(&child_id, actors)?);
} else {
opid_to_value
.insert(amp_opid, self.gen_value_diff(op, &op.adjusted_value()));
}
}
props.insert(actors.key_to_string(key), opid_to_value);
}
}
Ok(amp::MapDiff {
object_id: actors.export_obj(object_id),
obj_type: map_type,
props,
}
.into())
}
pub fn construct_list(
&self,
object_id: &ObjectId,
object: &ObjState,
actors: &ActorMap,
seq_type: amp::SequenceType,
) -> Result<amp::Diff, AutomergeError> {
let mut edits = Vec::new();
let mut props = HashMap::new();
let mut index = 0;
let mut max_counter = 0;
for opid in &object.seq {
max_counter = max(max_counter, opid.0);
let key = (*opid).into(); // FIXME - something is wrong here
let elem_id = actors.export_opid(opid).into();
if let Some(ops) = object.props.get(&key) {
if !ops.is_empty() {
edits.push(amp::DiffEdit::Insert { index, elem_id });
let mut opid_to_value = HashMap::new();
for op in ops.iter() {
let amp_opid = actors.export_opid(&op.id);
if let Some(child_id) = op.child() {
opid_to_value
.insert(amp_opid, self.construct_object(&child_id, actors)?);
} else {
opid_to_value
.insert(amp_opid, self.gen_value_diff(op, &op.adjusted_value()));
}
}
props.insert(index, opid_to_value);
index += 1;
}
}
}
Ok(amp::SeqDiff {
object_id: actors.export_obj(object_id),
obj_type: seq_type,
edits,
props,
}
.into())
}
pub fn construct_object(
&self,
object_id: &ObjectId,
actors: &ActorMap,
) -> Result<amp::Diff, AutomergeError> {
let object = self.get_obj(object_id)?;
match object.obj_type {
amp::ObjType::Map(map_type) => self.construct_map(object_id, object, actors, map_type),
amp::ObjType::Sequence(seq_type) => {
self.construct_list(object_id, object, actors, seq_type)
}
}
}
// this recursively walks through all the objects touched by the changes
// to generate a diff in a single pass
pub fn finalize_diffs(
&mut self,
mut pending: HashMap<ObjectId, Vec<PendingDiff>>,
actors: &ActorMap,
) -> Result<Option<amp::Diff>, AutomergeError> {
if pending.is_empty() {
return Ok(None);
}
// For each cursor, if the cursor references an object which has been changed we generate a
// diff for the cursor
let mut cursor_changes: HashMap<ObjectId, Vec<PendingDiff>> = HashMap::new();
for obj_id in pending.keys() {
if let Some(cursors) = self.cursors.get_mut(obj_id) {
for cursor in cursors.iter_mut() {
if let Some(obj) = self.objs.get(&cursor.internal_referred_object_id) {
cursor.index = obj.index_of(cursor.internal_element_opid).unwrap_or(0);
cursor_changes
.entry(cursor.internal_referring_object_id)
.or_default()
.push(PendingDiff::CursorChange(cursor.key.clone()))
}
}
}
}
for (obj_id, cursor_change) in cursor_changes {
pending.entry(obj_id).or_default().extend(cursor_change)
}
let mut objs: Vec<_> = pending.keys().copied().collect();
while let Some(obj_id) = objs.pop() {
let obj = self.get_obj(&obj_id)?;
if let Some(inbound) = obj.inbound.iter().next() {
if let Some(diffs) = pending.get_mut(&inbound.obj) {
diffs.push(PendingDiff::Set(inbound.clone()))
} else {
objs.push(inbound.obj);
pending.insert(inbound.obj, vec![PendingDiff::Set(inbound.clone())]);
}
}
}
Ok(Some(self.gen_obj_diff(
&ObjectId::Root,
&mut pending,
actors,
)?))
}
fn gen_seq_diff(
&self,
obj_id: &ObjectId,
obj: &ObjState,
pending: &[PendingDiff],
pending_diffs: &mut HashMap<ObjectId, Vec<PendingDiff>>,
actors: &ActorMap,
seq_type: amp::SequenceType,
) -> Result<amp::Diff, AutomergeError> {
let mut props = HashMap::new();
let edits = pending.iter().filter_map(|p| p.edit(actors)).collect();
// I may have duplicate keys - this makes sure I hit each one only once
let keys: HashSet<_> = pending.iter().map(PendingDiff::operation_key).collect();
for key in &keys {
let mut opid_to_value = HashMap::new();
for op in obj.props.get(key).iter().flat_map(|i| i.iter()) {
let link = match op.action {
InternalOpType::Set(ref value) => self.gen_value_diff(op, value),
InternalOpType::Make(_) => {
self.gen_obj_diff(&op.id.into(), pending_diffs, actors)?
}
_ => panic!("del or inc found in field_operations"),
};
opid_to_value.insert(actors.export_opid(&op.id), link);
}
if let Some(index) = obj
.seq
.index_of(&key.to_opid().ok_or(AutomergeError::HeadToOpId)?)
{
props.insert(index, opid_to_value);
}
}
Ok(amp::SeqDiff {
object_id: actors.export_obj(obj_id),
obj_type: seq_type,
edits,
props,
}
.into())
}
fn gen_map_diff(
&self,
obj_id: &ObjectId,
obj: &ObjState,
pending: &[PendingDiff],
pending_diffs: &mut HashMap<ObjectId, Vec<PendingDiff>>,
actors: &ActorMap,
map_type: amp::MapType,
) -> Result<amp::Diff, AutomergeError> {
let mut props = HashMap::new();
// I may have duplicate keys - I do this to make sure I visit each one only once
let keys: HashSet<_> = pending.iter().map(PendingDiff::operation_key).collect();
for key in &keys {
let key_string = actors.key_to_string(key);
let mut opid_to_value = HashMap::new();
for op in obj.props.get(key).iter().flat_map(|i| i.iter()) {
let link = match op.action {
InternalOpType::Set(ref value) => self.gen_value_diff(op, value),
InternalOpType::Make(_) => {
// FIXME
self.gen_obj_diff(&op.id.into(), pending_diffs, actors)?
}
_ => panic!("del or inc found in field_operations"),
};
opid_to_value.insert(actors.export_opid(&op.id), link);
}
props.insert(key_string, opid_to_value);
}
Ok(amp::MapDiff {
object_id: actors.export_obj(obj_id),
obj_type: map_type,
props,
}
.into())
}
pub fn update_deps(&mut self, change: &Change) {
//self.max_op = max(self.max_op, change.max_op());
for d in &change.deps {
self.deps.remove(d);
}
self.deps.insert(change.hash);
}
fn gen_obj_diff(
&self,
obj_id: &ObjectId,
pending_diffs: &mut HashMap<ObjectId, Vec<PendingDiff>>,
actors: &ActorMap,
) -> Result<amp::Diff, AutomergeError> {
let obj = self.get_obj(obj_id)?;
if let Some(pending) = pending_diffs.remove(obj_id) {
match obj.obj_type {
amp::ObjType::Sequence(seq_type) => {
self.gen_seq_diff(obj_id, obj, &pending, pending_diffs, actors, seq_type)
}
amp::ObjType::Map(map_type) => {
self.gen_map_diff(obj_id, obj, &pending, pending_diffs, actors, map_type)
}
}
} else {
Ok(amp::Diff::Unchanged(amp::ObjDiff {
object_id: actors.export_obj(obj_id),
obj_type: obj.obj_type,
}))
}
}
fn gen_value_diff(&self, op: &OpHandle, value: &amp::ScalarValue) -> amp::Diff {
match value {
amp::ScalarValue::Cursor(oid) => {
// .expect() is okay here because we check that the cursor exists at the start of
// `OpSet::apply_op()`
let cursor_state = self
.cursors
.values()
.flatten()
.find(|c| c.element_opid == *oid)
.expect("missing cursor");
amp::Diff::Cursor(amp::CursorDiff {
object_id: cursor_state.referred_object_id.clone(),
index: cursor_state.index as u32,
elem_id: oid.clone(),
})
}
_ => op.adjusted_value().into(),
}
}
}
/// `CursorState` is the information we need to track in order to update cursors as changes come
/// in. Cursors are created by `Set` operations and therefore live in a particular object (the
/// "referring object") and point at an element in a sequence (the "referred" object). For example
/// this operation:
///
/// ```json
/// {
/// "action": "set",
/// "obj": "_root",
/// "key": "a_cursor",
/// "refObjectId": "1@222"
/// }
/// ```
///
/// Creates a cursor in the root object under the "a_cursor" key which points at element "1@222".
/// When we process a set operation which is a cursor we find the object which contains "1@222" and
/// populate this `CursorState`.
///
/// Note that several fields are duplicated for internal and `automerge_protocol` types. This is
/// because we need to compare those fields against internal types when processing cursors, but we
/// need to create patches which use the `automerge_protocol` types.
#[derive(Debug, PartialEq, Clone)]
struct CursorState {
/// The id of the object this cursor lives in
referring_object_id: amp::ObjectId,
/// The same as `referring_object_id` but as an internal::ObjectID
internal_referring_object_id: ObjectId,
/// The key within the referring object this cursor lives at
key: crate::internal::Key,
/// The id of the sequence this cursor refers to
referred_object_id: amp::ObjectId,
/// The same as the `referred_object_id` but as an internal::ObjectID
internal_referred_object_id: ObjectId,
/// The OpID of the element within the sequence this cursor refers to
element_opid: amp::OpId,
/// The same as the `element_opid` but as an internal::OpID.
internal_element_opid: crate::internal::OpId,
index: usize,
}


@ -0,0 +1,897 @@
#![allow(dead_code)]
use std::{
cmp::{max, min},
collections::HashMap,
fmt::Debug,
hash::Hash,
iter::Iterator,
mem,
ops::AddAssign,
};
use fxhash::FxBuildHasher;
use rand::{rngs::SmallRng, Rng, SeedableRng};
#[derive(Debug, Copy, Clone, PartialEq)]
struct Link<K>
where
K: Clone + Copy + Debug + PartialEq,
{
key: Option<K>,
count: usize,
}
#[derive(Debug, Clone, PartialEq)]
struct LinkLevel<K>
where
K: Copy + Clone + Debug + PartialEq,
{
next: Link<K>,
prev: Link<K>,
}
#[derive(Debug, Clone, PartialEq)]
struct Node<K>
where
K: Copy + Clone + Debug + PartialEq,
{
level: usize,
links: Vec<LinkLevel<K>>,
// IDEA: can I make this an unsized array??
// IDEA - Node could be Node(Vec<K>)
}
impl<K> AddAssign for Link<K>
where
K: Copy + Clone + Debug + PartialEq,
{
fn add_assign(&mut self, other: Self) {
self.key = other.key;
self.count += other.count;
}
}
impl<K> Node<K>
where
K: Debug + Copy + Clone + PartialEq,
{
fn successor(&self) -> Option<&K> {
if self.links.is_empty() {
None
} else {
self.links[0].next.key.as_ref()
}
}
fn remove_node_after(&mut self, from_level: usize, removed_level: usize, links: &[Link<K>]) {
for (level, link) in links.iter().enumerate().take(self.level).skip(from_level) {
if level < removed_level {
self.links[level].next = *link;
} else {
self.links[level].next.count -= 1;
}
}
}
fn remove_node_before(&mut self, from_level: usize, removed_level: usize, links: &[Link<K>]) {
for (level, link) in links.iter().enumerate().take(self.level).skip(from_level) {
if level < removed_level {
self.links[level].prev = *link;
} else {
self.links[level].prev.count -= 1;
}
}
}
fn insert_node_after(
&mut self,
new_key: &K,
new_level: usize,
from_level: usize,
distance: usize,
is_head: bool,
) {
if new_level > self.level && !is_head {
panic!("Cannot increase the level of a non-head node")
}
self.level = max(self.level, new_level);
for level in from_level..self.level {
if level < new_level {
let next = Link {
key: Some(*new_key),
count: distance,
};
let prev = Link {
key: None,
count: 0,
};
if self.links.len() == level {
self.links.push(LinkLevel { next, prev });
} else {
self.links[level].next = next;
}
} else {
self.links[level].next.count += 1;
}
}
}
fn insert_node_before(
&mut self,
new_key: &K,
new_level: usize,
from_level: usize,
distance: usize,
) {
if new_level > self.level {
panic!("Cannot increase the level on insert_node_before")
}
for level in from_level..self.level {
if level < new_level {
self.links[level].prev = Link {
key: Some(*new_key),
count: distance,
};
} else {
self.links[level].prev.count += 1;
}
}
}
}
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct VecOrderedSet<K>
where
K: Clone + Debug + Hash + PartialEq + Eq,
{
keys: Vec<K>,
}
impl<K> VecOrderedSet<K>
where
K: Clone + Debug + Hash + PartialEq + Eq,
{
pub fn new() -> VecOrderedSet<K> {
VecOrderedSet { keys: Vec::new() }
}
}
pub trait OrderedSet<K>
where
K: Clone + Debug + Hash + PartialEq + Eq,
{
fn index_of(&self, key: &K) -> Option<usize>;
fn remove_key(&mut self, key: &K) -> Option<usize>;
fn insert_index(&mut self, index: usize, key: K) -> bool;
fn remove_index(&mut self, index: usize) -> Option<K>;
fn key_of(&self, index: usize) -> Option<&K>;
}
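// A sketch of the trait in use, via the Vec-backed implementation below:
//
//     let mut s = VecOrderedSet::new();
//     s.insert_index(0, "a");
//     s.insert_index(1, "b");
//     assert_eq!(s.index_of(&"b"), Some(1));
//     assert_eq!(s.remove_index(0), Some("a"));
//     assert_eq!(s.key_of(0), Some(&"b"));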
impl<K> OrderedSet<K> for SkipList<K>
where
K: Copy + Clone + Debug + Hash + PartialEq + Eq,
{
fn remove_index(&mut self, index: usize) -> Option<K> {
let key = self.key_of(index).copied();
if let Some(ref k) = &key {
self.remove(k);
}
key
}
fn remove_key(&mut self, key: &K) -> Option<usize> {
let index = self.index_of(key);
if index.is_some() {
self.remove(key);
}
index
}
fn key_of(&self, index: usize) -> Option<&K> {
if index >= self.len {
return None;
}
let target = index + 1;
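// Walk right and down: the count on each link records how many elements
// the hop skips, so descend a level whenever a hop would overshoot `target`.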
let mut node = &self.head;
let mut level = node.level - 1;
let mut count = 0;
loop {
while count + node.links[level].next.count > target {
level -= 1
}
count += node.links[level].next.count;
let k = node.links[level].next.key.as_ref();
if count == target {
return k;
}
node = self.get_node(k)
}
}
fn index_of(&self, key: &K) -> Option<usize> {
let mut count = 0;
let mut key = key;
loop {
if let Some(node) = self.nodes.get(key) {
let link = &node.links[node.level - 1].prev;
count += link.count;
if let Some(ref k) = &link.key {
key = k;
} else {
break;
}
} else {
return None;
}
}
Some(count - 1)
}
fn insert_index(&mut self, index: usize, key: K) -> bool {
if index == 0 {
self.insert_head(key)
} else {
self.key_of(index - 1)
.copied()
.map_or(false, |suc| self.insert_after(&suc, key))
}
}
}
impl<K> OrderedSet<K> for VecOrderedSet<K>
where
K: Clone + Debug + Hash + PartialEq + Eq,
{
fn remove_index(&mut self, index: usize) -> Option<K> {
if self.keys.len() > index {
let k = self.keys.remove(index);
Some(k)
} else {
None
}
}
fn key_of(&self, index: usize) -> Option<&K> {
self.keys.get(index)
}
fn index_of(&self, key: &K) -> Option<usize> {
self.keys.iter().position(|o| o == key)
}
fn insert_index(&mut self, index: usize, key: K) -> bool {
self.keys.insert(index, key);
true
}
fn remove_key(&mut self, key: &K) -> Option<usize> {
if let Some(index) = self.keys.iter().position(|o| o == key) {
self.keys.remove(index);
Some(index)
} else {
None
}
}
}
impl<K> Default for SkipList<K>
where
K: Copy + Clone + Debug + Hash + PartialEq + Eq,
{
fn default() -> Self {
Self::new()
}
}
impl<K> Default for VecOrderedSet<K>
where
K: Clone + Debug + Hash + PartialEq + Eq,
{
fn default() -> Self {
Self::new()
}
}
impl<'a, K> IntoIterator for &'a VecOrderedSet<K>
where
K: Clone + Debug + Hash + PartialEq + Eq,
{
type Item = &'a K;
type IntoIter = std::slice::Iter<'a, K>;
fn into_iter(self) -> std::slice::Iter<'a, K> {
self.keys.as_slice().iter()
}
}
impl<'a, K> IntoIterator for &'a SkipList<K>
where
K: Copy + Clone + Debug + Hash + PartialEq + Eq,
{
type Item = &'a K;
type IntoIter = SkipIterator<'a, K>;
fn into_iter(self) -> Self::IntoIter {
SkipIterator {
id: self.head.successor(),
nodes: &self.nodes,
}
}
}
#[derive(Debug, Clone)]
pub(crate) struct SkipList<K>
where
K: Copy + Clone + Debug + Hash + PartialEq + Eq,
{
nodes: HashMap<K, Node<K>, FxBuildHasher>,
head: Node<K>,
rng: SmallRng,
pub len: usize,
}
impl<K> PartialEq for SkipList<K>
where
K: Copy + Clone + Debug + Hash + PartialEq + Eq,
{
fn eq(&self, other: &Self) -> bool {
self.nodes.eq(&other.nodes)
}
}
impl<K> SkipList<K>
where
K: Copy + Clone + Debug + Hash + PartialEq + Eq,
{
pub fn new() -> SkipList<K> {
let nodes = HashMap::default();
let head = Node {
links: Vec::new(),
level: 1,
//is_head: true,
};
let len = 0;
let rng = SmallRng::seed_from_u64(0);
SkipList {
nodes,
head,
rng,
len,
}
}
fn remove(&mut self, key: &K) {
let removed = self
.nodes
.remove(key)
.unwrap_or_else(|| panic!("The given key cannot be removed because it does not exist"));
let max_level = self.head.level;
let mut pre = self.predecessors(removed.links[0].prev.key.as_ref(), max_level);
let mut suc = self.successors(removed.links[0].next.key.as_ref(), max_level);
for i in 0..max_level {
let distance = pre[i].count + suc[i].count - 1;
pre[i].count = distance;
suc[i].count = distance;
}
self.len -= 1;
let mut pre_level = 0;
let mut suc_level = 0;
for level in 1..=max_level {
let update_level = min(level, removed.level);
if level == max_level
|| pre.get(level).map(|l| &l.key) != pre.get(pre_level).map(|l| &l.key)
{
self.get_node_mut(pre[pre_level].key.as_ref())
.remove_node_after(pre_level, update_level, &suc);
pre_level = level;
}
if suc[suc_level].key.is_some()
&& (level == max_level
|| suc.get(level).map(|l| &l.key) != suc.get(suc_level).map(|l| &l.key))
{
self.get_node_mut(suc[suc_level].key.as_ref())
.remove_node_before(suc_level, update_level, &pre);
suc_level = level;
}
}
}
fn get_node(&self, key: Option<&K>) -> &Node<K> {
if let Some(k) = key {
self.nodes
.get(k)
.unwrap_or_else(|| panic!("get_node - missing key {:?}", key))
} else {
&self.head
}
}
fn get_node_mut(&mut self, key: Option<&K>) -> &mut Node<K> {
if let Some(k) = key {
self.nodes
.get_mut(k)
.unwrap_or_else(|| panic!("get_node - missing key {:?}", key))
} else {
&mut self.head
}
}
// IDEA: Can I merge the successors and predecessors into a single unified
// function so we don't need to zip the results?
fn predecessors(&self, predecessor: Option<&K>, max_level: usize) -> Vec<Link<K>> {
let mut pre = Vec::with_capacity(max_level);
pre.push(Link {
key: predecessor.copied(),
count: 1,
});
for level in 1..max_level {
let mut link = pre[level - 1];
while link.key.is_some() {
let node = self.get_node(link.key.as_ref());
if node.level > level {
break;
}
if node.level < level {
panic!("Level lower than expected");
}
link += node.links[level - 1].prev;
}
pre.push(link);
}
pre
}
fn successors(&self, successor: Option<&K>, max_level: usize) -> Vec<Link<K>> {
let mut suc = Vec::with_capacity(max_level);
suc.push(Link {
key: successor.copied(),
count: 1,
});
for level in 1..max_level {
let mut link = suc[level - 1];
while link.key.is_some() {
let node = self.get_node(link.key.as_ref());
if node.level > level {
break;
}
if node.level < level {
panic!("Level lower than expected");
}
link += node.links[level - 1].next;
}
suc.push(link);
}
suc
}
pub fn insert_head(&mut self, key: K) -> bool {
self.insert(None, key)
}
pub fn insert_after(&mut self, predecessor: &K, key: K) -> bool {
// TODO add check that `predecessor` is actually in the list and is not identical to `key`.
// The latter point is especially important as allowing cycles leads to an infinite loop in
// `ObjState.index_of`
self.insert(Some(predecessor), key)
}
fn insert(&mut self, predecessor: Option<&K>, key: K) -> bool {
if self.nodes.contains_key(&key) {
return false;
}
let new_level = self.random_level();
let max_level = max(new_level, self.head.level);
let successor = self.get_node(predecessor).successor();
let mut pre = self.predecessors(predecessor, max_level);
let mut suc = self.successors(successor, max_level);
self.len += 1;
let mut pre_level = 0;
let mut suc_level = 0;
for level in 1..=max_level {
let update_level = min(level, new_level);
if level == max_level
|| pre.get(level).map(|l| &l.key) != pre.get(pre_level).map(|l| &l.key)
{
self.get_node_mut(pre[pre_level].key.as_ref())
.insert_node_after(
&key,
update_level,
pre_level,
pre[pre_level].count,
pre[pre_level].key.is_none(),
);
pre_level = level;
}
if suc[suc_level].key.is_some()
&& (level == max_level
|| suc.get(level).map(|l| &l.key) != suc.get(suc_level).map(|l| &l.key))
{
self.get_node_mut(suc[suc_level].key.as_ref())
.insert_node_before(&key, update_level, suc_level, suc[suc_level].count);
suc_level = level;
}
}
pre.truncate(new_level);
suc.truncate(new_level);
let links = pre
.into_iter()
.zip(suc.into_iter())
.map(|(prev, next)| LinkLevel { next, prev })
.collect();
self.nodes.insert(
key,
Node {
level: new_level,
links,
},
);
true
}
// Returns a random number from the geometric distribution with p = 0.75.
// That is, returns k with probability p * (1 - p)^(k - 1).
// For example, returns 1 with probability 3/4, returns 2 with probability 3/16,
// returns 3 with probability 3/64, and so on.
fn random_level(&mut self) -> usize {
// Create random number between 0 and 2^32 - 1
// Count leading zeros in that 32-bit number
let rand: u32 = self.rng.gen();
let mut level = 1;
while rand < 1 << (32 - 2 * level) && level < 16 {
level += 1
}
level
}
}
pub(crate) struct SkipIterator<'a, K>
where
K: Debug + Copy + Clone + PartialEq,
{
id: Option<&'a K>,
nodes: &'a HashMap<K, Node<K>, FxBuildHasher>,
}
impl<'a, K> Iterator for SkipIterator<'a, K>
where
K: Debug + Copy + Clone + Hash + PartialEq + Eq,
{
type Item = &'a K;
fn next(&mut self) -> Option<&'a K> {
let mut successor = match self.id {
None => None,
Some(key) => self.nodes.get(key).and_then(Node::successor),
};
mem::swap(&mut successor, &mut self.id);
successor
}
}
#[derive(Debug, Clone, PartialEq)]
struct Delta<K>
where
K: Clone + Debug + Hash + PartialEq + Eq,
{
index: isize,
key: Option<K>,
}
// This is an experiment to see if I can speed up request processing
// index lookups by not mutating the skip list.
// The throughput gain was quite significant actually - about 1.5x in the
// mass edit perf test.
// Ideally we can speed up the skip list enough to not need this.
// Also, this could perform worse if the ops per change were huge,
// e.g. 10,000 changes with 10 ops each vs 10 changes with 10,000 ops each.
/*
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct OrdDelta<'a, K>
where
K: Clone + Debug + Hash + PartialEq + Eq,
{
list: Option<&'a SkipList<K>>,
delta: Vec<Delta<K>>,
}
impl<'a, K> OrdDelta<'a, K>
where
K: Clone + Debug + Hash + PartialEq + Eq,
{
pub fn new(list: Option<&'a SkipList<K>>) -> OrdDelta<'a, K> {
OrdDelta {
list,
delta: Vec::new(),
}
}
}
impl<'a, K> OrderedSet<K> for OrdDelta<'a, K>
where
K: Clone + Debug + Hash + PartialEq + Eq,
{
fn insert_index(&mut self, index: usize, key: K) -> bool {
let index = index as isize;
let delta = Delta {
index,
key: Some(key),
};
for i in 0..self.delta.len() {
if self.delta[i].index >= index {
self.delta.iter_mut().skip(i).for_each(|d| d.index += 1);
self.delta.insert(i, delta);
return true;
}
}
self.delta.push(delta);
true
}
fn key_of(&self, index: usize) -> Option<&K> {
let index = index as isize;
let mut acc: isize = 0;
for i in 0..self.delta.len() {
match &self.delta[i] {
Delta {
index: j,
key: Some(key),
} => {
if j == &index {
return Some(&key);
}
if j > &index {
break;
}
acc += 1;
}
Delta {
index: j,
key: None,
} => {
if j > &index {
break;
}
acc -= 1;
}
}
}
self.list
.and_then(|l| l.key_of((index as isize - acc) as usize))
}
fn remove_index(&mut self, index: usize) -> Option<K> {
let index = index as isize;
let delta = Delta { index, key: None };
for i in 0..self.delta.len() {
if self.delta[i].index == index && self.delta[i].key.is_some() {
let old_insert = self.delta.remove(i);
self.delta.iter_mut().skip(i).for_each(|d| d.index -= 1);
return old_insert.key;
}
if self.delta[i].index > index {
let key = self.key_of(index as usize).cloned();
self.delta.iter_mut().skip(i).for_each(|d| d.index -= 1);
self.delta.insert(i, delta);
return key;
}
}
let key = self.key_of(index as usize).cloned();
self.delta.push(delta);
key
}
fn index_of(&self, _key: &K) -> Option<usize> {
panic!("not implemented");
}
fn remove_key(&mut self, _key: &K) -> Option<usize> {
panic!("not implemented");
}
}
*/
// get(n)
// insert(n)
// len()
// remove(n)
// get_index_for(T)
// insert_after_(i,K,V)
#[cfg(test)]
mod tests {
use super::*;
//use std::str::FromStr;
#[test]
fn test_index_of() {
let mut s = SkipList::<&str>::new();
// should return None on an empty list
assert_eq!(s.index_of(&"foo"), None);
// should return None for a nonexistent key
s.insert_head("foo");
assert_eq!(s.index_of(&"baz"), None);
// should return 0 for the first list element
assert_eq!(s.index_of(&"foo"), Some(0));
// should return length-1 for the last list element
s.insert_after(&"foo", "bar");
s.insert_after(&"bar", "baz");
assert_eq!(s.index_of(&"baz"), Some(s.len - 1));
// should adjust based on removed elements
s.remove_key(&"foo");
assert_eq!(s.index_of(&"bar"), Some(0));
assert_eq!(s.index_of(&"baz"), Some(1));
s.remove_key(&"bar");
assert_eq!(s.index_of(&"baz"), Some(0));
}
#[test]
fn test_len() {
let mut s = SkipList::<&str>::new();
//should be 0 for an empty list
assert_eq!(s.len, 0);
// should increase by 1 for every insertion
s.insert_head("a3");
s.insert_head("a2");
s.insert_head("a1");
assert_eq!(s.len, 3);
//should decrease by 1 for every removal
s.remove_key(&"a2");
assert_eq!(s.len, 2);
}
#[test]
fn test_key_of() {
let mut s = SkipList::<&str>::new();
// should return None on an empty list
assert_eq!(s.key_of(0), None);
// should return None for an index past the end of the list
s.insert_head("a3");
s.insert_head("a2");
s.insert_head("a1");
assert_eq!(s.key_of(10), None);
// should return the first key for index 0
assert_eq!(s.key_of(0), Some(&"a1"));
// should return the last key for index -1
// assert_eq!(s.key_of(-1), Some("a3"));
// should return the last key for index length-1
assert_eq!(s.key_of(s.len - 1), Some(&"a3"));
// should not count removed elements
s.remove_key(&"a1");
s.remove_key(&"a3");
assert_eq!(s.key_of(0), Some(&"a2"));
}
#[test]
fn test_insert_index() {
let mut s = SkipList::<&str>::new();
// should insert the new key-value pair at the given index
s.insert_head("aaa");
s.insert_after(&"aaa", "ccc");
s.insert_index(1, "bbb");
assert_eq!(s.index_of(&"aaa"), Some(0));
assert_eq!(s.index_of(&"bbb"), Some(1));
assert_eq!(s.index_of(&"ccc"), Some(2));
// should insert at the head if the index is zero
s.insert_index(0, "a");
assert_eq!(s.key_of(0), Some(&"a"));
}
#[test]
fn test_remove_index() {
let mut s = SkipList::<&str>::new();
// should remove the value at the given index
s.insert_head("ccc");
s.insert_head("bbb");
s.insert_head("aaa");
s.remove_index(1);
assert_eq!(s.index_of(&"aaa"), Some(0));
assert_eq!(s.index_of(&"bbb"), None);
assert_eq!(s.index_of(&"ccc"), Some(1));
// should raise an error if the given index is out of bounds
assert_eq!(s.remove_index(100), None);
}
#[test]
fn test_remove_key_big() {
//String is not Copy so we have to create our elements first and then insert them
let elems: Vec<String> = (0..10000)
.map(|i| {
let j = 9999 - i;
format!("a{}", j)
})
.collect();
let mut s = SkipList::<&str>::new();
for elem in &elems {
s.insert_head(elem);
}
assert_eq!(s.index_of(&"a20"), Some(20));
assert_eq!(s.index_of(&"a500"), Some(500));
assert_eq!(s.index_of(&"a1000"), Some(1000));
for i in 0..5000 {
let j = (4999 - i) * 2 + 1;
s.remove_index(j);
}
assert_eq!(s.index_of(&"a4000"), Some(2000));
assert_eq!(s.index_of(&"a1000"), Some(500));
assert_eq!(s.index_of(&"a500"), Some(250));
assert_eq!(s.index_of(&"a20"), Some(10));
}
#[test]
fn test_remove_key() {
let mut s = SkipList::<&str>::new();
s.insert_head("a20");
s.insert_head("a19");
s.insert_head("a18");
s.insert_head("a17");
s.insert_head("a16");
s.insert_head("a15");
s.insert_head("a14");
s.insert_head("a13");
s.insert_head("a12");
s.insert_head("a11");
s.insert_head("a10");
s.insert_head("a9");
s.insert_head("a8");
s.insert_head("a7");
s.insert_head("a6");
s.insert_head("a5");
s.insert_head("a4");
s.insert_head("a3");
s.insert_head("a2");
s.insert_head("a1");
s.insert_head("a0");
assert_eq!(s.index_of(&"a20"), Some(20));
s.remove_key(&"a1");
s.remove_key(&"a3");
s.remove_key(&"a5");
s.remove_key(&"a7");
s.remove_key(&"a9");
s.remove_key(&"a11");
s.remove_key(&"a13");
s.remove_key(&"a15");
s.remove_key(&"a17");
s.remove_key(&"a19");
assert_eq!(s.index_of(&"a20"), Some(10));
assert_eq!(s.index_of(&"a10"), Some(5));
}
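#[test]
fn test_random_level_bounds() {
// A sanity sketch rather than a distribution test: the RNG is seeded,
// so this is deterministic, and `random_level` caps levels at 16.
let mut s = SkipList::<&str>::new();
for _ in 0..1000 {
let level = s.random_level();
assert!((1..=16).contains(&level));
}
}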
}


@ -0,0 +1,37 @@
use automerge_protocol as amp;
use crate::{
actor_map::ActorMap,
internal::{Key, OpId},
op_handle::OpHandle,
};
#[derive(Debug, Clone, PartialEq)]
pub(crate) enum PendingDiff {
SeqInsert(OpHandle, usize, OpId),
SeqRemove(OpHandle, usize),
Set(OpHandle),
CursorChange(Key),
}
impl PendingDiff {
pub fn operation_key(&self) -> Key {
match self {
Self::SeqInsert(op, _, _) | Self::SeqRemove(op, _) | Self::Set(op) => {
op.operation_key()
}
Self::CursorChange(k) => k.clone(),
}
}
pub fn edit(&self, actors: &ActorMap) -> Option<amp::DiffEdit> {
match *self {
Self::SeqInsert(_, index, opid) => Some(amp::DiffEdit::Insert {
index,
elem_id: actors.export_opid(&opid).into(),
}),
Self::SeqRemove(_, index) => Some(amp::DiffEdit::Remove { index }),
_ => None,
}
}
}


@ -0,0 +1,367 @@
use std::{
borrow::Cow,
collections::{HashMap, HashSet},
convert::TryFrom,
io,
io::Write,
};
use automerge_protocol::{ChangeHash, Patch};
use crate::{
decoding, decoding::Decoder, encoding, encoding::Encodable, AutomergeError, Backend, Change,
};
mod bloom;
mod state;
pub use bloom::BloomFilter;
pub use state::{SyncHave, SyncState};
const HASH_SIZE: usize = 32; // 256 bits = 32 bytes
const MESSAGE_TYPE_SYNC: u8 = 0x42; // first byte of a sync message, for identification
impl Backend {
pub fn generate_sync_message(&self, sync_state: &mut SyncState) -> Option<SyncMessage> {
let our_heads = self.get_heads();
let our_need = self.get_missing_deps(sync_state.their_heads.as_ref().unwrap_or(&vec![]));
let their_heads_set = if let Some(ref heads) = sync_state.their_heads {
heads.iter().collect::<HashSet<_>>()
} else {
HashSet::new()
};
let our_have = if our_need.iter().all(|hash| their_heads_set.contains(hash)) {
vec![self.make_bloom_filter(sync_state.shared_heads.clone())]
} else {
Vec::new()
};
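// If their `have` mentions a last_sync hash we do not recognise, the
// peers have diverged; send a reset message with an empty `have` so the
// exchange restarts from scratch.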
if let Some(ref their_have) = sync_state.their_have {
if let Some(first_have) = their_have.first().as_ref() {
if !first_have
.last_sync
.iter()
.all(|hash| self.get_change_by_hash(hash).is_some())
{
let reset_msg = SyncMessage {
heads: our_heads,
need: Vec::new(),
have: vec![SyncHave::default()],
changes: Vec::new(),
};
return Some(reset_msg);
}
}
}
let mut changes_to_send = if let (Some(their_have), Some(their_need)) = (
sync_state.their_have.as_ref(),
sync_state.their_need.as_ref(),
) {
self.get_changes_to_send(their_have.clone(), their_need)
} else {
Vec::new()
};
let heads_unchanged = if let Some(last_sent_heads) = sync_state.last_sent_heads.as_ref() {
last_sent_heads == &our_heads
} else {
false
};
let heads_equal = if let Some(their_heads) = sync_state.their_heads.as_ref() {
their_heads == &our_heads
} else {
false
};
if heads_unchanged && heads_equal && changes_to_send.is_empty() && our_need.is_empty() {
return None;
}
// deduplicate the changes to send with those we have already sent
changes_to_send.retain(|change| !sync_state.sent_hashes.contains(&change.hash));
sync_state.last_sent_heads = Some(our_heads.clone());
sync_state
.sent_hashes
.extend(changes_to_send.iter().map(|c| c.hash));
let sync_message = SyncMessage {
heads: our_heads,
have: our_have,
need: our_need,
changes: changes_to_send.into_iter().cloned().collect(),
};
Some(sync_message)
}
pub fn receive_sync_message(
&mut self,
sync_state: &mut SyncState,
message: SyncMessage,
) -> Result<Option<Patch>, AutomergeError> {
let mut patch = None;
let before_heads = self.get_heads();
let SyncMessage {
heads: message_heads,
changes: message_changes,
need: message_need,
have: message_have,
} = message;
let changes_is_empty = message_changes.is_empty();
if !changes_is_empty {
patch = Some(self.apply_changes(message_changes)?);
sync_state.shared_heads = advance_heads(
&before_heads.iter().collect(),
&self.get_heads().into_iter().collect(),
&sync_state.shared_heads,
)
}
// trim down the sent hashes to those that we know they haven't seen
self.filter_changes(&message_heads, &mut sync_state.sent_hashes);
if changes_is_empty && message_heads == before_heads {
sync_state.last_sent_heads = Some(message_heads.clone())
}
let known_heads = message_heads
.iter()
.filter(|head| self.get_change_by_hash(head).is_some())
.collect::<Vec<_>>();
if known_heads.len() == message_heads.len() {
sync_state.shared_heads = message_heads.clone()
} else {
sync_state.shared_heads = sync_state
.shared_heads
.iter()
.chain(known_heads)
.collect::<HashSet<_>>()
.into_iter()
.copied()
.collect::<Vec<_>>();
sync_state.shared_heads.sort();
}
sync_state.their_have = Some(message_have);
sync_state.their_heads = Some(message_heads);
sync_state.their_need = Some(message_need);
Ok(patch)
}
fn make_bloom_filter(&self, last_sync: Vec<ChangeHash>) -> SyncHave {
let new_changes = self.get_changes(&last_sync);
let hashes = new_changes
.into_iter()
.map(|change| change.hash)
.collect::<Vec<_>>();
SyncHave {
last_sync,
bloom: BloomFilter::from(&hashes[..]),
}
}
pub fn get_changes_to_send(&self, have: Vec<SyncHave>, need: &[ChangeHash]) -> Vec<&Change> {
if have.is_empty() {
need.iter()
.filter_map(|hash| self.get_change_by_hash(hash))
.collect()
} else {
let mut last_sync_hashes = HashSet::new();
let mut bloom_filters = Vec::with_capacity(have.len());
for h in have {
let SyncHave { last_sync, bloom } = h;
for hash in last_sync {
last_sync_hashes.insert(hash);
}
bloom_filters.push(bloom)
}
let last_sync_hashes = last_sync_hashes.into_iter().collect::<Vec<_>>();
let changes = self.get_changes(&last_sync_hashes);
let mut change_hashes = HashSet::with_capacity(changes.len());
let mut dependents: HashMap<ChangeHash, Vec<ChangeHash>> = HashMap::new();
let mut hashes_to_send = HashSet::new();
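// Send every change that is (probably) absent from all of their Bloom
// filters, then transitively include anything that depends on a change
// being sent, since a change cannot be applied before its dependencies.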
for change in &changes {
change_hashes.insert(change.hash);
for dep in &change.deps {
dependents.entry(*dep).or_default().push(change.hash);
}
if bloom_filters
.iter()
.all(|bloom| !bloom.contains_hash(&change.hash))
{
hashes_to_send.insert(change.hash);
}
}
let mut stack = hashes_to_send.iter().copied().collect::<Vec<_>>();
while let Some(hash) = stack.pop() {
if let Some(deps) = dependents.get(&hash) {
for dep in deps {
if hashes_to_send.insert(*dep) {
stack.push(*dep)
}
}
}
}
let mut changes_to_send = Vec::new();
for hash in need {
hashes_to_send.insert(*hash);
if !change_hashes.contains(hash) {
let change = self.get_change_by_hash(hash);
if let Some(change) = change {
changes_to_send.push(change)
}
}
}
for change in changes {
if hashes_to_send.contains(&change.hash) {
changes_to_send.push(change)
}
}
changes_to_send
}
}
}
#[derive(Debug, Clone)]
pub struct SyncMessage {
pub heads: Vec<ChangeHash>,
pub need: Vec<ChangeHash>,
pub have: Vec<SyncHave>,
pub changes: Vec<Change>,
}
impl SyncMessage {
pub fn encode(self) -> Result<Vec<u8>, encoding::Error> {
let mut buf = vec![MESSAGE_TYPE_SYNC];
encode_hashes(&mut buf, &self.heads)?;
encode_hashes(&mut buf, &self.need)?;
(self.have.len() as u32).encode(&mut buf)?;
for have in self.have {
encode_hashes(&mut buf, &have.last_sync)?;
have.bloom.into_bytes()?.encode(&mut buf)?;
}
(self.changes.len() as u32).encode(&mut buf)?;
for change in self.changes {
change.raw_bytes().encode(&mut buf)?;
}
Ok(buf)
}
pub fn decode(bytes: &[u8]) -> Result<SyncMessage, decoding::Error> {
let mut decoder = Decoder::new(Cow::Borrowed(bytes));
let message_type = decoder.read::<u8>()?;
if message_type != MESSAGE_TYPE_SYNC {
return Err(decoding::Error::WrongType {
expected_one_of: vec![MESSAGE_TYPE_SYNC],
found: message_type,
});
}
let heads = decode_hashes(&mut decoder)?;
let need = decode_hashes(&mut decoder)?;
let have_count = decoder.read::<u32>()?;
let mut have = Vec::with_capacity(have_count as usize);
for _ in 0..have_count {
let last_sync = decode_hashes(&mut decoder)?;
let bloom_bytes: Vec<u8> = decoder.read()?;
let bloom = BloomFilter::try_from(bloom_bytes.as_slice())?;
have.push(SyncHave { last_sync, bloom });
}
let change_count = decoder.read::<u32>()?;
let mut changes = Vec::with_capacity(change_count as usize);
for _ in 0..change_count {
let change = decoder.read()?;
changes.push(Change::from_bytes(change)?);
}
Ok(SyncMessage {
heads,
need,
have,
changes,
})
}
}
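// A minimal encode/decode round trip (an illustrative sketch; an empty
// message is the simplest well-formed value):
//
//     let msg = SyncMessage {
//         heads: Vec::new(),
//         need: Vec::new(),
//         have: Vec::new(),
//         changes: Vec::new(),
//     };
//     let bytes = msg.encode().unwrap();
//     assert_eq!(bytes[0], MESSAGE_TYPE_SYNC);
//     let round_tripped = SyncMessage::decode(&bytes).unwrap();
//     assert!(round_tripped.heads.is_empty());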
fn encode_hashes(buf: &mut Vec<u8>, hashes: &[ChangeHash]) -> Result<(), encoding::Error> {
debug_assert!(
hashes.windows(2).all(|h| h[0] <= h[1]),
"hashes were not sorted"
);
hashes.encode(buf)?;
Ok(())
}
impl Encodable for &[ChangeHash] {
fn encode<W: Write>(&self, buf: &mut W) -> io::Result<usize> {
let head = self.len().encode(buf)?;
let mut body = 0;
for hash in self.iter() {
buf.write_all(&hash.0)?;
body += hash.0.len()
}
Ok(head + body)
}
}
fn decode_hashes(decoder: &mut Decoder) -> Result<Vec<ChangeHash>, decoding::Error> {
let length = decoder.read::<u32>()?;
let mut hashes = Vec::with_capacity(length as usize);
for _ in 0..length {
let hash_bytes = decoder.read_bytes(HASH_SIZE)?;
let hash = ChangeHash::try_from(hash_bytes).map_err(decoding::Error::BadChangeFormat)?;
hashes.push(hash);
}
Ok(hashes)
}
fn advance_heads(
my_old_heads: &HashSet<&ChangeHash>,
my_new_heads: &HashSet<ChangeHash>,
our_old_shared_heads: &[ChangeHash],
) -> Vec<ChangeHash> {
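// The advanced shared heads are the heads gained since the last sync plus
// any previously shared heads that are still heads, deduplicated and
// sorted so both peers compute the same list.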
let new_heads = my_new_heads
.iter()
.filter(|head| !my_old_heads.contains(head))
.copied()
.collect::<Vec<_>>();
let common_heads = our_old_shared_heads
.iter()
.filter(|head| my_new_heads.contains(head))
.copied()
.collect::<Vec<_>>();
let mut advanced_heads = HashSet::with_capacity(new_heads.len() + common_heads.len());
for head in new_heads.into_iter().chain(common_heads) {
advanced_heads.insert(head);
}
let mut advanced_heads = advanced_heads.into_iter().collect::<Vec<_>>();
advanced_heads.sort();
advanced_heads
}


@ -1,7 +1,8 @@
use std::borrow::Borrow;
use std::{borrow::Cow, convert::TryFrom};
use crate::storage::parse;
use crate::ChangeHash;
use automerge_protocol::ChangeHash;
use crate::{decoding, decoding::Decoder, encoding, encoding::Encodable};
// These constants correspond to a 1% false positive rate. The values can be changed without
// breaking compatibility of the network protocol, since the parameters used for a particular
@ -9,7 +10,7 @@ use crate::ChangeHash;
const BITS_PER_ENTRY: u32 = 10;
const NUM_PROBES: u32 = 7;
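// With k probes and b bits per entry the false positive rate is roughly
// (1 - e^(-k/b))^k; for b = 10 and k = 7 that comes to about 0.008, i.e. ~1%.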
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize)]
#[derive(Default, Debug, Clone)]
pub struct BloomFilter {
num_entries: u32,
num_bits_per_entry: u32,
@ -17,52 +18,17 @@ pub struct BloomFilter {
bits: Vec<u8>,
}
impl Default for BloomFilter {
fn default() -> Self {
BloomFilter {
num_entries: 0,
num_bits_per_entry: BITS_PER_ENTRY,
num_probes: NUM_PROBES,
bits: Vec::new(),
}
}
}
#[derive(Debug, thiserror::Error)]
pub(crate) enum ParseError {
#[error(transparent)]
Leb128(#[from] parse::leb128::Error),
}
impl BloomFilter {
pub fn to_bytes(&self) -> Vec<u8> {
let mut buf = Vec::new();
if self.num_entries != 0 {
leb128::write::unsigned(&mut buf, self.num_entries as u64).unwrap();
leb128::write::unsigned(&mut buf, self.num_bits_per_entry as u64).unwrap();
leb128::write::unsigned(&mut buf, self.num_probes as u64).unwrap();
buf.extend(&self.bits);
}
buf
}
pub(crate) fn parse(input: parse::Input<'_>) -> parse::ParseResult<'_, Self, ParseError> {
if input.is_empty() {
Ok((input, Self::default()))
pub fn into_bytes(self) -> Result<Vec<u8>, encoding::Error> {
if self.num_entries == 0 {
Ok(Vec::new())
} else {
let (i, num_entries) = parse::leb128_u32(input)?;
let (i, num_bits_per_entry) = parse::leb128_u32(i)?;
let (i, num_probes) = parse::leb128_u32(i)?;
let (i, bits) = parse::take_n(bits_capacity(num_entries, num_bits_per_entry), i)?;
Ok((
i,
Self {
num_entries,
num_bits_per_entry,
num_probes,
bits: bits.to_vec(),
},
))
let mut buf = Vec::new();
self.num_entries.encode(&mut buf)?;
self.num_bits_per_entry.encode(&mut buf)?;
self.num_probes.encode(&mut buf)?;
buf.extend(self.bits);
Ok(buf)
}
}
@ -79,8 +45,7 @@ impl BloomFilter {
let z = u32::from_le_bytes([hash_bytes[8], hash_bytes[9], hash_bytes[10], hash_bytes[11]])
% modulo;
let mut probes = Vec::with_capacity(self.num_probes as usize);
probes.push(x);
let mut probes = vec![x];
for _ in 1..self.num_probes {
x = (x + y) % modulo;
y = (y + z) % modulo;
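// The derivation above looks like the "enhanced double hashing" scheme:
// start at x and repeatedly add y, with y itself drifting by z each step,
// all mod the filter size, so every probe index is drawn from one 96-bit
// slice of the change hash (x and y presumably from bytes 0..8).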
@ -91,13 +56,13 @@ impl BloomFilter {
fn add_hash(&mut self, hash: &ChangeHash) {
for probe in self.get_probes(hash) {
self.set_bit(probe as usize);
self.set_bit(probe as usize)
}
}
fn set_bit(&mut self, probe: usize) {
if let Some(byte) = self.bits.get_mut(probe >> 3) {
*byte |= 1 << (probe & 7);
*byte |= 1 << (probe & 7)
}
}
@ -121,23 +86,6 @@ impl BloomFilter {
true
}
}
pub fn from_hashes<H: Borrow<ChangeHash>>(hashes: impl ExactSizeIterator<Item = H>) -> Self {
let num_entries = hashes.len() as u32;
let num_bits_per_entry = BITS_PER_ENTRY;
let num_probes = NUM_PROBES;
let bits = vec![0; bits_capacity(num_entries, num_bits_per_entry)];
let mut filter = Self {
num_entries,
num_bits_per_entry,
num_probes,
bits,
};
for hash in hashes {
filter.add_hash(hash.borrow());
}
filter
}
}
fn bits_capacity(num_entries: u32, num_bits_per_entry: u32) -> usize {
@ -145,16 +93,44 @@ fn bits_capacity(num_entries: u32, num_bits_per_entry: u32) -> usize {
f as usize
}
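// e.g. bits_capacity(10, 10) comes to ceil(100 / 8) = 13 bytes, assuming the
// elided body rounds num_entries * num_bits_per_entry up to whole bytes.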
#[derive(thiserror::Error, Debug)]
#[error("{0}")]
pub struct DecodeError(String);
impl TryFrom<&[u8]> for BloomFilter {
type Error = DecodeError;
fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> {
Self::parse(parse::Input::new(bytes))
.map(|(_, b)| b)
.map_err(|e| DecodeError(e.to_string()))
impl From<&[ChangeHash]> for BloomFilter {
fn from(hashes: &[ChangeHash]) -> Self {
let num_entries = hashes.len() as u32;
let num_bits_per_entry = BITS_PER_ENTRY;
let num_probes = NUM_PROBES;
let bits = vec![0; bits_capacity(num_entries, num_bits_per_entry) as usize];
let mut filter = Self {
num_entries,
num_bits_per_entry,
num_probes,
bits,
};
for hash in hashes {
filter.add_hash(hash)
}
filter
}
}
impl TryFrom<&[u8]> for BloomFilter {
type Error = decoding::Error;
fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> {
if bytes.is_empty() {
Ok(Self::default())
} else {
let mut decoder = Decoder::new(Cow::Borrowed(bytes));
let num_entries = decoder.read()?;
let num_bits_per_entry = decoder.read()?;
let num_probes = decoder.read()?;
let bits =
decoder.read_bytes(bits_capacity(num_entries, num_bits_per_entry) as usize)?;
Ok(Self {
num_entries,
num_bits_per_entry,
num_probes,
bits: bits.to_vec(),
})
}
}
}

View file

@ -0,0 +1,67 @@
use std::{borrow::Cow, collections::HashSet};
use automerge_protocol::ChangeHash;
use super::{decode_hashes, encode_hashes};
use crate::{decoding, decoding::Decoder, encoding, BloomFilter};
const SYNC_STATE_TYPE: u8 = 0x43; // first byte of an encoded sync state, for identification
#[derive(Debug, Clone)]
pub struct SyncState {
pub shared_heads: Vec<ChangeHash>,
pub last_sent_heads: Option<Vec<ChangeHash>>,
pub their_heads: Option<Vec<ChangeHash>>,
pub their_need: Option<Vec<ChangeHash>>,
pub their_have: Option<Vec<SyncHave>>,
pub sent_hashes: HashSet<ChangeHash>,
}
#[derive(Debug, Clone, Default)]
pub struct SyncHave {
pub last_sync: Vec<ChangeHash>,
pub bloom: BloomFilter,
}
impl SyncState {
pub fn encode(&self) -> Result<Vec<u8>, encoding::Error> {
let mut buf = vec![SYNC_STATE_TYPE];
encode_hashes(&mut buf, &self.shared_heads)?;
Ok(buf)
}
pub fn decode(bytes: &[u8]) -> Result<Self, decoding::Error> {
let mut decoder = Decoder::new(Cow::Borrowed(bytes));
let record_type = decoder.read::<u8>()?;
if record_type != SYNC_STATE_TYPE {
return Err(decoding::Error::WrongType {
expected_one_of: vec![SYNC_STATE_TYPE],
found: record_type,
});
}
let shared_heads = decode_hashes(&mut decoder)?;
Ok(Self {
shared_heads,
last_sent_heads: Some(Vec::new()),
their_heads: None,
their_need: None,
their_have: Some(Vec::new()),
sent_hashes: HashSet::new(),
})
}
}
impl Default for SyncState {
fn default() -> Self {
Self {
shared_heads: Vec::new(),
last_sent_heads: Some(Vec::new()),
their_heads: None,
their_need: None,
their_have: None,
sent_hashes: HashSet::new(),
}
}
}
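// Round-trip sketch (only shared_heads survives serialization; decode resets
// the other fields to the defaults shown above):
//     let state = SyncState::default();
//     let bytes = state.encode()?;               // first byte is SYNC_STATE_TYPE (0x43)
//     let restored = SyncState::decode(&bytes)?;
//     assert_eq!(restored.shared_heads, state.shared_heads);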

File diff suppressed because it is too large

View file

@ -0,0 +1,636 @@
extern crate automerge_backend;
use std::{collections::HashSet, convert::TryInto};
use automerge_backend::{Backend, Change};
use automerge_protocol as protocol;
use automerge_protocol::{
ActorId, ChangeHash, Diff, DiffEdit, ElementId, MapDiff, MapType, ObjType, ObjectId, Op,
OpType, Patch, SeqDiff, SequenceType, UncompressedChange,
};
use maplit::hashmap;
use protocol::{Key, ScalarValue};
#[test]
fn test_apply_local_change() {
let actor: ActorId = "eb738e04ef8848ce8b77309b6c7f7e39".try_into().unwrap();
let change_request = UncompressedChange {
actor_id: actor.clone(),
time: 0,
message: None,
hash: None,
seq: 1,
deps: Vec::new(),
start_op: 1,
operations: vec![Op {
action: protocol::OpType::Set("magpie".into()),
key: "bird".into(),
obj: ObjectId::Root,
insert: false,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
};
let mut backend = Backend::new();
let patch = backend.apply_local_change(change_request).unwrap().0;
let changes = backend.get_changes(&[]);
let expected_change = UncompressedChange {
actor_id: actor.clone(),
seq: 1,
start_op: 1,
time: changes[0].time,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![Op {
action: OpType::Set("magpie".into()),
obj: ObjectId::Root,
key: "bird".into(),
pred: Vec::new(),
insert: false,
}],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
assert_eq!(changes[0], &expected_change);
let expected_patch = Patch {
actor: Some(actor.clone()),
max_op: 1,
pending_changes: 0,
seq: Some(1),
clock: hashmap! {
actor => 1,
},
deps: Vec::new(),
diffs: Some(Diff::Map(MapDiff {
object_id: ObjectId::Root,
obj_type: MapType::Map,
props: hashmap! {
"bird".into() => hashmap!{
"1@eb738e04ef8848ce8b77309b6c7f7e39".try_into().unwrap() => Diff::Value("magpie".into())
}
},
})),
};
assert_eq!(patch, expected_patch);
}
#[test]
fn test_error_on_duplicate_requests() {
let actor: ActorId = "37704788917a499cb0206fa8519ac4d9".try_into().unwrap();
let change_request1 = UncompressedChange {
actor_id: actor.clone(),
seq: 1,
message: None,
hash: None,
time: 0,
deps: Vec::new(),
start_op: 1,
operations: vec![Op {
action: protocol::OpType::Set("magpie".into()),
obj: ObjectId::Root,
key: "bird".into(),
insert: false,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
};
let change_request2 = UncompressedChange {
actor_id: actor,
seq: 2,
message: None,
hash: None,
time: 0,
deps: Vec::new(),
start_op: 2,
operations: vec![Op {
action: protocol::OpType::Set("jay".into()),
obj: ObjectId::Root,
key: "bird".into(),
insert: false,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
};
let mut backend = Backend::new();
backend.apply_local_change(change_request1.clone()).unwrap();
backend.apply_local_change(change_request2.clone()).unwrap();
assert!(backend.apply_local_change(change_request1).is_err());
assert!(backend.apply_local_change(change_request2).is_err());
}
#[test]
fn test_handle_concurrent_frontend_and_backend_changes() {
let actor: ActorId = "cb55260e9d7e457886a4fc73fd949202".try_into().unwrap();
let local1 = UncompressedChange {
actor_id: actor.clone(),
seq: 1,
time: 0,
deps: Vec::new(),
message: None,
hash: None,
start_op: 1,
operations: vec![Op {
action: protocol::OpType::Set("magpie".into()),
obj: ObjectId::Root,
key: "bird".into(),
insert: false,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
};
let local2 = UncompressedChange {
actor_id: actor.clone(),
seq: 2,
start_op: 2,
time: 0,
deps: Vec::new(),
message: None,
hash: None,
operations: vec![Op {
action: protocol::OpType::Set("jay".into()),
obj: ObjectId::Root,
key: "bird".into(),
insert: false,
pred: vec![actor.op_id_at(1)],
}],
extra_bytes: Vec::new(),
};
let remote_actor: ActorId = "6d48a01318644eed90455d2cb68ac657".try_into().unwrap();
let remote1 = UncompressedChange {
actor_id: remote_actor.clone(),
seq: 1,
start_op: 1,
time: 0,
deps: Vec::new(),
message: None,
hash: None,
operations: vec![Op {
action: protocol::OpType::Set("goldfish".into()),
obj: ObjectId::Root,
key: "fish".into(),
pred: Vec::new(),
insert: false,
}],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
let mut expected_change1 = UncompressedChange {
actor_id: actor.clone(),
seq: 1,
start_op: 1,
time: 0,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![Op {
action: protocol::OpType::Set("magpie".into()),
obj: ObjectId::Root,
key: "bird".into(),
pred: Vec::new(),
insert: false,
}],
extra_bytes: Vec::new(),
};
let mut expected_change2 = UncompressedChange {
actor_id: remote_actor,
seq: 1,
start_op: 1,
time: 0,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![Op {
action: protocol::OpType::Set("goldfish".into()),
key: "fish".into(),
obj: ObjectId::Root,
pred: Vec::new(),
insert: false,
}],
extra_bytes: Vec::new(),
};
let mut expected_change3 = UncompressedChange {
actor_id: actor.clone(),
seq: 2,
start_op: 2,
time: 0,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![Op {
action: protocol::OpType::Set("jay".into()),
obj: ObjectId::Root,
key: "bird".into(),
pred: vec![actor.op_id_at(1)],
insert: false,
}],
extra_bytes: Vec::new(),
};
let mut backend = Backend::new();
backend.apply_local_change(local1).unwrap();
let backend_after_first = backend.clone();
let changes1 = backend_after_first.get_changes(&[]);
let change01 = changes1.get(0).unwrap();
backend.apply_changes(vec![remote1]).unwrap();
let backend_after_second = backend.clone();
let changes2 = backend_after_second.get_changes(&[change01.hash]);
let change12 = *changes2.get(0).unwrap();
backend.apply_local_change(local2).unwrap();
let changes3 = backend.get_changes(&[change01.hash, change12.hash]);
let change23 = changes3.get(0).unwrap();
expected_change1.time = change01.time;
expected_change2.time = change12.time;
expected_change3.time = change23.time;
expected_change3.deps = vec![change01.hash];
assert_eq!(change01, &&expected_change1.try_into().unwrap());
assert_eq!(change12, &expected_change2.try_into().unwrap());
assert_changes_equal(change23.decode(), expected_change3.clone());
assert_eq!(change23, &&expected_change3.try_into().unwrap());
}
#[test]
fn test_transform_list_indexes_into_element_ids() {
let actor: ActorId = "8f389df8fecb4ddc989102321af3578e".try_into().unwrap();
let remote_actor: ActorId = "9ba21574dc44411b8ce37bc6037a9687".try_into().unwrap();
let remote1: Change = UncompressedChange {
actor_id: remote_actor.clone(),
seq: 1,
start_op: 1,
time: 0,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![Op {
action: protocol::OpType::Make(ObjType::list()),
key: "birds".into(),
obj: ObjectId::Root,
pred: Vec::new(),
insert: false,
}],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
let remote2: Change = UncompressedChange {
actor_id: remote_actor.clone(),
seq: 2,
start_op: 2,
time: 0,
message: None,
hash: None,
deps: vec![remote1.hash],
operations: vec![Op {
action: protocol::OpType::Set("magpie".into()),
obj: ObjectId::from(remote_actor.op_id_at(1)),
key: ElementId::Head.into(),
insert: true,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
let local1 = UncompressedChange {
actor_id: actor.clone(),
seq: 1,
message: None,
hash: None,
time: 0,
deps: vec![remote1.hash],
start_op: 2,
operations: vec![Op {
obj: ObjectId::from(remote_actor.op_id_at(1)),
action: protocol::OpType::Set("goldfinch".into()),
key: ElementId::Head.into(),
insert: true,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
};
let local2 = UncompressedChange {
actor_id: actor.clone(),
seq: 2,
message: None,
hash: None,
deps: Vec::new(),
time: 0,
start_op: 3,
operations: vec![Op {
obj: ObjectId::from(remote_actor.op_id_at(1)),
action: protocol::OpType::Set("wagtail".into()),
key: actor.op_id_at(2).into(),
insert: true,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
};
let local3 = UncompressedChange {
actor_id: actor.clone(),
seq: 3,
message: None,
hash: None,
deps: vec![remote2.hash],
time: 0,
start_op: 4,
operations: vec![
Op {
obj: ObjectId::from(remote_actor.op_id_at(1)),
action: protocol::OpType::Set("Magpie".into()),
key: remote_actor.op_id_at(2).into(),
insert: false,
pred: vec![remote_actor.op_id_at(2)],
},
Op {
obj: ObjectId::from(remote_actor.op_id_at(1)),
action: protocol::OpType::Set("Goldfinch".into()),
key: actor.op_id_at(2).into(),
insert: false,
pred: vec![actor.op_id_at(2)],
},
],
extra_bytes: Vec::new(),
};
let mut expected_change1 = UncompressedChange {
actor_id: actor.clone(),
seq: 1,
start_op: 2,
time: 0,
message: None,
hash: None,
deps: vec![remote1.hash],
operations: vec![Op {
obj: ObjectId::from(remote_actor.op_id_at(1)),
action: protocol::OpType::Set("goldfinch".into()),
key: ElementId::Head.into(),
insert: true,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
};
let mut expected_change2 = UncompressedChange {
actor_id: actor.clone(),
seq: 2,
start_op: 3,
time: 0,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![Op {
obj: ObjectId::from(remote_actor.op_id_at(1)),
action: protocol::OpType::Set("wagtail".into()),
key: actor.op_id_at(2).into(),
insert: true,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
};
let mut expected_change3 = UncompressedChange {
actor_id: actor.clone(),
seq: 3,
start_op: 4,
time: 0,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![
Op {
obj: ObjectId::from(remote_actor.op_id_at(1)),
action: protocol::OpType::Set("Magpie".into()),
key: remote_actor.op_id_at(2).into(),
pred: vec![remote_actor.op_id_at(2)],
insert: false,
},
Op {
obj: ObjectId::from(remote_actor.op_id_at(1)),
action: protocol::OpType::Set("Goldfinch".into()),
key: actor.op_id_at(2).into(),
pred: vec![actor.op_id_at(2)],
insert: false,
},
],
extra_bytes: Vec::new(),
};
let mut backend = Backend::new();
backend.apply_changes(vec![remote1.clone()]).unwrap();
backend.apply_local_change(local1).unwrap();
let backend_after_first = backend.clone();
let changes1 = backend_after_first.get_changes(&[remote1.hash]);
let change12 = *changes1.get(0).unwrap();
backend.apply_changes(vec![remote2.clone()]).unwrap();
backend.apply_local_change(local2).unwrap();
let backend_after_second = backend.clone();
let changes2 = backend_after_second.get_changes(&[remote2.hash, change12.hash]);
let change23 = *changes2.get(0).unwrap();
backend.apply_local_change(local3).unwrap();
let changes3 = backend.get_changes(&[remote2.hash, change23.hash]);
let change34 = changes3.get(0).unwrap().decode();
expected_change1.time = change12.time;
expected_change2.time = change23.time;
expected_change2.deps = vec![change12.hash];
expected_change3.time = change34.time;
expected_change3.deps = vec![remote2.hash, change23.hash];
assert_changes_equal(change34, expected_change3);
assert_eq!(change12, &expected_change1.try_into().unwrap());
assert_changes_equal(change23.decode(), expected_change2.clone());
assert_eq!(change23, &expected_change2.try_into().unwrap());
}
#[test]
fn test_handle_list_insertion_and_deletion_in_same_change() {
let actor: ActorId = "0723d2a1940744868ffd6b294ada813f".try_into().unwrap();
let local1 = UncompressedChange {
actor_id: actor.clone(),
seq: 1,
message: None,
hash: None,
time: 0,
deps: Vec::new(),
start_op: 1,
operations: vec![Op {
obj: ObjectId::Root,
action: protocol::OpType::Make(ObjType::list()),
key: "birds".into(),
insert: false,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
};
let local2 = UncompressedChange {
actor_id: actor.clone(),
seq: 2,
message: None,
hash: None,
time: 0,
deps: Vec::new(),
start_op: 2,
operations: vec![
Op {
obj: ObjectId::from(actor.op_id_at(1)),
action: protocol::OpType::Set("magpie".into()),
key: ElementId::Head.into(),
insert: true,
pred: Vec::new(),
},
Op {
obj: ObjectId::from(actor.op_id_at(1)),
action: protocol::OpType::Del,
key: actor.op_id_at(2).into(),
insert: false,
pred: vec![actor.op_id_at(2)],
},
],
extra_bytes: Vec::new(),
};
let mut expected_patch = Patch {
actor: Some(actor.clone()),
seq: Some(2),
max_op: 3,
pending_changes: 0,
clock: hashmap! {
actor.clone() => 2
},
deps: Vec::new(),
diffs: Some(Diff::Map(MapDiff {
object_id: ObjectId::Root,
obj_type: MapType::Map,
props: hashmap! {
"birds".into() => hashmap!{
actor.op_id_at(1) => Diff::Seq(SeqDiff{
object_id: ObjectId::from(actor.op_id_at(1)),
obj_type: SequenceType::List,
edits: vec![
DiffEdit::Insert{index: 0, elem_id: actor.op_id_at(2).into()},
DiffEdit::Remove{index: 0},
],
props: hashmap!{},
})
}
},
})),
};
let mut backend = Backend::new();
backend.apply_local_change(local1).unwrap();
let patch = backend.apply_local_change(local2).unwrap().0;
expected_patch.deps = patch.deps.clone();
assert_eq!(patch, expected_patch);
let changes = backend.get_changes(&[]);
assert_eq!(changes.len(), 2);
let change1 = changes[0].clone();
let change2 = changes[1].clone();
let expected_change1 = UncompressedChange {
actor_id: actor.clone(),
seq: 1,
start_op: 1,
time: change1.time,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![Op {
obj: ObjectId::Root,
action: protocol::OpType::Make(ObjType::list()),
key: "birds".into(),
insert: false,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
let expected_change2 = UncompressedChange {
actor_id: actor.clone(),
seq: 2,
start_op: 2,
time: change2.time,
message: None,
hash: None,
deps: vec![change1.hash],
operations: vec![
Op {
obj: ObjectId::from(actor.op_id_at(1)),
action: protocol::OpType::Set("magpie".into()),
key: ElementId::Head.into(),
insert: true,
pred: Vec::new(),
},
Op {
obj: ObjectId::from(actor.op_id_at(1)),
action: protocol::OpType::Del,
key: actor.op_id_at(2).into(),
pred: vec![actor.op_id_at(2)],
insert: false,
},
],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
assert_eq!(change1, expected_change1);
assert_eq!(change2, expected_change2);
}
/// Asserts that the changes are equal without respect to order of the hashes
/// in the change dependencies
fn assert_changes_equal(mut change1: UncompressedChange, change2: UncompressedChange) {
let change2_clone = change2.clone();
let deps1: HashSet<&ChangeHash> = change1.deps.iter().collect();
let deps2: HashSet<&ChangeHash> = change2.deps.iter().collect();
assert_eq!(
deps1, deps2,
"The two changes did not have equal dependencies, left: {:?}, right: {:?}",
deps1, deps2
);
change1.deps = change2.deps;
assert_eq!(change1, change2_clone)
}
#[test]
fn test_random_change_start_op_overflow() {
let change = UncompressedChange {
operations: vec![Op {
action: OpType::Set(ScalarValue::Int(-2512681860335064791)),
obj: ObjectId::Root,
key: Key::Map("".to_owned()),
pred: vec![],
insert: false,
}],
actor_id: ActorId::random(),
hash: None,
seq: 1,
start_op: 18446744073709551615,
time: 10766414268858367,
message: None,
deps: vec![],
extra_bytes: vec![65, 41, 1, 67, 0, 0, 0, 0, 0, 7, 210, 214, 194, 2, 0],
};
let mut b = Backend::new();
let _ = b.apply_local_change(change);
}

View file

@ -0,0 +1,33 @@
use automerge_backend::{Backend, Change};
// This test reproduces issue 95 (https://github.com/automerge/automerge-rs/issues/95)
// where compressed changes were losing their header during decompression such
// that when the compressed changes were written out again they were invalid.
#[test]
fn test_deflate_correctly() {
let init_change: Vec<u8> = vec![
133, 111, 74, 131, 252, 38, 106, 255, 2, 195, 2, 117, 143, 189, 74, 4, 49, 16, 128, 147,
189, 61, 239, 7, 185, 83, 196, 43, 101, 26, 75, 183, 178, 179, 17, 181, 177, 17, 27, 181,
14, 217, 120, 55, 144, 77, 150, 73, 178, 156, 87, 172, 133, 224, 3, 88, 248, 58, 98, 227,
29, 86, 98, 167, 22, 118, 190, 133, 96, 86, 177, 176, 48, 153, 129, 249, 253, 102, 134,
173, 124, 108, 220, 111, 221, 188, 239, 14, 239, 6, 184, 57, 111, 157, 84, 156, 127, 190,
190, 93, 45, 13, 14, 13, 122, 20, 26, 103, 194, 163, 53, 172, 207, 219, 201, 112, 181, 179,
54, 90, 223, 217, 238, 239, 45, 159, 246, 207, 94, 120, 217, 98, 201, 19, 103, 44, 153, 37,
173, 180, 189, 212, 89, 240, 110, 221, 110, 177, 222, 188, 137, 177, 228, 146, 49, 254,
171, 53, 235, 61, 112, 206, 146, 186, 35, 3, 57, 75, 174, 43, 39, 168, 115, 82, 38, 230,
255, 179, 83, 175, 166, 158, 45, 120, 146, 250, 139, 82, 37, 252, 251, 69, 119, 218, 208,
227, 79, 31, 57, 239, 198, 252, 168, 190, 229, 215, 252, 192, 26, 37, 161, 176, 90, 163,
131, 137, 50, 17, 66, 232, 129, 208, 5, 151, 193, 49, 9, 229, 148, 241, 80, 41, 163, 76,
188, 201, 65, 161, 124, 112, 32, 60, 120, 75, 81, 160, 12, 186, 66, 35, 8, 42, 65, 216,
244, 252, 16, 43, 244, 66, 129, 37, 137, 224, 84, 14, 185, 213, 177, 150, 130, 167, 80,
128, 8, 50, 118, 102, 112, 20, 180, 22, 5, 52, 183, 69, 164, 22, 18, 13, 10, 80, 36, 124,
6, 251, 36, 28, 4, 237, 9, 37, 170, 56, 21, 65, 5, 240, 129, 202, 63, 107, 158, 19, 154,
49, 70, 74, 86, 10, 18, 99, 18, 229, 36, 183, 50, 20, 113, 229, 103, 206, 190, 0,
];
let change: Change = Change::from_bytes(init_change.clone()).unwrap();
let mut backend = Backend::new();
backend.apply_changes(vec![change]).unwrap();
let change_back = backend.get_changes(&[]);
assert_eq!(change_back[0].raw_bytes().to_vec(), init_change);
}

View file

@ -0,0 +1,593 @@
extern crate automerge_backend;
use std::convert::TryInto;
use automerge_backend::{Backend, Change};
use automerge_protocol as amp;
use automerge_protocol::{
ActorId, Diff, DiffEdit, ElementId, MapDiff, MapType, ObjectId, Op, Patch, ScalarValue,
SeqDiff, SequenceType, UncompressedChange,
};
use maplit::hashmap;
#[test]
fn test_include_most_recent_value_for_key() {
let actor: ActorId = "ec28cfbcdb9e4f32ad24b3c776e651b0".try_into().unwrap();
let change1: Change = UncompressedChange {
actor_id: actor.clone(),
seq: 1,
start_op: 1,
time: 0,
deps: Vec::new(),
message: None,
hash: None,
operations: vec![Op {
action: amp::OpType::Set("magpie".into()),
key: "bird".into(),
obj: ObjectId::Root,
pred: Vec::new(),
insert: false,
}],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
let change2: Change = UncompressedChange {
actor_id: actor.clone(),
seq: 2,
start_op: 2,
time: 0,
message: None,
hash: None,
deps: vec![change1.hash],
operations: vec![Op {
obj: ObjectId::Root,
action: amp::OpType::Set("blackbird".into()),
key: "bird".into(),
pred: vec![actor.op_id_at(1)],
insert: false,
}],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
let expected_patch = Patch {
actor: None,
seq: None,
max_op: 2,
pending_changes: 0,
clock: hashmap! {
actor.clone() => 2,
},
deps: vec![change2.hash],
diffs: Some(Diff::Map(MapDiff {
object_id: ObjectId::Root,
obj_type: MapType::Map,
props: hashmap! {
"bird".into() => hashmap!{
actor.op_id_at(2) => Diff::Value("blackbird".into()),
}
},
})),
};
let mut backend = Backend::new();
backend.load_changes(vec![change1, change2]).unwrap();
let patch = backend.get_patch().unwrap();
assert_eq!(patch, expected_patch)
}
#[test]
fn test_includes_conflicting_values_for_key() {
let actor1: ActorId = "111111".try_into().unwrap();
let actor2: ActorId = "222222".try_into().unwrap();
let change1: Change = UncompressedChange {
actor_id: actor1.clone(),
seq: 1,
start_op: 1,
time: 0,
deps: Vec::new(),
message: None,
hash: None,
operations: vec![Op {
action: amp::OpType::Set("magpie".into()),
obj: ObjectId::Root,
key: "bird".into(),
pred: Vec::new(),
insert: false,
}],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
let change2: Change = UncompressedChange {
actor_id: actor2.clone(),
seq: 1,
start_op: 1,
time: 0,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![Op {
action: amp::OpType::Set("blackbird".into()),
key: "bird".into(),
obj: ObjectId::Root,
pred: Vec::new(),
insert: false,
}],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
let expected_patch = Patch {
clock: hashmap! {
actor1.clone() => 1,
actor2.clone() => 1,
},
max_op: 1,
pending_changes: 0,
seq: None,
actor: None,
deps: vec![change1.hash, change2.hash],
diffs: Some(Diff::Map(MapDiff {
object_id: ObjectId::Root,
obj_type: MapType::Map,
props: hashmap! {
"bird".into() => hashmap!{
actor1.op_id_at(1) => Diff::Value("magpie".into()),
actor2.op_id_at(1) => Diff::Value("blackbird".into()),
},
},
})),
};
let mut backend = Backend::new();
backend.load_changes(vec![change1, change2]).unwrap();
let patch = backend.get_patch().unwrap();
assert_eq!(patch, expected_patch)
}
#[test]
fn test_handles_counter_increment_at_keys_in_a_map() {
let actor: ActorId = "46c92088e4484ae5945dc63bf606a4a5".try_into().unwrap();
let change1: Change = UncompressedChange {
actor_id: actor.clone(),
seq: 1,
start_op: 1,
time: 0,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![Op {
action: amp::OpType::Set(ScalarValue::Counter(1)),
obj: ObjectId::Root,
key: "counter".into(),
pred: Vec::new(),
insert: false,
}],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
let change2: Change = UncompressedChange {
actor_id: actor.clone(),
seq: 2,
start_op: 2,
time: 0,
deps: vec![change1.hash],
message: None,
hash: None,
operations: vec![Op {
action: amp::OpType::Inc(2),
obj: ObjectId::Root,
key: "counter".into(),
pred: vec![actor.op_id_at(1)],
insert: false,
}],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
let expected_patch = Patch {
seq: None,
actor: None,
clock: hashmap! {
actor.clone() => 2,
},
max_op: 2,
pending_changes: 0,
deps: vec![change2.hash],
diffs: Some(Diff::Map(MapDiff {
object_id: ObjectId::Root,
obj_type: MapType::Map,
props: hashmap! {
"counter".into() => hashmap!{
actor.op_id_at(1) => Diff::Value(ScalarValue::Counter(3))
}
},
})),
};
let mut backend = Backend::new();
backend.load_changes(vec![change1, change2]).unwrap();
let patch = backend.get_patch().unwrap();
assert_eq!(patch, expected_patch)
}
#[test]
fn test_creates_nested_maps() {
let actor: ActorId = "06148f9422cb40579fd02f1975c34a51".try_into().unwrap();
let change1: Change = UncompressedChange {
actor_id: actor.clone(),
seq: 1,
start_op: 1,
time: 0,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![
Op {
action: amp::OpType::Make(amp::ObjType::map()),
obj: ObjectId::Root,
key: "birds".into(),
pred: Vec::new(),
insert: false,
},
Op {
action: amp::OpType::Set(ScalarValue::F64(3.0)),
key: "wrens".into(),
obj: ObjectId::from(actor.op_id_at(1)),
pred: Vec::new(),
insert: false,
},
],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
let change2: Change = UncompressedChange {
actor_id: actor.clone(),
seq: 2,
start_op: 3,
time: 0,
deps: vec![change1.hash],
message: None,
hash: None,
operations: vec![
Op {
obj: ObjectId::from(actor.op_id_at(1)),
action: amp::OpType::Del,
key: "wrens".into(),
pred: vec![actor.op_id_at(2)],
insert: false,
},
Op {
obj: ObjectId::from(actor.op_id_at(1)),
action: amp::OpType::Set(ScalarValue::F64(15.0)),
key: "sparrows".into(),
pred: Vec::new(),
insert: false,
},
],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
let expected_patch = Patch {
clock: hashmap! {
actor.clone() => 2,
},
actor: None,
seq: None,
max_op: 4,
pending_changes: 0,
deps: vec![change2.hash],
diffs: Some(Diff::Map(MapDiff {
object_id: ObjectId::Root,
obj_type: MapType::Map,
props: hashmap! {
"birds".into() => hashmap!{
actor.op_id_at(1) => Diff::Map(MapDiff{
object_id: ObjectId::from(actor.op_id_at(1)),
obj_type: MapType::Map,
props: hashmap!{
"sparrows".into() => hashmap!{
actor.op_id_at(4) => Diff::Value(ScalarValue::F64(15.0))
}
}
})
}
},
})),
};
let mut backend = Backend::new();
backend.load_changes(vec![change1, change2]).unwrap();
let patch = backend.get_patch().unwrap();
assert_eq!(patch, expected_patch)
}
#[test]
fn test_create_lists() {
let actor: ActorId = "90bf7df682f747fa82ac604b35010906".try_into().unwrap();
let change1: Change = UncompressedChange {
actor_id: actor.clone(),
seq: 1,
start_op: 1,
time: 0,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![
Op {
action: amp::OpType::Make(amp::ObjType::list()),
obj: ObjectId::Root,
key: "birds".into(),
pred: Vec::new(),
insert: false,
},
Op {
obj: ObjectId::from(actor.op_id_at(1)),
action: amp::OpType::Set("chaffinch".into()),
key: ElementId::Head.into(),
insert: true,
pred: Vec::new(),
},
],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
let expected_patch = Patch {
clock: hashmap! {
actor.clone() => 1,
},
max_op: 2,
pending_changes: 0,
actor: None,
seq: None,
deps: vec![change1.hash],
diffs: Some(Diff::Map(MapDiff {
object_id: ObjectId::Root,
obj_type: MapType::Map,
props: hashmap! {
"birds".into() => hashmap!{
actor.op_id_at(1) => Diff::Seq(SeqDiff{
object_id: ObjectId::from(actor.op_id_at(1)),
obj_type: SequenceType::List,
edits: vec![DiffEdit::Insert {
index: 0,
elem_id: actor.op_id_at(2).into()
}],
props: hashmap!{
0 => hashmap!{
"2@90bf7df682f747fa82ac604b35010906".try_into().unwrap() => Diff::Value("chaffinch".into())
}
}
})
}
},
})),
};
let mut backend = Backend::new();
backend.load_changes(vec![change1]).unwrap();
let patch = backend.get_patch().unwrap();
assert_eq!(patch, expected_patch)
}
#[test]
fn test_includes_latests_state_of_list() {
let actor: ActorId = "6caaa2e433de42ae9c3fa65c9ff3f03e".try_into().unwrap();
let change1: Change = UncompressedChange {
actor_id: actor.clone(),
seq: 1,
start_op: 1,
time: 0,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![
Op {
action: amp::OpType::Make(amp::ObjType::list()),
obj: ObjectId::Root,
key: "todos".into(),
pred: Vec::new(),
insert: false,
},
Op {
action: amp::OpType::Make(amp::ObjType::map()),
obj: ObjectId::from(actor.op_id_at(1)),
key: ElementId::Head.into(),
insert: true,
pred: Vec::new(),
},
Op {
obj: ObjectId::from(actor.op_id_at(2)),
action: amp::OpType::Set("water plants".into()),
key: "title".into(),
pred: Vec::new(),
insert: false,
},
Op {
obj: ObjectId::from(actor.op_id_at(2)),
action: amp::OpType::Set(false.into()),
key: "done".into(),
pred: Vec::new(),
insert: false,
},
],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
let expected_patch = Patch {
clock: hashmap! {
actor.clone() => 1
},
max_op: 4,
pending_changes: 0,
actor: None,
seq: None,
deps: vec![change1.hash],
diffs: Some(Diff::Map(MapDiff {
object_id: ObjectId::Root,
obj_type: MapType::Map,
props: hashmap! {
"todos".into() => hashmap!{
actor.op_id_at(1) => Diff::Seq(SeqDiff{
object_id: ObjectId::from(actor.op_id_at(1)),
obj_type: SequenceType::List,
edits: vec![DiffEdit::Insert{index: 0, elem_id: actor.op_id_at(2).into()}],
props: hashmap!{
0 => hashmap!{
actor.op_id_at(2) => Diff::Map(MapDiff{
object_id: "2@6caaa2e433de42ae9c3fa65c9ff3f03e".try_into().unwrap(),
obj_type: MapType::Map,
props: hashmap!{
"title".into() => hashmap!{
actor.op_id_at(3) => Diff::Value("water plants".into()),
},
"done".into() => hashmap!{
actor.op_id_at(4) => Diff::Value(false.into())
}
}
})
}
}
})
}
},
})),
};
let mut backend = Backend::new();
backend.load_changes(vec![change1]).unwrap();
let patch = backend.get_patch().unwrap();
assert_eq!(patch, expected_patch)
}
#[test]
fn test_includes_date_objects_at_root() {
let actor: ActorId = "90f5dd5d4f524e95ad5929e08d1194f1".try_into().unwrap();
let change1: Change = UncompressedChange {
actor_id: actor.clone(),
seq: 1,
start_op: 1,
time: 0,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![Op {
obj: ObjectId::Root,
action: amp::OpType::Set(ScalarValue::Timestamp(1_586_541_033_457)),
key: "now".into(),
pred: Vec::new(),
insert: false,
}],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
let expected_patch = Patch {
clock: hashmap! {
actor.clone() => 1,
},
max_op: 1,
pending_changes: 0,
actor: None,
seq: None,
deps: vec![change1.hash],
diffs: Some(Diff::Map(MapDiff {
object_id: ObjectId::Root,
obj_type: MapType::Map,
props: hashmap! {
"now".into() => hashmap!{
actor.op_id_at(1) => Diff::Value(ScalarValue::Timestamp(1_586_541_033_457))
}
},
})),
};
let mut backend = Backend::new();
backend.load_changes(vec![change1]).unwrap();
let patch = backend.get_patch().unwrap();
assert_eq!(patch, expected_patch)
}
#[test]
fn test_includes_date_objects_in_a_list() {
let actor: ActorId = "08b050f976a249349021a2e63d99c8e8".try_into().unwrap();
let change1: Change = UncompressedChange {
actor_id: actor.clone(),
seq: 1,
start_op: 1,
time: 0,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![
Op {
obj: ObjectId::Root,
action: amp::OpType::Make(amp::ObjType::list()),
key: "list".into(),
pred: Vec::new(),
insert: false,
},
Op {
obj: ObjectId::from(actor.op_id_at(1)),
action: amp::OpType::Set(ScalarValue::Timestamp(1_586_541_089_595)),
key: ElementId::Head.into(),
insert: true,
pred: Vec::new(),
},
],
extra_bytes: Vec::new(),
}
.try_into()
.unwrap();
let expected_patch = Patch {
clock: hashmap! {
actor.clone() => 1,
},
max_op: 2,
pending_changes: 0,
actor: None,
seq: None,
deps: vec![change1.hash],
diffs: Some(Diff::Map(MapDiff {
object_id: ObjectId::Root,
obj_type: MapType::Map,
props: hashmap! {
"list".into() => hashmap!{
actor.op_id_at(1) => Diff::Seq(SeqDiff{
object_id: ObjectId::from(actor.op_id_at(1)),
obj_type: SequenceType::List,
edits: vec![DiffEdit::Insert {index: 0, elem_id: actor.op_id_at(2).into()}],
props: hashmap!{
0 => hashmap!{
actor.op_id_at(2) => Diff::Value(ScalarValue::Timestamp(1_586_541_089_595))
}
}
})
}
},
})),
};
let mut backend = Backend::new();
backend.load_changes(vec![change1]).unwrap();
let patch = backend.get_patch().unwrap();
assert_eq!(patch, expected_patch)
}

View file

@ -0,0 +1,52 @@
use automerge_backend::Backend;
#[test]
fn test_load_index_out_of_bounds() {
// these are just random bytes
let bytes = vec![133, 111, 74, 131, 0, 46, 128, 0];
let _ = Backend::load(bytes);
}
#[test]
fn test_load_index_out_of_bounds_2() {
// these are just random bytes
let bytes = vec![
133, 111, 74, 131, 171, 99, 102, 54, 2, 16, 42, 0, 18, 255, 255, 61, 57, 57, 57, 29, 48,
48, 48, 116, 0, 0, 0, 46, 46,
];
let _ = Backend::load(bytes);
}
#[test]
fn test_load_index_out_of_bounds_3() {
// these are just random bytes
let bytes = vec![133, 111, 74, 131, 29, 246, 20, 11, 0, 2, 8, 61, 44];
let _ = Backend::load(bytes);
}
#[test]
fn test_load_leb_failed_to_read_whole_buffer() {
// these are just random bytes
let bytes = vec![133, 111, 74, 131, 46, 46, 46, 46, 46];
let _ = Backend::load(bytes);
}
#[test]
fn test_load_overflowing_add() {
// these are just random bytes
let bytes = vec![
133, 111, 74, 131, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 1,
16,
];
let _ = Backend::load(bytes);
}
#[test]
fn test_load_overflowing_sub() {
// these are just random bytes
let bytes = vec![
133, 111, 74, 131, 68, 193, 221, 243, 2, 16, 35, 80, 80, 10, 131, 0, 255, 28, 10, 0, 0, 65,
8, 0, 133, 0,
];
let _ = Backend::load(bytes);
}

2
automerge-c/.gitignore vendored Normal file
View file

@ -0,0 +1,2 @@
automerge
automerge.o

21
automerge-c/Cargo.toml Normal file
View file

@ -0,0 +1,21 @@
[package]
name = "automerge-c"
version = "0.1.0"
authors = ["Orion Henry <orion.henry@gmail.com>"]
edition = "2018"
[lib]
name = "automerge"
crate-type = ["cdylib", "staticlib"]
bench = false
[dependencies]
automerge-backend = { path = "../automerge-backend" }
automerge-protocol = { path = "../automerge-protocol" }
libc = "^0.2"
serde = "^1.0"
serde_json = "^1.0"
errno = "^0.2"
[build-dependencies]
cbindgen = "^0.14"

30
automerge-c/Makefile Normal file
View file

@ -0,0 +1,30 @@
CC=gcc
CFLAGS=-I.
DEPS=automerge.h
LIBS=-lpthread -ldl -lm
LDIR=../target/release
LIB=../target/release/libautomerge.a
DEBUG_LIB=../target/debug/libautomerge.a
all: automerge $(LIB)
debug: LDIR=../target/debug
debug: automerge $(DEBUG_LIB)
automerge: automerge.o $(LDIR)/libautomerge.a
$(CC) -o $@ automerge.o $(LDIR)/libautomerge.a $(LIBS) -L$(LDIR)
$(DEBUG_LIB): src/lib.rs
cargo build
$(LIB): src/lib.rs
cargo build --release
%.o: %.c $(DEPS)
$(CC) -c -o $@ $< $(CFLAGS)
.PHONY: clean
clean:
rm -f *.o automerge $(LIB) $(DEBUG_LIB)

258
automerge-c/automerge.c Normal file
View file

@ -0,0 +1,258 @@
#include <stdio.h>
#include <string.h>
#include <assert.h>
#include "automerge.h"
#define BUFSIZE 4096
void test_sync_basic() {
printf("begin sync test - basic\n");
int len;
// In a real application you would need to check to make sure your buffer is large enough for any given read
char buff[BUFSIZE];
Backend * dbA = automerge_init();
Backend * dbB = automerge_init();
SyncState * ssA = automerge_sync_state_init();
SyncState * ssB = automerge_sync_state_init();
len = automerge_generate_sync_message(dbA, ssA);
// In a real application, we would use `len` to allocate `buff` here
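// automerge_read_binary drains the message queued by the generate call above
// and returns the length of the next queued result (0 once the queue is
// empty), so len2 should be 0 here and is left unused.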
int len2 = automerge_read_binary(dbA, buff);
automerge_receive_sync_message(dbB, ssB, buff, len);
len = automerge_generate_sync_message(dbB, ssB);
// No more sync messages were generated
assert(len == 0);
}
void test_sync_encode_decode() {
printf("begin sync test - encode/decode\n");
int len;
char buff[BUFSIZE];
char sync_state_buff[BUFSIZE];
Backend * dbA = automerge_init();
Backend * dbB = automerge_init();
const char * requestA1 = "{\"actor\":\"111111\",\"seq\":1,\"time\":0,\"deps\":[],\"startOp\":1,\"ops\":[{\"action\":\"set\",\"obj\":\"_root\",\"key\":\"bird\",\"value\":\"magpie\",\"pred\":[]}]}";
const char * requestB1 = "{\"actor\":\"222222\",\"seq\":1,\"time\":0,\"deps\":[],\"startOp\":1,\"ops\":[{\"action\":\"set\",\"obj\":\"_root\",\"key\":\"bird\",\"value\":\"crow\",\"pred\":[]}]}";
automerge_apply_local_change(dbA, requestA1);
automerge_apply_local_change(dbB, requestB1);
SyncState * ssA = automerge_sync_state_init();
SyncState * ssB = automerge_sync_state_init();
len = automerge_generate_sync_message(dbA, ssA);
automerge_read_binary(dbA, buff);
automerge_receive_sync_message(dbB, ssB, buff, len);
len = automerge_generate_sync_message(dbB, ssB);
automerge_read_binary(dbB, buff);
automerge_receive_sync_message(dbA, ssA, buff, len);
len = automerge_generate_sync_message(dbA, ssA);
automerge_read_binary(dbA, buff);
automerge_receive_sync_message(dbB, ssB, buff, len);
len = automerge_generate_sync_message(dbB, ssB);
automerge_read_binary(dbB, buff);
automerge_receive_sync_message(dbA, ssA, buff, len);
len = automerge_generate_sync_message(dbA, ssA);
// Save the sync state
int encoded_len = automerge_encode_sync_state(dbB, ssB);
automerge_read_binary(dbB, sync_state_buff);
// Read it back
ssB = automerge_decode_sync_state(sync_state_buff, encoded_len);
len = automerge_generate_sync_message(dbB, ssB);
automerge_read_binary(dbB, buff);
automerge_receive_sync_message(dbA, ssA, buff, len);
len = automerge_generate_sync_message(dbA, ssA);
assert(len == 0);
}
void test_sync() {
printf("begin sync test");
test_sync_basic();
test_sync_encode_decode();
}
int main() {
int len;
// In a real application you would need to check to make sure your buffer is large enough for any given read
char buff[BUFSIZE];
char buff2[BUFSIZE];
char buff3[BUFSIZE];
printf("begin\n");
Backend * dbA = automerge_init();
Backend * dbB = automerge_init();
const char * requestA1 = "{\"actor\":\"111111\",\"seq\":1,\"time\":0,\"deps\":[],\"startOp\":1,\"ops\":[{\"action\":\"set\",\"obj\":\"_root\",\"key\":\"bird\",\"value\":\"magpie\",\"pred\":[]}]}";
const char * requestA2 = "{\"actor\":\"111111\",\"seq\":2,\"time\":0,\"deps\":[],\"startOp\":2,\"ops\":[{\"action\":\"set\",\"obj\":\"_root\",\"key\":\"dog\",\"value\":\"mastiff\",\"pred\":[]}]}";
const char * requestB1 = "{\"actor\":\"222222\",\"seq\":1,\"time\":0,\"deps\":[],\"startOp\":1,\"ops\":[{\"action\":\"set\",\"obj\":\"_root\",\"key\":\"bird\",\"value\":\"crow\",\"pred\":[]}]}";
const char * requestB2 = "{\"actor\":\"222222\",\"seq\":2,\"time\":0,\"deps\":[],\"startOp\":2,\"ops\":[{\"action\":\"set\",\"obj\":\"_root\",\"key\":\"cat\",\"value\":\"tabby\",\"pred\":[]}]}";
printf("*** requestA1 ***\n\n%s\n\n",requestA1);
len = automerge_get_last_local_change(dbA);
assert(len == -1);
printf("*** last_local expected error string ** (%s)\n\n",automerge_error(dbA));
len = automerge_apply_local_change(dbA, requestA1);
assert(len <= BUFSIZE);
automerge_read_json(dbA, buff);
printf("*** patchA1 ***\n\n%s\n\n",buff);
len = automerge_get_last_local_change(dbA);
assert(len > 0);
assert(len <= BUFSIZE);
len = automerge_read_binary(dbA, buff);
assert(len == 0);
len = automerge_apply_local_change(dbA, "{}");
assert(len == -1);
printf("*** patchA2 expected error string ** (%s)\n\n",automerge_error(dbA));
len = automerge_apply_local_change(dbA, requestA2);
assert(len <= BUFSIZE);
automerge_read_json(dbA, buff);
printf("*** patchA2 ***\n\n%s\n\n",buff);
len = automerge_apply_local_change(dbB, requestB1);
assert(len <= BUFSIZE);
automerge_read_json(dbB, buff);
printf("*** patchB1 ***\n\n%s\n\n",buff);
len = automerge_apply_local_change(dbB, requestB2);
assert(len <= BUFSIZE);
automerge_read_json(dbB, buff);
printf("*** patchB2 ***\n\n%s\n\n",buff);
printf("*** clone dbA -> dbC ***\n\n");
Backend * dbC = automerge_clone(dbA);
len = automerge_get_patch(dbA);
assert(len <= BUFSIZE);
automerge_read_json(dbA, buff);
len = automerge_get_patch(dbC);
assert(len <= BUFSIZE);
automerge_read_json(dbC, buff2);
// the json can serialize in different orders so I can't do a straight strcmp()
printf("*** get_patch of dbA & dbC -- equal? *** --> %s\n\n",strlen(buff) == strlen(buff2) ? "true" : "false");
assert(strlen(buff) == strlen(buff2));
len = automerge_save(dbA);
assert(len <= BUFSIZE);
automerge_read_binary(dbA, buff2);
printf("*** save dbA - %d bytes ***\n\n",len);
printf("*** load the save into dbD ***\n\n");
Backend * dbD = automerge_load(len, buff2);
len = automerge_get_patch(dbD);
assert(len <= BUFSIZE);
automerge_read_json(dbD, buff2);
printf("*** get_patch of dbA & dbD -- equal? *** --> %s\n\n",strlen(buff) == strlen(buff2) ? "true" : "false");
assert(strlen(buff) == strlen(buff2));
printf("*** copy changes from dbA to B ***\n\n");
len = automerge_get_changes_for_actor(dbA,"111111");
while (len > 0) {
assert(len <= BUFSIZE);
int nextlen = automerge_read_binary(dbA,buff);
automerge_write_change(dbB,len,buff);
// decode the change for debugging
// encode and decode could happen with either dbA or dbB,
// however encode needs to be done against dbB instead of dbA
// only because dbA is in the middle of iterating over some binary results
// and needs to finish before queuing another
automerge_decode_change(dbA,len,buff);
automerge_read_json(dbA, buff2);
printf("Change decoded to json -- %s\n",buff2);
automerge_encode_change(dbB,buff2);
automerge_read_binary(dbB,buff3);
assert(memcmp(buff,buff3,len) == 0);
len = nextlen;
}
automerge_apply_changes(dbB);
printf("*** get head from dbB ***\n\n");
int num_heads = 0;
len = automerge_get_heads(dbB);
while (len > 0) {
assert(len == 32);
int nextlen = automerge_read_binary(dbB,buff3 + (num_heads * 32));
num_heads++;
len = nextlen;
}
assert(num_heads == 2);
len = automerge_get_changes(dbB,num_heads,buff3);
assert(len == 0);
printf("*** copy changes from dbB to A ***\n\n");
len = automerge_get_changes_for_actor(dbB,"222222");
while (len > 0) {
assert(len <= BUFSIZE);
int nextlen = automerge_read_binary(dbB,buff);
automerge_write_change(dbA,len,buff);
len = nextlen;
}
automerge_apply_changes(dbA);
len = automerge_get_patch(dbA);
assert(len <= BUFSIZE);
automerge_read_json(dbA, buff);
len = automerge_get_patch(dbB);
assert(len <= BUFSIZE);
automerge_read_json(dbB, buff2);
printf("*** get_patch of dbA & dbB -- equal? *** --> %s\n\n",strlen(buff) == strlen(buff2) ? "true" : "false");
assert(strlen(buff) == strlen(buff2));
printf("*** copy changes from dbA to E using load ***\n\n");
Backend * dbE = automerge_init();
len = automerge_get_changes(dbA,0,NULL);
while (len > 0) {
assert(len <= BUFSIZE);
int nextlen = automerge_read_binary(dbA,buff);
automerge_write_change(dbE,len,buff);
len = nextlen;
}
automerge_load_changes(dbE);
len = automerge_get_patch(dbA);
assert(len <= BUFSIZE);
automerge_read_json(dbA, buff);
len = automerge_get_patch(dbE);
assert(len <= BUFSIZE);
automerge_read_json(dbE, buff2);
printf("*** get_patch of dbA & dbE -- equal? *** --> %s\n\n",strlen(buff) == strlen(buff2) ? "true" : "false");
assert(strlen(buff) == strlen(buff2));
len = automerge_get_missing_deps(dbE, num_heads, buff3);
automerge_read_json(dbE, buff); // [] - nothing missing
assert(strlen(buff) == 2);
test_sync();
printf("free resources\n");
automerge_free(dbA);
automerge_free(dbB);
automerge_free(dbC);
automerge_free(dbD);
automerge_free(dbE);
printf("end\n");
}

183
automerge-c/automerge.h Normal file
View file

@ -0,0 +1,183 @@
#ifndef automerge_h
#define automerge_h
/* Warning, this file is autogenerated by cbindgen. Don't modify this manually. */
#include <stdint.h>
#include <stdbool.h>
typedef struct Backend Backend;
typedef struct SyncState SyncState;
/**
* # Safety
 * This must be called with a valid backend pointer
*/
intptr_t automerge_apply_changes(Backend *backend);
/**
* # Safety
 * This must be called with a valid backend pointer
 * request must be a valid pointer to a C string
*/
intptr_t automerge_apply_local_change(Backend *backend, const char *request);
/**
* # Safety
 * This must be called with a valid backend pointer
*/
Backend *automerge_clone(Backend *backend);
/**
* # Safety
 * This must be called with a valid pointer to a change and the correct len
*/
intptr_t automerge_decode_change(Backend *backend, uintptr_t len, const uint8_t *change);
/**
* # Safety
* `encoded_state_[ptr|len]` must be the address & length of a byte array
* Returns an opaque pointer to a SyncState
 * panics (which may surface as a segfault across the FFI boundary) if the buffer was invalid
*/
SyncState *automerge_decode_sync_state(const uint8_t *encoded_state_ptr, uintptr_t encoded_state_len);
/**
* # Safety
 * This must be called with a valid pointer to a json string of a change
*/
intptr_t automerge_encode_change(Backend *backend, const char *change);
/**
* # Safety
* Must be called with a valid backend pointer
* sync_state must be a valid pointer to a SyncState
* Returns an `isize` indicating the length of the binary message
* (-1 if there was an error)
*/
intptr_t automerge_encode_sync_state(Backend *backend, SyncState *sync_state);
/**
* # Safety
 * This must be called with a valid backend pointer
*/
const char *automerge_error(Backend *backend);
/**
* # Safety
 * This must be called with a valid backend pointer
*/
void automerge_free(Backend *backend);
/**
* # Safety
* Must be called with a valid backend pointer
* sync_state must be a valid pointer to a SyncState
* Returns an `isize` indicating the length of the binary message
* (-1 if there was an error, 0 if there is no message)
*/
intptr_t automerge_generate_sync_message(Backend *backend, SyncState *sync_state);
/**
* # Safety
 * This must be called with a valid backend pointer
* binary must be a valid pointer to len bytes
*/
intptr_t automerge_get_changes(Backend *backend, uintptr_t len, const uint8_t *binary);
/**
* # Safety
 * This must be called with a valid backend pointer
*/
intptr_t automerge_get_changes_for_actor(Backend *backend, const char *actor);
/**
* # Safety
 * This must be called with a valid backend pointer
*/
intptr_t automerge_get_heads(Backend *backend);
/**
* # Safety
 * This must be called with a valid pointer to a backend
 * the automerge api changed to return both a change and a patch;
 * this C api was not designed to return mixed values, so I borrowed the
 * get_last_local_change call from the javascript api to solve the same problem
*/
intptr_t automerge_get_last_local_change(Backend *backend);
/**
* # Safety
 * This must be called with a valid backend pointer
* binary must be a valid pointer to len bytes
*/
intptr_t automerge_get_missing_deps(Backend *backend, uintptr_t len, const uint8_t *binary);
/**
* # Safety
 * This must be called with a valid backend pointer
*/
intptr_t automerge_get_patch(Backend *backend);
Backend *automerge_init(void);
/**
* # Safety
* data pointer must be a valid pointer to len bytes
*/
Backend *automerge_load(uintptr_t len, const uint8_t *data);
/**
* # Safety
 * This must be called with a valid backend pointer
*/
intptr_t automerge_load_changes(Backend *backend);
/**
* # Safety
*
 * This must be called with a valid backend pointer
 * the buffer must point to at least as much space as was
* required by the previous binary result call
*/
intptr_t automerge_read_binary(Backend *backend, uint8_t *buffer);
/**
* # Safety
 * This must be called with a valid backend pointer
 * and buffer must point to at least the number of bytes returned by the previous
 * call that generated a json result
*/
intptr_t automerge_read_json(Backend *backend, char *buffer);
/**
* # Safety
* Must be called with a valid backend pointer
* sync_state must be a valid pointer to a SyncState
* `encoded_msg_[ptr|len]` must be the address & length of a byte array
*/
intptr_t automerge_receive_sync_message(Backend *backend, SyncState *sync_state, const uint8_t *encoded_msg_ptr, uintptr_t encoded_msg_len);
/**
* # Safety
 * This must be called with a valid backend pointer
*/
intptr_t automerge_save(Backend *backend);
/**
* # Safety
* sync_state must be a valid pointer to a SyncState
*/
void automerge_sync_state_free(SyncState *sync_state);
SyncState *automerge_sync_state_init(void);
/**
* # Safety
 * This must be called with a valid backend pointer
* change must point to a valid memory location with at least len bytes
*/
void automerge_write_change(Backend *backend, uintptr_t len, const uint8_t *change);
#endif /* automerge_h */

16
automerge-c/build.rs Normal file
View file

@ -0,0 +1,16 @@
extern crate cbindgen;
use std::{env, path::PathBuf};
fn main() {
let crate_dir = PathBuf::from(
env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR env var is not defined"),
);
let config = cbindgen::Config::from_file("cbindgen.toml")
.expect("Unable to find cbindgen.toml configuration file");
if let Ok(writer) = cbindgen::generate_with_config(&crate_dir, config) {
writer.write_to_file(crate_dir.join("automerge.h"));
}
}

View file

@ -0,0 +1,8 @@
include_guard = "automerge_h"
autogen_warning = "/* Warning, this file is autogenerated by cbindgen. Don't modify this manually. */"
language = "C"
includes = []
sys_includes = ["stdint.h", "stdbool.h"]
no_includes = true
line_length = 140

561
automerge-c/src/lib.rs Normal file
View file

@ -0,0 +1,561 @@
extern crate automerge_backend;
extern crate errno;
extern crate libc;
extern crate serde;
use core::fmt::Debug;
use std::{
convert::TryInto,
ffi::{CStr, CString},
ops::{Deref, DerefMut},
os::raw::c_char,
ptr,
};
use automerge_backend::{AutomergeError, Change};
use automerge_protocol::{ChangeHash, UncompressedChange};
use errno::{set_errno, Errno};
use serde::ser::Serialize;
#[derive(Clone)]
pub struct Backend {
handle: automerge_backend::Backend,
text: Option<String>,
last_local_change: Option<Change>,
binary: Vec<Vec<u8>>,
queue: Option<Vec<Vec<u8>>>,
error: Option<CString>,
}
struct BinaryResults(Result<Vec<Vec<u8>>, AutomergeError>);
impl Deref for Backend {
type Target = automerge_backend::Backend;
fn deref(&self) -> &Self::Target {
&self.handle
}
}
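// Safety (assumed contract): `ptr` must be valid for reads of `elts` values
// of T; this mirrors the copy-into-a-fresh-Vec pattern from the std docs.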
unsafe fn from_buf_raw<T>(ptr: *const T, elts: usize) -> Vec<T> {
let mut dst = Vec::with_capacity(elts);
dst.set_len(elts);
ptr::copy(ptr, dst.as_mut_ptr(), elts);
dst
}
fn err<T, V: Debug>(result: Result<T, V>) -> Result<T, String> {
match result {
Ok(val) => Ok(val),
Err(err) => Err(format!("{:?}", err)),
}
}
impl Backend {
fn init(handle: automerge_backend::Backend) -> Backend {
Backend {
handle,
text: None,
last_local_change: None,
binary: Vec::new(),
queue: None,
error: None,
}
}
fn handle_result(&mut self, result: Result<isize, String>) -> isize {
match result {
Ok(len) => {
self.error = None;
len
}
Err(err) => self.handle_error(err),
}
}
fn generate_json<T: Serialize>(&mut self, val: Result<T, AutomergeError>) -> isize {
let result = err(val)
.and_then(|val| err(serde_json::to_string(&val)))
.map(|text| {
let len = (text.len() + 1) as isize;
self.text = Some(text);
len
});
self.handle_result(result)
}
fn handle_binary(&mut self, b: Result<Vec<u8>, AutomergeError>) -> isize {
let result = err(b).map(|bin| {
let len = bin.len();
self.binary = vec![bin];
len as isize
});
self.handle_result(result)
}
fn handle_ok(&mut self) -> isize {
self.error = None;
0
}
fn handle_error<E: Debug>(&mut self, err: E) -> isize {
// in theory, if an error string had embedded nuls,
// we could end up with error = None and -1
self.error = CString::new(format!("{:?}", err)).ok();
-1
}
fn handle_binaries(&mut self, b: BinaryResults) -> isize {
let result = err(b.0).map(|bin| {
self.error = None;
if !bin.is_empty() {
let len = bin[0].len();
self.binary = bin;
self.binary.reverse();
len as isize
} else {
0
}
});
self.handle_result(result)
}
}
impl DerefMut for Backend {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.handle
}
}
impl From<Backend> for *mut Backend {
fn from(b: Backend) -> Self {
Box::into_raw(Box::new(b))
}
}
impl From<Vec<&Change>> for BinaryResults {
fn from(changes: Vec<&Change>) -> Self {
BinaryResults(Ok(changes.iter().map(|b| b.raw_bytes().into()).collect()))
}
}
impl From<Result<Vec<&Change>, AutomergeError>> for BinaryResults {
fn from(result: Result<Vec<&Change>, AutomergeError>) -> Self {
BinaryResults(result.map(|changes| changes.iter().map(|b| b.raw_bytes().into()).collect()))
}
}
impl From<Vec<ChangeHash>> for BinaryResults {
fn from(heads: Vec<ChangeHash>) -> Self {
BinaryResults(Ok(heads.iter().map(|head| head.0.to_vec()).collect()))
}
}
/*
init => automerge_init
clone => automerge_clone
free => automerge_free
save => automerge_save
load => automerge_load
applyLocalChange => automerge_apply_local_change
getPatch => automerge_get_patch
applyChanges => automerge_apply_changes
loadChanges => automerge_load_changes
getChangesForActor => automerge_get_changes_for_actor
getChanges => automerge_get_changes
getMissingDeps => automerge_get_missing_deps
*/
#[no_mangle]
pub extern "C" fn automerge_init() -> *mut Backend {
Backend::init(automerge_backend::Backend::new()).into()
}
/// # Safety
/// This must be called with a valid backend pointer
#[no_mangle]
pub unsafe extern "C" fn automerge_free(backend: *mut Backend) {
let backend: Backend = *Box::from_raw(backend);
drop(backend)
}
/// # Safety
/// This must be called with a valid backend pointer
/// request must be a valid pointer to a C string
#[no_mangle]
pub unsafe extern "C" fn automerge_apply_local_change(
backend: *mut Backend,
request: *const c_char,
) -> isize {
let request: &CStr = CStr::from_ptr(request);
let request = request.to_string_lossy();
let request: Result<UncompressedChange, _> = serde_json::from_str(&request);
match request {
Ok(request) => {
let result = (*backend).apply_local_change(request);
match result {
Ok((patch, change)) => {
(*backend).last_local_change = Some(change);
(*backend).generate_json(Ok(patch))
}
Err(err) => (*backend).handle_error(err),
}
}
Err(err) => (*backend).handle_error(err),
}
}
/// # Safety
/// This must be called with a valid backend pointer
/// change must point to a valid memory location with at least len bytes
#[no_mangle]
pub unsafe extern "C" fn automerge_write_change(
backend: *mut Backend,
len: usize,
change: *const u8,
) {
let bytes = from_buf_raw(change, len);
if let Some(ref mut queue) = (*backend).queue {
queue.push(bytes)
} else {
(*backend).queue = Some(vec![bytes])
}
}
/// # Safety
/// This must be called with a valid backend pointer
#[no_mangle]
pub unsafe extern "C" fn automerge_apply_changes(backend: *mut Backend) -> isize {
match (*backend).queue.take() {
Some(changes) => {
let changes = changes
.iter()
.map(|c| Change::from_bytes(c.to_vec()).unwrap())
.collect();
let patch = (*backend).apply_changes(changes);
(*backend).generate_json(patch)
}
None => (*backend).handle_error("no changes queued"),
}
}
/// # Safety
/// This must be called with a valid backend pointer
#[no_mangle]
pub unsafe extern "C" fn automerge_get_patch(backend: *mut Backend) -> isize {
let patch = (*backend).get_patch();
(*backend).generate_json(patch)
}
/// # Safety
/// This must be called with a valid backend pointer
#[no_mangle]
pub unsafe extern "C" fn automerge_load_changes(backend: *mut Backend) -> isize {
if let Some(changes) = (*backend).queue.take() {
let changes = changes
.iter()
.map(|c| Change::from_bytes(c.to_vec()).unwrap())
.collect();
if (*backend).load_changes(changes).is_ok() {
return (*backend).handle_ok();
}
}
(*backend).handle_error("no changes queued")
}
/// # Safety
/// This must be called with a valid backend pointer
#[no_mangle]
pub unsafe extern "C" fn automerge_clone(backend: *mut Backend) -> *mut Backend {
(*backend).clone().into()
}
/// # Safety
/// This must be called with a valid backend pointer
#[no_mangle]
pub unsafe extern "C" fn automerge_save(backend: *mut Backend) -> isize {
let data = (*backend).save();
(*backend).handle_binary(data)
}
/// # Safety
/// data pointer must be a valid pointer to len bytes
#[no_mangle]
pub unsafe extern "C" fn automerge_load(len: usize, data: *const u8) -> *mut Backend {
let bytes = from_buf_raw(data, len);
let result = automerge_backend::Backend::load(bytes);
if let Ok(backend) = result {
Backend::init(backend).into()
} else {
set_errno(Errno(1));
ptr::null_mut()
}
}
/// # Safety
/// This must be called with a valid backend pointer
#[no_mangle]
pub unsafe extern "C" fn automerge_get_changes_for_actor(
backend: *mut Backend,
actor: *const c_char,
) -> isize {
let actor: &CStr = CStr::from_ptr(actor);
let actor = actor.to_string_lossy();
match actor.as_ref().try_into() {
Ok(actor) => {
let changes = (*backend).get_changes_for_actor_id(&actor);
(*backend).handle_binaries(changes.into())
}
Err(err) => (*backend).handle_error(err),
}
}
/// # Safety
/// This must be called with a valid pointer to a change and the correct len
#[no_mangle]
pub unsafe extern "C" fn automerge_decode_change(
backend: *mut Backend,
len: usize,
change: *const u8,
) -> isize {
let bytes = from_buf_raw(change, len);
let change = Change::from_bytes(bytes).unwrap();
(*backend).generate_json(Ok(change.decode()))
}
/// # Safety
/// This must be called with a valid pointer to a json string of a change
#[no_mangle]
pub unsafe extern "C" fn automerge_encode_change(
backend: *mut Backend,
change: *const c_char,
) -> isize {
let change: &CStr = CStr::from_ptr(change);
let change = change.to_string_lossy();
let uncomp_change: UncompressedChange = serde_json::from_str(&change).unwrap();
let change: Change = uncomp_change.try_into().unwrap();
(*backend).handle_binary(Ok(change.raw_bytes().into()))
}
/// # Safety
/// This must be called with a valid pointer to a backend.
/// The automerge API changed to return both a change and a patch; this C API
/// was not designed to return mixed values, so I borrowed the
/// get_last_local_change call from the javascript API to solve the same problem.
#[no_mangle]
pub unsafe extern "C" fn automerge_get_last_local_change(backend: *mut Backend) -> isize {
match (*backend).last_local_change.as_ref() {
Some(change) => (*backend).handle_binary(Ok(change.raw_bytes().into())),
None => (*backend).handle_error("no last change"),
}
}
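#[cfg(test)]
mod last_local_change_example {
    // Sketch of the pattern above, written as a Rust test for illustration (a
    // real consumer would be C code linking against this library). After a
    // successful `automerge_apply_local_change`, the binary encoding of that
    // change can be fetched with `automerge_get_last_local_change` followed by
    // `automerge_read_binary`. Here we only exercise the error path, reported
    // as -1 per the conventions documented below, since building a valid
    // change-request JSON inline would obscure the point.
    use super::*;

    #[test]
    fn no_local_change_is_an_error() {
        unsafe {
            let backend = automerge_init();
            // Nothing has been applied yet, so there is no last local change.
            assert_eq!(automerge_get_last_local_change(backend), -1);
            automerge_free(backend);
        }
    }
}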
/// # Safety
/// This must be called with a valid backend pointer
#[no_mangle]
pub unsafe extern "C" fn automerge_get_heads(backend: *mut Backend) -> isize {
let heads = (*backend).get_heads();
(*backend).handle_binaries(heads.into())
}
/// # Safety
/// This must be called with a valid backend pointer
/// binary must be a valid pointer to `len` 32-byte change hashes
#[no_mangle]
pub unsafe extern "C" fn automerge_get_changes(
backend: *mut Backend,
len: usize,
binary: *const u8,
) -> isize {
let mut have_deps = Vec::new();
for i in 0..len {
have_deps.push(
from_buf_raw(binary.offset(i as isize * 32), 32)
.as_slice()
.try_into()
.unwrap(),
)
}
let changes = (*backend).get_changes(&have_deps);
(*backend).handle_binaries(Ok(changes).into())
}
/// # Safety
/// This must be called with a valid backend pointer
/// binary must be a valid pointer to `len` 32-byte change hashes
#[no_mangle]
pub unsafe extern "C" fn automerge_get_missing_deps(
backend: *mut Backend,
len: usize,
binary: *const u8,
) -> isize {
let mut heads = Vec::new();
for i in 0..len {
heads.push(
from_buf_raw(binary.offset(i as isize * 32), 32)
.as_slice()
.try_into()
.unwrap(),
)
}
let missing = (*backend).get_missing_deps(&heads);
(*backend).generate_json(Ok(missing))
}
/// # Safety
/// This must be called with a valid backend pointer
#[no_mangle]
pub unsafe extern "C" fn automerge_error(backend: *mut Backend) -> *const c_char {
(*backend)
.error
.as_ref()
.map(|e| e.as_ptr())
.unwrap_or_else(|| ptr::null_mut())
}
/// # Safety
/// This must be called with a valid backend pointer,
/// and buffer must be a valid pointer to at least one byte more than the number of bytes
/// returned by the previous call that generated a json result (the extra byte holds the
/// null terminator)
#[no_mangle]
pub unsafe extern "C" fn automerge_read_json(backend: *mut Backend, buffer: *mut c_char) -> isize {
if let Some(text) = &(*backend).text {
let len = text.len();
buffer.copy_from(text.as_ptr().cast(), len);
(*buffer.add(len)) = 0; // null terminate
(*backend).text = None;
0
} else {
(*buffer) = 0;
(*backend).handle_error("no json to be read")
}
}
/// # Safety
///
/// This must be called with a valid backend pointer
/// the buffer must be a valid pointer pointing to at least as much space as was
/// required by the previous binary result call
#[no_mangle]
pub unsafe extern "C" fn automerge_read_binary(backend: *mut Backend, buffer: *mut u8) -> isize {
if let Some(bin) = (*backend).binary.pop() {
let len = bin.len();
buffer.copy_from(bin.as_ptr(), len);
if let Some(next) = (*backend).binary.last() {
next.len() as isize
} else {
0
}
} else {
(*backend).handle_error("no binary to be read")
}
}
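#[cfg(test)]
mod read_json_example {
    // Sketch of the two-step result protocol used throughout this API: calls
    // such as `automerge_get_patch` stash a JSON result and return its length
    // (-1 on error), and `automerge_read_json` then copies it into a buffer
    // the caller allocates from that length. Written as a Rust test for
    // illustration; a real consumer would be C code.
    use super::*;

    #[test]
    fn get_patch_then_read_json() {
        unsafe {
            let backend = automerge_init();
            let len = automerge_get_patch(backend);
            assert!(len >= 0);
            // One extra byte for the null terminator written by `automerge_read_json`.
            let mut buf = vec![0u8; len as usize + 1];
            automerge_read_json(backend, buf.as_mut_ptr().cast());
            automerge_free(backend);
        }
    }
}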
#[derive(Debug)]
pub struct SyncState {
handle: automerge_backend::SyncState,
}
impl From<SyncState> for *mut SyncState {
fn from(s: SyncState) -> Self {
Box::into_raw(Box::new(s))
}
}
/// # Safety
/// Must be called with a valid backend pointer
/// sync_state must be a valid pointer to a SyncState
/// `encoded_msg_[ptr|len]` must be the address & length of a byte array
// Returns an `isize` indicating the length of the patch as a JSON string
// (-1 if there was an error, 0 if there is no patch)
#[no_mangle]
pub unsafe extern "C" fn automerge_receive_sync_message(
backend: *mut Backend,
sync_state: &mut SyncState,
encoded_msg_ptr: *const u8,
encoded_msg_len: usize,
) -> isize {
let slice = std::slice::from_raw_parts(encoded_msg_ptr, encoded_msg_len);
let decoded = automerge_backend::SyncMessage::decode(slice);
let msg = match decoded {
Ok(msg) => msg,
Err(e) => {
return (*backend).handle_error(e);
}
};
let patch = (*backend).receive_sync_message(&mut sync_state.handle, msg);
if let Ok(None) = patch {
0
} else {
(*backend).generate_json(patch)
}
}
/// # Safety
/// Must be called with a valid backend pointer
/// sync_state must be a valid pointer to a SyncState
/// Returns an `isize` indicating the length of the binary message
/// (-1 if there was an error, 0 if there is no message)
#[no_mangle]
pub unsafe extern "C" fn automerge_generate_sync_message(
backend: *mut Backend,
sync_state: &mut SyncState,
) -> isize {
let msg = (*backend).generate_sync_message(&mut sync_state.handle);
if let Some(msg) = msg {
(*backend).handle_binary(msg.encode().or(Err(AutomergeError::EncodeFailed)))
} else {
0
}
}
#[no_mangle]
pub extern "C" fn automerge_sync_state_init() -> *mut SyncState {
let state = SyncState {
handle: automerge_backend::SyncState::default(),
};
state.into()
}
/// # Safety
/// Must be called with a valid backend pointer
/// sync_state must be a valid pointer to a SyncState
/// Returns an `isize` indicating the length of the binary message
/// (-1 if there was an error)
#[no_mangle]
pub unsafe extern "C" fn automerge_encode_sync_state(
backend: *mut Backend,
sync_state: &mut SyncState,
) -> isize {
(*backend).handle_binary(
sync_state
.handle
.encode()
.or(Err(AutomergeError::EncodeFailed)),
)
}
/// # Safety
/// `encoded_state_[ptr|len]` must be the address & length of a byte array
/// Returns an opaque pointer to a SyncState
/// panics if the bytes do not decode into a valid sync state
#[no_mangle]
pub unsafe extern "C" fn automerge_decode_sync_state(
encoded_state_ptr: *const u8,
encoded_state_len: usize,
) -> *mut SyncState {
let slice = std::slice::from_raw_parts(encoded_state_ptr, encoded_state_len);
let decoded_state = automerge_backend::SyncState::decode(slice);
// TODO: Is there a way to avoid `unwrap` here?
let state = decoded_state.unwrap();
let state = SyncState { handle: state };
state.into()
}
/// # Safety
/// sync_state must be a valid pointer to a SyncState
#[no_mangle]
pub unsafe extern "C" fn automerge_sync_state_free(sync_state: *mut SyncState) {
let sync_state: SyncState = *Box::from_raw(sync_state);
drop(sync_state);
}
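#[cfg(test)]
mod sync_example {
    // Sketch of one direction of the sync protocol above: peer `a` generates
    // a message (the call returns its length, or 0 when there is nothing to
    // send), the caller reads it out with `automerge_read_binary`, and peer
    // `b` receives it. A full sync repeats this in both directions until both
    // peers return 0. Illustrative only; a real consumer would be C code.
    use super::*;

    #[test]
    fn one_sync_round() {
        unsafe {
            let a = automerge_init();
            let b = automerge_init();
            let state_a = automerge_sync_state_init();
            let state_b = automerge_sync_state_init();

            let len = automerge_generate_sync_message(a, &mut *state_a);
            if len > 0 {
                let mut msg = vec![0u8; len as usize];
                automerge_read_binary(a, msg.as_mut_ptr());
                automerge_receive_sync_message(b, &mut *state_b, msg.as_ptr(), len as usize);
            }

            automerge_sync_state_free(state_a);
            automerge_sync_state_free(state_b);
            automerge_free(a);
            automerge_free(b);
        }
    }
}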

27
automerge-cli/Cargo.toml Normal file
View file

@ -0,0 +1,27 @@
[package]
name = "automerge-cli"
version = "0.1.0"
authors = ["Alex Good <alex@memoryandthought.me>"]
edition = "2018"
[[bin]]
name = "automerge"
path = "src/main.rs"
bench = false
[dependencies]
clap = "3.0.0-beta.2"
serde_json = "^1.0"
anyhow = "1.0"
atty = "0.2"
thiserror = "1.0.16"
combine = "4.5.2"
maplit = "1.0.2"
colored_json = "2.1.0"
automerge-backend = { path = "../automerge-backend" }
automerge-frontend = { path = "../automerge-frontend" }
automerge-protocol = { path = "../automerge-protocol" }
[dev-dependencies]
duct = "0.13"

View file

@ -1,4 +1,5 @@
use automerge as am;
use automerge_backend as amb;
use automerge_frontend as amf;
use combine::{parser::char as charparser, EasyParser, ParseError, Parser};
use thiserror::Error;
@ -14,7 +15,12 @@ pub enum ChangeError {
#[error("Error loading changes: {:?}", source)]
ErrApplyingInitialChanges {
#[source]
source: am::AutomergeError,
source: amb::AutomergeError,
},
#[error("Some changes were invalid: {:?}", source)]
InvalidChangeRequest {
#[from]
source: amf::InvalidChangeRequest,
},
#[error("Error writing changes to output file: {:?}", source)]
ErrWritingChanges {
@ -134,7 +140,7 @@ where
op_parser()
.skip(charparser::spaces())
.skip(charparser::string("$"))
.and(path_segment_parser(am::Path::root())),
.and(path_segment_parser(amf::Path::root())),
)
.skip(charparser::spaces())
.then(|(operation, path)| {
@ -169,17 +175,29 @@ pub fn change(
mut writer: impl std::io::Write,
script: &str,
) -> Result<(), ChangeError> {
let mut backend = amb::Backend::new();
let mut buf: Vec<u8> = Vec::new();
reader
.read_to_end(&mut buf)
.map_err(|e| ChangeError::ErrReadingChanges { source: e })?;
let backend = am::Automerge::load(&buf)
let changes = amb::Change::load_document(&buf)
.map_err(|e| ChangeError::ErrApplyingInitialChanges { source: e })?;
let mut frontend = amf::Frontend::new();
let patch = backend
.apply_changes(changes)
.map_err(|e| ChangeError::ErrApplyingInitialChanges { source: e })?;
// This unwrap should be fine, we've generated the patch ourselves, if it's invalid then
// there's no way for the user to recover
frontend.apply_patch(patch).unwrap();
let local_change = parse_change_script(script)?;
let ((), new_changes) = frontend.change::<_, _, amf::InvalidChangeRequest>(None, |d| {
d.add_change(local_change)?;
Ok(())
})?;
if let Some(c) = new_changes {
// The user can't do anything to recover if this fails so we unwrap
backend.apply_local_change(c).unwrap();
}
let change_bytes = backend.save().unwrap();
writer
.write_all(&change_bytes)

View file

@ -1,55 +1,47 @@
use automerge as am;
use automerge_backend as amb;
use automerge_protocol::UncompressedChange;
use thiserror::Error;
use crate::{color_json::print_colored_json, SkipVerifyFlag};
#[derive(Error, Debug)]
pub enum ExamineError {
#[error("Error reading change file: {:?}", source)]
ReadingChanges {
ErrReadingChanges {
#[source]
source: std::io::Error,
},
#[error("Error loading changes: {:?}", source)]
ApplyingInitialChanges {
ErrApplyingInitialChanges {
#[source]
source: am::AutomergeError,
source: amb::AutomergeError,
},
#[error("Error writing to output: {:?}", source)]
WritingToOutput {
ErrWritingToOutput {
#[source]
source: std::io::Error,
},
}
pub(crate) fn examine(
pub fn examine(
mut input: impl std::io::Read,
mut output: impl std::io::Write,
skip: SkipVerifyFlag,
is_tty: bool,
) -> Result<(), ExamineError> {
let mut buf: Vec<u8> = Vec::new();
input
.read_to_end(&mut buf)
.map_err(|e| ExamineError::ReadingChanges { source: e })?;
let doc = skip
.load(&buf)
.map_err(|e| ExamineError::ApplyingInitialChanges { source: e })?;
let uncompressed_changes: Vec<_> = doc
.get_changes(&[])
.unwrap()
.iter()
.map(|c| c.decode())
.collect();
.map_err(|e| ExamineError::ErrReadingChanges { source: e })?;
let changes = amb::Change::load_document(&buf)
.map_err(|e| ExamineError::ErrApplyingInitialChanges { source: e })?;
let uncompressed_changes: Vec<UncompressedChange> =
changes.iter().map(|c| c.decode()).collect();
if is_tty {
let json_changes = serde_json::to_value(uncompressed_changes).unwrap();
print_colored_json(&json_changes).unwrap();
writeln!(output).unwrap();
colored_json::write_colored_json(&json_changes, &mut output).unwrap();
} else {
let json_changes = serde_json::to_string_pretty(&uncompressed_changes).unwrap();
output
.write_all(&json_changes.into_bytes())
.map_err(|e| ExamineError::WritingToOutput { source: e })?;
.map_err(|e| ExamineError::ErrWritingToOutput { source: e })?;
}
Ok(())
}

View file

@ -0,0 +1,93 @@
use anyhow::Result;
fn get_state_json(input_data: Vec<u8>) -> Result<serde_json::Value> {
let mut backend = automerge_backend::Backend::new();
let changes = automerge_backend::Change::load_document(&input_data)?;
let patch = backend.apply_changes(changes)?;
let mut frontend = automerge_frontend::Frontend::new();
frontend.apply_patch(patch)?;
Ok(frontend.state().to_json())
}
pub fn export_json(
mut changes_reader: impl std::io::Read,
mut writer: impl std::io::Write,
is_tty: bool,
) -> Result<()> {
let mut input_data = vec![];
changes_reader.read_to_end(&mut input_data)?;
let state_json = get_state_json(input_data)?;
if is_tty {
colored_json::write_colored_json(&state_json, &mut writer).unwrap()
} else {
writeln!(
writer,
"{}",
serde_json::to_string_pretty(&state_json).unwrap()
)?;
}
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn cli_export_with_empty_input() {
assert_eq!(get_state_json(vec![]).unwrap(), serde_json::json!({}))
}
#[test]
fn cli_export_with_flat_map() {
let initial_state_json: serde_json::Value =
serde_json::from_str(r#"{"sparrows": 15.0}"#).unwrap();
let value: automerge_frontend::Value =
automerge_frontend::Value::from_json(&initial_state_json);
let (_, initial_change) =
automerge_frontend::Frontend::new_with_initial_state(value).unwrap();
let mut backend = automerge_backend::Backend::new();
backend.apply_local_change(initial_change).unwrap();
let change_bytes = backend.save().unwrap();
assert_eq!(
get_state_json(change_bytes).unwrap(),
serde_json::json!({"sparrows": 15.0})
)
}
#[test]
fn cli_export_with_nested_map() {
let initial_state_json: serde_json::Value = serde_json::from_str(
r#"{
"birds": {
"wrens": 3.0,
"sparrows": 15.0
}
}"#,
)
.unwrap();
let value: automerge_frontend::Value =
automerge_frontend::Value::from_json(&initial_state_json);
let (_, initial_change) =
automerge_frontend::Frontend::new_with_initial_state(value).unwrap();
let mut backend = automerge_backend::Backend::new();
backend.apply_local_change(initial_change).unwrap();
let change_bytes = backend.save().unwrap();
assert_eq!(
get_state_json(change_bytes).unwrap(),
serde_json::json!({
"birds": {
"wrens": 3.0,
"sparrows": 15.0
}
})
)
}
}

View file

@ -0,0 +1,23 @@
use anyhow::Result;
use automerge_backend::Backend;
use automerge_frontend::{Frontend, Value};
fn initialize_from_json(json_value: &serde_json::Value) -> Result<Vec<u8>> {
let value: Value = Value::from_json(&json_value);
let (_, initial_change) = Frontend::new_with_initial_state(value)?;
let mut backend = Backend::new();
backend.apply_local_change(initial_change)?;
Ok(backend.save()?)
}
pub fn import_json(mut reader: impl std::io::Read, mut writer: impl std::io::Write) -> Result<()> {
let mut buffer = String::new();
reader.read_to_string(&mut buffer)?;
let json_value: serde_json::Value = serde_json::from_str(&buffer)?;
let changes_bytes = initialize_from_json(&json_value)?;
writer.write_all(&changes_bytes)?;
Ok(())
}

182
automerge-cli/src/main.rs Normal file
View file

@ -0,0 +1,182 @@
use std::{fs::File, path::PathBuf, str::FromStr};
use anyhow::{anyhow, Result};
use clap::Clap;
mod change;
mod examine;
mod export;
mod import;
#[derive(Debug, Clap)]
#[clap(about = "Automerge CLI")]
struct Opts {
#[clap(subcommand)]
cmd: Command,
}
#[derive(Debug)]
enum ExportFormat {
Json,
Toml,
}
impl FromStr for ExportFormat {
type Err = anyhow::Error;
fn from_str(input: &str) -> Result<ExportFormat> {
match input {
"json" => Ok(ExportFormat::Json),
"toml" => Ok(ExportFormat::Toml),
_ => Err(anyhow!("Invalid export format: {}", input)),
}
}
}
#[derive(Debug, Clap)]
enum Command {
/// Output current state of an Automerge document in a specified format
Export {
/// Format for output: json, toml
#[clap(long, short, default_value = "json")]
format: ExportFormat,
/// Path that contains Automerge changes
#[clap(parse(from_os_str))]
changes_file: Option<PathBuf>,
},
Import {
/// Format for input: json, toml
#[clap(long, short, default_value = "json")]
format: ExportFormat,
#[clap(parse(from_os_str))]
input_file: Option<PathBuf>,
/// Path to write Automerge changes to
#[clap(parse(from_os_str), long("out"), short('o'))]
changes_file: Option<PathBuf>,
},
/// Read an automerge document from a file or stdin, perform a change on it and write a new
/// document to stdout or the specified output file.
Change {
/// The change script to perform. Change scripts have the form <command> <path> [<JSON value>].
/// The possible commands are 'set', 'insert', 'delete', and 'increment'.
///
/// Paths look like this: $["mapkey"][0]. They always start with a '$', then each
/// subsequent segment of the path is either a string in double quotes to index a key in a
/// map, or an integer index to address an array element.
///
/// Examples
///
/// ## set
///
/// > automerge change 'set $["someobject"] {"items": []}' somefile
///
/// ## insert
///
/// > automerge change 'insert $["someobject"]["items"][0] "item1"' somefile
///
/// ## increment
///
/// > automerge change 'increment $["mycounter"]'
///
/// ## delete
///
/// > automerge change 'delete $["someobject"]["items"]' somefile
script: String,
/// The file to change, if omitted will assume stdin
#[clap(parse(from_os_str))]
input_file: Option<PathBuf>,
/// Path to write Automerge changes to, if omitted will write to stdout
#[clap(parse(from_os_str), long("out"), short('o'))]
output_file: Option<PathBuf>,
},
/// Read an automerge document and print a JSON representation of the changes in it to stdout
Examine { input_file: Option<PathBuf> },
}
fn open_file_or_stdin(maybe_path: Option<PathBuf>) -> Result<Box<dyn std::io::Read>> {
if atty::is(atty::Stream::Stdin) {
if let Some(path) = maybe_path {
Ok(Box::new(File::open(&path).unwrap()))
} else {
Err(anyhow!(
"Must provide file path if not providing input via stdin"
))
}
} else {
Ok(Box::new(std::io::stdin()))
}
}
fn create_file_or_stdout(maybe_path: Option<PathBuf>) -> Result<Box<dyn std::io::Write>> {
if atty::is(atty::Stream::Stdout) {
if let Some(path) = maybe_path {
Ok(Box::new(File::create(&path).unwrap()))
} else {
Err(anyhow!("Must provide file path if not piping to stdout"))
}
} else {
Ok(Box::new(std::io::stdout()))
}
}
fn main() -> Result<()> {
let opts = Opts::parse();
match opts.cmd {
Command::Export {
changes_file,
format,
} => match format {
ExportFormat::Json => {
let mut in_buffer = open_file_or_stdin(changes_file)?;
export::export_json(
&mut in_buffer,
&mut std::io::stdout(),
atty::is(atty::Stream::Stdout),
)
}
ExportFormat::Toml => unimplemented!(),
},
Command::Import {
format,
input_file,
changes_file,
} => match format {
ExportFormat::Json => {
let mut out_buffer = create_file_or_stdout(changes_file)?;
let mut in_buffer = open_file_or_stdin(input_file)?;
import::import_json(&mut in_buffer, &mut out_buffer)
}
ExportFormat::Toml => unimplemented!(),
},
Command::Change {
input_file,
output_file,
script,
} => {
let in_buffer = open_file_or_stdin(input_file)?;
let mut out_buffer = create_file_or_stdout(output_file)?;
change::change(in_buffer, &mut out_buffer, script.as_str())
.map_err(|e| anyhow::format_err!("Unable to make changes: {:?}", e))
}
Command::Examine { input_file } => {
let in_buffer = open_file_or_stdin(input_file)?;
let out_buffer = std::io::stdout();
match examine::examine(in_buffer, out_buffer, atty::is(atty::Stream::Stdout)) {
Ok(()) => {}
Err(e) => {
eprintln!("Error: {:?}", e);
}
}
Ok(())
}
}
}

View file

@ -2,45 +2,45 @@ use std::env;
use duct::cmd;
// #[test]
// fn import_stdin() {
// let bin = env!("CARGO_BIN_EXE_automerge");
// let initial_state_json = serde_json::json!({
// "birds": {
// "wrens": 3.0,
// "sparrows": 15.0
// }
// });
// let json_bytes = serde_json::to_string_pretty(&initial_state_json).unwrap();
#[test]
fn import_stdin() {
let bin = env!("CARGO_BIN_EXE_automerge");
let initial_state_json = serde_json::json!({
"birds": {
"wrens": 3.0,
"sparrows": 15.0
}
});
let json_bytes = serde_json::to_string_pretty(&initial_state_json).unwrap();
// let no_pipe_no_file = cmd!(bin, "import").stdin_bytes(json_bytes.clone()).run();
let no_pipe_no_file = cmd!(bin, "import").stdin_bytes(json_bytes.clone()).run();
// assert!(no_pipe_no_file.is_err());
assert!(no_pipe_no_file.is_err());
// let pipe_no_file = cmd!(bin, "import")
// .stdin_bytes(json_bytes.clone())
// .stdout_capture()
// .run();
let pipe_no_file = cmd!(bin, "import")
.stdin_bytes(json_bytes.clone())
.stdout_capture()
.run();
// assert!(pipe_no_file.is_ok());
assert!(pipe_no_file.is_ok());
// let mut temp_file = std::env::temp_dir();
// temp_file.push("import_test.mpl");
// let no_pipe_file = cmd!(bin, "import", "--out", &temp_file)
// .stdin_bytes(json_bytes)
// .run();
let mut temp_file = std::env::temp_dir();
temp_file.push("import_test.mpl");
let no_pipe_file = cmd!(bin, "import", "--out", &temp_file)
.stdin_bytes(json_bytes)
.run();
// assert!(no_pipe_file.is_ok());
// std::fs::remove_file(temp_file).unwrap();
// }
assert!(no_pipe_file.is_ok());
std::fs::remove_file(temp_file).unwrap();
}
// #[test]
// fn export_stdout() {
// let bin = env!("CARGO_BIN_EXE_automerge");
// let no_pipe_no_file = cmd!(bin, "export").stdout_capture().run();
#[test]
fn export_stdout() {
let bin = env!("CARGO_BIN_EXE_automerge");
let no_pipe_no_file = cmd!(bin, "export").stdout_capture().run();
// assert!(no_pipe_no_file.is_err());
// }
assert!(no_pipe_no_file.is_err());
}
#[test]
fn import_export_isomorphic() {
@ -61,7 +61,6 @@ fn import_export_isomorphic() {
assert_eq!(stdout, json_bytes);
}
/*
#[test]
fn import_change_export() {
let bin = env!("CARGO_BIN_EXE_automerge");
@ -90,4 +89,3 @@ fn import_change_export() {
});
assert_eq!(result, expected);
}
*/

1
automerge-frontend/.gitignore vendored Normal file
View file

@ -0,0 +1 @@
target/*

View file

@ -0,0 +1,46 @@
[package]
name = "automerge-frontend"
version = "0.1.0"
authors = ["Alex Good <alex@memoryandthought.me>"]
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
bench = false
[dependencies]
automerge-protocol = { path = "../automerge-protocol" }
futures = "0.3.4"
serde = { version = "^1.0", features=["derive"] }
serde_json = "^1.0"
uuid = { version = "^0.8.2", features=["v4"] }
maplit = "1.0.2"
thiserror = "1.0.16"
im-rc = "15.0.0"
unicode-segmentation = "1.7.1"
arbitrary = { version = "1", features = ["derive"], optional = true }
[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dependencies]
getrandom = { version = "0.2.2", features=["js"] }
uuid = { version = "0.8.1", features = ["wasm-bindgen", "v4", "serde"] }
[dev-dependencies]
automerge-backend = { path = "../automerge-backend" }
criterion = "0.3.3"
rand = "0.8.2"
env_logger = "0.8.3"
log = "0.4.14"
wasm-bindgen-test = "0.3.22"
[[bench]]
name = "statetree_apply_diff"
harness = false
[[bench]]
name = "change"
harness = false
[features]
default = ["std"]
derive-arbitrary = ["arbitrary"]
std = []

View file

@ -0,0 +1,4 @@
# Automerge Frontend
This is an implementation of the "frontend" of the automerge data structure. It
is designed to be used on the UI thread of a user-facing application.
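A minimal usage sketch (the key `"greeting"` and the change message are
illustrative):

```rust
use automerge_frontend::{Frontend, InvalidChangeRequest, LocalChange, Path, Primitive, Value};

let mut doc = Frontend::new();
// Changes are applied optimistically on the UI thread; the returned
// `UncompressedChange` (if any) is what gets shipped off to a backend.
let (_, change) = doc
    .change::<_, _, InvalidChangeRequest>(Some("set greeting".into()), |d| {
        d.add_change(LocalChange::set(
            Path::root().key("greeting"),
            Value::Primitive(Primitive::Str("hello".to_string())),
        ))
    })
    .unwrap();
assert!(change.is_some());
```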

View file

@ -0,0 +1,40 @@
use automerge_frontend::{Frontend, InvalidChangeRequest, LocalChange, Path, Value};
use criterion::{black_box, criterion_group, criterion_main, BatchSize, Criterion};
use rand::{thread_rng, Rng};
use unicode_segmentation::UnicodeSegmentation;
pub fn insert_long_string(c: &mut Criterion) {
c.bench_function("Frontend::change insert long string", move |b| {
b.iter_batched(
|| {
let doc = Frontend::new();
let random_string: String = thread_rng()
.sample_iter(&rand::distributions::Alphanumeric)
.take(6000)
.map(char::from)
.collect();
(doc, random_string)
},
|(mut doc, string)| {
#[allow(clippy::unit_arg)]
black_box({
doc.change::<_, _, InvalidChangeRequest>(None, |d| {
d.add_change(LocalChange::set(
Path::root().key("text"),
Value::Text(string.graphemes(true).map(|s| s.to_owned()).collect()),
))
})
.unwrap()
})
},
BatchSize::SmallInput,
)
});
}
criterion_group! {
name = frontend_benches;
config = Criterion::default().sample_size(10);
targets = insert_long_string,
}
criterion_main!(frontend_benches);

View file

@ -0,0 +1,149 @@
use std::collections::HashMap;
use automerge_frontend::Frontend;
use automerge_protocol as amp;
use criterion::{black_box, criterion_group, criterion_main, BatchSize, Criterion};
use maplit::hashmap;
pub fn sequential_inserts_in_multiple_patches(c: &mut Criterion) {
let actor_id = amp::ActorId::random();
let make_list_opid = actor_id.op_id_at(1);
let mut patches: Vec<amp::Patch> = vec![amp::Patch {
actor: None,
seq: None,
clock: hashmap! {actor_id.clone() => 1},
deps: Vec::new(),
max_op: 1,
pending_changes: 0,
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"text".to_string() => hashmap!{
make_list_opid.clone() => amp::Diff::Unchanged(amp::ObjDiff{
object_id: make_list_opid.clone().into(),
obj_type: amp::ObjType::text(),
}),
}
},
})),
}];
for index in 0..6000 {
let op_num = index + 2;
let this_op_id = actor_id.op_id_at(op_num as u64);
patches.push(amp::Patch{
actor: None,
seq: None,
clock: hashmap!{actor_id.clone() => op_num as u64},
deps: Vec::new(),
max_op: op_num as u64,
pending_changes:0,
diffs: Some(amp::Diff::Map(amp::MapDiff{
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap!{
"text".to_string() => hashmap!{
make_list_opid.clone() => amp::Diff::Seq(amp::SeqDiff{
object_id: make_list_opid.clone().into(),
obj_type: amp::SequenceType::Text,
edits: vec![amp::DiffEdit::Insert{
index,
elem_id: this_op_id.clone().into(),
}],
props: hashmap!{
index => hashmap!{
this_op_id => amp::Diff::Value(amp::ScalarValue::Str("c".to_string()))
}
}
})
}
}
})),
});
}
c.bench_function(
"StateTreeValue::apply_diff sequential text inserts across multiple patches",
move |b| {
b.iter_batched(
|| {
let doc = Frontend::new();
(doc, patches.clone())
},
|(mut doc, patches)| {
#[allow(clippy::unit_arg)]
black_box({
for patch in patches.into_iter() {
doc.apply_patch(patch).unwrap();
}
doc
})
},
BatchSize::SmallInput,
)
},
);
}
pub fn sequential_inserts_in_single_patch(c: &mut Criterion) {
let actor_id = amp::ActorId::random();
let make_list_opid = actor_id.op_id_at(1);
let mut edits: Vec<amp::DiffEdit> = Vec::new();
let mut props: HashMap<usize, HashMap<amp::OpId, amp::Diff>> = HashMap::new();
for index in 0..6000 {
let op_num = index + 2;
let this_op_id = actor_id.op_id_at(op_num as u64);
edits.push(amp::DiffEdit::Insert {
index,
elem_id: this_op_id.clone().into(),
});
props.insert(
index,
hashmap! {this_op_id => amp::Diff::Value(amp::ScalarValue::Str("c".to_string()))},
);
}
let patch: amp::Patch = amp::Patch {
actor: None,
seq: None,
clock: hashmap! {actor_id => 1},
deps: Vec::new(),
max_op: 1,
pending_changes: 0,
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"text".to_string() => hashmap!{
make_list_opid.clone() => amp::Diff::Seq(amp::SeqDiff{
object_id: make_list_opid.into(),
obj_type: amp::SequenceType::Text,
edits,
props,
}),
}
},
})),
};
c.bench_function(
"StateTreeValue::apply_diff sequential text inserts in a single patch",
move |b| {
b.iter_batched(
|| patch.clone(),
|patch| {
#[allow(clippy::unit_arg)]
black_box({
let mut doc = Frontend::new();
doc.apply_patch(patch).unwrap()
})
},
BatchSize::SmallInput,
)
},
);
}
criterion_group! {
name = benches;
config = Criterion::default().sample_size(10);
targets = sequential_inserts_in_multiple_patches, sequential_inserts_in_single_patch,
}
criterion_main!(benches);

View file

@ -0,0 +1,119 @@
use std::{error::Error, fmt};
use automerge_protocol as amp;
use automerge_protocol::ObjectId;
use thiserror::Error;
use crate::{value::Value, Path};
#[derive(Debug, PartialEq)]
pub enum AutomergeFrontendError {
InvalidChangeRequest,
MissingObjectError(ObjectId),
NoSuchPathError(Path),
PathIsNotCounter,
CannotOverwriteCounter,
MismatchedSequenceNumber,
InvalidActorIdString(String),
}
impl fmt::Display for AutomergeFrontendError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", self)
}
}
impl From<automerge_protocol::error::InvalidActorId> for AutomergeFrontendError {
fn from(e: automerge_protocol::error::InvalidActorId) -> AutomergeFrontendError {
AutomergeFrontendError::InvalidActorIdString(e.0)
}
}
impl Error for AutomergeFrontendError {}
#[derive(Debug, PartialEq)]
pub enum InvalidInitialStateError {
InitialStateMustBeMap,
}
impl fmt::Display for InvalidInitialStateError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", self)
}
}
impl Error for InvalidInitialStateError {}
//TODO Most of these errors should have paths associated with them to make it
//easier to understand where things are going wrong
#[derive(Error, Debug, PartialEq)]
pub enum InvalidPatch {
#[error("Patch did not begin as a map with root object ID")]
PatchDidNotBeginAtRoot,
#[error("Mismatched sequence number, expected: {expected} but got {actual}")]
MismatchedSequenceNumber { expected: u64, actual: u64 },
#[error("Received a diff inserting a non text object in a text object. Target object id was {object_id}, diff was {diff:?}")]
InsertNonTextInTextObject {
object_id: ObjectId,
diff: amp::Diff,
},
#[error(
"Received a diff for a character in a text object which created more than one character"
)]
InsertMultipleCharsInTextChar,
#[error("Received a diff which had multiple values for a key in a table. Table id was {table_id}, diff was {diff:?}")]
ConflictsReceivedForTableKey { table_id: ObjectId, diff: amp::Diff },
#[error("Patch contained a diff which expected object with ID {object_id:?} to be {patch_expected_type:?} but we think it is {actual_type:?}")]
MismatchingObjectType {
object_id: ObjectId,
patch_expected_type: Option<amp::ObjType>,
actual_type: Option<amp::ObjType>,
},
#[error("Patch referenced an object id {patch_expected_id:?} at a path where we ecpected {actual_id:?}")]
MismatchingObjectIDs {
patch_expected_id: Option<ObjectId>,
actual_id: ObjectId,
},
#[error("Patch attempted to reference an index which did not exist for object {object_id}")]
InvalidIndex { object_id: ObjectId, index: usize },
#[error("The patch tried to create an object but specified no value for the new object")]
DiffCreatedObjectWithNoValue,
#[error("The patch contained a diff with a list edit which referenced the '_head' of a list, rather than a specific element ID")]
DiffEditWithHeadElemId,
#[error("Value diff containing cursor")]
ValueDiffContainedCursor,
}
#[derive(Error, Debug, PartialEq)]
pub enum InvalidChangeRequest {
#[error("attempted to set the value of {path:?}, which is not allowed because that value is a counter")]
CannotOverwriteCounter { path: Path },
#[error("attempted an operation on a path that does not exist: {path:?}")]
NoSuchPathError { path: Path },
#[error("attempted to set a non map object {value:?} as the root")]
CannotSetNonMapObjectAsRoot { value: Value },
#[error("attempted to increment an object which is not a counter at {path:?}")]
IncrementForNonCounterObject { path: Path },
#[error("attempted to insert using a path which does not end in an index: {path:?}")]
InsertWithNonSequencePath { path: Path },
#[error("attempted to insert into an object which is not a sequence at {path:?}")]
InsertForNonSequenceObject { path: Path },
#[error("attempted to insert past the end of a sequence, path was {path:?}, max length of sequence is {sequence_length}")]
InsertPastEndOfSequence { path: Path, sequence_length: u64 },
#[error("attempted to insert something into a text object which is not a character, object: {object:?}")]
InsertNonTextInTextObject { path: Path, object: Value },
#[error("attmpted to delete root object")]
CannotDeleteRootObject,
#[error("Attempted to access a missing index")]
MissingIndexError {
#[from]
source: MissingIndexError,
},
}
#[derive(Error, Debug, PartialEq)]
#[error("Attempted to access index {missing_index} in a collection with max index: {size_of_collection}")]
pub struct MissingIndexError {
pub missing_index: usize,
pub size_of_collection: usize,
}

View file

@ -0,0 +1,475 @@
use automerge_protocol::{
ActorId, ChangeHash, MapType, ObjectId, Op, OpId, Patch, UncompressedChange,
};
mod error;
mod mutation;
mod path;
mod state_tree;
mod value;
use std::{collections::HashMap, convert::TryFrom, error::Error, fmt::Debug};
pub use error::{
AutomergeFrontendError, InvalidChangeRequest, InvalidInitialStateError, InvalidPatch,
};
pub use mutation::{LocalChange, MutableDocument};
pub use path::Path;
use path::PathElement;
use state_tree::ResolvedPath;
pub use value::{Conflicts, Cursor, Primitive, Value};
/// Tracks the possible states of the frontend
///
/// What does this mean and why do we need it? The reason the frontend/backend
/// split exists in the first place is that we want to quickly apply local
/// changes (local in this sense means something like "on the UI thread") on a
/// low latency local cache whilst also shipping those same changes off to a
/// backend, which can reconcile them with historical changes and new changes
/// received over the network - work which may be more compute intensive and
/// so have too high a latency to be acceptable on the UI thread.
///
/// This frontend/backend split implies that we need to optimistically apply
/// local changes somehow. In order to do this we immediately apply changes to
/// a copy of the local state (state being an instance of [StateTree]) and
/// add the sequence number of the new change to a list of in flight requests.
/// In detail the logic looks like this:
///
/// When we receive a patch from the backend:
/// 1. Check that if the patch is for our actor ID then the sequence number of
/// the patch is the same as the sequence number of the oldest in flight
/// request.
/// 2. Apply the patch to the `reconciled_state` of the current state
/// 3. If there are no in flight requests remaining then transition from
/// the `WaitingForInFlightRequests` state to the `Reconciled` state,
/// moving the `reconciled_state` into the `Reconciled` enum branch
#[derive(Clone, Debug)]
enum FrontendState {
WaitingForInFlightRequests {
in_flight_requests: Vec<u64>,
reconciled_root_state: state_tree::StateTree,
optimistically_updated_root_state: state_tree::StateTree,
max_op: u64,
},
Reconciled {
root_state: state_tree::StateTree,
max_op: u64,
deps_of_last_received_patch: Vec<ChangeHash>,
},
}
impl FrontendState {
/// Apply a patch received from the backend to this frontend state,
/// returns the updated cached value (if it has changed) and a new
/// `FrontendState` which replaces this one
fn apply_remote_patch(self, self_actor: &ActorId, patch: &Patch) -> Result<Self, InvalidPatch> {
match self {
FrontendState::WaitingForInFlightRequests {
in_flight_requests,
reconciled_root_state,
optimistically_updated_root_state,
max_op,
} => {
let mut new_in_flight_requests = in_flight_requests;
// If the actor ID and seq exist then this patch corresponds
// to a local change (i.e it came from Backend::apply_local_change
// so we don't need to apply it, we just need to remove it from
// the in_flight_requests vector
if let (Some(patch_actor), Some(patch_seq)) = (&patch.actor, patch.seq) {
// If this is a local change corresponding to our actor then we
// need to match it against in flight requests
if self_actor == patch_actor {
// Check that if the patch is for our actor ID then it is not
// out of order
if new_in_flight_requests[0] != patch_seq {
return Err(InvalidPatch::MismatchedSequenceNumber {
expected: new_in_flight_requests[0],
actual: patch_seq,
});
}
// unwrap should be fine here as `in_flight_requests` should never have zero length
// because we transition to reconciled state when that happens
let (_, remaining_requests) = new_in_flight_requests.split_first().unwrap();
new_in_flight_requests = remaining_requests.iter().copied().collect();
}
}
let new_reconciled_root_state = if let Some(diff) = &patch.diffs {
reconciled_root_state.apply_diff(diff)?
} else {
reconciled_root_state
};
Ok(match new_in_flight_requests[..] {
[] => FrontendState::Reconciled {
root_state: new_reconciled_root_state,
max_op: patch.max_op,
deps_of_last_received_patch: patch.deps.clone(),
},
_ => FrontendState::WaitingForInFlightRequests {
in_flight_requests: new_in_flight_requests,
reconciled_root_state: new_reconciled_root_state,
optimistically_updated_root_state,
max_op,
},
})
}
FrontendState::Reconciled { root_state, .. } => {
let new_root_state = if let Some(diff) = &patch.diffs {
root_state.apply_diff(diff)?
} else {
root_state
};
Ok(FrontendState::Reconciled {
root_state: new_root_state,
max_op: patch.max_op,
deps_of_last_received_patch: patch.deps.clone(),
})
}
}
}
fn get_object_id(&self, path: &Path) -> Option<ObjectId> {
self.resolve_path(path).and_then(|r| r.object_id())
}
fn get_value(&self, path: &Path) -> Option<Value> {
self.resolve_path(path).map(|r| r.default_value())
}
fn resolve_path(&self, path: &Path) -> Option<ResolvedPath> {
let root = match self {
FrontendState::WaitingForInFlightRequests {
optimistically_updated_root_state,
..
} => optimistically_updated_root_state,
FrontendState::Reconciled { root_state, .. } => root_state,
};
root.resolve_path(path)
}
/// Optimistically apply a change. The change closure will be passed a `MutableDocument`
/// which it can use to query the document state and make changes. It
/// can also throw an error of type `E`. If an error is thrown in the
/// closure no changes are made and the error is returned.
pub fn optimistically_apply_change<F, O, E>(
self,
actor: &ActorId,
change_closure: F,
seq: u64,
) -> Result<OptimisticChangeResult<O>, E>
where
E: Error,
F: FnOnce(&mut dyn MutableDocument) -> Result<O, E>,
{
match self {
FrontendState::WaitingForInFlightRequests {
mut in_flight_requests,
reconciled_root_state,
optimistically_updated_root_state,
max_op,
} => {
let mut mutation_tracker = mutation::MutationTracker::new(
optimistically_updated_root_state,
max_op,
actor.clone(),
);
let result = change_closure(&mut mutation_tracker)?;
let new_root_state = mutation_tracker.state.clone();
in_flight_requests.push(seq);
Ok(OptimisticChangeResult {
ops: mutation_tracker.ops(),
new_state: FrontendState::WaitingForInFlightRequests {
in_flight_requests,
optimistically_updated_root_state: new_root_state,
reconciled_root_state,
max_op: mutation_tracker.max_op,
},
deps: Vec::new(),
closure_result: result,
})
}
FrontendState::Reconciled {
root_state,
max_op,
deps_of_last_received_patch,
} => {
let mut mutation_tracker =
mutation::MutationTracker::new(root_state.clone(), max_op, actor.clone());
let result = change_closure(&mut mutation_tracker)?;
let new_root_state = mutation_tracker.state.clone();
let in_flight_requests = vec![seq];
Ok(OptimisticChangeResult {
ops: mutation_tracker.ops(),
new_state: FrontendState::WaitingForInFlightRequests {
in_flight_requests,
optimistically_updated_root_state: new_root_state,
reconciled_root_state: root_state,
max_op: mutation_tracker.max_op,
},
deps: deps_of_last_received_patch,
closure_result: result,
})
}
}
}
fn in_flight_requests(&self) -> Vec<u64> {
match self {
FrontendState::WaitingForInFlightRequests {
in_flight_requests, ..
} => in_flight_requests.clone(),
_ => Vec::new(),
}
}
fn max_op(&self) -> u64 {
match self {
FrontendState::WaitingForInFlightRequests { max_op, .. } => *max_op,
FrontendState::Reconciled { max_op, .. } => *max_op,
}
}
fn value(&self) -> Value {
match self {
FrontendState::WaitingForInFlightRequests {
optimistically_updated_root_state,
..
} => optimistically_updated_root_state.value(),
FrontendState::Reconciled { root_state, .. } => root_state.value(),
}
}
}
pub struct Frontend {
pub actor_id: ActorId,
pub seq: u64,
/// The current state of the frontend, see the description of
/// `FrontendState` for details. It's an `Option` to allow consuming it
/// using Option::take whilst behind a mutable reference.
state: Option<FrontendState>,
/// A cache of the value of this frontend
cached_value: Option<Value>,
/// A function for generating timestamps
timestamper: Box<dyn Fn() -> Option<i64>>,
}
impl Debug for Frontend {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
let Frontend {
actor_id,
seq,
state,
cached_value,
timestamper: _,
} = self;
{
let mut builder = f.debug_struct("Frontend");
let _ = builder.field("actor_id", &actor_id);
let _ = builder.field("seq", &seq);
let _ = builder.field("state", &state);
let _ = builder.field("cached_value", &cached_value);
builder.finish()
}
}
}
#[cfg(feature = "std")]
impl Default for Frontend {
fn default() -> Self {
Self::new()
}
}
impl Frontend {
#[cfg(feature = "std")]
pub fn new() -> Self {
let system_time = || {
std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.ok()
.and_then(|d| i64::try_from(d.as_millis()).ok())
};
Self::new_with_timestamper(Box::new(system_time))
}
#[cfg(feature = "std")]
pub fn new_with_actor_id(actor_id: uuid::Uuid) -> Self {
let system_time = || {
std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.ok()
.and_then(|d| i64::try_from(d.as_millis()).ok())
};
Self::new_with_timestamper_and_actor_id(Box::new(system_time), actor_id)
}
pub fn new_with_timestamper(t: Box<dyn Fn() -> Option<i64>>) -> Self {
Self::new_with_timestamper_and_actor_id(t, uuid::Uuid::new_v4())
}
pub fn new_with_timestamper_and_actor_id(
t: Box<dyn Fn() -> Option<i64>>,
actor_id: uuid::Uuid,
) -> Self {
let root_state = state_tree::StateTree::new();
Frontend {
actor_id: ActorId::from_bytes(actor_id.as_bytes()),
seq: 0,
state: Some(FrontendState::Reconciled {
root_state,
max_op: 0,
deps_of_last_received_patch: Vec::new(),
}),
cached_value: None,
timestamper: t,
}
}
#[cfg(feature = "std")]
pub fn new_with_initial_state(
initial_state: Value,
) -> Result<(Self, UncompressedChange), InvalidInitialStateError> {
match &initial_state {
Value::Map(kvs, MapType::Map) => {
let mut front = Frontend::new();
let (init_ops, _) =
kvs.iter()
.fold((Vec::new(), 1), |(mut ops, max_op), (k, v)| {
let (more_ops, max_op) = value::value_to_op_requests(
&front.actor_id,
max_op,
ObjectId::Root,
&k.into(),
v,
false,
);
ops.extend(more_ops);
(ops, max_op)
});
let init_change_request = UncompressedChange {
actor_id: front.actor_id.clone(),
start_op: 1,
time: (front.timestamper)().unwrap_or(0),
seq: 1,
message: Some("Initialization".to_string()),
hash: None,
deps: Vec::new(),
operations: init_ops,
extra_bytes: Vec::new(),
};
// Unwrap here is fine because it should be impossible to
// cause an error applying a local change from a `Value`. If
// that happens we've made an error, not the user.
front.change(Some("initialization".into()), |doc| {
doc.add_change(LocalChange::set(Path::root(), initial_state))
.map_err(|_| InvalidInitialStateError::InitialStateMustBeMap)
})?;
Ok((front, init_change_request))
}
_ => Err(InvalidInitialStateError::InitialStateMustBeMap),
}
}
pub fn state(&mut self) -> &Value {
if let Some(ref v) = self.cached_value {
v
} else {
let value = self.state.as_ref().unwrap().value();
self.cached_value = Some(value);
self.cached_value.as_ref().unwrap()
}
}
pub fn change<F, O, E>(
&mut self,
message: Option<String>,
change_closure: F,
) -> Result<(O, Option<UncompressedChange>), E>
where
E: Error,
F: FnOnce(&mut dyn MutableDocument) -> Result<O, E>,
{
let start_op = self.state.as_ref().unwrap().max_op() + 1;
// TODO this leaves the `state` as `None` if there's an error, it shouldn't
let change_result = self.state.take().unwrap().optimistically_apply_change(
&self.actor_id,
change_closure,
self.seq + 1,
)?;
self.cached_value = None;
self.state = Some(change_result.new_state);
if let Some(ops) = change_result.ops {
self.seq += 1;
let change = UncompressedChange {
start_op,
actor_id: self.actor_id.clone(),
seq: self.seq,
time: (self.timestamper)().unwrap_or(0),
message,
hash: None,
deps: change_result.deps,
operations: ops,
extra_bytes: Vec::new(),
};
Ok((change_result.closure_result, Some(change)))
} else {
Ok((change_result.closure_result, None))
}
}
pub fn apply_patch(&mut self, patch: Patch) -> Result<(), InvalidPatch> {
// TODO this leaves the `state` as `None` if there's an error, it shouldn't
self.cached_value = None;
let new_state = self
.state
.take()
.unwrap()
.apply_remote_patch(&self.actor_id, &patch)?;
self.state = Some(new_state);
if let Some(seq) = patch.clock.get(&self.actor_id) {
if *seq > self.seq {
self.seq = *seq;
}
}
Ok(())
}
pub fn get_object_id(&self, path: &Path) -> Option<ObjectId> {
self.state.as_ref().and_then(|s| s.get_object_id(path))
}
pub fn in_flight_requests(&self) -> Vec<u64> {
self.state
.as_ref()
.map(|s| s.in_flight_requests())
.unwrap_or_default()
}
/// Gets the set of values for `path`, returns None if the path does not
/// exist
pub fn get_conflicts(&self, path: &Path) -> Option<HashMap<OpId, Value>> {
self.state
.as_ref()
.and_then(|s| s.resolve_path(path))
.map(|o| o.values())
}
pub fn get_value(&self, path: &Path) -> Option<Value> {
self.state.as_ref().and_then(|s| s.get_value(path))
}
/// Returns the value given by path, if it exists
pub fn value_at_path(&self, path: &Path) -> Option<Value> {
self.state
.as_ref()
.and_then(|s| s.resolve_path(&path))
.map(|o| o.default_value())
}
}
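#[cfg(test)]
mod frontend_state_lifecycle_sketch {
    // A sketch of the optimistic-update cycle documented on `FrontendState`:
    // a local change leaves the frontend waiting on its own in-flight request,
    // and applying the backend's patch for that change reconciles it again.
    // `automerge_backend` is a dev-dependency here, and this assumes
    // `Backend::apply_local_change` returns `(patch, change)` as it does in
    // the C bindings elsewhere in this repository.
    use super::*;

    #[test]
    fn change_then_reconcile() {
        let mut frontend = Frontend::new();
        let (_, change) = frontend
            .change::<_, _, InvalidChangeRequest>(None, |d| {
                d.add_change(LocalChange::set(
                    Path::root().key("key"),
                    Value::Primitive(Primitive::Str("value".to_string())),
                ))
            })
            .unwrap();
        // The request is now in flight, awaiting acknowledgement.
        assert_eq!(frontend.in_flight_requests(), vec![1]);

        let mut backend = automerge_backend::Backend::new();
        let (patch, _) = backend.apply_local_change(change.unwrap()).unwrap();
        frontend.apply_patch(patch).unwrap();
        assert!(frontend.in_flight_requests().is_empty());
    }
}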
struct OptimisticChangeResult<O> {
ops: Option<Vec<Op>>,
new_state: FrontendState,
deps: Vec<ChangeHash>,
closure_result: O,
}

View file

@ -0,0 +1,383 @@
use automerge_protocol as amp;
use unicode_segmentation::UnicodeSegmentation;
use crate::{
error::InvalidChangeRequest,
state_tree::{LocalOperationResult, SetOrInsertPayload, StateTree, Target},
value::{Cursor, Primitive, Value},
Path, PathElement,
};
pub trait MutableDocument {
fn value_at_path(&self, path: &Path) -> Option<Value>;
fn cursor_to_path(&self, path: &Path) -> Option<Cursor>;
fn add_change(&mut self, change: LocalChange) -> Result<(), InvalidChangeRequest>;
}
#[derive(Debug, PartialEq, Clone)]
pub enum LocalOperation {
Set(Value),
Delete,
Increment(i64),
Insert(Value),
}
#[derive(Debug, PartialEq, Clone)]
pub struct LocalChange {
path: Path,
operation: LocalOperation,
}
impl LocalChange {
/// Set the value at `path` to `value`
pub fn set<TV>(path: Path, value: TV) -> LocalChange
where
TV: Into<Value>,
{
LocalChange {
path,
operation: LocalOperation::Set(value.into()),
}
}
/// Delete the entry at `path`
pub fn delete(path: Path) -> LocalChange {
LocalChange {
path,
operation: LocalOperation::Delete,
}
}
/// Increment the counter at `path` by 1
pub fn increment(path: Path) -> LocalChange {
LocalChange {
path,
operation: LocalOperation::Increment(1),
}
}
/// Increment the counter at path by a (possibly negative) amount `by`
pub fn increment_by(path: Path, by: i64) -> LocalChange {
LocalChange {
path,
operation: LocalOperation::Increment(by),
}
}
pub fn insert(path: Path, value: Value) -> LocalChange {
LocalChange {
path,
operation: LocalOperation::Insert(value),
}
}
}
/// `MutationTracker` is used as the context in which a mutation closure is
/// applied. The mutation tracker implements `MutableDocument`, which is how it
/// captures the changes that the mutation closure is making.
///
/// For each operation in the mutation closure the `MutationTracker` generates
/// a diff and immediately applies it to the `StateTree` it is constructed
/// with. It also adds the change to a set of operations. This set of operations
/// is used to generate a `ChangeRequest` once the closure is completed.
pub struct MutationTracker {
pub(crate) state: StateTree,
pub(crate) ops: Vec<amp::Op>,
pub max_op: u64,
actor_id: amp::ActorId,
}
impl MutationTracker {
pub(crate) fn new(
state_tree: StateTree,
max_op: u64,
actor_id: amp::ActorId,
) -> MutationTracker {
MutationTracker {
state: state_tree,
ops: Vec::new(),
max_op,
actor_id,
}
}
pub fn ops(&self) -> Option<Vec<amp::Op>> {
if !self.ops.is_empty() {
Some(self.ops.clone())
} else {
None
}
}
/// If the `value` is a map, individually assign each k,v in it to a key in
/// the root object
fn wrap_root_assignment(&mut self, value: &Value) -> Result<(), InvalidChangeRequest> {
match value {
Value::Map(kvs, amp::MapType::Map) => {
for (k, v) in kvs.iter() {
self.add_change(LocalChange::set(Path::root().key(k), v.clone()))?;
}
Ok(())
}
_ => Err(InvalidChangeRequest::CannotSetNonMapObjectAsRoot {
value: value.clone(),
}),
}
}
fn apply_state_change(&mut self, change: LocalOperationResult) {
self.state = change.new_state();
self.max_op += change.new_ops.len() as u64;
self.ops.extend(change.new_ops);
}
}
impl MutableDocument for MutationTracker {
fn value_at_path(&self, path: &Path) -> Option<Value> {
self.state.resolve_path(path).map(|r| r.default_value())
}
fn cursor_to_path(&self, path: &Path) -> Option<Cursor> {
if let Some(PathElement::Index(i)) = path.name() {
if let Some(parent) = self.state.resolve_path(&path.parent()) {
match parent.target {
Target::List(list_target) => list_target.get_cursor(*i).ok(),
Target::Text(text_target) => text_target.get_cursor(*i).ok(),
_ => None,
}
} else {
None
}
} else {
None
}
}
fn add_change(&mut self, change: LocalChange) -> Result<(), InvalidChangeRequest> {
match &change.operation {
LocalOperation::Set(value) => {
//TODO double resolving is ugly here
if let Some(Target::Counter(_)) =
self.state.resolve_path(&change.path).map(|p| p.target)
{
return Err(InvalidChangeRequest::CannotOverwriteCounter { path: change.path });
};
if let Some(name) = change.path.name() {
if let Some(parent) = self.state.resolve_path(&change.path.parent()) {
let payload = SetOrInsertPayload {
start_op: self.max_op + 1,
actor: &self.actor_id.clone(),
value,
};
match (name, parent.target) {
(PathElement::Key(ref k), Target::Root(ref root_target)) => {
self.apply_state_change(root_target.set_key(k, payload));
Ok(())
}
(PathElement::Key(ref k), Target::Map(ref maptarget)) => {
self.apply_state_change(maptarget.set_key(k, payload));
Ok(())
}
(PathElement::Key(ref k), Target::Table(ref tabletarget)) => {
self.apply_state_change(tabletarget.set_key(k, payload));
Ok(())
}
// In this case we are trying to modify a key in something which is not
// an object or a table, so the path does not exist
(PathElement::Key(_), _) => {
Err(InvalidChangeRequest::NoSuchPathError { path: change.path })
}
(PathElement::Index(i), Target::List(ref list_target)) => {
self.apply_state_change(list_target.set(*i, payload)?);
Ok(())
}
(PathElement::Index(i), Target::Text(ref text)) => match value {
Value::Primitive(Primitive::Str(s)) => {
if s.graphemes(true).count() == 1 {
let payload = SetOrInsertPayload {
start_op: self.max_op + 1,
actor: &self.actor_id.clone(),
value: s.clone(),
};
self.apply_state_change(text.set(*i, payload)?);
Ok(())
} else {
Err(InvalidChangeRequest::InsertNonTextInTextObject {
path: change.path.clone(),
object: value.clone(),
})
}
}
_ => Err(InvalidChangeRequest::InsertNonTextInTextObject {
path: change.path.clone(),
object: value.clone(),
}),
},
(PathElement::Index(_), _) => {
Err(InvalidChangeRequest::InsertWithNonSequencePath {
path: change.path.clone(),
})
}
}
} else {
Err(InvalidChangeRequest::NoSuchPathError { path: change.path })
}
} else {
self.wrap_root_assignment(value)
}
}
LocalOperation::Delete => {
if let Some(name) = change.path.name() {
if let Some(pr) = self.state.resolve_path(&change.path.parent()) {
let state_change = match pr.target {
Target::Counter(_) => {
return Err(InvalidChangeRequest::NoSuchPathError {
path: change.path,
})
}
Target::List(l) => match name {
PathElement::Index(i) => l.remove(*i)?,
_ => {
return Err(InvalidChangeRequest::NoSuchPathError {
path: change.path,
})
}
},
Target::Text(t) => match name {
PathElement::Index(i) => t.remove(*i)?,
_ => {
return Err(InvalidChangeRequest::NoSuchPathError {
path: change.path,
})
}
},
Target::Primitive(_) => {
return Err(InvalidChangeRequest::NoSuchPathError {
path: change.path,
})
}
Target::Map(m) => match name {
PathElement::Key(k) => m.delete_key(k),
_ => {
return Err(InvalidChangeRequest::NoSuchPathError {
path: change.path,
})
}
},
Target::Table(t) => match name {
PathElement::Key(k) => t.delete_key(k),
_ => {
return Err(InvalidChangeRequest::NoSuchPathError {
path: change.path,
})
}
},
Target::Character(_) => {
return Err(InvalidChangeRequest::NoSuchPathError {
path: change.path,
})
}
Target::Root(r) => match name {
PathElement::Key(k) => r.delete_key(k),
_ => {
return Err(InvalidChangeRequest::NoSuchPathError {
path: change.path,
})
}
},
};
self.apply_state_change(state_change);
Ok(())
} else {
Err(InvalidChangeRequest::NoSuchPathError { path: change.path })
}
} else {
Err(InvalidChangeRequest::CannotDeleteRootObject)
}
}
LocalOperation::Increment(by) => {
if change.path.name().is_some() {
if let Some(pr) = self.state.resolve_path(&change.path) {
match pr.target {
Target::Counter(counter_target) => {
self.apply_state_change(counter_target.increment(*by));
Ok(())
}
_ => Err(InvalidChangeRequest::IncrementForNonCounterObject {
path: change.path.clone(),
}),
}
} else {
Err(InvalidChangeRequest::NoSuchPathError { path: change.path })
}
} else {
Err(InvalidChangeRequest::IncrementForNonCounterObject {
path: change.path.clone(),
})
}
}
LocalOperation::Insert(value) => {
if let Some(name) = change.path.name() {
let index = match name {
PathElement::Index(i) => i,
_ => {
return Err(InvalidChangeRequest::InsertWithNonSequencePath {
path: change.path,
})
}
};
if let Some(parent) = self
.state
.resolve_path(&change.path.parent())
.map(|p| p.target)
{
match (parent, value) {
(Target::List(list_target), _) => {
let payload = SetOrInsertPayload {
start_op: self.max_op + 1,
actor: &self.actor_id.clone(),
value,
};
self.apply_state_change(list_target.insert(*index, payload)?);
Ok(())
}
(Target::Text(text_target), val) => match val {
Value::Primitive(Primitive::Str(s)) => {
if s.graphemes(true).count() == 1 {
let payload = SetOrInsertPayload {
start_op: self.max_op + 1,
actor: &self.actor_id.clone(),
value: s.clone(),
};
self.apply_state_change(
text_target.insert(*index, payload)?,
);
Ok(())
} else {
Err(InvalidChangeRequest::InsertNonTextInTextObject {
path: change.path,
object: value.clone(),
})
}
}
_ => Err(InvalidChangeRequest::InsertNonTextInTextObject {
path: change.path,
object: value.clone(),
}),
},
_ => Err(InvalidChangeRequest::NoSuchPathError {
path: change.path.clone(),
}),
}
} else {
Err(InvalidChangeRequest::InsertForNonSequenceObject { path: change.path })
}
} else {
Err(InvalidChangeRequest::NoSuchPathError {
path: change.path.clone(),
})
}
}
}
}
}

View file

@ -0,0 +1,58 @@
use std::fmt;
#[derive(Debug, Clone, PartialEq)]
pub(crate) enum PathElement {
Key(String),
Index(u32),
}
#[derive(Debug, Clone, PartialEq)]
pub struct Path(Vec<PathElement>);
impl Path {
pub fn root() -> Path {
Path(Vec::new())
}
pub fn index(mut self, index: u32) -> Self {
self.0.push(PathElement::Index(index));
self
}
pub fn key<S: Into<String>>(mut self, key: S) -> Path {
self.0.push(PathElement::Key(key.into()));
self
}
pub fn parent(&self) -> Self {
if self.0.is_empty() {
Path(Vec::new())
} else {
let mut new_path = self.0.clone();
new_path.pop();
Path(new_path)
}
}
/// Get the final component of the path, if any
pub(crate) fn name(&self) -> Option<&PathElement> {
self.0.last()
}
pub(crate) fn elements(self) -> Vec<PathElement> {
self.0
}
pub(crate) fn is_root(&self) -> bool {
self.0.is_empty()
}
}
impl fmt::Display for PathElement {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
PathElement::Key(k) => write!(f, "{}", k),
PathElement::Index(i) => write!(f, "{}", i),
}
}
}

View file

@ -0,0 +1,63 @@
use super::StateTreeChange;
pub(super) struct DiffApplicationResult<T> {
pub(super) value: T,
pub(super) change: StateTreeChange,
}
impl<T> DiffApplicationResult<T> {
pub(crate) fn pure(t: T) -> DiffApplicationResult<T> {
DiffApplicationResult {
value: t,
change: StateTreeChange::empty(),
}
}
pub(crate) fn with_changes(mut self, changes: StateTreeChange) -> Self {
self.change = changes;
self
}
pub(crate) fn map<F, U>(self, f: F) -> DiffApplicationResult<U>
where
F: FnOnce(T) -> U,
{
DiffApplicationResult {
value: f(self.value),
change: self.change,
}
}
pub(crate) fn try_map<F, U, E>(self, f: F) -> Result<DiffApplicationResult<U>, E>
where
F: FnOnce(T) -> Result<U, E>,
{
let value = f(self.value)?;
Ok(DiffApplicationResult {
value,
change: self.change,
})
}
pub(crate) fn and_then<F, U>(self, f: F) -> DiffApplicationResult<U>
where
F: FnOnce(T) -> DiffApplicationResult<U>,
{
let result = f(self.value);
DiffApplicationResult {
value: result.value,
change: result.change + self.change,
}
}
pub(crate) fn try_and_then<F, U, E>(self, f: F) -> Result<DiffApplicationResult<U>, E>
where
F: FnOnce(T) -> Result<DiffApplicationResult<U>, E>,
{
let result = f(self.value)?;
Ok(DiffApplicationResult {
value: result.value,
change: result.change + self.change,
})
}
}
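DiffApplicationResult behaves like a small writer monad over StateTreeChange: pure carries an empty change, map transforms only the value, and and_then/try_and_then merge the accumulated changes with +. A hedged, module-internal sketch:

let result = DiffApplicationResult::pure(1u32)
    .map(|n| n + 1)
    .and_then(|n| DiffApplicationResult::pure(n * 10).with_changes(StateTreeChange::empty()));
// Both steps' changes are summed; the value threads straight through.
assert_eq!(result.value, 20);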

View file

@ -0,0 +1,296 @@
use std::collections::HashMap;
use automerge_protocol as amp;
use super::{DiffApplicationResult, DiffToApply, MultiGrapheme, MultiValue, StateTreeChange};
use crate::error::InvalidPatch;
pub(super) trait DiffableValue: Sized {
fn construct<K>(
opid: &amp::OpId,
diff: DiffToApply<K, &amp::Diff>,
) -> Result<DiffApplicationResult<Self>, InvalidPatch>
where
K: Into<amp::Key>;
fn apply_diff<K>(
&self,
opid: &amp::OpId,
diff: DiffToApply<K, &amp::Diff>,
) -> Result<DiffApplicationResult<Self>, InvalidPatch>
where
K: Into<amp::Key>;
fn apply_diff_iter<'a, 'b, 'c, 'd, I, K: 'c>(
&'a self,
diff: &mut I,
) -> Result<DiffApplicationResult<Self>, InvalidPatch>
where
K: Into<amp::Key>,
I: Iterator<Item = (&'b amp::OpId, DiffToApply<'c, K, &'d amp::Diff>)>;
fn default_opid(&self) -> amp::OpId;
}
impl DiffableValue for MultiGrapheme {
fn construct<K>(
opid: &amp::OpId,
diff: DiffToApply<K, &amp::Diff>,
) -> Result<DiffApplicationResult<Self>, InvalidPatch>
where
K: Into<amp::Key>,
{
let c = MultiGrapheme::new_from_diff(opid, diff)?;
Ok(DiffApplicationResult::pure(c))
}
fn apply_diff<K>(
&self,
opid: &amp::OpId,
diff: DiffToApply<K, &amp::Diff>,
) -> Result<DiffApplicationResult<Self>, InvalidPatch>
where
K: Into<amp::Key>,
{
MultiGrapheme::apply_diff(self, opid, diff).map(DiffApplicationResult::pure)
}
fn apply_diff_iter<'a, 'b, 'c, 'd, I, K: 'c>(
&'a self,
diff: &mut I,
) -> Result<DiffApplicationResult<Self>, InvalidPatch>
where
K: Into<amp::Key>,
I: Iterator<Item = (&'b amp::OpId, DiffToApply<'c, K, &'d amp::Diff>)>,
{
self.apply_diff_iter(diff)
}
fn default_opid(&self) -> amp::OpId {
self.default_opid().clone()
}
}
impl DiffableValue for MultiValue {
fn construct<K>(
opid: &amp::OpId,
diff: DiffToApply<K, &amp::Diff>,
) -> Result<DiffApplicationResult<Self>, InvalidPatch>
where
K: Into<amp::Key>,
{
MultiValue::new_from_diff(opid.clone(), diff)
}
fn apply_diff<K>(
&self,
opid: &amp::OpId,
diff: DiffToApply<K, &amp::Diff>,
) -> Result<DiffApplicationResult<Self>, InvalidPatch>
where
K: Into<amp::Key>,
{
self.apply_diff(opid, diff)
}
fn apply_diff_iter<'a, 'b, 'c, 'd, I, K: 'c>(
&'a self,
diff: &mut I,
) -> Result<DiffApplicationResult<Self>, InvalidPatch>
where
K: Into<amp::Key>,
I: Iterator<Item = (&'b amp::OpId, DiffToApply<'c, K, &'d amp::Diff>)>,
{
self.apply_diff_iter(diff)
}
fn default_opid(&self) -> amp::OpId {
self.default_opid()
}
}
/// This represents a sequence which can be updated with a diff. The reason we need it is that
/// whilst diffing a sequence we need to be able to insert placeholder values when processing the
/// `edits` key of the diff. We don't want to unwrap options everywhere though, so we maintain the
/// invariant that every element contains a `Some(T)` after each diff application.
#[derive(Clone, Debug)]
pub(super) struct DiffableSequence<T>
where
T: DiffableValue,
T: Clone,
{
underlying: im_rc::Vector<Box<(amp::OpId, Option<T>)>>,
}
impl<T> DiffableSequence<T>
where
T: Clone,
T: DiffableValue,
{
pub fn new() -> DiffableSequence<T> {
DiffableSequence {
underlying: im_rc::Vector::new(),
}
}
pub(super) fn new_from<I>(i: I) -> DiffableSequence<T>
where
I: IntoIterator<Item = (amp::OpId, T)>,
{
DiffableSequence {
underlying: i
.into_iter()
.map(|(oid, v)| Box::new((oid, Some(v))))
.collect(),
}
}
pub fn apply_diff<K>(
&self,
object_id: &amp::ObjectId,
edits: &[amp::DiffEdit],
new_props: DiffToApply<K, &HashMap<usize, HashMap<amp::OpId, amp::Diff>>>,
) -> Result<DiffApplicationResult<DiffableSequence<T>>, InvalidPatch>
where
K: Into<amp::Key>,
{
let mut new_underlying = self.underlying.clone();
for edit in edits.iter() {
match edit {
amp::DiffEdit::Remove { index } => {
if *index >= new_underlying.len() {
return Err(InvalidPatch::InvalidIndex {
object_id: object_id.clone(),
index: *index,
});
}
new_underlying.remove(*index);
}
amp::DiffEdit::Insert { index, elem_id } => {
let op_id = match elem_id {
amp::ElementId::Head => return Err(InvalidPatch::DiffEditWithHeadElemId),
amp::ElementId::Id(oid) => oid.clone(),
};
if (*index) == new_underlying.len() {
new_underlying.push_back(Box::new((op_id, None)));
} else {
new_underlying.insert(*index, Box::new((op_id, None)));
}
}
};
}
let mut changes = StateTreeChange::empty();
for (index, prop_diff) in new_props.diff.iter() {
let mut diff_iter = prop_diff.iter();
match diff_iter.next() {
None => {
new_underlying.remove(*index);
}
Some((opid, diff)) => {
let current_objects =
changes.objects().union(new_props.current_objects.clone());
let entry = new_underlying.get_mut(*index);
match entry {
Some(e) => {
let mut updated_node = match &e.1 {
Some(n) => n.apply_diff(
opid,
DiffToApply {
current_objects: current_objects.clone(),
parent_object_id: object_id,
parent_key: opid,
diff,
},
)?,
None => T::construct(
opid,
DiffToApply {
current_objects: current_objects.clone(),
parent_object_id: object_id,
parent_key: opid,
diff,
},
)?,
};
let mut diffiter2 = diff_iter.map(|(oid, diff)| {
(
oid,
DiffToApply {
current_objects: current_objects.clone(),
parent_object_id: object_id,
parent_key: oid,
diff,
},
)
});
updated_node = updated_node
.try_and_then(move |n| n.apply_diff_iter(&mut diffiter2))?;
changes += updated_node.change;
e.1 = Some(updated_node.value);
}
None => {
return Err(InvalidPatch::InvalidIndex {
object_id: object_id.clone(),
index: *index,
})
}
};
}
};
}
        // This is where we maintain the invariant that allows us to provide an
        // iterator over `T` rather than `Option<T>`.
for (index, b) in new_underlying.iter().enumerate() {
if b.1.is_none() {
return Err(InvalidPatch::InvalidIndex {
object_id: object_id.clone(),
index,
});
}
}
let new_sequence = DiffableSequence {
underlying: new_underlying,
};
Ok(DiffApplicationResult::pure(new_sequence).with_changes(changes))
}
pub(super) fn remove(&mut self, index: usize) -> T {
let a = self.underlying.remove(index);
a.1.unwrap()
}
pub(super) fn len(&self) -> usize {
self.underlying.len()
}
pub(super) fn update(&self, index: usize, value: T) -> Self {
DiffableSequence {
underlying: self
.underlying
.update(index, Box::new((value.default_opid(), Some(value)))),
}
}
pub(super) fn get(&self, index: usize) -> Option<&T> {
self.underlying.get(index).and_then(|b| b.1.as_ref())
}
pub(super) fn insert(&mut self, index: usize, value: T) {
self.underlying
.insert(index, Box::new((value.default_opid(), Some(value))))
}
pub(super) fn mutate<F>(&mut self, index: usize, f: F)
where
F: FnOnce(T) -> T,
{
if let Some(entry) = self.underlying.get_mut(index) {
if let Some(v) = entry.1.take() {
entry.1 = Some(f(v));
}
}
}
pub(super) fn iter(&self) -> impl std::iter::Iterator<Item = &T> {
// Making this unwrap safe is the entire point of this data structure
self.underlying.iter().map(|b| b.1.as_ref().unwrap())
}
}
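The net effect of apply_diff is a two-phase protocol: edits insert (op_id, None) placeholders, the props loop fills them in, and the final scan rejects any patch that leaves a None behind. That is what lets iter and remove unwrap unconditionally. A module-internal sketch, where op1, op2, v1, and v2 are hypothetical:

// new_from never creates placeholders, so the invariant holds trivially here.
let seq = DiffableSequence::new_from(vec![(op1, v1), (op2, v2)]);
assert_eq!(seq.len(), 2);
for value in seq.iter() {
    // `value` is &T, never Option<&T>: an unfilled placeholder cannot survive
    // apply_diff, which returns InvalidPatch::InvalidIndex instead.
    let _ = value;
}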

View file

@ -0,0 +1,168 @@
use super::{
DiffApplicationResult, MultiValue, StateTree, StateTreeChange, StateTreeComposite,
StateTreeList, StateTreeMap, StateTreeTable, StateTreeValue,
};
#[derive(Clone)]
pub(crate) struct Focus(FocusInner);
impl Focus {
pub(super) fn update(&self, diffapp: DiffApplicationResult<MultiValue>) -> StateTree {
match &self.0 {
FocusInner::Root(root) => root.update(diffapp),
FocusInner::Map(mapfocus) => mapfocus.update(diffapp),
FocusInner::Table(tablefocus) => tablefocus.update(diffapp),
FocusInner::List(listfocus) => listfocus.update(diffapp),
}
}
pub fn new_root(root_tree: StateTree, key: String) -> Focus {
Focus(FocusInner::Root(RootFocus {
root: root_tree,
key,
}))
}
pub(super) fn new_map(
state_tree: StateTree,
map: StateTreeMap,
key: String,
multivalue: MultiValue,
) -> Focus {
Focus(FocusInner::Map(MapFocus {
state_tree,
key,
map,
multivalue,
}))
}
pub(super) fn new_table(
state_tree: StateTree,
table: StateTreeTable,
key: String,
multivalue: MultiValue,
) -> Focus {
Focus(FocusInner::Table(TableFocus {
state_tree,
key,
table,
multivalue,
}))
}
pub(super) fn new_list(
state_tree: StateTree,
list: StateTreeList,
index: usize,
multivalue: MultiValue,
) -> Focus {
Focus(FocusInner::List(ListFocus {
state_tree,
index,
list,
multivalue,
}))
}
}
#[derive(Clone)]
enum FocusInner {
Root(RootFocus),
Map(MapFocus),
Table(TableFocus),
List(ListFocus),
}
#[derive(Clone)]
struct RootFocus {
root: StateTree,
key: String,
}
impl RootFocus {
fn update(&self, diffapp: DiffApplicationResult<MultiValue>) -> StateTree {
self.root.update(self.key.clone(), diffapp)
}
}
#[derive(Clone)]
struct MapFocus {
state_tree: StateTree,
key: String,
map: StateTreeMap,
multivalue: MultiValue,
}
impl MapFocus {
fn update(&self, diffapp: DiffApplicationResult<MultiValue>) -> StateTree {
let new_diffapp = diffapp.and_then(|v| {
let updated = StateTreeComposite::Map(StateTreeMap {
object_id: self.map.object_id.clone(),
props: self.map.props.update(self.key.clone(), v),
});
DiffApplicationResult::pure(
self.multivalue
.update_default(StateTreeValue::Link(updated.object_id())),
)
.with_changes(StateTreeChange::single(self.map.object_id.clone(), updated))
});
self.state_tree.apply(new_diffapp.change)
}
}
#[derive(Clone)]
struct TableFocus {
state_tree: StateTree,
key: String,
table: StateTreeTable,
multivalue: MultiValue,
}
impl TableFocus {
fn update(&self, diffapp: DiffApplicationResult<MultiValue>) -> StateTree {
let new_diffapp = diffapp.and_then(|v| {
let updated = StateTreeComposite::Table(StateTreeTable {
object_id: self.table.object_id.clone(),
props: self.table.props.update(self.key.clone(), v),
});
DiffApplicationResult::pure(
self.multivalue
.update_default(StateTreeValue::Link(updated.object_id())),
)
.with_changes(StateTreeChange::single(
self.table.object_id.clone(),
updated,
))
});
self.state_tree.apply(new_diffapp.change)
}
}
#[derive(Clone)]
struct ListFocus {
state_tree: StateTree,
index: usize,
list: StateTreeList,
multivalue: MultiValue,
}
impl ListFocus {
fn update(&self, diffapp: DiffApplicationResult<MultiValue>) -> StateTree {
let new_diffapp = diffapp.and_then(|v| {
let updated = StateTreeComposite::List(StateTreeList {
object_id: self.list.object_id.clone(),
elements: self.list.elements.update(self.index, v),
});
DiffApplicationResult::pure(
self.multivalue
.update_default(StateTreeValue::Link(updated.object_id())),
)
.with_changes(StateTreeChange::single(
self.list.object_id.clone(),
updated,
))
});
self.state_tree.apply(new_diffapp.change)
}
}
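Each Focus variant captures just enough context (the containing object plus the key or index) to splice an updated MultiValue back in and rebuild the spine of the immutable tree up to the root. A hedged, crate-internal sketch with hypothetical tree, list, mv, and updated_value:

// Point a focus at index 2 of a list, then write an updated value through it.
let focus = Focus::new_list(tree.clone(), list, 2, mv);
let new_tree: StateTree = focus.update(DiffApplicationResult::pure(updated_value));
// Untouched branches are shared structurally with `tree` (im_rc is persistent).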

File diff suppressed because it is too large

View file

@ -0,0 +1,599 @@
use std::iter::Iterator;
use automerge_protocol as amp;
use unicode_segmentation::UnicodeSegmentation;
use super::{
CursorState, Cursors, DiffApplicationResult, DiffToApply, DiffableSequence, StateTreeChange,
StateTreeComposite, StateTreeList, StateTreeMap, StateTreeTable, StateTreeText, StateTreeValue,
};
use crate::{
error,
value::{Primitive, Value},
};
pub(crate) struct NewValueRequest<'a, 'b, 'c, 'd> {
pub(crate) actor: &'a amp::ActorId,
pub(crate) start_op: u64,
pub(crate) key: &'b amp::Key,
pub(crate) value: &'c Value,
pub(crate) parent_obj: &'d amp::ObjectId,
pub(crate) insert: bool,
pub(crate) pred: Vec<amp::OpId>,
}
/// A set of conflicting values for the same key, indexed by OpID
#[derive(Debug, Clone)]
pub(super) struct MultiValue {
winning_value: (amp::OpId, StateTreeValue),
conflicts: im_rc::HashMap<amp::OpId, StateTreeValue>,
}
impl MultiValue {
pub fn new_from_diff<K>(
opid: amp::OpId,
diff: DiffToApply<K, &amp::Diff>,
) -> Result<DiffApplicationResult<MultiValue>, error::InvalidPatch>
where
K: Into<amp::Key>,
{
StateTreeValue::new_from_diff(diff)?.try_map(move |value| {
Ok(MultiValue {
winning_value: (opid, value),
conflicts: im_rc::HashMap::new(),
})
})
}
pub fn from_statetree_value(statetree_val: StateTreeValue, opid: amp::OpId) -> MultiValue {
MultiValue {
winning_value: (opid, statetree_val),
conflicts: im_rc::HashMap::new(),
}
}
pub(super) fn new_from_value_2(req: NewValueRequest) -> NewValue {
Self::new_from_value(
req.actor,
req.start_op,
req.parent_obj.clone(),
req.key,
req.value,
req.insert,
req.pred.into_iter().collect(),
)
}
pub(super) fn new_from_value(
actor: &amp::ActorId,
start_op: u64,
parent_id: amp::ObjectId,
key: &amp::Key,
value: &Value,
insert: bool,
pred: Vec<amp::OpId>,
) -> NewValue {
NewValueContext {
start_op,
actor,
key,
insert,
pred,
parent_obj: &parent_id,
}
.create(value)
}
pub(super) fn apply_diff<K>(
&self,
opid: &amp::OpId,
subdiff: DiffToApply<K, &amp::Diff>,
) -> Result<DiffApplicationResult<MultiValue>, error::InvalidPatch>
where
K: Into<amp::Key>,
{
self.apply_diff_iter(&mut std::iter::once((opid, subdiff)))
}
pub(super) fn apply_diff_iter<'a, 'b, 'c, 'd, I, K: 'c>(
&'a self,
diff: &mut I,
) -> Result<DiffApplicationResult<MultiValue>, error::InvalidPatch>
where
K: Into<amp::Key>,
I: Iterator<Item = (&'b amp::OpId, DiffToApply<'c, K, &'d amp::Diff>)>,
{
let mut changes = StateTreeChange::empty();
let mut updated = self.tree_values();
for (opid, subdiff) in diff {
let u = if let Some(existing_value) = updated.get(opid) {
match existing_value {
StateTreeValue::Leaf(_) => StateTreeValue::new_from_diff(subdiff),
StateTreeValue::Link(obj_id) => subdiff
.current_objects
.get(obj_id)
.expect("link to nonexistent object")
.apply_diff(&subdiff)
.map(|c| c.map(|c| StateTreeValue::Link(c.object_id()))),
}
} else {
StateTreeValue::new_from_diff(subdiff)
}?;
changes += u.change;
updated = updated.update(opid, &u.value)
}
Ok(DiffApplicationResult::pure(updated.result()).with_changes(changes))
}
pub(super) fn default_statetree_value(&self) -> StateTreeValue {
self.winning_value.1.clone()
}
pub(super) fn default_value(
&self,
objects: &im_rc::HashMap<amp::ObjectId, StateTreeComposite>,
) -> Value {
self.winning_value.1.realise_value(objects)
}
pub(super) fn default_opid(&self) -> amp::OpId {
self.winning_value.0.clone()
}
pub(super) fn update_default(&self, val: StateTreeValue) -> MultiValue {
MultiValue {
winning_value: (self.winning_value.0.clone(), val),
conflicts: self.conflicts.clone(),
}
}
fn tree_values(&self) -> MultiValueTreeValues {
MultiValueTreeValues {
current: self.clone(),
}
}
pub(super) fn realise_values(
&self,
objects: &im_rc::HashMap<amp::ObjectId, StateTreeComposite>,
) -> std::collections::HashMap<amp::OpId, Value> {
self.tree_values()
.iter()
.map(|(opid, v)| (opid.clone(), v.realise_value(objects)))
.collect()
}
pub(super) fn opids(&self) -> impl Iterator<Item = &amp::OpId> {
std::iter::once(&self.winning_value.0).chain(self.conflicts.keys())
}
pub(super) fn has_opid(&self, opid: &amp::OpId) -> bool {
self.opids().any(|o| o == opid)
}
}
#[derive(Clone)]
struct MultiValueTreeValues {
current: MultiValue,
}
impl MultiValueTreeValues {
fn get(&self, opid: &amp::OpId) -> Option<&StateTreeValue> {
if opid == &self.current.winning_value.0 {
Some(&self.current.winning_value.1)
} else {
self.current.conflicts.get(opid)
}
}
fn iter(&self) -> impl std::iter::Iterator<Item = (&amp::OpId, &StateTreeValue)> {
std::iter::once((
&(self.current.winning_value).0,
&(self.current.winning_value.1),
))
.chain(self.current.conflicts.iter())
}
fn update(mut self, key: &amp::OpId, value: &StateTreeValue) -> MultiValueTreeValues {
if *key >= self.current.winning_value.0 {
self.current
.conflicts
.insert(self.current.winning_value.0, self.current.winning_value.1);
self.current.winning_value.0 = key.clone();
self.current.winning_value.1 = value.clone();
} else {
self.current.conflicts.insert(key.clone(), value.clone());
}
self
}
fn result(self) -> MultiValue {
self.current
}
}
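The update method above is the conflict rule in miniature: an incoming OpId that is >= the current winner takes the winning slot and demotes the old winner into conflicts, while a lower OpId goes straight into conflicts. A module-internal sketch with hypothetical actors and values:

let low = actor_a.op_id_at(1);
let high = actor_b.op_id_at(2);
let merged = MultiValue::from_statetree_value(v_low, low)
    .tree_values()
    .update(&high, &v_high)
    .result();
// v_high wins; v_low remains reachable as a conflict.
assert_eq!(merged.default_opid(), high);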
#[derive(Debug)]
pub(super) struct NewValue {
value: StateTreeValue,
opid: amp::OpId,
ops: Vec<amp::Op>,
new_objects: im_rc::HashMap<amp::ObjectId, StateTreeComposite>,
new_cursors: Cursors,
max_op: u64,
}
impl NewValue {
pub(super) fn ops(self) -> Vec<amp::Op> {
self.ops
}
fn multivalue(&self) -> MultiValue {
MultiValue::from_statetree_value(self.value.clone(), self.opid.clone())
}
pub(super) fn diff_app_result(&self) -> DiffApplicationResult<MultiValue> {
DiffApplicationResult::pure(self.multivalue()).with_changes(
StateTreeChange::from_updates(self.new_objects.clone())
.with_cursors(self.new_cursors.clone()),
)
}
}
/// This struct exists to constrain the values of a text type so that each
/// element is a single grapheme cluster
#[derive(Debug, Clone)]
pub(super) struct MultiGrapheme {
winning_value: (amp::OpId, String),
conflicts: Option<im_rc::HashMap<amp::OpId, String>>,
}
impl MultiGrapheme {
pub(super) fn new_from_grapheme_cluster(opid: amp::OpId, s: String) -> MultiGrapheme {
debug_assert_eq!(s.graphemes(true).count(), 1);
MultiGrapheme {
winning_value: (opid, s),
conflicts: None,
}
}
pub(super) fn new_from_diff<K>(
opid: &amp::OpId,
diff: DiffToApply<K, &amp::Diff>,
) -> Result<MultiGrapheme, error::InvalidPatch>
where
K: Into<amp::Key>,
{
let winning_value = match diff.diff {
amp::Diff::Value(amp::ScalarValue::Str(s)) => {
if s.graphemes(true).count() != 1 {
return Err(error::InvalidPatch::InsertNonTextInTextObject {
object_id: diff.parent_object_id.clone(),
diff: diff.diff.clone(),
});
} else {
s.clone()
}
}
_ => {
return Err(error::InvalidPatch::InsertNonTextInTextObject {
object_id: diff.parent_object_id.clone(),
diff: diff.diff.clone(),
});
}
};
Ok(MultiGrapheme {
winning_value: (opid.clone(), winning_value),
conflicts: None,
})
}
pub(super) fn apply_diff<K>(
&self,
opid: &amp::OpId,
diff: DiffToApply<K, &amp::Diff>,
) -> Result<MultiGrapheme, error::InvalidPatch>
where
K: Into<amp::Key>,
{
self.apply_diff_iter(&mut std::iter::once((opid, diff)))
.map(|d| d.value)
}
pub(super) fn apply_diff_iter<'a, 'b, 'c, 'd, I, K: 'c>(
&'a self,
diff: &mut I,
) -> Result<DiffApplicationResult<MultiGrapheme>, error::InvalidPatch>
where
K: Into<amp::Key>,
I: Iterator<Item = (&'b amp::OpId, DiffToApply<'c, K, &'d amp::Diff>)>,
{
let mut updated = self.values();
for (opid, subdiff) in diff {
match subdiff.diff {
amp::Diff::Value(amp::ScalarValue::Str(s)) => {
if s.graphemes(true).count() != 1 {
return Err(error::InvalidPatch::InsertNonTextInTextObject {
object_id: subdiff.parent_object_id.clone(),
diff: subdiff.diff.clone(),
});
} else {
updated = updated.update(opid, s.clone());
}
}
_ => {
return Err(error::InvalidPatch::InsertNonTextInTextObject {
object_id: subdiff.parent_object_id.clone(),
diff: subdiff.diff.clone(),
})
}
}
}
Ok(DiffApplicationResult::pure(updated.result()))
}
pub(super) fn default_grapheme(&self) -> String {
self.winning_value.1.clone()
}
pub fn default_opid(&self) -> &amp::OpId {
&self.winning_value.0
}
fn values(&self) -> MultiGraphemeValues {
MultiGraphemeValues {
current: self.clone(),
}
}
pub(super) fn has_opid(&self, opid: &amp::OpId) -> bool {
if let Some(ref conflicts) = self.conflicts {
let mut opids = std::iter::once(&self.winning_value.0).chain(conflicts.keys());
opids.any(|o| o == opid)
} else {
self.winning_value.0 == *opid
}
}
}
struct MultiGraphemeValues {
current: MultiGrapheme,
}
impl MultiGraphemeValues {
fn update(mut self, key: &amp::OpId, value: String) -> MultiGraphemeValues {
let mut conflicts = self.current.conflicts.unwrap_or_else(im_rc::HashMap::new);
if *key >= self.current.winning_value.0 {
conflicts.insert(self.current.winning_value.0, self.current.winning_value.1);
self.current.winning_value.0 = key.clone();
self.current.winning_value.1 = value;
} else {
conflicts.insert(key.clone(), value);
}
self.current.conflicts = Some(conflicts);
self
}
fn result(self) -> MultiGrapheme {
self.current
}
}
#[derive(Clone)]
pub(crate) struct NewValueContext<'a, 'b, O>
where
O: Into<amp::ObjectId>,
O: Clone,
{
pub(crate) actor: &'a amp::ActorId,
pub(crate) start_op: u64,
pub(crate) key: &'b amp::Key,
pub(crate) parent_obj: O,
pub(crate) insert: bool,
pub(crate) pred: Vec<amp::OpId>,
}
impl<'a, 'b, O> NewValueContext<'a, 'b, O>
where
O: Into<amp::ObjectId>,
O: Clone,
{
fn create(self, value: &Value) -> NewValue {
match value {
Value::Map(props, map_type) => self.new_map_or_table(props, map_type),
Value::Sequence(values) => self.new_list(values),
Value::Text(graphemes) => self.new_text(graphemes),
Value::Primitive(p) => self.new_primitive(p),
}
}
fn new_map_or_table(
self,
props: &std::collections::HashMap<String, Value>,
map_type: &amp::MapType,
) -> NewValue {
let make_op_id = amp::OpId(self.start_op, self.actor.clone());
let make_op = amp::Op {
action: amp::OpType::Make(amp::ObjType::Map(*map_type)),
obj: self.parent_obj.clone().into(),
key: self.key.clone(),
insert: self.insert,
pred: self.pred.clone(),
};
let mut ops = vec![make_op];
let mut current_max_op = self.start_op;
let mut cursors = Cursors::new();
let mut objects: im_rc::HashMap<amp::ObjectId, StateTreeComposite> = im_rc::HashMap::new();
let mut result_props: im_rc::HashMap<String, MultiValue> = im_rc::HashMap::new();
for (prop, value) in props.iter() {
let context = NewValueContext {
actor: self.actor,
parent_obj: &make_op_id,
start_op: current_max_op + 1,
key: &prop.into(),
pred: Vec::new(),
insert: false,
};
let next_value = context.create(value);
current_max_op = next_value.max_op;
cursors = next_value.new_cursors.clone().union(cursors);
objects = next_value.new_objects.clone().union(objects.clone());
ops.extend_from_slice(&next_value.ops[..]);
result_props = result_props.update(prop.clone(), next_value.multivalue())
}
let map = match map_type {
amp::MapType::Map => StateTreeComposite::Map(StateTreeMap {
object_id: make_op_id.clone().into(),
props: result_props,
}),
amp::MapType::Table => StateTreeComposite::Table(StateTreeTable {
object_id: make_op_id.clone().into(),
props: result_props,
}),
};
let value = StateTreeValue::Link(make_op_id.clone().into());
objects = objects.update(make_op_id.clone().into(), map);
NewValue {
value,
opid: make_op_id,
max_op: current_max_op,
new_cursors: cursors,
new_objects: objects,
ops,
}
}
fn new_list(self, values: &[Value]) -> NewValue {
let make_list_opid = amp::OpId::new(self.start_op, self.actor);
let make_op = amp::Op {
action: amp::OpType::Make(amp::ObjType::list()),
obj: self.parent_obj.into(),
key: self.key.clone(),
insert: self.insert,
pred: self.pred,
};
let mut ops = vec![make_op];
let mut current_max_op = self.start_op;
let mut cursors = Cursors::new();
let mut objects = im_rc::HashMap::new();
let mut result_elems: Vec<(amp::OpId, MultiValue)> = Vec::with_capacity(values.len());
let mut last_elemid = amp::ElementId::Head;
for value in values.iter() {
let elem_opid = self.actor.op_id_at(current_max_op + 1);
let context = NewValueContext {
start_op: current_max_op + 1,
pred: Vec::new(),
insert: true,
key: &last_elemid.into(),
actor: self.actor,
parent_obj: make_list_opid.clone(),
};
last_elemid = elem_opid.clone().into();
let next_value = context.create(value);
current_max_op = next_value.max_op;
result_elems.push((elem_opid, next_value.multivalue()));
objects = next_value.new_objects.union(objects.clone());
cursors = next_value.new_cursors.union(cursors);
ops.extend(next_value.ops);
}
let list = StateTreeComposite::List(StateTreeList {
object_id: make_list_opid.clone().into(),
elements: DiffableSequence::new_from(result_elems),
});
objects = objects.update(make_list_opid.clone().into(), list);
let value = StateTreeValue::Link(make_list_opid.clone().into());
NewValue {
value,
opid: make_list_opid,
max_op: current_max_op,
new_cursors: cursors,
new_objects: objects,
ops,
}
}
fn new_text(self, graphemes: &[String]) -> NewValue {
let make_text_opid = self.actor.op_id_at(self.start_op);
let mut ops: Vec<amp::Op> = vec![amp::Op {
action: amp::OpType::Make(amp::ObjType::text()),
obj: self.parent_obj.into(),
key: self.key.clone(),
insert: self.insert,
pred: self.pred,
}];
let mut current_max_op = self.start_op;
let mut last_elemid = amp::ElementId::Head;
let mut multigraphemes: Vec<(amp::OpId, MultiGrapheme)> =
Vec::with_capacity(graphemes.len());
for grapheme in graphemes.iter() {
current_max_op += 1;
let opid = self.actor.op_id_at(current_max_op);
let op = amp::Op {
action: amp::OpType::Set(amp::ScalarValue::Str(grapheme.clone())),
obj: make_text_opid.clone().into(),
key: last_elemid.clone().into(),
insert: true,
pred: Vec::new(),
};
multigraphemes.push((
opid.clone(),
MultiGrapheme::new_from_grapheme_cluster(opid.clone(), grapheme.clone()),
));
ops.push(op);
last_elemid = opid.clone().into();
}
let seq = DiffableSequence::new_from(multigraphemes);
let text = StateTreeComposite::Text(StateTreeText {
object_id: make_text_opid.clone().into(),
graphemes: seq,
});
let value = StateTreeValue::Link(make_text_opid.clone().into());
NewValue {
value,
opid: make_text_opid.clone(),
ops,
new_objects: im_rc::hashmap! {make_text_opid.into() => text},
new_cursors: Cursors::new(),
max_op: current_max_op,
}
}
fn new_primitive(self, primitive: &Primitive) -> NewValue {
let new_cursors = match primitive {
Primitive::Cursor(c) => Cursors::new_from(CursorState {
index: c.index as usize,
referring_object_id: self.parent_obj.clone().into(),
referring_key: self.key.clone(),
referred_opid: c.elem_opid.clone(),
referred_object_id: c.object.clone(),
}),
_ => Cursors::new(),
};
let value = match primitive {
Primitive::Str(s) => amp::ScalarValue::Str(s.clone()),
Primitive::Int(i) => amp::ScalarValue::Int(*i),
Primitive::Uint(u) => amp::ScalarValue::Uint(*u),
Primitive::F64(f) => amp::ScalarValue::F64(*f),
Primitive::F32(f) => amp::ScalarValue::F32(*f),
Primitive::Counter(i) => amp::ScalarValue::Counter(*i),
Primitive::Timestamp(t) => amp::ScalarValue::Timestamp(*t),
Primitive::Boolean(b) => amp::ScalarValue::Boolean(*b),
Primitive::Cursor(c) => amp::ScalarValue::Cursor(c.elem_opid.clone()),
Primitive::Null => amp::ScalarValue::Null,
};
let opid = self.actor.op_id_at(self.start_op);
NewValue {
value: StateTreeValue::Leaf(primitive.clone()),
opid,
ops: vec![amp::Op {
action: amp::OpType::Set(value),
obj: self.parent_obj.into(),
key: self.key.clone(),
insert: self.insert,
pred: self.pred.clone(),
}],
max_op: self.start_op,
new_cursors,
new_objects: im_rc::HashMap::new(),
}
}
}
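create walks the Value depth-first, handing each child start_op = current_max_op + 1, so the resulting op list is contiguous: one make op for the container followed by the ops for its contents. A crate-internal sketch (actor is hypothetical; hashmap! is the maplit macro used elsewhere in this diff):

let new_value = NewValueContext {
    actor: &actor,
    start_op: 1,
    key: &"birds".into(),
    parent_obj: amp::ObjectId::Root,
    insert: false,
    pred: Vec::new(),
}
.create(&Value::Map(
    hashmap! {"wrens".to_string() => Value::Primitive(Primitive::Int(3))},
    amp::MapType::Map,
));
// Op 1 makes the map, op 2 sets "wrens" inside it.
assert_eq!(new_value.ops().len(), 2);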

View file

@ -0,0 +1,633 @@
use std::convert::TryInto;
use automerge_protocol as amp;
use super::{
focus::Focus, random_op_id, DiffApplicationResult, LocalOperationResult, MultiGrapheme,
MultiValue, NewValueRequest, StateTree, StateTreeChange, StateTreeComposite, StateTreeList,
StateTreeMap, StateTreeTable, StateTreeText, StateTreeValue,
};
use crate::{error, Cursor, Primitive, Value};
#[derive(Debug)]
pub struct ResolvedPath<'a> {
root: &'a StateTree,
pub(crate) target: Target,
}
pub enum Target {
Root(ResolvedRoot),
Map(ResolvedMap),
Table(ResolvedTable),
List(ResolvedList),
Text(ResolvedText),
Character(ResolvedChar),
Counter(ResolvedCounter),
Primitive(ResolvedPrimitive),
}
impl std::fmt::Debug for Target {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
Target::Map(maptarget) => write!(f, "MapTarget {:?}", maptarget.value.object_id),
Target::Root(_) => write!(f, "Root"),
Target::Table(tabletarget) => {
write!(f, "Table {:?}", tabletarget.value.object_id)
}
Target::List(listtarget) => write!(f, "list {:?}", listtarget.value.object_id),
Target::Text(texttarget) => write!(f, "text {:?}", texttarget.value.object_id),
Target::Counter(countertarget) => write!(
f,
"counter {0}:{1:?}",
countertarget.containing_object_id, countertarget.key_in_container
),
Target::Primitive(p) => write!(f, "primitive: {:?}", p.multivalue),
Target::Character(ctarget) => write!(f, "character {:?}", ctarget.multivalue),
}
}
}
impl<'a> ResolvedPath<'a> {
pub(super) fn new_root(tree: &StateTree) -> ResolvedPath {
ResolvedPath {
root: tree,
target: Target::Root(ResolvedRoot { root: tree.clone() }),
}
}
pub(super) fn new_map(
tree: &StateTree,
mv: MultiValue,
focus: Focus,
map: StateTreeMap,
) -> ResolvedPath {
ResolvedPath {
root: tree,
target: Target::Map(ResolvedMap {
multivalue: mv,
value: map,
focus,
}),
}
}
pub(super) fn new_list(
tree: &StateTree,
mv: MultiValue,
focus: Focus,
list: StateTreeList,
) -> ResolvedPath {
ResolvedPath {
root: tree,
target: Target::List(ResolvedList {
multivalue: mv,
focus,
value: list,
}),
}
}
pub(super) fn new_text(
tree: &StateTree,
mv: MultiValue,
update: Box<dyn Fn(DiffApplicationResult<MultiValue>) -> StateTree>,
text: StateTreeText,
) -> ResolvedPath {
ResolvedPath {
root: tree,
target: Target::Text(ResolvedText {
multivalue: mv,
value: text,
update,
}),
}
}
pub(super) fn new_table(
tree: &StateTree,
mv: MultiValue,
focus: Focus,
table: StateTreeTable,
) -> ResolvedPath {
ResolvedPath {
root: tree,
target: Target::Table(ResolvedTable {
multivalue: mv,
focus,
value: table,
}),
}
}
pub(super) fn new_counter(
tree: &StateTree,
object_id: amp::ObjectId,
key: amp::Key,
mv: MultiValue,
focus: Focus,
value: i64,
) -> ResolvedPath {
ResolvedPath {
root: tree,
target: Target::Counter(ResolvedCounter {
multivalue: mv,
key_in_container: key,
containing_object_id: object_id,
current_value: value,
focus,
}),
}
}
pub(super) fn new_primitive(tree: &StateTree, value: MultiValue) -> ResolvedPath {
ResolvedPath {
root: tree,
target: Target::Primitive(ResolvedPrimitive { multivalue: value }),
}
}
pub(super) fn new_character(tree: &StateTree, c: MultiValue) -> ResolvedPath {
ResolvedPath {
root: tree,
target: Target::Character(ResolvedChar { multivalue: c }),
}
}
pub fn default_value(&self) -> Value {
match &self.target {
Target::Map(maptarget) => maptarget.multivalue.default_value(&self.root.objects),
Target::Root(root) => root.root.value(),
Target::Table(tabletarget) => tabletarget.multivalue.default_value(&self.root.objects),
Target::List(listtarget) => listtarget.multivalue.default_value(&self.root.objects),
Target::Text(texttarget) => texttarget.multivalue.default_value(&self.root.objects),
Target::Counter(countertarget) => {
countertarget.multivalue.default_value(&self.root.objects)
}
Target::Primitive(p) => p.multivalue.default_value(&self.root.objects),
Target::Character(ctarget) => ctarget.multivalue.default_value(&self.root.objects),
}
}
pub fn values(&self) -> std::collections::HashMap<amp::OpId, Value> {
match &self.target {
Target::Map(maptarget) => maptarget.multivalue.realise_values(&self.root.objects),
Target::Root(root) => {
let mut result = std::collections::HashMap::new();
result.insert(random_op_id(), root.root.value());
result
}
Target::Table(tabletarget) => tabletarget.multivalue.realise_values(&self.root.objects),
Target::List(listtarget) => listtarget.multivalue.realise_values(&self.root.objects),
Target::Text(texttarget) => texttarget.multivalue.realise_values(&self.root.objects),
Target::Counter(countertarget) => {
countertarget.multivalue.realise_values(&self.root.objects)
}
Target::Primitive(p) => p.multivalue.realise_values(&self.root.objects),
Target::Character(ctarget) => ctarget.multivalue.realise_values(&self.root.objects),
}
}
pub fn object_id(&self) -> Option<amp::ObjectId> {
match &self.target {
Target::Map(maptarget) => Some(maptarget.value.object_id.clone()),
Target::Root(_) => Some(amp::ObjectId::Root),
Target::Table(tabletarget) => Some(tabletarget.value.object_id.clone()),
Target::List(listtarget) => Some(listtarget.value.object_id.clone()),
Target::Text(texttarget) => Some(texttarget.value.object_id.clone()),
Target::Counter(_) => None,
Target::Primitive(_) => None,
Target::Character(_) => None,
}
}
}
pub(crate) struct SetOrInsertPayload<'a, T> {
pub start_op: u64,
pub actor: &'a amp::ActorId,
pub value: T,
}
pub struct ResolvedRoot {
pub(super) root: StateTree,
}
impl ResolvedRoot {
pub(crate) fn set_key(
&self,
key: &str,
payload: SetOrInsertPayload<&Value>,
) -> LocalOperationResult {
let newvalue = MultiValue::new_from_value_2(NewValueRequest {
actor: payload.actor,
start_op: payload.start_op,
key: &key.into(),
parent_obj: &amp::ObjectId::Root,
value: payload.value,
insert: false,
pred: self
.root
.get(key)
.map(|mv| vec![mv.default_opid()])
.unwrap_or_else(Vec::new),
});
let new_state = self
.root
.update(key.to_string(), newvalue.diff_app_result());
LocalOperationResult {
new_state,
new_ops: newvalue.ops(),
}
}
pub(crate) fn delete_key(&self, key: &str) -> LocalOperationResult {
let existing_value = self.root.get(key);
let pred = existing_value
.map(|v| vec![v.default_opid()])
.unwrap_or_else(Vec::new);
LocalOperationResult {
new_state: self.root.remove(key),
new_ops: vec![amp::Op {
action: amp::OpType::Del,
obj: amp::ObjectId::Root,
key: key.into(),
insert: false,
pred,
}],
}
}
}
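Every mutation on a resolved target returns a LocalOperationResult: the rebuilt tree for the frontend plus the ops destined for a backend. A crate-internal sketch against the root, with hypothetical root and actor:

let result = root.set_key(
    "bird",
    SetOrInsertPayload { start_op: 1, actor: &actor, value: &"magpie".into() },
);
// result.new_state is the updated StateTree; result.new_ops is a single Set op
// against ObjectId::Root with an empty pred, since the key did not exist before.
assert_eq!(result.new_ops.len(), 1);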
pub struct ResolvedCounter {
pub(super) current_value: i64,
pub(super) multivalue: MultiValue,
pub(super) containing_object_id: amp::ObjectId,
pub(super) key_in_container: amp::Key,
pub(super) focus: Focus,
}
impl ResolvedCounter {
pub(crate) fn increment(&self, by: i64) -> LocalOperationResult {
let diffapp = DiffApplicationResult::pure(self.multivalue.update_default(
StateTreeValue::Leaf(Primitive::Counter(self.current_value + by)),
));
let new_state = self.focus.update(diffapp);
LocalOperationResult {
new_state,
new_ops: vec![amp::Op {
action: amp::OpType::Inc(by),
obj: self.containing_object_id.clone(),
key: self.key_in_container.clone(),
insert: false,
pred: vec![self.multivalue.default_opid()],
}],
}
}
}
pub struct ResolvedMap {
pub(super) value: StateTreeMap,
pub(super) multivalue: MultiValue,
pub(super) focus: Focus,
}
impl ResolvedMap {
pub(crate) fn set_key(
&self,
key: &str,
payload: SetOrInsertPayload<&Value>,
) -> LocalOperationResult {
let newvalue = MultiValue::new_from_value_2(NewValueRequest {
actor: payload.actor,
start_op: payload.start_op,
parent_obj: &self.value.object_id,
key: &key.into(),
value: payload.value,
insert: false,
pred: self.value.pred_for_key(key),
});
let diffapp = newvalue.diff_app_result().and_then(|v| {
let new_value = self.value.update(key.to_string(), v);
let new_composite = StateTreeComposite::Map(new_value);
let new_mv = self
.multivalue
.update_default(StateTreeValue::Link(new_composite.object_id()));
DiffApplicationResult::pure(new_mv).with_changes(StateTreeChange::single(
self.value.object_id.clone(),
new_composite,
))
});
LocalOperationResult {
new_state: self.focus.update(diffapp),
new_ops: newvalue.ops(),
}
}
pub(crate) fn delete_key(&self, key: &str) -> LocalOperationResult {
let new_value = self.value.without(key);
let new_composite = StateTreeComposite::Map(new_value);
let new_mv = self
.multivalue
.update_default(StateTreeValue::Link(new_composite.object_id()));
let diffapp = DiffApplicationResult::pure(new_mv).with_changes(StateTreeChange::single(
new_composite.object_id(),
new_composite,
));
LocalOperationResult {
new_state: self.focus.update(diffapp),
new_ops: vec![amp::Op {
action: amp::OpType::Del,
obj: self.value.object_id.clone(),
key: key.into(),
insert: false,
pred: self.value.pred_for_key(key),
}],
}
}
}
pub struct ResolvedTable {
pub(super) value: StateTreeTable,
pub(super) multivalue: MultiValue,
pub(super) focus: Focus,
}
impl ResolvedTable {
pub(crate) fn set_key(
&self,
key: &str,
payload: SetOrInsertPayload<&Value>,
) -> LocalOperationResult {
let newvalue = MultiValue::new_from_value_2(NewValueRequest {
actor: payload.actor,
start_op: payload.start_op,
parent_obj: &self.value.object_id,
key: &key.into(),
value: payload.value,
insert: false,
pred: self.value.pred_for_key(key),
});
let treechange = newvalue.diff_app_result().and_then(|v| {
let new_value = self.value.update(key.to_string(), v);
let new_composite = StateTreeComposite::Table(new_value);
let new_mv = self
.multivalue
.update_default(StateTreeValue::Link(new_composite.object_id()));
DiffApplicationResult::pure(new_mv).with_changes(StateTreeChange::single(
self.value.object_id.clone(),
new_composite,
))
});
LocalOperationResult {
new_state: self.focus.update(treechange),
new_ops: newvalue.ops(),
}
}
pub(crate) fn delete_key(&self, key: &str) -> LocalOperationResult {
let new_value = self.value.without(key);
let new_composite = StateTreeComposite::Table(new_value);
let new_mv = self
.multivalue
.update_default(StateTreeValue::Link(new_composite.object_id()));
let diffapp = DiffApplicationResult::pure(new_mv).with_changes(StateTreeChange::single(
new_composite.object_id(),
new_composite,
));
LocalOperationResult {
new_state: self.focus.update(diffapp),
new_ops: vec![amp::Op {
action: amp::OpType::Del,
obj: self.value.object_id.clone(),
key: key.into(),
insert: false,
pred: self.value.pred_for_key(key),
}],
}
}
}
pub struct ResolvedText {
pub(super) value: StateTreeText,
pub(super) multivalue: MultiValue,
pub(super) update: Box<dyn Fn(DiffApplicationResult<MultiValue>) -> StateTree>,
}
impl ResolvedText {
pub(crate) fn insert(
&self,
index: u32,
payload: SetOrInsertPayload<String>,
) -> Result<LocalOperationResult, error::MissingIndexError> {
let current_elemid = match index {
0 => amp::ElementId::Head,
i => self.value.elem_at((i - 1).try_into().unwrap())?.0.into(),
};
let insert_op = amp::OpId::new(payload.start_op, payload.actor);
let c = MultiGrapheme::new_from_grapheme_cluster(insert_op, payload.value.clone());
let new_text = self.value.insert(index.try_into().unwrap(), c)?;
let updated = StateTreeComposite::Text(new_text);
let mv = self
.multivalue
.update_default(StateTreeValue::Link(updated.object_id()));
let treechange = DiffApplicationResult::pure(mv).with_changes(StateTreeChange::single(
self.value.object_id.clone(),
updated,
));
Ok(LocalOperationResult {
new_state: (self.update)(treechange),
new_ops: vec![amp::Op {
action: amp::OpType::Set(amp::ScalarValue::Str(payload.value)),
obj: self.value.object_id.clone(),
key: current_elemid.into(),
insert: true,
pred: Vec::new(),
}],
})
}
pub(crate) fn set(
&self,
index: u32,
payload: SetOrInsertPayload<String>,
) -> Result<LocalOperationResult, error::MissingIndexError> {
let index: usize = index.try_into().unwrap();
let (current_elemid, _) = self.value.elem_at(index)?;
let update_op = amp::OpId::new(payload.start_op, payload.actor);
let c = MultiGrapheme::new_from_grapheme_cluster(update_op, payload.value.clone());
let updated = StateTreeComposite::Text(self.value.set(index, c)?);
let mv = self
.multivalue
.update_default(StateTreeValue::Link(updated.object_id()));
let diffapp = DiffApplicationResult::pure(mv).with_changes(StateTreeChange::single(
self.value.object_id.clone(),
updated,
));
let new_state = (self.update)(diffapp);
Ok(LocalOperationResult {
new_state,
new_ops: vec![amp::Op {
action: amp::OpType::Set(amp::ScalarValue::Str(payload.value)),
obj: self.value.object_id.clone(),
key: current_elemid.into(),
pred: self.value.pred_for_index(index as u32),
insert: false,
}],
})
}
pub(crate) fn remove(
&self,
index: u32,
) -> Result<LocalOperationResult, error::MissingIndexError> {
let (current_elemid, _) = self.value.elem_at(index.try_into().unwrap())?;
let updated = StateTreeComposite::Text(self.value.remove(index.try_into().unwrap())?);
let mv = self
.multivalue
.update_default(StateTreeValue::Link(updated.object_id()));
let diffapp = DiffApplicationResult::pure(mv).with_changes(StateTreeChange::single(
self.value.object_id.clone(),
updated,
));
let new_state = (self.update)(diffapp);
Ok(LocalOperationResult {
new_state,
new_ops: vec![amp::Op {
action: amp::OpType::Del,
obj: self.value.object_id.clone(),
key: current_elemid.into(),
insert: false,
pred: self.value.pred_for_index(index as u32),
}],
})
}
pub(crate) fn get_cursor(&self, index: u32) -> Result<Cursor, error::MissingIndexError> {
let (current_elemid, _) = self.value.elem_at(index.try_into().unwrap())?;
Ok(Cursor::new(
index,
self.value.object_id.clone(),
current_elemid.clone(),
))
}
}
pub struct ResolvedList {
pub(super) value: StateTreeList,
pub(super) multivalue: MultiValue,
pub(super) focus: Focus,
}
impl ResolvedList {
pub(crate) fn set(
&self,
index: u32,
payload: SetOrInsertPayload<&Value>,
) -> Result<LocalOperationResult, error::MissingIndexError> {
let (current_elemid, _) = self.value.elem_at(index.try_into().unwrap())?;
let newvalue = MultiValue::new_from_value_2(NewValueRequest {
actor: payload.actor,
start_op: payload.start_op,
value: payload.value,
pred: self.value.pred_for_index(index),
parent_obj: &self.value.object_id.clone(),
key: &current_elemid.into(),
insert: false,
});
let treechange = newvalue.diff_app_result().try_and_then(|v| {
let new_value = StateTreeComposite::List(self.value.set(index.try_into().unwrap(), v)?);
let mv = self
.multivalue
.update_default(StateTreeValue::Link(new_value.object_id()));
Ok(
DiffApplicationResult::pure(mv).with_changes(StateTreeChange::single(
self.value.object_id.clone(),
new_value,
)),
)
})?;
let new_state = self.focus.update(treechange);
Ok(LocalOperationResult {
new_state,
new_ops: newvalue.ops(),
})
}
pub(crate) fn insert(
&self,
index: u32,
payload: SetOrInsertPayload<&Value>,
) -> Result<LocalOperationResult, error::MissingIndexError> {
let current_elemid = match index {
0 => amp::ElementId::Head,
i => self.value.elem_at((i - 1).try_into().unwrap())?.0.into(),
};
let newvalue = MultiValue::new_from_value_2(NewValueRequest {
actor: payload.actor,
start_op: payload.start_op,
value: payload.value,
parent_obj: &self.value.object_id,
key: &current_elemid.into(),
insert: true,
pred: Vec::new(),
});
let treechange = newvalue.diff_app_result().try_and_then(|v| {
let new_value =
StateTreeComposite::List(self.value.insert(index.try_into().unwrap(), v)?);
let mv = self
.multivalue
.update_default(StateTreeValue::Link(new_value.object_id()));
Ok(
DiffApplicationResult::pure(mv).with_changes(StateTreeChange::single(
self.value.object_id.clone(),
new_value,
)),
)
})?;
Ok(LocalOperationResult {
new_state: self.focus.update(treechange),
new_ops: newvalue.ops(),
})
}
pub(crate) fn remove(
&self,
index: u32,
) -> Result<LocalOperationResult, error::MissingIndexError> {
let (current_elemid, _) = self.value.elem_at(index.try_into().unwrap())?;
let new_value = StateTreeComposite::List(self.value.remove(index.try_into().unwrap())?);
let mv = self
.multivalue
.update_default(StateTreeValue::Link(new_value.object_id()));
let treechange = DiffApplicationResult::pure(mv).with_changes(StateTreeChange::single(
self.value.object_id.clone(),
new_value,
));
Ok(LocalOperationResult {
new_state: self.focus.update(treechange),
new_ops: vec![amp::Op {
action: amp::OpType::Del,
obj: self.value.object_id.clone(),
key: current_elemid.into(),
insert: false,
pred: self.value.pred_for_index(index),
}],
})
}
pub(crate) fn get_cursor(&self, index: u32) -> Result<Cursor, error::MissingIndexError> {
let (current_elemid, _) = self.value.elem_at(index.try_into().unwrap())?;
Ok(Cursor::new(
index,
self.value.object_id.clone(),
current_elemid,
))
}
}
pub struct ResolvedChar {
pub(super) multivalue: MultiValue,
}
pub struct ResolvedPrimitive {
pub(super) multivalue: MultiValue,
}

View file

@ -0,0 +1,75 @@
use std::ops::{Add, AddAssign};
use automerge_protocol as amp;
use super::{Cursors, StateTreeComposite};
#[derive(Clone)]
pub struct StateTreeChange {
objects: im_rc::HashMap<amp::ObjectId, StateTreeComposite>,
new_cursors: Cursors,
}
impl StateTreeChange {
pub(super) fn empty() -> StateTreeChange {
StateTreeChange {
objects: im_rc::HashMap::new(),
new_cursors: Cursors::new(),
}
}
pub(super) fn single(object_id: amp::ObjectId, object: StateTreeComposite) -> StateTreeChange {
StateTreeChange {
objects: im_rc::hashmap! {object_id => object},
new_cursors: Cursors::new(),
}
}
pub(super) fn from_updates(
objects: im_rc::HashMap<amp::ObjectId, StateTreeComposite>,
) -> StateTreeChange {
StateTreeChange {
objects,
new_cursors: Cursors::new(),
}
}
pub(super) fn with_cursors(mut self, cursors: Cursors) -> StateTreeChange {
self.new_cursors = cursors.union(self.new_cursors);
self
}
pub(super) fn objects(&self) -> im_rc::HashMap<amp::ObjectId, StateTreeComposite> {
self.objects.clone()
}
pub(super) fn new_cursors(&self) -> Cursors {
self.new_cursors.clone()
}
}
impl Add for &StateTreeChange {
type Output = StateTreeChange;
fn add(self, rhs: &StateTreeChange) -> Self::Output {
StateTreeChange {
objects: self.objects.clone().union(rhs.objects.clone()),
new_cursors: self.new_cursors.clone().union(rhs.new_cursors.clone()),
}
}
}
impl Add for StateTreeChange {
type Output = StateTreeChange;
fn add(self, rhs: StateTreeChange) -> Self::Output {
&self + &rhs
}
}
impl AddAssign for StateTreeChange {
fn add_assign(&mut self, rhs: StateTreeChange) {
self.objects = self.objects.clone().union(rhs.objects);
self.new_cursors = self.new_cursors.clone().union(rhs.new_cursors);
}
}
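Composition is left-biased: + unions the object maps, and im_rc's union keeps the left operand's entry when the same ObjectId appears on both sides, so the more recent change should sit on the left. A crate-internal sketch with hypothetical id, newer, and older:

let combined = StateTreeChange::single(id.clone(), newer) + StateTreeChange::single(id.clone(), older);
// The entry for `id` in combined.objects() is `newer`: left wins on collision.
assert!(combined.objects().contains_key(&id));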

View file

@ -0,0 +1,313 @@
use std::{borrow::Borrow, collections::HashMap};
use automerge_protocol as amp;
use serde::Serialize;
#[derive(Serialize, Clone, Debug, PartialEq)]
pub struct Conflicts(HashMap<amp::OpId, Value>);
impl From<HashMap<amp::OpId, Value>> for Conflicts {
fn from(hmap: HashMap<amp::OpId, Value>) -> Self {
Conflicts(hmap)
}
}
#[derive(Serialize, Clone, Debug, PartialEq)]
#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))]
#[serde(untagged)]
pub enum Value {
Map(HashMap<String, Value>, amp::MapType),
Sequence(Vec<Value>),
/// Sequence of grapheme clusters
Text(Vec<String>),
Primitive(Primitive),
}
#[derive(Serialize, Clone, Debug, PartialEq)]
#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))]
pub enum Primitive {
Str(String),
Int(i64),
Uint(u64),
F64(f64),
F32(f32),
Counter(i64),
Timestamp(i64),
Boolean(bool),
Cursor(Cursor),
Null,
}
#[derive(Serialize, Clone, Debug, PartialEq)]
#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))]
pub struct Cursor {
pub index: u32,
pub(crate) object: amp::ObjectId,
pub(crate) elem_opid: amp::OpId,
}
impl Cursor {
pub fn new(index: u32, obj: amp::ObjectId, op: amp::OpId) -> Cursor {
Cursor {
index,
object: obj,
elem_opid: op,
}
}
}
impl From<Cursor> for Value {
fn from(c: Cursor) -> Self {
Value::Primitive(Primitive::Cursor(c))
}
}
impl From<&Primitive> for amp::ScalarValue {
fn from(p: &Primitive) -> Self {
match p {
Primitive::Str(s) => amp::ScalarValue::Str(s.clone()),
Primitive::Int(i) => amp::ScalarValue::Int(*i),
Primitive::Uint(u) => amp::ScalarValue::Uint(*u),
Primitive::F64(f) => amp::ScalarValue::F64(*f),
Primitive::F32(f) => amp::ScalarValue::F32(*f),
Primitive::Counter(i) => amp::ScalarValue::Counter(*i),
Primitive::Timestamp(i) => amp::ScalarValue::Timestamp(*i),
Primitive::Boolean(b) => amp::ScalarValue::Boolean(*b),
Primitive::Null => amp::ScalarValue::Null,
Primitive::Cursor(c) => amp::ScalarValue::Cursor(c.elem_opid.clone()),
}
}
}
impl From<Primitive> for Value {
fn from(p: Primitive) -> Self {
Value::Primitive(p)
}
}
impl From<&str> for Value {
fn from(s: &str) -> Self {
Value::Primitive(Primitive::Str(s.to_string()))
}
}
impl From<&amp::CursorDiff> for Primitive {
fn from(diff: &amp::CursorDiff) -> Self {
Primitive::Cursor(Cursor {
index: diff.index,
object: diff.object_id.clone(),
elem_opid: diff.elem_id.clone(),
})
}
}
impl From<char> for Value {
fn from(c: char) -> Value {
Value::Primitive(Primitive::Str(c.to_string()))
}
}
impl<T> From<Vec<T>> for Value
where
T: Into<Value>,
{
fn from(v: Vec<T>) -> Self {
Value::Sequence(v.into_iter().map(|t| t.into()).collect())
}
}
impl From<i64> for Value {
fn from(v: i64) -> Self {
Value::Primitive(Primitive::Int(v))
}
}
impl<T, K> From<HashMap<K, T>> for Value
where
T: Into<Value>,
K: Borrow<str>,
{
fn from(h: HashMap<K, T>) -> Self {
Value::Map(
h.into_iter()
.map(|(k, v)| (k.borrow().to_string(), v.into()))
.collect(),
amp::MapType::Map,
)
}
}
impl Value {
pub fn from_json(json: &serde_json::Value) -> Value {
match json {
serde_json::Value::Object(kvs) => {
let result: HashMap<String, Value> = kvs
.iter()
.map(|(k, v)| (k.clone(), Value::from_json(v)))
.collect();
Value::Map(result, amp::MapType::Map)
}
serde_json::Value::Array(vs) => {
Value::Sequence(vs.iter().map(Value::from_json).collect())
}
serde_json::Value::String(s) => Value::Primitive(Primitive::Str(s.clone())),
serde_json::Value::Number(n) => {
Value::Primitive(Primitive::F64(n.as_f64().unwrap_or(0.0)))
}
serde_json::Value::Bool(b) => Value::Primitive(Primitive::Boolean(*b)),
serde_json::Value::Null => Value::Primitive(Primitive::Null),
}
}
pub fn to_json(&self) -> serde_json::Value {
match self {
Value::Map(map, _) => {
let result: serde_json::map::Map<String, serde_json::Value> =
map.iter().map(|(k, v)| (k.clone(), v.to_json())).collect();
serde_json::Value::Object(result)
}
Value::Sequence(elements) => {
serde_json::Value::Array(elements.iter().map(|v| v.to_json()).collect())
}
Value::Text(graphemes) => serde_json::Value::String(graphemes.join("")),
Value::Primitive(v) => match v {
Primitive::F64(n) => serde_json::Value::Number(
serde_json::Number::from_f64(*n).unwrap_or_else(|| serde_json::Number::from(0)),
),
Primitive::F32(n) => serde_json::Value::Number(
serde_json::Number::from_f64(f64::from(*n))
.unwrap_or_else(|| serde_json::Number::from(0)),
),
Primitive::Uint(n) => serde_json::Value::Number(serde_json::Number::from(*n)),
Primitive::Int(n) => serde_json::Value::Number(serde_json::Number::from(*n)),
Primitive::Str(s) => serde_json::Value::String(s.to_string()),
Primitive::Boolean(b) => serde_json::Value::Bool(*b),
Primitive::Counter(c) => serde_json::Value::Number(serde_json::Number::from(*c)),
Primitive::Timestamp(t) => serde_json::Value::Number(serde_json::Number::from(*t)),
Primitive::Null => serde_json::Value::Null,
Primitive::Cursor(c) => {
serde_json::Value::Number(serde_json::Number::from(c.index))
}
},
}
}
}
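The JSON mapping is deliberately lossy: every incoming JSON number becomes Primitive::F64, and counters, timestamps, and cursors all flatten to plain numbers on the way out. A short sketch:

let v = Value::from_json(&serde_json::json!({"wrens": 3, "name": "magpie"}));
// Numbers arrive as F64, so the integer 3 round-trips as 3.0.
assert_eq!(v.to_json(), serde_json::json!({"wrens": 3.0, "name": "magpie"}));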
/// Convert a value to a vector of op requests that will create said value.
///
/// # Arguments
///
/// * `actor` - The actor who is creating this value
/// * `start_op` - The start op which will be used to generate element IDs
/// * `parent_object` - The ID of the "parent" object, i.e. the object that will
///   contain the newly created object
/// * `key` - The property that the newly created object will populate
///   within the parent object
/// * `v` - The value to create
/// * `insert` - Whether the op that creates this value should be an insert
///
/// Returns a vector of the op requests which will create this value, along
/// with the next available op number.
pub(crate) fn value_to_op_requests(
actor: &amp::ActorId,
start_op: u64,
parent_object: amp::ObjectId,
key: &amp::Key,
v: &Value,
insert: bool,
) -> (Vec<amp::Op>, u64) {
match v {
Value::Sequence(vs) => {
let list_op = amp::OpId(start_op, actor.clone());
let make_op = amp::Op {
action: amp::OpType::Make(amp::ObjType::list()),
obj: parent_object,
key: key.clone(),
insert,
pred: Vec::new(),
};
let mut op_num = start_op + 1;
let mut result = vec![make_op];
let mut last_elemid = amp::ElementId::Head;
for v in vs.iter() {
let (child_requests, new_op_num) = value_to_op_requests(
actor,
op_num,
amp::ObjectId::from(list_op.clone()),
&last_elemid.clone().into(),
v,
true,
);
last_elemid = amp::OpId::new(op_num, actor).into();
op_num = new_op_num;
result.extend(child_requests);
}
(result, op_num)
}
Value::Text(chars) => {
let make_text_op = amp::OpId(start_op, actor.clone());
let make_op = amp::Op {
action: amp::OpType::Make(amp::ObjType::text()),
obj: parent_object,
key: key.clone(),
insert,
pred: Vec::new(),
};
let mut insert_ops: Vec<amp::Op> = Vec::new();
let mut last_elemid = amp::ElementId::Head;
let mut op_num = start_op + 1;
for c in chars.iter() {
insert_ops.push(amp::Op {
action: amp::OpType::Set(amp::ScalarValue::Str(c.to_string())),
obj: amp::ObjectId::from(make_text_op.clone()),
key: last_elemid.clone().into(),
insert: true,
pred: Vec::new(),
});
last_elemid = amp::OpId::new(op_num, actor).into();
op_num += 1;
}
let mut ops = vec![make_op];
ops.extend(insert_ops.into_iter());
(ops, op_num)
}
Value::Map(kvs, map_type) => {
let make_action = match map_type {
amp::MapType::Map => amp::OpType::Make(amp::ObjType::map()),
amp::MapType::Table => amp::OpType::Make(amp::ObjType::table()),
};
let make_op_id = amp::OpId::new(start_op, actor);
let make_op = amp::Op {
action: make_action,
obj: parent_object,
key: key.clone(),
insert,
pred: Vec::new(),
};
let mut op_num = start_op + 1;
let mut result = vec![make_op];
for (key, v) in kvs.iter() {
let (child_requests, new_op_num) = value_to_op_requests(
actor,
op_num,
amp::ObjectId::from(make_op_id.clone()),
&amp::Key::from(key.as_str()),
v,
false,
);
op_num = new_op_num;
result.extend(child_requests);
}
(result, op_num)
}
Value::Primitive(prim_value) => {
let ops = vec![amp::Op {
action: amp::OpType::Set(prim_value.into()),
obj: parent_object,
key: key.clone(),
insert,
pred: Vec::new(),
}];
(ops, start_op + 1)
}
}
}
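For instance, a one-grapheme text value expands into exactly two ops, and the returned counter is the next op number available for use (actor here is hypothetical):

let (ops, next_op) = value_to_op_requests(
    &actor,
    1,
    amp::ObjectId::Root,
    &amp::Key::from("title"),
    &Value::Text(vec!["a".to_string()]),
    false,
);
assert_eq!(ops.len(), 2); // MakeText at op 1, then one insert at op 2.
assert_eq!(next_op, 3);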

View file

@ -0,0 +1,979 @@
use std::convert::TryInto;
use automerge_frontend::{Frontend, Path, Primitive, Value};
use automerge_protocol as amp;
use maplit::hashmap;
use unicode_segmentation::UnicodeSegmentation;
#[test]
fn set_object_root_properties() {
let actor = amp::ActorId::random();
let patch = amp::Patch {
actor: None,
seq: None,
max_op: 1,
pending_changes: 0,
deps: Vec::new(),
clock: hashmap! {
actor.clone() => 1,
},
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"bird".into() => hashmap!{
actor.op_id_at(1) => "magpie".into()
}
},
})),
};
let mut frontend = Frontend::new();
frontend.apply_patch(patch).unwrap();
assert_eq!(
frontend.state(),
&Into::<Value>::into(hashmap! {"bird" => "magpie"})
);
}
#[test]
fn reveal_conflicts_on_root_properties() {
    // We don't just use random actor IDs because we need to have a specific
    // ordering (actor1 < actor2, so actor2's value wins the conflict)
let actor1 = amp::ActorId::from_bytes(
uuid::Uuid::parse_str("02ef21f3-c9eb-4087-880e-bedd7c4bbe43")
.unwrap()
.as_bytes(),
);
let actor2 = amp::ActorId::from_bytes(
uuid::Uuid::parse_str("2a1d376b-24f7-4400-8d4a-f58252d644dd")
.unwrap()
.as_bytes(),
);
let patch = amp::Patch {
actor: None,
seq: None,
max_op: 2,
pending_changes: 0,
clock: hashmap! {
actor1.clone() => 1,
actor2.clone() => 2,
},
deps: Vec::new(),
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"favouriteBird".into() => hashmap!{
actor1.op_id_at(1) => amp::Diff::Value("robin".into()),
actor2.op_id_at(1) => amp::Diff::Value("wagtail".into()),
}
},
})),
};
let mut doc = Frontend::new();
doc.apply_patch(patch).unwrap();
assert_eq!(
doc.state(),
&Into::<Value>::into(hashmap! {"favouriteBird" => "wagtail"})
);
let conflicts = doc.get_conflicts(&Path::root().key("favouriteBird"));
assert_eq!(
conflicts,
Some(hashmap! {
actor1.op_id_at(1) => "robin".into(),
actor2.op_id_at(1) => "wagtail".into(),
})
)
}
#[test]
fn create_nested_maps() {
let actor = amp::ActorId::random();
let patch = amp::Patch {
actor: None,
seq: None,
max_op: 3,
pending_changes: 0,
deps: Vec::new(),
clock: hashmap! {
actor.clone() => 1,
},
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"birds".into() => hashmap!{
actor.op_id_at(1) => amp::Diff::Map(amp::MapDiff{
object_id: actor.op_id_at(2).into(),
obj_type: amp::MapType::Map,
props: hashmap!{
"wrens".into() => hashmap!{
actor.op_id_at(2) => amp::Diff::Value(amp::ScalarValue::Int(3))
}
}
})
}
},
})),
};
let mut frontend = Frontend::new();
frontend.apply_patch(patch).unwrap();
assert_eq!(
frontend.state(),
&Into::<Value>::into(hashmap! {"birds" => hashmap!{"wrens" => Primitive::Int(3)}})
);
}
#[test]
fn apply_updates_inside_nested_maps() {
let actor = amp::ActorId::random();
let patch1 = amp::Patch {
actor: None,
seq: None,
max_op: 2,
pending_changes: 0,
deps: Vec::new(),
clock: hashmap! {
actor.clone() => 1,
},
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"birds".into() => hashmap!{
actor.op_id_at(1) => amp::Diff::Map(amp::MapDiff{
object_id: actor.op_id_at(2).into(),
obj_type: amp::MapType::Map,
props: hashmap!{
"wrens".into() => hashmap!{
actor.op_id_at(2) => amp::Diff::Value(amp::ScalarValue::Int(3))
}
}
})
}
},
})),
};
let mut frontend = Frontend::new();
frontend.apply_patch(patch1).unwrap();
let birds_id = frontend.get_object_id(&Path::root().key("birds")).unwrap();
let patch2 = amp::Patch {
actor: None,
seq: None,
max_op: 3,
pending_changes: 0,
deps: Vec::new(),
clock: hashmap! {
actor.clone() => 2,
},
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"birds".into() => hashmap!{
actor.op_id_at(1) => amp::Diff::Map(amp::MapDiff{
object_id: birds_id,
obj_type: amp::MapType::Map,
props: hashmap!{
"sparrows".into() => hashmap!{
actor.op_id_at(3) => amp::Diff::Value(amp::ScalarValue::Int(15))
}
}
})
}
},
})),
};
frontend.apply_patch(patch2).unwrap();
assert_eq!(
frontend.state(),
&Into::<Value>::into(
hashmap! {"birds" => hashmap!{"wrens" => Primitive::Int(3), "sparrows" => Primitive::Int(15)}}
)
);
}
#[test]
fn apply_updates_inside_map_conflicts() {
// We don't just use random actor IDs because we need to have a specific
// ordering (actor1 < actor2)
let actor1 = amp::ActorId::from_bytes(
uuid::Uuid::parse_str("02ef21f3-c9eb-4087-880e-bedd7c4bbe43")
.unwrap()
.as_bytes(),
);
let actor2 = amp::ActorId::from_bytes(
uuid::Uuid::parse_str("2a1d376b-24f7-4400-8d4a-f58252d644dd")
.unwrap()
.as_bytes(),
);
let patch1 = amp::Patch {
actor: None,
seq: None,
max_op: 2,
pending_changes: 0,
deps: Vec::new(),
clock: hashmap! {
actor1.clone() => 1,
actor2.clone() => 1,
},
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"favouriteBirds".into() => hashmap!{
actor1.op_id_at(1) => amp::Diff::Map(amp::MapDiff{
object_id: actor1.op_id_at(1).into(),
obj_type: amp::MapType::Map,
props: hashmap!{
"blackbirds".into() => hashmap!{
actor1.op_id_at(2) => amp::Diff::Value(amp::ScalarValue::Int(1)),
}
},
}),
actor2.op_id_at(1) => amp::Diff::Map(amp::MapDiff{
object_id: actor2.op_id_at(1).into(),
obj_type: amp::MapType::Map,
props: hashmap!{
"wrens".into() => hashmap!{
actor2.op_id_at(2) => amp::Diff::Value(amp::ScalarValue::Int(3)),
}
},
})
}
},
})),
};
let mut frontend = Frontend::new();
frontend.apply_patch(patch1).unwrap();
assert_eq!(
frontend.state(),
&Into::<Value>::into(hashmap! {"favouriteBirds" => hashmap!{"wrens" => Primitive::Int(3)}})
);
assert_eq!(
frontend
.get_conflicts(&Path::root().key("favouriteBirds"))
.unwrap(),
hashmap! {
actor1.op_id_at(1) => hashmap!{"blackbirds" => Primitive::Int(1)}.into(),
actor2.op_id_at(1) => hashmap!{"wrens" => Primitive::Int(3)}.into(),
}
);
let patch2 = amp::Patch {
actor: None,
seq: None,
max_op: 1,
pending_changes: 0,
deps: Vec::new(),
clock: hashmap! {
actor1.clone() => 2,
actor2.clone() => 1,
},
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"favouriteBirds".into() => hashmap!{
actor1.op_id_at(1) => amp::Diff::Map(amp::MapDiff{
object_id: actor1.op_id_at(1).into(),
obj_type: amp::MapType::Map,
props: hashmap!{
"blackbirds".into() => hashmap!{
actor1.op_id_at(3) => amp::Diff::Value(amp::ScalarValue::Int(2)),
}
},
}),
actor2.op_id_at(1) => amp::Diff::Unchanged(amp::ObjDiff{
object_id: actor2.op_id_at(1).into(),
obj_type: amp::ObjType::Map(amp::MapType::Map),
})
}
},
})),
};
frontend.apply_patch(patch2).unwrap();
assert_eq!(
frontend.state(),
&Into::<Value>::into(hashmap! {"favouriteBirds" => hashmap!{"wrens" => Primitive::Int(3)}})
);
assert_eq!(
frontend
.get_conflicts(&Path::root().key("favouriteBirds"))
.unwrap(),
hashmap! {
actor1.op_id_at(1) => hashmap!{"blackbirds" => Primitive::Int(2)}.into(),
actor2.op_id_at(1) => hashmap!{"wrens" => Primitive::Int(3)}.into(),
}
);
}
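// `Diff::Unchanged` tells the frontend that a conflicting branch still exists
// but carries no new edits, so the conflict on "favouriteBirds" is preserved
// while only actor1's branch picks up the updated "blackbirds" count.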
#[test]
fn delete_keys_in_maps() {
let actor = amp::ActorId::random();
let mut frontend = Frontend::new();
let patch1 = amp::Patch {
actor: None,
max_op: 2,
pending_changes: 0,
seq: None,
deps: Vec::new(),
clock: hashmap! {
actor.clone() => 1,
},
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"magpies".into() => hashmap!{
actor.op_id_at(1) => amp::Diff::Value(amp::ScalarValue::Int(2))
},
"sparrows".into() => hashmap!{
actor.op_id_at(2) => amp::Diff::Value(amp::ScalarValue::Int(15))
}
},
})),
};
frontend.apply_patch(patch1).unwrap();
assert_eq!(
frontend.state(),
&Into::<Value>::into(
hashmap! {"magpies" => Primitive::Int(2), "sparrows" => Primitive::Int(15)}
)
);
let patch2 = amp::Patch {
actor: None,
seq: None,
max_op: 3,
pending_changes: 0,
deps: Vec::new(),
clock: hashmap! {
actor => 2,
},
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"magpies".into() => hashmap!{}
},
})),
};
frontend.apply_patch(patch2).unwrap();
assert_eq!(
frontend.state(),
&Into::<Value>::into(hashmap! {"sparrows" => Primitive::Int(15)})
);
}
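// Sending an empty op-set for a key (`"magpies".into() => hashmap!{}`) means
// no operation asserts a value for it any more, which the frontend treats as
// a deletion of that key.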
#[test]
fn create_lists() {
let actor = amp::ActorId::random();
let mut frontend = Frontend::new();
let patch = amp::Patch {
actor: None,
seq: None,
max_op: 2,
pending_changes: 0,
deps: Vec::new(),
clock: hashmap! {
actor.clone() => 2,
},
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"birds".into() => hashmap!{
actor.op_id_at(1) => amp::Diff::Seq(amp::SeqDiff{
object_id: actor.op_id_at(1).into(),
obj_type: amp::SequenceType::List,
edits: vec![amp::DiffEdit::Insert { index: 0, elem_id: actor.op_id_at(2).into() }],
props: hashmap!{
0 => hashmap!{
actor.op_id_at(2) => amp::Diff::Value("chaffinch".into())
}
}
})
}
},
})),
};
frontend.apply_patch(patch).unwrap();
assert_eq!(
frontend.state(),
&Into::<Value>::into(hashmap! {"birds" => vec!["chaffinch"]})
)
}
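// For sequences the diff is split in two: `edits` create or remove elements
// at positions (here a single insert at index 0), and `props` assign values
// to elements by index, keyed by the OpId responsible for each value.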
#[test]
fn apply_updates_inside_lists() {
let actor = amp::ActorId::random();
let mut frontend = Frontend::new();
let patch = amp::Patch {
actor: None,
seq: None,
max_op: 1,
pending_changes: 0,
deps: Vec::new(),
clock: hashmap! {
actor.clone() => 1,
},
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"birds".into() => hashmap!{
actor.op_id_at(1) => amp::Diff::Seq(amp::SeqDiff{
object_id: actor.op_id_at(1).into(),
obj_type: amp::SequenceType::List,
edits: vec![amp::DiffEdit::Insert { index: 0, elem_id: actor.op_id_at(2).into() }],
props: hashmap!{
0 => hashmap!{
actor.op_id_at(2) => amp::Diff::Value("chaffinch".into())
}
}
})
}
},
})),
};
frontend.apply_patch(patch).unwrap();
let patch2 = amp::Patch {
actor: None,
seq: None,
max_op: 3,
pending_changes: 0,
deps: Vec::new(),
clock: hashmap! {
actor.clone() => 2,
},
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"birds".into() => hashmap!{
actor.op_id_at(1) => amp::Diff::Seq(amp::SeqDiff{
object_id: actor.op_id_at(1).into(),
obj_type: amp::SequenceType::List,
edits: vec![],
props: hashmap!{
0 => hashmap!{
actor.op_id_at(3) => amp::Diff::Value("greenfinch".into())
}
}
})
}
},
})),
};
frontend.apply_patch(patch2).unwrap();
assert_eq!(
frontend.state(),
&Into::<Value>::into(hashmap! {"birds" => vec!["greenfinch"]})
)
}
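// An empty `edits` list combined with an entry in `props` updates an element
// in place: the value at index 0 is overwritten without changing the list's
// length or its element IDs.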
#[test]
fn apply_updates_inside_list_conflicts() {
// We don't just use random actor IDs because we need to have a specific
// ordering (actor1 < actor2)
let actor1 = amp::ActorId::from_bytes(
uuid::Uuid::parse_str("02ef21f3-c9eb-4087-880e-bedd7c4bbe43")
.unwrap()
.as_bytes(),
);
let actor2 = amp::ActorId::from_bytes(
uuid::Uuid::parse_str("2a1d376b-24f7-4400-8d4a-f58252d644dd")
.unwrap()
.as_bytes(),
);
let other_actor = amp::ActorId::random();
let patch1 = amp::Patch {
actor: None,
seq: None,
max_op: 2,
pending_changes: 0,
deps: Vec::new(),
clock: hashmap! {
other_actor.clone() => 1,
actor1.clone() => 1,
actor2.clone() => 1,
},
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"birds".into() => hashmap!{
other_actor.op_id_at(1) => amp::Diff::Seq(amp::SeqDiff{
object_id: other_actor.op_id_at(1).into(),
obj_type: amp::SequenceType::List,
edits: vec![amp::DiffEdit::Insert{ index: 0, elem_id: actor1.op_id_at(2).into()}],
props: hashmap!{
0 => hashmap!{
actor1.op_id_at(2) => amp::Diff::Map(amp::MapDiff{
object_id: actor1.op_id_at(2).into(),
obj_type: amp::MapType::Map,
props: hashmap!{
"species".into() => hashmap!{
actor1.op_id_at(3) => amp::Diff::Value("woodpecker".into()),
},
"numSeen".into() => hashmap!{
actor1.op_id_at(4) => amp::Diff::Value(amp::ScalarValue::Int(1)),
},
}
}),
actor2.op_id_at(2) => amp::Diff::Map(amp::MapDiff{
object_id: actor2.op_id_at(2).into(),
obj_type: amp::MapType::Map,
props: hashmap!{
"species".into() => hashmap!{
actor2.op_id_at(3) => amp::Diff::Value("lapwing".into()),
},
"numSeen".into() => hashmap!{
actor2.op_id_at(4) => amp::Diff::Value(amp::ScalarValue::Int(2)),
},
}
}),
},
}
})
}
},
})),
};
let mut frontend = Frontend::new();
frontend.apply_patch(patch1).unwrap();
assert_eq!(
frontend.state(),
&Into::<Value>::into(
hashmap! {"birds" => vec![hashmap!{"species" => Primitive::Str("lapwing".to_string()), "numSeen" => Primitive::Int(2)}]}
)
);
assert_eq!(
frontend
.get_conflicts(&Path::root().key("birds").index(0))
.unwrap(),
hashmap! {
actor1.op_id_at(2) => hashmap!{
"species" => Primitive::Str("woodpecker".into()),
"numSeen" => Primitive::Int(1),
}.into(),
actor2.op_id_at(2) => hashmap!{
"species" => Primitive::Str("lapwing".into()),
"numSeen" => Primitive::Int(2),
}.into(),
}
);
let patch2 = amp::Patch {
actor: None,
seq: None,
max_op: 5,
pending_changes: 0,
deps: Vec::new(),
clock: hashmap! {
actor1.clone() => 2,
actor2.clone() => 1,
},
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"birds".into() => hashmap!{
other_actor.op_id_at(1) => amp::Diff::Seq(amp::SeqDiff{
object_id: other_actor.op_id_at(1).into(),
obj_type: amp::SequenceType::List,
edits: Vec::new(),
props: hashmap!{
0 => hashmap!{
actor1.op_id_at(2) => amp::Diff::Map(amp::MapDiff{
object_id: actor1.op_id_at(2).into(),
obj_type: amp::MapType::Map,
props: hashmap!{
"numSeen".into() => hashmap!{
actor1.op_id_at(5) => amp::Diff::Value(amp::ScalarValue::Int(2)),
},
}
}),
actor2.op_id_at(2) => amp::Diff::Unchanged(amp::ObjDiff{
object_id: actor2.op_id_at(2).into(),
obj_type: amp::ObjType::Map(amp::MapType::Map),
}),
},
}
})
}
},
})),
};
frontend.apply_patch(patch2).unwrap();
assert_eq!(
frontend.state(),
&Into::<Value>::into(
hashmap! {"birds" => vec![hashmap!{"species" => Primitive::Str("lapwing".to_string()), "numSeen" => Primitive::Int(2)}]}
)
);
assert_eq!(
frontend
.get_conflicts(&Path::root().key("birds").index(0))
.unwrap(),
hashmap! {
actor1.op_id_at(2) => hashmap!{
"species" => Primitive::Str("woodpecker".into()),
"numSeen" => Primitive::Int(2),
}.into(),
actor2.op_id_at(2) => hashmap!{
"species" => Primitive::Str("lapwing".into()),
"numSeen" => Primitive::Int(2),
}.into(),
}
);
}
#[test]
fn delete_list_elements() {
let actor = amp::ActorId::random();
let mut frontend = Frontend::new();
let patch = amp::Patch {
actor: None,
seq: None,
max_op: 3,
pending_changes: 0,
deps: Vec::new(),
clock: hashmap! {
actor.clone() => 1,
},
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"birds".into() => hashmap!{
actor.op_id_at(1) => amp::Diff::Seq(amp::SeqDiff{
object_id: actor.op_id_at(1).into(),
obj_type: amp::SequenceType::List,
edits: vec![
amp::DiffEdit::Insert { index: 0, elem_id: actor.op_id_at(2).into() },
amp::DiffEdit::Insert { index: 1, elem_id: actor.op_id_at(3).into() },
],
props: hashmap!{
0 => hashmap!{
actor.op_id_at(2) => amp::Diff::Value("chaffinch".into())
},
1 => hashmap!{
actor.op_id_at(3) => amp::Diff::Value("goldfinch".into())
}
}
})
}
},
})),
};
frontend.apply_patch(patch).unwrap();
assert_eq!(
frontend.state(),
&Into::<Value>::into(hashmap! {"birds" => vec!["chaffinch", "goldfinch"]})
);
let patch2 = amp::Patch {
actor: None,
seq: None,
max_op: 4,
pending_changes: 0,
deps: Vec::new(),
clock: hashmap! {
actor.clone() => 2,
},
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"birds".into() => hashmap!{
actor.op_id_at(1) => amp::Diff::Seq(amp::SeqDiff{
object_id: actor.op_id_at(1).into(),
obj_type: amp::SequenceType::List,
edits: vec![amp::DiffEdit::Remove{ index: 0 }],
props: hashmap!{}
})
}
},
})),
};
frontend.apply_patch(patch2).unwrap();
assert_eq!(
frontend.state(),
&Into::<Value>::into(hashmap! {"birds" => vec!["goldfinch"]})
);
}
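// `DiffEdit::Remove { index }` deletes the element at that position and
// shifts later elements down, so "goldfinch" moves from index 1 to index 0.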
#[test]
fn apply_updates_at_different_levels_of_object_tree() {
let actor = amp::ActorId::random();
let patch1 = amp::Patch {
clock: hashmap! {actor.clone() => 1},
seq: None,
max_op: 6,
pending_changes: 0,
actor: None,
deps: Vec::new(),
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"counts".into() => hashmap!{
actor.op_id_at(1) => amp::Diff::Map(amp::MapDiff{
object_id: actor.op_id_at(1).into(),
obj_type: amp::MapType::Map,
props: hashmap!{
"magpie".into() => hashmap!{
actor.op_id_at(2) => amp::Diff::Value(amp::ScalarValue::Int(2))
}
}
})
},
"details".into() => hashmap!{
actor.op_id_at(3) => amp::Diff::Seq(amp::SeqDiff{
object_id: actor.op_id_at(3).into(),
obj_type: amp::SequenceType::List,
edits: vec![amp::DiffEdit::Insert{ index: 0, elem_id: actor.op_id_at(4).into() }],
props: hashmap!{
0 => hashmap!{
actor.op_id_at(4) => amp::Diff::Map(amp::MapDiff{
object_id: actor.op_id_at(4).into(),
obj_type: amp::MapType::Map,
props: hashmap!{
"species".into() => hashmap!{
actor.op_id_at(5) => amp::Diff::Value("magpie".into())
},
"family".into() => hashmap!{
actor.op_id_at(6) => amp::Diff::Value("Corvidae".into())
}
}
})
}
}
})
},
},
})),
};
let mut frontend = Frontend::new();
frontend.apply_patch(patch1).unwrap();
assert_eq!(
frontend.state(),
&Into::<Value>::into(hashmap! {
"counts" => Into::<Value>::into(hashmap!{"magpie".to_string() => Primitive::Int(2)}),
"details" => vec![Into::<Value>::into(hashmap!{
"species" => "magpie",
"family" => "Corvidae",
})].into()
})
);
let patch2 = amp::Patch {
clock: hashmap! {actor.clone() => 2},
seq: None,
max_op: 7,
pending_changes: 0,
actor: None,
deps: Vec::new(),
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"counts".into() => hashmap!{
actor.op_id_at(1) => amp::Diff::Map(amp::MapDiff{
object_id: actor.op_id_at(1).into(),
obj_type: amp::MapType::Map,
props: hashmap!{
"magpie".into() => hashmap!{
actor.op_id_at(7) => amp::Diff::Value(amp::ScalarValue::Int(3))
}
}
})
},
"details".into() => hashmap!{
actor.op_id_at(3) => amp::Diff::Seq(amp::SeqDiff{
object_id: actor.op_id_at(3).into(),
obj_type: amp::SequenceType::List,
edits: Vec::new(),
props: hashmap!{
0 => hashmap!{
actor.op_id_at(4) => amp::Diff::Map(amp::MapDiff{
object_id: actor.op_id_at(4).into(),
obj_type: amp::MapType::Map,
props: hashmap!{
"species".into() => hashmap!{
actor.op_id_at(8) => amp::Diff::Value("Eurasian magpie".into())
},
}
})
}
}
})
},
},
})),
};
frontend.apply_patch(patch2).unwrap();
assert_eq!(
frontend.state(),
&Into::<Value>::into(hashmap! {
"counts" => Into::<Value>::into(hashmap!{"magpie".to_string() => Primitive::Int(3)}),
"details" => vec![Into::<Value>::into(hashmap!{
"species" => "Eurasian magpie",
"family" => "Corvidae",
})].into()
})
);
}
#[test]
fn test_text_objects() {
let actor = amp::ActorId::random();
let mut frontend = Frontend::new();
let patch = amp::Patch {
actor: None,
seq: None,
max_op: 4,
pending_changes: 0,
deps: Vec::new(),
clock: hashmap! {
actor.clone() => 2,
},
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"name".into() => hashmap!{
actor.op_id_at(1) => amp::Diff::Seq(amp::SeqDiff{
object_id: actor.op_id_at(1).into(),
obj_type: amp::SequenceType::Text,
edits: vec![
amp::DiffEdit::Insert { index: 0, elem_id: actor.op_id_at(2).into() },
amp::DiffEdit::Insert { index: 1, elem_id: actor.op_id_at(3).into() },
amp::DiffEdit::Insert { index: 2, elem_id: actor.op_id_at(4).into() },
],
props: hashmap!{
0 => hashmap!{
actor.op_id_at(2) => amp::Diff::Value("b".into())
},
1 => hashmap!{
actor.op_id_at(3) => amp::Diff::Value("e".into())
},
2 => hashmap!{
actor.op_id_at(4) => amp::Diff::Value("n".into())
}
}
})
}
},
})),
};
frontend.apply_patch(patch).unwrap();
assert_eq!(
frontend.state(),
&Into::<Value>::into(
hashmap! {"name" => Value::Text("ben".graphemes(true).map(|s|s.to_owned()).collect())}
)
);
let patch2 = amp::Patch {
actor: None,
seq: None,
max_op: 5,
pending_changes: 0,
deps: Vec::new(),
clock: hashmap! {
actor.clone() => 3,
},
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"name".into() => hashmap!{
actor.op_id_at(1) => amp::Diff::Seq(amp::SeqDiff{
object_id: actor.op_id_at(1).into(),
obj_type: amp::SequenceType::Text,
edits: vec![
amp::DiffEdit::Remove { index: 1 },
],
props: hashmap!{
1 => hashmap! {
actor.op_id_at(5) => amp::Diff::Value(amp::ScalarValue::Str("i".to_string()))
}
}
})
}
},
})),
};
frontend.apply_patch(patch2).unwrap();
assert_eq!(
frontend.state(),
&Into::<Value>::into(
hashmap! {"name" => Value::Text("bi".graphemes(true).map(|s|s.to_owned()).collect())}
)
);
}
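// Text objects are represented as a `Vec` of grapheme clusters rather than a
// `String`, which is why the assertions build the expected value with
// `graphemes(true)`; edits address individual graphemes by index.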
#[test]
fn test_unchanged_diff_creates_empty_objects() {
let mut doc = Frontend::new();
let patch = amp::Patch {
actor: Some(doc.actor_id.clone()),
seq: Some(1),
clock: hashmap! {doc.actor_id.clone() => 1},
deps: Vec::new(),
max_op: 1,
pending_changes: 0,
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"text".to_string() => hashmap!{
"1@cfe5fefb771f4c15a716d488012cbf40".try_into().unwrap() => amp::Diff::Unchanged(amp::ObjDiff{
object_id: "1@cfe5fefb771f4c15a716d488012cbf40".try_into().unwrap(),
obj_type: amp::ObjType::Sequence(amp::SequenceType::Text)
})
}
},
})),
};
doc.apply_patch(patch).unwrap();
assert_eq!(
doc.state(),
&Value::Map(
hashmap! {"text".to_string() => Value::Text(Vec::new())},
amp::MapType::Map
),
);
}
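// Even with no concrete edits, an `Unchanged` diff for an object the frontend
// has never seen must materialise it, here as an empty text value.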

View file

@@ -0,0 +1,748 @@
use automerge_backend::Backend;
use automerge_frontend::{
Frontend, InvalidChangeRequest, InvalidPatch, LocalChange, Path, Primitive, Value,
};
use automerge_protocol as amp;
use maplit::hashmap;
fn random_op_id() -> amp::OpId {
amp::OpId::new(1, &amp::ActorId::random())
}
#[test]
fn use_version_and_sequence_number_from_backend() {
let mut doc = Frontend::new();
let remote_actor1 = amp::ActorId::random();
let remote_actor2 = amp::ActorId::random();
// This is a remote patch
let patch = amp::Patch {
actor: None,
seq: None,
clock: hashmap! {
doc.actor_id.clone() => 4,
remote_actor1 => 11,
remote_actor2 => 41,
},
deps: Vec::new(),
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"blackbirds".into() => hashmap!{
random_op_id() => amp::Diff::Value(amp::ScalarValue::F64(24.0))
}
},
})),
max_op: 4,
pending_changes: 0,
};
// There were no in flight requests so the doc state should be reconciled
// and should reflect the above patch
doc.apply_patch(patch).unwrap();
// Now make a local change; this moves the doc into the "waiting for
// in flight requests" state, and the doc state should reflect the change
// just made.
let req = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("partridges"),
Value::Primitive(Primitive::Int(1)),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let expected_change_request = amp::UncompressedChange {
actor_id: doc.actor_id,
seq: 5,
start_op: 5,
time: req.time,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![amp::Op {
action: amp::OpType::Set(amp::ScalarValue::Int(1)),
obj: amp::ObjectId::Root,
key: "partridges".into(),
insert: false,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
};
assert_eq!(req, expected_change_request);
}
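// The point of the assertion: `seq` and `start_op` of a new local change are
// derived from the last reconciled backend state (clock entry 4 and max_op 4
// in the patch above), so the change is numbered seq 5 / start_op 5.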
#[test]
fn remove_pending_requests_once_handled() {
let mut doc = Frontend::new();
// First we add two local changes
let _req1 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("blackbirds"),
Primitive::Int(24),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let _req2 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("partridges"),
Primitive::Int(1),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
// The doc is waiting for those changes to be applied
assert_eq!(doc.in_flight_requests(), vec![1, 2]);
// Apply a patch corresponding (via actor ID and seq) to the first change
doc.apply_patch(amp::Patch {
actor: Some(doc.actor_id.clone()),
seq: Some(1),
clock: hashmap! {
doc.actor_id.clone() => 1,
},
max_op: 4,
pending_changes: 0,
deps: Vec::new(),
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"blackbirds".into() => hashmap!{
random_op_id() => amp::Diff::Value(amp::ScalarValue::Int(24))
}
},
})),
})
.unwrap();
// The doc state should still reflect both local changes as we're still
// waiting for the last in flight request to be fulfilled
assert_eq!(
doc.state(),
&Into::<Value>::into(hashmap! {
"blackbirds".to_string() => Primitive::Int(24),
"partridges".to_string() => Primitive::Int(1),
})
);
assert_eq!(doc.in_flight_requests(), vec![2]);
// Apply a patch corresponding (via actor ID and seq) to the second change
doc.apply_patch(amp::Patch {
actor: Some(doc.actor_id.clone()),
seq: Some(2),
clock: hashmap! {
doc.actor_id.clone() => 2,
},
max_op: 5,
pending_changes: 0,
deps: Vec::new(),
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"partridges".into() => hashmap!{
random_op_id() => amp::Diff::Value(amp::ScalarValue::Int(1))
}
},
})),
})
.unwrap();
// The doc state should have switched to reconciled
assert!(doc.in_flight_requests().is_empty());
// The doc state should still reflect the local changes as they have now
// been reconciled
assert_eq!(
doc.state(),
&Into::<Value>::into(hashmap! {
"blackbirds".to_string() => Primitive::Int(24),
"partridges".to_string() => Primitive::Int(1),
})
);
assert_eq!(doc.seq, 2);
}
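// In-flight local changes are matched against incoming patches by
// (actor, seq): each matching patch pops one entry off the queue, and the
// document only returns to the reconciled state once the queue is empty.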
#[test]
fn leave_request_queue_unchanged_on_remote_changes() {
let remote = amp::ActorId::random();
let mut doc = Frontend::new();
// Enqueue a local change, moving the document into the "waiting for in
// flight requests" state
let _req1 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("blackbirds"),
Primitive::Int(24),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
// The document is now waiting for the above request
assert_eq!(doc.in_flight_requests(), vec![1]);
// Apply a remote patch (identified as remote because actor and seq are None)
doc.apply_patch(amp::Patch {
actor: None,
seq: None,
max_op: 10,
pending_changes: 0,
clock: hashmap! {
remote.clone() => 1,
},
deps: Vec::new(),
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"pheasants".into() => hashmap!{
random_op_id() => amp::Diff::Value(amp::ScalarValue::Int(2))
}
},
})),
})
.unwrap();
// The doc state should reflect the outstanding in flight request and not
// the remote patch (because we're still waiting for in flight requests)
assert_eq!(
doc.state(),
&Into::<Value>::into(hashmap! {
"blackbirds".to_string() => Primitive::Int(24),
})
);
assert_eq!(doc.in_flight_requests(), vec![1]);
// Now apply a patch corresponding to the outstanding in flight request
doc.apply_patch(amp::Patch {
actor: Some(doc.actor_id.clone()),
seq: Some(1),
clock: hashmap! {
doc.actor_id.clone() => 2,
remote => 1,
},
max_op: 11,
pending_changes: 0,
deps: Vec::new(),
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"blackbirds".into() => hashmap!{
random_op_id() => amp::Diff::Value(amp::ScalarValue::Int(24))
}
},
})),
})
.unwrap();
// The doc state should now reflect both the local and remote changes
// as the doc is now reconciled (all in flight requests have received a
// patch)
assert_eq!(
doc.state(),
&Into::<Value>::into(hashmap! {
"blackbirds".to_string() => Primitive::Int(24),
"pheasants".to_string() => Primitive::Int(2),
})
);
assert!(doc.in_flight_requests().is_empty());
assert_eq!(doc.seq, 2);
}
#[test]
fn dont_allow_out_of_order_request_patches() {
let mut doc = Frontend::new();
let _req1 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("blackbirds"),
Primitive::Int(24),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let result = doc.apply_patch(amp::Patch {
actor: Some(doc.actor_id.clone()),
seq: Some(2),
max_op: 8,
pending_changes: 0,
clock: hashmap! {
doc.actor_id.clone() => 2,
},
deps: Vec::new(),
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"partridges".to_string() => hashmap!{
random_op_id() => amp::Diff::Value(amp::ScalarValue::Int(1))
}
},
})),
});
assert_eq!(
result,
Err(InvalidPatch::MismatchedSequenceNumber {
expected: 1,
actual: 2
})
);
}
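// Patches acknowledging local changes must arrive in order: a patch claiming
// seq 2 while change 1 is still unacknowledged is rejected with
// `MismatchedSequenceNumber` rather than applied out of order.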
#[test]
fn handle_concurrent_insertions_into_lists() {
let mut doc = Frontend::new();
let _req1 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("birds"),
vec!["goldfinch"],
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let birds_id = doc.get_object_id(&Path::root().key("birds")).unwrap();
// Apply the corresponding backend patch for the above state; the document
// should be reconciled after this
doc.apply_patch(amp::Patch {
actor: Some(doc.actor_id.clone()),
seq: Some(1),
max_op: 1,
pending_changes: 0,
clock: hashmap! {
doc.actor_id.clone() => 1,
},
deps: Vec::new(),
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"birds".to_string() => hashmap!{
doc.actor_id.op_id_at(1) => amp::Diff::Seq(amp::SeqDiff{
object_id: birds_id.clone(),
obj_type: amp::SequenceType::List,
edits: vec![amp::DiffEdit::Insert{ index: 0, elem_id: doc.actor_id.op_id_at(1).into() }],
props: hashmap!{
0 => hashmap!{
random_op_id() => amp::Diff::Value("goldfinch".into())
}
}
})
}
},
})),
})
.unwrap();
assert_eq!(
doc.state(),
&Into::<Value>::into(hashmap! {"birds".to_string() => vec!["goldfinch"]})
);
assert!(doc.in_flight_requests().is_empty());
// Now add another change which updates the same list; this results in an
// in flight request
let _req2 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::insert(
Path::root().key("birds").index(0),
"chaffinch".into(),
))?;
doc.add_change(LocalChange::insert(
Path::root().key("birds").index(2),
"greenfinch".into(),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
assert_eq!(
doc.state(),
&Into::<Value>::into(
hashmap! {"birds".to_string() => vec!["chaffinch", "goldfinch", "greenfinch"]}
)
);
let remote = amp::ActorId::random();
// Apply a patch which does not take effect because we're still waiting
// for the in flight requests to be responded to
doc.apply_patch(amp::Patch {
clock: hashmap! {
doc.actor_id.clone() => 1,
remote.clone() => 1,
},
max_op: 3,
pending_changes: 0,
actor: None,
seq: None,
deps: Vec::new(),
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"birds".into() => hashmap!{
doc.actor_id.op_id_at(1) => amp::Diff::Seq(amp::SeqDiff{
object_id: birds_id.clone(),
obj_type: amp::SequenceType::List,
edits: vec![amp::DiffEdit::Insert{ index: 1, elem_id: remote.op_id_at(1).into()}],
props: hashmap!{
1 => hashmap!{
remote.op_id_at(1) => amp::Diff::Value("bullfinch".into())
}
}
})
}
},
})),
})
.unwrap();
// Check that the doc state hasn't been updated yet
assert_eq!(
doc.state(),
&Into::<Value>::into(
hashmap! {"birds".to_string() => vec!["chaffinch", "goldfinch", "greenfinch"]}
)
);
// Now apply a patch acknowledging the in flight request
doc.apply_patch(amp::Patch {
actor: Some(doc.actor_id.clone()),
seq: Some(2),
max_op: 3,
pending_changes: 0,
clock: hashmap!{
doc.actor_id.clone() => 2,
remote => 1,
},
deps: Vec::new(),
diffs: Some(amp::Diff::Map(amp::MapDiff{
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap!{
"birds".to_string() => hashmap!{
doc.actor_id.op_id_at(1) => amp::Diff::Seq(amp::SeqDiff{
object_id: birds_id,
obj_type: amp::SequenceType::List,
edits: vec![
amp::DiffEdit::Insert { index: 0, elem_id: doc.actor_id.op_id_at(2).into() },
amp::DiffEdit::Insert{ index: 2, elem_id: doc.actor_id.op_id_at(3).into() },
],
props: hashmap!{
0 => hashmap!{
doc.actor_id.op_id_at(2) => amp::Diff::Value("chaffinch".into()),
},
2 => hashmap!{
doc.actor_id.op_id_at(3) => amp::Diff::Value("greenfinch".into()),
}
}
})
}
}
}))
}).unwrap();
assert!(doc.in_flight_requests().is_empty());
assert_eq!(
doc.state(),
&Into::<Value>::into(
hashmap! {"birds".to_string() => vec!["chaffinch", "goldfinch", "greenfinch", "bullfinch"]}
)
)
}
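// Taken together: the remote "bullfinch" patch is applied while request 2 is
// still in flight, so it does not show up in the visible state until the
// acknowledging patch arrives, at which point the merged four-element list
// becomes visible.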
#[test]
fn allow_interleaving_of_patches_and_changes() {
let mut doc = Frontend::new();
let req1 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("number"),
Primitive::Int(1),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let req2 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("number"),
Primitive::Int(2),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
assert_eq!(
req1,
amp::UncompressedChange {
actor_id: doc.actor_id.clone(),
seq: 1,
start_op: 1,
message: None,
hash: None,
time: req1.time,
deps: Vec::new(),
operations: vec![amp::Op {
action: amp::OpType::Set(amp::ScalarValue::Int(1)),
obj: amp::ObjectId::Root,
key: "number".into(),
insert: false,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
}
);
assert_eq!(
req2,
amp::UncompressedChange {
actor_id: doc.actor_id.clone(),
seq: 2,
start_op: 2,
message: None,
hash: None,
time: req2.time,
deps: Vec::new(),
operations: vec![amp::Op {
action: amp::OpType::Set(amp::ScalarValue::Int(2)),
obj: amp::ObjectId::Root,
key: "number".into(),
insert: false,
pred: vec![doc.actor_id.op_id_at(1)],
}],
extra_bytes: Vec::new(),
}
);
let mut backend = Backend::new();
let (patch1, _) = backend.apply_local_change(req1).unwrap();
doc.apply_patch(patch1).unwrap();
let req3 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("number"),
Primitive::Int(3),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
assert_eq!(
req3,
amp::UncompressedChange {
actor_id: doc.actor_id.clone(),
seq: 3,
start_op: 3,
message: None,
hash: None,
time: req3.time,
deps: Vec::new(),
operations: vec![amp::Op {
action: amp::OpType::Set(amp::ScalarValue::Int(3)),
obj: amp::ObjectId::Root,
key: "number".into(),
insert: false,
pred: vec![doc.actor_id.op_id_at(2)],
}],
extra_bytes: Vec::new(),
}
);
}
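// The commented-out block below appears to be the original JavaScript version
// of this scenario, kept for reference; the Rust port follows it as
// `test_deps_are_filled_in_if_frontend_does_not_have_latest_patch`.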
//it('deps are filled in if the frontend does not have the latest patch', () => {
//const actor1 = uuid(), actor2 = uuid()
//const [doc1, change1] = Frontend.change(Frontend.init(actor1), doc => doc.number = 1)
//const [state1, patch1, binChange1] = Backend.applyLocalChange(Backend.init(), change1)
//const [state1a, patch1a] = Backend.applyChanges(Backend.init(), [binChange1])
//const doc1a = Frontend.applyPatch(Frontend.init(actor2), patch1a)
//const [doc2, change2] = Frontend.change(doc1a, doc => doc.number = 2)
//const [doc3, change3] = Frontend.change(doc2, doc => doc.number = 3)
//assert.deepStrictEqual(change2, {
//actor: actor2, seq: 1, startOp: 2, deps: [decodeChange(binChange1).hash], time: change2.time, message: '', ops: [
//{obj: '_root', action: 'set', key: 'number', insert: false, value: 2, pred: [`1@${actor1}`]}
//]
//})
//assert.deepStrictEqual(change3, {
//actor: actor2, seq: 2, startOp: 3, deps: [], time: change3.time, message: '', ops: [
//{obj: '_root', action: 'set', key: 'number', insert: false, value: 3, pred: [`2@${actor2}`]}
//]
//})
//const [state2, patch2, binChange2] = Backend.applyLocalChange(state1a, change2)
//const [state3, patch3, binChange3] = Backend.applyLocalChange(state2, change3)
//assert.deepStrictEqual(decodeChange(binChange2).deps, [decodeChange(binChange1).hash])
//assert.deepStrictEqual(decodeChange(binChange3).deps, [decodeChange(binChange2).hash])
//assert.deepStrictEqual(patch1a.deps, [decodeChange(binChange1).hash])
//assert.deepStrictEqual(patch2.deps, [])
//const doc2a = Frontend.applyPatch(doc3, patch2)
//const doc3a = Frontend.applyPatch(doc2a, patch3)
//const [doc4, change4] = Frontend.change(doc3a, doc => doc.number = 4)
//assert.deepStrictEqual(change4, {
//actor: actor2, seq: 3, startOp: 4, time: change4.time, message: '', deps: [], ops: [
//{obj: '_root', action: 'set', key: 'number', insert: false, value: 4, pred: [`3@${actor2}`]}
//]
//})
//const [state4, patch4, binChange4] = Backend.applyLocalChange(state3, change4)
//assert.deepStrictEqual(decodeChange(binChange4).deps, [decodeChange(binChange3).hash])
//})
#[test]
fn test_deps_are_filled_in_if_frontend_does_not_have_latest_patch() {
let (doc, change1) =
Frontend::new_with_initial_state(hashmap! {"number" => Primitive::Int(1)}.into()).unwrap();
let mut backend1 = Backend::new();
let (_, binchange1) = backend1.apply_local_change(change1).unwrap();
let mut doc2 = Frontend::new();
let mut backend2 = Backend::new();
let patch1 = backend2.apply_changes(vec![binchange1.clone()]).unwrap();
doc2.apply_patch(patch1.clone()).unwrap();
let change2 = doc2
.change::<_, _, InvalidChangeRequest>(None, |d| {
d.add_change(LocalChange::set(
Path::root().key("number"),
Primitive::Int(2),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let change3 = doc2
.change::<_, _, InvalidChangeRequest>(None, |d| {
d.add_change(LocalChange::set(
Path::root().key("number"),
Primitive::Int(3),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let expected_change2 = amp::UncompressedChange {
actor_id: doc2.actor_id.clone(),
start_op: 2,
seq: 1,
time: change2.time,
message: None,
hash: None,
deps: vec![binchange1.hash],
operations: vec![amp::Op {
action: amp::OpType::Set(amp::ScalarValue::from(2)),
obj: amp::ObjectId::Root,
key: "number".into(),
insert: false,
pred: vec![doc.actor_id.op_id_at(1)],
}],
extra_bytes: Vec::new(),
};
assert_eq!(change2, expected_change2);
let expected_change3 = amp::UncompressedChange {
actor_id: doc2.actor_id.clone(),
start_op: 3,
seq: 2,
time: change3.time,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![amp::Op {
action: amp::OpType::Set(amp::ScalarValue::from(3)),
obj: amp::ObjectId::Root,
key: "number".into(),
insert: false,
pred: vec![doc2.actor_id.op_id_at(2)],
}],
extra_bytes: Vec::new(),
};
assert_eq!(change3, expected_change3);
let (patch2, binchange2) = backend2.apply_local_change(change2).unwrap();
let (patch3, binchange3) = backend2.apply_local_change(change3).unwrap();
assert_eq!(binchange2.deps, vec![binchange1.hash]);
assert_eq!(binchange3.deps, vec![binchange2.hash]);
assert_eq!(patch1.deps, vec![binchange1.hash]);
assert_eq!(patch2.deps, Vec::new());
doc2.apply_patch(patch2).unwrap();
doc2.apply_patch(patch3).unwrap();
let change4 = doc2
.change::<_, _, InvalidChangeRequest>(None, |d| {
d.add_change(LocalChange::set(
Path::root().key("number"),
Primitive::Int(4),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let expected_change4 = amp::UncompressedChange {
actor_id: doc2.actor_id.clone(),
start_op: 4,
seq: 3,
time: change4.time,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![amp::Op {
action: amp::OpType::Set(amp::ScalarValue::from(4)),
obj: amp::ObjectId::Root,
key: "number".into(),
insert: false,
pred: vec![doc2.actor_id.op_id_at(3)],
}],
extra_bytes: Vec::new(),
};
assert_eq!(change4, expected_change4);
}
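// The key assertion here is on `deps`: change2 was made before the frontend
// saw any patch beyond patch1, so its deps are filled in with binchange1's
// hash, while change3, which builds directly on the frontend's own change2,
// needs no explicit deps.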

View file

@@ -0,0 +1,299 @@
use std::convert::TryInto;
use automerge_backend::Backend;
use automerge_frontend::{Frontend, InvalidChangeRequest, LocalChange, Path, Primitive, Value};
use automerge_protocol as amp;
use maplit::hashmap;
use unicode_segmentation::UnicodeSegmentation;
#[test]
fn test_allow_cursor_on_list_element() {
// `try_init` can fail if another test already installed a logger, so its
// result is deliberately ignored rather than unwrapped
let _ = env_logger::builder().is_test(true).try_init();
let mut frontend = Frontend::new();
let change = frontend
.change::<_, _, InvalidChangeRequest>(None, |d| {
d.add_change(LocalChange::set(Path::root().key("list"), vec![1, 2, 3]))?;
let cursor = d
.cursor_to_path(&Path::root().key("list").index(1))
.unwrap();
d.add_change(LocalChange::set(Path::root().key("cursor"), cursor))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let mut backend = Backend::new();
backend
.apply_changes(vec![change.try_into().unwrap()])
.unwrap();
let mut backend2 = Backend::new();
backend2
.apply_changes(backend.get_changes(&[]).into_iter().cloned().collect())
.unwrap();
let mut frontend2 = Frontend::new();
frontend2
.apply_patch(backend2.get_patch().unwrap())
.unwrap();
let index_value = frontend2
.value_at_path(&Path::root().key("cursor"))
.unwrap();
if let Value::Primitive(Primitive::Cursor(c)) = index_value {
assert_eq!(c.index, 1)
} else {
panic!("value was not a cursor");
}
}
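// Cursors survive a full round trip: serialised as part of a change, applied
// to a second backend, and rehydrated in a fresh frontend, the cursor still
// resolves to index 1 in the list.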
#[test]
fn test_allow_cursor_on_text_element() {
let mut frontend = Frontend::new();
let change = frontend
.change::<_, _, InvalidChangeRequest>(None, |d| {
d.add_change(LocalChange::set(
Path::root().key("list"),
Value::Text("123".graphemes(true).map(|s| s.to_owned()).collect()),
))?;
let cursor = d
.cursor_to_path(&Path::root().key("list").index(1))
.unwrap();
d.add_change(LocalChange::set(Path::root().key("cursor"), cursor))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let mut backend = Backend::new();
backend
.apply_changes(vec![change.try_into().unwrap()])
.unwrap();
let mut backend2 = Backend::new();
backend2
.apply_changes(backend.get_changes(&[]).into_iter().cloned().collect())
.unwrap();
let mut frontend2 = Frontend::new();
frontend2
.apply_patch(backend2.get_patch().unwrap())
.unwrap();
let index_value = frontend2
.value_at_path(&Path::root().key("cursor"))
.unwrap();
if let Value::Primitive(Primitive::Cursor(c)) = index_value {
assert_eq!(c.index, 1)
} else {
panic!("value was not a cursor");
}
}
#[test]
fn test_do_not_allow_index_past_end_of_list() {
let mut frontend = Frontend::new();
frontend
.change::<_, _, InvalidChangeRequest>(None, |d| {
d.add_change(LocalChange::set(
Path::root().key("list"),
Value::Text("123".graphemes(true).map(|s| s.to_owned()).collect()),
))?;
let cursor = d.cursor_to_path(&Path::root().key("list").index(10));
assert_eq!(cursor, None);
Ok(())
})
.unwrap();
}
#[test]
fn test_updates_cursor_during_change_function() {
let mut frontend = Frontend::new();
frontend
.change::<_, _, InvalidChangeRequest>(None, |d| {
d.add_change(LocalChange::set(
Path::root().key("list"),
Value::Text("123".graphemes(true).map(|s| s.to_owned()).collect()),
))?;
let cursor = d
.cursor_to_path(&Path::root().key("list").index(1))
.unwrap();
d.add_change(LocalChange::set(Path::root().key("cursor"), cursor))?;
let cursor_the_second = d.value_at_path(&Path::root().key("cursor"));
if let Some(Value::Primitive(Primitive::Cursor(c))) = cursor_the_second {
assert_eq!(c.index, 1);
} else {
panic!("Cursor the second not found");
}
d.add_change(LocalChange::insert(
Path::root().key("list").index(0),
Value::Primitive(Primitive::Str("0".to_string())),
))?;
let cursor_the_third = d.value_at_path(&Path::root().key("cursor"));
if let Some(Value::Primitive(Primitive::Cursor(c))) = cursor_the_third {
assert_eq!(c.index, 2);
} else {
panic!("Cursor the third not found");
}
Ok(())
})
.unwrap();
}
#[test]
fn test_set_cursor_to_new_element_in_diff() {
let mut frontend = Frontend::new();
let actor = frontend.actor_id.clone();
let patch1 = amp::Patch {
actor: Some(actor.clone()),
deps: Vec::new(),
seq: Some(1),
clock: hashmap! {actor.clone() => 1},
max_op: 3,
pending_changes: 0,
diffs: Some(amp::Diff::Map(amp::MapDiff {
obj_type: amp::MapType::Map,
object_id: amp::ObjectId::Root,
props: hashmap! {
"list".to_string() => hashmap!{
actor.op_id_at(1) => amp::Diff::Seq(amp::SeqDiff{
object_id: actor.op_id_at(1).into(),
obj_type: amp::SequenceType::List,
edits: vec![
amp::DiffEdit::Insert{index: 0, elem_id: actor.op_id_at(2).into()},
amp::DiffEdit::Insert{index: 1, elem_id: actor.op_id_at(3).into()},
],
props: hashmap!{
0 => hashmap!{
actor.op_id_at(2) => amp::Diff::Value("one".into())
},
1 => hashmap!{
actor.op_id_at(3) => amp::Diff::Value("two".into())
}
}
}),
},
"cursor".to_string() => hashmap!{
actor.op_id_at(4) => amp::Diff::Cursor(amp::CursorDiff{
elem_id: actor.op_id_at(3),
index: 1,
object_id: actor.op_id_at(1).into(),
})
},
},
})),
};
frontend.apply_patch(patch1).unwrap();
let patch2 = amp::Patch {
actor: Some(actor.clone()),
deps: Vec::new(),
seq: Some(2),
clock: hashmap! {actor.clone() => 2},
max_op: 5,
pending_changes: 0,
diffs: Some(amp::Diff::Map(amp::MapDiff {
object_id: amp::ObjectId::Root,
obj_type: amp::MapType::Map,
props: hashmap! {
"cursor".to_string() => hashmap!{
actor.op_id_at(4) => amp::Diff::Cursor(amp::CursorDiff{
elem_id: actor.op_id_at(2),
index: 0,
object_id: actor.op_id_at(1).into(),
})
}
},
})),
};
frontend.apply_patch(patch2).unwrap();
frontend
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::insert(
Path::root().key("list").index(1),
"three".into(),
))?;
let cursor = doc.value_at_path(&Path::root().key("cursor")).unwrap();
match cursor {
Value::Primitive(Primitive::Cursor(c)) => assert_eq!(c.index, 0),
_ => panic!("Cursor value was not a cursor"),
}
Ok(())
})
.unwrap();
}
#[test]
fn test_set_cursor_to_new_element_in_local_change() {
let mut frontend = Frontend::new();
frontend
.change::<_, _, InvalidChangeRequest>(None, |d| {
d.add_change(LocalChange::set(
Path::root().key("list"),
Value::Text("123".graphemes(true).map(|s| s.to_owned()).collect()),
))?;
let cursor = d
.cursor_to_path(&Path::root().key("list").index(1))
.unwrap();
d.add_change(LocalChange::set(Path::root().key("cursor"), cursor))?;
let cursor_the_second = d.value_at_path(&Path::root().key("cursor"));
if let Some(Value::Primitive(Primitive::Cursor(c))) = cursor_the_second {
assert_eq!(c.index, 1);
} else {
panic!("Cursor the second not found");
}
d.add_change(LocalChange::insert(
Path::root().key("list").index(0),
Value::Primitive(Primitive::Str("0".to_string())),
))?;
d.add_change(LocalChange::insert(
Path::root().key("list").index(0),
Value::Primitive(Primitive::Str("1".to_string())),
))?;
d.add_change(LocalChange::set(
Path::root().key("cursor"),
d.cursor_to_path(&Path::root().key("list").index(2))
.unwrap(),
))?;
d.add_change(LocalChange::insert(
Path::root().key("list").index(4),
"2".into(),
))?;
let cursor_the_third = d.value_at_path(&Path::root().key("cursor"));
if let Some(Value::Primitive(Primitive::Cursor(c))) = cursor_the_third {
assert_eq!(c.index, 3);
} else {
panic!("Cursor the third not found");
}
Ok(())
})
.unwrap();
}
#[test]
fn test_delete_cursor_and_adding_again() {
let mut frontend = Frontend::new();
frontend
.change::<_, _, InvalidChangeRequest>(None, |d| {
d.add_change(LocalChange::set(
Path::root().key("list"),
Value::Text("123".graphemes(true).map(|s| s.to_owned()).collect()),
))?;
let cursor = d
.cursor_to_path(&Path::root().key("list").index(1))
.unwrap();
d.add_change(LocalChange::set(Path::root().key("cursor"), cursor.clone()))?;
d.add_change(LocalChange::delete(Path::root().key("cursor")))?;
d.add_change(LocalChange::set(Path::root().key("cursor"), cursor))?;
let cursor_value = d.value_at_path(&Path::root().key("cursor"));
if let Some(Value::Primitive(Primitive::Cursor(c))) = cursor_value {
assert_eq!(c.index, 1);
} else {
panic!("Cursor the third not found");
}
Ok(())
})
.unwrap();
}
// TODO: test removing a cursor

View file

@@ -0,0 +1,893 @@
use std::convert::TryInto;
use automerge_frontend::{Frontend, InvalidChangeRequest, LocalChange, Path, Primitive, Value};
use automerge_protocol as amp;
use maplit::hashmap;
use unicode_segmentation::UnicodeSegmentation;
#[test]
fn test_should_be_empty_after_init() {
let mut frontend = Frontend::new();
let result_state = frontend.state().to_json();
let expected_state: serde_json::Value = serde_json::from_str("{}").unwrap();
assert_eq!(result_state, expected_state);
}
#[test]
fn test_init_with_state() {
let initial_state_json: serde_json::Value = serde_json::from_str(
r#"
{
"birds": {
"wrens": 3.0,
"magpies": 4.0
},
"alist": ["one", 2.0]
}
"#,
)
.unwrap();
let value = Value::from_json(&initial_state_json);
let (mut frontend, _) = Frontend::new_with_initial_state(value).unwrap();
let result_state = frontend.state().to_json();
assert_eq!(initial_state_json, result_state);
}
#[test]
fn test_init_with_empty_state() {
let initial_state_json: serde_json::Value = serde_json::from_str("{}").unwrap();
let value = Value::from_json(&initial_state_json);
let (mut frontend, _) = Frontend::new_with_initial_state(value).unwrap();
let result_state = frontend.state().to_json();
assert_eq!(initial_state_json, result_state);
}
#[test]
fn test_set_root_object_properties() {
let mut doc = Frontend::new();
let change_request = doc
.change::<_, _, InvalidChangeRequest>(Some("set root object".into()), |doc| {
doc.add_change(LocalChange::set(
Path::root().key("bird"),
Value::Primitive(Primitive::Str("magpie".to_string())),
))?;
Ok(())
})
.unwrap()
.1
// Remove the timestamp, which is irrelevant to the test
.map(|mut cr| {
cr.time = 0;
cr
});
let expected_change = amp::UncompressedChange {
actor_id: doc.actor_id,
start_op: 1,
seq: 1,
time: 0,
message: Some("set root object".into()),
hash: None,
deps: Vec::new(),
operations: vec![amp::Op {
action: amp::OpType::Set(amp::ScalarValue::Str("magpie".to_string())),
obj: "_root".try_into().unwrap(),
key: "bird".into(),
insert: false,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
};
assert_eq!(change_request, Some(expected_change));
}
#[test]
fn it_should_return_no_changes_if_nothing_was_changed() {
let mut doc = Frontend::new();
let change_request = doc
.change::<_, _, InvalidChangeRequest>(Some("do nothing".into()), |_| Ok(()))
.unwrap()
.1;
assert!(change_request.is_none())
}
#[test]
fn it_should_create_nested_maps() {
let mut doc = Frontend::new();
let change_request = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("birds"),
Value::from_json(&serde_json::json!({
"wrens": 3
})),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let birds_id = doc.get_object_id(&Path::root().key("birds")).unwrap();
let expected_change = amp::UncompressedChange {
actor_id: doc.actor_id,
start_op: 1,
seq: 1,
time: change_request.time,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![
amp::Op {
action: amp::OpType::Make(amp::ObjType::map()),
obj: amp::ObjectId::Root,
key: "birds".into(),
insert: false,
pred: Vec::new(),
},
amp::Op {
action: amp::OpType::Set(amp::ScalarValue::F64(3.0)),
obj: birds_id,
key: "wrens".into(),
insert: false,
pred: Vec::new(),
},
],
extra_bytes: Vec::new(),
};
assert_eq!(change_request, expected_change);
}
#[test]
fn apply_updates_inside_nested_maps() {
let mut doc = Frontend::new();
let _req1 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("birds"),
Value::from_json(&serde_json::json!({
"wrens": 3,
})),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let state_after_first_change = doc.state().clone();
let req2 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("birds").key("sparrows"),
Value::Primitive(Primitive::F64(15.0)),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let state_after_second_change = doc.state().clone();
assert_eq!(
state_after_first_change,
Value::from_json(&serde_json::json!({
"birds": { "wrens": 3.0}
}))
);
assert_eq!(
state_after_second_change,
Value::from_json(&serde_json::json!({
"birds": {
"wrens": 3.0,
"sparrows": 15.0
}
}))
);
let birds_id = doc.get_object_id(&Path::root().key("birds")).unwrap();
let expected_change_request = amp::UncompressedChange {
actor_id: doc.actor_id,
seq: 2,
start_op: 3,
time: req2.time,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![amp::Op {
action: amp::OpType::Set(amp::ScalarValue::F64(15.0)),
obj: birds_id,
key: "sparrows".into(),
insert: false,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
};
assert_eq!(req2, expected_change_request);
}
#[test]
fn delete_keys_in_a_map() {
let mut doc = Frontend::new();
let _req1 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root(),
Value::from_json(&serde_json::json!({
"magpies": 2,
})),
))?;
doc.add_change(LocalChange::set(
Path::root(),
Value::from_json(&serde_json::json!({
"sparrows": 15,
})),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let req2 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::delete(Path::root().key("magpies")))?;
Ok(())
})
.unwrap()
.1
.unwrap();
assert_eq!(
doc.state(),
&Value::from_json(&serde_json::json!({
"sparrows": 15.0
}))
);
let expected_change_request = amp::UncompressedChange {
actor_id: doc.actor_id.clone(),
seq: 2,
start_op: 3,
time: req2.time,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![amp::Op {
action: amp::OpType::Del,
obj: amp::ObjectId::Root,
key: "magpies".into(),
insert: false,
pred: vec![doc.actor_id.op_id_at(1)],
}],
extra_bytes: Vec::new(),
};
assert_eq!(req2, expected_change_request);
}
#[test]
fn create_lists() {
let mut doc = Frontend::new();
let req1 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("birds"),
Value::Sequence(vec!["chaffinch".into()]),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let _req2 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("birds").index(0),
"chaffinch",
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
assert_eq!(
doc.state(),
&Value::from_json(&serde_json::json!({
"birds": ["chaffinch"],
}))
);
let birds_id = doc.get_object_id(&Path::root().key("birds")).unwrap();
let expected_change_request = amp::UncompressedChange {
actor_id: doc.actor_id,
seq: 1,
start_op: 1,
time: req1.time,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![
amp::Op {
action: amp::OpType::Make(amp::ObjType::list()),
key: "birds".into(),
obj: amp::ObjectId::Root,
insert: false,
pred: Vec::new(),
},
amp::Op {
action: amp::OpType::Set("chaffinch".into()),
obj: birds_id,
key: amp::ElementId::Head.into(),
insert: true,
pred: Vec::new(),
},
],
extra_bytes: Vec::new(),
};
assert_eq!(req1, expected_change_request);
}
#[test]
fn apply_updates_inside_lists() {
let mut doc = Frontend::new();
let _req1 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("birds"),
Value::Sequence(vec!["chaffinch".into()]),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let req2 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("birds").index(0),
"greenfinch",
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
assert_eq!(
doc.state(),
&Value::from_json(&serde_json::json!({
"birds": ["greenfinch"],
}))
);
let birds_id = doc.get_object_id(&Path::root().key("birds")).unwrap();
let expected_change_request = amp::UncompressedChange {
actor_id: doc.actor_id.clone(),
seq: 2,
start_op: 3,
time: req2.time,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![amp::Op {
action: amp::OpType::Set("greenfinch".into()),
obj: birds_id,
key: doc.actor_id.op_id_at(2).into(),
insert: false,
pred: vec![doc.actor_id.op_id_at(2)],
}],
extra_bytes: Vec::new(),
};
assert_eq!(req2, expected_change_request);
}
#[test]
fn delete_list_elements() {
let mut doc = Frontend::new();
let _req1 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("birds"),
vec!["chaffinch", "goldfinch"],
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let req2 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::delete(Path::root().key("birds").index(0)))?;
Ok(())
})
.unwrap()
.1
.unwrap();
assert_eq!(
doc.state(),
&Value::from_json(&serde_json::json!({
"birds": ["goldfinch"],
}))
);
let birds_id = doc.get_object_id(&Path::root().key("birds")).unwrap();
let expected_change_request = amp::UncompressedChange {
actor_id: doc.actor_id.clone(),
seq: 2,
start_op: 4,
time: req2.time,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![amp::Op {
action: amp::OpType::Del,
obj: birds_id,
key: doc.actor_id.op_id_at(2).into(),
insert: false,
pred: vec![doc.actor_id.op_id_at(2)],
}],
extra_bytes: Vec::new(),
};
assert_eq!(req2, expected_change_request);
}
#[test]
fn handle_counters_inside_maps() {
let mut doc = Frontend::new();
let req1 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("wrens"),
Value::Primitive(Primitive::Counter(0)),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let state_after_first_change = doc.state().clone();
let req2 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::increment(Path::root().key("wrens")))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let state_after_second_change = doc.state().clone();
assert_eq!(
state_after_first_change,
Value::Map(
hashmap! {
"wrens".into() => Value::Primitive(Primitive::Counter(0))
},
amp::MapType::Map
)
);
assert_eq!(
state_after_second_change,
Value::Map(
hashmap! {
"wrens".into() => Value::Primitive(Primitive::Counter(1))
},
amp::MapType::Map
)
);
let expected_change_request_1 = amp::UncompressedChange {
actor_id: doc.actor_id.clone(),
seq: 1,
start_op: 1,
time: req1.time,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![amp::Op {
action: amp::OpType::Set(amp::ScalarValue::Counter(0)),
obj: amp::ObjectId::Root,
key: "wrens".into(),
insert: false,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
};
assert_eq!(req1, expected_change_request_1);
let expected_change_request_2 = amp::UncompressedChange {
actor_id: doc.actor_id.clone(),
seq: 2,
start_op: 2,
time: req2.time,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![amp::Op {
action: amp::OpType::Inc(1),
obj: amp::ObjectId::Root,
key: "wrens".into(),
insert: false,
pred: vec![doc.actor_id.op_id_at(1)],
}],
extra_bytes: Vec::new(),
};
assert_eq!(req2, expected_change_request_2);
}
#[test]
fn handle_counters_inside_lists() {
let mut doc = Frontend::new();
let req1 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("counts"),
vec![Value::Primitive(Primitive::Counter(1))],
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let state_after_first_change = doc.state().clone();
let req2 = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::increment_by(
Path::root().key("counts").index(0),
2,
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let state_after_second_change = doc.state().clone();
assert_eq!(
state_after_first_change,
Value::Map(
hashmap! {
"counts".into() => vec![Value::Primitive(Primitive::Counter(1))].into()
},
amp::MapType::Map
)
);
assert_eq!(
state_after_second_change,
Value::Map(
hashmap! {
"counts".into() => vec![Value::Primitive(Primitive::Counter(3))].into()
},
amp::MapType::Map
)
);
let counts_id = doc.get_object_id(&Path::root().key("counts")).unwrap();
let expected_change_request_1 = amp::UncompressedChange {
actor_id: doc.actor_id.clone(),
seq: 1,
time: req1.time,
message: None,
hash: None,
deps: Vec::new(),
start_op: 1,
operations: vec![
amp::Op {
action: amp::OpType::Make(amp::ObjType::list()),
obj: amp::ObjectId::Root,
key: "counts".into(),
insert: false,
pred: Vec::new(),
},
amp::Op {
action: amp::OpType::Set(amp::ScalarValue::Counter(1)),
obj: counts_id.clone(),
key: amp::ElementId::Head.into(),
insert: true,
pred: Vec::new(),
},
],
extra_bytes: Vec::new(),
};
assert_eq!(req1, expected_change_request_1);
let expected_change_request_2 = amp::UncompressedChange {
actor_id: doc.actor_id.clone(),
seq: 2,
start_op: 3,
time: req2.time,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![amp::Op {
action: amp::OpType::Inc(2),
obj: counts_id,
key: doc.actor_id.op_id_at(2).into(),
insert: false,
pred: vec![doc.actor_id.op_id_at(2)],
}],
extra_bytes: Vec::new(),
};
assert_eq!(req2, expected_change_request_2);
}
#[test]
fn refuse_to_overwrite_counter_value() {
let mut doc = Frontend::new();
doc.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("counts"),
Value::Primitive(Primitive::Counter(1)),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let result = doc.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("counts"),
"somethingelse",
))?;
Ok(())
});
assert_eq!(
result,
Err(InvalidChangeRequest::CannotOverwriteCounter {
path: Path::root().key("counts")
})
);
}
#[test]
fn test_sets_characters_in_text() {
let mut doc = Frontend::new();
doc.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("text"),
Value::Text("some".graphemes(true).map(|s| s.to_owned()).collect()),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let request = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(Path::root().key("text").index(1), "a"))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let text_id = doc.get_object_id(&Path::root().key("text")).unwrap();
let expected_change_request = amp::UncompressedChange {
actor_id: doc.actor_id.clone(),
seq: 2,
start_op: 6,
time: request.time,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![amp::Op {
action: amp::OpType::Set(amp::ScalarValue::Str("a".into())),
obj: text_id,
key: doc.actor_id.op_id_at(3).into(),
insert: false,
pred: vec![doc.actor_id.op_id_at(3)],
}],
extra_bytes: Vec::new(),
};
assert_eq!(request, expected_change_request);
let value = doc.get_value(&Path::root()).unwrap();
let expected_value: Value = Value::Map(
hashmap! {
"text".into() => Value::Text(vec!["s".to_owned(), "a".to_owned(), "m".to_owned(), "e".to_owned()]),
},
amp::MapType::Map,
);
assert_eq!(value, expected_value);
}
#[test]
fn test_inserts_characters_in_text() {
let mut doc = Frontend::new();
doc.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("text"),
Value::Text("same".graphemes(true).map(|s| s.to_owned()).collect()),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let request = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::insert(
Path::root().key("text").index(1),
"h".into(),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let text_id = doc.get_object_id(&Path::root().key("text")).unwrap();
let expected_change_request = amp::UncompressedChange {
actor_id: doc.actor_id.clone(),
seq: 2,
start_op: 6,
time: request.time,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![amp::Op {
action: amp::OpType::Set(amp::ScalarValue::Str("h".into())),
obj: text_id,
key: doc.actor_id.op_id_at(2).into(),
insert: true,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
};
assert_eq!(request, expected_change_request);
let value = doc.get_value(&Path::root()).unwrap();
let expected_value: Value = Value::Map(
hashmap! {
"text".into() => Value::Text(vec!["s".to_owned(), "h".to_owned(), "a".to_owned(), "m".to_owned(), "e".to_owned()]),
},
amp::MapType::Map,
);
assert_eq!(value, expected_value);
}
#[test]
fn test_inserts_characters_at_start_of_text() {
let mut doc = Frontend::new();
doc.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("text"),
Value::Text(Vec::new()),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let request = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::insert(
Path::root().key("text").index(0),
"i".into(),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let text_id = doc.get_object_id(&Path::root().key("text")).unwrap();
let expected_change_request = amp::UncompressedChange {
actor_id: doc.actor_id.clone(),
seq: 2,
start_op: 2,
time: request.time,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![amp::Op {
action: amp::OpType::Set(amp::ScalarValue::Str("i".into())),
obj: text_id,
key: amp::ElementId::Head.into(),
insert: true,
pred: Vec::new(),
}],
extra_bytes: Vec::new(),
};
assert_eq!(request, expected_change_request);
let value = doc.get_value(&Path::root()).unwrap();
let expected_value: Value = Value::Map(
hashmap! {
"text".into() => Value::Text(vec!["i".to_owned()]),
},
amp::MapType::Map,
);
assert_eq!(value, expected_value);
}
#[test]
fn test_inserts_at_end_of_lists() {
let mut doc = Frontend::new();
doc.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("birds"),
Value::Sequence(Vec::new()),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let request = doc
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::insert(
Path::root().key("birds").index(0),
"greenfinch".into(),
))?;
doc.add_change(LocalChange::insert(
Path::root().key("birds").index(1),
"bullfinch".into(),
))?;
Ok(())
})
.unwrap()
.1
.unwrap();
let list_id = doc.get_object_id(&Path::root().key("birds")).unwrap();
let expected_change_request = amp::UncompressedChange {
actor_id: doc.actor_id.clone(),
seq: 2,
start_op: 2,
time: request.time,
message: None,
hash: None,
deps: Vec::new(),
operations: vec![
amp::Op {
action: amp::OpType::Set(amp::ScalarValue::Str("greenfinch".into())),
obj: list_id.clone(),
key: amp::ElementId::Head.into(),
insert: true,
pred: Vec::new(),
},
amp::Op {
action: amp::OpType::Set(amp::ScalarValue::Str("bullfinch".into())),
obj: list_id,
key: doc.actor_id.op_id_at(2).into(),
insert: true,
pred: Vec::new(),
},
],
extra_bytes: Vec::new(),
};
assert_eq!(request, expected_change_request);
let value = doc.get_value(&Path::root()).unwrap();
let expected_value: Value = Value::Map(
hashmap! {
"birds".into() => Value::Sequence(vec!["greenfinch".into(), "bullfinch".into()]),
},
amp::MapType::Map,
);
assert_eq!(value, expected_value);
}

View file

@ -0,0 +1,42 @@
use automerge_frontend::{Frontend, InvalidChangeRequest, LocalChange, Path, Value};
#[test]
fn test_delete_index_in_mutation() {
let mut frontend = Frontend::new();
let _cr = frontend
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::set(
Path::root().key("vals"),
Value::Sequence(Vec::new()),
))?;
Ok(())
})
.unwrap();
frontend
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::insert(
Path::root().key("vals").index(0),
"0".into(),
))?;
Ok(())
})
.unwrap();
frontend
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::insert(
Path::root().key("vals").index(1),
"1".into(),
))?;
Ok(())
})
.unwrap();
frontend
.change::<_, _, InvalidChangeRequest>(None, |doc| {
doc.add_change(LocalChange::delete(Path::root().key("vals").index(1)))?;
Ok(())
})
.unwrap();
}

View file

@ -0,0 +1,15 @@
use wasm_bindgen_test::*;
#[wasm_bindgen_test]
fn test_simple_frontend_change_with_set_sequence() {
let mut f = automerge_frontend::Frontend::new_with_timestamper(Box::new(|| None));
f.change::<_, _, automerge_frontend::InvalidChangeRequest>(None, |doc| {
doc.add_change(automerge_frontend::LocalChange::set(
automerge_frontend::Path::root().key(""),
automerge_frontend::Value::Sequence(vec![]),
))
.unwrap();
Ok(())
})
.unwrap();
}

View file

@ -0,0 +1,24 @@
[package]
name = "automerge-protocol"
version = "0.1.0"
authors = ["Alex Good <alex@memoryandthought.me>"]
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
bench = false
[dependencies]
hex = "^0.4.2"
uuid = { version = "^0.8.2", features=["v4"] }
thiserror = "1.0.16"
serde = { version = "^1.0", features=["derive"] }
arbitrary = { version = "1", features = ["derive"], optional = true }
[dev-dependencies]
maplit = "^1.0.2"
serde_json = { version = "^1.0.61", features=["float_roundtrip"], default-features=true }
proptest = "0.10.1"
[features]
derive-arbitrary = ["arbitrary"]

View file

@ -0,0 +1,32 @@
use thiserror::Error;
use crate::{DataType, ScalarValue};
#[derive(Error, Debug)]
#[error("Invalid OpID: {0}")]
pub struct InvalidOpId(pub String);
#[derive(Error, Debug, PartialEq)]
#[error("Invalid object ID: {0}")]
pub struct InvalidObjectId(pub String);
#[derive(Error, Debug)]
#[error("Invalid element ID: {0}")]
pub struct InvalidElementId(pub String);
#[derive(Error, Debug)]
#[error("Invalid actor ID: {0}")]
pub struct InvalidActorId(pub String);
#[derive(Error, Debug, PartialEq)]
#[error("Invalid change hash slice: {0:?}")]
pub struct InvalidChangeHashSlice(pub Vec<u8>);
#[derive(Error, Debug, PartialEq)]
#[error("Invalid scalar value, expected {expected} but received {unexpected}")]
pub struct InvalidScalarValue {
pub raw_value: ScalarValue,
pub datatype: DataType,
pub unexpected: String,
pub expected: String,
}

View file

@ -0,0 +1,491 @@
pub mod error;
mod serde_impls;
mod utility_impls;
use std::{collections::HashMap, convert::TryFrom, fmt};
use serde::{ser::SerializeMap, Deserialize, Serialize, Serializer};
#[derive(Eq, PartialEq, Hash, Clone, PartialOrd, Ord)]
#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))]
pub struct ActorId(Vec<u8>);
impl fmt::Debug for ActorId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("ActorID")
.field(&hex::encode(&self.0))
.finish()
}
}
impl ActorId {
pub fn random() -> ActorId {
ActorId(uuid::Uuid::new_v4().as_bytes().to_vec())
}
pub fn to_bytes(&self) -> Vec<u8> {
self.0.clone()
}
pub fn into_bytes(self) -> Vec<u8> {
self.0
}
pub fn from_bytes(bytes: &[u8]) -> ActorId {
ActorId(bytes.to_vec())
}
pub fn to_hex_string(&self) -> String {
hex::encode(&self.0)
}
pub fn op_id_at(&self, seq: u64) -> OpId {
OpId(seq, self.clone())
}
}
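// Editor's illustrative sketch, not part of the original file: `ActorId`
// round-trips through its byte form, and `op_id_at` pairs a sequence number
// with this actor to name an operation. Assumes the `Debug` impl for `OpId`
// provided elsewhere in this crate (e.g. in `utility_impls`).
#[cfg(test)]
mod actor_id_sketch {
    use super::*;

    #[test]
    fn bytes_round_trip_and_op_ids() {
        let actor = ActorId::random();
        assert_eq!(ActorId::from_bytes(&actor.to_bytes()), actor);
        assert_eq!(actor.op_id_at(5), OpId(5, actor.clone()));
    }
}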
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Copy, Hash)]
#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))]
#[serde(rename_all = "camelCase", untagged)]
pub enum ObjType {
Map(MapType),
Sequence(SequenceType),
}
impl ObjType {
pub fn map() -> ObjType {
ObjType::Map(MapType::Map)
}
pub fn table() -> ObjType {
ObjType::Map(MapType::Table)
}
pub fn text() -> ObjType {
ObjType::Sequence(SequenceType::Text)
}
pub fn list() -> ObjType {
ObjType::Sequence(SequenceType::List)
}
}
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Copy, Hash)]
#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))]
#[serde(rename_all = "camelCase")]
pub enum MapType {
Map,
Table,
}
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Copy, Hash)]
#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))]
#[serde(rename_all = "camelCase")]
pub enum SequenceType {
List,
Text,
}
#[derive(Eq, PartialEq, Hash, Clone)]
#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))]
pub struct OpId(pub u64, pub ActorId);
impl OpId {
pub fn new(seq: u64, actor: &ActorId) -> OpId {
OpId(seq, actor.clone())
}
pub fn counter(&self) -> u64 {
self.0
}
}
#[derive(Eq, PartialEq, Debug, Hash, Clone)]
#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))]
pub enum ObjectId {
Id(OpId),
Root,
}
#[derive(PartialEq, Eq, Debug, Hash, Clone)]
#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))]
pub enum ElementId {
Head,
Id(OpId),
}
impl ElementId {
pub fn as_opid(&self) -> Option<&OpId> {
match self {
ElementId::Head => None,
ElementId::Id(opid) => Some(opid),
}
}
pub fn into_key(self) -> Key {
Key::Seq(self)
}
pub fn not_head(&self) -> bool {
match self {
ElementId::Head => false,
ElementId::Id(_) => true,
}
}
}
#[derive(Serialize, PartialEq, Eq, Debug, Hash, Clone)]
#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))]
#[serde(untagged)]
pub enum Key {
Map(String),
Seq(ElementId),
}
impl Key {
pub fn head() -> Key {
Key::Seq(ElementId::Head)
}
pub fn is_map_key(&self) -> bool {
match self {
Key::Map(_) => true,
Key::Seq(_) => false,
}
}
pub fn as_element_id(&self) -> Option<ElementId> {
match self {
Key::Map(_) => None,
Key::Seq(eid) => Some(eid.clone()),
}
}
pub fn to_opid(&self) -> Option<OpId> {
match self.as_element_id()? {
ElementId::Id(id) => Some(id),
ElementId::Head => None,
}
}
}
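// Editor's sketch (an assumption, not original code): a map key never carries
// an element id, a sequence key always does, and only non-head element ids
// resolve to a concrete `OpId`.
#[cfg(test)]
mod key_helpers_sketch {
    use super::*;

    #[test]
    fn map_and_seq_keys_expose_different_ids() {
        let head = Key::head();
        assert!(!head.is_map_key());
        assert_eq!(head.as_element_id(), Some(ElementId::Head));
        assert_eq!(head.to_opid(), None);
        assert_eq!(Key::Map("field".to_string()).as_element_id(), None);
    }
}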
#[derive(Deserialize, Serialize, PartialEq, Debug, Clone, Copy)]
pub enum DataType {
#[serde(rename = "counter")]
Counter,
#[serde(rename = "timestamp")]
Timestamp,
#[serde(rename = "cursor")]
Cursor,
#[serde(rename = "undefined")]
Undefined,
}
impl DataType {
#[allow(clippy::trivially_copy_pass_by_ref)]
pub fn is_undefined(d: &DataType) -> bool {
matches!(d, DataType::Undefined)
}
}
#[derive(Serialize, PartialEq, Debug, Clone)]
#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))]
#[serde(untagged)]
pub enum ScalarValue {
Str(String),
Int(i64),
Uint(u64),
F64(f64),
F32(f32),
Counter(i64),
Timestamp(i64),
Cursor(OpId),
Boolean(bool),
Null,
}
impl ScalarValue {
pub fn as_datatype(
&self,
datatype: DataType,
) -> Result<ScalarValue, error::InvalidScalarValue> {
match (datatype, self) {
(DataType::Counter, ScalarValue::Int(i)) => Ok(ScalarValue::Counter(*i)),
(DataType::Counter, ScalarValue::Uint(u)) => match i64::try_from(*u) {
Ok(i) => Ok(ScalarValue::Counter(i)),
Err(_) => Err(error::InvalidScalarValue {
raw_value: self.clone(),
expected: "an integer".to_string(),
unexpected: "an integer larger than i64::max_value".to_string(),
datatype,
}),
},
(DataType::Counter, v) => Err(error::InvalidScalarValue {
raw_value: self.clone(),
expected: "an integer".to_string(),
unexpected: v.to_string(),
datatype,
}),
(DataType::Timestamp, ScalarValue::Int(i)) => Ok(ScalarValue::Timestamp(*i)),
(DataType::Timestamp, ScalarValue::Uint(u)) => match i64::try_from(*u) {
Ok(i) => Ok(ScalarValue::Timestamp(i)),
Err(_) => Err(error::InvalidScalarValue {
raw_value: self.clone(),
expected: "an integer".to_string(),
unexpected: "an integer larger than i64::max_value".to_string(),
datatype,
}),
},
(DataType::Timestamp, v) => Err(error::InvalidScalarValue {
raw_value: self.clone(),
expected: "an integer".to_string(),
unexpected: v.to_string(),
datatype,
}),
(DataType::Cursor, v) => Err(error::InvalidScalarValue {
raw_value: self.clone(),
expected: "a cursor".to_string(),
unexpected: v.to_string(),
datatype,
}),
(DataType::Undefined, _) => Ok(self.clone()),
}
}
/// If this value can be coerced to an i64, return the i64 value
pub fn to_i64(&self) -> Option<i64> {
match self {
ScalarValue::Int(n) => Some(*n),
ScalarValue::Uint(n) => Some(*n as i64),
ScalarValue::F32(n) => Some(*n as i64),
ScalarValue::F64(n) => Some(*n as i64),
ScalarValue::Counter(n) => Some(*n),
ScalarValue::Timestamp(n) => Some(*n),
_ => None,
}
}
pub fn datatype(&self) -> Option<DataType> {
match self {
ScalarValue::Counter(..) => Some(DataType::Counter),
ScalarValue::Timestamp(..) => Some(DataType::Timestamp),
_ => None,
}
}
}
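// Editor's sketch (not original code): counters and timestamps travel over
// the wire as plain integers plus a datatype tag; `as_datatype` re-wraps them
// and rejects non-integer inputs.
#[cfg(test)]
mod scalar_value_sketch {
    use super::*;

    #[test]
    fn ints_reinterpret_under_a_datatype_tag() {
        let raw = ScalarValue::Int(3);
        assert_eq!(
            raw.as_datatype(DataType::Counter).unwrap(),
            ScalarValue::Counter(3)
        );
        assert_eq!(ScalarValue::Counter(3).datatype(), Some(DataType::Counter));
        assert!(ScalarValue::Str("x".to_string())
            .as_datatype(DataType::Counter)
            .is_err());
    }
}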
#[derive(PartialEq, Debug, Clone)]
#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))]
pub enum OpType {
Make(ObjType),
Del,
Inc(i64),
Set(ScalarValue),
}
#[derive(PartialEq, Debug, Clone)]
#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))]
pub struct Op {
pub action: OpType,
pub obj: ObjectId,
pub key: Key,
pub pred: Vec<OpId>,
pub insert: bool,
}
impl Op {
pub fn primitive_value(&self) -> Option<ScalarValue> {
match &self.action {
OpType::Set(v) => Some(v.clone()),
OpType::Inc(i) => Some(ScalarValue::Int(*i)),
_ => None,
}
}
pub fn obj_type(&self) -> Option<ObjType> {
match self.action {
OpType::Make(o) => Some(o),
_ => None,
}
}
pub fn to_i64(&self) -> Option<i64> {
self.primitive_value().as_ref().and_then(|v| v.to_i64())
}
}
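// Editor's sketch (assumes the `From<&str>` conversion for `Key` that the
// frontend tests in this diff rely on): `primitive_value` flattens both `Set`
// and `Inc` actions to a scalar, so `to_i64` works for either, while
// `obj_type` only answers for `Make` operations.
#[cfg(test)]
mod op_helpers_sketch {
    use super::*;

    #[test]
    fn inc_exposes_its_delta_as_i64() {
        let op = Op {
            action: OpType::Inc(2),
            obj: ObjectId::Root,
            key: "counter".into(),
            pred: Vec::new(),
            insert: false,
        };
        assert_eq!(op.to_i64(), Some(2));
        assert_eq!(op.obj_type(), None);
    }
}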
#[derive(Eq, PartialEq, Hash, Clone, PartialOrd, Ord, Copy)]
#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))]
pub struct ChangeHash(pub [u8; 32]);
impl fmt::Debug for ChangeHash {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("ChangeHash")
.field(&hex::encode(&self.0))
.finish()
}
}
// The Diff structure maps onto the patch diffs the frontend is expecting, e.g.:
// Diff {
// object_id: 123,
// obj_type: map,
// edits: None,
// props: {
// "key1": {
// "10@abc123":
// DiffLink::Diff(Diff {
// object_id: 444,
// obj_type: list,
// edits: [ DiffEdit { ... } ],
// props: { ... },
// })
// }
// "key2": {
// "11@abc123":
// DiffLink::Value(DiffValue {
// value: 10,
// datatype: "counter"
// }
// }
// }
// }
#[derive(Debug, PartialEq, Clone)]
pub enum Diff {
Map(MapDiff),
Seq(SeqDiff),
Unchanged(ObjDiff),
Value(ScalarValue),
Cursor(CursorDiff),
}
#[derive(Deserialize, Serialize, Debug, PartialEq, Clone)]
#[serde(rename_all = "camelCase")]
pub struct MapDiff {
pub object_id: ObjectId,
#[serde(rename = "type")]
pub obj_type: MapType,
#[serde(skip_serializing_if = "HashMap::is_empty", default)]
pub props: HashMap<String, HashMap<OpId, Diff>>,
}
#[derive(Deserialize, Serialize, Debug, PartialEq, Clone)]
#[serde(rename_all = "camelCase")]
pub struct SeqDiff {
pub object_id: ObjectId,
#[serde(rename = "type")]
pub obj_type: SequenceType,
pub edits: Vec<DiffEdit>,
pub props: HashMap<usize, HashMap<OpId, Diff>>,
}
#[derive(Deserialize, Serialize, Debug, PartialEq, Clone)]
#[serde(rename_all = "camelCase")]
pub struct ObjDiff {
pub object_id: ObjectId,
#[serde(rename = "type")]
pub obj_type: ObjType,
}
#[derive(Debug, PartialEq, Clone)]
pub struct CursorDiff {
pub object_id: ObjectId,
pub elem_id: OpId,
pub index: u32,
}
#[derive(Deserialize, Serialize, Debug, PartialEq, Clone)]
#[serde(rename_all = "camelCase", tag = "action")]
pub enum DiffEdit {
Insert {
index: usize,
#[serde(rename = "elemId")]
elem_id: ElementId,
},
Remove {
index: usize,
},
}
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Patch {
#[serde(skip_serializing_if = "Option::is_none", default)]
pub actor: Option<ActorId>,
#[serde(skip_serializing_if = "Option::is_none", default)]
pub seq: Option<u64>,
pub clock: HashMap<ActorId, u64>,
pub deps: Vec<ChangeHash>,
pub max_op: u64,
pub pending_changes: usize,
// pub can_undo: bool,
// pub can_redo: bool,
// pub version: u64,
#[serde(serialize_with = "Patch::top_level_serialize")]
pub diffs: Option<Diff>,
}
#[derive(Deserialize, Serialize, Debug, Clone)]
#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))]
pub struct UncompressedChange {
#[serde(rename = "ops")]
pub operations: Vec<Op>,
#[serde(rename = "actor")]
pub actor_id: ActorId,
#[serde(skip_serializing_if = "Option::is_none", default)]
pub hash: Option<ChangeHash>,
pub seq: u64,
#[serde(rename = "startOp")]
pub start_op: u64,
pub time: i64,
pub message: Option<String>,
pub deps: Vec<ChangeHash>,
#[serde(skip_serializing_if = "Vec::is_empty", default = "Default::default")]
pub extra_bytes: Vec<u8>,
}
impl PartialEq for UncompressedChange {
    // everything but hash (it's computed and not always present)
fn eq(&self, other: &Self) -> bool {
self.operations == other.operations
&& self.actor_id == other.actor_id
&& self.seq == other.seq
&& self.start_op == other.start_op
&& self.time == other.time
&& self.message == other.message
&& self.deps == other.deps
&& self.extra_bytes == other.extra_bytes
}
}
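// Editor's sketch (not original code): because `eq` skips `hash`, a change
// compares equal to a copy with its hash stripped.
#[cfg(test)]
mod change_equality_sketch {
    use super::*;

    #[test]
    fn hash_is_ignored_by_eq() {
        let change = UncompressedChange {
            operations: Vec::new(),
            actor_id: ActorId::random(),
            hash: Some(ChangeHash([0; 32])),
            seq: 1,
            start_op: 1,
            time: 0,
            message: None,
            deps: Vec::new(),
            extra_bytes: Vec::new(),
        };
        let mut without_hash = change.clone();
        without_hash.hash = None;
        assert_eq!(change, without_hash);
    }
}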
impl UncompressedChange {
pub fn op_id_of(&self, index: u64) -> Option<OpId> {
if let Ok(index_usize) = usize::try_from(index) {
if index_usize < self.operations.len() {
return Some(self.actor_id.op_id_at(self.start_op + index));
}
}
None
}
}
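// Editor's sketch (not original code; assumes the `From<&str>` conversion for
// `Key` used elsewhere in this diff): the op at index `i` within a change is
// assigned the OpId `start_op + i` at this change's actor, and out-of-range
// indices yield `None`.
#[cfg(test)]
mod op_id_of_sketch {
    use super::*;

    #[test]
    fn op_ids_count_from_start_op() {
        let actor = ActorId::random();
        let change = UncompressedChange {
            operations: vec![Op {
                action: OpType::Set(ScalarValue::Int(1)),
                obj: ObjectId::Root,
                key: "k".into(),
                pred: Vec::new(),
                insert: false,
            }],
            actor_id: actor.clone(),
            hash: None,
            seq: 1,
            start_op: 10,
            time: 0,
            message: None,
            deps: Vec::new(),
            extra_bytes: Vec::new(),
        };
        assert_eq!(change.op_id_of(0), Some(actor.op_id_at(10)));
        assert_eq!(change.op_id_of(1), None);
    }
}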
impl Patch {
    // The default behavior is to return {} for an empty patch.
    // This Patch implementation comes with ObjectId::Root baked in, so this covers
    // the top-level scope where not even Root is referenced.
pub(crate) fn top_level_serialize<S>(
maybe_diff: &Option<Diff>,
serializer: S,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
if let Some(diff) = maybe_diff {
diff.serialize(serializer)
} else {
let map = serializer.serialize_map(Some(0))?;
map.end()
}
}
}
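// Editor's sketch (assumes the `serde_json` dev-dependency declared in this
// crate's manifest): an empty patch serializes its `diffs` field as `{}`
// rather than `null`, which is what `top_level_serialize` exists to guarantee.
#[cfg(test)]
mod empty_patch_sketch {
    use std::collections::HashMap;

    use super::*;

    #[test]
    fn empty_diffs_serialize_to_an_empty_object() {
        let patch = Patch {
            actor: None,
            seq: None,
            clock: HashMap::new(),
            deps: Vec::new(),
            max_op: 0,
            pending_changes: 0,
            diffs: None,
        };
        let json = serde_json::to_value(&patch).unwrap();
        assert_eq!(json["diffs"], serde_json::json!({}));
    }
}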

View file

@ -9,7 +9,7 @@ impl Serialize for ChangeHash {
where
S: Serializer,
{
-hex::encode(self.0).serialize(serializer)
+hex::encode(&self.0).serialize(serializer)
}
}

View file

@ -0,0 +1,17 @@
use serde::{ser::SerializeStruct, Serialize, Serializer};
use crate::CursorDiff;
impl Serialize for CursorDiff {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut map = serializer.serialize_struct("CursorDiff", 4)?;
map.serialize_field("refObjectId", &self.object_id)?;
map.serialize_field("elemId", &self.elem_id)?;
map.serialize_field("index", &self.index)?;
map.serialize_field("datatype", "cursor")?;
map.end()
}
}

View file

@ -0,0 +1,262 @@
use std::{collections::HashMap, fmt};
use serde::{
de,
de::{Error, MapAccess, Unexpected},
ser::SerializeStruct,
Deserialize, Deserializer, Serialize, Serializer,
};
use super::read_field;
use crate::{
CursorDiff, DataType, Diff, DiffEdit, MapDiff, ObjDiff, ObjType, ObjectId, OpId, ScalarValue,
SeqDiff,
};
impl Serialize for Diff {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Diff::Map(diff) => diff.serialize(serializer),
Diff::Seq(diff) => diff.serialize(serializer),
Diff::Unchanged(diff) => diff.serialize(serializer),
Diff::Value(val) => match val {
ScalarValue::Counter(_) => {
let mut op = serializer.serialize_struct("Value", 2)?;
op.serialize_field("value", &val)?;
op.serialize_field("datatype", "counter")?;
op.end()
}
ScalarValue::Timestamp(_) => {
let mut op = serializer.serialize_struct("Value", 2)?;
op.serialize_field("value", &val)?;
op.serialize_field("datatype", "timestamp")?;
op.end()
}
_ => {
let mut op = serializer.serialize_struct("Value", 1)?;
op.serialize_field("value", &val)?;
op.end()
}
},
Diff::Cursor(diff) => diff.serialize(serializer),
}
}
}
impl<'de> Deserialize<'de> for Diff {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct DiffVisitor;
const FIELDS: &[&str] = &[
"edits",
"objType",
"objectId",
"props",
"value",
"datatype",
"refObjectId",
"elemId",
"index",
];
impl<'de> de::Visitor<'de> for DiffVisitor {
type Value = Diff;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("A diff")
}
fn visit_map<V>(self, mut map: V) -> Result<Self::Value, V::Error>
where
V: MapAccess<'de>,
{
let mut edits: Option<Vec<DiffEdit>> = None;
let mut object_id: Option<ObjectId> = None;
let mut obj_type: Option<ObjType> = None;
let mut props: Option<HashMap<String, HashMap<OpId, Diff>>> = None;
let mut value: Option<ScalarValue> = None;
let mut datatype: Option<DataType> = None;
let mut elem_id: Option<OpId> = None;
let mut index: Option<u32> = None;
let mut ref_object_id: Option<ObjectId> = None;
while let Some(field) = map.next_key::<String>()? {
match field.as_ref() {
"edits" => read_field("edits", &mut edits, &mut map)?,
"objectId" => read_field("objectId", &mut object_id, &mut map)?,
"type" => read_field("type", &mut obj_type, &mut map)?,
"props" => read_field("props", &mut props, &mut map)?,
"value" => read_field("value", &mut value, &mut map)?,
"datatype" => read_field("datatype", &mut datatype, &mut map)?,
"refObjectId" => read_field("refObjectId", &mut ref_object_id, &mut map)?,
"elemId" => read_field("elemId", &mut elem_id, &mut map)?,
"index" => read_field("index", &mut index, &mut map)?,
_ => return Err(Error::unknown_field(&field, FIELDS)),
}
}
if value.is_some() || datatype.is_some() {
let datatype = datatype.unwrap_or(DataType::Undefined);
match datatype {
DataType::Cursor => {
let ref_object_id =
ref_object_id.ok_or_else(|| Error::missing_field("refObjectId"))?;
let elem_id = elem_id.ok_or_else(|| Error::missing_field("elemId"))?;
let index = index.ok_or_else(|| Error::missing_field("index"))?;
Ok(Diff::Cursor(CursorDiff {
object_id: ref_object_id,
elem_id,
index,
}))
}
_ => {
let value = value.ok_or_else(|| Error::missing_field("value"))?;
let value_with_datatype = maybe_add_datatype_to_value(value, datatype);
Ok(Diff::Value(value_with_datatype))
}
}
} else {
let object_id = object_id.ok_or_else(|| Error::missing_field("objectId"))?;
let obj_type = obj_type.ok_or_else(|| Error::missing_field("type"))?;
if let Some(mut props) = props {
match obj_type {
ObjType::Sequence(seq_type) => {
let edits = edits.ok_or_else(|| Error::missing_field("edits"))?;
let mut new_props = HashMap::new();
for (k, v) in props.drain() {
let index = k.parse().map_err(|_| {
Error::invalid_type(Unexpected::Str(&k), &"an integer")
})?;
new_props.insert(index, v);
}
Ok(Diff::Seq(SeqDiff {
object_id,
obj_type: seq_type,
edits,
props: new_props,
}))
}
ObjType::Map(map_type) => Ok(Diff::Map(MapDiff {
object_id,
obj_type: map_type,
props,
})),
}
} else {
Ok(Diff::Unchanged(ObjDiff {
object_id,
obj_type,
}))
}
}
}
}
deserializer.deserialize_struct("Diff", &FIELDS, DiffVisitor)
}
}
fn maybe_add_datatype_to_value(value: ScalarValue, datatype: DataType) -> ScalarValue {
match datatype {
DataType::Counter => {
if let Some(n) = value.to_i64() {
ScalarValue::Counter(n)
} else {
value
}
}
DataType::Timestamp => {
if let Some(n) = value.to_i64() {
ScalarValue::Timestamp(n)
} else {
value
}
}
_ => value,
}
}
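// Editor's sketch (not original code): combined with the visitor above, the
// datatype tag means `{"value": 1, "datatype": "counter"}` deserializes to a
// `Counter`, not a plain `Int`.
#[cfg(test)]
mod datatype_tag_sketch {
    use crate::{Diff, ScalarValue};

    #[test]
    fn tagged_integer_becomes_a_counter() {
        let json = serde_json::json!({ "value": 1, "datatype": "counter" });
        let diff: Diff = serde_json::from_value(json).unwrap();
        assert_eq!(diff, Diff::Value(ScalarValue::Counter(1)));
    }
}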
#[cfg(test)]
mod tests {
use std::{convert::TryInto, str::FromStr};
use maplit::hashmap;
use crate::{CursorDiff, Diff, MapDiff, MapType, ObjectId, OpId, SeqDiff, SequenceType};
#[test]
fn map_diff_serialization_round_trip() {
let json = serde_json::json!({
"objectId": "1@6121f8757d5d46609b665218b2b3a141",
"type": "map",
"props": {
"key": {
"1@4a093244de2b4fd0a4203724e15dfc16": {
"value": "value"
}
}
}
});
let diff = Diff::Map(MapDiff {
object_id: ObjectId::from_str("1@6121f8757d5d46609b665218b2b3a141").unwrap(),
obj_type: MapType::Map,
props: hashmap! {
"key".to_string() => hashmap!{
OpId::from_str("1@4a093244de2b4fd0a4203724e15dfc16").unwrap() => "value".into()
}
},
});
assert_eq!(json, serde_json::to_value(diff.clone()).unwrap());
assert_eq!(serde_json::from_value::<Diff>(json).unwrap(), diff);
}
#[test]
fn seq_diff_serialization_round_trip() {
let json = serde_json::json!({
"objectId": "1@6121f8757d5d46609b665218b2b3a141",
"type": "list",
"edits": [],
"props": {
"0": {
"1@4a093244de2b4fd0a4203724e15dfc16": {
"value": "value"
}
}
}
});
let diff = Diff::Seq(SeqDiff {
object_id: ObjectId::from_str("1@6121f8757d5d46609b665218b2b3a141").unwrap(),
obj_type: SequenceType::List,
edits: Vec::new(),
props: hashmap! {
0 => hashmap!{
OpId::from_str("1@4a093244de2b4fd0a4203724e15dfc16").unwrap() => "value".into()
}
},
});
assert_eq!(json, serde_json::to_value(diff.clone()).unwrap());
assert_eq!(serde_json::from_value::<Diff>(json).unwrap(), diff);
}
#[test]
fn cursor_diff_serialization_round_trip() {
let json = serde_json::json!({
"datatype": "cursor",
"refObjectId": "1@4a093244de2b4fd0a4203724e15dfc16",
"elemId": "2@4a093244de2b4fd0a4203724e15dfc16",
"index": 0,
});
let diff = Diff::Cursor(CursorDiff {
object_id: "1@4a093244de2b4fd0a4203724e15dfc16".try_into().unwrap(),
elem_id: "2@4a093244de2b4fd0a4203724e15dfc16".try_into().unwrap(),
index: 0,
});
assert_eq!(json, serde_json::to_value(diff.clone()).unwrap());
assert_eq!(serde_json::from_value::<Diff>(json).unwrap(), diff);
}
}

View file

@ -2,7 +2,7 @@ use std::str::FromStr;
use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
-use crate::legacy::ElementId;
+use crate::ElementId;
impl Serialize for ElementId {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>

View file

@ -0,0 +1,19 @@
use std::str::FromStr;
use serde::{Deserialize, Deserializer};
use crate::{ElementId, Key};
impl<'de> Deserialize<'de> for Key {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
if let Ok(eid) = ElementId::from_str(&s) {
Ok(Key::Seq(eid))
} else {
Ok(Key::Map(s))
}
}
}
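// Editor's sketch (not original code): strings that parse as element ids
// (e.g. "2@<actor hex>") become sequence keys; anything else falls back to a
// map key.
#[cfg(test)]
mod key_deserialize_sketch {
    use crate::Key;

    #[test]
    fn plain_strings_fall_back_to_map_keys() {
        let key: Key = serde_json::from_value(serde_json::json!("title")).unwrap();
        assert_eq!(key, Key::Map("title".to_string()));
    }
}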

Some files were not shown because too many files have changed in this diff.