Compare commits

No commits in common. "main" and "load-avoid-queue" have entirely different histories.

main … load-avoid-queue

548 changed files with 34,301 additions and 341,251 deletions
.github/workflows/advisory-cron.yaml (vendored) — deleted, 17 lines (`@@ -1,17 +0,0 @@`)

```yaml
name: Advisories
on:
  schedule:
    - cron: '0 18 * * *'
jobs:
  cargo-deny:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        checks:
          - advisories
          - bans licenses sources
    steps:
      - uses: actions/checkout@v2
      - uses: EmbarkStudios/cargo-deny-action@v1
        with:
          command: check ${{ matrix.checks }}
```
.github/workflows/ci.yaml (vendored) — deleted, 177 lines (`@@ -1,177 +0,0 @@`)

```yaml
name: CI
on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main
jobs:
  fmt:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: 1.67.0
          default: true
          components: rustfmt
      - uses: Swatinem/rust-cache@v1
      - run: ./scripts/ci/fmt
        shell: bash

  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: 1.67.0
          default: true
          components: clippy
      - uses: Swatinem/rust-cache@v1
      - run: ./scripts/ci/lint
        shell: bash

  docs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: 1.67.0
          default: true
      - uses: Swatinem/rust-cache@v1
      - name: Build rust docs
        run: ./scripts/ci/rust-docs
        shell: bash
      - name: Install doxygen
        run: sudo apt-get install -y doxygen
        shell: bash

  cargo-deny:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        checks:
          - advisories
          - bans licenses sources
    continue-on-error: ${{ matrix.checks == 'advisories' }}
    steps:
      - uses: actions/checkout@v2
      - uses: EmbarkStudios/cargo-deny-action@v1
        with:
          arguments: '--manifest-path ./rust/Cargo.toml'
          command: check ${{ matrix.checks }}

  wasm_tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Install wasm-bindgen-cli
        run: cargo install wasm-bindgen-cli wasm-opt
      - name: Install wasm32 target
        run: rustup target add wasm32-unknown-unknown
      - name: run tests
        run: ./scripts/ci/wasm_tests

  deno_tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: denoland/setup-deno@v1
        with:
          deno-version: v1.x
      - name: Install wasm-bindgen-cli
        run: cargo install wasm-bindgen-cli wasm-opt
      - name: Install wasm32 target
        run: rustup target add wasm32-unknown-unknown
      - name: run tests
        run: ./scripts/ci/deno_tests

  js_fmt:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: install
        run: yarn global add prettier
      - name: format
        run: prettier -c javascript/.prettierrc javascript

  js_tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Install wasm-bindgen-cli
        run: cargo install wasm-bindgen-cli wasm-opt
      - name: Install wasm32 target
        run: rustup target add wasm32-unknown-unknown
      - name: run tests
        run: ./scripts/ci/js_tests

  cmake_build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: nightly-2023-01-26
          default: true
      - uses: Swatinem/rust-cache@v1
      - name: Install CMocka
        run: sudo apt-get install -y libcmocka-dev
      - name: Install/update CMake
        uses: jwlawson/actions-setup-cmake@v1.12
        with:
          cmake-version: latest
      - name: Install rust-src
        run: rustup component add rust-src
      - name: Build and test C bindings
        run: ./scripts/ci/cmake-build Release Static
        shell: bash

  linux:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        toolchain:
          - 1.67.0
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: ${{ matrix.toolchain }}
          default: true
      - uses: Swatinem/rust-cache@v1
      - run: ./scripts/ci/build-test
        shell: bash

  macos:
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: 1.67.0
          default: true
      - uses: Swatinem/rust-cache@v1
      - run: ./scripts/ci/build-test
        shell: bash

  windows:
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: 1.67.0
          default: true
      - uses: Swatinem/rust-cache@v1
      - run: ./scripts/ci/build-test
        shell: bash
```
.github/workflows/docs.yaml (vendored) — deleted, 52 lines (`@@ -1,52 +0,0 @@`)

```yaml
on:
  push:
    branches:
      - main

name: Documentation

jobs:
  deploy-docs:
    concurrency: deploy-docs
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: Toolchain
        uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true

      - name: Cache
        uses: Swatinem/rust-cache@v1

      - name: Clean docs dir
        run: rm -rf docs
        shell: bash

      - name: Clean Rust docs dir
        uses: actions-rs/cargo@v1
        with:
          command: clean
          args: --manifest-path ./rust/Cargo.toml --doc

      - name: Build Rust docs
        uses: actions-rs/cargo@v1
        with:
          command: doc
          args: --manifest-path ./rust/Cargo.toml --workspace --all-features --no-deps

      - name: Move Rust docs
        run: mkdir -p docs && mv rust/target/doc/* docs/.
        shell: bash

      - name: Configure root page
        run: echo '<meta http-equiv="refresh" content="0; url=automerge">' > docs/index.html

      - name: Deploy docs
        uses: peaceiris/actions-gh-pages@v3
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./docs
```
.github/workflows/release.yaml (vendored) — deleted, 214 lines (`@@ -1,214 +0,0 @@`)

```yaml
name: Release
on:
  push:
    branches:
      - main

jobs:
  check_if_wasm_version_upgraded:
    name: Check if WASM version has been upgraded
    runs-on: ubuntu-latest
    outputs:
      wasm_version: ${{ steps.version-updated.outputs.current-package-version }}
      wasm_has_updated: ${{ steps.version-updated.outputs.has-updated }}
    steps:
      - uses: JiPaix/package-json-updated-action@v1.0.5
        id: version-updated
        with:
          path: rust/automerge-wasm/package.json
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  publish-wasm:
    name: Publish WASM package
    runs-on: ubuntu-latest
    needs:
      - check_if_wasm_version_upgraded
    # We create release only if the version in the package.json has been upgraded
    if: needs.check_if_wasm_version_upgraded.outputs.wasm_has_updated == 'true'
    steps:
      - uses: actions/setup-node@v3
        with:
          node-version: '16.x'
          registry-url: 'https://registry.npmjs.org'
      - uses: denoland/setup-deno@v1
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ github.ref }}
      - name: Get rid of local github workflows
        run: rm -r .github/workflows
      - name: Remove tmp_branch if it exists
        run: git push origin :tmp_branch || true
      - run: git checkout -b tmp_branch
      - name: Install wasm-bindgen-cli
        run: cargo install wasm-bindgen-cli wasm-opt
      - name: Install wasm32 target
        run: rustup target add wasm32-unknown-unknown
      - name: run wasm js tests
        id: wasm_js_tests
        run: ./scripts/ci/wasm_tests
      - name: run wasm deno tests
        id: wasm_deno_tests
        run: ./scripts/ci/deno_tests
      - name: build release
        id: build_release
        run: |
          npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm run release
      - name: Collate deno release files
        if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
        run: |
          mkdir $GITHUB_WORKSPACE/deno_wasm_dist
          cp $GITHUB_WORKSPACE/rust/automerge-wasm/deno/* $GITHUB_WORKSPACE/deno_wasm_dist
          cp $GITHUB_WORKSPACE/rust/automerge-wasm/index.d.ts $GITHUB_WORKSPACE/deno_wasm_dist
          cp $GITHUB_WORKSPACE/rust/automerge-wasm/README.md $GITHUB_WORKSPACE/deno_wasm_dist
          cp $GITHUB_WORKSPACE/rust/automerge-wasm/LICENSE $GITHUB_WORKSPACE/deno_wasm_dist
          sed -i '1i /// <reference types="./index.d.ts" />' $GITHUB_WORKSPACE/deno_wasm_dist/automerge_wasm.js
      - name: Create npm release
        if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
        run: |
          if [ "$(npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm show . version)" = "$VERSION" ]; then
            echo "This version is already published"
            exit 0
          fi
          EXTRA_ARGS="--access public"
          if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then
            echo "Is pre-release version"
            EXTRA_ARGS="$EXTRA_ARGS --tag next"
          fi
          if [ "$NODE_AUTH_TOKEN" = "" ]; then
            echo "Can't publish on NPM, You need a NPM_TOKEN secret."
            false
          fi
          npm publish $GITHUB_WORKSPACE/rust/automerge-wasm $EXTRA_ARGS
        env:
          NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
          VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
      - name: Commit wasm deno release files
        run: |
          git config --global user.name "actions"
          git config --global user.email actions@github.com
          git add $GITHUB_WORKSPACE/deno_wasm_dist
          git commit -am "Add deno release files"
          git push origin tmp_branch
      - name: Tag wasm release
        if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
        uses: softprops/action-gh-release@v1
        with:
          name: Automerge Wasm v${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
          tag_name: js/automerge-wasm-${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
          target_commitish: tmp_branch
          generate_release_notes: false
          draft: false
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Remove tmp_branch
        run: git push origin :tmp_branch
  check_if_js_version_upgraded:
    name: Check if JS version has been upgraded
    runs-on: ubuntu-latest
    outputs:
      js_version: ${{ steps.version-updated.outputs.current-package-version }}
      js_has_updated: ${{ steps.version-updated.outputs.has-updated }}
    steps:
      - uses: JiPaix/package-json-updated-action@v1.0.5
        id: version-updated
        with:
          path: javascript/package.json
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  publish-js:
    name: Publish JS package
    runs-on: ubuntu-latest
    needs:
      - check_if_js_version_upgraded
      - check_if_wasm_version_upgraded
      - publish-wasm
    # We create release only if the version in the package.json has been upgraded and after the WASM release
    if: |
      (always() && ! cancelled()) &&
      (needs.publish-wasm.result == 'success' || needs.publish-wasm.result == 'skipped') &&
      needs.check_if_js_version_upgraded.outputs.js_has_updated == 'true'
    steps:
      - uses: actions/setup-node@v3
        with:
          node-version: '16.x'
          registry-url: 'https://registry.npmjs.org'
      - uses: denoland/setup-deno@v1
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ github.ref }}
      - name: Get rid of local github workflows
        run: rm -r .github/workflows
      - name: Remove js_tmp_branch if it exists
        run: git push origin :js_tmp_branch || true
      - run: git checkout -b js_tmp_branch
      - name: check js formatting
        run: |
          yarn global add prettier
          prettier -c javascript/.prettierrc javascript
      - name: run js tests
        id: js_tests
        run: |
          cargo install wasm-bindgen-cli wasm-opt
          rustup target add wasm32-unknown-unknown
          ./scripts/ci/js_tests
      - name: build js release
        id: build_release
        run: |
          npm --prefix $GITHUB_WORKSPACE/javascript run build
      - name: build js deno release
        id: build_deno_release
        run: |
          VERSION=$WASM_VERSION npm --prefix $GITHUB_WORKSPACE/javascript run deno:build
        env:
          WASM_VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
      - name: run deno tests
        id: deno_tests
        run: |
          npm --prefix $GITHUB_WORKSPACE/javascript run deno:test
      - name: Collate deno release files
        if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
        run: |
          mkdir $GITHUB_WORKSPACE/deno_js_dist
          cp $GITHUB_WORKSPACE/javascript/deno_dist/* $GITHUB_WORKSPACE/deno_js_dist
      - name: Create npm release
        if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
        run: |
          if [ "$(npm --prefix $GITHUB_WORKSPACE/javascript show . version)" = "$VERSION" ]; then
            echo "This version is already published"
            exit 0
          fi
          EXTRA_ARGS="--access public"
          if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then
            echo "Is pre-release version"
            EXTRA_ARGS="$EXTRA_ARGS --tag next"
          fi
          if [ "$NODE_AUTH_TOKEN" = "" ]; then
            echo "Can't publish on NPM, You need a NPM_TOKEN secret."
            false
          fi
          npm publish $GITHUB_WORKSPACE/javascript $EXTRA_ARGS
        env:
          NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
          VERSION: ${{ needs.check_if_js_version_upgraded.outputs.js_version }}
      - name: Commit js deno release files
        run: |
          git config --global user.name "actions"
          git config --global user.email actions@github.com
          git add $GITHUB_WORKSPACE/deno_js_dist
          git commit -am "Add deno js release files"
          git push origin js_tmp_branch
      - name: Tag JS release
        if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
        uses: softprops/action-gh-release@v1
        with:
          name: Automerge v${{ needs.check_if_js_version_upgraded.outputs.js_version }}
          tag_name: js/automerge-${{ needs.check_if_js_version_upgraded.outputs.js_version }}
          target_commitish: js_tmp_branch
          generate_release_notes: false
          draft: false
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Remove js_tmp_branch
        run: git push origin :js_tmp_branch
```
.gitignore (vendored) — 12 changed lines (`@@ -1,6 +1,8 @@`)

```diff
@@ -1,6 +1,8 @@
-/.direnv
-perf.*
-/Cargo.lock
-build/
-.vim/*
 /target
+**/*.rs.bk
+Cargo.lock
+libtest.rmeta
+
+.direnv/
+result
+result-lib
```
.rustfmt.toml — new file, 2 lines (`@@ -0,0 +1,2 @@`)

```toml
group_imports = "StdExternalCrate"
imports_granularity = "Crate"
```
.travis.yml — new file, 74 lines (`@@ -0,0 +1,74 @@`)

```yaml
os: linux
dist: xenial
language: rust

if: branch = main

install:
  - rustup self update
  - rustup component add clippy
  - rustup component add rustfmt
  - curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
  - nvm install 12

jobs:
  allow_failures:
    - rust: nightly
  fast_finish: true
  include:
    - name: Stable - Format, Clippy and Docs
      rust: stable
      script:
        - cargo fmt --all -- --check
        - cargo clippy --all-targets --all-features -- -D warnings
        - cargo doc --workspace --all-features
    - name: Stable - Build and Test
      rust: stable
      script:
        - cargo build --all-targets --workspace
        - cargo test --workspace
    - name: Stable - Wasm and Interop
      rust: stable
      script:
        - wasm-pack test automerge-frontend --node
        - cd automerge-backend-wasm
        - yarn dev
        - yarn test:js

    - name: Beta - Format, Clippy and Docs
      rust: beta
      script:
        - cargo fmt --all -- --check
        - cargo clippy --all-targets --all-features -- -D warnings
        - cargo doc --workspace --all-features
    - name: Beta - Build and Test
      rust: beta
      script:
        - cargo build --all-targets --workspace
        - cargo test --workspace
    - name: Beta - Wasm and Interop
      rust: beta
      script:
        - wasm-pack test automerge-frontend --node
        - cd automerge-backend-wasm
        - yarn dev
        - yarn test:js

    - name: Nightly - Format, Clippy and Docs
      rust: nightly
      script:
        - cargo fmt --all -- --check
        - cargo clippy --all-targets --all-features -- -D warnings
        - cargo doc --workspace --all-features
    - name: Nightly - Build and Test
      rust: nightly
      script:
        - cargo build --all-targets --workspace
        - cargo test --workspace
    - name: Nightly - Wasm and Interop
      rust: nightly
      script:
        - wasm-pack test automerge-frontend --node
        - cd automerge-backend-wasm
        - yarn dev
        - yarn test:js
```
Cargo.toml — new file, 17 lines (`@@ -0,0 +1,17 @@`)

```toml
[workspace]

members = [
  "automerge",
  "automerge-c",
  "automerge-c-v2",
  "automerge-backend",
  "automerge-backend-wasm",
  "automerge-frontend",
  "automerge-cli",
  "automerge-protocol",
  "fuzz",
  "perf",
]

[profile.release]
lto = true
```
LICENSE — 20 changed lines (`@@ -1,19 +1,7 @@`)

```diff
@@ -1,19 +1,7 @@
-Copyright (c) 2019-2021 the Automerge contributors
+Copyright 2019 Alex Good
 
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
 
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
 
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
```
Makefile — new file, 41 lines (`@@ -0,0 +1,41 @@`)

```make
.PHONY: all
all: ci

.PHONY: fmt
fmt:
	cargo fmt --all -- --check

.PHONY: clippy
clippy:
	cargo clippy --all-targets --all-features -- -D warnings

.PHONY: doc
doc:
	cargo doc --workspace --all-features

.PHONY: build
build:
	cargo build --all-targets --workspace

.PHONY: build-wasm
build-wasm:
	cd automerge-backend-wasm && yarn dev

.PHONY: test
test: test-rust test-wasm test-js
	cargo test --workspace

.PHONY: test-rust
test-rust:
	cargo test --workspace

.PHONY: test-wasm
test-wasm:
	wasm-pack test automerge-frontend --node

.PHONY: test-js
test-js: build-wasm
	cd automerge-backend-wasm && yarn test:js

.PHONY: ci
ci: fmt clippy doc build test
```
README.md — 176 changed lines (`@@ -1,147 +1,55 @@`)

````diff
@@ -1,147 +1,55 @@
 # Automerge
 
-<img src='./img/sign.svg' width='500' alt='Automerge logo' />
+[](https://docs.rs/automerge)
+[](https://crates.io/crates/automerge)
+[](https://travis-ci.org/automerge/automerge-rs)
 
-[](https://automerge.org/)
-[](https://automerge.org/automerge-rs/automerge/)
-[](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml)
-[](https://github.com/automerge/automerge-rs/actions/workflows/docs.yaml)
+This is a rust implementation of
+[automerge](https://github.com/automerge/automerge). Currently this repo
+contains an implementation of the "backend" of the Automerge library, designed
+to be used via FFI from many different platforms. Very soon there will also be
+a frontend which will be designed for Rust application developers to use.
 
-Automerge is a library which provides fast implementations of several different
-CRDTs, a compact compression format for these CRDTs, and a sync protocol for
-efficiently transmitting those changes over the network. The objective of the
-project is to support [local-first](https://www.inkandswitch.com/local-first/) applications in the same way that relational
-databases support server applications - by providing mechanisms for persistence
-which allow application developers to avoid thinking about hard distributed
-computing problems. Automerge aims to be PostgreSQL for your local-first app.
+This project is tracking the `performance` branch of the JavaScript reference implementation of Automerge. The `performance` branch contains a lot of backwards incompatible changes and is intended to become a 1.0 release of the library, you can find more information about that [here](https://github.com/automerge/automerge/pull/253). Our goal is to release a pre 1.0 version of the rust library once the JavaScript library hits 1.0. As such we are keeping this project up to date with the frequent and often quite large changes in the `performance` branch of the JavaScript repo - that is to say, don't depend on anything in this repo to stay constant right now.
 
-If you're looking for documentation on the JavaScript implementation take a look
-at https://automerge.org/docs/hello/. There are other implementations in both
-Rust and C, but they are earlier and don't have documentation yet. You can find
-them in `rust/automerge` and `rust/automerge-c` if you are comfortable
-reading the code and tests to figure out how to use them.
-
-If you're familiar with CRDTs and interested in the design of Automerge in
-particular take a look at https://automerge.org/docs/how-it-works/backend/
+## Using automerge-backend-wasm with automerge
 
-Finally, if you want to talk to us about this project please [join the
-Slack](https://join.slack.com/t/automerge/shared_invite/zt-e4p3760n-kKh7r3KRH1YwwNfiZM8ktw)
+This backend is tracking the [performance branch of automerge](https://github.com/automerge/automerge/tree/performance)
 
-## Status
-
-This project is formed of a core Rust implementation which is exposed via FFI in
-javascript+WASM, C, and soon other languages. Alex
-([@alexjg](https://github.com/alexjg/)) is working full time on maintaining
-automerge, other members of Ink and Switch are also contributing time and there
-are several other maintainers. The focus is currently on shipping the new JS
-package. We expect to be iterating the API and adding new features over the next
-six months so there will likely be several major version bumps in all packages
-in that time.
-
-In general we try and respect semver.
-
-### JavaScript
-
-A stable release of the javascript package is currently available as
-`@automerge/automerge@2.0.0` where pre-release versions of the `2.0.1` are
-available as `2.0.1-alpha.n`. `2.0.1*` packages are also available for Deno at
-https://deno.land/x/automerge
-
-### Rust
-
-The rust codebase is currently oriented around producing a performant backend
-for the Javascript wrapper and as such the API for Rust code is low level and
-not well documented. We will be returning to this over the next few months but
-for now you will need to be comfortable reading the tests and asking questions
-to figure out how to use it. If you are looking to build rust applications which
-use automerge you may want to look into
-[autosurgeon](https://github.com/alexjg/autosurgeon)
-
-## Repository Organisation
-
-- `./rust` - the rust implementation and also the Rust components of
-  platform specific wrappers (e.g. `automerge-wasm` for the WASM API or
-  `automerge-c` for the C FFI bindings)
-- `./javascript` - The javascript library which uses `automerge-wasm`
-  internally but presents a more idiomatic javascript interface
-- `./scripts` - scripts which are useful to maintenance of the repository.
-  This includes the scripts which are run in CI.
-- `./img` - static assets for use in `.md` files
-
-## Building
-
-To build this codebase you will need:
-
-- `rust`
-- `node`
-- `yarn`
-- `cmake`
-- `cmocka`
-
-You will also need to install the following with `cargo install`
-
-- `wasm-bindgen-cli`
-- `wasm-opt`
-- `cargo-deny`
-
-And ensure you have added the `wasm32-unknown-unknown` target for rust cross-compilation.
-
-The various subprojects (the rust code, the wrapper projects) have their own
-build instructions, but to run the tests that will be run in CI you can run
-`./scripts/ci/run`.
-
-### For macOS
-
-These instructions worked to build locally on macOS 13.1 (arm64) as of
-Nov 29th 2022.
-
-```bash
-# clone the repo
-git clone https://github.com/automerge/automerge-rs
-cd automerge-rs
-
-# install rustup
-curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
-
-# install homebrew
-/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
-
-# install cmake, node, cmocka
-brew install cmake node cmocka
-
-# install yarn
-npm install --global yarn
-
-# install javascript dependencies
-yarn --cwd ./javascript
-
-# install rust dependencies
-cargo install wasm-bindgen-cli wasm-opt cargo-deny
-
-# get nightly rust to produce optimized automerge-c builds
-rustup toolchain install nightly
-rustup component add rust-src --toolchain nightly
-
-# add wasm target in addition to current architecture
-rustup target add wasm32-unknown-unknown
-
-# Run ci script
-./scripts/ci/run
-```
-
-If your build fails to find `cmocka.h` you may need to teach it about homebrew's
-installation location:
+To build the wasm backend you'll need to install [wasm-pack](https://rustwasm.github.io/wasm-pack/installer/). Then:
 
 ```
-export CPATH=/opt/homebrew/include
-export LIBRARY_PATH=/opt/homebrew/lib
-./scripts/ci/run
+$ cd automerge-backend-wasm
+$ yarn release
 ```
 
-## Contributing
+Once it is built set the new default backend in your js application like this.
 
+```js
+const wasmBackend = require(path.resolve(WASM_BACKEND_PATH))
+Automerge.setDefaultBackend(wasmBackend)
+```
+
+## Backend? Frontend?
+
+Automerge is a JSON CRDT, in this sense it is just a data structure with a set
+of rules about how to merge two different versions of that data structure.
+However, in practice one often needs two separate roles when writing
+applications which use the CRDT:
+
+- A very low latency process, usually running on some kind of UI thread, which
+  records changes made by the user and reflects them in the UI
+- A less latency sensitive process which executes the complex logic of merging changes
+  received from the UI and over the network and send diffs to the frontend to apply
+
+More details can be found [here](https://github.com/automerge/automerge/blob/performance/BINARY_FORMAT.md).
+
+Note that the performance branch of automerge is under active development and is changing quickly.
+
+## Community
+
+Development of automerge rust is currently being coordinated at our [slack channel](https://automerge.slack.com/archives/CTQARU3NZ). Come say hi. =)
 
-Please try and split your changes up into relatively independent commits which
-change one subsystem at a time and add good commit messages which describe what
-the change is and why you're making it (err on the side of longer commit
-messages). `git blame` should give future maintainers a good idea of why
-something is the way it is.
````
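The frontend/backend split described in the added README text above is what the rest of this branch implements. As a minimal sketch, assuming the pre-1.0 automerge JavaScript API (the high-level `Automerge` calls hide the split, routing merges through whichever backend is installed):

```js
// Minimal sketch, assuming the pre-1.0 automerge JS API.
// The frontend runs on the UI thread; the WASM backend does the merging.
const Automerge = require("automerge")
const Backend = require("automerge-backend-wasm")
Automerge.setDefaultBackend(Backend)

let doc = Automerge.init()
// The frontend reflects this edit immediately; the backend later merges
// concurrent changes received over the network and emits patches to apply.
doc = Automerge.change(doc, (d) => { d.cards = [] })
```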
automerge-backend-wasm/.gitignore (vendored) — new file, 7 lines (`@@ -0,0 +1,7 @@`)

```gitignore
node_modules
/wasm-pack.log
/build
/dev

# Used for js-interop tests
/automerge-js-temp
```
automerge-backend-wasm/Cargo.toml — modified

```diff
@@ -1,15 +1,14 @@
 # You must change these to your own details.
 [package]
-name = "automerge-wasm"
+name = "automerge-backend-wasm"
 description = "An js/wasm wrapper for the rust implementation of automerge-backend"
 repository = "https://github.com/automerge/automerge-rs"
 version = "0.1.0"
 authors = ["Alex Good <alex@memoryandthought.me>","Orion Henry <orion@inkandswitch.com>", "Martin Kleppmann"]
 categories = ["wasm"]
 readme = "README.md"
-edition = "2021"
+edition = "2018"
 license = "MIT"
-rust-version = "1.57.0"
 
 [lib]
 crate-type = ["cdylib","rlib"]
@@ -22,30 +21,19 @@ default = ["console_error_panic_hook"]
 [dependencies]
 console_error_panic_hook = { version = "^0.1", optional = true }
 # wee_alloc = { version = "^0.4", optional = true }
-automerge = { path = "../automerge", features=["wasm"] }
+automerge-backend = { path = "../automerge-backend" }
+automerge-protocol = { path = "../automerge-protocol" }
 js-sys = "^0.3"
 serde = "^1.0"
 serde_json = "^1.0"
-rand = { version = "^0.8.4" }
-getrandom = { version = "^0.2.2", features=["js"] }
-uuid = { version = "^1.2.1", features=["v4", "js", "serde"] }
-serde-wasm-bindgen = "0.4.3"
+getrandom = { version = "0.2.2", features=["js"] }
+uuid = { version = "^0.8.2", features=["v4", "wasm-bindgen", "serde"] }
+serde-wasm-bindgen = "0.1.3"
-serde_bytes = "0.11.5"
-hex = "^0.4.3"
-regex = "^1.5"
-itertools = "^0.10.3"
 thiserror = "^1.0.16"
 
 [dependencies.wasm-bindgen]
-version = "^0.2.83"
-#features = ["std"]
-features = ["serde-serialize", "std"]
-
-[package.metadata.wasm-pack.profile.release]
-# wasm-opt = false
-
-[package.metadata.wasm-pack.profile.profiling]
-wasm-opt = false
+version = "^0.2"
+features = ["serde-serialize"]
 
 # The `web-sys` crate allows you to interact with the various browser APIs,
 # like the DOM.
@@ -54,9 +42,7 @@ version = "0.3.22"
 features = ["console"]
 
-
 [dev-dependencies]
 futures = "^0.1"
-proptest = { version = "^1.0.0", default-features = false, features = ["std"] }
-wasm-bindgen-futures = "^0.4"
+wasm-bindgen-futures = "^0.3"
 wasm-bindgen-test = "^0.3"
 
```
automerge-backend-wasm/GOALS_AND_ISSUES.md — new file, 78 lines (`@@ -0,0 +1,78 @@`)

````markdown
# WASM Goals and Issues

We set out with this project to see if we could create a backend implementation
for Automerge that could serve as a basis for native ports to many different
languages but also replace the javascript backend of the current implementation
without any compromises.

We chose Rust as the basis of this project. It has the same performance
characteristics as C and C++ making it ideal for implementing a database-like
tool. It also has safety guarantees that C and C++ lack, which will protect us from
synchronization issues and data races that plague projects like this. Rust
also has a very mature WASM integration suite of tools.

Our goal was to create a zero compromise implementation of the backend. We
almost achieved this goal. Here are the details of the compromises we found.

## Problem: WASM memory and garbage collection

Memory allocated in WASM needs to be explicitly freed. And there is no feature
(yet) in javascript to alert you when an object has been collected by the
GC. This makes immutable APIs undoable since you need the GC to collect old
versions of objects.

Also this means that an Automerge backend would need to be explicitly freed at the
end of its life. Under normal circumstances a backend will live indefinitely so this
would not require a change but in situations where many small databases are being
created and thrown away this requires an API change.

## Solution

The performance branch of Automerge has made some small but important adjustments to
the Frontend/Backend API. These now assume the backends to be long lived and possibly
mutable and disallow creating divergent histories with old handles to the backend.
A `clone` function was added to allow this behavior if it was intentional and a `free`
that can do cleanup.

```js
let doc1 = Automerge.init();
let doc2 = Automerge.clone(doc1);
Automerge.free(doc1);
```

## Problem: WASM is fundamentally async - Automerge is sync

WASM's love of all things async was surely the largest thorn in our side. It basically boils down to this...

1. ### Loading WASM requires IO - IO is async

   WASM binaries are not js - loading them from JS is async (with the notable exception of node's `readFileSync()`)

2. ### WebAssembly.Module(buffer) has a 4k limit on the render thread in browsers

   Even if you can synchronously load and compile the wasm, most browsers impose a 4k limit on synchronous (but not asynchronous) WASM compilation in the render thread. This is not an issue in node applications or in web workers.

## Solutions

1. ### Compile Rust to ASM.js - (no problems except it's big and slow)

   Now it's javascript. All the strangeness of WASM goes away. Webpack will happily inline the code into a bundle. The only downside, 400k of WASM becomes 5M of js and it runs 3 times slower.

2. ### Inline the WASM as a base64 encoded string - (no problems except the render thread)

   This is actually surprisingly effective. The size added to the js bundle is reasonable and the decode time is trivial. The only issue is, it still won't work in the render thread

3. ### Wait for top level await (no problems - someday)

   There is a proposal for top level await support in js modules. This would allow us to insert an internal await into the backend module and hide the async load from users. Unfortunately its not in JS yet...

4. ### Change Automerge.init to be async (no problems except a breaking api change)

   All of the async strangeness can be boiled down to the Automerge.init() call. This would require introducing an api change that has no purpose in the JS only implementation and represents a non-trivial compromise in adopting WASM

   ```js
   const doc = Automerge.init();
   // becomes
   const doc = await Automerge.init();
   ```
````
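To make solution 2 from the file above concrete, here is a rough sketch of synchronous instantiation from an inlined binary. The `WASM_BASE64` constant and the `imports` object are placeholders that a bundler and the wasm-bindgen glue would normally supply:

```js
// Sketch of "inline the WASM as a base64 encoded string": the bundler embeds
// the binary as a string (WASM_BASE64 is a placeholder), so no async IO is needed.
const bytes = Uint8Array.from(atob(WASM_BASE64), (c) => c.charCodeAt(0))
// Synchronous compile + instantiate. This hits the ~4k render-thread limit the
// document mentions, but works fine in node or inside a web worker.
const module = new WebAssembly.Module(bytes)
const instance = new WebAssembly.Instance(module, imports)
```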
automerge-backend-wasm/LICENSE — new file, 20 lines (`@@ -0,0 +1,20 @@`)

```text
Copyright (c) 2020 Ink & Switch LLC

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
```
automerge-backend-wasm/README.md — new file, 22 lines (`@@ -0,0 +1,22 @@`)

````markdown
## automerge-backend-wasm

This is a wrapper for the rust implementation of [automerge-backend](https://github.com/automerge/automerge-rs/tree/master/automerge-backend) to be used with [Automerge](https://github.com/automerge/automerge).

### Using

You can require this synchronously as a CommonJS module or import it as a ES6 module

```js
let Automerge = require("automerge")
let Backend = require("automerge-backend-wasm")
Automerge.setDefaultBackend(Backend)
```

```js
import * as Automerge from "automerge"
import * as Backend from "automerge-backend-wasm"
Automerge.setDefaultBackend(Backend)
```

Note that the first uses a synchronous filesystem load of the wasm and will not be transferable to a browser bundle. The second uses ES6 wasm import statements which should work in all modern browsers but require a '--experimental-wasm-modules' flag on nodejs (v13 on) unless you pack/bundle the code into compatible format.
````
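For code that must run unchanged in both environments, one hedged workaround (a hypothetical helper, not part of this package) is to pick the load strategy at runtime:

```js
// Hypothetical helper, not shipped with this package: use the synchronous
// CommonJS load under node and the async ES module load elsewhere.
async function loadBackend() {
  if (typeof require === "function") {
    return require("automerge-backend-wasm") // node: synchronous filesystem load
  }
  return import("automerge-backend-wasm") // browser/bundler: async wasm import
}
```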
automerge-backend-wasm/package.json — new file, 38 lines (`@@ -0,0 +1,38 @@`)

```json
{
  "collaborators": [
    "Orion Henry <orion@inkandswitch.com>",
    "Alex Good <alex@memoryandthought.me>"
  ],
  "name": "automerge-backend-wasm",
  "description": "wasm-bindgen bindings to the automerge-backend rust implementation",
  "version": "0.1.0",
  "license": "MIT",
  "files": [
    "README.md",
    "pkg.js",
    "LICENSE",
    "package.json",
    "automerge_backend_wasm_bg.wasm",
    "automerge_backend_wasm.js"
  ],
  "main": "./dev/index.js",
  "scripts": {
    "cjs-release": "wasm-pack build --target nodejs --release --out-name index -d build/cjs && rm build/cjs/package.json",
    "mjs-release": "wasm-pack build --target bundler --release --out-name index -d build/mjs && cp package.mjs.json build/mjs/package.json",
    "prep-release": "rm build/*/.gitignore build/*/README.md build/*/LICENSE build/*/*.ts",
    "release": "yarn cjs-release && yarn mjs-release && yarn prep-release && cp package.pkg.json build/package.json && cp README.md LICENSE build",
    "cjs": "wasm-pack build --target nodejs --dev --out-name index -d build/cjs && rm build/cjs/package.json",
    "mjs": "wasm-pack build --target bundler --dev --out-name index -d build/mjs && cp package.mjs.json build/mjs/package.json",
    "dev": "yarn cjs && yarn mjs && yarn prep-release && cp package.pkg.json build/package.json && cp README.md LICENSE build",
    "build": "rm -rf dev && wasm-pack build --target nodejs --dev --out-name index -d dev",
    "profiling": "wasm-pack build --target nodejs --profiling --out-name index -d dev",
    "mocha": "yarn build && mocha --bail --full-trace",
    "webpack": "webpack",
    "test": "cargo test && wasm-pack test --node",
    "test:js": "./scripts/js_tests.sh"
  },
  "dependencies": {},
  "devDependencies": {
    "mocha": "^7.1.1"
  }
}
```
automerge-backend-wasm/package.mjs.json — new file, 3 lines (`@@ -0,0 +1,3 @@`)

```json
{
  "type": "module"
}
```
|
35
automerge-backend-wasm/package.pkg.json
Normal file
35
automerge-backend-wasm/package.pkg.json
Normal file
|
@ -0,0 +1,35 @@
|
|||
{
|
||||
"name": "automerge-backend-wasm",
|
||||
"collaborators": [
|
||||
"Alex Good <alex@memoryandthought.me>",
|
||||
"Orion Henry <orion@inkandswitch.com>",
|
||||
"Martin Kleppmann"
|
||||
],
|
||||
"description": "A js/wasm wrapper for the rust implementation of automerge-backend",
|
||||
"version": "0.1.4",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/automerge/automerge-rs"
|
||||
},
|
||||
"files": [
|
||||
"README.md",
|
||||
"LICENSE",
|
||||
"cjs/index_bg.wasm",
|
||||
"cjs/index.js",
|
||||
"mjs/package.json",
|
||||
"mjs/index_bg.wasm",
|
||||
"mjs/index_bg.js",
|
||||
"mjs/index.js"
|
||||
],
|
||||
"type": "commonjs",
|
||||
"exports": {
|
||||
".": {
|
||||
"require": "./cjs/index.js",
|
||||
"default": "./mjs/index.js"
|
||||
}
|
||||
},
|
||||
"main" : "./cjs/index.js",
|
||||
"module" : "./mjs/index.js",
|
||||
"sideEffects": false
|
||||
}
|
automerge-backend-wasm/scripts/js_tests.sh (executable) — new file, 41 lines (`@@ -0,0 +1,41 @@`)

```bash
#! /usr/bin/env bash
set -e

ORIGINAL_PWD=$PWD
if [[ -z $AUTOMERGE_JS_DIR ]]; then
  COMMIT_HASH=ee09eead68d572725b344bd7ab94bfa3d6d29889
  AUTOMERGE_JS_DIR="./automerge-js-temp"
  echo "'AUTOMERGE_JS_DIR' var not set. Using temporary dir: $AUTOMERGE_JS_DIR & commit hash: $COMMIT_HASH"
  if [[ -d $AUTOMERGE_JS_DIR ]]; then
    echo "Dir found, skipping clone"
    cd $AUTOMERGE_JS_DIR
    git fetch --all
    if ! git cat-file -e $COMMIT_HASH; then
      echo "Commit hash: $COMMIT_HASH not found in $AUTOMERGE_JS_DIR"
      exit 1
    fi
  else
    git clone https://github.com/automerge/automerge.git $AUTOMERGE_JS_DIR
  fi
  cd $ORIGINAL_PWD
  cd $AUTOMERGE_JS_DIR
  git checkout $COMMIT_HASH
else
  # if the env var is set, assume the user is using an existing checkout of automerge
  echo "Using $AUTOMERGE_JS_DIR"
  if [[ ! -d $AUTOMERGE_JS_DIR ]]; then
    echo "$AUTOMERGE_JS_DIR dir not found."
    exit 1
  fi
fi

cd $ORIGINAL_PWD
cd $AUTOMERGE_JS_DIR

WASM_BACKEND_PATH="$ORIGINAL_PWD/build"
if [[ ! -d $WASM_BACKEND_PATH ]]; then
  echo "$WASM_BACKEND_PATH does not exist. Run 'yarn dev' or 'yarn release' to build WASM backend"
  exit 1
fi
yarn install
WASM_BACKEND_PATH=$WASM_BACKEND_PATH yarn testwasm
```
404
automerge-backend-wasm/src/lib.rs
Normal file
404
automerge-backend-wasm/src/lib.rs
Normal file
|
@ -0,0 +1,404 @@
|
|||
//#![feature(set_stdio)]
|
||||
|
||||
mod types;
|
||||
|
||||
use std::{
|
||||
collections::{HashMap, HashSet},
|
||||
convert::TryFrom,
|
||||
fmt::Display,
|
||||
};
|
||||
|
||||
use automerge_backend::{AutomergeError, Backend, Change, SyncMessage, SyncState};
|
||||
use automerge_protocol as amp;
|
||||
use automerge_protocol::ChangeHash;
|
||||
use js_sys::Array;
|
||||
use serde::{de::DeserializeOwned, Serialize};
|
||||
use types::{BinaryChange, BinaryDocument, BinarySyncMessage, BinarySyncState, RawSyncMessage};
|
||||
use wasm_bindgen::prelude::*;
|
||||
|
||||
extern crate web_sys;
|
||||
#[allow(unused_macros)]
|
||||
macro_rules! log {
|
||||
( $( $t:tt )* ) => {
|
||||
web_sys::console::log_1(&format!( $( $t )* ).into());
|
||||
};
|
||||
}
|
||||
|
||||
fn array<T: Serialize>(data: &[T]) -> Result<Array, JsValue> {
|
||||
let result = Array::new();
|
||||
for d in data {
|
||||
result.push(&rust_to_js(d)?);
|
||||
}
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
#[cfg(feature = "wee_alloc")]
|
||||
#[global_allocator]
|
||||
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
|
||||
|
||||
fn js_to_rust<T: DeserializeOwned>(value: &JsValue) -> Result<T, JsValue> {
|
||||
value.into_serde().map_err(json_error_to_js)
|
||||
}
|
||||
|
||||
fn rust_to_js<T: Serialize>(value: T) -> Result<JsValue, JsValue> {
|
||||
JsValue::from_serde(&value).map_err(json_error_to_js)
|
||||
}
|
||||
|
||||
#[wasm_bindgen]
|
||||
#[derive(Debug)]
|
||||
struct State(Backend);
|
||||
|
||||
#[wasm_bindgen]
|
||||
extern "C" {
|
||||
pub type Object;
|
||||
|
||||
#[wasm_bindgen(constructor)]
|
||||
fn new() -> Object;
|
||||
|
||||
#[wasm_bindgen(method, getter)]
|
||||
fn state(this: &Object) -> State;
|
||||
|
||||
#[wasm_bindgen(method, setter)]
|
||||
fn set_state(this: &Object, state: State);
|
||||
|
||||
#[wasm_bindgen(method, getter)]
|
||||
fn frozen(this: &Object) -> bool;
|
||||
|
||||
#[wasm_bindgen(method, setter)]
|
||||
fn set_frozen(this: &Object, frozen: bool);
|
||||
|
||||
#[wasm_bindgen(method, getter)]
|
||||
fn heads(this: &Object) -> Array;
|
||||
|
||||
#[wasm_bindgen(method, setter)]
|
||||
fn set_heads(this: &Object, heads: Array);
|
||||
}
|
||||
|
||||
#[wasm_bindgen]
|
||||
#[derive(Clone)]
|
||||
pub struct JsSyncState(SyncState);
|
||||
|
||||
#[wasm_bindgen]
|
||||
impl JsSyncState {
|
||||
#[wasm_bindgen(getter, js_name = sharedHeads)]
|
||||
pub fn shared_heads(&self) -> JsValue {
|
||||
rust_to_js(&self.0.shared_heads).unwrap()
|
||||
}
|
||||
|
||||
#[wasm_bindgen(getter, js_name = lastSentHeads)]
|
||||
pub fn last_sent_heads(&self) -> JsValue {
|
||||
rust_to_js(self.0.last_sent_heads.as_ref()).unwrap()
|
||||
}
|
||||
|
||||
#[wasm_bindgen(setter, js_name = lastSentHeads)]
|
||||
pub fn set_last_sent_heads(&mut self, heads: JsValue) {
|
||||
let heads: Option<Vec<ChangeHash>> = js_to_rust(&heads).unwrap();
|
||||
self.0.last_sent_heads = heads
|
||||
}
|
||||
|
||||
#[wasm_bindgen(setter, js_name = sentHashes)]
|
||||
pub fn set_sent_hashes(&mut self, hashes: JsValue) {
|
||||
let hashes_map: HashMap<ChangeHash, bool> = js_to_rust(&hashes).unwrap();
|
||||
let hashes_set: HashSet<ChangeHash> = hashes_map.keys().cloned().collect();
|
||||
self.0.sent_hashes = hashes_set
|
||||
}
|
||||
}
|
||||
|
||||
#[wasm_bindgen]
|
||||
pub fn init() -> Result<Object, JsValue> {
|
||||
console_error_panic_hook::set_once();
|
||||
Ok(wrapper(State(Backend::new()), false, Vec::new()))
|
||||
}
|
||||
|
||||
#[wasm_bindgen(js_name = getHeads)]
|
||||
pub fn get_heads(input: Object) -> Result<Array, JsValue> {
|
||||
Ok(input.heads())
|
||||
}
|
||||
|
||||
#[wasm_bindgen(js_name = free)]
|
||||
pub fn free(input: Object) -> Result<(), JsValue> {
|
||||
let state: State = get_state(&input)?;
|
||||
std::mem::drop(state);
|
||||
input.set_frozen(true);
|
||||
input.set_heads(Array::new());
|
||||
Ok(())
|
||||
}
|
||||
#[wasm_bindgen(js_name = applyLocalChange)]
|
||||
pub fn apply_local_change(input: Object, change: JsValue) -> Result<JsValue, JsValue> {
|
||||
get_mut_input(input, |state| {
|
||||
let change: amp::Change = change
|
||||
.into_serde()
|
||||
.map_err(|_| AutomergeError::DecodeFailed)?;
|
||||
let (patch, change) = state.0.apply_local_change(change)?;
|
||||
let result = Array::new();
|
||||
let change_bytes = types::BinaryChange(change.raw_bytes().to_vec());
|
||||
// FIXME unwrap
|
||||
let p = rust_to_js(&patch).unwrap();
|
||||
result.push(&p);
|
||||
result.push(&serde_wasm_bindgen::to_value(&change_bytes).unwrap());
|
||||
Ok(result)
|
||||
})
|
||||
}
|
||||
|
||||
#[wasm_bindgen(js_name = applyChanges)]
|
||||
pub fn apply_changes(input: Object, changes: Array) -> Result<JsValue, JsValue> {
|
||||
get_mut_input(input, |state| {
|
||||
let ch = import_changes(&changes)?;
|
||||
let patch = state.0.apply_changes(ch)?;
|
||||
Ok(array(&[patch]).unwrap())
|
||||
})
|
||||
}
|
||||
|
||||
#[wasm_bindgen(js_name = loadChanges)]
|
||||
pub fn load_changes(input: Object, changes: Array) -> Result<JsValue, JsValue> {
|
||||
get_mut_input(input, |state| {
|
||||
let ch = import_changes(&changes)?;
|
||||
state.0.load_changes(ch)?;
|
||||
Ok(Array::new())
|
||||
})
|
||||
}
|
||||
|
||||
#[wasm_bindgen(js_name = load)]
|
||||
pub fn load(data: JsValue) -> Result<JsValue, JsValue> {
|
||||
let binary_document: BinaryDocument = serde_wasm_bindgen::from_value(data)?;
|
||||
let backend = Backend::load(binary_document.0).map_err(to_js_err)?;
|
||||
let heads = backend.get_heads();
|
||||
Ok(wrapper(State(backend), false, heads).into())
|
||||
}
|
||||
|
||||
#[wasm_bindgen(js_name = getPatch)]
|
||||
pub fn get_patch(input: Object) -> Result<JsValue, JsValue> {
|
||||
get_input(input, |state| {
|
||||
state.0.get_patch().map_err(to_js_err).and_then(rust_to_js)
|
||||
})
|
||||
}
|
||||
|
||||
#[wasm_bindgen(js_name = clone)]
|
||||
pub fn clone(input: Object) -> Result<Object, JsValue> {
|
||||
let old_state = get_state(&input)?;
|
||||
let state = State(old_state.0.clone());
|
||||
let heads = state.0.get_heads();
|
||||
input.set_state(old_state);
|
||||
Ok(wrapper(state, false, heads))
|
||||
}
|
||||
|
||||
#[wasm_bindgen(js_name = save)]
|
||||
pub fn save(input: Object) -> Result<JsValue, JsValue> {
|
||||
get_input(input, |state| {
|
||||
state
|
||||
.0
|
||||
.save()
|
||||
.map(BinaryDocument)
|
||||
.as_ref()
|
||||
.map_err(to_js_err)
|
||||
.and_then(|binary_document| Ok(serde_wasm_bindgen::to_value(binary_document)?))
|
||||
})
|
||||
}
|
||||
|
||||
#[wasm_bindgen(js_name = getChanges)]
|
||||
pub fn get_changes(input: Object, have_deps: JsValue) -> Result<JsValue, JsValue> {
|
||||
let deps: Vec<ChangeHash> = js_to_rust(&have_deps)?;
|
||||
get_input(input, |state| {
|
||||
Ok(export_changes(state.0.get_changes(&deps)).into())
|
||||
})
|
||||
}
|
||||
|
||||
#[wasm_bindgen(js_name = getAllChanges)]
|
||||
pub fn get_all_changes(input: Object) -> Result<JsValue, JsValue> {
|
||||
let deps: Vec<ChangeHash> = vec![];
|
||||
get_input(input, |state| {
|
||||
Ok(export_changes(state.0.get_changes(&deps)).into())
|
||||
})
|
||||
}
|
||||
|
||||
#[wasm_bindgen(js_name = getChangesAdded)]
|
||||
pub fn get_changes_added(input: Object, input2: Object) -> Result<JsValue, JsValue> {
|
||||
get_input(input, |state| {
|
||||
get_input(input2, |state2| {
|
||||
let changes = state.0.get_changes_added(&state2.0);
|
||||
Ok(export_changes(changes).into())
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
#[wasm_bindgen(js_name = getMissingDeps)]
|
||||
pub fn get_missing_deps(input: Object) -> Result<JsValue, JsValue> {
|
||||
get_input(input, |state| rust_to_js(state.0.get_missing_deps(&[])))
|
||||
}
|
||||
|
||||
fn import_changes(changes: &Array) -> Result<Vec<Change>, AutomergeError> {
|
||||
let mut ch = Vec::with_capacity(changes.length() as usize);
|
||||
for c in changes.iter() {
|
||||
let change_bytes: types::BinaryChange = serde_wasm_bindgen::from_value(c).unwrap();
|
||||
ch.push(Change::from_bytes(change_bytes.0)?);
|
||||
}
|
||||
Ok(ch)
|
||||
}
|
||||
|
||||
fn export_changes(changes: Vec<&Change>) -> Array {
|
||||
let result = Array::new();
|
||||
for c in changes {
|
||||
let change_bytes = BinaryChange(c.raw_bytes().to_vec());
|
||||
result.push(&serde_wasm_bindgen::to_value(&change_bytes).unwrap());
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
#[wasm_bindgen(js_name = generateSyncMessage)]
|
||||
pub fn generate_sync_message(input: Object, sync_state: &JsSyncState) -> Result<JsValue, JsValue> {
|
||||
get_input(input, |state| {
|
||||
let mut sync_state = sync_state.clone();
|
||||
let message = state.0.generate_sync_message(&mut sync_state.0);
|
||||
let result = Array::new();
|
||||
result.push(&JsValue::from(sync_state));
|
||||
let message = if let Some(message) = message {
|
||||
serde_wasm_bindgen::to_value(&BinarySyncMessage(message.encode().map_err(to_js_err)?))?
|
||||
} else {
|
||||
JsValue::NULL
|
||||
};
|
||||
result.push(&message);
|
||||
Ok(result.into())
|
||||
})
|
||||
}
|
||||
|
||||
#[wasm_bindgen(js_name = receiveSyncMessage)]
|
||||
pub fn receive_sync_message(
|
||||
input: Object,
|
||||
sync_state: &JsSyncState,
|
||||
message: JsValue,
|
||||
) -> Result<JsValue, JsValue> {
|
||||
let mut state: State = get_state(&input)?;
|
||||
|
||||
let binary_message: BinarySyncMessage = serde_wasm_bindgen::from_value(message)?;
|
||||
let message = SyncMessage::decode(&binary_message.0).map_err(to_js_err)?;
|
||||
|
||||
let mut sync_state = sync_state.clone();
|
||||
let patch = match state.0.receive_sync_message(&mut sync_state.0, message) {
|
||||
Ok(r) => r,
|
||||
Err(err) => {
|
||||
input.set_state(state);
|
||||
return Err(to_js_err(err));
|
||||
}
|
||||
};
|
||||
|
||||
let result = Array::new();
|
||||
|
||||
if patch.is_some() {
|
||||
let heads = state.0.get_heads();
|
||||
let new_state = wrapper(state, false, heads);
|
||||
// the receiveSyncMessage in automerge.js returns the original doc when there is no patch so we should only freeze it when there is a patch
|
||||
input.set_frozen(true);
|
||||
result.push(&new_state.into());
|
||||
} else {
|
||||
input.set_state(state);
|
||||
result.push(&input);
|
||||
}
|
||||
|
||||
result.push(&JsValue::from(sync_state));
|
||||
|
||||
let p = rust_to_js(&patch)?;
|
||||
result.push(&p);
|
||||
|
||||
Ok(result.into())
|
||||
}

#[wasm_bindgen(js_name = initSyncState)]
pub fn init_sync_state() -> Result<JsSyncState, JsValue> {
    Ok(JsSyncState(SyncState::default()))
}

#[wasm_bindgen(js_name = encodeSyncState)]
pub fn encode_sync_state(sync_state: &JsSyncState) -> Result<JsValue, JsValue> {
    let binary_sync_state = BinarySyncState(sync_state.0.clone().encode().map_err(to_js_err)?);
    Ok(serde_wasm_bindgen::to_value(&binary_sync_state)?)
}

#[wasm_bindgen(js_name = decodeSyncState)]
pub fn decode_sync_state(sync_state_bytes: JsValue) -> Result<JsSyncState, JsValue> {
    let bytes: BinarySyncState = serde_wasm_bindgen::from_value(sync_state_bytes)?;
    let sync_state = SyncState::decode(&bytes.0).map_err(to_js_err)?;
    Ok(JsSyncState(sync_state))
}

#[wasm_bindgen(js_name = encodeSyncMessage)]
pub fn encode_sync_message(sync_message: JsValue) -> Result<JsValue, JsValue> {
    let sync_message =
        SyncMessage::try_from(serde_wasm_bindgen::from_value::<RawSyncMessage>(sync_message)?)
            .map_err(to_js_err)?;

    let binary_sync_message = BinarySyncMessage(sync_message.encode().map_err(to_js_err)?);
    Ok(serde_wasm_bindgen::to_value(&binary_sync_message)?)
}

#[wasm_bindgen(js_name = decodeSyncMessage)]
pub fn decode_sync_message(sync_message_bytes: JsValue) -> Result<JsValue, JsValue> {
    let bytes: BinarySyncMessage = serde_wasm_bindgen::from_value(sync_message_bytes)?;
    let sync_message = SyncMessage::decode(&bytes.0).map_err(to_js_err)?;
    serde_wasm_bindgen::to_value(&RawSyncMessage::try_from(sync_message).map_err(to_js_err)?)
        .map_err(to_js_err)
}

fn get_state(input: &Object) -> Result<State, JsValue> {
    if input.frozen() {
        Err(js_sys::Error::new("Attempting to use an outdated Automerge document that has already been updated. Please use the latest document state, or call Automerge.clone() if you really need to use this old document state.").into())
    } else {
        Ok(input.state())
    }
}

fn wrapper(state: State, frozen: bool, heads: Vec<ChangeHash>) -> Object {
    let heads_array = Array::new();
    for h in heads {
        heads_array.push(&rust_to_js(h).unwrap());
    }

    let wrapper = Object::new();
    wrapper.set_heads(heads_array);
    wrapper.set_frozen(frozen);
    wrapper.set_state(state);
    wrapper
}

fn get_input<F>(input: Object, action: F) -> Result<JsValue, JsValue>
where
    F: FnOnce(&State) -> Result<JsValue, JsValue>,
{
    let state: State = get_state(&input)?;
    let result = action(&state);
    input.set_state(state);
    result
}

fn get_mut_input<F>(input: Object, action: F) -> Result<JsValue, JsValue>
where
    F: Fn(&mut State) -> Result<Array, AutomergeError>,
{
    let mut state: State = get_state(&input)?;

    match action(&mut state) {
        Ok(result) => {
            let heads = state.0.get_heads();
            let new_state = wrapper(state, false, heads);
            input.set_frozen(true);
            if result.length() == 0 {
                Ok(new_state.into())
            } else {
                result.unshift(&new_state.into());
                Ok(result.into())
            }
        }
        Err(err) => {
            input.set_state(state);
            Err(to_js_err(err))
        }
    }
}
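
// Illustrative summary (not part of the diff) of the freeze-on-mutation
// contract that `get_state`, `wrapper` and `get_mut_input` enforce together:
//
//     let mut state = get_state(&input)?;       // errors if `input` is frozen
//     let result = action(&mut state)?;         // mutate the backend
//     let new = wrapper(state, false, heads);   // fresh, unfrozen wrapper
//     input.set_frozen(true);                   // stale wrapper now fails fast
//
// so JS callers must always continue from the wrapper returned by the most
// recent mutating call.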

fn to_js_err<T: Display>(err: T) -> JsValue {
    js_sys::Error::new(&std::format!("Automerge error: {}", err)).into()
}

fn json_error_to_js(err: serde_json::Error) -> JsValue {
    js_sys::Error::new(&std::format!("serde_json error: {}", err)).into()
}
102
automerge-backend-wasm/src/types.rs
Normal file
@@ -0,0 +1,102 @@
use std::convert::TryFrom;

use automerge_backend::{AutomergeError, BloomFilter, Change, SyncHave, SyncMessage};
use automerge_protocol::ChangeHash;
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
pub struct BinaryChange(#[serde(with = "serde_bytes")] pub Vec<u8>);

#[derive(Serialize, Deserialize)]
pub struct BinaryDocument(#[serde(with = "serde_bytes")] pub Vec<u8>);

#[derive(Serialize, Deserialize)]
pub struct BinarySyncState(#[serde(with = "serde_bytes")] pub Vec<u8>);

#[derive(Serialize, Deserialize)]
pub struct BinarySyncMessage(#[serde(with = "serde_bytes")] pub Vec<u8>);

#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RawSyncMessage {
    pub heads: Vec<ChangeHash>,
    pub need: Vec<ChangeHash>,
    pub have: Vec<RawSyncHave>,
    pub changes: Vec<BinaryChange>,
}

impl TryFrom<SyncMessage> for RawSyncMessage {
    type Error = AutomergeError;

    fn try_from(value: SyncMessage) -> Result<Self, Self::Error> {
        let have = value
            .have
            .into_iter()
            .map(RawSyncHave::try_from)
            .collect::<Result<_, _>>()?;
        let changes = value
            .changes
            .into_iter()
            .map(|c| BinaryChange(c.raw_bytes().to_vec()))
            .collect();
        Ok(Self {
            heads: value.heads,
            need: value.need,
            have,
            changes,
        })
    }
}

impl TryFrom<RawSyncMessage> for SyncMessage {
    type Error = AutomergeError;

    fn try_from(value: RawSyncMessage) -> Result<Self, Self::Error> {
        let have = value
            .have
            .into_iter()
            .map(SyncHave::try_from)
            .collect::<Result<_, _>>()?;
        let changes = value
            .changes
            .into_iter()
            .map(|b| Change::from_bytes(b.0))
            .collect::<Result<_, _>>()?;
        Ok(Self {
            heads: value.heads,
            need: value.need,
            have,
            changes,
        })
    }
}

#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RawSyncHave {
    pub last_sync: Vec<ChangeHash>,
    #[serde(with = "serde_bytes")]
    pub bloom: Vec<u8>,
}

impl TryFrom<SyncHave> for RawSyncHave {
    type Error = AutomergeError;

    fn try_from(value: SyncHave) -> Result<Self, Self::Error> {
        Ok(Self {
            last_sync: value.last_sync,
            bloom: value.bloom.into_bytes()?,
        })
    }
}

impl TryFrom<RawSyncHave> for SyncHave {
    type Error = AutomergeError;

    fn try_from(raw: RawSyncHave) -> Result<Self, Self::Error> {
        Ok(Self {
            last_sync: raw.last_sync,
            bloom: BloomFilter::try_from(raw.bloom.as_slice())?,
        })
    }
}
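
// A minimal round-trip sketch (illustrative, not part of the diff) of what
// these conversions are for, assuming `msg: SyncMessage` came from
// `Backend::generate_sync_message`:
//
//     let raw = RawSyncMessage::try_from(msg)?;        // serde-friendly form
//     let js = serde_wasm_bindgen::to_value(&raw)?;    // changes/bloom as bytes
//     let raw2: RawSyncMessage = serde_wasm_bindgen::from_value(js)?;
//     let msg2 = SyncMessage::try_from(raw2)?;         // back to the wire type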
52
automerge-backend-wasm/test/backend_test.js
Normal file
@@ -0,0 +1,52 @@
const assert = require('assert')
const Backend = require('..')

describe('Automerge.Backend', () => {
  describe('incremental diffs', () => {
    it('should assign to a key in a map', () => {
      const doc1 = Backend.init()
      const change = {
        actor: '55f250d0f76b4e15923600f98ebed8d7',
        seq: 1,
        startOp: 1,
        deps: [],
        time: 1609190674,
        message: '',
        ops: [
          {
            action: 'makeText',
            obj: '_root',
            key: 'text',
            insert: false,
            pred: []
          },
          {
            action: 'set',
            obj: '1@55f250d0f76b4e15923600f98ebed8d7',
            key: '_head',
            insert: true,
            pred: [],
            value: 'a'
          },
          {
            action: 'makeMap',
            obj: '1@55f250d0f76b4e15923600f98ebed8d7',
            key: '2@55f250d0f76b4e15923600f98ebed8d7',
            insert: true,
            pred: []
          },
          {
            action: 'set',
            obj: '3@55f250d0f76b4e15923600f98ebed8d7',
            key: 'attribute',
            insert: false,
            pred: [],
            value: 'bold'
          },
        ],
        extra_bytes: []
      }
      const doc2 = Backend.applyLocalChange(doc1, change)
    })
  })
})
3
automerge-backend-wasm/test/mocha.opts
Normal file
@@ -0,0 +1,3 @@
--use_strict
--watch-extensions js
test/*test*.js
735
automerge-backend-wasm/yarn.lock
Normal file
@@ -0,0 +1,735 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1


ansi-colors@3.2.3:
  version "3.2.3"
  resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.3.tgz#57d35b8686e851e2cc04c403f1c00203976a1813"
  integrity sha512-LEHHyuhlPY3TmuUYMh2oz89lTShfvgbmzaBcxve9t/9Wuy7Dwf4yoAKcND7KFT1HAQfqZ12qtc+DUrBMeKF9nw==

ansi-regex@^3.0.0:
  version "3.0.0"
  resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998"
  integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=

ansi-regex@^4.1.0:
  version "4.1.0"
  resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997"
  integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==

ansi-styles@^3.2.0, ansi-styles@^3.2.1:
  version "3.2.1"
  resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d"
  integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==
  dependencies:
    color-convert "^1.9.0"

anymatch@~3.1.1:
  version "3.1.1"
  resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142"
  integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==
  dependencies:
    normalize-path "^3.0.0"
    picomatch "^2.0.4"

argparse@^1.0.7:
  version "1.0.10"
  resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911"
  integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==
  dependencies:
    sprintf-js "~1.0.2"

balanced-match@^1.0.0:
  version "1.0.0"
  resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767"
  integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c=

binary-extensions@^2.0.0:
  version "2.2.0"
  resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d"
  integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==

brace-expansion@^1.1.7:
  version "1.1.11"
  resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
  integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==
  dependencies:
    balanced-match "^1.0.0"
    concat-map "0.0.1"

braces@~3.0.2:
  version "3.0.2"
  resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107"
  integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==
  dependencies:
    fill-range "^7.0.1"

browser-stdout@1.3.1:
  version "1.3.1"
  resolved "https://registry.yarnpkg.com/browser-stdout/-/browser-stdout-1.3.1.tgz#baa559ee14ced73452229bad7326467c61fabd60"
  integrity sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==

call-bind@^1.0.0, call-bind@^1.0.2:
  version "1.0.2"
  resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c"
  integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==
  dependencies:
    function-bind "^1.1.1"
    get-intrinsic "^1.0.2"

camelcase@^5.0.0:
  version "5.3.1"
  resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320"
  integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==

chalk@^2.4.2:
  version "2.4.2"
  resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
  integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==
  dependencies:
    ansi-styles "^3.2.1"
    escape-string-regexp "^1.0.5"
    supports-color "^5.3.0"

chokidar@3.3.0:
  version "3.3.0"
  resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.3.0.tgz#12c0714668c55800f659e262d4962a97faf554a6"
  integrity sha512-dGmKLDdT3Gdl7fBUe8XK+gAtGmzy5Fn0XkkWQuYxGIgWVPPse2CxFA5mtrlD0TOHaHjEUqkWNyP1XdHoJES/4A==
  dependencies:
    anymatch "~3.1.1"
    braces "~3.0.2"
    glob-parent "~5.1.0"
    is-binary-path "~2.1.0"
    is-glob "~4.0.1"
    normalize-path "~3.0.0"
    readdirp "~3.2.0"
  optionalDependencies:
    fsevents "~2.1.1"

cliui@^5.0.0:
  version "5.0.0"
  resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5"
  integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==
  dependencies:
    string-width "^3.1.0"
    strip-ansi "^5.2.0"
    wrap-ansi "^5.1.0"

color-convert@^1.9.0:
  version "1.9.3"
  resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8"
  integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==
  dependencies:
    color-name "1.1.3"

color-name@1.1.3:
  version "1.1.3"
  resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
  integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=

concat-map@0.0.1:
  version "0.0.1"
  resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
  integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=

debug@3.2.6:
  version "3.2.6"
  resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b"
  integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==
  dependencies:
    ms "^2.1.1"

decamelize@^1.2.0:
  version "1.2.0"
  resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290"
  integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=

define-properties@^1.1.2, define-properties@^1.1.3:
  version "1.1.3"
  resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1"
  integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==
  dependencies:
    object-keys "^1.0.12"

diff@3.5.0:
  version "3.5.0"
  resolved "https://registry.yarnpkg.com/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12"
  integrity sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==

emoji-regex@^7.0.1:
  version "7.0.3"
  resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156"
  integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==

es-abstract@^1.18.0-next.1:
  version "1.18.0-next.2"
  resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.18.0-next.2.tgz#088101a55f0541f595e7e057199e27ddc8f3a5c2"
  integrity sha512-Ih4ZMFHEtZupnUh6497zEL4y2+w8+1ljnCyaTa+adcoafI1GOvMwFlDjBLfWR7y9VLfrjRJe9ocuHY1PSR9jjw==
  dependencies:
    call-bind "^1.0.2"
    es-to-primitive "^1.2.1"
    function-bind "^1.1.1"
    get-intrinsic "^1.0.2"
    has "^1.0.3"
    has-symbols "^1.0.1"
    is-callable "^1.2.2"
    is-negative-zero "^2.0.1"
    is-regex "^1.1.1"
    object-inspect "^1.9.0"
    object-keys "^1.1.1"
    object.assign "^4.1.2"
    string.prototype.trimend "^1.0.3"
    string.prototype.trimstart "^1.0.3"

es-to-primitive@^1.2.1:
  version "1.2.1"
  resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a"
  integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==
  dependencies:
    is-callable "^1.1.4"
    is-date-object "^1.0.1"
    is-symbol "^1.0.2"

escape-string-regexp@1.0.5, escape-string-regexp@^1.0.5:
  version "1.0.5"
  resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
  integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=

esprima@^4.0.0:
  version "4.0.1"
  resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71"
  integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==

fill-range@^7.0.1:
  version "7.0.1"
  resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40"
  integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==
  dependencies:
    to-regex-range "^5.0.1"

find-up@3.0.0, find-up@^3.0.0:
  version "3.0.0"
  resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73"
  integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==
  dependencies:
    locate-path "^3.0.0"

flat@^4.1.0:
  version "4.1.1"
  resolved "https://registry.yarnpkg.com/flat/-/flat-4.1.1.tgz#a392059cc382881ff98642f5da4dde0a959f309b"
  integrity sha512-FmTtBsHskrU6FJ2VxCnsDb84wu9zhmO3cUX2kGFb5tuwhfXxGciiT0oRY+cck35QmG+NmGh5eLz6lLCpWTqwpA==
  dependencies:
    is-buffer "~2.0.3"

fs.realpath@^1.0.0:
  version "1.0.0"
  resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
  integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8=

fsevents@~2.1.1:
  version "2.1.3"
  resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.3.tgz#fb738703ae8d2f9fe900c33836ddebee8b97f23e"
  integrity sha512-Auw9a4AxqWpa9GUfj370BMPzzyncfBABW8Mab7BGWBYDj4Isgq+cDKtx0i6u9jcX9pQDnswsaaOTgTmA5pEjuQ==

function-bind@^1.1.1:
  version "1.1.1"
  resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
  integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==

get-caller-file@^2.0.1:
  version "2.0.5"
  resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e"
  integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==

get-intrinsic@^1.0.2:
  version "1.1.1"
  resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.1.tgz#15f59f376f855c446963948f0d24cd3637b4abc6"
  integrity sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==
  dependencies:
    function-bind "^1.1.1"
    has "^1.0.3"
    has-symbols "^1.0.1"

glob-parent@~5.1.0:
  version "5.1.2"
  resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4"
  integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==
  dependencies:
    is-glob "^4.0.1"

glob@7.1.3:
  version "7.1.3"
  resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.3.tgz#3960832d3f1574108342dafd3a67b332c0969df1"
  integrity sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==
  dependencies:
    fs.realpath "^1.0.0"
    inflight "^1.0.4"
    inherits "2"
    minimatch "^3.0.4"
    once "^1.3.0"
    path-is-absolute "^1.0.0"

growl@1.10.5:
  version "1.10.5"
  resolved "https://registry.yarnpkg.com/growl/-/growl-1.10.5.tgz#f2735dc2283674fa67478b10181059355c369e5e"
  integrity sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==

has-flag@^3.0.0:
  version "3.0.0"
  resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
  integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0=

has-symbols@^1.0.0, has-symbols@^1.0.1:
  version "1.0.1"
  resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8"
  integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==

has@^1.0.3:
  version "1.0.3"
  resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796"
  integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==
  dependencies:
    function-bind "^1.1.1"

he@1.2.0:
  version "1.2.0"
  resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f"
  integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==

inflight@^1.0.4:
  version "1.0.6"
  resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
  integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=
  dependencies:
    once "^1.3.0"
    wrappy "1"

inherits@2:
  version "2.0.4"
  resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
  integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==

is-binary-path@~2.1.0:
  version "2.1.0"
  resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09"
  integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==
  dependencies:
    binary-extensions "^2.0.0"

is-buffer@~2.0.3:
  version "2.0.5"
  resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191"
  integrity sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==

is-callable@^1.1.4, is-callable@^1.2.2:
  version "1.2.3"
  resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.3.tgz#8b1e0500b73a1d76c70487636f368e519de8db8e"
  integrity sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ==

is-date-object@^1.0.1:
  version "1.0.2"
  resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.2.tgz#bda736f2cd8fd06d32844e7743bfa7494c3bfd7e"
  integrity sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==

is-extglob@^2.1.1:
  version "2.1.1"
  resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2"
  integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=

is-fullwidth-code-point@^2.0.0:
  version "2.0.0"
  resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f"
  integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=

is-glob@^4.0.1, is-glob@~4.0.1:
  version "4.0.1"
  resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc"
  integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==
  dependencies:
    is-extglob "^2.1.1"

is-negative-zero@^2.0.1:
  version "2.0.1"
  resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.1.tgz#3de746c18dda2319241a53675908d8f766f11c24"
  integrity sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w==

is-number@^7.0.0:
  version "7.0.0"
  resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b"
  integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==

is-regex@^1.1.1:
  version "1.1.2"
  resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.2.tgz#81c8ebde4db142f2cf1c53fc86d6a45788266251"
  integrity sha512-axvdhb5pdhEVThqJzYXwMlVuZwC+FF2DpcOhTS+y/8jVq4trxyPgfcwIxIKiyeuLlSQYKkmUaPQJ8ZE4yNKXDg==
  dependencies:
    call-bind "^1.0.2"
    has-symbols "^1.0.1"

is-symbol@^1.0.2:
  version "1.0.3"
  resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937"
  integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==
  dependencies:
    has-symbols "^1.0.1"

isexe@^2.0.0:
  version "2.0.0"
  resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
  integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=

js-yaml@3.13.1:
  version "3.13.1"
  resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847"
  integrity sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==
  dependencies:
    argparse "^1.0.7"
    esprima "^4.0.0"

locate-path@^3.0.0:
  version "3.0.0"
  resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e"
  integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==
  dependencies:
    p-locate "^3.0.0"
    path-exists "^3.0.0"

lodash@^4.17.15:
  version "4.17.21"
  resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
  integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==

log-symbols@3.0.0:
  version "3.0.0"
  resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-3.0.0.tgz#f3a08516a5dea893336a7dee14d18a1cfdab77c4"
  integrity sha512-dSkNGuI7iG3mfvDzUuYZyvk5dD9ocYCYzNU6CYDE6+Xqd+gwme6Z00NS3dUh8mq/73HaEtT7m6W+yUPtU6BZnQ==
  dependencies:
    chalk "^2.4.2"

minimatch@3.0.4, minimatch@^3.0.4:
  version "3.0.4"
  resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083"
  integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==
  dependencies:
    brace-expansion "^1.1.7"

minimist@^1.2.5:
  version "1.2.5"
  resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602"
  integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==

mkdirp@0.5.5:
  version "0.5.5"
  resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def"
  integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==
  dependencies:
    minimist "^1.2.5"

mocha@^7.1.1:
  version "7.2.0"
  resolved "https://registry.yarnpkg.com/mocha/-/mocha-7.2.0.tgz#01cc227b00d875ab1eed03a75106689cfed5a604"
  integrity sha512-O9CIypScywTVpNaRrCAgoUnJgozpIofjKUYmJhiCIJMiuYnLI6otcb1/kpW9/n/tJODHGZ7i8aLQoDVsMtOKQQ==
  dependencies:
    ansi-colors "3.2.3"
    browser-stdout "1.3.1"
    chokidar "3.3.0"
    debug "3.2.6"
    diff "3.5.0"
    escape-string-regexp "1.0.5"
    find-up "3.0.0"
    glob "7.1.3"
    growl "1.10.5"
    he "1.2.0"
    js-yaml "3.13.1"
    log-symbols "3.0.0"
    minimatch "3.0.4"
    mkdirp "0.5.5"
    ms "2.1.1"
    node-environment-flags "1.0.6"
    object.assign "4.1.0"
    strip-json-comments "2.0.1"
    supports-color "6.0.0"
    which "1.3.1"
    wide-align "1.1.3"
    yargs "13.3.2"
    yargs-parser "13.1.2"
    yargs-unparser "1.6.0"

ms@2.1.1:
  version "2.1.1"
  resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a"
  integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==

ms@^2.1.1:
  version "2.1.3"
  resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
  integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==

node-environment-flags@1.0.6:
  version "1.0.6"
  resolved "https://registry.yarnpkg.com/node-environment-flags/-/node-environment-flags-1.0.6.tgz#a30ac13621f6f7d674260a54dede048c3982c088"
  integrity sha512-5Evy2epuL+6TM0lCQGpFIj6KwiEsGh1SrHUhTbNX+sLbBtjidPZFAnVK9y5yU1+h//RitLbRHTIMyxQPtxMdHw==
  dependencies:
    object.getownpropertydescriptors "^2.0.3"
    semver "^5.7.0"

normalize-path@^3.0.0, normalize-path@~3.0.0:
  version "3.0.0"
  resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65"
  integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==

object-inspect@^1.9.0:
  version "1.9.0"
  resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.9.0.tgz#c90521d74e1127b67266ded3394ad6116986533a"
  integrity sha512-i3Bp9iTqwhaLZBxGkRfo5ZbE07BQRT7MGu8+nNgwW9ItGp1TzCTw2DLEoWwjClxBjOFI/hWljTAmYGCEwmtnOw==

object-keys@^1.0.11, object-keys@^1.0.12, object-keys@^1.1.1:
  version "1.1.1"
  resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e"
  integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==

object.assign@4.1.0:
  version "4.1.0"
  resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da"
  integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==
  dependencies:
    define-properties "^1.1.2"
    function-bind "^1.1.1"
    has-symbols "^1.0.0"
    object-keys "^1.0.11"

object.assign@^4.1.2:
  version "4.1.2"
  resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940"
  integrity sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==
  dependencies:
    call-bind "^1.0.0"
    define-properties "^1.1.3"
    has-symbols "^1.0.1"
    object-keys "^1.1.1"

object.getownpropertydescriptors@^2.0.3:
  version "2.1.1"
  resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.1.tgz#0dfda8d108074d9c563e80490c883b6661091544"
  integrity sha512-6DtXgZ/lIZ9hqx4GtZETobXLR/ZLaa0aqV0kzbn80Rf8Z2e/XFnhA0I7p07N2wH8bBBltr2xQPi6sbKWAY2Eng==
  dependencies:
    call-bind "^1.0.0"
    define-properties "^1.1.3"
    es-abstract "^1.18.0-next.1"

once@^1.3.0:
  version "1.4.0"
  resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
  integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
  dependencies:
    wrappy "1"

p-limit@^2.0.0:
  version "2.3.0"
  resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1"
  integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==
  dependencies:
    p-try "^2.0.0"

p-locate@^3.0.0:
  version "3.0.0"
  resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4"
  integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==
  dependencies:
    p-limit "^2.0.0"

p-try@^2.0.0:
  version "2.2.0"
  resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6"
  integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==

path-exists@^3.0.0:
  version "3.0.0"
  resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515"
  integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=

path-is-absolute@^1.0.0:
  version "1.0.1"
  resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
  integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18=

picomatch@^2.0.4:
  version "2.2.2"
  resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad"
  integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==

readdirp@~3.2.0:
  version "3.2.0"
  resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.2.0.tgz#c30c33352b12c96dfb4b895421a49fd5a9593839"
  integrity sha512-crk4Qu3pmXwgxdSgGhgA/eXiJAPQiX4GMOZZMXnqKxHX7TaoL+3gQVo/WeuAiogr07DpnfjIMpXXa+PAIvwPGQ==
  dependencies:
    picomatch "^2.0.4"

require-directory@^2.1.1:
  version "2.1.1"
  resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42"
  integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I=

require-main-filename@^2.0.0:
  version "2.0.0"
  resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b"
  integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==

semver@^5.7.0:
  version "5.7.1"
  resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
  integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==

set-blocking@^2.0.0:
  version "2.0.0"
  resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7"
  integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc=

sprintf-js@~1.0.2:
  version "1.0.3"
  resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
  integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=

"string-width@^1.0.2 || 2":
  version "2.1.1"
  resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e"
  integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==
  dependencies:
    is-fullwidth-code-point "^2.0.0"
    strip-ansi "^4.0.0"

string-width@^3.0.0, string-width@^3.1.0:
  version "3.1.0"
  resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961"
  integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==
  dependencies:
    emoji-regex "^7.0.1"
    is-fullwidth-code-point "^2.0.0"
    strip-ansi "^5.1.0"

string.prototype.trimend@^1.0.3:
  version "1.0.3"
  resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.3.tgz#a22bd53cca5c7cf44d7c9d5c732118873d6cd18b"
  integrity sha512-ayH0pB+uf0U28CtjlLvL7NaohvR1amUvVZk+y3DYb0Ey2PUV5zPkkKy9+U1ndVEIXO8hNg18eIv9Jntbii+dKw==
  dependencies:
    call-bind "^1.0.0"
    define-properties "^1.1.3"

string.prototype.trimstart@^1.0.3:
  version "1.0.3"
  resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.3.tgz#9b4cb590e123bb36564401d59824298de50fd5aa"
  integrity sha512-oBIBUy5lea5tt0ovtOFiEQaBkoBBkyJhZXzJYrSmDo5IUUqbOPvVezuRs/agBIdZ2p2Eo1FD6bD9USyBLfl3xg==
  dependencies:
    call-bind "^1.0.0"
    define-properties "^1.1.3"

strip-ansi@^4.0.0:
  version "4.0.0"
  resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f"
  integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8=
  dependencies:
    ansi-regex "^3.0.0"

strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0:
  version "5.2.0"
  resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae"
  integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==
  dependencies:
    ansi-regex "^4.1.0"

strip-json-comments@2.0.1:
  version "2.0.1"
  resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
  integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo=

supports-color@6.0.0:
  version "6.0.0"
  resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.0.0.tgz#76cfe742cf1f41bb9b1c29ad03068c05b4c0e40a"
  integrity sha512-on9Kwidc1IUQo+bQdhi8+Tijpo0e1SS6RoGo2guUwn5vdaxw8RXOF9Vb2ws+ihWOmh4JnCJOvaziZWP1VABaLg==
  dependencies:
    has-flag "^3.0.0"

supports-color@^5.3.0:
  version "5.5.0"
  resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f"
  integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==
  dependencies:
    has-flag "^3.0.0"

to-regex-range@^5.0.1:
  version "5.0.1"
  resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4"
  integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==
  dependencies:
    is-number "^7.0.0"

which-module@^2.0.0:
  version "2.0.0"
  resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a"
  integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=

which@1.3.1:
  version "1.3.1"
  resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
  integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==
  dependencies:
    isexe "^2.0.0"

wide-align@1.1.3:
  version "1.1.3"
  resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457"
  integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==
  dependencies:
    string-width "^1.0.2 || 2"

wrap-ansi@^5.1.0:
  version "5.1.0"
  resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09"
  integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==
  dependencies:
    ansi-styles "^3.2.0"
    string-width "^3.0.0"
    strip-ansi "^5.0.0"

wrappy@1:
  version "1.0.2"
  resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
  integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=

y18n@^4.0.0:
  version "4.0.1"
  resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.1.tgz#8db2b83c31c5d75099bb890b23f3094891e247d4"
  integrity sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==

yargs-parser@13.1.2, yargs-parser@^13.1.2:
  version "13.1.2"
  resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.2.tgz#130f09702ebaeef2650d54ce6e3e5706f7a4fb38"
  integrity sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==
  dependencies:
    camelcase "^5.0.0"
    decamelize "^1.2.0"

yargs-unparser@1.6.0:
  version "1.6.0"
  resolved "https://registry.yarnpkg.com/yargs-unparser/-/yargs-unparser-1.6.0.tgz#ef25c2c769ff6bd09e4b0f9d7c605fb27846ea9f"
  integrity sha512-W9tKgmSn0DpSatfri0nx52Joq5hVXgeLiqR/5G0sZNDoLZFOr/xjBUDcShCOGNsBnEMNo1KAMBkTej1Hm62HTw==
  dependencies:
    flat "^4.1.0"
    lodash "^4.17.15"
    yargs "^13.3.0"

yargs@13.3.2, yargs@^13.3.0:
  version "13.3.2"
  resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.2.tgz#ad7ffefec1aa59565ac915f82dccb38a9c31a2dd"
  integrity sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==
  dependencies:
    cliui "^5.0.0"
    find-up "^3.0.0"
    get-caller-file "^2.0.1"
    require-directory "^2.1.1"
    require-main-filename "^2.0.0"
    set-blocking "^2.0.0"
    string-width "^3.0.0"
    which-module "^2.0.0"
    y18n "^4.0.0"
    yargs-parser "^13.1.2"
40
automerge-backend/Cargo.toml
Normal file
@@ -0,0 +1,40 @@
[package]
name = "automerge-backend"
version = "0.0.1"
authors = ["Alex Good <alex@memoryandthought.me>"]
edition = "2018"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
bench = false

[dependencies]
serde = { version = "^1.0", features=["derive"] }
serde_json = "^1.0"
wasm-bindgen = "^0.2"
js-sys = "^0.3"
hex = "^0.4.2"
rand = { version = "^0.7.3", features=["small_rng"] }
maplit = "^1.0.2"
sha2 = "^0.8.1"
leb128 = "^0.2.4"
automerge-protocol = { path = "../automerge-protocol" }
fxhash = "^0.2.1"
thiserror = "1.0.16"
itertools = "0.9.0"
tracing = { version = "0.1.25", features = ["log"] }
flate2 = "1.0.20"
nonzero_ext = "^0.2.0"
smol_str = "0.1.17"

[dependencies.web-sys]
version = "0.3"
features = [
    "console",
]

[dev-dependencies]
test-env-log = "0.2.6"
env_logger = "*"
tracing-subscriber = {version = "0.2", features = ["chrono", "env-filter", "fmt"]}
pretty_assertions = "0.7.1"
9
automerge-backend/TODO.md
Normal file
@@ -0,0 +1,9 @@
### TODO

1. Performance work
2. Multi-change compression for `save()` / `load()`
3. `Automerge.ack()`
4. `Automerge.getLastLocalChange()`
130
automerge-backend/src/actor_map.rs
Normal file
@@ -0,0 +1,130 @@
use std::cmp::Ordering;

use automerge_protocol as amp;
use smol_str::SmolStr;

use crate::{
    expanded_op::ExpandedOp,
    internal::{ActorId, ElementId, InternalOp, Key, ObjectId, OpId},
};

#[derive(PartialEq, Debug, Clone, Default)]
pub(crate) struct ActorMap(Vec<amp::ActorId>);

impl ActorMap {
    pub fn import_key(&mut self, key: &amp::Key) -> Key {
        match key {
            amp::Key::Map(string) => Key::Map(string.clone()),
            amp::Key::Seq(eid) => Key::Seq(self.import_element_id(eid)),
        }
    }

    pub fn import_actor(&mut self, actor: &amp::ActorId) -> ActorId {
        if let Some(idx) = self.0.iter().position(|a| a == actor) {
            ActorId(idx)
        } else {
            self.0.push(actor.clone());
            ActorId(self.0.len() - 1)
        }
    }

    pub fn import_opid(&mut self, opid: &amp::OpId) -> OpId {
        OpId(opid.0, self.import_actor(&opid.1))
    }

    pub fn import_obj(&mut self, obj: &amp::ObjectId) -> ObjectId {
        match obj {
            amp::ObjectId::Root => ObjectId::Root,
            amp::ObjectId::Id(ref opid) => ObjectId::Id(self.import_opid(opid)),
        }
    }

    pub fn import_element_id(&mut self, eid: &amp::ElementId) -> ElementId {
        match eid {
            amp::ElementId::Head => ElementId::Head,
            amp::ElementId::Id(ref opid) => ElementId::Id(self.import_opid(opid)),
        }
    }

    pub fn import_op(&mut self, op: ExpandedOp) -> InternalOp {
        let pred: Vec<OpId> = op.pred.iter().map(|id| self.import_opid(id)).collect();
        InternalOp {
            action: op.action,
            obj: self.import_obj(&op.obj),
            key: self.import_key(&op.key),
            pred,
            insert: op.insert,
        }
    }

    pub fn export_actor(&self, actor: ActorId) -> amp::ActorId {
        self.0[actor.0].clone()
    }

    pub fn export_opid(&self, opid: &OpId) -> amp::OpId {
        amp::OpId(opid.0, self.export_actor(opid.1))
    }

    pub fn export_obj(&self, obj: &ObjectId) -> amp::ObjectId {
        match obj {
            ObjectId::Root => amp::ObjectId::Root,
            ObjectId::Id(opid) => amp::ObjectId::Id(self.export_opid(opid)),
        }
    }

    #[allow(dead_code)]
    pub fn index_of(&mut self, actor: &amp::ActorId) -> usize {
        if let Some(index) = self.0.iter().position(|a| a == actor) {
            return index;
        }
        self.0.push(actor.clone());
        self.0.len() - 1
    }

    #[allow(dead_code)]
    pub fn actor_for(&self, index: usize) -> Option<&amp::ActorId> {
        self.0.get(index)
    }

    pub fn cmp(&self, eid1: &ElementId, eid2: &ElementId) -> Ordering {
        match (eid1, eid2) {
            (ElementId::Head, ElementId::Head) => Ordering::Equal,
            (ElementId::Head, _) => Ordering::Less,
            (_, ElementId::Head) => Ordering::Greater,
            (ElementId::Id(opid1), ElementId::Id(opid2)) => self.cmp_opid(opid1, opid2),
        }
    }

    pub fn opid_to_string(&self, id: &OpId) -> SmolStr {
        SmolStr::new(format!(
            "{}@{}",
            id.0,
            self.export_actor(id.1).to_hex_string()
        ))
    }

    pub fn elementid_to_string(&self, eid: &ElementId) -> SmolStr {
        match eid {
            ElementId::Head => "_head".into(),
            ElementId::Id(id) => self.opid_to_string(id),
        }
    }

    pub fn key_to_string(&self, key: &Key) -> SmolStr {
        match &key {
            Key::Map(s) => s.clone(),
            Key::Seq(eid) => self.elementid_to_string(eid),
        }
    }

    fn cmp_opid(&self, op1: &OpId, op2: &OpId) -> Ordering {
        if op1.0 == op2.0 {
            // same counter: break the tie by comparing the actual actor ids,
            // not their indices, so the order is stable across ActorMaps
            let actor1 = &self.0[(op1.1).0];
            let actor2 = &self.0[(op2.1).0];
            actor1.cmp(actor2)
        } else {
            op1.0.cmp(&op2.0)
        }
    }
}
640
automerge-backend/src/backend.rs
Normal file
@@ -0,0 +1,640 @@
use core::cmp::max;
use std::{
    collections::{HashMap, HashSet, VecDeque},
    fmt::Debug,
};

use amp::ChangeHash;
use automerge_protocol as amp;

use crate::{
    actor_map::ActorMap,
    change::encode_document,
    error::AutomergeError,
    event_handlers::{EventHandlerId, EventHandlers},
    op_handle::OpHandle,
    op_set::OpSet,
    patches::{generate_from_scratch_diff, IncrementalPatch},
    Change, EventHandler,
};

#[derive(Debug, Default, Clone)]
pub struct Backend {
    queue: Vec<Change>,
    op_set: OpSet,
    states: HashMap<amp::ActorId, Vec<usize>>,
    actors: ActorMap,
    history: Vec<Change>,
    history_index: HashMap<amp::ChangeHash, usize>,
    event_handlers: EventHandlers,
}

impl Backend {
    pub fn new() -> Self {
        Self::default()
    }

    fn make_patch(
        &self,
        diffs: amp::RootDiff,
        actor_seq: Option<(amp::ActorId, u64)>,
    ) -> Result<amp::Patch, AutomergeError> {
        let mut deps: Vec<_> = if let Some((ref actor, ref seq)) = actor_seq {
            let last_hash = self.get_hash(actor, *seq)?;
            self.op_set
                .deps
                .iter()
                .filter(|&dep| dep != &last_hash)
                .copied()
                .collect()
        } else {
            self.op_set.deps.iter().copied().collect()
        };
        deps.sort_unstable();
        let pending_changes = self.get_missing_deps(&[]).len();
        Ok(amp::Patch {
            diffs,
            deps,
            max_op: self.op_set.max_op,
            clock: self
                .states
                .iter()
                .map(|(k, v)| (k.clone(), v.len() as u64))
                .collect(),
            actor: actor_seq.clone().map(|(actor, _)| actor),
            seq: actor_seq.map(|(_, seq)| seq),
            pending_changes,
        })
    }

    /// Loads changes into the backend without producing a patch and without going
    /// through the queue.
    ///
    /// Generating the patch can itself be expensive and is not always required, for
    /// instance when loading a new backend from bytes.
    ///
    /// The changes must be in causal order, as per the document's binary format.
    pub fn load_changes(&mut self, changes: Vec<Change>) -> Result<(), AutomergeError> {
        let mut patch = IncrementalPatch::new();

        for change in changes {
            self.apply_change(change, &mut patch)?;
        }

        Ok(())
    }
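
    // Illustrative usage (not part of the diff) contrasting the two entry
    // points, assuming `saved: Vec<u8>` and `incoming: Vec<Change>`:
    //
    //     let mut backend = Backend::load(saved)?;      // via load_changes: no patch
    //     let patch = backend.apply_changes(incoming)?; // queued, causal order, patch built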

    pub fn apply_changes(&mut self, changes: Vec<Change>) -> Result<amp::Patch, AutomergeError> {
        self.apply(changes, None)
    }

    pub fn get_heads(&self) -> Vec<amp::ChangeHash> {
        self.op_set.heads()
    }

    fn apply(
        &mut self,
        changes: Vec<Change>,
        actor: Option<(amp::ActorId, u64)>,
    ) -> Result<amp::Patch, AutomergeError> {
        let mut patch = IncrementalPatch::new();

        for change in changes {
            self.add_change(change, actor.is_some(), &mut patch)?;
        }

        let workshop = self.op_set.patch_workshop(&self.actors);
        let diffs = patch.finalize(&workshop);
        self.make_patch(diffs, actor)
    }

    fn get_hash(&self, actor: &amp::ActorId, seq: u64) -> Result<amp::ChangeHash, AutomergeError> {
        self.states
            .get(actor)
            .and_then(|v| v.get(seq as usize - 1))
            .and_then(|&i| self.history.get(i))
            .map(|c| c.hash)
            .ok_or(AutomergeError::InvalidSeq(seq))
    }

    pub fn apply_local_change(
        &mut self,
        mut change: amp::Change,
    ) -> Result<(amp::Patch, Change), AutomergeError> {
        self.check_for_duplicate(&change)?; // errors if the change was already applied

        let actor_seq = (change.actor_id.clone(), change.seq);

        if change.seq > 1 {
            let last_hash = self.get_hash(&change.actor_id, change.seq - 1)?;
            if !change.deps.contains(&last_hash) {
                change.deps.push(last_hash);
            }
        }

        let bin_change: Change = change.into();
        let patch: amp::Patch = self.apply(vec![bin_change.clone()], Some(actor_seq))?;

        Ok((patch, bin_change))
    }

    fn check_for_duplicate(&self, change: &amp::Change) -> Result<(), AutomergeError> {
        if self
            .states
            .get(&change.actor_id)
            .map_or(0, |v| v.len() as u64)
            >= change.seq
        {
            return Err(AutomergeError::DuplicateChange(format!(
                "Change request has already been applied {}:{}",
                change.actor_id.to_hex_string(),
                change.seq
            )));
        }
        Ok(())
    }

    fn add_change(
        &mut self,
        change: Change,
        local: bool,
        diffs: &mut IncrementalPatch,
    ) -> Result<(), AutomergeError> {
        if local {
            self.apply_change(change, diffs)
        } else {
            self.queue.push(change);
            self.apply_queued_ops(diffs)
        }
    }

    fn apply_queued_ops(&mut self, diffs: &mut IncrementalPatch) -> Result<(), AutomergeError> {
        while let Some(next_change) = self.pop_next_causally_ready_change() {
            self.apply_change(next_change, diffs)?;
        }
        Ok(())
    }

    fn apply_change(
        &mut self,
        change: Change,
        diffs: &mut IncrementalPatch,
    ) -> Result<(), AutomergeError> {
        if self.history_index.contains_key(&change.hash) {
            return Ok(());
        }

        self.event_handlers.before_apply_change(&change);

        let change_index = self.update_history(change);

        // SAFETY: change_index is the index of the change we've just added, so this
        // can't (and shouldn't) panic. This is to get around the borrow checker.
        let change = &self.history[change_index];

        let op_set = &mut self.op_set;

        let start_op = change.start_op;

        op_set.update_deps(change);

        let ops = OpHandle::extract(change, &mut self.actors);

        op_set.max_op = max(
            op_set.max_op,
            (start_op + (ops.len() as u64)).saturating_sub(1),
        );

        op_set.apply_ops(ops, diffs, &mut self.actors)?;

        self.event_handlers.after_apply_change(change);

        Ok(())
    }

    fn update_history(&mut self, change: Change) -> usize {
        let history_index = self.history.len();

        self.states
            .entry(change.actor_id().clone())
            .or_default()
            .push(history_index);

        self.history_index.insert(change.hash, history_index);
        self.history.push(change);

        history_index
    }

    fn pop_next_causally_ready_change(&mut self) -> Option<Change> {
        let mut index = 0;
        while index < self.queue.len() {
            let change = self.queue.get(index).unwrap();
            if change
                .deps
                .iter()
                .all(|d| self.history_index.contains_key(d))
            {
                return Some(self.queue.swap_remove(index));
            }
            index += 1;
        }
        None
    }

    pub fn get_patch(&self) -> Result<amp::Patch, AutomergeError> {
        let workshop = self.op_set.patch_workshop(&self.actors);
        let diffs = generate_from_scratch_diff(&workshop);
        self.make_patch(diffs, None)
    }

    pub fn get_changes_for_actor_id(
        &self,
        actor_id: &amp::ActorId,
    ) -> Result<Vec<&Change>, AutomergeError> {
        Ok(self
            .states
            .get(actor_id)
            .map(|vec| vec.iter().filter_map(|&i| self.history.get(i)).collect())
            .unwrap_or_default())
    }

    fn get_changes_fast(&self, have_deps: &[amp::ChangeHash]) -> Option<Vec<&Change>> {
        if have_deps.is_empty() {
            return Some(self.history.iter().collect());
        }

        let lowest_idx = have_deps
            .iter()
            .filter_map(|h| self.history_index.get(h))
            .min()?
            + 1;

        let mut missing_changes = vec![];
        let mut has_seen: HashSet<_> = have_deps.iter().collect();
        for change in &self.history[lowest_idx..] {
            let deps_seen = change.deps.iter().filter(|h| has_seen.contains(h)).count();
            if deps_seen > 0 {
                if deps_seen != change.deps.len() {
                    // a later change depends on something we haven't seen, so the
                    // fast path can't work
                    return None;
                }
                missing_changes.push(change);
                has_seen.insert(&change.hash);
            }
        }

        // if we get to the end and there is a head we haven't seen, the fast path
        // can't work either
        if self.get_heads().iter().all(|h| has_seen.contains(h)) {
            Some(missing_changes)
        } else {
            None
        }
    }

    fn get_changes_slow(&self, have_deps: &[amp::ChangeHash]) -> Vec<&Change> {
        let mut stack: Vec<_> = have_deps.iter().collect();
        let mut has_seen = HashSet::new();
        while let Some(hash) = stack.pop() {
            if has_seen.contains(&hash) {
                continue;
            }
            if let Some(change) = self
                .history_index
                .get(hash)
                .and_then(|i| self.history.get(*i))
            {
                stack.extend(change.deps.iter());
            }
            has_seen.insert(hash);
        }
        self.history
            .iter()
            .filter(|change| !has_seen.contains(&change.hash))
            .collect()
    }

    pub fn get_changes(&self, have_deps: &[amp::ChangeHash]) -> Vec<&Change> {
        if let Some(changes) = self.get_changes_fast(have_deps) {
            changes
        } else {
            self.get_changes_slow(have_deps)
        }
    }
|
||||
|
||||
pub fn save(&self) -> Result<Vec<u8>, AutomergeError> {
|
||||
let changes: Vec<amp::Change> = self.history.iter().map(Change::decode).collect();
|
||||
//self.history.iter().map(|change| change.decode()).collect();
|
||||
Ok(encode_document(&changes)?)
|
||||
}
|
||||
|
||||
// allow this for API reasons
|
||||
#[allow(clippy::needless_pass_by_value)]
|
||||
pub fn load(data: Vec<u8>) -> Result<Self, AutomergeError> {
|
||||
let changes = Change::load_document(&data)?;
|
||||
let mut backend = Self::new();
|
||||
backend.load_changes(changes)?;
|
||||
Ok(backend)
|
||||
}
|
||||
|
||||
pub fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec<amp::ChangeHash> {
|
||||
let in_queue: HashSet<_> = self.queue.iter().map(|change| change.hash).collect();
|
||||
let mut missing = HashSet::new();
|
||||
|
||||
for head in self.queue.iter().flat_map(|change| &change.deps) {
|
||||
if !self.history_index.contains_key(head) {
|
||||
missing.insert(head);
|
||||
}
|
||||
}
|
||||
|
||||
for head in heads {
|
||||
if !self.history_index.contains_key(head) {
|
||||
missing.insert(head);
|
||||
}
|
||||
}
|
||||
|
||||
let mut missing = missing
|
||||
.into_iter()
|
||||
.filter(|hash| !in_queue.contains(hash))
|
||||
.copied()
|
||||
.collect::<Vec<_>>();
|
||||
missing.sort();
|
||||
missing
|
||||
}
|
||||
|
||||
pub fn get_change_by_hash(&self, hash: &::ChangeHash) -> Option<&Change> {
|
||||
self.history_index
|
||||
.get(hash)
|
||||
.and_then(|index| self.history.get(*index))
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all changes that are present in `self` but not present in `other`.
|
||||
*/
|
||||
pub fn get_changes_added<'a>(&self, other: &'a Self) -> Vec<&'a Change> {
|
||||
// Depth-first traversal from the heads through the dependency graph,
|
||||
// until we reach a change that is already present in other
|
||||
let mut stack: Vec<_> = other.op_set.deps.iter().collect();
|
||||
let mut seen_hashes = HashSet::new();
|
||||
let mut added_change_hashes = Vec::new();
|
||||
while let Some(hash) = stack.pop() {
|
||||
if !seen_hashes.contains(&hash) && self.get_change_by_hash(hash).is_none() {
|
||||
seen_hashes.insert(hash);
|
||||
added_change_hashes.push(hash);
|
||||
if let Some(change) = other.get_change_by_hash(hash) {
|
||||
stack.extend(&change.deps);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Return those changes in the reverse of the order in which the depth-first search
|
||||
// found them. This is not necessarily a topological sort, but should usually be close.
|
||||
added_change_hashes
|
||||
.into_iter()
|
||||
.filter_map(|h| other.get_change_by_hash(h))
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Filter the changes down to those that are not transitive dependencies of the heads.
|
||||
///
|
||||
/// Thus a graph with these heads has not seen the remaining changes.
|
||||
pub(crate) fn filter_changes(
|
||||
&self,
|
||||
heads: &[amp::ChangeHash],
|
||||
changes: &mut HashSet<amp::ChangeHash>,
|
||||
) {
|
||||
// Reduce the working set to find to those which we may be able to find.
|
||||
// This filters out those hashes that are successors of or concurrent with all of the
|
||||
// heads.
|
||||
// This can help in avoiding traversing the entire graph back to the roots when we try to
|
||||
// search for a hash we can know won't be found there.
|
||||
let max_head_index = heads
|
||||
.iter()
|
||||
.map(|h| self.history_index.get(h).unwrap_or(&0))
|
||||
.max()
|
||||
.unwrap_or(&0);
|
||||
let mut may_find: HashSet<ChangeHash> = changes
|
||||
.iter()
|
||||
.filter(|hash| {
|
||||
let change_index = self.history_index.get(hash).unwrap_or(&0);
|
||||
change_index <= max_head_index
|
||||
})
|
||||
.copied()
|
||||
.collect();
|
||||
|
||||
if may_find.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut queue: VecDeque<_> = heads.iter().collect();
|
||||
let mut seen = HashSet::new();
|
||||
while let Some(hash) = queue.pop_front() {
|
||||
if seen.contains(hash) {
|
||||
continue;
|
||||
}
|
||||
seen.insert(hash);
|
||||
|
||||
let removed = may_find.remove(hash);
|
||||
changes.remove(hash);
|
||||
if may_find.is_empty() {
|
||||
break;
|
||||
}
|
||||
|
||||
for dep in self
|
||||
.history_index
|
||||
.get(hash)
|
||||
.and_then(|i| self.history.get(*i))
|
||||
.map(|c| c.deps.as_slice())
|
||||
.unwrap_or_default()
|
||||
{
|
||||
// if we just removed something from our hashes then it is likely there is more
|
||||
// down here so do a quick inspection on the children.
|
||||
// When we don't remove anything it is less likely that there is something down
|
||||
// that chain so delay it.
|
||||
if removed {
|
||||
queue.push_front(dep);
|
||||
} else {
|
||||
queue.push_back(dep);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Adds the event handler and returns the id of the handler.
|
||||
pub fn add_event_handler(&mut self, handler: EventHandler) -> EventHandlerId {
|
||||
self.event_handlers.add_handler(handler)
|
||||
}
|
||||
|
||||
/// Remove the handler with the given id, returning whether it removed a handler or not.
|
||||
pub fn remove_event_handler(&mut self, id: EventHandlerId) -> bool {
|
||||
self.event_handlers.remove_handler(id)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::convert::TryInto;
|
||||
|
||||
use amp::SortedVec;
|
||||
use automerge_protocol::{ActorId, ObjectId, Op, OpType};
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_get_changes_fast_behavior() {
|
||||
let actor_a: ActorId = "7b7723afd9e6480397a4d467b7693156".try_into().unwrap();
|
||||
let actor_b: ActorId = "37704788917a499cb0206fa8519ac4d9".try_into().unwrap();
|
||||
let change_a1: Change = amp::Change {
|
||||
actor_id: actor_a.clone(),
|
||||
seq: 1,
|
||||
start_op: 1,
|
||||
time: 0,
|
||||
message: None,
|
||||
hash: None,
|
||||
deps: Vec::new(),
|
||||
operations: vec![Op {
|
||||
obj: ObjectId::Root,
|
||||
action: OpType::Set("magpie".into()),
|
||||
key: "bird".into(),
|
||||
insert: false,
|
||||
pred: SortedVec::new(),
|
||||
}],
|
||||
extra_bytes: Vec::new(),
|
||||
}
|
||||
.try_into()
|
||||
.unwrap();
|
||||
let change_a2: Change = amp::Change {
|
||||
actor_id: actor_a,
|
||||
seq: 2,
|
||||
start_op: 2,
|
||||
time: 0,
|
||||
message: None,
|
||||
hash: None,
|
||||
deps: vec![change_a1.hash],
|
||||
operations: vec![Op {
|
||||
obj: ObjectId::Root,
|
||||
action: OpType::Set("ant".into()),
|
||||
key: "bug".into(),
|
||||
insert: false,
|
||||
pred: SortedVec::new(),
|
||||
}],
|
||||
extra_bytes: Vec::new(),
|
||||
}
|
||||
.try_into()
|
||||
.unwrap();
|
||||
let change_b1: Change = amp::Change {
|
||||
actor_id: actor_b.clone(),
|
||||
seq: 1,
|
||||
start_op: 1,
|
||||
time: 0,
|
||||
message: None,
|
||||
hash: None,
|
||||
deps: vec![],
|
||||
operations: vec![Op {
|
||||
obj: ObjectId::Root,
|
||||
action: OpType::Set("dove".into()),
|
||||
key: "bird".into(),
|
||||
insert: false,
|
||||
pred: SortedVec::new(),
|
||||
}],
|
||||
extra_bytes: Vec::new(),
|
||||
}
|
||||
.try_into()
|
||||
.unwrap();
|
||||
let change_b2: Change = amp::Change {
|
||||
actor_id: actor_b.clone(),
|
||||
seq: 2,
|
||||
start_op: 2,
|
||||
time: 0,
|
||||
message: None,
|
||||
hash: None,
|
||||
deps: vec![change_b1.hash],
|
||||
operations: vec![Op {
|
||||
obj: ObjectId::Root,
|
||||
action: OpType::Set("stag beetle".into()),
|
||||
key: "bug".into(),
|
||||
insert: false,
|
||||
pred: SortedVec::new(),
|
||||
}],
|
||||
extra_bytes: Vec::new(),
|
||||
}
|
||||
.try_into()
|
||||
.unwrap();
|
||||
let change_b3: Change = amp::Change {
|
||||
actor_id: actor_b,
|
||||
seq: 3,
|
||||
start_op: 3,
|
||||
time: 0,
|
||||
message: None,
|
||||
hash: None,
|
||||
deps: vec![change_a2.hash, change_b2.hash],
|
||||
operations: vec![Op {
|
||||
obj: ObjectId::Root,
|
||||
action: OpType::Set("bugs and birds".into()),
|
||||
key: "title".into(),
|
||||
insert: false,
|
||||
pred: SortedVec::new(),
|
||||
}],
|
||||
extra_bytes: Vec::new(),
|
||||
}
|
||||
.try_into()
|
||||
.unwrap();
|
||||
let mut backend = Backend::new();
|
||||
|
||||
backend
|
||||
.apply_changes(vec![change_a1.clone(), change_a2.clone()])
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
backend.get_changes_fast(&[]),
|
||||
Some(vec![&change_a1, &change_a2])
|
||||
);
|
||||
assert_eq!(
|
||||
backend.get_changes_fast(&[change_a1.hash]),
|
||||
Some(vec![&change_a2])
|
||||
);
|
||||
assert_eq!(backend.get_heads(), vec![change_a2.hash]);
|
||||
|
||||
backend
|
||||
.apply_changes(vec![change_b1.clone(), change_b2.clone()])
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
backend.get_changes_fast(&[]),
|
||||
Some(vec![&change_a1, &change_a2, &change_b1, &change_b2])
|
||||
);
|
||||
assert_eq!(backend.get_changes_fast(&[change_a1.hash]), None);
|
||||
assert_eq!(backend.get_changes_fast(&[change_a2.hash]), None);
|
||||
assert_eq!(
|
||||
backend.get_changes_fast(&[change_a1.hash, change_b1.hash]),
|
||||
Some(vec![&change_a2, &change_b2])
|
||||
);
|
||||
assert_eq!(
|
||||
backend.get_changes_fast(&[change_a2.hash, change_b1.hash]),
|
||||
Some(vec![&change_b2])
|
||||
);
|
||||
assert_eq!(backend.get_heads(), vec![change_b2.hash, change_a2.hash]);
|
||||
|
||||
backend.apply_changes(vec![change_b3.clone()]).unwrap();
|
||||
|
||||
assert_eq!(backend.get_heads(), vec![change_b3.hash]);
|
||||
assert_eq!(
|
||||
backend.get_changes_fast(&[]),
|
||||
Some(vec![
|
||||
&change_a1, &change_a2, &change_b1, &change_b2, &change_b3
|
||||
])
|
||||
);
|
||||
assert_eq!(backend.get_changes_fast(&[change_a1.hash]), None);
|
||||
assert_eq!(backend.get_changes_fast(&[change_a2.hash]), None);
|
||||
assert_eq!(backend.get_changes_fast(&[change_b1.hash]), None);
|
||||
assert_eq!(backend.get_changes_fast(&[change_b2.hash]), None);
|
||||
assert_eq!(
|
||||
backend.get_changes_fast(&[change_a1.hash, change_b1.hash]),
|
||||
Some(vec![&change_a2, &change_b2, &change_b3])
|
||||
);
|
||||
assert_eq!(
|
||||
backend.get_changes_fast(&[change_a2.hash, change_b1.hash]),
|
||||
Some(vec![&change_b2, &change_b3])
|
||||
);
|
||||
assert_eq!(backend.get_changes_fast(&[change_b3.hash]), Some(vec![]));
|
||||
}
|
||||
}
|
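The queueing logic above (`add_change`, `apply_queued_ops`, `pop_next_causally_ready_change`) only applies a change once every hash in its `deps` is already present in `history_index`. A minimal standalone sketch of that readiness loop, not part of this diff, with hypothetical `u64` hashes standing in for real change hashes:

use std::collections::HashSet;

// Hypothetical stand-in for a real change: just a hash and its dependency hashes.
struct MiniChange {
    hash: u64,
    deps: Vec<u64>,
}

// Mirrors `apply_queued_ops` + `pop_next_causally_ready_change`: keep sweeping
// the queue, applying any change whose deps have all been applied already.
fn apply_causally_ready(queue: &mut Vec<MiniChange>, applied: &mut HashSet<u64>) {
    loop {
        let ready = queue
            .iter()
            .position(|c| c.deps.iter().all(|d| applied.contains(d)));
        match ready {
            Some(i) => {
                let change = queue.swap_remove(i);
                applied.insert(change.hash); // "apply" the change
            }
            None => break,
        }
    }
}

fn main() {
    // Change 2 depends on change 1 but arrives first, so it waits in the queue.
    let mut queue = vec![
        MiniChange { hash: 2, deps: vec![1] },
        MiniChange { hash: 1, deps: vec![] },
    ];
    let mut applied = HashSet::new();
    apply_causally_ready(&mut queue, &mut applied);
    assert!(queue.is_empty());
    assert!(applied.contains(&1) && applied.contains(&2));
}

Note the `swap_remove` in both the sketch and the real queue: order inside the queue does not matter, since readiness is re-checked on every sweep.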
1354
automerge-backend/src/change.rs
Normal file
File diff suppressed because it is too large
1360
automerge-backend/src/columnar.rs
Normal file
File diff suppressed because it is too large
70
automerge-backend/src/concurrent_operations.rs
Normal file
@ -0,0 +1,70 @@
use std::ops::Deref;

use crate::{internal::InternalOpType, op_handle::OpHandle};

/// Represents a set of operations which are relevant to either an element ID
/// or object ID and which occurred without knowledge of each other
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct ConcurrentOperations {
    pub ops: Vec<OpHandle>,
}

impl Deref for ConcurrentOperations {
    type Target = Vec<OpHandle>;

    fn deref(&self) -> &Self::Target {
        &self.ops
    }
}

impl Default for ConcurrentOperations {
    fn default() -> Self {
        Self::new()
    }
}

impl ConcurrentOperations {
    pub fn new() -> ConcurrentOperations {
        ConcurrentOperations { ops: Vec::new() }
    }

    pub fn is_empty(&self) -> bool {
        self.ops.is_empty()
    }

    /// Updates this set of operations based on a new operation.
    ///
    /// Returns the operation representing the update and the previous operations that this op
    /// replaces.
    /// This is to cover the case of increment operations actually being reflected as Sets on
    /// counters.
    pub fn incorporate_new_op(&mut self, new_op: OpHandle) -> (OpHandle, Vec<OpHandle>) {
        if new_op.is_inc() {
            for op in &mut self.ops {
                if op.maybe_increment(&new_op) {
                    return (op.clone(), Vec::new());
                }
            }
            (new_op, Vec::new())
        } else {
            let mut overwritten_ops = Vec::new();
            let mut i = 0;
            while i != self.ops.len() {
                if new_op.pred.contains(&self.ops[i].id) {
                    overwritten_ops.push(self.ops.swap_remove(i));
                } else {
                    i += 1;
                }
            }

            match new_op.action {
                InternalOpType::Set(_) | InternalOpType::Make(_) => {
                    self.ops.push(new_op.clone());
                }
                _ => {}
            }

            (new_op, overwritten_ops)
        }
    }
}
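`incorporate_new_op` treats an increment specially: rather than joining the set of concurrent ops, it folds into the counter-setting op it targets (see `maybe_increment` in `op_handle.rs` further down, which bumps a `delta` field). A self-contained sketch of that folding, using a simplified op type rather than the crate's real `OpHandle`:

// Simplified stand-ins for the crate's real ops: a counter-setting op with a
// running `delta`, and an increment op naming its predecessor.
#[derive(Debug)]
enum MiniOp {
    SetCounter { id: u64, value: i64, delta: i64 },
    Inc { pred: u64, amount: i64 },
}

// Mirrors the increment branch of `incorporate_new_op`: an Inc folds into the
// delta of the counter op it targets instead of joining the concurrent set.
fn incorporate(ops: &mut Vec<MiniOp>, new_op: MiniOp) {
    if let MiniOp::Inc { pred, amount } = &new_op {
        for op in ops.iter_mut() {
            if let MiniOp::SetCounter { id, delta, .. } = op {
                if *id == *pred {
                    *delta += *amount;
                    return;
                }
            }
        }
    }
    ops.push(new_op);
}

fn main() {
    let mut ops = vec![MiniOp::SetCounter { id: 1, value: 10, delta: 0 }];
    incorporate(&mut ops, MiniOp::Inc { pred: 1, amount: 5 });
    incorporate(&mut ops, MiniOp::Inc { pred: 1, amount: -2 });
    // The observed value is value + delta, as in `OpHandle::adjusted_value`.
    if let MiniOp::SetCounter { value, delta, .. } = &ops[0] {
        assert_eq!(value + delta, 13);
    }
    assert_eq!(ops.len(), 1); // the increments did not add concurrent ops
}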
automerge-backend/src/decoding.rs
@ -1,10 +1,7 @@
use core::fmt::Debug;
use std::num::NonZeroU64;
use std::{borrow::Cow, io, io::Read, str};
use std::{borrow::Cow, convert::TryFrom, io, io::Read, str};

use crate::error;
use crate::legacy as amp;
use crate::ActorId;
use automerge_protocol as amp;
use smol_str::SmolStr;

/// The error type for decoding operations.
@ -23,7 +20,7 @@ pub enum Error {
    )]
    WrongType { expected_one_of: Vec<u8>, found: u8 },
    #[error("Bad change format: {0}")]
    BadChangeFormat(#[source] error::InvalidChangeHashSlice),
    BadChangeFormat(#[source] amp::error::InvalidChangeHashSlice),
    #[error("Not enough bytes")]
    NotEnoughBytes,
    #[error("Found the wrong magic bytes in the document")]
@ -52,60 +49,7 @@ pub enum Error {
    Io(#[from] io::Error),
}

impl PartialEq<Error> for Error {
    fn eq(&self, other: &Error) -> bool {
        match (self, other) {
            (
                Self::WrongType {
                    expected_one_of: l_expected_one_of,
                    found: l_found,
                },
                Self::WrongType {
                    expected_one_of: r_expected_one_of,
                    found: r_found,
                },
            ) => l_expected_one_of == r_expected_one_of && l_found == r_found,
            (Self::BadChangeFormat(l0), Self::BadChangeFormat(r0)) => l0 == r0,
            (
                Self::WrongByteLength {
                    expected: l_expected,
                    found: l_found,
                },
                Self::WrongByteLength {
                    expected: r_expected,
                    found: r_found,
                },
            ) => l_expected == r_expected && l_found == r_found,
            (
                Self::ColumnsNotInAscendingOrder {
                    last: l_last,
                    found: l_found,
                },
                Self::ColumnsNotInAscendingOrder {
                    last: r_last,
                    found: r_found,
                },
            ) => l_last == r_last && l_found == r_found,
            (
                Self::InvalidChecksum {
                    found: l_found,
                    calculated: l_calculated,
                },
                Self::InvalidChecksum {
                    found: r_found,
                    calculated: r_calculated,
                },
            ) => l_found == r_found && l_calculated == r_calculated,
            (Self::InvalidChange(l0), Self::InvalidChange(r0)) => l0 == r0,
            (Self::ChangeDecompressFailed(l0), Self::ChangeDecompressFailed(r0)) => l0 == r0,
            (Self::Leb128(_l0), Self::Leb128(_r0)) => true,
            (Self::Io(l0), Self::Io(r0)) => l0.kind() == r0.kind(),
            _ => core::mem::discriminant(self) == core::mem::discriminant(other),
        }
    }
}

#[derive(thiserror::Error, PartialEq, Debug)]
#[derive(thiserror::Error, Debug)]
pub enum InvalidChangeError {
    #[error("Change contained an operation with action 'set' which did not have a 'value'")]
    SetOpWithoutValue,
@ -114,24 +58,24 @@ pub enum InvalidChangeError {
    #[error("Change contained an invalid object id: {}", source.0)]
    InvalidObjectId {
        #[from]
        source: error::InvalidObjectId,
        source: amp::error::InvalidObjectId,
    },
    #[error("Change contained an invalid hash: {:?}", source.0)]
    InvalidChangeHash {
        #[from]
        source: error::InvalidChangeHashSlice,
        source: amp::error::InvalidChangeHashSlice,
    },
}

#[derive(Clone, Debug)]
pub(crate) struct Decoder<'a> {
    pub(crate) offset: usize,
    pub(crate) last_read: usize,
    pub offset: usize,
    pub last_read: usize,
    data: Cow<'a, [u8]>,
}

impl<'a> Decoder<'a> {
    pub(crate) fn new(data: Cow<'a, [u8]>) -> Self {
    pub fn new(data: Cow<'a, [u8]>) -> Self {
        Decoder {
            offset: 0,
            last_read: 0,
@ -139,7 +83,7 @@ impl<'a> Decoder<'a> {
        }
    }

    pub(crate) fn read<T: Decodable + Debug>(&mut self) -> Result<T, Error> {
    pub fn read<T: Decodable + Debug>(&mut self) -> Result<T, Error> {
        let mut buf = &self.data[self.offset..];
        let init_len = buf.len();
        let val = T::decode::<&[u8]>(&mut buf).ok_or(Error::NoDecodedValue)?;
@ -153,7 +97,7 @@ impl<'a> Decoder<'a> {
        }
    }

    pub(crate) fn read_bytes(&mut self, index: usize) -> Result<&[u8], Error> {
    pub fn read_bytes(&mut self, index: usize) -> Result<&[u8], Error> {
        if self.offset + index > self.data.len() {
            Err(Error::TryingToReadPastEnd)
        } else {
@ -164,12 +108,12 @@ impl<'a> Decoder<'a> {
        }
    }

    pub(crate) fn done(&self) -> bool {
    pub fn done(&self) -> bool {
        self.offset >= self.data.len()
    }
}

/// See discussion on [`crate::encoding::BooleanEncoder`] for the format data is stored in.
/// See discussion on [`BooleanEncoder`] for the format data is stored in.
pub(crate) struct BooleanDecoder<'a> {
    decoder: Decoder<'a>,
    last_value: bool,
@ -209,10 +153,10 @@ impl<'a> Iterator for BooleanDecoder<'a> {
    }
}

/// See discussion on [`crate::encoding::RleEncoder`] for the format data is stored in.
/// See discussion on [`RleEncoder`] for the format data is stored in.
#[derive(Debug)]
pub(crate) struct RleDecoder<'a, T> {
    pub(crate) decoder: Decoder<'a>,
    pub decoder: Decoder<'a>,
    last_value: Option<T>,
    count: isize,
    literal: bool,
@ -277,7 +221,7 @@ where
    }
}

/// See discussion on [`crate::encoding::DeltaEncoder`] for the format data is stored in.
/// See discussion on [`DeltaEncoder`] for the format data is stored in.
pub(crate) struct DeltaDecoder<'a> {
    rle: RleDecoder<'a, i64>,
    absolute_val: u64,
@ -407,15 +351,6 @@ impl Decodable for u64 {
    }
}

impl Decodable for NonZeroU64 {
    fn decode<R>(bytes: &mut R) -> Option<Self>
    where
        R: Read,
    {
        NonZeroU64::new(leb128::read::unsigned(bytes).ok()?)
    }
}

impl Decodable for Vec<u8> {
    fn decode<R>(bytes: &mut R) -> Option<Self>
    where
@ -464,7 +399,7 @@ impl Decodable for Option<String> {
    }
}

impl Decodable for ActorId {
impl Decodable for amp::ActorId {
    fn decode<R>(bytes: &mut R) -> Option<Self>
    where
        R: Read,
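The `BooleanDecoder` above reverses the boolean run-length format documented on `BooleanEncoder` in `encoding.rs` below: a sequence of alternating counts, with runs of false at even positions and runs of true at odd positions. A standalone round-trip sketch, not the crate's code, using plain `usize` counts where the real coder reads and writes LEB128 bytes:

// Encode a bool sequence as alternating run counts: false-count, true-count, ...
fn encode_bools(values: &[bool]) -> Vec<usize> {
    let (mut counts, mut last, mut count) = (Vec::new(), false, 0usize);
    for &v in values {
        if v == last {
            count += 1;
        } else {
            counts.push(count);
            last = v;
            count = 1;
        }
    }
    if count > 0 {
        counts.push(count);
    }
    counts
}

// Decode by expanding each count; even indices are runs of false, odd of true.
fn decode_bools(counts: &[usize]) -> Vec<bool> {
    counts
        .iter()
        .enumerate()
        .flat_map(|(i, &n)| std::iter::repeat(i % 2 == 1).take(n))
        .collect()
}

fn main() {
    let input = vec![true, true, false, false, false, true];
    // The sequence starts with true, so the leading false-run has length 0.
    let counts = encode_bools(&input);
    assert_eq!(counts, vec![0, 2, 3, 1]);
    assert_eq!(decode_bools(&counts), input);
}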
376
automerge-backend/src/encoding.rs
Normal file
@ -0,0 +1,376 @@
use core::fmt::Debug;
use std::{
    io,
    io::{Read, Write},
    mem,
};

use automerge_protocol as amp;
use flate2::{bufread::DeflateEncoder, Compression};
use smol_str::SmolStr;

use crate::columnar::COLUMN_TYPE_DEFLATE;

pub(crate) const DEFLATE_MIN_SIZE: usize = 256;

/// The error type for encoding operations.
#[derive(Debug, thiserror::Error)]
pub enum Error {
    #[error(transparent)]
    Io(#[from] io::Error),
}

/// Encodes booleans by storing the count of the same value.
///
/// The sequence of numbers describes the count of false values on even indices (0-indexed) and the
/// count of true values on odd indices (0-indexed).
///
/// Counts are encoded as usize.
pub(crate) struct BooleanEncoder {
    buf: Vec<u8>,
    last: bool,
    count: usize,
}

impl BooleanEncoder {
    pub fn new() -> BooleanEncoder {
        BooleanEncoder {
            buf: Vec::new(),
            last: false,
            count: 0,
        }
    }

    pub fn append(&mut self, value: bool) {
        if value == self.last {
            self.count += 1;
        } else {
            self.count.encode(&mut self.buf).ok();
            self.last = value;
            self.count = 1;
        }
    }

    pub fn finish(mut self, col: u32) -> ColData {
        if self.count > 0 {
            self.count.encode(&mut self.buf).ok();
        }
        ColData::new(col, self.buf)
    }
}

/// Encodes integers as the change since the previous value.
///
/// The initial value is 0 encoded as u64. Deltas are encoded as i64.
///
/// Run length encoding is then applied to the resulting sequence.
pub(crate) struct DeltaEncoder {
    rle: RleEncoder<i64>,
    absolute_value: u64,
}

impl DeltaEncoder {
    pub fn new() -> DeltaEncoder {
        DeltaEncoder {
            rle: RleEncoder::new(),
            absolute_value: 0,
        }
    }

    pub fn append_value(&mut self, value: u64) {
        self.rle
            .append_value(value as i64 - self.absolute_value as i64);
        self.absolute_value = value;
    }

    pub fn append_null(&mut self) {
        self.rle.append_null();
    }

    pub fn finish(self, col: u32) -> ColData {
        self.rle.finish(col)
    }
}

enum RleState<T> {
    Empty,
    NullRun(usize),
    LiteralRun(T, Vec<T>),
    LoneVal(T),
    Run(T, usize),
}

/// Encodes data in run length encoding format. This is very efficient for long repeats of data.
///
/// There are 3 types of 'run' in this encoder:
/// - a normal run (compresses repeated values)
/// - a null run (compresses repeated nulls)
/// - a literal run (no compression)
///
/// A normal run consists of the length of the run (encoded as an i64) followed by the encoded value that this run contains.
///
/// A null run consists of a zero value (encoded as an i64) followed by the length of the null run (encoded as a usize).
///
/// A literal run consists of the **negative** length of the run (encoded as an i64) followed by the values in the run.
///
/// Therefore all the types start with an encoded i64, the value of which determines the type of the following data.
pub(crate) struct RleEncoder<T>
where
    T: Encodable + PartialEq + Clone,
{
    buf: Vec<u8>,
    state: RleState<T>,
}

impl<T> RleEncoder<T>
where
    T: Encodable + PartialEq + Clone,
{
    pub fn new() -> RleEncoder<T> {
        RleEncoder {
            buf: Vec::new(),
            state: RleState::Empty,
        }
    }

    pub fn finish(mut self, col: u32) -> ColData {
        match self.take_state() {
            // this covers `only_nulls`
            RleState::NullRun(size) => {
                if !self.buf.is_empty() {
                    self.flush_null_run(size);
                }
            }
            RleState::LoneVal(value) => self.flush_lit_run(vec![value]),
            RleState::Run(value, len) => self.flush_run(&value, len),
            RleState::LiteralRun(last, mut run) => {
                run.push(last);
                self.flush_lit_run(run);
            }
            RleState::Empty => {}
        }
        ColData::new(col, self.buf)
    }

    fn flush_run(&mut self, val: &T, len: usize) {
        self.encode(&(len as i64));
        self.encode(val);
    }

    fn flush_null_run(&mut self, len: usize) {
        self.encode::<i64>(&0);
        self.encode(&len);
    }

    fn flush_lit_run(&mut self, run: Vec<T>) {
        self.encode(&-(run.len() as i64));
        for val in run {
            self.encode(&val);
        }
    }

    fn take_state(&mut self) -> RleState<T> {
        let mut state = RleState::Empty;
        mem::swap(&mut self.state, &mut state);
        state
    }

    pub fn append_null(&mut self) {
        self.state = match self.take_state() {
            RleState::Empty => RleState::NullRun(1),
            RleState::NullRun(size) => RleState::NullRun(size + 1),
            RleState::LoneVal(other) => {
                self.flush_lit_run(vec![other]);
                RleState::NullRun(1)
            }
            RleState::Run(other, len) => {
                self.flush_run(&other, len);
                RleState::NullRun(1)
            }
            RleState::LiteralRun(last, mut run) => {
                run.push(last);
                self.flush_lit_run(run);
                RleState::NullRun(1)
            }
        }
    }

    pub fn append_value(&mut self, value: T) {
        self.state = match self.take_state() {
            RleState::Empty => RleState::LoneVal(value),
            RleState::LoneVal(other) => {
                if other == value {
                    RleState::Run(value, 2)
                } else {
                    let mut v = Vec::with_capacity(2);
                    v.push(other);
                    RleState::LiteralRun(value, v)
                }
            }
            RleState::Run(other, len) => {
                if other == value {
                    RleState::Run(other, len + 1)
                } else {
                    self.flush_run(&other, len);
                    RleState::LoneVal(value)
                }
            }
            RleState::LiteralRun(last, mut run) => {
                if last == value {
                    self.flush_lit_run(run);
                    RleState::Run(value, 2)
                } else {
                    run.push(last);
                    RleState::LiteralRun(value, run)
                }
            }
            RleState::NullRun(size) => {
                self.flush_null_run(size);
                RleState::LoneVal(value)
            }
        }
    }

    fn encode<V>(&mut self, val: &V)
    where
        V: Encodable,
    {
        val.encode(&mut self.buf).ok();
    }
}

pub(crate) trait Encodable {
    fn encode_with_actors_to_vec(&self, actors: &mut Vec<amp::ActorId>) -> io::Result<Vec<u8>> {
        let mut buf = Vec::new();
        self.encode_with_actors(&mut buf, actors)?;
        Ok(buf)
    }

    fn encode_with_actors<R: Write>(
        &self,
        buf: &mut R,
        _actors: &mut Vec<amp::ActorId>,
    ) -> io::Result<usize> {
        self.encode(buf)
    }

    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize>;
}

impl Encodable for SmolStr {
    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        let bytes = self.as_bytes();
        let head = bytes.len().encode(buf)?;
        buf.write_all(bytes)?;
        Ok(head + bytes.len())
    }
}

impl Encodable for String {
    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        let bytes = self.as_bytes();
        let head = bytes.len().encode(buf)?;
        buf.write_all(bytes)?;
        Ok(head + bytes.len())
    }
}

impl Encodable for Option<String> {
    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        if let Some(s) = self {
            s.encode(buf)
        } else {
            0.encode(buf)
        }
    }
}

impl Encodable for u64 {
    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        leb128::write::unsigned(buf, *self)
    }
}

impl Encodable for f64 {
    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        let bytes = self.to_le_bytes();
        buf.write_all(&bytes)?;
        Ok(bytes.len())
    }
}

impl Encodable for f32 {
    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        let bytes = self.to_le_bytes();
        buf.write_all(&bytes)?;
        Ok(bytes.len())
    }
}

impl Encodable for i64 {
    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        leb128::write::signed(buf, *self)
    }
}

impl Encodable for usize {
    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        (*self as u64).encode(buf)
    }
}

impl Encodable for u32 {
    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        u64::from(*self).encode(buf)
    }
}

impl Encodable for i32 {
    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        i64::from(*self).encode(buf)
    }
}

#[derive(Debug)]
pub(crate) struct ColData {
    pub col: u32,
    pub data: Vec<u8>,
    #[cfg(debug_assertions)]
    has_been_deflated: bool,
}

impl ColData {
    pub fn new(col_id: u32, data: Vec<u8>) -> ColData {
        ColData {
            col: col_id,
            data,
            #[cfg(debug_assertions)]
            has_been_deflated: false,
        }
    }

    pub fn encode_col_len<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        let mut len = 0;
        if !self.data.is_empty() {
            len += self.col.encode(buf)?;
            len += self.data.len().encode(buf)?;
        }
        Ok(len)
    }

    pub fn deflate(&mut self) {
        #[cfg(debug_assertions)]
        {
            debug_assert!(!self.has_been_deflated);
            self.has_been_deflated = true;
        }
        if self.data.len() > DEFLATE_MIN_SIZE {
            let mut deflated = Vec::new();
            let mut deflater = DeflateEncoder::new(&self.data[..], Compression::default());
            // This unwrap should be okay as we're reading and writing to in-memory buffers
            deflater.read_to_end(&mut deflated).unwrap();
            self.col |= COLUMN_TYPE_DEFLATE;
            self.data = deflated;
        }
    }
}
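The doc comment on `RleEncoder` above pins down the wire format: a normal run is `(len as i64, value)`, a null run is `(0i64, null_len)`, and a literal run is `(-len as i64, values...)`. A self-contained sketch, not the crate's code, that classifies a sequence of optional values into those three token kinds (an enum stands in for the LEB128 bytes the real encoder writes):

#[derive(Debug, PartialEq)]
enum RleToken<T> {
    Run { len: i64, value: T }, // len > 0: `len` copies of `value`
    NullRun { len: usize },     // written as a 0 marker, then the length
    Literal(Vec<T>),            // written as -len, then the raw values
}

fn rle_tokens<T: PartialEq + Clone>(items: &[Option<T>]) -> Vec<RleToken<T>> {
    let mut out = Vec::new();
    let mut i = 0;
    while i < items.len() {
        match &items[i] {
            None => {
                let start = i;
                while i < items.len() && items[i].is_none() {
                    i += 1;
                }
                out.push(RleToken::NullRun { len: i - start });
            }
            Some(v) => {
                let start = i;
                while i < items.len() && items[i].as_ref() == Some(v) {
                    i += 1;
                }
                let len = i - start;
                if len > 1 {
                    out.push(RleToken::Run { len: len as i64, value: v.clone() });
                } else {
                    // A lone value: like the real encoder, batch consecutive
                    // lone values into a single literal run.
                    match out.last_mut() {
                        Some(RleToken::Literal(vals)) => vals.push(v.clone()),
                        _ => out.push(RleToken::Literal(vec![v.clone()])),
                    }
                }
            }
        }
    }
    out
}

fn main() {
    let items = vec![Some(7), Some(7), Some(7), None, None, Some(1), Some(2)];
    assert_eq!(
        rle_tokens(&items),
        vec![
            RleToken::Run { len: 3, value: 7 },
            RleToken::NullRun { len: 2 },
            RleToken::Literal(vec![1, 2]),
        ]
    );
}

Because every token starts with an i64 whose sign (or zero) selects the kind, the decoder in `decoding.rs` can dispatch on a single LEB128 read.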
67
automerge-backend/src/error.rs
Normal file
@ -0,0 +1,67 @@
//use std::error::Error;
use std::fmt::Debug;

use automerge_protocol as amp;
use thiserror::Error;

use crate::{decoding, encoding};

#[derive(Error, Debug)]
pub enum AutomergeError {
    #[error("Missing object ID")]
    MissingObjectError,
    #[error("Missing index in op {0}")]
    MissingIndex(amp::OpId),
    #[error("Missing element ID: {0}")]
    MissingElement(amp::ObjectId, amp::ElementId),
    #[error("No path to object: {0}")]
    NoPathToObject(amp::ObjectId),
    #[error("Cant extract object: {0}")]
    CantExtractObject(amp::ObjectId),
    #[error("Skiplist error: {0}")]
    SkipListError(String),
    #[error("Index out of bounds: {0}")]
    IndexOutOfBounds(usize),
    #[error("Invalid op id: {0}")]
    InvalidOpId(String),
    #[error("Invalid object ID: {0}")]
    InvalidObjectId(String),
    #[error("Missing value")]
    MissingValue,
    #[error("Unknown error: {0}")]
    GeneralError(String),
    #[error("Missing number value")]
    MissingNumberValue,
    #[error("Unknown version: {0}")]
    UnknownVersion(u64),
    #[error("Duplicate change {0}")]
    DuplicateChange(String),
    #[error("Diverged state {0}")]
    DivergedState(String),
    #[error("Invalid seq {0}")]
    InvalidSeq(u64),
    #[error("Map key in seq")]
    MapKeyInSeq,
    #[error("Head to opid")]
    HeadToOpId,
    #[error("Doc format not implemented yet")]
    DocFormatUnimplemented,
    #[error("Divergent change {0}")]
    DivergentChange(String),
    #[error("Encode failed")]
    EncodeFailed,
    #[error("Decode failed")]
    DecodeFailed,
    #[error("Encoding error {0}")]
    EncodingError(#[from] encoding::Error),
    #[error("Decoding error {0}")]
    DecodingError(#[from] decoding::Error),
    #[error("Attempted to create a cursor for opid {opid} which was not an element in a sequence")]
    InvalidCursor { opid: amp::OpId },
    #[error("A compressed chunk could not be decompressed")]
    BadCompressedChunk,
}

#[derive(Error, Debug)]
#[error("Invalid element ID: {0}")]
pub struct InvalidElementId(pub String);
72
automerge-backend/src/event_handlers.rs
Normal file
@ -0,0 +1,72 @@
use std::fmt::Debug;

use crate::Change;

#[derive(Clone, Copy)]
pub struct EventHandlerId(usize);

/// A sequence of event handlers.
///
/// This maintains the order of insertion so handlers will be called in a consistent order.
#[derive(Debug, Default)]
pub struct EventHandlers(Vec<EventHandler>);

impl Clone for EventHandlers {
    fn clone(&self) -> Self {
        EventHandlers(Vec::new())
    }
}

impl EventHandlers {
    pub(crate) fn before_apply_change(&mut self, change: &Change) {
        for handler in &mut self.0 {
            if let EventHandler::BeforeApplyChange(f) = handler {
                f.0(change);
            }
        }
    }

    pub(crate) fn after_apply_change(&mut self, change: &Change) {
        for handler in &mut self.0 {
            if let EventHandler::AfterApplyChange(f) = handler {
                f.0(change);
            }
        }
    }

    /// Adds the event handler and returns the id of the handler.
    pub fn add_handler(&mut self, handler: EventHandler) -> EventHandlerId {
        self.0.push(handler);
        EventHandlerId(self.0.len() - 1)
    }

    /// Remove the handler with the given id, returning whether it removed a handler or not.
    pub fn remove_handler(&mut self, id: EventHandlerId) -> bool {
        if id.0 < self.0.len() {
            self.0.remove(id.0);
            true
        } else {
            false
        }
    }
}

/// A handler for changes.
pub struct ChangeEventHandler(pub Box<dyn FnMut(&Change) + Send>);

/// A general event handler.
pub enum EventHandler {
    /// An event handler that gets called before a change is applied to the history.
    BeforeApplyChange(ChangeEventHandler),
    /// An event handler that gets called after a change has been applied to the history.
    AfterApplyChange(ChangeEventHandler),
}

impl Debug for EventHandler {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
        match self {
            Self::BeforeApplyChange(_) => write!(f, "BeforeApplyChange"),
            Self::AfterApplyChange(_) => write!(f, "AfterApplyChange"),
        }
    }
}
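A short usage sketch for the handler API above, registering a `BeforeApplyChange` hook on a `Backend` and then removing it. This assumes only the items this diff exports (`Backend::add_event_handler`, `EventHandler`, `ChangeEventHandler`) plus the `hash` field used on `Change` throughout `backend.rs`:

use automerge_backend::{Backend, ChangeEventHandler, EventHandler};

fn main() {
    let mut backend = Backend::new();

    // Log every change hash just before the change is applied to the history.
    let id = backend.add_event_handler(EventHandler::BeforeApplyChange(
        ChangeEventHandler(Box::new(|change| {
            println!("about to apply change {:?}", change.hash);
        })),
    ));

    // ... apply changes here; the hook fires once per newly applied change ...

    // Handlers are identified by the id returned at registration.
    assert!(backend.remove_event_handler(id));
}

One caveat visible in `remove_handler` above: it removes by vector index, so ids handed out after the removed handler end up pointing at shifted positions. Note also that cloning `EventHandlers` yields an empty set, since the boxed closures cannot be cloned.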
310
automerge-backend/src/expanded_op.rs
Normal file
@ -0,0 +1,310 @@
use std::borrow::Cow;

use amp::{ActorId, ElementId, Key, OpId, SortedVec};
use automerge_protocol as amp;

use crate::internal::InternalOpType;

/// The same as amp::Op except the `action` is an `InternalOpType`. This allows us to expand
/// collections of `amp::Op` into `ExpandedOp`s and remove optypes which perform multiple
/// operations (`amp::OpType::MultiSet` and `amp::OpType::Del`)
#[derive(Debug, PartialEq)]
pub struct ExpandedOp<'a> {
    pub(crate) action: InternalOpType,
    pub obj: Cow<'a, amp::ObjectId>,
    pub key: Cow<'a, amp::Key>,
    pub pred: Cow<'a, SortedVec<amp::OpId>>,
    pub insert: bool,
}

/// An iterator which expands `amp::OpType::MultiSet` and `amp::OpType::Del` operations into
/// multiple `amp::InternalOpType`s
pub(super) struct ExpandedOpIterator<'a> {
    actor: ActorId,
    offset: usize,
    ops: &'a [amp::Op],
    expand_count: Option<usize>,
    op_num: u64,
}

impl<'a> Iterator for ExpandedOpIterator<'a> {
    type Item = ExpandedOp<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        if self.offset >= self.ops.len() {
            None
        } else {
            self.op_num += 1;
            let op = &self.ops[self.offset];
            let action = match &op.action {
                amp::OpType::Set(v) => InternalOpType::Set(v.clone()),
                amp::OpType::Make(ot) => InternalOpType::Make(*ot),
                amp::OpType::Inc(i) => InternalOpType::Inc(*i),
                amp::OpType::Del(count) => {
                    if count.get() == 1 {
                        InternalOpType::Del
                    } else {
                        assert_eq!(
                            op.pred.len(),
                            1,
                            "multiOp deletion must have exactly one pred"
                        );
                        let index = if let Some(c) = self.expand_count {
                            if c == count.get() as usize - 1 {
                                // the last
                                self.offset += 1;
                                self.expand_count = None;
                            } else {
                                // somewhere in the middle
                                self.expand_count = Some(c + 1);
                            }
                            c
                        } else {
                            // first one of the series
                            self.expand_count = Some(1);
                            0
                        };
                        let pred = op.pred.get(0).unwrap().increment_by(index as u64);
                        let key = op.key.increment_by(index as u64).unwrap();
                        return Some(ExpandedOp {
                            action: InternalOpType::Del,
                            insert: op.insert,
                            pred: Cow::Owned(vec![pred].into()),
                            key: Cow::Owned(key),
                            obj: Cow::Borrowed(&op.obj),
                        });
                    }
                }
                amp::OpType::MultiSet(values) => {
                    assert!(op.pred.is_empty(), "multi-insert pred must be empty");
                    let expanded_offset = match self.expand_count {
                        None => {
                            self.expand_count = Some(0);
                            0
                        }
                        Some(o) => o,
                    };

                    let key = if expanded_offset == 0 {
                        Cow::Borrowed(&op.key)
                    } else {
                        Cow::Owned(Key::Seq(ElementId::Id(OpId(
                            self.op_num - 1,
                            self.actor.clone(),
                        ))))
                    };

                    if expanded_offset == values.len() - 1 {
                        self.offset += 1;
                        self.expand_count = None;
                    } else {
                        self.expand_count = Some(expanded_offset + 1);
                    }

                    let v = values.get(expanded_offset).unwrap();
                    return Some(ExpandedOp {
                        action: InternalOpType::Set(v.clone()),
                        insert: op.insert,
                        pred: Cow::Borrowed(&op.pred),
                        key,
                        obj: Cow::Borrowed(&op.obj),
                    });
                }
            };
            self.offset += 1;
            Some(ExpandedOp {
                action,
                insert: op.insert,
                pred: Cow::Borrowed(&op.pred),
                key: Cow::Borrowed(&op.key),
                obj: Cow::Borrowed(&op.obj),
            })
        }
    }
}

impl<'a> ExpandedOpIterator<'a> {
    pub(super) fn new(ops: &'a [amp::Op], start_op: u64, actor: ActorId) -> ExpandedOpIterator<'a> {
        ExpandedOpIterator {
            ops,
            offset: 0,
            expand_count: None,
            op_num: start_op - 1,
            actor,
        }
    }
}

#[cfg(test)]
mod tests {
    use std::{convert::TryInto, num::NonZeroU32};

    use amp::{ObjectId, Op, OpType, ScalarValue, SortedVec};
    use pretty_assertions::assert_eq;

    use super::*;

    #[test]
    fn expand_multi_set() {
        let actor = ActorId::from_bytes(b"7f12a4d3567c4257af34f216aa16fe48");
        let ops = [Op {
            action: OpType::MultiSet(
                vec![
                    ScalarValue::Uint(1),
                    ScalarValue::Uint(2),
                    ScalarValue::Uint(3),
                ]
                .try_into()
                .unwrap(),
            ),
            obj: ObjectId::Id(OpId(1, actor.clone())),
            key: Key::Seq(ElementId::Head),
            pred: SortedVec::new(),
            insert: true,
        }];
        let expanded_ops = ExpandedOpIterator::new(&ops, 2, actor.clone()).collect::<Vec<_>>();
        assert_eq!(
            expanded_ops,
            vec![
                ExpandedOp {
                    action: InternalOpType::Set(ScalarValue::Uint(1)),
                    obj: Cow::Owned(ObjectId::Id(OpId(1, actor.clone()))),
                    key: Cow::Owned(Key::Seq(ElementId::Head)),
                    pred: Cow::Owned(SortedVec::new()),
                    insert: true
                },
                ExpandedOp {
                    action: InternalOpType::Set(ScalarValue::Uint(2)),
                    obj: Cow::Owned(ObjectId::Id(OpId(1, actor.clone()))),
                    key: Cow::Owned(Key::Seq(ElementId::Id(OpId(2, actor.clone())))),
                    pred: Cow::Owned(SortedVec::new()),
                    insert: true
                },
                ExpandedOp {
                    action: InternalOpType::Set(ScalarValue::Uint(3)),
                    obj: Cow::Owned(ObjectId::Id(OpId(1, actor.clone()))),
                    key: Cow::Owned(Key::Seq(ElementId::Id(OpId(3, actor)))),
                    pred: Cow::Owned(SortedVec::new()),
                    insert: true
                },
            ]
        );
    }

    #[test]
    fn expand_multi_set_double() {
        let actor = ActorId::from_bytes(b"7f12a4d3567c4257af34f216aa16fe48");
        let ops = [
            Op {
                action: OpType::MultiSet(
                    vec![
                        ScalarValue::Uint(1),
                        ScalarValue::Uint(2),
                        ScalarValue::Uint(3),
                    ]
                    .try_into()
                    .unwrap(),
                ),
                obj: ObjectId::Id(OpId(1, actor.clone())),
                key: Key::Seq(ElementId::Head),
                pred: SortedVec::new(),
                insert: true,
            },
            Op {
                action: OpType::MultiSet(
                    vec![
                        ScalarValue::Str("hi".into()),
                        ScalarValue::Str("world".into()),
                    ]
                    .try_into()
                    .unwrap(),
                ),
                obj: ObjectId::Id(OpId(1, actor.clone())),
                key: Key::Seq(ElementId::Id(OpId(4, actor.clone()))),
                pred: SortedVec::new(),
                insert: true,
            },
        ];
        let expanded_ops = ExpandedOpIterator::new(&ops, 2, actor.clone()).collect::<Vec<_>>();
        assert_eq!(
            expanded_ops,
            vec![
                ExpandedOp {
                    action: InternalOpType::Set(ScalarValue::Uint(1)),
                    obj: Cow::Owned(ObjectId::Id(OpId(1, actor.clone()))),
                    key: Cow::Owned(Key::Seq(ElementId::Head)),
                    pred: Cow::Owned(SortedVec::new()),
                    insert: true
                },
                ExpandedOp {
                    action: InternalOpType::Set(ScalarValue::Uint(2)),
                    obj: Cow::Owned(ObjectId::Id(OpId(1, actor.clone()))),
                    key: Cow::Owned(Key::Seq(ElementId::Id(OpId(2, actor.clone())))),
                    pred: Cow::Owned(SortedVec::new()),
                    insert: true
                },
                ExpandedOp {
                    action: InternalOpType::Set(ScalarValue::Uint(3)),
                    obj: Cow::Owned(ObjectId::Id(OpId(1, actor.clone()))),
                    key: Cow::Owned(Key::Seq(ElementId::Id(OpId(3, actor.clone())))),
                    pred: Cow::Owned(SortedVec::new()),
                    insert: true
                },
                ExpandedOp {
                    action: InternalOpType::Set(ScalarValue::Str("hi".into())),
                    obj: Cow::Owned(ObjectId::Id(OpId(1, actor.clone()))),
                    key: Cow::Owned(Key::Seq(ElementId::Id(OpId(4, actor.clone())))),
                    pred: Cow::Owned(SortedVec::new()),
                    insert: true
                },
                ExpandedOp {
                    action: InternalOpType::Set(ScalarValue::Str("world".into())),
                    obj: Cow::Owned(ObjectId::Id(OpId(1, actor.clone()))),
                    key: Cow::Owned(Key::Seq(ElementId::Id(OpId(5, actor)))),
                    pred: Cow::Owned(SortedVec::new()),
                    insert: true
                },
            ]
        );
    }

    #[test]
    fn expand_multi_del() {
        let actor = ActorId::from_bytes(b"7f12a4d3567c4257af34f216aa16fe48");
        let pred = OpId(1, actor.clone());
        let ops = [Op {
            action: OpType::Del(NonZeroU32::new(3).unwrap()),
            obj: ObjectId::Id(OpId(1, actor.clone())),
            key: Key::Seq(ElementId::Id(OpId(1, actor.clone()))),
            pred: vec![pred].into(),
            insert: true,
        }];
        let expanded_ops = ExpandedOpIterator::new(&ops, 2, actor.clone()).collect::<Vec<_>>();
        assert_eq!(
            expanded_ops,
            vec![
                ExpandedOp {
                    action: InternalOpType::Del,
                    obj: Cow::Owned(ObjectId::Id(OpId(1, actor.clone()))),
                    key: Cow::Owned(Key::Seq(ElementId::Id(OpId(1, actor.clone())))),
                    pred: Cow::Owned(vec![OpId(1, actor.clone())].into()),
                    insert: true
                },
                ExpandedOp {
                    action: InternalOpType::Del,
                    obj: Cow::Owned(ObjectId::Id(OpId(1, actor.clone()))),
                    key: Cow::Owned(Key::Seq(ElementId::Id(OpId(2, actor.clone())))),
                    pred: Cow::Owned(vec![OpId(2, actor.clone())].into()),
                    insert: true
                },
                ExpandedOp {
                    action: InternalOpType::Del,
                    obj: Cow::Owned(ObjectId::Id(OpId(1, actor.clone()))),
                    key: Cow::Owned(Key::Seq(ElementId::Id(OpId(3, actor.clone())))),
                    pred: Cow::Owned(vec![OpId(3, actor)].into()),
                    insert: true
                },
            ]
        );
    }
}
102
automerge-backend/src/internal.rs
Normal file
@ -0,0 +1,102 @@
use automerge_protocol as amp;
use nonzero_ext::nonzero;
use smol_str::SmolStr;

#[derive(Eq, PartialEq, Hash, Debug, Clone, Copy)]
pub(crate) struct ActorId(pub usize);

#[derive(Eq, PartialEq, Debug, Hash, Clone, Copy)]
pub(crate) struct OpId(pub u64, pub ActorId);

#[derive(Eq, PartialEq, Debug, Hash, Clone, Copy)]
pub(crate) enum ObjectId {
    Id(OpId),
    Root,
}

#[derive(PartialEq, Eq, Debug, Hash, Clone, Copy)]
pub(crate) enum ElementId {
    Head,
    Id(OpId),
}

#[derive(PartialEq, Eq, Debug, Hash, Clone)]
pub(crate) enum Key {
    Map(SmolStr),
    Seq(ElementId),
}

#[derive(PartialEq, Debug, Clone)]
pub(crate) struct InternalOp {
    pub action: InternalOpType,
    pub obj: ObjectId,
    pub key: Key,
    pub pred: Vec<OpId>,
    pub insert: bool,
}

impl InternalOp {
    pub fn obj_type(&self) -> Option<amp::ObjType> {
        match self.action {
            InternalOpType::Make(objtype) => Some(objtype),
            _ => None,
        }
    }

    pub fn is_inc(&self) -> bool {
        matches!(self.action, InternalOpType::Inc(_))
    }
}

#[derive(PartialEq, Debug, Clone)]
pub(crate) enum InternalOpType {
    Make(amp::ObjType),
    Del,
    Inc(i64),
    Set(amp::ScalarValue),
}

impl Key {
    pub fn as_element_id(&self) -> Option<ElementId> {
        match self {
            Key::Map(_) => None,
            Key::Seq(eid) => Some(*eid),
        }
    }

    pub fn to_opid(&self) -> Option<OpId> {
        match self.as_element_id()? {
            ElementId::Id(id) => Some(id),
            ElementId::Head => None,
        }
    }
}

impl From<OpId> for ObjectId {
    fn from(id: OpId) -> ObjectId {
        ObjectId::Id(id)
    }
}

impl From<OpId> for ElementId {
    fn from(id: OpId) -> ElementId {
        ElementId::Id(id)
    }
}

impl From<OpId> for Key {
    fn from(id: OpId) -> Key {
        Key::Seq(ElementId::Id(id))
    }
}

impl From<&InternalOpType> for amp::OpType {
    fn from(i: &InternalOpType) -> amp::OpType {
        match i {
            InternalOpType::Del => amp::OpType::Del(nonzero!(1_u32)),
            InternalOpType::Make(ot) => amp::OpType::Make(*ot),
            InternalOpType::Set(v) => amp::OpType::Set(v.clone()),
            InternalOpType::Inc(i) => amp::OpType::Inc(*i),
        }
    }
}
75
automerge-backend/src/lib.rs
Normal file
@ -0,0 +1,75 @@
#![warn(clippy::pedantic)]
#![warn(clippy::nursery)]
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::must_use_candidate)]
#![allow(clippy::option_if_let_else)]
#![allow(clippy::cast_sign_loss)]
#![allow(clippy::cast_possible_truncation)]
#![allow(clippy::cast_possible_wrap)]
#![allow(clippy::doc_markdown)]
#![allow(clippy::similar_names)]
#![allow(clippy::shadow_unrelated)]
#![allow(clippy::module_name_repetitions)]
#![allow(clippy::redundant_pub_crate)]
#![allow(clippy::missing_const_for_fn)]
#![allow(clippy::use_self)]
#![allow(clippy::too_many_lines)]

extern crate fxhash;
extern crate hex;
extern crate itertools;
extern crate maplit;
extern crate rand;
extern crate web_sys;

// this is needed for print debugging via WASM
#[allow(unused_macros)]
macro_rules! log {
    ( $( $t:tt )* ) => {
        web_sys::console::log_1(&format!( $( $t )* ).into());
    }
}

mod actor_map;
mod backend;
mod change;
mod columnar;
mod concurrent_operations;
mod decoding;
mod encoding;
mod error;
mod event_handlers;
mod expanded_op;
mod internal;
mod object_store;
mod op_handle;
mod op_set;
mod ordered_set;
mod patches;
mod sync;

pub use backend::Backend;
pub use change::Change;
pub use decoding::Error as DecodingError;
pub use encoding::Error as EncodingError;
pub use error::AutomergeError;
pub use event_handlers::{ChangeEventHandler, EventHandler, EventHandlerId};
pub use sync::{BloomFilter, SyncHave, SyncMessage, SyncState};

#[cfg(test)]
mod tests {
    use std::{
        sync::{Arc, Mutex},
        thread,
    };

    #[test]
    fn sync_and_send_backend() {
        let b = crate::Backend::new();
        let mb = Arc::new(Mutex::new(b));
        thread::spawn(move || {
            let b = mb.lock().unwrap();
            b.get_changes(&[]);
        });
    }
}
111
automerge-backend/src/object_store.rs
Normal file
@ -0,0 +1,111 @@
use std::collections::HashMap;

use automerge_protocol as amp;
use fxhash::FxBuildHasher;

use crate::{
    actor_map::ActorMap,
    concurrent_operations::ConcurrentOperations,
    internal::{ElementId, Key, OpId},
    op_handle::OpHandle,
    ordered_set::{OrderedSet, SkipList},
};

/// ObjectHistory is what the OpSet uses to store operations for a particular
/// key, they represent the two possible container types in automerge, a map or
/// a sequence (tables and text are effectively the maps and sequences
/// respectively).

/// Stores operations on map objects
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct ObjState {
    pub props: HashMap<Key, ConcurrentOperations>,
    pub obj_type: amp::ObjType,
    pub inbound: Option<OpHandle>,
    pub following: HashMap<ElementId, Vec<ElementId>, FxBuildHasher>,
    pub insertions: HashMap<ElementId, OpHandle, FxBuildHasher>,
    pub seq: SkipList<OpId>,
}

impl ObjState {
    pub fn new(obj_type: amp::ObjType) -> ObjState {
        let mut following = HashMap::default();
        following.insert(ElementId::Head, Vec::new());
        ObjState {
            props: HashMap::default(),
            following,
            insertions: HashMap::default(),
            obj_type,
            inbound: None,
            seq: SkipList::new(),
        }
    }

    pub fn is_seq(&self) -> bool {
        self.obj_type.is_sequence()
    }

    fn get_parent(&self, id: &ElementId) -> Option<ElementId> {
        self.insertions.get(id).and_then(|i| i.key.as_element_id())
    }

    fn insertions_after(&self, parent: &ElementId) -> Vec<ElementId> {
        self.following.get(parent).cloned().unwrap_or_default()
    }

    pub fn conflicts(&self, key: &Key) -> impl Iterator<Item = &OpHandle> {
        self.props.get(key).into_iter().flat_map(|i| i.iter())
    }

    #[tracing::instrument(level = "debug", skip(self))]
    pub fn index_of(&self, id: OpId) -> Option<usize> {
        let mut prev_id = id.into();
        let mut index = None;
        // Walk backwards through the following/insertions tree looking for
        // something that is not deleted.
        while index.is_none() {
            prev_id = match self.get_previous(&prev_id) {
                Some(p) => p,
                None => return None,
            };
            match prev_id {
                ElementId::Id(id) => {
                    // FIXME maybe I can speed this up with self.props.get before looking for
                    index = self.seq.index_of(&id);
                }
                ElementId::Head => return None,
            }
        }
        index.map(|i| i + 1)
    }

    fn get_previous(&self, element: &ElementId) -> Option<ElementId> {
        let parent_id = match self.get_parent(element) {
            Some(p) => p,
            None => return None,
        };
        let children = self.insertions_after(&parent_id);
        let pos = match children.iter().position(|k| k == element) {
            Some(p) => p,
            None => return None,
        };
        if pos == 0 {
            Some(parent_id)
        } else {
            let mut prev_id = children[pos - 1]; // FIXME - use refs here
            loop {
                match self.insertions_after(&prev_id).last() {
                    Some(id) => prev_id = *id,
                    None => return Some(prev_id),
                }
            }
        }
    }

    pub fn insert_after(&mut self, elem: ElementId, op: OpHandle, actors: &ActorMap) {
        let eid = op.id.into();
        self.insertions.insert(eid, op);
        let following = self.following.entry(elem).or_default();
        following.push(eid);
        following.sort_unstable_by(|a, b| actors.cmp(b, a));
    }
}
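`ObjState` stores a sequence as a tree: `following` maps each element to the elements inserted directly after it, kept sorted so that all replicas agree. A toy illustration, not the crate's code, of how such a tree flattens into document order; string ids stand in for the real `ElementId`s and the child lists are assumed to be pre-sorted, as `insert_after` guarantees:

use std::collections::HashMap;

// Flatten a "following" tree into document order: each element's children are
// visited depth first, in the stored (already sorted) order.
fn flatten(following: &HashMap<&str, Vec<&str>>, root: &str, out: &mut Vec<String>) {
    if let Some(children) = following.get(root) {
        for child in children {
            out.push((*child).to_string());
            flatten(following, *child, out);
        }
    }
}

fn main() {
    // HEAD -> [a, b]; a -> [a1]: something inserted after `a` lands before `b`.
    let mut following = HashMap::new();
    following.insert("HEAD", vec!["a", "b"]);
    following.insert("a", vec!["a1"]);
    let mut out = Vec::new();
    flatten(&following, "HEAD", &mut out);
    assert_eq!(out, vec!["a", "a1", "b"]);
}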
111
automerge-backend/src/op_handle.rs
Normal file
@ -0,0 +1,111 @@
use std::{
|
||||
borrow::Cow,
|
||||
fmt,
|
||||
hash::{Hash, Hasher},
|
||||
ops::Deref,
|
||||
};
|
||||
|
||||
use automerge_protocol as amp;
|
||||
|
||||
use crate::{
|
||||
actor_map::ActorMap,
|
||||
internal::{InternalOp, InternalOpType, Key, ObjectId, OpId},
|
||||
Change,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub(crate) struct OpHandle {
|
||||
pub id: OpId,
|
||||
pub op: InternalOp,
|
||||
pub delta: i64,
|
||||
}
|
||||
|
||||
impl OpHandle {
|
||||
pub fn extract(change: &Change, actors: &mut ActorMap) -> Vec<OpHandle> {
|
||||
let mut opnum = change.start_op;
|
||||
let actor = actors.import_actor(change.actor_id());
|
||||
change
|
||||
.iter_ops()
|
||||
.map(|op| {
|
||||
let internal_op = actors.import_op(op);
|
||||
let id = OpId(opnum, actor);
|
||||
opnum += 1;
|
||||
OpHandle {
|
||||
id,
|
||||
op: internal_op,
|
||||
delta: 0,
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn adjusted_value(&self) -> amp::ScalarValue {
|
||||
match &self.action {
|
||||
InternalOpType::Set(amp::ScalarValue::Counter(a)) => {
|
||||
amp::ScalarValue::Counter(a + self.delta)
|
||||
}
|
||||
InternalOpType::Set(val) => val.clone(),
|
||||
_ => amp::ScalarValue::Null,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn child(&self) -> Option<ObjectId> {
|
||||
match &self.action {
|
||||
InternalOpType::Make(_) => Some(self.id.into()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn operation_key(&self) -> Cow<Key> {
|
||||
if self.insert {
|
||||
Cow::Owned(self.id.into())
|
||||
} else {
|
||||
Cow::Borrowed(&self.key)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn maybe_increment(&mut self, inc: &OpHandle) -> bool {
|
||||
if let InternalOpType::Inc(amount) = inc.action {
|
||||
if inc.pred.contains(&self.id) {
|
||||
if let InternalOpType::Set(amp::ScalarValue::Counter(_)) = self.action {
|
||||
self.delta += amount;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for OpHandle {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("OpHandle")
|
||||
.field("id", &self.id)
|
||||
.field("action", &self.action)
|
||||
.field("obj", &self.obj)
|
||||
.field("key", &self.key)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl Hash for OpHandle {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.id.hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for OpHandle {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.id.eq(&other.id)
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for OpHandle {}
|
||||
|
||||
impl Deref for OpHandle {
|
||||
type Target = InternalOp;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.op
|
||||
}
|
||||
}
|
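`maybe_increment` and `adjusted_value` together implement counter semantics: the `Set(Counter)` op keeps its original base value, increments whose `pred` names that op accumulate in `delta`, and readers see `base + delta`. A minimal standalone model of that behaviour (simplified types, not the crate's API):

```rust
// Toy counter mirroring OpHandle's delta handling.
struct CounterHandle {
    base: i64,
    delta: i64,
}

impl CounterHandle {
    // mirrors `maybe_increment`, minus the pred/id bookkeeping
    fn maybe_increment(&mut self, amount: i64) {
        self.delta += amount;
    }

    // mirrors `adjusted_value` for the counter case
    fn adjusted_value(&self) -> i64 {
        self.base + self.delta
    }
}

fn main() {
    let mut c = CounterHandle { base: 10, delta: 0 };
    c.maybe_increment(5);
    c.maybe_increment(-2);
    assert_eq!(c.adjusted_value(), 13);
}
```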
353
automerge-backend/src/op_set.rs
Normal file
@@ -0,0 +1,353 @@
//! The OpSet is where most of the interesting work is done in this library.
//! It maintains a mapping from each object ID to a set of concurrent
//! operations which have been seen for that object ID.
//!
//! When the client requests the value of the CRDT (via
//! document::state) the implementation fetches the root object ID's history
//! and then recursively walks through the tree of histories constructing the
//! state. Obviously this is not very efficient.
use std::collections::{HashMap, HashSet};

use automerge_protocol as amp;
use fxhash::FxBuildHasher;
use smol_str::SmolStr;
use tracing::instrument;

use crate::{
    actor_map::ActorMap,
    error::AutomergeError,
    internal::{InternalOpType, Key, ObjectId},
    object_store::ObjState,
    op_handle::OpHandle,
    ordered_set::OrderedSet,
    patches::{IncrementalPatch, PatchWorkshop},
    Change,
};

/// The OpSet manages an ObjectStore, and a queue of incoming changes in order
/// to ensure that operations are delivered to the object store in causal order
///
/// Whenever a new change is received we iterate through any causally ready
/// changes in the queue and apply them to the object store, then repeat until
/// there are no causally ready changes left. The end result of this is that
/// the object store will contain sets of concurrent operations for each object
/// ID or element ID.
///
/// When we want to get the state of the CRDT we walk through the
/// object store, starting with the root object ID and constructing the value
/// at each node by examining the concurrent operations which are active for
/// that node.
#[derive(Debug, PartialEq, Clone)]
pub(crate) struct OpSet {
    pub objs: HashMap<ObjectId, ObjState, FxBuildHasher>,
    pub deps: HashSet<amp::ChangeHash>,
    pub max_op: u64,
    cursors: HashMap<ObjectId, Vec<CursorState>>,
}

impl Default for OpSet {
    fn default() -> Self {
        Self::new()
    }
}

impl OpSet {
    pub fn new() -> OpSet {
        let mut objs = HashMap::default();
        objs.insert(ObjectId::Root, ObjState::new(amp::ObjType::Map));

        OpSet {
            objs,
            max_op: 0,
            deps: HashSet::default(),
            cursors: HashMap::new(),
        }
    }

    pub(crate) fn apply_ops(
        &mut self,
        ops: Vec<OpHandle>,
        patch: &mut IncrementalPatch,
        actors: &mut ActorMap,
    ) -> Result<(), AutomergeError> {
        for op in ops {
            self.apply_op(op, actors, patch)?;
        }
        self.update_cursors(patch);
        Ok(())
    }

    pub fn heads(&self) -> Vec<amp::ChangeHash> {
        let mut deps: Vec<_> = self.deps.iter().copied().collect();
        deps.sort_unstable();
        deps
    }

    #[instrument(level = "debug", skip(self))]
    fn apply_op(
        &mut self,
        op: OpHandle,
        actors: &mut ActorMap,
        patch: &mut IncrementalPatch,
    ) -> Result<(), AutomergeError> {
        if let (Some(child), Some(obj_type)) = (op.child(), op.obj_type()) {
            //let child = actors.import_obj(child);
            self.objs.insert(child, ObjState::new(obj_type));
        }

        if let InternalOpType::Set(amp::ScalarValue::Cursor(ref oid)) = op.op.action {
            tracing::debug!(referred_opid=?oid, "Adding cursor");
            let internal_opid = actors.import_opid(oid);
            let mut target_found = false;
            for (obj_id, obj) in &self.objs {
                if obj.insertions.contains_key(&internal_opid.into()) {
                    target_found = true;
                    self.cursors.entry(*obj_id).or_default().push(CursorState {
                        referring_object_id: actors.export_obj(&op.obj),
                        internal_referring_object_id: op.obj,
                        key: op.key.clone(),
                        element_opid: oid.clone(),
                        internal_element_opid: internal_opid,
                        index: obj.index_of(internal_opid).unwrap_or(0),
                        referred_object_id: actors.export_obj(obj_id),
                        internal_referred_object_id: *obj_id,
                    });
                }
            }
            if !target_found {
                return Err(AutomergeError::InvalidCursor { opid: oid.clone() });
            }
        }

        let object_id = op.obj;
        let object = self.get_obj_mut(&object_id)?;

        let overwritten = if object.is_seq() {
            if op.insert {
                object.insert_after(
                    op.key.as_element_id().ok_or(AutomergeError::MapKeyInSeq)?,
                    op.clone(),
                    actors,
                );
            }

            let ops = object
                .props
                .entry(op.operation_key().into_owned())
                .or_default();
            let before = !ops.is_empty();
            let (op, overwritten_ops) = ops.incorporate_new_op(op);
            let after = !ops.is_empty();

            match (before, after) {
                (true, true) => {
                    tracing::debug!("updating existing element");
                    let opid = op
                        .operation_key()
                        .to_opid()
                        .ok_or(AutomergeError::HeadToOpId)?;
                    let ops = ops.clone();
                    let index = object.index_of(opid).unwrap_or(0);

                    patch.record_seq_updates(&object_id, object, index, ops.iter(), actors);
                }
                (true, false) => {
                    let opid = op
                        .operation_key()
                        .to_opid()
                        .ok_or(AutomergeError::HeadToOpId)?;
                    let index = object.seq.remove_key(&opid).unwrap();
                    tracing::debug!(opid=?opid, index=%index, "deleting element");
                    patch.record_seq_remove(&object_id, op.clone(), index);
                }
                (false, true) => {
                    let id = op
                        .operation_key()
                        .to_opid()
                        .ok_or(AutomergeError::HeadToOpId)?;
                    let index = object.index_of(id).unwrap_or(0);
                    tracing::debug!(new_id=?id, index=%index, after=?op.operation_key(), "inserting new element");
                    object.seq.insert_index(index, id);
                    patch.record_seq_insert(&object_id, op.clone(), index, op.id);
                }
                (false, false) => {}
            };

            self.unlink(&op, &overwritten_ops)?;

            overwritten_ops
        } else {
            let ops = object.props.entry(op.key.clone()).or_default();
            let before = !ops.is_empty();
            let (op, overwritten_ops) = ops.incorporate_new_op(op);
            let after = !ops.is_empty();
            self.unlink(&op, &overwritten_ops)?;

            if before || after {
                patch.record_set(&object_id, op);
            }
            overwritten_ops
        };

        for op in overwritten {
            if let InternalOpType::Set(amp::ScalarValue::Cursor(ref oid)) = op.op.action {
                if let Some(opids) = self.cursors.get_mut(&op.op.obj) {
                    opids.retain(|o| o.element_opid != *oid);
                }
            }
        }
        Ok(())
    }

    fn unlink(&mut self, op: &OpHandle, overwritten: &[OpHandle]) -> Result<(), AutomergeError> {
        if let Some(child) = op.child() {
            self.get_obj_mut(&child)?.inbound = Some(op.clone());
        }

        for old in overwritten.iter() {
            if let Some(child) = old.child() {
                self.get_obj_mut(&child)?.inbound = None;
            }
        }
        Ok(())
    }

    pub fn get_obj(&self, object_id: &ObjectId) -> Result<&ObjState, AutomergeError> {
        self.objs
            .get(object_id)
            .ok_or(AutomergeError::MissingObjectError)
    }

    fn get_obj_mut(&mut self, object_id: &ObjectId) -> Result<&mut ObjState, AutomergeError> {
        self.objs
            .get_mut(object_id)
            .ok_or(AutomergeError::MissingObjectError)
    }

    /// Update any cursors which will be affected by the changes in `pending`
    /// and add the changed cursors to `pending`
    fn update_cursors(&mut self, patch: &mut IncrementalPatch) {
        // For each cursor, if the cursor references an object which has been changed we generate a
        // diff for the cursor
        if self.cursors.is_empty() {
            return;
        }

        let mut cursor_changes: HashMap<ObjectId, Vec<Key>> = HashMap::new();
        for obj_id in patch.changed_object_ids() {
            if let Some(cursors) = self.cursors.get_mut(obj_id) {
                for cursor in cursors.iter_mut() {
                    if let Some(obj) = self.objs.get(&cursor.internal_referred_object_id) {
                        cursor.index = obj.index_of(cursor.internal_element_opid).unwrap_or(0);
                        cursor_changes
                            .entry(cursor.internal_referring_object_id)
                            .or_default()
                            .push(cursor.key.clone());
                    }
                }
            }
        }
        for (obj_id, keys) in cursor_changes {
            for key in keys {
                patch.record_cursor_change(&obj_id, key);
            }
        }
    }

    pub fn update_deps(&mut self, change: &Change) {
        //self.max_op = max(self.max_op, change.max_op());

        for d in &change.deps {
            self.deps.remove(d);
        }
        self.deps.insert(change.hash);
    }

    pub(crate) fn patch_workshop<'a>(&'a self, actors: &'a ActorMap) -> impl PatchWorkshop + 'a {
        PatchWorkshopImpl {
            opset: self,
            actors,
        }
    }
}

/// `CursorState` is the information we need to track in order to update cursors as changes come
/// in. Cursors are created by `Set` operations and therefore live in a particular object (the
/// "referring object") and point at an element in a sequence (the "referred" object). For example
/// this operation:
///
/// ```json
/// {
///     "action": "set",
///     "obj": "_root",
///     "key": "a_cursor",
///     "refObjectId": "1@222"
/// }
/// ```
///
/// Creates a cursor in the root object under the "a_cursor" key which points at element "1@222".
/// When we process a set operation which is a cursor we find the object which contains "1@222" and
/// populate this `CursorState`.
///
/// Note that several fields are duplicated for internal and `automerge_protocol` types. This is
/// because we need to compare those fields against internal types when processing cursors, but we
/// need to create patches which use the `automerge_protocol` types.
#[derive(Debug, PartialEq, Clone)]
struct CursorState {
    /// The id of the object this cursor lives in
    referring_object_id: amp::ObjectId,
    /// The same as `referring_object_id` but as an internal::ObjectID
    internal_referring_object_id: ObjectId,
    /// The key within the referring object this cursor lives at
    key: crate::internal::Key,
    /// The id of the sequence this cursor refers to
    referred_object_id: amp::ObjectId,
    /// The same as the `referred_object_id` but as an internal::ObjectID
    internal_referred_object_id: ObjectId,
    /// The OpID of the element within the sequence this cursor refers to
    element_opid: amp::OpId,
    /// The same as the `element_opid` but as an internal::OpID
    internal_element_opid: crate::internal::OpId,
    index: usize,
}

/// Implementation of `patches::PatchWorkshop` to pass to the various patch
/// generation mechanisms, defined here to avoid having to make members of the
/// OpSet public.
struct PatchWorkshopImpl<'a> {
    opset: &'a OpSet,
    actors: &'a ActorMap,
}

impl<'a> PatchWorkshop for PatchWorkshopImpl<'a> {
    fn get_obj(&self, object_id: &ObjectId) -> Option<&ObjState> {
        self.opset.get_obj(object_id).ok()
    }

    fn find_cursor(&self, opid: &amp::OpId) -> Option<amp::CursorDiff> {
        self.opset
            .cursors
            .values()
            .flatten()
            .find(|c| c.element_opid == *opid)
            .map(|c| amp::CursorDiff {
                object_id: c.referred_object_id.clone(),
                index: c.index as u32,
                elem_id: opid.clone(),
            })
    }

    fn key_to_string(&self, key: &crate::internal::Key) -> SmolStr {
        self.actors.key_to_string(key)
    }

    fn make_external_opid(&self, opid: &crate::internal::OpId) -> amp::OpId {
        self.actors.export_opid(opid)
    }

    fn make_external_objid(&self, object_id: &ObjectId) -> amp::ObjectId {
        self.actors.export_obj(object_id)
    }
}
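`update_deps` is what maintains the document's heads: each applied change removes the hashes it depends on (they are now covered) and adds its own hash, leaving exactly the frontier of the change graph, which `heads()` then returns sorted. A self-contained sketch of that computation, with `u64` standing in for `amp::ChangeHash` (a simplification):

```rust
use std::collections::HashSet;

// Mirrors `OpSet::update_deps`: `heads` is the frontier of the change graph -
// hashes that no applied change depends on.
fn update_deps(heads: &mut HashSet<u64>, change_hash: u64, change_deps: &[u64]) {
    for d in change_deps {
        heads.remove(d); // a covered dependency is no longer a head
    }
    heads.insert(change_hash);
}

fn main() {
    let mut heads = HashSet::new();
    update_deps(&mut heads, 1, &[]); // initial change
    update_deps(&mut heads, 2, &[1]); // depends on 1, so 1 stops being a head
    update_deps(&mut heads, 3, &[1]); // concurrent with 2
    let mut sorted: Vec<_> = heads.into_iter().collect();
    sorted.sort_unstable(); // `heads()` returns them sorted
    assert_eq!(sorted, vec![2, 3]);
}
```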
897
automerge-backend/src/ordered_set.rs
Normal file
@@ -0,0 +1,897 @@
#![allow(dead_code)]

use std::{
    cmp::{max, min},
    collections::HashMap,
    fmt::Debug,
    hash::Hash,
    iter::Iterator,
    mem,
    ops::AddAssign,
};

use fxhash::FxBuildHasher;
use rand::{rngs::SmallRng, Rng, SeedableRng};

#[derive(Debug, Copy, Clone, PartialEq)]
struct Link<K>
where
    K: Clone + Copy + Debug + PartialEq,
{
    key: Option<K>,
    count: usize,
}

#[derive(Debug, Clone, PartialEq)]
struct LinkLevel<K>
where
    K: Copy + Clone + Debug + PartialEq,
{
    next: Link<K>,
    prev: Link<K>,
}

#[derive(Debug, Clone, PartialEq)]
struct Node<K>
where
    K: Copy + Clone + Debug + PartialEq,
{
    level: usize,
    links: Vec<LinkLevel<K>>,
    // IDEA: can I make this an unsized array??
    // IDEA - Node could be Node(Vec<K>)
}

impl<K> AddAssign for Link<K>
where
    K: Copy + Clone + Debug + PartialEq,
{
    fn add_assign(&mut self, other: Self) {
        self.key = other.key;
        self.count += other.count;
    }
}

impl<K> Node<K>
where
    K: Debug + Copy + Clone + PartialEq,
{
    fn successor(&self) -> Option<&K> {
        if self.links.is_empty() {
            None
        } else {
            self.links[0].next.key.as_ref()
        }
    }

    fn remove_node_after(&mut self, from_level: usize, removed_level: usize, links: &[Link<K>]) {
        for (level, link) in links.iter().enumerate().take(self.level).skip(from_level) {
            if level < removed_level {
                self.links[level].next = *link;
            } else {
                self.links[level].next.count -= 1;
            }
        }
    }

    fn remove_node_before(&mut self, from_level: usize, removed_level: usize, links: &[Link<K>]) {
        for (level, link) in links.iter().enumerate().take(self.level).skip(from_level) {
            if level < removed_level {
                self.links[level].prev = *link;
            } else {
                self.links[level].prev.count -= 1;
            }
        }
    }

    fn insert_node_after(
        &mut self,
        new_key: &K,
        new_level: usize,
        from_level: usize,
        distance: usize,
        is_head: bool,
    ) {
        if new_level > self.level && !is_head {
            panic!("Cannot increase the level of a non-head node");
        }
        self.level = max(self.level, new_level);

        for level in from_level..self.level {
            if level < new_level {
                let next = Link {
                    key: Some(*new_key),
                    count: distance,
                };
                let prev = Link {
                    key: None,
                    count: 0,
                };
                if self.links.len() == level {
                    self.links.push(LinkLevel { next, prev });
                } else {
                    self.links[level].next = next;
                }
            } else {
                self.links[level].next.count += 1;
            }
        }
    }

    fn insert_node_before(
        &mut self,
        new_key: &K,
        new_level: usize,
        from_level: usize,
        distance: usize,
    ) {
        if new_level > self.level {
            panic!("Cannot increase the level on insert_node_before");
        }
        for level in from_level..self.level {
            if level < new_level {
                self.links[level].prev = Link {
                    key: Some(*new_key),
                    count: distance,
                };
            } else {
                self.links[level].prev.count += 1;
            }
        }
    }
}

#[derive(Debug, Clone, PartialEq)]
pub(crate) struct VecOrderedSet<K>
where
    K: Clone + Debug + Hash + PartialEq + Eq,
{
    keys: Vec<K>,
}

impl<K> VecOrderedSet<K>
where
    K: Clone + Debug + Hash + PartialEq + Eq,
{
    pub fn new() -> VecOrderedSet<K> {
        VecOrderedSet { keys: Vec::new() }
    }
}

pub trait OrderedSet<K>
where
    K: Clone + Debug + Hash + PartialEq + Eq,
{
    fn index_of(&self, key: &K) -> Option<usize>;
    fn remove_key(&mut self, key: &K) -> Option<usize>;
    fn insert_index(&mut self, index: usize, key: K) -> bool;
    fn remove_index(&mut self, index: usize) -> Option<K>;
    fn key_of(&self, index: usize) -> Option<&K>;
}

impl<K> OrderedSet<K> for SkipList<K>
where
    K: Copy + Clone + Debug + Hash + PartialEq + Eq,
{
    fn remove_index(&mut self, index: usize) -> Option<K> {
        let key = self.key_of(index).copied();
        if let Some(ref k) = &key {
            self.remove(k);
        }
        key
    }

    fn remove_key(&mut self, key: &K) -> Option<usize> {
        let index = self.index_of(key);
        if index.is_some() {
            self.remove(key);
        }
        index
    }

    fn key_of(&self, index: usize) -> Option<&K> {
        if index >= self.len {
            return None;
        }
        let target = index + 1;
        let mut node = &self.head;
        let mut level = node.level - 1;
        let mut count = 0;
        loop {
            while count + node.links[level].next.count > target {
                level -= 1;
            }
            count += node.links[level].next.count;
            let k = node.links[level].next.key.as_ref();
            if count == target {
                return k;
            }
            node = self.get_node(k);
        }
    }

    fn index_of(&self, key: &K) -> Option<usize> {
        let mut count = 0;
        let mut key = key;
        loop {
            if let Some(node) = self.nodes.get(key) {
                let link = &node.links[node.level - 1].prev;
                count += link.count;
                if let Some(ref k) = &link.key {
                    key = k;
                } else {
                    break;
                }
            } else {
                return None;
            }
        }
        Some(count - 1)
    }

    fn insert_index(&mut self, index: usize, key: K) -> bool {
        if index == 0 {
            self.insert_head(key)
        } else {
            self.key_of(index - 1)
                .copied()
                .map_or(false, |suc| self.insert_after(&suc, key))
        }
    }
}

impl<K> OrderedSet<K> for VecOrderedSet<K>
where
    K: Clone + Debug + Hash + PartialEq + Eq,
{
    fn remove_index(&mut self, index: usize) -> Option<K> {
        if self.keys.len() > index {
            let k = self.keys.remove(index);
            Some(k)
        } else {
            None
        }
    }

    fn key_of(&self, index: usize) -> Option<&K> {
        self.keys.get(index)
    }

    fn index_of(&self, key: &K) -> Option<usize> {
        self.keys.iter().position(|o| o == key)
    }

    fn insert_index(&mut self, index: usize, key: K) -> bool {
        self.keys.insert(index, key);
        true
    }

    fn remove_key(&mut self, key: &K) -> Option<usize> {
        if let Some(index) = self.keys.iter().position(|o| o == key) {
            self.keys.remove(index);
            Some(index)
        } else {
            None
        }
    }
}

impl<K> Default for SkipList<K>
where
    K: Copy + Clone + Debug + Hash + PartialEq + Eq,
{
    fn default() -> Self {
        Self::new()
    }
}

impl<K> Default for VecOrderedSet<K>
where
    K: Clone + Debug + Hash + PartialEq + Eq,
{
    fn default() -> Self {
        Self::new()
    }
}

impl<'a, K> IntoIterator for &'a VecOrderedSet<K>
where
    K: Clone + Debug + Hash + PartialEq + Eq,
{
    type Item = &'a K;
    type IntoIter = std::slice::Iter<'a, K>;

    fn into_iter(self) -> std::slice::Iter<'a, K> {
        self.keys.as_slice().iter()
    }
}

impl<'a, K> IntoIterator for &'a SkipList<K>
where
    K: Copy + Clone + Debug + Hash + PartialEq + Eq,
{
    type Item = &'a K;
    type IntoIter = SkipIterator<'a, K>;

    fn into_iter(self) -> Self::IntoIter {
        SkipIterator {
            id: self.head.successor(),
            nodes: &self.nodes,
        }
    }
}

#[derive(Debug, Clone)]
pub(crate) struct SkipList<K>
where
    K: Copy + Clone + Debug + Hash + PartialEq + Eq,
{
    nodes: HashMap<K, Node<K>, FxBuildHasher>,
    head: Node<K>,
    rng: SmallRng,
    pub len: usize,
}

impl<K> PartialEq for SkipList<K>
where
    K: Copy + Clone + Debug + Hash + PartialEq + Eq,
{
    fn eq(&self, other: &Self) -> bool {
        self.nodes.eq(&other.nodes)
    }
}

impl<K> SkipList<K>
where
    K: Copy + Clone + Debug + Hash + PartialEq + Eq,
{
    pub fn new() -> SkipList<K> {
        let nodes = HashMap::default();
        let head = Node {
            links: Vec::new(),
            level: 1,
            //is_head: true,
        };
        let len = 0;
        let rng = SmallRng::seed_from_u64(0);
        SkipList {
            nodes,
            head,
            rng,
            len,
        }
    }

    fn remove(&mut self, key: &K) {
        let removed = self
            .nodes
            .remove(key)
            .unwrap_or_else(|| panic!("The given key cannot be removed because it does not exist"));

        let max_level = self.head.level;
        let mut pre = self.predecessors(removed.links[0].prev.key.as_ref(), max_level);
        let mut suc = self.successors(removed.links[0].next.key.as_ref(), max_level);

        for i in 0..max_level {
            let distance = pre[i].count + suc[i].count - 1;
            pre[i].count = distance;
            suc[i].count = distance;
        }

        self.len -= 1;
        let mut pre_level = 0;
        let mut suc_level = 0;

        for level in 1..=max_level {
            let update_level = min(level, removed.level);
            if level == max_level
                || pre.get(level).map(|l| &l.key) != pre.get(pre_level).map(|l| &l.key)
            {
                self.get_node_mut(pre[pre_level].key.as_ref())
                    .remove_node_after(pre_level, update_level, &suc);
                pre_level = level;
            }
            if suc[suc_level].key.is_some()
                && (level == max_level
                    || suc.get(level).map(|l| &l.key) != suc.get(suc_level).map(|l| &l.key))
            {
                self.get_node_mut(suc[suc_level].key.as_ref())
                    .remove_node_before(suc_level, update_level, &pre);
                suc_level = level;
            }
        }
    }

    fn get_node(&self, key: Option<&K>) -> &Node<K> {
        if let Some(k) = key {
            self.nodes
                .get(k)
                .unwrap_or_else(|| panic!("get_node - missing key {:?}", key))
        } else {
            &self.head
        }
    }

    fn get_node_mut(&mut self, key: Option<&K>) -> &mut Node<K> {
        if let Some(k) = key {
            self.nodes
                .get_mut(k)
                .unwrap_or_else(|| panic!("get_node - missing key {:?}", key))
        } else {
            &mut self.head
        }
    }

    // IDEA: Can I merge the successors and predecessors into a single unified function
    // so we don't need to zip the results?
    fn predecessors(&self, predecessor: Option<&K>, max_level: usize) -> Vec<Link<K>> {
        let mut pre = Vec::with_capacity(max_level);
        pre.push(Link {
            key: predecessor.copied(),
            count: 1,
        });

        for level in 1..max_level {
            let mut link = pre[level - 1];
            while link.key.is_some() {
                let node = self.get_node(link.key.as_ref());
                if node.level > level {
                    break;
                }
                if node.level < level {
                    panic!("Level lower than expected");
                }
                link += node.links[level - 1].prev;
            }
            pre.push(link);
        }
        pre
    }

    fn successors(&self, successor: Option<&K>, max_level: usize) -> Vec<Link<K>> {
        let mut suc = Vec::with_capacity(max_level);
        suc.push(Link {
            key: successor.copied(),
            count: 1,
        });

        for level in 1..max_level {
            let mut link = suc[level - 1];
            while link.key.is_some() {
                let node = self.get_node(link.key.as_ref());
                if node.level > level {
                    break;
                }
                if node.level < level {
                    panic!("Level lower than expected");
                }
                link += node.links[level - 1].next;
            }
            suc.push(link);
        }
        suc
    }

    pub fn insert_head(&mut self, key: K) -> bool {
        self.insert(None, key)
    }

    pub fn insert_after(&mut self, predecessor: &K, key: K) -> bool {
        // TODO add check that `predecessor` is actually in the list and is not identical to `K`.
        // The latter point is especially important as allowing cycles leads to an infinite loop in
        // `ObjState.index_of`
        self.insert(Some(predecessor), key)
    }

    fn insert(&mut self, predecessor: Option<&K>, key: K) -> bool {
        if self.nodes.contains_key(&key) {
            return false;
        }

        let new_level = self.random_level();
        let max_level = max(new_level, self.head.level);
        let successor = self.get_node(predecessor).successor();
        let mut pre = self.predecessors(predecessor, max_level);
        let mut suc = self.successors(successor, max_level);

        self.len += 1;

        let mut pre_level = 0;
        let mut suc_level = 0;
        for level in 1..=max_level {
            let update_level = min(level, new_level);
            if level == max_level
                || pre.get(level).map(|l| &l.key) != pre.get(pre_level).map(|l| &l.key)
            {
                self.get_node_mut(pre[pre_level].key.as_ref())
                    .insert_node_after(
                        &key,
                        update_level,
                        pre_level,
                        pre[pre_level].count,
                        pre[pre_level].key.is_none(),
                    );
                pre_level = level;
            }
            if suc[suc_level].key.is_some()
                && (level == max_level
                    || suc.get(level).map(|l| &l.key) != suc.get(suc_level).map(|l| &l.key))
            {
                self.get_node_mut(suc[suc_level].key.as_ref())
                    .insert_node_before(&key, update_level, suc_level, suc[suc_level].count);
                suc_level = level;
            }
        }

        pre.truncate(new_level);
        suc.truncate(new_level);
        let links = pre
            .into_iter()
            .zip(suc.into_iter())
            .map(|(prev, next)| LinkLevel { next, prev })
            .collect();
        self.nodes.insert(
            key,
            Node {
                level: new_level,
                links,
            },
        );
        true
    }

    // Returns a random number from the geometric distribution with p = 0.75.
    // That is, returns k with probability p * (1 - p)^(k - 1).
    // For example, returns 1 with probability 3/4, returns 2 with probability 3/16,
    // returns 3 with probability 3/64, and so on.
    fn random_level(&mut self) -> usize {
        // Create a random number between 0 and 2^32 - 1, then derive the level
        // from the number of leading zeros in that 32-bit number
        let rand: u32 = self.rng.gen();
        let mut level = 1;
        while rand < 1 << (32 - 2 * level) && level < 16 {
            level += 1;
        }
        level
    }
}

pub(crate) struct SkipIterator<'a, K>
where
    K: Debug + Copy + Clone + PartialEq,
{
    id: Option<&'a K>,
    nodes: &'a HashMap<K, Node<K>, FxBuildHasher>,
}

impl<'a, K> Iterator for SkipIterator<'a, K>
where
    K: Debug + Copy + Clone + Hash + PartialEq + Eq,
{
    type Item = &'a K;

    fn next(&mut self) -> Option<&'a K> {
        let mut successor = match self.id {
            None => None,
            Some(key) => self.nodes.get(key).and_then(Node::successor),
        };
        mem::swap(&mut successor, &mut self.id);
        successor
    }
}

#[derive(Debug, Clone, PartialEq)]
struct Delta<K>
where
    K: Clone + Debug + Hash + PartialEq + Eq,
{
    index: isize,
    key: Option<K>,
}

// This is an experiment to see if I can change request processing
// index lookups by not mutating the skip list.
// The throughput gain was quite significant actually - about 1.5x in the
// mass edit perf test.
// Ideally we can speed up the skip list enough to not need this.
// Also this could perform worse if the ops per change were huge,
// e.g. 10,000 changes with 10 ops each vs 10 changes with 10,000 ops each.

/*
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct OrdDelta<'a, K>
where
    K: Clone + Debug + Hash + PartialEq + Eq,
{
    list: Option<&'a SkipList<K>>,
    delta: Vec<Delta<K>>,
}

impl<'a, K> OrdDelta<'a, K>
where
    K: Clone + Debug + Hash + PartialEq + Eq,
{
    pub fn new(list: Option<&'a SkipList<K>>) -> OrdDelta<'a, K> {
        OrdDelta {
            list,
            delta: Vec::new(),
        }
    }
}

impl<'a, K> OrderedSet<K> for OrdDelta<'a, K>
where
    K: Clone + Debug + Hash + PartialEq + Eq,
{
    fn insert_index(&mut self, index: usize, key: K) -> bool {
        let index = index as isize;
        let delta = Delta {
            index,
            key: Some(key),
        };
        for i in 0..self.delta.len() {
            if self.delta[i].index >= index {
                self.delta.iter_mut().skip(i).for_each(|d| d.index += 1);
                self.delta.insert(i, delta);
                return true;
            }
        }
        self.delta.push(delta);
        true
    }

    fn key_of(&self, index: usize) -> Option<&K> {
        let index = index as isize;
        let mut acc: isize = 0;
        for i in 0..self.delta.len() {
            match &self.delta[i] {
                Delta {
                    index: j,
                    key: Some(key),
                } => {
                    if j == &index {
                        return Some(&key);
                    }
                    if j > &index {
                        break;
                    }
                    acc += 1;
                }
                Delta {
                    index: j,
                    key: None,
                } => {
                    if j > &index {
                        break;
                    }
                    acc -= 1;
                }
            }
        }
        self.list
            .and_then(|l| l.key_of((index as isize - acc) as usize))
    }

    fn remove_index(&mut self, index: usize) -> Option<K> {
        let index = index as isize;
        let delta = Delta { index, key: None };
        for i in 0..self.delta.len() {
            if self.delta[i].index == index && self.delta[i].key.is_some() {
                let old_insert = self.delta.remove(i);
                self.delta.iter_mut().skip(i).for_each(|d| d.index -= 1);
                return old_insert.key;
            }
            if self.delta[i].index > index {
                let key = self.key_of(index as usize).cloned();
                self.delta.iter_mut().skip(i).for_each(|d| d.index -= 1);
                self.delta.insert(i, delta);
                return key;
            }
        }
        let key = self.key_of(index as usize).cloned();
        self.delta.push(delta);
        key
    }

    fn index_of(&self, _key: &K) -> Option<usize> {
        panic!("not implemented");
    }

    fn remove_key(&mut self, _key: &K) -> Option<usize> {
        panic!("not implemented");
    }
}
*/

// get(n)
// insert(n)
// len()
// remove(n)
// get_index_for(T)
// insert_after_(i,K,V)

#[cfg(test)]
mod tests {
    use super::*;
    //use std::str::FromStr;

    #[test]
    fn test_index_of() {
        let mut s = SkipList::<&str>::new();

        // should return None on an empty list
        assert_eq!(s.index_of(&"foo"), None);

        // should return None for a nonexistent key
        s.insert_head("foo");
        assert_eq!(s.index_of(&"baz"), None);

        // should return 0 for the first list element
        assert_eq!(s.index_of(&"foo"), Some(0));

        // should return length-1 for the last list element
        s.insert_after(&"foo", "bar");
        s.insert_after(&"bar", "baz");
        assert_eq!(s.index_of(&"baz"), Some(s.len - 1));

        // should adjust based on removed elements
        s.remove_key(&"foo");
        assert_eq!(s.index_of(&"bar"), Some(0));
        assert_eq!(s.index_of(&"baz"), Some(1));
        s.remove_key(&"bar");
        assert_eq!(s.index_of(&"baz"), Some(0));
    }

    #[test]
    fn test_len() {
        let mut s = SkipList::<&str>::new();

        // should be 0 for an empty list
        assert_eq!(s.len, 0);

        // should increase by 1 for every insertion
        s.insert_head("a3");
        s.insert_head("a2");
        s.insert_head("a1");
        assert_eq!(s.len, 3);

        // should decrease by 1 for every removal
        s.remove_key(&"a2");
        assert_eq!(s.len, 2);
    }

    #[test]
    fn test_key_of() {
        let mut s = SkipList::<&str>::new();

        // should return None on an empty list
        assert_eq!(s.key_of(0), None);

        // should return None for an index past the end of the list
        s.insert_head("a3");
        s.insert_head("a2");
        s.insert_head("a1");
        assert_eq!(s.key_of(10), None);

        // should return the first key for index 0
        assert_eq!(s.key_of(0), Some(&"a1"));

        // should return the last key for index -1
        // assert_eq!(s.key_of(-1), Some("a3"));

        // should return the last key for index length-1
        assert_eq!(s.key_of(s.len - 1), Some(&"a3"));

        // should not count removed elements
        s.remove_key(&"a1");
        s.remove_key(&"a3");
        assert_eq!(s.key_of(0), Some(&"a2"));
    }

    #[test]
    fn test_insert_index() {
        let mut s = SkipList::<&str>::new();

        // should insert the new key-value pair at the given index
        s.insert_head("aaa");
        s.insert_after(&"aaa", "ccc");
        s.insert_index(1, "bbb");
        assert_eq!(s.index_of(&"aaa"), Some(0));
        assert_eq!(s.index_of(&"bbb"), Some(1));
        assert_eq!(s.index_of(&"ccc"), Some(2));

        // should insert at the head if the index is zero
        s.insert_index(0, "a");
        assert_eq!(s.key_of(0), Some(&"a"));
    }

    #[test]
    fn test_remove_index() {
        let mut s = SkipList::<&str>::new();

        // should remove the value at the given index
        s.insert_head("ccc");
        s.insert_head("bbb");
        s.insert_head("aaa");
        s.remove_index(1);
        assert_eq!(s.index_of(&"aaa"), Some(0));
        assert_eq!(s.index_of(&"bbb"), None);
        assert_eq!(s.index_of(&"ccc"), Some(1));

        // should raise an error if the given index is out of bounds
        assert_eq!(s.remove_index(100), None);
    }

    #[test]
    fn test_remove_key_big() {
        // String is not Copy so we have to create our elements first and then insert them
        let elems: Vec<String> = (0..10000)
            .map(|i| {
                let j = 9999 - i;
                format!("a{}", j)
            })
            .collect();

        let mut s = SkipList::<&str>::new();
        for elem in &elems {
            s.insert_head(elem);
        }

        assert_eq!(s.index_of(&"a20"), Some(20));
        assert_eq!(s.index_of(&"a500"), Some(500));
        assert_eq!(s.index_of(&"a1000"), Some(1000));

        for i in 0..5000 {
            let j = (4999 - i) * 2 + 1;
            s.remove_index(j);
        }

        assert_eq!(s.index_of(&"a4000"), Some(2000));
        assert_eq!(s.index_of(&"a1000"), Some(500));
        assert_eq!(s.index_of(&"a500"), Some(250));
        assert_eq!(s.index_of(&"a20"), Some(10));
    }

    #[test]
    fn test_remove_key() {
        let mut s = SkipList::<&str>::new();
        s.insert_head("a20");
        s.insert_head("a19");
        s.insert_head("a18");
        s.insert_head("a17");
        s.insert_head("a16");
        s.insert_head("a15");
        s.insert_head("a14");
        s.insert_head("a13");
        s.insert_head("a12");
        s.insert_head("a11");
        s.insert_head("a10");
        s.insert_head("a9");
        s.insert_head("a8");
        s.insert_head("a7");
        s.insert_head("a6");
        s.insert_head("a5");
        s.insert_head("a4");
        s.insert_head("a3");
        s.insert_head("a2");
        s.insert_head("a1");
        s.insert_head("a0");

        assert_eq!(s.index_of(&"a20"), Some(20));

        s.remove_key(&"a1");
        s.remove_key(&"a3");
        s.remove_key(&"a5");
        s.remove_key(&"a7");
        s.remove_key(&"a9");
        s.remove_key(&"a11");
        s.remove_key(&"a13");
        s.remove_key(&"a15");
        s.remove_key(&"a17");
        s.remove_key(&"a19");

        assert_eq!(s.index_of(&"a20"), Some(10));
        assert_eq!(s.index_of(&"a10"), Some(5));
    }
}
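As a quick orientation to the `OrderedSet` API above, here is a short usage sketch in the style of the tests (it would need to live inside this crate, since `SkipList` is `pub(crate)`):

```rust
fn ordered_set_demo() {
    let mut s = SkipList::<&str>::new();
    s.insert_head("b"); // [b]
    s.insert_head("a"); // [a, b]
    s.insert_after(&"b", "c"); // [a, b, c]
    assert_eq!(s.index_of(&"c"), Some(2));
    assert_eq!(s.key_of(1), Some(&"b"));
    s.remove_key(&"b"); // [a, c]
    assert_eq!(s.index_of(&"c"), Some(1));
}
```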
10
automerge-backend/src/patches.rs
Normal file
@@ -0,0 +1,10 @@
mod edits;
mod from_scratch_diff;
mod gen_value_diff;
mod incremental_diff;
mod patch_workshop;

pub(crate) use edits::Edits;
pub(crate) use from_scratch_diff::generate_from_scratch_diff;
pub(crate) use incremental_diff::IncrementalPatch;
pub(crate) use patch_workshop::PatchWorkshop;
98
automerge-backend/src/patches/edits.rs
Normal file
@@ -0,0 +1,98 @@
use std::{convert::TryInto, mem};

use automerge_protocol as amp;

#[derive(Debug)]
pub(crate) struct Edits(Vec<amp::DiffEdit>);

impl Edits {
    pub(crate) fn new() -> Edits {
        Edits(Vec::new())
    }

    /// Append an edit to this sequence, collapsing it into the last edit if possible.
    ///
    /// The collapsing handles conversion of a sequence of inserts to a multi-insert.
    pub(crate) fn append_edit(&mut self, edit: amp::DiffEdit) {
        if let Some(mut last) = self.0.last_mut() {
            match (&mut last, edit) {
                (
                    amp::DiffEdit::SingleElementInsert {
                        index,
                        elem_id,
                        op_id,
                        value: amp::Diff::Value(value),
                    },
                    amp::DiffEdit::SingleElementInsert {
                        index: next_index,
                        elem_id: next_elem_id,
                        op_id: next_op_id,
                        value: amp::Diff::Value(next_value),
                    },
                ) if *index + 1 == next_index
                    && elem_id.as_opid() == Some(op_id)
                    && next_elem_id.as_opid() == Some(&next_op_id)
                    // Ensure the values have a common type
                    && std::mem::discriminant(value) == std::mem::discriminant(&next_value)
                    && op_id.delta(&next_op_id, 1) =>
                {
                    let values: amp::ScalarValues = vec![
                        // We need ownership of `value`. We can either `clone` it
                        // or swap it with a junk value using `mem::replace`
                        mem::replace(value, amp::ScalarValue::Null),
                        next_value,
                    ]
                    .try_into()
                    // `unwrap` is safe: we check for same types above
                    // in the if stmt
                    .unwrap();
                    *last = amp::DiffEdit::MultiElementInsert(amp::MultiElementInsert {
                        index: *index,
                        elem_id: elem_id.clone(),
                        values,
                    });
                }
                (
                    amp::DiffEdit::MultiElementInsert(amp::MultiElementInsert {
                        index,
                        elem_id,
                        values,
                    }),
                    amp::DiffEdit::SingleElementInsert {
                        index: next_index,
                        elem_id: next_elem_id,
                        op_id,
                        value: amp::Diff::Value(value),
                    },
                ) if *index + (values.len() as u64) == next_index
                    && next_elem_id.as_opid() == Some(&op_id)
                    // Ensure the values have a common type
                    // `unwrap` is safe: `values` always has a length of at least 1 at this point
                    && std::mem::discriminant(values.get(0).unwrap()) == std::mem::discriminant(&value)
                    && elem_id
                        .as_opid()
                        .unwrap()
                        .delta(&op_id, values.len() as u64) =>
                {
                    // `unwrap_none` is safe: we check if they are the same type above
                    //values.append(value).unwrap_none();
                    values.append(value);
                }
                (
                    amp::DiffEdit::Remove { index, count },
                    amp::DiffEdit::Remove {
                        index: new_index,
                        count: new_count,
                    },
                ) if *index == new_index => *count += new_count,
                (_, edit) => self.0.push(edit),
            }
        } else {
            self.0.push(edit);
        }
    }

    pub(crate) fn into_vec(self) -> Vec<amp::DiffEdit> {
        self.0
    }
}
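The collapsing in `append_edit` is easier to see with the protocol types stripped away. A simplified model of how consecutive single inserts become one multi-element run, using a hypothetical `Edit` enum rather than the `automerge_protocol` types (the real code also checks element ids, op id deltas, and value types):

```rust
#[derive(Debug, PartialEq)]
enum Edit {
    Insert { index: u64, values: Vec<i64> },
}

// Extend the trailing insert run when the new insert lands exactly at its
// end; otherwise start a new edit.
fn append_insert(edits: &mut Vec<Edit>, index: u64, value: i64) {
    if let Some(Edit::Insert { index: start, values }) = edits.last_mut() {
        if *start + values.len() as u64 == index {
            values.push(value);
            return;
        }
    }
    edits.push(Edit::Insert { index, values: vec![value] });
}

fn main() {
    let mut edits = Vec::new();
    append_insert(&mut edits, 0, 10);
    append_insert(&mut edits, 1, 11);
    append_insert(&mut edits, 2, 12);
    // three adjacent inserts collapse into one run
    assert_eq!(edits, vec![Edit::Insert { index: 0, values: vec![10, 11, 12] }]);
}
```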
201
automerge-backend/src/patches/from_scratch_diff.rs
Normal file
@@ -0,0 +1,201 @@
use core::cmp::max;
use std::collections::HashMap;

use automerge_protocol as amp;

use super::{gen_value_diff::gen_value_diff, Edits, PatchWorkshop};
use crate::{internal::ObjectId, object_store::ObjState};

/// Used to generate a diff when there is no previous state to diff against.
/// This works by starting at the root object and then recursively constructing
/// all the objects contained in it.
pub(crate) fn generate_from_scratch_diff(workshop: &dyn PatchWorkshop) -> amp::RootDiff {
    let mut props = HashMap::new();

    for (key, ops) in &workshop.get_obj(&ObjectId::Root).unwrap().props {
        if !ops.is_empty() {
            let mut opid_to_value = HashMap::new();
            for op in ops.iter() {
                let amp_opid = workshop.make_external_opid(&op.id);
                if let Some(child_id) = op.child() {
                    opid_to_value.insert(amp_opid, construct_object(&child_id, workshop));
                } else {
                    opid_to_value
                        .insert(amp_opid, gen_value_diff(op, &op.adjusted_value(), workshop));
                }
            }
            props.insert(workshop.key_to_string(key), opid_to_value);
        }
    }
    amp::RootDiff { props }
}

fn construct_map(
    object_id: &ObjectId,
    object: &ObjState,
    workshop: &dyn PatchWorkshop,
) -> amp::MapDiff {
    let mut props = HashMap::new();

    for (key, ops) in &object.props {
        if !ops.is_empty() {
            let mut opid_to_value = HashMap::new();
            for op in ops.iter() {
                let amp_opid = workshop.make_external_opid(&op.id);
                if let Some(child_id) = op.child() {
                    opid_to_value.insert(amp_opid, construct_object(&child_id, workshop));
                } else {
                    opid_to_value
                        .insert(amp_opid, gen_value_diff(op, &op.adjusted_value(), workshop));
                }
            }
            props.insert(workshop.key_to_string(key), opid_to_value);
        }
    }
    amp::MapDiff {
        object_id: workshop.make_external_objid(object_id),
        props,
    }
}

fn construct_table(
    object_id: &ObjectId,
    object: &ObjState,
    workshop: &dyn PatchWorkshop,
) -> amp::TableDiff {
    let mut props = HashMap::new();

    for (key, ops) in &object.props {
        if !ops.is_empty() {
            let mut opid_to_value = HashMap::new();
            for op in ops.iter() {
                let amp_opid = workshop.make_external_opid(&op.id);
                if let Some(child_id) = op.child() {
                    opid_to_value.insert(amp_opid, construct_object(&child_id, workshop));
                } else {
                    opid_to_value
                        .insert(amp_opid, gen_value_diff(op, &op.adjusted_value(), workshop));
                }
            }
            props.insert(workshop.key_to_string(key), opid_to_value);
        }
    }
    amp::TableDiff {
        object_id: workshop.make_external_objid(object_id),
        props,
    }
}

fn construct_list(
    object_id: &ObjectId,
    object: &ObjState,
    workshop: &dyn PatchWorkshop,
) -> amp::ListDiff {
    let mut edits = Edits::new();
    let mut index = 0;
    let mut max_counter = 0;
    let mut seen_indices: std::collections::HashSet<u64> = std::collections::HashSet::new();

    for opid in &object.seq {
        max_counter = max(max_counter, opid.0);
        let key = (*opid).into(); // FIXME - something is wrong here
        if let Some(ops) = object.props.get(&key) {
            if !ops.is_empty() {
                for op in ops.iter() {
                    let value = if let Some(child_id) = op.child() {
                        construct_object(&child_id, workshop)
                    } else {
                        gen_value_diff(op, &op.adjusted_value(), workshop)
                    };
                    let amp_opid = workshop.make_external_opid(&op.id);
                    if seen_indices.contains(&index) {
                        edits.append_edit(amp::DiffEdit::Update {
                            index,
                            op_id: amp_opid,
                            value,
                        });
                    } else {
                        let key = workshop
                            .make_external_opid(&key.to_opid().unwrap_or(op.id))
                            .into();
                        edits.append_edit(amp::DiffEdit::SingleElementInsert {
                            index,
                            elem_id: key,
                            op_id: amp_opid,
                            value,
                        });
                    }
                    seen_indices.insert(index);
                }
                index += 1;
            }
        }
    }
    amp::ListDiff {
        object_id: workshop.make_external_objid(object_id),
        edits: edits.into_vec(),
    }
}

fn construct_text(
    object_id: &ObjectId,
    object: &ObjState,
    workshop: &dyn PatchWorkshop,
) -> amp::TextDiff {
    let mut edits = Edits::new();
    let mut index = 0;
    let mut max_counter = 0;
    let mut seen_indices: std::collections::HashSet<u64> = std::collections::HashSet::new();

    for opid in &object.seq {
        max_counter = max(max_counter, opid.0);
        let key = (*opid).into(); // FIXME - something is wrong here
        if let Some(ops) = object.props.get(&key) {
            if !ops.is_empty() {
                for op in ops.iter() {
                    let value = if let Some(child_id) = op.child() {
                        construct_object(&child_id, workshop)
                    } else {
                        gen_value_diff(op, &op.adjusted_value(), workshop)
                    };
                    let amp_opid = workshop.make_external_opid(&op.id);
                    if seen_indices.contains(&index) {
                        edits.append_edit(amp::DiffEdit::Update {
                            index,
                            op_id: amp_opid,
                            value,
                        });
                    } else {
                        let key = workshop
                            .make_external_opid(&key.to_opid().unwrap_or(op.id))
                            .into();
                        edits.append_edit(amp::DiffEdit::SingleElementInsert {
                            index,
                            elem_id: key,
                            op_id: amp_opid,
                            value,
                        });
                    }
                    seen_indices.insert(index);
                }
                index += 1;
            }
        }
    }
    amp::TextDiff {
        object_id: workshop.make_external_objid(object_id),
        edits: edits.into_vec(),
    }
}

fn construct_object(object_id: &ObjectId, workshop: &dyn PatchWorkshop) -> amp::Diff {
    // Safety: if the object is missing when we're generating a diff from
    // scratch then the document is corrupt
    let object = workshop.get_obj(object_id).expect("missing object");
    match object.obj_type {
        amp::ObjType::Map => amp::Diff::Map(construct_map(object_id, object, workshop)),
        amp::ObjType::Table => amp::Diff::Table(construct_table(object_id, object, workshop)),
        amp::ObjType::List => amp::Diff::List(construct_list(object_id, object, workshop)),
        amp::ObjType::Text => amp::Diff::Text(construct_text(object_id, object, workshop)),
    }
}
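All four `construct_*` functions above follow the same recursive pattern: build each node's diff by recursing into `Make` children and taking scalar values otherwise, bottoming out at the leaves. A toy model of that recursion with hypothetical simplified types:

```rust
use std::collections::BTreeMap;

// A node is either a scalar or a container of children; constructing a
// container means recursing into each child, just as construct_object
// dispatches back into construct_map/list/etc.
enum Node {
    Scalar(i64),
    Map(BTreeMap<String, Node>),
}

fn construct(node: &Node) -> String {
    match node {
        Node::Scalar(n) => n.to_string(),
        Node::Map(props) => {
            let parts: Vec<String> = props
                .iter()
                .map(|(k, v)| format!("{}: {}", k, construct(v)))
                .collect();
            format!("{{{}}}", parts.join(", "))
        }
    }
}

fn main() {
    let mut inner = BTreeMap::new();
    inner.insert("x".to_string(), Node::Scalar(1));
    let mut root = BTreeMap::new();
    root.insert("a".to_string(), Node::Map(inner));
    root.insert("b".to_string(), Node::Scalar(2));
    assert_eq!(construct(&Node::Map(root)), "{a: {x: 1}, b: 2}");
}
```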
19
automerge-backend/src/patches/gen_value_diff.rs
Normal file
@@ -0,0 +1,19 @@
use automerge_protocol as amp;

use super::PatchWorkshop;
use crate::op_handle::OpHandle;

pub(super) fn gen_value_diff(
    op: &OpHandle,
    value: &amp::ScalarValue,
    workshop: &dyn PatchWorkshop,
) -> amp::Diff {
    match value {
        amp::ScalarValue::Cursor(oid) => {
            // .expect() is okay here because we check that the cursor exists at the start of
            // `OpSet::apply_op()`
            amp::Diff::Cursor(workshop.find_cursor(oid).expect("missing cursor"))
        }
        _ => op.adjusted_value().into(),
    }
}
457
automerge-backend/src/patches/incremental_diff.rs
Normal file
|
@ -0,0 +1,457 @@
|
|||
use std::{
|
||||
borrow::Cow,
|
||||
collections::{HashMap, HashSet},
|
||||
};
|
||||
|
||||
use automerge_protocol as amp;
|
||||
|
||||
use super::{gen_value_diff::gen_value_diff, Edits, PatchWorkshop};
|
||||
use crate::{
|
||||
actor_map::ActorMap,
|
||||
internal::{InternalOpType, Key, ObjectId, OpId},
|
||||
object_store::ObjState,
|
||||
op_handle::OpHandle,
|
||||
};
|
||||
|
||||
/// Records a change that has happened as a result of an operation
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
enum PendingDiff {
|
||||
// contains the op handle, the index to insert after and the new element's id
|
||||
SeqInsert(OpHandle, usize, OpId),
|
||||
// contains the op handle, the index to insert after and the new element's id
|
||||
SeqUpdate(OpHandle, usize, OpId),
|
||||
SeqRemove(OpHandle, usize),
|
||||
Set(OpHandle),
|
||||
CursorChange(Key),
|
||||
}
|
||||
|
||||
impl PendingDiff {
|
||||
pub fn operation_key(&self) -> Cow<Key> {
|
||||
match self {
|
||||
Self::SeqInsert(op, ..)
|
||||
| Self::SeqUpdate(op, ..)
|
||||
| Self::SeqRemove(op, ..)
|
||||
| Self::Set(op) => op.operation_key(),
|
||||
Self::CursorChange(k) => Cow::Borrowed(k),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// `IncrementalPatch` is used to build patches which are a result of applying a `Change`. As the
|
||||
/// `OpSet` applies each op in the change it records the difference that will make in the
|
||||
/// `IncrementalPatch` using the various `record_*` methods. At the end of the change process the
|
||||
/// `IncrementalPatch::finalize` method is used to generate a `automerge_protocol::Diff` to send to
|
||||
/// the frontend.
|
||||
///
|
||||
/// The reason this is called an "incremental" patch is because it impliciatly generates a diff
|
||||
/// between the "current" state - represented by whatever was in the OpSet before the change was
|
||||
/// received - and the new state after the change is applied. This is in contrast to when we are
|
||||
/// generating a diff without any existing state, as in the case when we first load a saved
|
||||
/// document.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub(crate) struct IncrementalPatch(HashMap<ObjectId, Vec<PendingDiff>>);
|
||||
|
||||
impl IncrementalPatch {
|
||||
pub(crate) fn new() -> IncrementalPatch {
|
||||
IncrementalPatch(HashMap::new())
|
||||
}
|
||||
|
||||
pub(crate) fn record_set(&mut self, oid: &ObjectId, op: OpHandle) {
|
||||
self.append_diff(oid, PendingDiff::Set(op));
|
||||
}
|
||||
|
||||
pub(crate) fn record_cursor_change(&mut self, oid: &ObjectId, key: Key) {
|
||||
self.append_diff(oid, PendingDiff::CursorChange(key));
|
||||
}
|
||||
|
||||
pub(crate) fn record_seq_insert(
|
||||
&mut self,
|
||||
oid: &ObjectId,
|
||||
op: OpHandle,
|
||||
index: usize,
|
||||
opid: OpId,
|
||||
) {
|
||||
self.append_diff(oid, PendingDiff::SeqInsert(op, index, opid));
|
||||
}
|
||||
|
||||
pub(crate) fn record_seq_updates<'a, 'b, I: Iterator<Item = &'b OpHandle>>(
|
||||
&'a mut self,
|
||||
oid: &ObjectId,
|
||||
object: &ObjState,
|
||||
index: usize,
|
||||
ops: I,
|
||||
actors: &ActorMap,
|
||||
) {
|
||||
// TODO: Remove the actors argument and instead add a new case to the `PendingDiff`
|
||||
// enum to represent multiple seq updates, then sort by actor ID at the point at which we
|
||||
// finalize the diffs, when we have access to a `PatchWorkshop` to perform the sorting
|
||||
let diffs = self.0.entry(*oid).or_default();
|
||||
let mut new_diffs = Vec::new();
|
||||
'outer: for op in ops {
|
||||
let i = op
|
||||
.key
|
||||
.to_opid()
|
||||
.and_then(|opid| object.index_of(opid))
|
||||
.unwrap_or(0);
|
||||
if i == index {
|
||||
// go through existing diffs and find an insert
|
||||
for diff in diffs.iter_mut() {
|
||||
match diff {
|
||||
// if this insert was for the index we are now updating, and it is from the
|
||||
// same actor,
|
||||
// then change the insert to just insert our data instead
|
||||
PendingDiff::SeqInsert(original_op, index, original_opid)
|
||||
if i == *index && original_op.id.1 == op.id.1 =>
|
||||
{
|
||||
*diff = PendingDiff::SeqInsert(op.clone(), *index, *original_opid);
|
||||
continue 'outer;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
new_diffs.push(PendingDiff::SeqUpdate(op.clone(), index, op.id));
|
||||
}
|
||||
}
|
||||
new_diffs.sort_by_key(|d| {
|
||||
if let PendingDiff::SeqUpdate(op, _, _) = d {
|
||||
actors.export_actor(op.id.1)
|
||||
} else {
|
||||
// SAFETY: we only add SeqUpdates to this vec above.
|
||||
unreachable!()
|
||||
}
|
||||
});
|
||||
self.append_diffs(oid, new_diffs);
|
||||
}
    pub(crate) fn record_seq_remove(&mut self, oid: &ObjectId, op: OpHandle, index: usize) {
        self.append_diff(oid, PendingDiff::SeqRemove(op, index));
    }

    fn append_diff(&mut self, oid: &ObjectId, diff: PendingDiff) {
        self.0.entry(*oid).or_default().push(diff);
    }

    fn append_diffs(&mut self, oid: &ObjectId, mut diffs: Vec<PendingDiff>) {
        self.0.entry(*oid).or_default().append(&mut diffs);
    }

    pub(crate) fn changed_object_ids(&self) -> impl Iterator<Item = &ObjectId> {
        self.0.keys()
    }

    pub(crate) fn finalize(mut self, workshop: &dyn PatchWorkshop) -> amp::RootDiff {
        if self.0.is_empty() {
            return amp::RootDiff::default();
        }

        let mut objs: Vec<_> = self.changed_object_ids().copied().collect();
        while let Some(obj_id) = objs.pop() {
            if let Some(inbound) = workshop
                .get_obj(&obj_id)
                .and_then(|obj| obj.inbound.as_ref())
            {
                if !self.0.contains_key(&inbound.obj) {
                    // our parent was not changed - walk up the tree and try them too
                    objs.push(inbound.obj);
                }
                self.append_diff(&inbound.obj, PendingDiff::Set(inbound.clone()));
            }
        }

        if let Some(root) = self.0.remove(&ObjectId::Root) {
            // the pending diffs may contain duplicate keys - collect them into a set
            // so that each key is visited only once
            let keys: HashSet<_> = root.iter().map(PendingDiff::operation_key).collect();
            let mut props = HashMap::with_capacity(keys.len());
            let obj = workshop.get_obj(&ObjectId::Root).expect("no root found");
            for key in &keys {
                let key_string = workshop.key_to_string(key);
                let mut opid_to_value = HashMap::new();
                for op in obj.conflicts(key) {
                    let link = match op.action {
                        InternalOpType::Set(ref value) => gen_value_diff(op, value, workshop),
                        InternalOpType::Make(_) => self.gen_obj_diff(&op.id.into(), workshop),
                        _ => panic!("del or inc found in field_operations"),
                    };
                    opid_to_value.insert(workshop.make_external_opid(&op.id), link);
                }
                props.insert(key_string, opid_to_value);
            }
            amp::RootDiff { props }
        } else {
            amp::RootDiff {
                props: HashMap::new(),
            }
        }
    }

    fn gen_obj_diff(&self, obj_id: &ObjectId, workshop: &dyn PatchWorkshop) -> amp::Diff {
        // Safety: the pending diffs we are working with are all generated by
        // the OpSet, we should never have a missing object and if we do
        // there's nothing the user can do about that
        let obj = workshop
            .get_obj(obj_id)
            .expect("Missing object in internal diff");
        if let Some(pending) = self.0.get(obj_id) {
            match obj.obj_type {
                amp::ObjType::List => {
                    amp::Diff::List(self.gen_list_diff(obj_id, obj, pending, workshop))
                }
                amp::ObjType::Text => {
                    amp::Diff::Text(self.gen_text_diff(obj_id, obj, pending, workshop))
                }
                amp::ObjType::Map => {
                    amp::Diff::Map(self.gen_map_diff(obj_id, obj, pending, workshop))
                }
                amp::ObjType::Table => {
                    amp::Diff::Table(self.gen_table_diff(obj_id, obj, pending, workshop))
                }
            }
        } else {
            // no changes so just return empty edits or props
            match obj.obj_type {
                amp::ObjType::Map => amp::Diff::Map(amp::MapDiff {
                    object_id: workshop.make_external_objid(obj_id),
                    props: HashMap::new(),
                }),
                amp::ObjType::Table => amp::Diff::Table(amp::TableDiff {
                    object_id: workshop.make_external_objid(obj_id),
                    props: HashMap::new(),
                }),
                amp::ObjType::List => amp::Diff::List(amp::ListDiff {
                    object_id: workshop.make_external_objid(obj_id),
                    edits: Vec::new(),
                }),
                amp::ObjType::Text => amp::Diff::Text(amp::TextDiff {
                    object_id: workshop.make_external_objid(obj_id),
                    edits: Vec::new(),
                }),
            }
        }
    }

    fn gen_list_diff(
        &self,
        obj_id: &ObjectId,
        obj: &ObjState,
        pending: &[PendingDiff],
        workshop: &dyn PatchWorkshop,
    ) -> amp::ListDiff {
        let mut edits = Edits::new();
        // used to ensure we don't generate duplicate patches for some op ids (added to the pending
        // list to ensure we have a tree for deeper operations)
        let mut seen_op_ids = HashSet::new();
        for pending_edit in pending.iter() {
            match pending_edit {
                PendingDiff::SeqInsert(op, index, opid) => {
                    seen_op_ids.insert(op.id);
                    let value = match op.action {
                        InternalOpType::Set(ref value) => gen_value_diff(op, value, workshop),
                        InternalOpType::Make(_) => self.gen_obj_diff(&op.id.into(), workshop),
                        _ => panic!("del or inc found in field operations"),
                    };
                    let op_id = workshop.make_external_opid(opid);
                    edits.append_edit(amp::DiffEdit::SingleElementInsert {
                        index: *index as u64,
                        elem_id: op_id.clone().into(),
                        op_id: workshop.make_external_opid(&op.id),
                        value,
                    });
                }
                PendingDiff::SeqUpdate(op, index, opid) => {
                    seen_op_ids.insert(op.id);
                    let value = match op.action {
                        InternalOpType::Set(ref value) => gen_value_diff(op, value, workshop),
                        InternalOpType::Make(_) => self.gen_obj_diff(&op.id.into(), workshop),
                        InternalOpType::Del | InternalOpType::Inc(..) => {
                            // do nothing
                            continue;
                        }
                    };
                    edits.append_edit(amp::DiffEdit::Update {
                        index: *index as u64,
                        op_id: workshop.make_external_opid(opid),
                        value,
                    });
                }
                PendingDiff::SeqRemove(op, index) => {
                    seen_op_ids.insert(op.id);

                    edits.append_edit(amp::DiffEdit::Remove {
                        index: (*index) as u64,
                        count: 1,
                    });
                }
                PendingDiff::Set(op) => {
                    for op in obj.conflicts(&op.operation_key()) {
                        if !seen_op_ids.contains(&op.id) {
                            seen_op_ids.insert(op.id);
                            let value = match op.action {
                                InternalOpType::Set(ref value) => {
                                    gen_value_diff(op, value, workshop)
                                }
                                InternalOpType::Make(_) => {
                                    self.gen_obj_diff(&op.id.into(), workshop)
                                }
                                _ => panic!("del or inc found in field operations"),
                            };
                            edits.append_edit(amp::DiffEdit::Update {
                                index: obj.index_of(op.id).unwrap_or(0) as u64,
                                op_id: workshop.make_external_opid(&op.id),
                                value,
                            });
                        }
                    }
                }
                PendingDiff::CursorChange(_) => {
                    panic!("found cursor change pending diff while generating sequence diff");
                }
            }
        }
        amp::ListDiff {
            object_id: workshop.make_external_objid(obj_id),
            edits: edits.into_vec(),
        }
    }

    fn gen_text_diff(
        &self,
        obj_id: &ObjectId,
        obj: &ObjState,
        pending: &[PendingDiff],
        workshop: &dyn PatchWorkshop,
    ) -> amp::TextDiff {
        let mut edits = Edits::new();
        // used to ensure we don't generate duplicate patches for some op ids (added to the pending
        // list to ensure we have a tree for deeper operations)
        let mut seen_op_ids = HashSet::new();
        for pending_edit in pending.iter() {
            match pending_edit {
                PendingDiff::SeqInsert(op, index, opid) => {
                    seen_op_ids.insert(op.id);
                    let value = match op.action {
                        InternalOpType::Set(ref value) => gen_value_diff(op, value, workshop),
                        InternalOpType::Make(_) => self.gen_obj_diff(&op.id.into(), workshop),
                        _ => panic!("del or inc found in field operations"),
                    };
                    let op_id = workshop.make_external_opid(opid);
                    edits.append_edit(amp::DiffEdit::SingleElementInsert {
                        index: *index as u64,
                        elem_id: op_id.clone().into(),
                        op_id: workshop.make_external_opid(&op.id),
                        value,
                    });
                }
                PendingDiff::SeqUpdate(op, index, opid) => {
                    seen_op_ids.insert(op.id);
                    let value = match op.action {
                        InternalOpType::Set(ref value) => gen_value_diff(op, value, workshop),
                        InternalOpType::Make(_) => self.gen_obj_diff(&op.id.into(), workshop),
                        InternalOpType::Del | InternalOpType::Inc(..) => {
                            // do nothing
                            continue;
                        }
                    };
                    edits.append_edit(amp::DiffEdit::Update {
                        index: *index as u64,
                        op_id: workshop.make_external_opid(opid),
                        value,
                    });
                }
                PendingDiff::SeqRemove(op, index) => {
                    seen_op_ids.insert(op.id);

                    edits.append_edit(amp::DiffEdit::Remove {
                        index: (*index) as u64,
                        count: 1,
                    });
                }
                PendingDiff::Set(op) => {
                    for op in obj.conflicts(&op.operation_key()) {
                        if !seen_op_ids.contains(&op.id) {
                            seen_op_ids.insert(op.id);
                            let value = match op.action {
                                InternalOpType::Set(ref value) => {
                                    gen_value_diff(op, value, workshop)
                                }
                                InternalOpType::Make(_) => {
                                    self.gen_obj_diff(&op.id.into(), workshop)
                                }
                                _ => panic!("del or inc found in field operations"),
                            };
                            edits.append_edit(amp::DiffEdit::Update {
                                index: obj.index_of(op.id).unwrap_or(0) as u64,
                                op_id: workshop.make_external_opid(&op.id),
                                value,
                            });
                        }
                    }
                }
                PendingDiff::CursorChange(_) => {
                    panic!("found cursor change pending diff while generating sequence diff");
                }
            }
        }
        amp::TextDiff {
            object_id: workshop.make_external_objid(obj_id),
            edits: edits.into_vec(),
        }
    }

    fn gen_map_diff(
        &self,
        obj_id: &ObjectId,
        obj: &ObjState,
        pending: &[PendingDiff],
        workshop: &dyn PatchWorkshop,
    ) -> amp::MapDiff {
        // the pending diffs may contain duplicate keys - collect them into a set
        // so that each key is visited only once
        let keys: HashSet<_> = pending.iter().map(PendingDiff::operation_key).collect();
        let mut props = HashMap::with_capacity(keys.len());
        for key in &keys {
            let key_string = workshop.key_to_string(key);
            let mut opid_to_value = HashMap::new();
            for op in obj.conflicts(key) {
                let value = match op.action {
                    InternalOpType::Set(ref value) => gen_value_diff(op, value, workshop),
                    InternalOpType::Make(_) => self.gen_obj_diff(&op.id.into(), workshop),
                    _ => panic!("del or inc found in field_operations"),
                };
                opid_to_value.insert(workshop.make_external_opid(&op.id), value);
            }
            props.insert(key_string, opid_to_value);
        }
        amp::MapDiff {
            object_id: workshop.make_external_objid(obj_id),
            props,
        }
    }

    fn gen_table_diff(
        &self,
        obj_id: &ObjectId,
        obj: &ObjState,
        pending: &[PendingDiff],
        workshop: &dyn PatchWorkshop,
    ) -> amp::TableDiff {
        let mut props = HashMap::new();
        // the pending diffs may contain duplicate keys - collect them into a set
        // so that each key is visited only once
        let keys: HashSet<_> = pending.iter().map(PendingDiff::operation_key).collect();
        for key in &keys {
            let key_string = workshop.key_to_string(key);
            let mut opid_to_value = HashMap::new();
            for op in obj.conflicts(key) {
                let link = match op.action {
                    InternalOpType::Set(ref value) => gen_value_diff(op, value, workshop),
                    InternalOpType::Make(_) => self.gen_obj_diff(&op.id.into(), workshop),
                    _ => panic!("del or inc found in field_operations"),
                };
                opid_to_value.insert(workshop.make_external_opid(&op.id), link);
            }
            props.insert(key_string, opid_to_value);
        }
        amp::TableDiff {
            object_id: workshop.make_external_objid(obj_id),
            props,
        }
    }
}
23 automerge-backend/src/patches/patch_workshop.rs Normal file
@@ -0,0 +1,23 @@
use automerge_protocol as amp;
use smol_str::SmolStr;

use crate::{
    internal::{Key, ObjectId, OpId},
    object_store::ObjState,
};

/// An abstraction over the information `PendingDiffs` needs access to in order
/// to generate a `Patch`. In practice the implementation will always be an
/// `OpSet`, but this abstraction boundary makes it easier to avoid accidentally
/// coupling the patch generation to internals of the `OpSet`.
///
/// It's a "workshop" because it's not a factory: it doesn't do the actual
/// building of the patch. It's just where some tools to make the patch can be
/// found.
pub(crate) trait PatchWorkshop {
    fn key_to_string(&self, key: &Key) -> SmolStr;
    fn find_cursor(&self, opid: &OpId) -> Option<amp::CursorDiff>;
    fn get_obj(&self, object_id: &ObjectId) -> Option<&ObjState>;
    fn make_external_objid(&self, object_id: &ObjectId) -> amp::ObjectId;
    fn make_external_opid(&self, opid: &OpId) -> amp::OpId;
}
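// For orientation, a sketch of what the two `make_external_*` methods do: the
// backend stores compact internal IDs that index into an actor table, and
// patch generation translates them into the protocol-level IDs that patches
// carry. The body below is illustrative only and the field/method names are
// hypothetical; the real implementor of this trait is the `OpSet`.
//
//     fn make_external_opid(&self, opid: &OpId) -> amp::OpId {
//         // internal OpId(counter, actor_index) -> amp::OpId(counter, ActorId)
//         amp::OpId(opid.0, self.actors.lookup(opid.1).clone())
//     }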
367 automerge-backend/src/sync.rs Normal file
@@ -0,0 +1,367 @@
use std::{
    borrow::Cow,
    collections::{HashMap, HashSet},
    convert::TryFrom,
    io,
    io::Write,
};

use automerge_protocol::{ChangeHash, Patch};

use crate::{
    decoding, decoding::Decoder, encoding, encoding::Encodable, AutomergeError, Backend, Change,
};

mod bloom;
mod state;

pub use bloom::BloomFilter;
pub use state::{SyncHave, SyncState};

const HASH_SIZE: usize = 32; // 256 bits = 32 bytes
const MESSAGE_TYPE_SYNC: u8 = 0x42; // first byte of a sync message, for identification

impl Backend {
    pub fn generate_sync_message(&self, sync_state: &mut SyncState) -> Option<SyncMessage> {
        let our_heads = self.get_heads();

        let our_need = self.get_missing_deps(sync_state.their_heads.as_ref().unwrap_or(&vec![]));

        let their_heads_set = if let Some(ref heads) = sync_state.their_heads {
            heads.iter().collect::<HashSet<_>>()
        } else {
            HashSet::new()
        };
        let our_have = if our_need.iter().all(|hash| their_heads_set.contains(hash)) {
            vec![self.make_bloom_filter(sync_state.shared_heads.clone())]
        } else {
            Vec::new()
        };

        if let Some(ref their_have) = sync_state.their_have {
            if let Some(first_have) = their_have.first().as_ref() {
                if !first_have
                    .last_sync
                    .iter()
                    .all(|hash| self.get_change_by_hash(hash).is_some())
                {
                    let reset_msg = SyncMessage {
                        heads: our_heads,
                        need: Vec::new(),
                        have: vec![SyncHave::default()],
                        changes: Vec::new(),
                    };
                    return Some(reset_msg);
                }
            }
        }

        let mut changes_to_send = if let (Some(their_have), Some(their_need)) = (
            sync_state.their_have.as_ref(),
            sync_state.their_need.as_ref(),
        ) {
            self.get_changes_to_send(their_have.clone(), their_need)
        } else {
            Vec::new()
        };

        let heads_unchanged = if let Some(last_sent_heads) = sync_state.last_sent_heads.as_ref() {
            last_sent_heads == &our_heads
        } else {
            false
        };

        let heads_equal = if let Some(their_heads) = sync_state.their_heads.as_ref() {
            their_heads == &our_heads
        } else {
            false
        };

        if heads_unchanged && heads_equal && changes_to_send.is_empty() {
            return None;
        }

        // deduplicate the changes to send with those we have already sent
        changes_to_send.retain(|change| !sync_state.sent_hashes.contains(&change.hash));

        sync_state.last_sent_heads = Some(our_heads.clone());
        sync_state
            .sent_hashes
            .extend(changes_to_send.iter().map(|c| c.hash));

        let sync_message = SyncMessage {
            heads: our_heads,
            have: our_have,
            need: our_need,
            changes: changes_to_send.into_iter().cloned().collect(),
        };

        Some(sync_message)
    }

    pub fn receive_sync_message(
        &mut self,
        sync_state: &mut SyncState,
        message: SyncMessage,
    ) -> Result<Option<Patch>, AutomergeError> {
        let mut patch = None;

        let before_heads = self.get_heads();

        let SyncMessage {
            heads: message_heads,
            changes: message_changes,
            need: message_need,
            have: message_have,
        } = message;

        let changes_is_empty = message_changes.is_empty();
        if !changes_is_empty {
            patch = Some(self.apply_changes(message_changes)?);
            sync_state.shared_heads = advance_heads(
                &before_heads.iter().collect(),
                &self.get_heads().into_iter().collect(),
                &sync_state.shared_heads,
            );
        }

        // trim down the sent hashes to those that we know they haven't seen
        self.filter_changes(&message_heads, &mut sync_state.sent_hashes);

        if changes_is_empty && message_heads == before_heads {
            sync_state.last_sent_heads = Some(message_heads.clone());
        }

        let known_heads = message_heads
            .iter()
            .filter(|head| self.get_change_by_hash(head).is_some())
            .collect::<Vec<_>>();
        if known_heads.len() == message_heads.len() {
            sync_state.shared_heads = message_heads.clone();
        } else {
            sync_state.shared_heads = sync_state
                .shared_heads
                .iter()
                .chain(known_heads)
                .collect::<HashSet<_>>()
                .into_iter()
                .copied()
                .collect::<Vec<_>>();
            sync_state.shared_heads.sort();
        }

        sync_state.their_have = Some(message_have);
        sync_state.their_heads = Some(message_heads);
        sync_state.their_need = Some(message_need);

        Ok(patch)
    }

    fn make_bloom_filter(&self, last_sync: Vec<ChangeHash>) -> SyncHave {
        let new_changes = self.get_changes(&last_sync);
        let hashes = new_changes
            .into_iter()
            .map(|change| change.hash)
            .collect::<Vec<_>>();
        SyncHave {
            last_sync,
            bloom: BloomFilter::from(&hashes[..]),
        }
    }

    pub fn get_changes_to_send(&self, have: Vec<SyncHave>, need: &[ChangeHash]) -> Vec<&Change> {
        if have.is_empty() {
            need.iter()
                .filter_map(|hash| self.get_change_by_hash(hash))
                .collect()
        } else {
            let mut last_sync_hashes = HashSet::new();
            let mut bloom_filters = Vec::with_capacity(have.len());

            for h in have {
                let SyncHave { last_sync, bloom } = h;
                for hash in last_sync {
                    last_sync_hashes.insert(hash);
                }
                bloom_filters.push(bloom);
            }
            let last_sync_hashes = last_sync_hashes.into_iter().collect::<Vec<_>>();

            let changes = self.get_changes(&last_sync_hashes);

            let mut change_hashes = HashSet::with_capacity(changes.len());
            let mut dependents: HashMap<ChangeHash, Vec<ChangeHash>> = HashMap::new();
            let mut hashes_to_send = HashSet::new();

            for change in &changes {
                change_hashes.insert(change.hash);

                for dep in &change.deps {
                    dependents.entry(*dep).or_default().push(change.hash);
                }

                if bloom_filters
                    .iter()
                    .all(|bloom| !bloom.contains_hash(&change.hash))
                {
                    hashes_to_send.insert(change.hash);
                }
            }

            let mut stack = hashes_to_send.iter().copied().collect::<Vec<_>>();
            while let Some(hash) = stack.pop() {
                if let Some(deps) = dependents.get(&hash) {
                    for dep in deps {
                        if hashes_to_send.insert(*dep) {
                            stack.push(*dep);
                        }
                    }
                }
            }

            let mut changes_to_send = Vec::new();
            for hash in need {
                hashes_to_send.insert(*hash);
                if !change_hashes.contains(hash) {
                    let change = self.get_change_by_hash(hash);
                    if let Some(change) = change {
                        changes_to_send.push(change);
                    }
                }
            }

            for change in changes {
                if hashes_to_send.contains(&change.hash) {
                    changes_to_send.push(change);
                }
            }
            changes_to_send
        }
    }
}
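
// A minimal sketch (illustrative, not part of this module) of driving the
// protocol above between two peers. `alice` and `bob` are assumed to be
// `Backend`s that already hold changes; each side keeps one `SyncState` per
// remote peer, and the loop runs until neither side has anything left to send.
fn sync_to_quiescence(alice: &mut Backend, bob: &mut Backend) {
    let mut alice_state = SyncState::default();
    let mut bob_state = SyncState::default();
    loop {
        let a_msg = alice.generate_sync_message(&mut alice_state);
        let b_msg = bob.generate_sync_message(&mut bob_state);
        if a_msg.is_none() && b_msg.is_none() {
            break; // neither side has anything left to say
        }
        if let Some(msg) = a_msg {
            // a real transport would ship the encoded bytes over the network
            let bytes = msg.encode().unwrap();
            let msg = SyncMessage::decode(&bytes).unwrap();
            bob.receive_sync_message(&mut bob_state, msg).unwrap();
        }
        if let Some(msg) = b_msg {
            alice.receive_sync_message(&mut alice_state, msg).unwrap();
        }
    }
}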

#[derive(Debug, Clone)]
pub struct SyncMessage {
    pub heads: Vec<ChangeHash>,
    pub need: Vec<ChangeHash>,
    pub have: Vec<SyncHave>,
    pub changes: Vec<Change>,
}

impl SyncMessage {
    pub fn encode(self) -> Result<Vec<u8>, encoding::Error> {
        let mut buf = vec![MESSAGE_TYPE_SYNC];

        encode_hashes(&mut buf, &self.heads)?;
        encode_hashes(&mut buf, &self.need)?;
        (self.have.len() as u32).encode(&mut buf)?;
        for have in self.have {
            encode_hashes(&mut buf, &have.last_sync)?;
            have.bloom.into_bytes()?.encode(&mut buf)?;
        }

        (self.changes.len() as u32).encode(&mut buf)?;
        for change in self.changes {
            change.raw_bytes().encode(&mut buf)?;
        }

        Ok(buf)
    }

    pub fn decode(bytes: &[u8]) -> Result<SyncMessage, decoding::Error> {
        let mut decoder = Decoder::new(Cow::Borrowed(bytes));

        let message_type = decoder.read::<u8>()?;
        if message_type != MESSAGE_TYPE_SYNC {
            return Err(decoding::Error::WrongType {
                expected_one_of: vec![MESSAGE_TYPE_SYNC],
                found: message_type,
            });
        }

        let heads = decode_hashes(&mut decoder)?;
        let need = decode_hashes(&mut decoder)?;
        let have_count = decoder.read::<u32>()?;
        let mut have = Vec::with_capacity(have_count as usize);
        for _ in 0..have_count {
            let last_sync = decode_hashes(&mut decoder)?;
            let bloom_bytes: Vec<u8> = decoder.read()?;
            let bloom = BloomFilter::try_from(bloom_bytes.as_slice())?;
            have.push(SyncHave { last_sync, bloom });
        }

        let change_count = decoder.read::<u32>()?;
        let mut changes = Vec::with_capacity(change_count as usize);
        for _ in 0..change_count {
            let change = decoder.read()?;
            changes.push(Change::from_bytes(change)?);
        }

        Ok(SyncMessage {
            heads,
            need,
            have,
            changes,
        })
    }
}

fn encode_hashes(buf: &mut Vec<u8>, hashes: &[ChangeHash]) -> Result<(), encoding::Error> {
    debug_assert!(
        hashes.windows(2).all(|h| h[0] <= h[1]),
        "hashes were not sorted"
    );
    hashes.encode(buf)?;
    Ok(())
}

impl Encodable for &[ChangeHash] {
    fn encode<W: Write>(&self, buf: &mut W) -> io::Result<usize> {
        let head = self.len().encode(buf)?;
        let mut body = 0;
        for hash in self.iter() {
            buf.write_all(&hash.0)?;
            body += hash.0.len();
        }
        Ok(head + body)
    }
}

fn decode_hashes(decoder: &mut Decoder) -> Result<Vec<ChangeHash>, decoding::Error> {
    let length = decoder.read::<u32>()?;
    let mut hashes = Vec::with_capacity(length as usize);

    for _ in 0..length {
        let hash_bytes = decoder.read_bytes(HASH_SIZE)?;
        let hash = ChangeHash::try_from(hash_bytes).map_err(decoding::Error::BadChangeFormat)?;
        hashes.push(hash);
    }

    Ok(hashes)
}
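
// Wire-format sketch (illustrative, not part of the module): a hash list is a
// LEB128 length prefix followed by 32 raw bytes per hash, so for fewer than
// 128 hashes the encoding occupies 1 + 32 * n bytes.
fn hash_list_roundtrip(hashes: &[ChangeHash]) {
    let mut buf = Vec::new();
    encode_hashes(&mut buf, hashes).unwrap(); // hashes must already be sorted
    assert_eq!(buf.len(), 1 + HASH_SIZE * hashes.len()); // single-byte prefix while n < 128
    let decoded = decode_hashes(&mut Decoder::new(Cow::Borrowed(buf.as_slice()))).unwrap();
    assert_eq!(decoded, hashes);
}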

fn advance_heads(
    my_old_heads: &HashSet<&ChangeHash>,
    my_new_heads: &HashSet<ChangeHash>,
    our_old_shared_heads: &[ChangeHash],
) -> Vec<ChangeHash> {
    let new_heads = my_new_heads
        .iter()
        .filter(|head| !my_old_heads.contains(head))
        .copied()
        .collect::<Vec<_>>();

    let common_heads = our_old_shared_heads
        .iter()
        .filter(|head| my_new_heads.contains(head))
        .copied()
        .collect::<Vec<_>>();

    let mut advanced_heads = HashSet::with_capacity(new_heads.len() + common_heads.len());
    for head in new_heads.into_iter().chain(common_heads) {
        advanced_heads.insert(head);
    }
    let mut advanced_heads = advanced_heads.into_iter().collect::<Vec<_>>();
    advanced_heads.sort();
    advanced_heads
}
@@ -1,7 +1,8 @@
use std::borrow::Borrow;
use std::{borrow::Cow, convert::TryFrom};

use crate::storage::parse;
use crate::ChangeHash;
use automerge_protocol::ChangeHash;

use crate::{decoding, decoding::Decoder, encoding, encoding::Encodable};

// These constants correspond to a 1% false positive rate. The values can be changed without
// breaking compatibility of the network protocol, since the parameters used for a particular
@@ -9,7 +10,7 @@ use crate::ChangeHash;
const BITS_PER_ENTRY: u32 = 10;
const NUM_PROBES: u32 = 7;

#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize)]
#[derive(Default, Debug, Clone)]
pub struct BloomFilter {
    num_entries: u32,
    num_bits_per_entry: u32,
@@ -17,52 +18,17 @@ pub struct BloomFilter {
    bits: Vec<u8>,
}

impl Default for BloomFilter {
    fn default() -> Self {
        BloomFilter {
            num_entries: 0,
            num_bits_per_entry: BITS_PER_ENTRY,
            num_probes: NUM_PROBES,
            bits: Vec::new(),
        }
    }
}

#[derive(Debug, thiserror::Error)]
pub(crate) enum ParseError {
    #[error(transparent)]
    Leb128(#[from] parse::leb128::Error),
}

impl BloomFilter {
    pub fn to_bytes(&self) -> Vec<u8> {
        let mut buf = Vec::new();
        if self.num_entries != 0 {
            leb128::write::unsigned(&mut buf, self.num_entries as u64).unwrap();
            leb128::write::unsigned(&mut buf, self.num_bits_per_entry as u64).unwrap();
            leb128::write::unsigned(&mut buf, self.num_probes as u64).unwrap();
            buf.extend(&self.bits);
        }
        buf
    }

    pub(crate) fn parse(input: parse::Input<'_>) -> parse::ParseResult<'_, Self, ParseError> {
        if input.is_empty() {
            Ok((input, Self::default()))
    pub fn into_bytes(self) -> Result<Vec<u8>, encoding::Error> {
        if self.num_entries == 0 {
            Ok(Vec::new())
        } else {
            let (i, num_entries) = parse::leb128_u32(input)?;
            let (i, num_bits_per_entry) = parse::leb128_u32(i)?;
            let (i, num_probes) = parse::leb128_u32(i)?;
            let (i, bits) = parse::take_n(bits_capacity(num_entries, num_bits_per_entry), i)?;
            Ok((
                i,
                Self {
                    num_entries,
                    num_bits_per_entry,
                    num_probes,
                    bits: bits.to_vec(),
                },
            ))
            let mut buf = Vec::new();
            self.num_entries.encode(&mut buf)?;
            self.num_bits_per_entry.encode(&mut buf)?;
            self.num_probes.encode(&mut buf)?;
            buf.extend(self.bits);
            Ok(buf)
        }
    }

@@ -79,8 +45,7 @@ impl BloomFilter {
        let z = u32::from_le_bytes([hash_bytes[8], hash_bytes[9], hash_bytes[10], hash_bytes[11]])
            % modulo;

        let mut probes = Vec::with_capacity(self.num_probes as usize);
        probes.push(x);
        let mut probes = vec![x];
        for _ in 1..self.num_probes {
            x = (x + y) % modulo;
            y = (y + z) % modulo;

@@ -121,23 +86,6 @@ impl BloomFilter {
        true
    }
}

    pub fn from_hashes<H: Borrow<ChangeHash>>(hashes: impl ExactSizeIterator<Item = H>) -> Self {
        let num_entries = hashes.len() as u32;
        let num_bits_per_entry = BITS_PER_ENTRY;
        let num_probes = NUM_PROBES;
        let bits = vec![0; bits_capacity(num_entries, num_bits_per_entry)];
        let mut filter = Self {
            num_entries,
            num_bits_per_entry,
            num_probes,
            bits,
        };
        for hash in hashes {
            filter.add_hash(hash.borrow());
        }
        filter
    }
}

fn bits_capacity(num_entries: u32, num_bits_per_entry: u32) -> usize {

@@ -145,16 +93,44 @@ fn bits_capacity(num_entries: u32, num_bits_per_entry: u32) -> usize {
    f as usize
}

#[derive(thiserror::Error, Debug)]
#[error("{0}")]
pub struct DecodeError(String);

impl TryFrom<&[u8]> for BloomFilter {
    type Error = DecodeError;

    fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> {
        Self::parse(parse::Input::new(bytes))
            .map(|(_, b)| b)
            .map_err(|e| DecodeError(e.to_string()))
impl From<&[ChangeHash]> for BloomFilter {
    fn from(hashes: &[ChangeHash]) -> Self {
        let num_entries = hashes.len() as u32;
        let num_bits_per_entry = BITS_PER_ENTRY;
        let num_probes = NUM_PROBES;
        let bits = vec![0; bits_capacity(num_entries, num_bits_per_entry) as usize];
        let mut filter = Self {
            num_entries,
            num_bits_per_entry,
            num_probes,
            bits,
        };
        for hash in hashes {
            filter.add_hash(hash);
        }
        filter
    }
}

impl TryFrom<&[u8]> for BloomFilter {
    type Error = decoding::Error;

    fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> {
        if bytes.is_empty() {
            Ok(Self::default())
        } else {
            let mut decoder = Decoder::new(Cow::Borrowed(bytes));
            let num_entries = decoder.read()?;
            let num_bits_per_entry = decoder.read()?;
            let num_probes = decoder.read()?;
            let bits =
                decoder.read_bytes(bits_capacity(num_entries, num_bits_per_entry) as usize)?;
            Ok(Self {
                num_entries,
                num_bits_per_entry,
                num_probes,
                bits: bits.to_vec(),
            })
        }
    }
}
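
// Usage sketch (illustrative, using the `automerge-backend` side of the diff
// above): build a filter over change hashes, probe it, and round-trip it
// through the byte encoding used inside `SyncHave`. A bloom filter never
// reports false negatives, so members always test true; with the constants
// above, non-members test true about 1% of the time.
fn bloom_roundtrip(hashes: &[ChangeHash]) {
    let filter = BloomFilter::from(hashes);
    assert!(hashes.iter().all(|h| filter.contains_hash(h)));
    let bytes = filter.into_bytes().unwrap();
    let decoded = BloomFilter::try_from(bytes.as_slice()).unwrap();
    assert!(hashes.iter().all(|h| decoded.contains_hash(h)));
}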
67 automerge-backend/src/sync/state.rs Normal file
@@ -0,0 +1,67 @@
use std::{borrow::Cow, collections::HashSet};

use automerge_protocol::ChangeHash;

use super::{decode_hashes, encode_hashes};
use crate::{decoding, decoding::Decoder, encoding, BloomFilter};

const SYNC_STATE_TYPE: u8 = 0x43; // first byte of an encoded sync state, for identification

#[derive(Debug, Clone)]
pub struct SyncState {
    pub shared_heads: Vec<ChangeHash>,
    pub last_sent_heads: Option<Vec<ChangeHash>>,
    pub their_heads: Option<Vec<ChangeHash>>,
    pub their_need: Option<Vec<ChangeHash>>,
    pub their_have: Option<Vec<SyncHave>>,
    pub sent_hashes: HashSet<ChangeHash>,
}

#[derive(Debug, Clone, Default)]
pub struct SyncHave {
    pub last_sync: Vec<ChangeHash>,
    pub bloom: BloomFilter,
}

impl SyncState {
    pub fn encode(&self) -> Result<Vec<u8>, encoding::Error> {
        let mut buf = vec![SYNC_STATE_TYPE];
        encode_hashes(&mut buf, &self.shared_heads)?;
        Ok(buf)
    }

    pub fn decode(bytes: &[u8]) -> Result<Self, decoding::Error> {
        let mut decoder = Decoder::new(Cow::Borrowed(bytes));

        let record_type = decoder.read::<u8>()?;
        if record_type != SYNC_STATE_TYPE {
            return Err(decoding::Error::WrongType {
                expected_one_of: vec![SYNC_STATE_TYPE],
                found: record_type,
            });
        }

        let shared_heads = decode_hashes(&mut decoder)?;
        Ok(Self {
            shared_heads,
            last_sent_heads: Some(Vec::new()),
            their_heads: None,
            their_need: None,
            their_have: Some(Vec::new()),
            sent_hashes: HashSet::new(),
        })
    }
}

impl Default for SyncState {
    fn default() -> Self {
        Self {
            shared_heads: Vec::new(),
            last_sent_heads: Some(Vec::new()),
            their_heads: None,
            their_need: None,
            their_have: None,
            sent_hashes: HashSet::new(),
        }
    }
}
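
// Persistence sketch (illustrative, not part of the module): `encode` writes
// only `shared_heads`, so a decoded state keeps the durable record of what
// both peers are known to have, while the per-session fields (their_heads,
// their_need, sent_hashes, ...) start fresh.
fn sync_state_roundtrip(state: &SyncState) {
    let bytes = state.encode().unwrap();
    let restored = SyncState::decode(&bytes).unwrap();
    assert_eq!(restored.shared_heads, state.shared_heads);
    assert!(restored.their_heads.is_none()); // session data is not persisted
}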
1435 automerge-backend/tests/apply_change.rs Normal file
File diff suppressed because it is too large
611 automerge-backend/tests/apply_local_change.rs Normal file
@@ -0,0 +1,611 @@
extern crate automerge_backend;
use std::{collections::HashSet, convert::TryInto, num::NonZeroU32};

use amp::{RootDiff, SortedVec};
use automerge_backend::{Backend, Change};
use automerge_protocol as amp;
use automerge_protocol::{
    ActorId, ChangeHash, Diff, DiffEdit, ElementId, ListDiff, ObjType, ObjectId, Op, OpType, Patch,
};
use maplit::hashmap;

#[test]
fn test_apply_local_change() {
    let actor: ActorId = "eb738e04ef8848ce8b77309b6c7f7e39".try_into().unwrap();
    let change_request = amp::Change {
        actor_id: actor.clone(),
        time: 0,
        message: None,
        hash: None,
        seq: 1,
        deps: Vec::new(),
        start_op: 1,
        operations: vec![Op {
            action: amp::OpType::Set("magpie".into()),
            key: "bird".into(),
            obj: ObjectId::Root,
            insert: false,
            pred: SortedVec::new(),
        }],
        extra_bytes: Vec::new(),
    };

    let mut backend = Backend::new();
    let patch = backend.apply_local_change(change_request).unwrap().0;

    let changes = backend.get_changes(&[]);
    let expected_change = amp::Change {
        actor_id: actor.clone(),
        seq: 1,
        start_op: 1,
        time: changes[0].time,
        message: None,
        hash: None,
        deps: Vec::new(),
        operations: vec![Op {
            action: OpType::Set("magpie".into()),
            obj: ObjectId::Root,
            key: "bird".into(),
            pred: SortedVec::new(),
            insert: false,
        }],
        extra_bytes: Vec::new(),
    }
    .try_into()
    .unwrap();
    assert_eq!(changes[0], &expected_change);

    let expected_patch = Patch {
        actor: Some(actor.clone()),
        max_op: 1,
        pending_changes: 0,
        seq: Some(1),
        clock: hashmap! {
            actor => 1,
        },
        deps: Vec::new(),
        diffs: RootDiff {
            props: hashmap! {
                "bird".into() => hashmap!{
                    "1@eb738e04ef8848ce8b77309b6c7f7e39".try_into().unwrap() => Diff::Value("magpie".into())
                }
            },
        },
    };
    assert_eq!(patch, expected_patch);
}

#[test]
fn test_error_on_duplicate_requests() {
    let actor: ActorId = "37704788917a499cb0206fa8519ac4d9".try_into().unwrap();
    let change_request1 = amp::Change {
        actor_id: actor.clone(),
        seq: 1,
        message: None,
        hash: None,
        time: 0,
        deps: Vec::new(),
        start_op: 1,
        operations: vec![Op {
            action: amp::OpType::Set("magpie".into()),
            obj: ObjectId::Root,
            key: "bird".into(),
            insert: false,
            pred: SortedVec::new(),
        }],
        extra_bytes: Vec::new(),
    };

    let change_request2 = amp::Change {
        actor_id: actor,
        seq: 2,
        message: None,
        hash: None,
        time: 0,
        deps: Vec::new(),
        start_op: 2,
        operations: vec![Op {
            action: amp::OpType::Set("jay".into()),
            obj: ObjectId::Root,
            key: "bird".into(),
            insert: false,
            pred: SortedVec::new(),
        }],
        extra_bytes: Vec::new(),
    };
    let mut backend = Backend::new();
    backend.apply_local_change(change_request1.clone()).unwrap();
    backend.apply_local_change(change_request2.clone()).unwrap();
    assert!(backend.apply_local_change(change_request1).is_err());
    assert!(backend.apply_local_change(change_request2).is_err());
}

#[test]
fn test_handle_concurrent_frontend_and_backend_changes() {
    let actor: ActorId = "cb55260e9d7e457886a4fc73fd949202".try_into().unwrap();
    let local1 = amp::Change {
        actor_id: actor.clone(),
        seq: 1,
        time: 0,
        deps: Vec::new(),
        message: None,
        hash: None,
        start_op: 1,
        operations: vec![Op {
            action: amp::OpType::Set("magpie".into()),
            obj: ObjectId::Root,
            key: "bird".into(),
            insert: false,
            pred: SortedVec::new(),
        }],
        extra_bytes: Vec::new(),
    };

    let local2 = amp::Change {
        actor_id: actor.clone(),
        seq: 2,
        start_op: 2,
        time: 0,
        deps: Vec::new(),
        message: None,
        hash: None,
        operations: vec![Op {
            action: amp::OpType::Set("jay".into()),
            obj: ObjectId::Root,
            key: "bird".into(),
            insert: false,
            pred: vec![actor.op_id_at(1)].into(),
        }],
        extra_bytes: Vec::new(),
    };
    let remote_actor: ActorId = "6d48a01318644eed90455d2cb68ac657".try_into().unwrap();
    let remote1 = amp::Change {
        actor_id: remote_actor.clone(),
        seq: 1,
        start_op: 1,
        time: 0,
        deps: Vec::new(),
        message: None,
        hash: None,
        operations: vec![Op {
            action: amp::OpType::Set("goldfish".into()),
            obj: ObjectId::Root,
            key: "fish".into(),
            pred: SortedVec::new(),
            insert: false,
        }],
        extra_bytes: Vec::new(),
    }
    .try_into()
    .unwrap();

    let mut expected_change1 = amp::Change {
        actor_id: actor.clone(),
        seq: 1,
        start_op: 1,
        time: 0,
        message: None,
        hash: None,
        deps: Vec::new(),
        operations: vec![Op {
            action: amp::OpType::Set("magpie".into()),
            obj: ObjectId::Root,
            key: "bird".into(),
            pred: SortedVec::new(),
            insert: false,
        }],
        extra_bytes: Vec::new(),
    };

    let mut expected_change2 = amp::Change {
        actor_id: remote_actor,
        seq: 1,
        start_op: 1,
        time: 0,
        message: None,
        hash: None,
        deps: Vec::new(),
        operations: vec![Op {
            action: amp::OpType::Set("goldfish".into()),
            key: "fish".into(),
            obj: ObjectId::Root,
            pred: SortedVec::new(),
            insert: false,
        }],
        extra_bytes: Vec::new(),
    };

    let mut expected_change3 = amp::Change {
        actor_id: actor.clone(),
        seq: 2,
        start_op: 2,
        time: 0,
        message: None,
        hash: None,
        deps: Vec::new(),
        operations: vec![Op {
            action: amp::OpType::Set("jay".into()),
            obj: ObjectId::Root,
            key: "bird".into(),
            pred: vec![actor.op_id_at(1)].into(),
            insert: false,
        }],
        extra_bytes: Vec::new(),
    };
    let mut backend = Backend::new();
    backend.apply_local_change(local1).unwrap();
    let backend_after_first = backend.clone();
    let changes1 = backend_after_first.get_changes(&[]);
    let change01 = changes1.get(0).unwrap();

    backend.apply_changes(vec![remote1]).unwrap();
    let backend_after_second = backend.clone();
    let changes2 = backend_after_second.get_changes(&[change01.hash]);
    let change12 = *changes2.get(0).unwrap();

    backend.apply_local_change(local2).unwrap();
    let changes3 = backend.get_changes(&[change01.hash, change12.hash]);
    let change23 = changes3.get(0).unwrap();

    expected_change1.time = change01.time;
    expected_change2.time = change12.time;
    expected_change3.time = change23.time;
    expected_change3.deps = vec![change01.hash];

    assert_eq!(change01, &&expected_change1.try_into().unwrap());
    assert_eq!(change12, &expected_change2.try_into().unwrap());
    assert_changes_equal(change23.decode(), expected_change3.clone());
    assert_eq!(change23, &&expected_change3.try_into().unwrap());
}

#[test]
fn test_transform_list_indexes_into_element_ids() {
    let actor: ActorId = "8f389df8fecb4ddc989102321af3578e".try_into().unwrap();
    let remote_actor: ActorId = "9ba21574dc44411b8ce37bc6037a9687".try_into().unwrap();
    let remote1: Change = amp::Change {
        actor_id: remote_actor.clone(),
        seq: 1,
        start_op: 1,
        time: 0,
        message: None,
        hash: None,
        deps: Vec::new(),
        operations: vec![Op {
            action: amp::OpType::Make(ObjType::List),
            key: "birds".into(),
            obj: ObjectId::Root,
            pred: SortedVec::new(),
            insert: false,
        }],
        extra_bytes: Vec::new(),
    }
    .try_into()
    .unwrap();

    let remote2: Change = amp::Change {
        actor_id: remote_actor.clone(),
        seq: 2,
        start_op: 2,
        time: 0,
        message: None,
        hash: None,
        deps: vec![remote1.hash],
        operations: vec![Op {
            action: amp::OpType::Set("magpie".into()),
            obj: ObjectId::from(remote_actor.op_id_at(1)),
            key: ElementId::Head.into(),
            insert: true,
            pred: SortedVec::new(),
        }],
        extra_bytes: Vec::new(),
    }
    .try_into()
    .unwrap();

    let local1 = amp::Change {
        actor_id: actor.clone(),
        seq: 1,
        message: None,
        hash: None,
        time: 0,
        deps: vec![remote1.hash],
        start_op: 2,
        operations: vec![Op {
            obj: ObjectId::from(remote_actor.op_id_at(1)),
            action: amp::OpType::Set("goldfinch".into()),
            key: ElementId::Head.into(),
            insert: true,
            pred: SortedVec::new(),
        }],
        extra_bytes: Vec::new(),
    };
    let local2 = amp::Change {
        actor_id: actor.clone(),
        seq: 2,
        message: None,
        hash: None,
        deps: Vec::new(),
        time: 0,
        start_op: 3,
        operations: vec![Op {
            obj: ObjectId::from(remote_actor.op_id_at(1)),
            action: amp::OpType::Set("wagtail".into()),
            key: actor.op_id_at(2).into(),
            insert: true,
            pred: SortedVec::new(),
        }],
        extra_bytes: Vec::new(),
    };

    let local3 = amp::Change {
        actor_id: actor.clone(),
        seq: 3,
        message: None,
        hash: None,
        deps: vec![remote2.hash],
        time: 0,
        start_op: 4,
        operations: vec![
            Op {
                obj: ObjectId::from(remote_actor.op_id_at(1)),
                action: amp::OpType::Set("Magpie".into()),
                key: remote_actor.op_id_at(2).into(),
                insert: false,
                pred: vec![remote_actor.op_id_at(2)].into(),
            },
            Op {
                obj: ObjectId::from(remote_actor.op_id_at(1)),
                action: amp::OpType::Set("Goldfinch".into()),
                key: actor.op_id_at(2).into(),
                insert: false,
                pred: vec![actor.op_id_at(2)].into(),
            },
        ],
        extra_bytes: Vec::new(),
    };

    let mut expected_change1 = amp::Change {
        actor_id: actor.clone(),
        seq: 1,
        start_op: 2,
        time: 0,
        message: None,
        hash: None,
        deps: vec![remote1.hash],
        operations: vec![Op {
            obj: ObjectId::from(remote_actor.op_id_at(1)),
            action: amp::OpType::Set("goldfinch".into()),
            key: ElementId::Head.into(),
            insert: true,
            pred: SortedVec::new(),
        }],
        extra_bytes: Vec::new(),
    };
    let mut expected_change2 = amp::Change {
        actor_id: actor.clone(),
        seq: 2,
        start_op: 3,
        time: 0,
        message: None,
        hash: None,
        deps: Vec::new(),
        operations: vec![Op {
            obj: ObjectId::from(remote_actor.op_id_at(1)),
            action: amp::OpType::Set("wagtail".into()),
            key: actor.op_id_at(2).into(),
            insert: true,
            pred: SortedVec::new(),
        }],
        extra_bytes: Vec::new(),
    };
    let mut expected_change3 = amp::Change {
        actor_id: actor.clone(),
        seq: 3,
        start_op: 4,
        time: 0,
        message: None,
        hash: None,
        deps: Vec::new(),
        operations: vec![
            Op {
                obj: ObjectId::from(remote_actor.op_id_at(1)),
                action: amp::OpType::Set("Magpie".into()),
                key: remote_actor.op_id_at(2).into(),
                pred: vec![remote_actor.op_id_at(2)].into(),
                insert: false,
            },
            Op {
                obj: ObjectId::from(remote_actor.op_id_at(1)),
                action: amp::OpType::Set("Goldfinch".into()),
                key: actor.op_id_at(2).into(),
                pred: vec![actor.op_id_at(2)].into(),
                insert: false,
            },
        ],
        extra_bytes: Vec::new(),
    };

    let mut backend = Backend::new();
    backend.apply_changes(vec![remote1.clone()]).unwrap();
    backend.apply_local_change(local1).unwrap();
    let backend_after_first = backend.clone();
    let changes1 = backend_after_first.get_changes(&[remote1.hash]);
    let change12 = *changes1.get(0).unwrap();

    backend.apply_changes(vec![remote2.clone()]).unwrap();
    backend.apply_local_change(local2).unwrap();
    let backend_after_second = backend.clone();
    let changes2 = backend_after_second.get_changes(&[remote2.hash, change12.hash]);
    let change23 = *changes2.get(0).unwrap();

    backend.apply_local_change(local3).unwrap();
    let changes3 = backend.get_changes(&[remote2.hash, change23.hash]);
    let change34 = changes3.get(0).unwrap().decode();

    expected_change1.time = change12.time;
    expected_change2.time = change23.time;
    expected_change2.deps = vec![change12.hash];
    expected_change3.time = change34.time;
    expected_change3.deps = vec![remote2.hash, change23.hash];

    assert_changes_equal(change34, expected_change3);
    assert_eq!(change12, &expected_change1.try_into().unwrap());
    assert_changes_equal(change23.decode(), expected_change2.clone());
    assert_eq!(change23, &expected_change2.try_into().unwrap());
}

#[test]
fn test_handle_list_insertion_and_deletion_in_same_change() {
    let actor: ActorId = "0723d2a1940744868ffd6b294ada813f".try_into().unwrap();
    let local1 = amp::Change {
        actor_id: actor.clone(),
        seq: 1,
        message: None,
        hash: None,
        time: 0,
        deps: Vec::new(),
        start_op: 1,
        operations: vec![Op {
            obj: ObjectId::Root,
            action: amp::OpType::Make(ObjType::List),
            key: "birds".into(),
            insert: false,
            pred: SortedVec::new(),
        }],
        extra_bytes: Vec::new(),
    };

    let local2 = amp::Change {
        actor_id: actor.clone(),
        seq: 2,
        message: None,
        hash: None,
        time: 0,
        deps: Vec::new(),
        start_op: 2,
        operations: vec![
            Op {
                obj: ObjectId::from(actor.op_id_at(1)),
                action: amp::OpType::Set("magpie".into()),
                key: ElementId::Head.into(),
                insert: true,
                pred: SortedVec::new(),
            },
            Op {
                obj: ObjectId::from(actor.op_id_at(1)),
                action: OpType::Del(NonZeroU32::new(1).unwrap()),
                key: actor.op_id_at(2).into(),
                insert: false,
                pred: vec![actor.op_id_at(2)].into(),
            },
        ],
        extra_bytes: Vec::new(),
    };

    let mut expected_patch = Patch {
        actor: Some(actor.clone()),
        seq: Some(2),
        max_op: 3,
        pending_changes: 0,
        clock: hashmap! {
            actor.clone() => 2
        },
        deps: Vec::new(),
        diffs: RootDiff {
            props: hashmap! {
                "birds".into() => hashmap!{
                    actor.op_id_at(1) => Diff::List(ListDiff{
                        object_id: ObjectId::from(actor.op_id_at(1)),
                        edits: vec![
                            DiffEdit::SingleElementInsert{
                                index: 0,
                                elem_id: actor.op_id_at(2).into(),
                                op_id: actor.op_id_at(2),
                                value: Diff::Value("magpie".into()),
                            },
                            DiffEdit::Remove{index: 0, count: 1},
                        ],
                    })
                }
            },
        },
    };

    let mut backend = Backend::new();
    backend.apply_local_change(local1).unwrap();
    let patch = backend.apply_local_change(local2).unwrap().0;
    expected_patch.deps = patch.deps.clone();
    assert_eq!(patch, expected_patch);

    let changes = backend.get_changes(&[]);
    assert_eq!(changes.len(), 2);
    let change1 = changes[0].clone();
    let change2 = changes[1].clone();

    let expected_change1 = amp::Change {
        actor_id: actor.clone(),
        seq: 1,
        start_op: 1,
        time: change1.time,
        message: None,
        hash: None,
        deps: Vec::new(),
        operations: vec![Op {
            obj: ObjectId::Root,
            action: amp::OpType::Make(ObjType::List),
            key: "birds".into(),
            insert: false,
            pred: SortedVec::new(),
        }],
        extra_bytes: Vec::new(),
    }
    .try_into()
    .unwrap();

    let expected_change2 = amp::Change {
        actor_id: actor.clone(),
        seq: 2,
        start_op: 2,
        time: change2.time,
        message: None,
        hash: None,
        deps: vec![change1.hash],
        operations: vec![
            Op {
                obj: ObjectId::from(actor.op_id_at(1)),
                action: amp::OpType::Set("magpie".into()),
                key: ElementId::Head.into(),
                insert: true,
                pred: SortedVec::new(),
            },
            Op {
                obj: ObjectId::from(actor.op_id_at(1)),
                action: OpType::Del(NonZeroU32::new(1).unwrap()),
                key: actor.op_id_at(2).into(),
                pred: vec![actor.op_id_at(2)].into(),
                insert: false,
            },
        ],
        extra_bytes: Vec::new(),
    }
    .try_into()
    .unwrap();

    assert_eq!(change1, expected_change1);
    assert_eq!(change2, expected_change2);
}

/// Asserts that the changes are equal without respect to order of the hashes
/// in the change dependencies
fn assert_changes_equal(mut change1: amp::Change, change2: amp::Change) {
    let change2_clone = change2.clone();
    let deps1: HashSet<&ChangeHash> = change1.deps.iter().collect();
    let deps2: HashSet<&ChangeHash> = change2.deps.iter().collect();
    assert_eq!(
        deps1, deps2,
        "The two changes did not have equal dependencies, left: {:?}, right: {:?}",
        deps1, deps2
    );
    change1.deps = change2.deps;
    assert_eq!(change1, change2_clone)
}
33 automerge-backend/tests/get_changes.rs Normal file
@@ -0,0 +1,33 @@
use automerge_backend::{Backend, Change};

// This test reproduces issue 95 (https://github.com/automerge/automerge-rs/issues/95)
// where compressed changes were losing their header during decompression such
// that when the compressed changes were written out again they were invalid.
#[test]
fn test_deflate_correctly() {
    let init_change: Vec<u8> = vec![
        133, 111, 74, 131, 252, 38, 106, 255, 2, 195, 2, 117, 143, 189, 74, 4, 49, 16, 128, 147,
        189, 61, 239, 7, 185, 83, 196, 43, 101, 26, 75, 183, 178, 179, 17, 181, 177, 17, 27, 181,
        14, 217, 120, 55, 144, 77, 150, 73, 178, 156, 87, 172, 133, 224, 3, 88, 248, 58, 98, 227,
        29, 86, 98, 167, 22, 118, 190, 133, 96, 86, 177, 176, 48, 153, 129, 249, 253, 102, 134,
        173, 124, 108, 220, 111, 221, 188, 239, 14, 239, 6, 184, 57, 111, 157, 84, 156, 127, 190,
        190, 93, 45, 13, 14, 13, 122, 20, 26, 103, 194, 163, 53, 172, 207, 219, 201, 112, 181, 179,
        54, 90, 223, 217, 238, 239, 45, 159, 246, 207, 94, 120, 217, 98, 201, 19, 103, 44, 153, 37,
        173, 180, 189, 212, 89, 240, 110, 221, 110, 177, 222, 188, 137, 177, 228, 146, 49, 254,
        171, 53, 235, 61, 112, 206, 146, 186, 35, 3, 57, 75, 174, 43, 39, 168, 115, 82, 38, 230,
        255, 179, 83, 175, 166, 158, 45, 120, 146, 250, 139, 82, 37, 252, 251, 69, 119, 218, 208,
        227, 79, 31, 57, 239, 198, 252, 168, 190, 229, 215, 252, 192, 26, 37, 161, 176, 90, 163,
        131, 137, 50, 17, 66, 232, 129, 208, 5, 151, 193, 49, 9, 229, 148, 241, 80, 41, 163, 76,
        188, 201, 65, 161, 124, 112, 32, 60, 120, 75, 81, 160, 12, 186, 66, 35, 8, 42, 65, 216,
        244, 252, 16, 43, 244, 66, 129, 37, 137, 224, 84, 14, 185, 213, 177, 150, 130, 167, 80,
        128, 8, 50, 118, 102, 112, 20, 180, 22, 5, 52, 183, 69, 164, 22, 18, 13, 10, 80, 36, 124,
        6, 251, 36, 28, 4, 237, 9, 37, 170, 56, 21, 65, 5, 240, 129, 202, 63, 107, 158, 19, 154,
        49, 70, 74, 86, 10, 18, 99, 18, 229, 36, 183, 50, 20, 113, 229, 103, 206, 190, 0,
    ];
    let change: Change = Change::from_bytes(init_change.clone()).unwrap();
    let mut backend = Backend::new();
    backend.apply_changes(vec![change]).unwrap();

    let change_back = backend.get_changes(&[]);
    assert_eq!(change_back[0].raw_bytes().to_vec(), init_change);
}
690 automerge-backend/tests/get_patch.rs Normal file
@@ -0,0 +1,690 @@
extern crate automerge_backend;
use std::{convert::TryInto, num::NonZeroU32};

use amp::{RootDiff, SortedVec};
use automerge_backend::{Backend, Change};
use automerge_protocol as amp;
use automerge_protocol::{
    ActorId, Diff, DiffEdit, ElementId, ListDiff, MapDiff, ObjectId, Op, Patch, ScalarValue,
};
use maplit::hashmap;
use pretty_assertions::assert_eq;

#[test]
fn test_include_most_recent_value_for_key() {
    let actor: ActorId = "ec28cfbcdb9e4f32ad24b3c776e651b0".try_into().unwrap();
    let change1: Change = amp::Change {
        actor_id: actor.clone(),
        seq: 1,
        start_op: 1,
        time: 0,
        deps: Vec::new(),
        message: None,
        hash: None,
        operations: vec![Op {
            action: amp::OpType::Set("magpie".into()),
            key: "bird".into(),
            obj: ObjectId::Root,
            pred: SortedVec::new(),
            insert: false,
        }],
        extra_bytes: Vec::new(),
    }
    .try_into()
    .unwrap();

    let change2: Change = amp::Change {
        actor_id: actor.clone(),
        seq: 2,
        start_op: 2,
        time: 0,
        message: None,
        hash: None,
        deps: vec![change1.hash],
        operations: vec![Op {
            obj: ObjectId::Root,
            action: amp::OpType::Set("blackbird".into()),
            key: "bird".into(),
            pred: vec![actor.op_id_at(1)].into(),
            insert: false,
        }],
        extra_bytes: Vec::new(),
    }
    .try_into()
    .unwrap();

    let expected_patch = Patch {
        actor: None,
        seq: None,
        max_op: 2,
        pending_changes: 0,
        clock: hashmap! {
            actor.clone() => 2,
        },
        deps: vec![change2.hash],
        diffs: RootDiff {
            props: hashmap! {
                "bird".into() => hashmap!{
                    actor.op_id_at(2) => Diff::Value("blackbird".into()),
                }
            },
        },
    };

    let mut backend = Backend::new();
    backend.load_changes(vec![change1, change2]).unwrap();
    let patch = backend.get_patch().unwrap();
    assert_eq!(patch, expected_patch)
}

#[test]
fn test_includes_conflicting_values_for_key() {
    let actor1: ActorId = "111111".try_into().unwrap();
    let actor2: ActorId = "222222".try_into().unwrap();
    let change1: Change = amp::Change {
        actor_id: actor1.clone(),
        seq: 1,
        start_op: 1,
        time: 0,
        deps: Vec::new(),
        message: None,
        hash: None,
        operations: vec![Op {
            action: amp::OpType::Set("magpie".into()),
            obj: ObjectId::Root,
            key: "bird".into(),
            pred: SortedVec::new(),
            insert: false,
        }],
        extra_bytes: Vec::new(),
    }
    .try_into()
    .unwrap();

    let change2: Change = amp::Change {
        actor_id: actor2.clone(),
        seq: 1,
        start_op: 1,
        time: 0,
        message: None,
        hash: None,
        deps: Vec::new(),
        operations: vec![Op {
            action: amp::OpType::Set("blackbird".into()),
            key: "bird".into(),
            obj: ObjectId::Root,
            pred: SortedVec::new(),
            insert: false,
        }],
        extra_bytes: Vec::new(),
    }
    .try_into()
    .unwrap();

    let expected_patch = Patch {
        clock: hashmap! {
            actor1.clone() => 1,
            actor2.clone() => 1,
        },
        max_op: 1,
        pending_changes: 0,
        seq: None,
        actor: None,
        deps: vec![change1.hash, change2.hash],
        diffs: RootDiff {
            props: hashmap! {
                "bird".into() => hashmap!{
                    actor1.op_id_at(1) => Diff::Value("magpie".into()),
                    actor2.op_id_at(1) => Diff::Value("blackbird".into()),
                },
            },
        },
    };

    let mut backend = Backend::new();
    backend.load_changes(vec![change1, change2]).unwrap();
    let patch = backend.get_patch().unwrap();
    assert_eq!(patch, expected_patch)
}

#[test]
fn test_handles_counter_increment_at_keys_in_a_map() {
    let actor: ActorId = "46c92088e4484ae5945dc63bf606a4a5".try_into().unwrap();
    let change1: Change = amp::Change {
        actor_id: actor.clone(),
        seq: 1,
        start_op: 1,
        time: 0,
        message: None,
        hash: None,
        deps: Vec::new(),
        operations: vec![Op {
            action: amp::OpType::Set(ScalarValue::Counter(1)),
            obj: ObjectId::Root,
            key: "counter".into(),
            pred: SortedVec::new(),
            insert: false,
        }],
        extra_bytes: Vec::new(),
    }
    .try_into()
    .unwrap();

    let change2: Change = amp::Change {
        actor_id: actor.clone(),
        seq: 2,
        start_op: 2,
        time: 0,
        deps: vec![change1.hash],
        message: None,
        hash: None,
        operations: vec![Op {
            action: amp::OpType::Inc(2),
            obj: ObjectId::Root,
            key: "counter".into(),
            pred: vec![actor.op_id_at(1)].into(),
            insert: false,
        }],
        extra_bytes: Vec::new(),
    }
    .try_into()
    .unwrap();

    let expected_patch = Patch {
        seq: None,
        actor: None,
        clock: hashmap! {
            actor.clone() => 2,
        },
        max_op: 2,
        pending_changes: 0,
        deps: vec![change2.hash],
        diffs: RootDiff {
            props: hashmap! {
                "counter".into() => hashmap!{
                    actor.op_id_at(1) => Diff::Value(ScalarValue::Counter(3))
                }
            },
        },
    };

    let mut backend = Backend::new();
    backend.load_changes(vec![change1, change2]).unwrap();
    let patch = backend.get_patch().unwrap();
    assert_eq!(patch, expected_patch)
}

#[test]
fn test_creates_nested_maps() {
    let actor: ActorId = "06148f9422cb40579fd02f1975c34a51".try_into().unwrap();
    let change1: Change = amp::Change {
        actor_id: actor.clone(),
        seq: 1,
        start_op: 1,
        time: 0,
        message: None,
        hash: None,
        deps: Vec::new(),
        operations: vec![
            Op {
                action: amp::OpType::Make(amp::ObjType::Map),
                obj: ObjectId::Root,
                key: "birds".into(),
                pred: SortedVec::new(),
                insert: false,
            },
            Op {
                action: amp::OpType::Set(ScalarValue::F64(3.0)),
                key: "wrens".into(),
                obj: ObjectId::from(actor.op_id_at(1)),
                pred: SortedVec::new(),
                insert: false,
            },
        ],
        extra_bytes: Vec::new(),
    }
    .try_into()
    .unwrap();

    let change2: Change = amp::Change {
        actor_id: actor.clone(),
        seq: 2,
        start_op: 3,
        time: 0,
        deps: vec![change1.hash],
        message: None,
        hash: None,
        operations: vec![
            Op {
                obj: ObjectId::from(actor.op_id_at(1)),
                action: amp::OpType::Del(NonZeroU32::new(1).unwrap()),
                key: "wrens".into(),
                pred: vec![actor.op_id_at(2)].into(),
                insert: false,
            },
            Op {
                obj: ObjectId::from(actor.op_id_at(1)),
                action: amp::OpType::Set(ScalarValue::F64(15.0)),
                key: "sparrows".into(),
                pred: SortedVec::new(),
                insert: false,
            },
        ],
        extra_bytes: Vec::new(),
    }
    .try_into()
    .unwrap();

    let expected_patch = Patch {
        clock: hashmap! {
            actor.clone() => 2,
        },
        actor: None,
        seq: None,
        max_op: 4,
        pending_changes: 0,
        deps: vec![change2.hash],
        diffs: RootDiff {
            props: hashmap! {
                "birds".into() => hashmap!{
                    actor.op_id_at(1) => Diff::Map(MapDiff{
                        object_id: ObjectId::from(actor.op_id_at(1)),
                        props: hashmap!{
                            "sparrows".into() => hashmap!{
                                actor.op_id_at(4) => Diff::Value(ScalarValue::F64(15.0))
                            }
                        }
                    })
                }
            },
        },
    };

    let mut backend = Backend::new();
    backend.load_changes(vec![change1, change2]).unwrap();
    let patch = backend.get_patch().unwrap();
    assert_eq!(patch, expected_patch)
}

#[test]
fn test_create_lists() {
    let actor: ActorId = "90bf7df682f747fa82ac604b35010906".try_into().unwrap();
    let change1: Change = amp::Change {
        actor_id: actor.clone(),
        seq: 1,
        start_op: 1,
        time: 0,
        message: None,
        hash: None,
        deps: Vec::new(),
        operations: vec![
            Op {
                action: amp::OpType::Make(amp::ObjType::List),
                obj: ObjectId::Root,
                key: "birds".into(),
                pred: SortedVec::new(),
                insert: false,
            },
            Op {
                obj: ObjectId::from(actor.op_id_at(1)),
                action: amp::OpType::Set("chaffinch".into()),
                key: ElementId::Head.into(),
                insert: true,
                pred: SortedVec::new(),
            },
        ],
        extra_bytes: Vec::new(),
    }
    .try_into()
    .unwrap();

    let expected_patch = Patch {
        clock: hashmap! {
|
||||
actor.clone() => 1,
|
||||
},
|
||||
max_op: 2,
|
||||
pending_changes: 0,
|
||||
actor: None,
|
||||
seq: None,
|
||||
deps: vec![change1.hash],
|
||||
diffs: RootDiff {
|
||||
props: hashmap! {
|
||||
"birds".into() => hashmap!{
|
||||
actor.op_id_at(1) => Diff::List(ListDiff{
|
||||
object_id: ObjectId::from(actor.op_id_at(1)),
|
||||
edits: vec![DiffEdit::SingleElementInsert {
|
||||
index: 0,
|
||||
elem_id: actor.op_id_at(2).into(),
|
||||
op_id: actor.op_id_at(2),
|
||||
value: Diff::Value("chaffinch".into()),
|
||||
}],
|
||||
})
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
let mut backend = Backend::new();
|
||||
backend.load_changes(vec![change1]).unwrap();
|
||||
let patch = backend.get_patch().unwrap();
|
||||
assert_eq!(patch, expected_patch)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_includes_latests_state_of_list() {
|
||||
let actor: ActorId = "6caaa2e433de42ae9c3fa65c9ff3f03e".try_into().unwrap();
|
||||
let change1: Change = amp::Change {
|
||||
actor_id: actor.clone(),
|
||||
seq: 1,
|
||||
start_op: 1,
|
||||
time: 0,
|
||||
message: None,
|
||||
hash: None,
|
||||
deps: Vec::new(),
|
||||
operations: vec![
|
||||
Op {
|
||||
action: amp::OpType::Make(amp::ObjType::List),
|
||||
obj: ObjectId::Root,
|
||||
key: "todos".into(),
|
||||
pred: SortedVec::new(),
|
||||
insert: false,
|
||||
},
|
||||
Op {
|
||||
action: amp::OpType::Make(amp::ObjType::Map),
|
||||
obj: ObjectId::from(actor.op_id_at(1)),
|
||||
key: ElementId::Head.into(),
|
||||
insert: true,
|
||||
pred: SortedVec::new(),
|
||||
},
|
||||
Op {
|
||||
obj: ObjectId::from(actor.op_id_at(2)),
|
||||
action: amp::OpType::Set("water plants".into()),
|
||||
key: "title".into(),
|
||||
pred: SortedVec::new(),
|
||||
insert: false,
|
||||
},
|
||||
Op {
|
||||
obj: ObjectId::from(actor.op_id_at(2)),
|
||||
action: amp::OpType::Set(false.into()),
|
||||
key: "done".into(),
|
||||
pred: SortedVec::new(),
|
||||
insert: false,
|
||||
},
|
||||
],
|
||||
extra_bytes: Vec::new(),
|
||||
}
|
||||
.try_into()
|
||||
.unwrap();
|
||||
|
||||
let expected_patch = Patch {
|
||||
clock: hashmap! {
|
||||
actor.clone() => 1
|
||||
},
|
||||
max_op: 4,
|
||||
pending_changes: 0,
|
||||
actor: None,
|
||||
seq: None,
|
||||
deps: vec![change1.hash],
|
||||
diffs: RootDiff {
|
||||
props: hashmap! {
|
||||
"todos".into() => hashmap!{
|
||||
actor.op_id_at(1) => Diff::List(ListDiff{
|
||||
object_id: ObjectId::from(actor.op_id_at(1)),
|
||||
edits: vec![DiffEdit::SingleElementInsert{
|
||||
index: 0,
|
||||
elem_id: actor.op_id_at(2).into(),
|
||||
op_id: actor.op_id_at(2),
|
||||
value: Diff::Map(MapDiff{
|
||||
object_id: actor.op_id_at(2).into(),
|
||||
props: hashmap!{
|
||||
"title".into() => hashmap!{
|
||||
actor.op_id_at(3) => Diff::Value("water plants".into()),
|
||||
},
|
||||
"done".into() => hashmap!{
|
||||
actor.op_id_at(4) => Diff::Value(false.into())
|
||||
}
|
||||
}
|
||||
})
|
||||
}],
|
||||
})
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
let mut backend = Backend::new();
|
||||
backend.load_changes(vec![change1]).unwrap();
|
||||
let patch = backend.get_patch().unwrap();
|
||||
assert_eq!(patch, expected_patch)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_includes_date_objects_at_root() {
|
||||
let actor: ActorId = "90f5dd5d4f524e95ad5929e08d1194f1".try_into().unwrap();
|
||||
let change1: Change = amp::Change {
|
||||
actor_id: actor.clone(),
|
||||
seq: 1,
|
||||
start_op: 1,
|
||||
time: 0,
|
||||
message: None,
|
||||
hash: None,
|
||||
deps: Vec::new(),
|
||||
operations: vec![Op {
|
||||
obj: ObjectId::Root,
|
||||
action: amp::OpType::Set(ScalarValue::Timestamp(1_586_541_033_457)),
|
||||
key: "now".into(),
|
||||
pred: SortedVec::new(),
|
||||
insert: false,
|
||||
}],
|
||||
extra_bytes: Vec::new(),
|
||||
}
|
||||
.try_into()
|
||||
.unwrap();
|
||||
|
||||
let expected_patch = Patch {
|
||||
clock: hashmap! {
|
||||
actor.clone() => 1,
|
||||
},
|
||||
max_op: 1,
|
||||
pending_changes: 0,
|
||||
actor: None,
|
||||
seq: None,
|
||||
deps: vec![change1.hash],
|
||||
diffs: RootDiff {
|
||||
props: hashmap! {
|
||||
"now".into() => hashmap!{
|
||||
actor.op_id_at(1) => Diff::Value(ScalarValue::Timestamp(1_586_541_033_457))
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
let mut backend = Backend::new();
|
||||
backend.load_changes(vec![change1]).unwrap();
|
||||
let patch = backend.get_patch().unwrap();
|
||||
assert_eq!(patch, expected_patch)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_includes_date_objects_in_a_list() {
|
||||
let actor: ActorId = "08b050f976a249349021a2e63d99c8e8".try_into().unwrap();
|
||||
let change1: Change = amp::Change {
|
||||
actor_id: actor.clone(),
|
||||
seq: 1,
|
||||
start_op: 1,
|
||||
time: 0,
|
||||
message: None,
|
||||
hash: None,
|
||||
deps: Vec::new(),
|
||||
operations: vec![
|
||||
Op {
|
||||
obj: ObjectId::Root,
|
||||
action: amp::OpType::Make(amp::ObjType::List),
|
||||
key: "list".into(),
|
||||
pred: SortedVec::new(),
|
||||
insert: false,
|
||||
},
|
||||
Op {
|
||||
obj: ObjectId::from(actor.op_id_at(1)),
|
||||
action: amp::OpType::Set(ScalarValue::Timestamp(1_586_541_089_595)),
|
||||
key: ElementId::Head.into(),
|
||||
insert: true,
|
||||
pred: SortedVec::new(),
|
||||
},
|
||||
],
|
||||
extra_bytes: Vec::new(),
|
||||
}
|
||||
.try_into()
|
||||
.unwrap();
|
||||
|
||||
let expected_patch = Patch {
|
||||
clock: hashmap! {
|
||||
actor.clone() => 1,
|
||||
},
|
||||
max_op: 2,
|
||||
pending_changes: 0,
|
||||
actor: None,
|
||||
seq: None,
|
||||
deps: vec![change1.hash],
|
||||
diffs: RootDiff {
|
||||
props: hashmap! {
|
||||
"list".into() => hashmap!{
|
||||
actor.op_id_at(1) => Diff::List(ListDiff{
|
||||
object_id: ObjectId::from(actor.op_id_at(1)),
|
||||
edits: vec![DiffEdit::SingleElementInsert {
|
||||
index: 0,
|
||||
elem_id: actor.op_id_at(2).into(),
|
||||
op_id: actor.op_id_at(2),
|
||||
value: Diff::Value(ScalarValue::Timestamp(1_586_541_089_595)),
|
||||
}],
|
||||
})
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
let mut backend = Backend::new();
|
||||
backend.load_changes(vec![change1]).unwrap();
|
||||
let patch = backend.get_patch().unwrap();
|
||||
assert_eq!(patch, expected_patch)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_includes_updates_for_conflicting_list_elements() {
|
||||
let local_actor = ActorId::random();
|
||||
let actor1: ActorId = "da45d93f2b18456f8318c723d1430563".try_into().unwrap();
|
||||
let actor2: ActorId = "6caaa2e433de42ae9c3fa65c9ff3f03e".try_into().unwrap();
|
||||
let local_change = amp::Change {
|
||||
actor_id: local_actor.clone(),
|
||||
seq: 1,
|
||||
start_op: 1,
|
||||
time: 0,
|
||||
message: None,
|
||||
hash: None,
|
||||
deps: Vec::new(),
|
||||
operations: vec![
|
||||
Op {
|
||||
action: amp::OpType::Make(amp::ObjType::List),
|
||||
obj: ObjectId::Root,
|
||||
key: "list".into(),
|
||||
pred: SortedVec::new(),
|
||||
insert: false,
|
||||
},
|
||||
Op {
|
||||
action: amp::OpType::Set("local".into()),
|
||||
obj: local_actor.op_id_at(1).into(),
|
||||
key: amp::ElementId::Head.into(),
|
||||
pred: SortedVec::new(),
|
||||
insert: true,
|
||||
},
|
||||
],
|
||||
extra_bytes: Vec::new(),
|
||||
};
|
||||
let binchange: Change = local_change.clone().try_into().unwrap();
|
||||
|
||||
let remote_change_1: Change = amp::Change {
|
||||
actor_id: actor1.clone(),
|
||||
seq: 1,
|
||||
start_op: 1,
|
||||
time: 0,
|
||||
message: None,
|
||||
hash: None,
|
||||
deps: vec![binchange.hash],
|
||||
extra_bytes: Vec::new(),
|
||||
operations: vec![Op {
|
||||
action: amp::OpType::Set("remote1".into()),
|
||||
obj: local_actor.op_id_at(1).into(),
|
||||
key: local_actor.op_id_at(2).into(),
|
||||
pred: vec![local_actor.op_id_at(2)].into(),
|
||||
insert: false,
|
||||
}],
|
||||
}
|
||||
.try_into()
|
||||
.unwrap();
|
||||
|
||||
let remote_change_2: Change = amp::Change {
|
||||
actor_id: actor2.clone(),
|
||||
seq: 1,
|
||||
start_op: 1,
|
||||
time: 0,
|
||||
message: None,
|
||||
hash: None,
|
||||
deps: vec![binchange.hash],
|
||||
extra_bytes: Vec::new(),
|
||||
operations: vec![Op {
|
||||
action: amp::OpType::Set("remote2".into()),
|
||||
obj: local_actor.op_id_at(1).into(),
|
||||
key: local_actor.op_id_at(2).into(),
|
||||
pred: vec![local_actor.op_id_at(2)].into(),
|
||||
insert: false,
|
||||
}],
|
||||
}
|
||||
.try_into()
|
||||
.unwrap();
|
||||
|
||||
let mut deps = vec![remote_change_2.hash, remote_change_1.hash];
|
||||
deps.sort();
|
||||
|
||||
let expected_patch = Patch {
|
||||
clock: hashmap! {
|
||||
local_actor.clone() => 1,
|
||||
actor1.clone() => 1,
|
||||
actor2.clone() => 1,
|
||||
},
|
||||
max_op: 2,
|
||||
actor: None,
|
||||
seq: None,
|
||||
deps,
|
||||
diffs: RootDiff {
|
||||
props: hashmap! {
|
||||
"list".into() => hashmap!{
|
||||
local_actor.op_id_at(1) => Diff::List(ListDiff{
|
||||
object_id: ObjectId::from(local_actor.op_id_at(1)),
|
||||
edits: vec![
|
||||
DiffEdit::SingleElementInsert{
|
||||
index: 0,
|
||||
elem_id: local_actor.op_id_at(2).into(),
|
||||
op_id: actor1.op_id_at(1),
|
||||
value: Diff::Value("remote1".into()),
|
||||
},
|
||||
DiffEdit::Update{
|
||||
index: 0,
|
||||
op_id: actor2.op_id_at(1),
|
||||
value: Diff::Value("remote2".into())
|
||||
},
|
||||
],
|
||||
})
|
||||
}
|
||||
},
|
||||
},
|
||||
pending_changes: 0,
|
||||
};
|
||||
|
||||
let mut backend = Backend::new();
|
||||
backend.apply_local_change(local_change).unwrap();
|
||||
backend
|
||||
.load_changes(vec![remote_change_1, remote_change_2])
|
||||
.unwrap();
|
||||
let patch = backend.get_patch().unwrap();
|
||||
assert_eq!(patch, expected_patch)
|
||||
}
|
52 automerge-backend/tests/load.rs Normal file
@@ -0,0 +1,52 @@
use automerge_backend::Backend;

#[test]
fn test_load_index_out_of_bounds() {
    // these are just random bytes
    let bytes = vec![133, 111, 74, 131, 0, 46, 128, 0];
    let _ = Backend::load(bytes);
}

#[test]
fn test_load_index_out_of_bounds_2() {
    // these are just random bytes
    let bytes = vec![
        133, 111, 74, 131, 171, 99, 102, 54, 2, 16, 42, 0, 18, 255, 255, 61, 57, 57, 57, 29, 48,
        48, 48, 116, 0, 0, 0, 46, 46,
    ];
    let _ = Backend::load(bytes);
}

#[test]
fn test_load_index_out_of_bounds_3() {
    // these are just random bytes
    let bytes = vec![133, 111, 74, 131, 29, 246, 20, 11, 0, 2, 8, 61, 44];
    let _ = Backend::load(bytes);
}

#[test]
fn test_load_leb_failed_to_read_whole_buffer() {
    // these are just random bytes
    let bytes = vec![133, 111, 74, 131, 46, 46, 46, 46, 46];
    let _ = Backend::load(bytes);
}

#[test]
fn test_load_overflowing_add() {
    // these are just random bytes
    let bytes = vec![
        133, 111, 74, 131, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 1,
        16,
    ];
    let _ = Backend::load(bytes);
}

#[test]
fn test_load_overflowing_sub() {
    // these are just random bytes
    let bytes = vec![
        133, 111, 74, 131, 68, 193, 221, 243, 2, 16, 35, 80, 80, 10, 131, 0, 255, 28, 10, 0, 0, 65,
        8, 0, 133, 0,
    ];
    let _ = Backend::load(bytes);
}
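The leading four bytes of every one of these inputs (133, 111, 74, 131, i.e. 0x85 0x6F 0x4A 0x83) are the Automerge binary magic number, so the "random bytes" get past the header check and exercise the deeper decoders; each test asserts only that `Backend::load` returns instead of panicking. A minimal sketch of the same property against a corrupted but otherwise well-formed document, assuming nothing beyond the `save`/`load` calls already used elsewhere in this diff (the test name is hypothetical):

use automerge_backend::Backend;

#[test]
fn test_load_corrupted_document_returns_instead_of_panicking() {
    // Save a trivial but valid document, then corrupt its tail.
    let mut backend = Backend::new();
    let mut bytes = backend.save().unwrap();
    if let Some(last) = bytes.last_mut() {
        *last ^= 0xFF; // invalidate the end of the encoding
    }
    // `load` must return (Ok or Err) without panicking.
    let _ = Backend::load(bytes);
}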
2 automerge-c-v2/.gitignore vendored Normal file
@@ -0,0 +1,2 @@
automerge
automerge.o
25 automerge-c-v2/Cargo.toml Normal file
@@ -0,0 +1,25 @@
[package]
name = "automerge-c-v2"
version = "0.1.0"
authors = ["Orion Henry <orion.henry@gmail.com>"]
edition = "2018"

[lib]
name = "automerge"
crate-type = ["cdylib", "staticlib"]
bench = false
doc = false

[dependencies]
automerge-backend = { path = "../automerge-backend" }
automerge-protocol = { path = "../automerge-protocol" }
libc = "^0.2"
serde = "^1.0"
serde_json = "^1.0"
errno = "^0.2"
thiserror = "1.0.16"
rmp = "0.8.10"
rmp-serde = "0.15.4"

[build-dependencies]
cbindgen = "^0.14"
30 automerge-c-v2/Makefile Normal file
@@ -0,0 +1,30 @@
CC=gcc
CFLAGS=-I.
DEPS=automerge.h
LIBS=-lpthread -ldl -lm
LDIR=../target/release
LIB=../target/release/libautomerge.a
DEBUG_LIB=../target/debug/libautomerge.a

all: automerge $(LIB)

debug: LDIR=../target/debug
debug: automerge $(DEBUG_LIB)

automerge: automerge.o $(LDIR)/libautomerge.a
	$(CC) -o $@ automerge.o $(LDIR)/libautomerge.a $(LIBS) -L$(LDIR)

$(DEBUG_LIB): src/lib.rs
	cargo build

$(LIB): src/lib.rs
	cargo build --release

%.o: %.c $(DEPS)
	$(CC) -c -o $@ $< $(CFLAGS)

.PHONY: clean

clean:
	rm -f *.o automerge $(LIB) $(DEBUG_LIB)
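One orientation note: the `automerge` target above links the demo program in automerge.c (next file) against the static library that `cargo build --release` emits from src/lib.rs, which is why the Cargo.toml above sets `crate-type = ["cdylib", "staticlib"]`.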
271 automerge-c-v2/automerge.c Normal file
@@ -0,0 +1,271 @@
#include <stdio.h>
#include <string.h>
#include <assert.h>
#include "automerge.h"

#define BUFSIZE 4096
#define CMP_PATCH(x, y) \
  do { \
    char _buff[BUFSIZE]; \
    char _buff2[BUFSIZE]; \
    Buffer _rbuff = automerge_create_buff(); \
    int ret = automerge_get_patch(x, &_rbuff); \
    int len1 = _rbuff.len; \
    ASSERT_RET(x, 0); \
    ret = automerge_get_patch(y, &_rbuff); \
    int len2 = _rbuff.len; \
    ASSERT_RET(y, 0); \
    printf("*** get_patch of " #x " & " #y " -- (likely) equal? *** --> %s\n\n", len1 == len2 ? "true" : "false"); \
    assert(len1 == len2); \
    automerge_free_buff(&_rbuff); \
  } while (0)

// Probably shouldn't use implicit declaration of `ret`...
#define ASSERT_RET(db, expected) \
  do { \
    if (ret != expected) { \
      printf("LINE: %d, expected ret to be: %d, but it was: %d. Error: %s\n", __LINE__, expected, ret, automerge_error(db)); \
      assert(ret == expected); \
    } \
  } while(0)

#define SEND_MSG(x, y) \
  do { \
    ret = automerge_generate_sync_message(db ## x, &rbuff, ss ## x); \
    ASSERT_RET(db ## x, 0); \
    ret = automerge_receive_sync_message(db ## y, &rbuff, ss ## y, rbuff.data, rbuff.len); \
    ASSERT_RET(db ## y, 0); \
  } while (0)

void test_sync_basic() {
  printf("begin sync test - basic\n");
  int ret;

  Buffer rbuff = automerge_create_buff();
  Backend * dbA = automerge_init();
  Backend * dbB = automerge_init();

  SyncState * ssA = automerge_sync_state_init();
  SyncState * ssB = automerge_sync_state_init();

  ret = automerge_generate_sync_message(dbA, &rbuff, ssA);
  ASSERT_RET(dbA, 0);
  ret = automerge_receive_sync_message(dbB, &rbuff, ssB, rbuff.data, rbuff.len);
  ASSERT_RET(dbB, 0);

  ret = automerge_generate_sync_message(dbB, &rbuff, ssB);
  ASSERT_RET(dbB, 0);
  assert(rbuff.len == 0);

  automerge_sync_state_free(ssA);
  automerge_sync_state_free(ssB);
  automerge_free_buff(&rbuff);
}

void test_sync_encode_decode() {
  printf("begin sync test - encode/decode\n");
  int ret;

  char buff[BUFSIZE];
  char sync_state_buff[BUFSIZE];

  Buffer rbuff = automerge_create_buff();
  Backend * dbA = automerge_init();
  Backend * dbB = automerge_init();
  SyncState * ssA = automerge_sync_state_init();
  SyncState * ssB = automerge_sync_state_init();

  const char * requestA1 = "{\"actor\":\"111111\",\"seq\":1,\"time\":0,\"deps\":[],\"startOp\":1,\"ops\":[{\"action\":\"set\",\"obj\":\"_root\",\"key\":\"bird\",\"value\":\"magpie\",\"pred\":[]}]}";
  const char * requestB1 = "{\"actor\":\"222222\",\"seq\":1,\"time\":0,\"deps\":[],\"startOp\":1,\"ops\":[{\"action\":\"set\",\"obj\":\"_root\",\"key\":\"bird\",\"value\":\"crow\",\"pred\":[]}]}";

  unsigned char * A1msgpack = NULL;
  unsigned char * B1msgpack = NULL;
  uintptr_t A1msgpack_len = 0;
  uintptr_t B1msgpack_len = 0;

  debug_json_change_to_msgpack(requestA1, &A1msgpack, &A1msgpack_len);
  debug_json_change_to_msgpack(requestB1, &B1msgpack, &B1msgpack_len);

  ret = automerge_apply_local_change(dbA, &rbuff, A1msgpack, A1msgpack_len);
  ASSERT_RET(dbA, 0);
  ret = automerge_apply_local_change(dbB, &rbuff, B1msgpack, B1msgpack_len);
  ASSERT_RET(dbB, 0);

  // A -> B
  SEND_MSG(A, B);

  // B -> A
  SEND_MSG(B, A);

  // A -> B
  SEND_MSG(A, B);

  // B -> A
  SEND_MSG(B, A);

  ret = automerge_generate_sync_message(dbA, &rbuff, ssA);
  ASSERT_RET(dbA, 0);

  // Save the sync state
  ret = automerge_encode_sync_state(dbB, &rbuff, ssB);
  ASSERT_RET(dbB, 0);
  // Read it back
  ret = automerge_decode_sync_state(dbB, rbuff.data, rbuff.len, &ssB);
  ASSERT_RET(dbB, 0);

  // Redo B -> A
  SEND_MSG(B, A);

  ret = automerge_generate_sync_message(dbA, &rbuff, ssA);
  ASSERT_RET(dbA, 0);
  assert(rbuff.len == 0);
}

int main() {
  int ret;

  // In a real application you would need to check to make sure your buffer is large enough for any given read
  char buff[BUFSIZE];
  char buff2[BUFSIZE];
  char buff3[BUFSIZE];

  printf("begin\n");

  Buffer rbuff = automerge_create_buff();
  Backend * dbA = automerge_init();
  Backend * dbB = automerge_init();

  const char * requestA1 = "{\"actor\":\"111111\",\"seq\":1,\"time\":0,\"deps\":[],\"startOp\":1,\"ops\":[{\"action\":\"set\",\"obj\":\"_root\",\"key\":\"bird\",\"value\":\"magpie\",\"pred\":[]}]}";
  const char * requestA2 = "{\"actor\":\"111111\",\"seq\":2,\"time\":0,\"deps\":[],\"startOp\":2,\"ops\":[{\"action\":\"set\",\"obj\":\"_root\",\"key\":\"dog\",\"value\":\"mastiff\",\"pred\":[]}]}";
  const char * requestB1 = "{\"actor\":\"222222\",\"seq\":1,\"time\":0,\"deps\":[],\"startOp\":1,\"ops\":[{\"action\":\"set\",\"obj\":\"_root\",\"key\":\"bird\",\"value\":\"crow\",\"pred\":[]}]}";
  const char * requestB2 = "{\"actor\":\"222222\",\"seq\":2,\"time\":0,\"deps\":[],\"startOp\":2,\"ops\":[{\"action\":\"set\",\"obj\":\"_root\",\"key\":\"cat\",\"value\":\"tabby\",\"pred\":[]}]}";

  unsigned char * A1msgpack = NULL;
  unsigned char * A2msgpack = NULL;
  unsigned char * B1msgpack = NULL;
  unsigned char * B2msgpack = NULL;
  uintptr_t A1msgpack_len = 0;
  uintptr_t A2msgpack_len = 0;
  uintptr_t B1msgpack_len = 0;
  uintptr_t B2msgpack_len = 0;

  debug_json_change_to_msgpack(requestA1, &A1msgpack, &A1msgpack_len);
  debug_json_change_to_msgpack(requestA2, &A2msgpack, &A2msgpack_len);
  debug_json_change_to_msgpack(requestB1, &B1msgpack, &B1msgpack_len);
  debug_json_change_to_msgpack(requestB2, &B2msgpack, &B2msgpack_len);

  ret = automerge_apply_local_change(dbA, &rbuff, A1msgpack, A1msgpack_len);
  ASSERT_RET(dbA, 0);
  printf("cap: %ld, len: %ld, ptr: %p\n", rbuff.cap, rbuff.len, rbuff.data);
  debug_print_msgpack_patch("*** patchA1 ***", rbuff.data, rbuff.len);

  // TODO: Port this test to msgpack
  // ret = automerge_apply_local_change(dbA, &rbuff, "{}");
  // ASSERT_RET(dbA, 0);
  // printf("*** patchA2 expected error string ** (%s)\n\n", automerge_error(dbA));

  ret = automerge_apply_local_change(dbA, &rbuff, A2msgpack, A2msgpack_len);
  ASSERT_RET(dbA, 0);
  debug_print_msgpack_patch("*** patchA2 ***", rbuff.data, rbuff.len);

  ret = automerge_apply_local_change(dbB, &rbuff, B1msgpack, B1msgpack_len);
  ASSERT_RET(dbB, 0);
  debug_print_msgpack_patch("*** patchB1 ***", rbuff.data, rbuff.len);

  ret = automerge_apply_local_change(dbB, &rbuff, B2msgpack, B2msgpack_len);
  ASSERT_RET(dbB, 0);
  debug_print_msgpack_patch("*** patchB2 ***", rbuff.data, rbuff.len);

  printf("*** clone dbA -> dbC ***\n\n");
  Backend * dbC = NULL;
  ret = automerge_clone(dbA, &dbC);
  ASSERT_RET(dbA, 0);

  CMP_PATCH(dbA, dbC);

  ret = automerge_save(dbA, &rbuff);
  ASSERT_RET(dbA, 0);
  printf("*** save dbA - %ld bytes ***\n\n", rbuff.len);

  printf("*** load the save into dbD ***\n\n");
  Backend * dbD = automerge_load(rbuff.data, rbuff.len);

  CMP_PATCH(dbA, dbD);

  ret = automerge_get_changes_for_actor(dbA, &rbuff, "111111");
  ASSERT_RET(dbA, 0);

  // We are reading one return value (rbuff) while needing to return
  // something else, so we need another `Buffers` struct
  // Buffers rbuff2 = automerge_create_buffs();
  // int start = 0;
  // for(int i = 0; i < rbuff.lens_len; ++i) {
  //   int len = rbuff.lens[i];
  //   char * data_start = rbuff.data + start;
  //   automerge_decode_change(dbA, &rbuff2, data_start, len);
  //   util_read_buffs(&rbuff2, 0, buff2);
  //   printf("Change decoded to msgpack\n");
  //   start += len;
  //   automerge_encode_change(dbB, &rbuff2, buff2, rbuff2.lens[0]);
  //   assert(memcmp(data_start, rbuff2.data, len) == 0);
  // }
  // CBuffers cbuffs = { data: rbuff.data, data_len: rbuff.data_len, lens: rbuff.lens, lens_len: rbuff.lens_len };
  // ret = automerge_apply_changes(dbB, &rbuff, cbuffs);
  // ASSERT_RET(dbB, 0);
  // automerge_free_buffs(&rbuff2);

  ret = automerge_apply_changes(dbB, &rbuff, rbuff.data, rbuff.len);
  ASSERT_RET(dbB, 0);

  printf("*** get head from dbB ***\n\n");
  ret = automerge_get_heads(dbB, &rbuff);
  ASSERT_RET(dbB, 0);

  //int num_heads = 0;
  //for (int i = 0; i < rbuff.lens_len; ++i) {
  //  assert(rbuff.lens[i] == 32);
  //  util_read_buffs(&rbuff, i, buff3 + (num_heads * 32));
  //  num_heads++;
  //}
  //assert(num_heads == 2);
  ret = automerge_get_changes(dbB, &rbuff, rbuff.data, rbuff.len);
  ASSERT_RET(dbB, 0);

  printf("*** copy changes from dbB to A ***\n\n");
  ret = automerge_get_changes_for_actor(dbB, &rbuff, "222222");
  ASSERT_RET(dbB, 0);

  ret = automerge_apply_changes(dbA, &rbuff, rbuff.data, rbuff.len);
  ASSERT_RET(dbA, 0);

  CMP_PATCH(dbA, dbB);

  printf("*** copy changes from dbA to E using load ***\n\n");
  Backend * dbE = automerge_init();
  ret = automerge_get_changes(dbA, &rbuff, NULL, 0);
  ASSERT_RET(dbA, 0);
  ret = automerge_load_changes(dbE, rbuff.data, rbuff.len);
  ASSERT_RET(dbE, 0);

  CMP_PATCH(dbA, dbE);
  CMP_PATCH(dbA, dbB);

  //ret = automerge_get_missing_deps(dbE, &rbuff, buff3, num_heads);
  //ASSERT_RET(dbE, 0);
  //util_read_buffs(&rbuff, 0, buff);
  //assert(strlen(buff) == 2); // [] - nothing missing

  test_sync_basic();
  test_sync_encode_decode();

  printf("free resources\n");
  automerge_free(dbA);
  automerge_free(dbB);
  automerge_free(dbC);
  automerge_free(dbD);
  automerge_free(dbE);
  automerge_free_buff(&rbuff);

  printf("end\n");
}
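Both sync tests above drive the loop the underlying Rust API exposes directly: each peer generates a message from its SyncState, feeds the peer's reply back in, and stops once generate_sync_message yields nothing. A sketch of the basic round trip in terms of the Rust crate, using only calls that appear in src/lib.rs below (the function name is illustrative):

use automerge_backend::{Backend, SyncState};

fn sync_round_trip() {
    let mut db_a = Backend::new();
    let mut db_b = Backend::new();
    let mut ss_a = SyncState::default();
    let mut ss_b = SyncState::default();

    // A advertises its state to B...
    if let Some(msg) = db_a.generate_sync_message(&mut ss_a) {
        // ...B ingests it; with no divergent changes there is no patch to report.
        let _maybe_patch = db_b.receive_sync_message(&mut ss_b, msg).unwrap();
    }
    // ...and B now has nothing to send back, mirroring `assert(rbuff.len == 0)`.
    assert!(db_b.generate_sync_message(&mut ss_b).is_none());
}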
214 automerge-c-v2/automerge.h Normal file
@@ -0,0 +1,214 @@
#ifndef automerge_h
#define automerge_h

/* Warning, this file is autogenerated by cbindgen. Don't modify this manually. */

#include <stdint.h>
#include <stdbool.h>

typedef struct Backend Backend;

typedef struct SyncState SyncState;

/**
 * A sequence of byte buffers that are contiguous in memory
 * The C caller allocates one of these with `automerge_create_buff`
 * and passes it into each API call. This prevents allocating memory
 * on each call. The struct fields are just the constituent fields in a Vec
 * This is used for returning data to C.
 */
typedef struct {
  /**
   * A pointer to the bytes
   */
  uint8_t *data;
  /**
   * The amount of meaningful bytes
   */
  uintptr_t len;
  /**
   * The total allocated memory `data` points to
   * This is needed so Rust can free `data`
   */
  uintptr_t cap;
} Buffer;

/**
 * # Safety
 * This should be called with a valid pointer to a `Backend`;
 * `changes` must be a valid pointer to `changes_len` bytes
 */
intptr_t automerge_apply_changes(Backend *backend, Buffer *buffs, const uint8_t *changes, uintptr_t changes_len);

/**
 * # Safety
 * This should be called with a valid pointer to a `Backend`
 * and a valid pointer to a `Buffer`
 */
intptr_t automerge_apply_local_change(Backend *backend, Buffer *buffs, const uint8_t *request, uintptr_t len);

/**
 * # Safety
 * This should be called with a valid pointer to a `Backend`
 */
intptr_t automerge_clone(Backend *backend, Backend **new_);

/**
 * Create a `Buffer` struct to store return values
 */
Buffer automerge_create_buff(void);

/**
 * # Safety
 * This must be called with a valid pointer to a change and the correct len
 */
intptr_t automerge_decode_change(Backend *backend, Buffer *buffs, const uint8_t *change, uintptr_t len);

/**
 * # Safety
 * `encoded_state_[ptr|len]` must be the address & length of a byte array
 */
intptr_t automerge_decode_sync_state(Backend *backend,
                                     const uint8_t *encoded_state_ptr,
                                     uintptr_t encoded_state_len,
                                     SyncState **sync_state);

/**
 * # Safety
 * This must be called with a valid pointer to a msgpack-encoded change
 */
intptr_t automerge_encode_change(Backend *backend, Buffer *buffs, const uint8_t *change, uintptr_t len);

/**
 * # Safety
 * Must be called with a pointer to a valid Backend, sync_state, and buffs
 */
intptr_t automerge_encode_sync_state(Backend *backend, Buffer *buffs, SyncState *sync_state);

/**
 * # Safety
 * This must be called with a valid backend pointer
 */
const char *automerge_error(Backend *backend);

/**
 * # Safety
 * This must be called with a valid backend pointer
 */
void automerge_free(Backend *backend);

/**
 * # Safety
 * Must point to a valid `Buffer` struct
 * Free the memory a `Buffer` struct points to
 */
intptr_t automerge_free_buff(Buffer *buffs);

/**
 * # Safety
 * Must be called with a valid backend pointer
 * sync_state must be a valid pointer to a SyncState
 * Returns an `isize` indicating the length of the binary message
 * (-1 if there was an error, 0 if there is no message)
 */
intptr_t automerge_generate_sync_message(Backend *backend, Buffer *buffs, SyncState *sync_state);

/**
 * # Safety
 * This must be called with a valid backend pointer,
 * binary must be a valid pointer to `hashes` hashes
 */
intptr_t automerge_get_changes(Backend *backend, Buffer *buffs, const uint8_t *bin, uintptr_t hashes);

/**
 * # Safety
 * This must be called with a valid pointer to a `Backend`
 * and a valid C String
 */
intptr_t automerge_get_changes_for_actor(Backend *backend, Buffer *buffs, const char *actor);

/**
 * # Safety
 * This must be called with a valid backend pointer
 */
intptr_t automerge_get_heads(Backend *backend, Buffer *buffs);

/**
 * # Safety
 */
intptr_t automerge_get_last_local_change(Backend *backend, Buffer *buffs);

/**
 * # Safety
 * This must be called with a valid backend pointer,
 * binary must be a valid pointer to len bytes
 */
intptr_t automerge_get_missing_deps(Backend *backend, Buffer *buffs, const uint8_t *bin, uintptr_t len);

/**
 * # Safety
 * This should be called with a valid pointer to a `Backend`
 * and a valid pointer to a `Buffer`
 */
intptr_t automerge_get_patch(Backend *backend, Buffer *buffs);

Backend *automerge_init(void);

/**
 * # Safety
 * This must be called with a valid pointer to len bytes
 */
Backend *automerge_load(const uint8_t *data, uintptr_t len);

/**
 * # Safety
 * This should be called with a valid pointer to a `Backend`
 * and a valid pointer to `changes_len` bytes
 */
intptr_t automerge_load_changes(Backend *backend, const uint8_t *changes, uintptr_t changes_len);

/**
 * # Safety
 * Must be called with a valid backend pointer
 * sync_state must be a valid pointer to a SyncState
 * `encoded_msg_[ptr|len]` must be the address & length of a byte array
 */
intptr_t automerge_receive_sync_message(Backend *backend,
                                        Buffer *buffs,
                                        SyncState *sync_state,
                                        const uint8_t *encoded_msg_ptr,
                                        uintptr_t encoded_msg_len);

/**
 * # Safety
 * This should be called with a valid pointer to a `Backend`
 */
intptr_t automerge_save(Backend *backend, Buffer *buffs);

/**
 * # Safety
 * sync_state must be a valid pointer to a SyncState
 */
void automerge_sync_state_free(SyncState *sync_state);

SyncState *automerge_sync_state_init(void);

/**
 * # Safety
 * This must be called with a valid C-string
 */
intptr_t debug_json_change_to_msgpack(const char *change, uint8_t **out_msgpack, uintptr_t *out_len);

/**
 * # Safety
 * This must be called with a valid pointer to len bytes
 */
intptr_t debug_msgpack_change_to_json(const uint8_t *msgpack, uintptr_t len, uint8_t *out_json);

/**
 * # Safety
 * `prefix` & `buff` must be valid pointers
 */
void debug_print_msgpack_patch(const char *prefix, const uint8_t *buff, uintptr_t len);

#endif /* automerge_h */
16 automerge-c-v2/build.rs Normal file
@@ -0,0 +1,16 @@
extern crate cbindgen;

use std::{env, path::PathBuf};

fn main() {
    let crate_dir = PathBuf::from(
        env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR env var is not defined"),
    );

    let config = cbindgen::Config::from_file("cbindgen.toml")
        .expect("Unable to find cbindgen.toml configuration file");

    if let Ok(writer) = cbindgen::generate_with_config(&crate_dir, config) {
        writer.write_to_file(crate_dir.join("automerge.h"));
    }
}
8 automerge-c-v2/cbindgen.toml Normal file
@@ -0,0 +1,8 @@
include_guard = "automerge_h"
autogen_warning = "/* Warning, this file is autogenerated by cbindgen. Don't modify this manually. */"
language = "C"
includes = []
sys_includes = ["stdint.h", "stdbool.h"]
no_includes = true
line_length = 140
735 automerge-c-v2/src/lib.rs Normal file
@@ -0,0 +1,735 @@
extern crate automerge_backend;
extern crate errno;
extern crate libc;
extern crate serde;

use core::fmt::Debug;
use std::{
    borrow::Cow,
    convert::TryInto,
    ffi::{CStr, CString},
    mem::ManuallyDrop,
    ops::{Deref, DerefMut},
    os::raw::c_char,
    ptr,
};

use automerge_backend::{AutomergeError, Change};
use automerge_protocol as amp;
use automerge_protocol::{error::InvalidActorId, ActorId, ChangeHash, Patch};
use errno::{set_errno, Errno};

// I dislike using macros but it saves me a bunch of typing.
// This is especially true b/c the V2 backend returns a bunch more errors,
// and we need to return an `isize` (not a Result), so we can't use the `?` operator.

/// Try to turn a `*mut Backend` into a `&mut Backend`,
/// return an error code on failure
macro_rules! get_backend_mut {
    ($backend:expr) => {{
        let backend = $backend.as_mut();
        match backend {
            Some(b) => b,
            // Don't call `handle_error` b/c there is no valid backend!
            None => return CError::NullBackend.error_code(),
        }
    }};
}

/// Turn a `*mut Buffer` into a `&mut Buffer`
macro_rules! get_buff_mut {
    ($buffs:expr) => {{
        let buffs = $buffs.as_mut();
        match buffs {
            Some(b) => b,
            None => return CError::NullBuffers.error_code(),
        }
    }};
}

macro_rules! get_data_vec {
    ($buff:expr) => {{
        let data: Vec<u8> = Vec::from_raw_parts($buff.data, $buff.len, $buff.cap);
        data
    }};
}

/// Write a `Vec<u8>`'s raw parts back into a `Buffer`, handing ownership to C
macro_rules! write_to_buff_epilogue {
    ($buff:expr, $vec:expr) => {{
        $buff.cap = $vec.capacity();
        $buff.len = $vec.len();
        $buff.data = $vec.as_mut_ptr();
        let _ = ManuallyDrop::new($vec);
    }};
}

/// Try to deserialize some bytes into a value using MessagePack,
/// return an error code on failure
macro_rules! from_msgpack {
    ($backend:expr, $ptr:expr, $len:expr) => {{
        // Null pointer check?
        if $ptr.as_ref().is_none() {
            return $backend.handle_error(CError::NullChange);
        }
        let slice = std::slice::from_raw_parts($ptr, $len);
        match rmp_serde::from_read_ref(slice) {
            Ok(v) => v,
            Err(e) => return $backend.handle_error(CError::FromMessagePack(e)),
        }
    }};
}

/// Get hashes from a binary buffer
macro_rules! get_hashes {
    ($backend:expr, $bin:expr, $hashes:expr) => {{
        let mut hashes: Vec<ChangeHash> = vec![];
        if $hashes > 0 {
            let bytes: Vec<Vec<u8>> = from_msgpack!($backend, $bin, $hashes);
            for chunk in bytes {
                let hash: ChangeHash = match chunk.as_slice().try_into() {
                    Ok(v) => v,
                    Err(e) => return $backend.handle_error(CError::InvalidHashes(e.to_string())),
                };
                hashes.push(hash);
            }
        }
        hashes
    }};
}

/// Try to call an Automerge method,
/// return an error code on failure
macro_rules! call_automerge {
    ($backend:expr, $expr:expr) => {
        match $expr {
            Ok(x) => x,
            // We have to do `AutomergeError::from` to convert a `DecodeError` to an
            // `AutomergeError`
            Err(e) => return $backend.handle_error(CError::Automerge(AutomergeError::from(e))),
        }
    };
}

/// Get a `Vec<Change>` from a msgpack-encoded list of binary changes
/// Using a macro instead of a method so we can return if there is an error
macro_rules! get_changes {
    ($backend:expr, $changes:expr, $len:expr) => {{
        let raws: Vec<Vec<u8>> = from_msgpack!($backend, $changes, $len);
        let mut changes = vec![];
        for raw in raws {
            let change = call_automerge!($backend, Change::from_bytes(raw));
            changes.push(change);
        }
        changes
    }};
}
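// For orientation, a call like `call_automerge!(backend, backend.get_patch())`
// expands to roughly the following (a sketch of the expansion, not extra API):
//
//     let patch = match backend.get_patch() {
//         Ok(x) => x,
//         Err(e) => return backend.handle_error(CError::Automerge(AutomergeError::from(e))),
//     };
//
// i.e. every exported function bails out early with a negative error code,
// which is why these are macros rather than helpers returning `Result`.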
/// All possible errors that a C caller could face
#[derive(thiserror::Error, Debug)]
pub enum CError {
    // TODO: The `NullBackend` error is not attached to anything
    // (since normally we attach errors to a specific backend)
    // We could solve this by using a technique like this:
    // https://michael-f-bryan.github.io/rust-ffi-guide/errors/return_types.html
    // to create a `get_last_error_message` function, but the benefit seems very low
    // b/c the NullBackend error message is always the same
    #[error("Invalid pointer to Backend")]
    NullBackend,
    #[error("Invalid pointer to Buffers")]
    NullBuffers,
    #[error("Invalid pointer to CBuffers")]
    NullCBuffers,
    #[error("Invalid pointer to change")]
    NullChange,
    #[error("Invalid byte buffer of hashes: `{0}`")]
    InvalidHashes(String),
    #[error(transparent)]
    ToMessagePack(#[from] rmp_serde::encode::Error),
    #[error(transparent)]
    FromMessagePack(#[from] rmp_serde::decode::Error),
    #[error(transparent)]
    FromUtf8(#[from] std::string::FromUtf8Error),
    #[error("No local change")]
    NoLocalChange,
    #[error(transparent)]
    Automerge(#[from] AutomergeError),
    #[error(transparent)]
    InvalidActorid(#[from] InvalidActorId),
}

impl CError {
    fn error_code(&self) -> isize {
        // 0 is reserved for "success";
        // -1 is reserved for "we had an error & we couldn't convert it to a CString".
        // TODO: This -1 code might be useless since we wipe the *actual* error code
        // and replace it with an uninformative `-1` that only tells us we couldn't
        // format the error message.
        const BASE: isize = 2;
        let code = match self {
            CError::NullBackend => BASE,
            CError::NullBuffers => BASE + 1,
            CError::NullCBuffers => BASE + 2,
            CError::NullChange => BASE + 3,
            CError::InvalidHashes(_) => BASE + 4,
            CError::ToMessagePack(_) => BASE + 5,
            CError::FromMessagePack(_) => BASE + 6,
            CError::FromUtf8(_) => BASE + 7,
            CError::InvalidActorid(_) => BASE + 8,
            CError::NoLocalChange => BASE + 9,
            CError::Automerge(_) => BASE + 10,
        };
        -code
    }
}
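// Caller-side sketch (not part of the crate): how the `isize` returned by the
// exported functions decodes under the scheme above.
//
//     fn describe_ret(code: isize) -> &'static str {
//         match code {
//             0 => "success",
//             -1 => "error, and the message could not be turned into a CString",
//             c if c <= -2 => "error; fetch the message with automerge_error()",
//             _ => "positive codes are not produced by error_code()",
//         }
//     }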
#[derive(Clone)]
pub struct Backend {
    handle: automerge_backend::Backend,
    error: Option<CString>,
    last_local_change: Option<Vec<u8>>,
}

/// A sequence of byte buffers that are contiguous in memory
/// The C caller allocates one of these with `automerge_create_buff`
/// and passes it into each API call. This prevents allocating memory
/// on each call. The struct fields are just the constituent fields in a Vec
/// This is used for returning data to C.
// This struct is accidentally an SoA layout, so it should be more performant!
#[repr(C)]
pub struct Buffer {
    /// A pointer to the bytes
    data: *mut u8,
    /// The amount of meaningful bytes
    len: usize,
    /// The total allocated memory `data` points to
    /// This is needed so Rust can free `data`
    cap: usize,
}

impl Backend {
    fn init(handle: automerge_backend::Backend) -> Backend {
        Backend {
            handle,
            error: None,
            last_local_change: None,
        }
    }

    fn handle_error(&mut self, err: CError) -> isize {
        let c_error = match CString::new(format!("{}", err)) {
            Ok(e) => e,
            Err(_) => {
                return -1;
            }
        };
        self.error = Some(c_error);
        err.error_code()
    }

    unsafe fn write_msgpack<T: serde::ser::Serialize>(
        &mut self,
        vals: &T,
        buffers: &mut Buffer,
    ) -> isize {
        match write_msgpack_to_buff(vals, buffers) {
            Ok(()) => 0,
            Err(e) => self.handle_error(CError::ToMessagePack(e)),
        }
    }
}

impl Deref for Backend {
    type Target = automerge_backend::Backend;

    fn deref(&self) -> &Self::Target {
        &self.handle
    }
}

impl DerefMut for Backend {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.handle
    }
}

impl From<Backend> for *mut Backend {
    fn from(b: Backend) -> Self {
        Box::into_raw(Box::new(b))
    }
}

#[no_mangle]
pub extern "C" fn automerge_init() -> *mut Backend {
    Backend::init(automerge_backend::Backend::new()).into()
}

/// # Safety
/// This must be called with a valid backend pointer
#[no_mangle]
pub unsafe extern "C" fn automerge_free(backend: *mut Backend) {
    // TODO: Can we do a null pointer check here by using `get_backend_mut`?
    let backend: Backend = *Box::from_raw(backend);
    drop(backend)
}

/// Create a `Buffer` struct to store return values
#[no_mangle]
pub extern "C" fn automerge_create_buff() -> Buffer {
    // Don't drop the vector so its underlying buffer isn't de-allocated
    let mut data = ManuallyDrop::new(Vec::new());
    Buffer {
        data: data.as_mut_ptr(),
        len: data.len(),
        cap: data.capacity(),
    }
}

/// # Safety
/// Must point to a valid `Buffer` struct
/// Free the memory a `Buffer` struct points to
#[no_mangle]
pub unsafe extern "C" fn automerge_free_buff(buffs: *mut Buffer) -> isize {
    let buff = get_buff_mut!(buffs);
    // We construct the vec & drop it at the end of this function
    get_data_vec!(buff);
    0
}

unsafe fn write_msgpack_to_buff<T: serde::ser::Serialize>(
    vals: &T,
    buff: &mut Buffer,
) -> Result<(), rmp_serde::encode::Error> {
    let mut data = get_data_vec!(buff);
    let mut writer = std::io::Cursor::new(&mut data);
    rmp_serde::encode::write_named(&mut writer, &vals)?;
    write_to_buff_epilogue!(buff, data);
    Ok(())
}

unsafe fn write_bin_to_buff(bin: &[u8], buff: &mut Buffer) {
    let mut data = get_data_vec!(buff);
    data.set_len(0);
    data.extend(bin);
    write_to_buff_epilogue!(buff, data);
}

unsafe fn clear_buffs(buff: &mut Buffer) {
    let mut data = get_data_vec!(buff);
    data.set_len(0);
    write_to_buff_epilogue!(buff, data);
}

/// # Safety
/// This should be called with a valid pointer to a `Backend`
/// and a valid pointer to a `Buffer`
#[no_mangle]
pub unsafe extern "C" fn automerge_apply_local_change(
    backend: *mut Backend,
    buffs: *mut Buffer,
    request: *const u8,
    len: usize,
) -> isize {
    let backend = get_backend_mut!(backend);
    let buffs = get_buff_mut!(buffs);
    let request: amp::Change = from_msgpack!(backend, request, len);
    let (patch, change) = call_automerge!(backend, backend.apply_local_change(request));
    backend.last_local_change = Some(change.raw_bytes().to_vec());
    backend.write_msgpack(&patch, buffs)
}

/// # Safety
#[no_mangle]
pub unsafe extern "C" fn automerge_get_last_local_change(
    backend: *mut Backend,
    buffs: *mut Buffer,
) -> isize {
    let backend = get_backend_mut!(backend);
    let buff = get_buff_mut!(buffs);
    let change = match &backend.last_local_change {
        Some(c) => c,
        None => return backend.handle_error(CError::NoLocalChange),
    };
    write_bin_to_buff(change, buff);
    0
}

/// # Safety
/// This should be called with a valid pointer to a `Backend`;
/// `changes` must be a valid pointer to `changes_len` bytes
#[no_mangle]
pub unsafe extern "C" fn automerge_apply_changes(
    backend: *mut Backend,
    buffs: *mut Buffer,
    changes: *const u8,
    changes_len: usize,
) -> isize {
    let backend = get_backend_mut!(backend);
    let buffs = get_buff_mut!(buffs);
    let changes = get_changes!(backend, changes, changes_len);
    let patch = call_automerge!(backend, backend.apply_changes(changes));
    backend.write_msgpack(&patch, buffs)
}

/// # Safety
/// This should be called with a valid pointer to a `Backend`
/// and a valid pointer to a `Buffer`
#[no_mangle]
pub unsafe extern "C" fn automerge_get_patch(backend: *mut Backend, buffs: *mut Buffer) -> isize {
    let backend = get_backend_mut!(backend);
    let buff = get_buff_mut!(buffs);
    let patch = call_automerge!(backend, backend.get_patch());
    backend.write_msgpack(&patch, buff)
}

/// # Safety
/// This should be called with a valid pointer to a `Backend`;
/// `changes` must be a valid pointer to `changes_len` bytes
#[no_mangle]
pub unsafe extern "C" fn automerge_load_changes(
    backend: *mut Backend,
    changes: *const u8,
    changes_len: usize,
) -> isize {
    let backend = get_backend_mut!(backend);
    let changes = get_changes!(backend, changes, changes_len);
    call_automerge!(backend, backend.load_changes(changes));
    0
}

/// # Safety
/// This should be called with a valid pointer to a `Backend`
#[no_mangle]
pub unsafe extern "C" fn automerge_save(backend: *mut Backend, buffs: *mut Buffer) -> isize {
    let backend = get_backend_mut!(backend);
    let buff = get_buff_mut!(buffs);
    let bin = call_automerge!(backend, backend.save());
    write_bin_to_buff(&bin, buff);
    0
}

/// # Safety
/// This should be called with a valid pointer to a `Backend`
#[no_mangle]
pub unsafe extern "C" fn automerge_clone(backend: *mut Backend, new: *mut *mut Backend) -> isize {
    let backend = get_backend_mut!(backend);
    (*new) = backend.clone().into();
    0
}

/// # Safety
/// This must be called with a valid pointer to len bytes
#[no_mangle]
pub unsafe extern "C" fn automerge_load(data: *const u8, len: usize) -> *mut Backend {
    let bytes = std::slice::from_raw_parts(data, len);
    let result = automerge_backend::Backend::load(bytes.to_vec());
    match result {
        Ok(b) => Backend::init(b).into(),
        Err(_) => {
            set_errno(Errno(1));
            ptr::null_mut()
        }
    }
}

/// # Safety
/// Lossily converts a C String into a Cow<...>
// TODO: Should we do a UTF-8 check?
unsafe fn from_cstr<'a>(s: *const c_char) -> Cow<'a, str> {
    let s: &'a CStr = CStr::from_ptr(s);
    s.to_string_lossy()
}

/// # Safety
/// This must be called with a valid pointer to a `Backend`
/// and a valid C String
#[no_mangle]
pub unsafe extern "C" fn automerge_get_changes_for_actor(
    backend: *mut Backend,
    buffs: *mut Buffer,
    actor: *const c_char,
) -> isize {
    let backend = get_backend_mut!(backend);
    let buffs = get_buff_mut!(buffs);
    let actor = from_cstr(actor);
    let actor_id: ActorId = match actor.as_ref().try_into() {
        Ok(id) => id,
        Err(e) => return backend.handle_error(CError::InvalidActorid(e)),
    };
    let changes = call_automerge!(backend, backend.get_changes_for_actor_id(&actor_id));
    let bytes: Vec<_> = changes
        .into_iter()
        .map(|c| c.raw_bytes().to_vec())
        .collect();
    backend.write_msgpack(&bytes, buffs)
}

/// # Safety
/// This must be called with a valid pointer to a change and the correct len
#[no_mangle]
pub unsafe extern "C" fn automerge_decode_change(
    backend: *mut Backend,
    buffs: *mut Buffer,
    change: *const u8,
    len: usize,
) -> isize {
    let backend = get_backend_mut!(backend);
    let buffs = get_buff_mut!(buffs);
    let bytes = std::slice::from_raw_parts(change, len);
    let change = call_automerge!(backend, Change::from_bytes(bytes.to_vec()));
    backend.write_msgpack(&change.decode(), buffs);
    0
}

/// # Safety
/// This must be called with a valid pointer to a msgpack-encoded change
#[no_mangle]
pub unsafe extern "C" fn automerge_encode_change(
    backend: *mut Backend,
    buffs: *mut Buffer,
    change: *const u8,
    len: usize,
) -> isize {
    let backend = get_backend_mut!(backend);
    let buff = get_buff_mut!(buffs);
    let uncomp: amp::Change = from_msgpack!(backend, change, len);
    // This should never panic?
    let change: Change = uncomp.try_into().unwrap();
    write_bin_to_buff(change.raw_bytes(), buff);
    0
}

/// # Safety
/// This must be called with a valid backend pointer
#[no_mangle]
pub unsafe extern "C" fn automerge_get_heads(backend: *mut Backend, buffs: *mut Buffer) -> isize {
    let backend = get_backend_mut!(backend);
    let buffs = get_buff_mut!(buffs);
    let hashes = backend.get_heads();
    let bytes: Vec<_> = hashes.iter().map(|h| h.0.as_ref()).collect();
    backend.write_msgpack(&bytes, buffs)
}

/// # Safety
/// This must be called with a valid backend pointer,
/// binary must be a valid pointer to `hashes` hashes
#[no_mangle]
pub unsafe extern "C" fn automerge_get_changes(
    backend: *mut Backend,
    buffs: *mut Buffer,
    bin: *const u8,
    hashes: usize,
) -> isize {
    let backend = get_backend_mut!(backend);
    let buffs = get_buff_mut!(buffs);
    let hashes = get_hashes!(backend, bin, hashes);
    let changes = backend.get_changes(&hashes);
    let bytes: Vec<_> = changes
        .into_iter()
        .map(|c| c.raw_bytes().to_vec())
        .collect();
    backend.write_msgpack(&bytes, buffs)
}

/// # Safety
/// This must be called with a valid backend pointer,
/// binary must be a valid pointer to len bytes
#[no_mangle]
pub unsafe extern "C" fn automerge_get_missing_deps(
    backend: *mut Backend,
    buffs: *mut Buffer,
    bin: *const u8,
    len: usize,
) -> isize {
    let backend = get_backend_mut!(backend);
    let buffs = get_buff_mut!(buffs);
    let heads = get_hashes!(backend, bin, len);
    let missing = backend.get_missing_deps(&heads);
    backend.write_msgpack(&missing, buffs)
}

/// # Safety
/// This must be called with a valid backend pointer
#[no_mangle]
pub unsafe extern "C" fn automerge_error(backend: *mut Backend) -> *const c_char {
    (*backend)
        .error
        .as_ref()
        .map(|e| e.as_ptr())
        .unwrap_or_else(|| ptr::null_mut())
}

#[derive(Debug)]
pub struct SyncState {
    handle: automerge_backend::SyncState,
}

impl From<SyncState> for *mut SyncState {
    fn from(s: SyncState) -> Self {
        Box::into_raw(Box::new(s))
    }
}

/// # Safety
/// Must be called with a valid backend pointer
/// sync_state must be a valid pointer to a SyncState
/// `encoded_msg_[ptr|len]` must be the address & length of a byte array
// Returns an `isize` indicating the length of the msgpack-encoded patch
// (-1 if there was an error, 0 if there is no patch)
#[no_mangle]
pub unsafe extern "C" fn automerge_receive_sync_message(
    backend: *mut Backend,
    buffs: *mut Buffer,
    sync_state: &mut SyncState,
    encoded_msg_ptr: *const u8,
    encoded_msg_len: usize,
) -> isize {
    let backend = get_backend_mut!(backend);
    let buffs = get_buff_mut!(buffs);
    let slice = std::slice::from_raw_parts(encoded_msg_ptr, encoded_msg_len);
    let msg = call_automerge!(backend, automerge_backend::SyncMessage::decode(slice));
    let patch = call_automerge!(
        backend,
        backend.receive_sync_message(&mut sync_state.handle, msg)
    );
    if let Some(patch) = patch {
        backend.write_msgpack(&patch, buffs)
    } else {
        // There is nothing to return, clear the buffs
        clear_buffs(buffs);
        0
    }
}

/// # Safety
/// Must be called with a valid backend pointer
/// sync_state must be a valid pointer to a SyncState
/// Returns an `isize` indicating the length of the binary message
/// (-1 if there was an error, 0 if there is no message)
#[no_mangle]
pub unsafe extern "C" fn automerge_generate_sync_message(
    backend: *mut Backend,
    buffs: *mut Buffer,
    sync_state: &mut SyncState,
) -> isize {
    let backend = get_backend_mut!(backend);
    let buff = get_buff_mut!(buffs);
    let msg = backend.generate_sync_message(&mut sync_state.handle);
    if let Some(msg) = msg {
        let bytes = call_automerge!(backend, msg.encode());
        write_bin_to_buff(&bytes, buff);
    } else {
        clear_buffs(buff);
    }
    0
}

#[no_mangle]
pub extern "C" fn automerge_sync_state_init() -> *mut SyncState {
    let state = SyncState {
        handle: automerge_backend::SyncState::default(),
    };
    state.into()
}

/// # Safety
/// sync_state must be a valid pointer to a SyncState
#[no_mangle]
pub unsafe extern "C" fn automerge_sync_state_free(sync_state: *mut SyncState) {
    let sync_state: SyncState = *Box::from_raw(sync_state);
    drop(sync_state);
}

/// # Safety
/// Must be called with a pointer to a valid Backend, sync_state, and buffs
#[no_mangle]
pub unsafe extern "C" fn automerge_encode_sync_state(
    backend: *mut Backend,
    buffs: *mut Buffer,
    sync_state: &mut SyncState,
) -> isize {
    let backend = get_backend_mut!(backend);
    let buffs = get_buff_mut!(buffs);
    let encoded = call_automerge!(backend, sync_state.handle.encode());
    write_bin_to_buff(&encoded, buffs);
    0
}

/// # Safety
/// `encoded_state_[ptr|len]` must be the address & length of a byte array
#[no_mangle]
pub unsafe extern "C" fn automerge_decode_sync_state(
    backend: *mut Backend,
    encoded_state_ptr: *const u8,
    encoded_state_len: usize,
    sync_state: *mut *mut SyncState,
) -> isize {
    let backend = get_backend_mut!(backend);
    let slice = std::slice::from_raw_parts(encoded_state_ptr, encoded_state_len);
    let decoded_state = call_automerge!(backend, automerge_backend::SyncState::decode(slice));
    let state = SyncState {
        handle: decoded_state,
    };
    (*sync_state) = state.into();
    0
}

/// # Safety
/// This must be called with a valid C-string
#[no_mangle]
pub unsafe extern "C" fn debug_json_change_to_msgpack(
    change: *const c_char,
    out_msgpack: *mut *mut u8,
    out_len: *mut usize,
) -> isize {
    let s = from_cstr(change);
    // `unwrap` here is ok b/c this is a debug function
    let uncomp: amp::Change = serde_json::from_str(&s).unwrap();

    // `unwrap` here is ok b/c this is a debug function
    let mut bytes = ManuallyDrop::new(rmp_serde::to_vec_named(&uncomp).unwrap());
    *out_msgpack = bytes.as_mut_ptr();
    *out_len = bytes.len();
    0
}

/// # Safety
/// `prefix` & `buff` must be valid pointers
#[no_mangle]
pub unsafe extern "C" fn debug_print_msgpack_patch(
    prefix: *const c_char,
    buff: *const u8,
    len: usize,
) {
    if prefix.is_null() {
        panic!("null ptr: prefix");
    }
    if buff.is_null() {
        panic!("null ptr: buff");
    }
    if len == 0 {
        panic!("invalid len: 0");
    }
    let prefix = from_cstr(prefix);
    let slice = std::slice::from_raw_parts(buff, len);
    let patch: Patch = rmp_serde::from_read_ref(slice).unwrap();
    let as_json = serde_json::to_string(&patch).unwrap();
    println!("{}: {}", prefix, as_json);
}

/// # Safety
/// This must be called with a valid pointer to len bytes
|
||||
#[no_mangle]
|
||||
pub unsafe extern "C" fn debug_msgpack_change_to_json(
|
||||
msgpack: *const u8,
|
||||
len: usize,
|
||||
out_json: *mut u8,
|
||||
) -> isize {
|
||||
let slice = std::slice::from_raw_parts(msgpack, len);
|
||||
let uncomp: amp::Change = rmp_serde::from_slice(slice).unwrap();
|
||||
let json = serde_json::to_vec(&uncomp).unwrap();
|
||||
ptr::copy_nonoverlapping(json.as_ptr(), out_json, json.len());
|
||||
// null-terminate
|
||||
*out_json.add(json.len()) = 0;
|
||||
json.len() as isize
|
||||
}
|
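Taken together, the three debug helpers round-trip a change between its JSON and msgpack encodings (the msgpack buffer is handed to the caller via ManuallyDrop, so Rust never frees it). A minimal C sketch of the intended call pattern; the JSON literal and buffer size here are hypothetical:

// Sketch only: round-trip a change JSON -> msgpack -> JSON via the debug helpers.
const char *json_change =
    "{\"actor\":\"111111\",\"seq\":1,\"time\":0,\"deps\":[],\"startOp\":1,\"ops\":[]}";
uint8_t *msgpack = NULL;
uintptr_t msgpack_len = 0;
debug_json_change_to_msgpack(json_change, &msgpack, &msgpack_len);

uint8_t json_out[4096]; // assumed large enough for the JSON plus the NUL terminator
intptr_t n = debug_msgpack_change_to_json(msgpack, msgpack_len, json_out);
printf("%ld bytes of JSON: %s\n", (long)n, (char *)json_out);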
2 automerge-c/.gitignore vendored Normal file
@@ -0,0 +1,2 @@
automerge
automerge.o
22 automerge-c/Cargo.toml Normal file
@@ -0,0 +1,22 @@
[package]
name = "automerge-c"
version = "0.1.0"
authors = ["Orion Henry <orion.henry@gmail.com>"]
edition = "2018"

[lib]
name = "automerge"
crate-type = ["cdylib", "staticlib"]
bench = false
doc = false

[dependencies]
automerge-backend = { path = "../automerge-backend" }
automerge-protocol = { path = "../automerge-protocol" }
libc = "^0.2"
serde = "^1.0"
serde_json = "^1.0"
errno = "^0.2"

[build-dependencies]
cbindgen = "^0.14"
30 automerge-c/Makefile Normal file
@@ -0,0 +1,30 @@
CC=gcc
CFLAGS=-I.
DEPS=automerge.h
LIBS=-lpthread -ldl -lm
LDIR=../target/release
LIB=../target/release/libautomerge.a
DEBUG_LIB=../target/debug/libautomerge.a

all: automerge $(LIB)

debug: LDIR=../target/debug
debug: automerge $(DEBUG_LIB)

automerge: automerge.o $(LDIR)/libautomerge.a
	$(CC) -o $@ automerge.o $(LDIR)/libautomerge.a $(LIBS) -L$(LDIR)

$(DEBUG_LIB): src/lib.rs
	cargo build

$(LIB): src/lib.rs
	cargo build --release

%.o: %.c $(DEPS)
	$(CC) -c -o $@ $< $(CFLAGS)

.PHONY: clean

clean:
	rm -f *.o automerge $(LIB) $(DEBUG_LIB)
258 automerge-c/automerge.c Normal file
@@ -0,0 +1,258 @@
#include <stdio.h>
#include <string.h>
#include <assert.h>
#include "automerge.h"

#define BUFSIZE 4096

void test_sync_basic() {
  printf("begin sync test - basic\n");
  int len;

  // In a real application you would need to check to make sure your buffer is large enough for any given read
  char buff[BUFSIZE];

  Backend * dbA = automerge_init();
  Backend * dbB = automerge_init();

  SyncState * ssA = automerge_sync_state_init();
  SyncState * ssB = automerge_sync_state_init();

  len = automerge_generate_sync_message(dbA, ssA);
  // In a real application, we would use `len` to allocate `buff` here
  int len2 = automerge_read_binary(dbA, buff);
  automerge_receive_sync_message(dbB, ssB, buff, len);
  len = automerge_generate_sync_message(dbB, ssB);
  // No more sync messages were generated
  assert(len == 0);
}

void test_sync_encode_decode() {
  printf("begin sync test - encode/decode\n");
  int len;

  char buff[BUFSIZE];
  char sync_state_buff[BUFSIZE];

  Backend * dbA = automerge_init();
  Backend * dbB = automerge_init();

  const char * requestA1 = "{\"actor\":\"111111\",\"seq\":1,\"time\":0,\"deps\":[],\"startOp\":1,\"ops\":[{\"action\":\"set\",\"obj\":\"_root\",\"key\":\"bird\",\"value\":\"magpie\",\"pred\":[]}]}";
  const char * requestB1 = "{\"actor\":\"222222\",\"seq\":1,\"time\":0,\"deps\":[],\"startOp\":1,\"ops\":[{\"action\":\"set\",\"obj\":\"_root\",\"key\":\"bird\",\"value\":\"crow\",\"pred\":[]}]}";
  automerge_apply_local_change(dbA, requestA1);
  automerge_apply_local_change(dbB, requestB1);

  SyncState * ssA = automerge_sync_state_init();
  SyncState * ssB = automerge_sync_state_init();

  len = automerge_generate_sync_message(dbA, ssA);
  automerge_read_binary(dbA, buff);
  automerge_receive_sync_message(dbB, ssB, buff, len);

  len = automerge_generate_sync_message(dbB, ssB);
  automerge_read_binary(dbB, buff);
  automerge_receive_sync_message(dbA, ssA, buff, len);

  len = automerge_generate_sync_message(dbA, ssA);
  automerge_read_binary(dbA, buff);
  automerge_receive_sync_message(dbB, ssB, buff, len);

  len = automerge_generate_sync_message(dbB, ssB);
  automerge_read_binary(dbB, buff);
  automerge_receive_sync_message(dbA, ssA, buff, len);

  len = automerge_generate_sync_message(dbA, ssA);

  // Save the sync state
  int encoded_len = automerge_encode_sync_state(dbB, ssB);
  automerge_read_binary(dbB, sync_state_buff);
  // Read it back
  ssB = automerge_decode_sync_state(sync_state_buff, encoded_len);

  len = automerge_generate_sync_message(dbB, ssB);
  automerge_read_binary(dbB, buff);
  automerge_receive_sync_message(dbA, ssA, buff, len);

  len = automerge_generate_sync_message(dbA, ssA);
  assert(len == 0);
}

void test_sync() {
  printf("begin sync test\n");
  test_sync_basic();
  test_sync_encode_decode();
}

int main() {
  int len;

  // In a real application you would need to check to make sure your buffer is large enough for any given read
  char buff[BUFSIZE];
  char buff2[BUFSIZE];
  char buff3[BUFSIZE];

  printf("begin\n");

  Backend * dbA = automerge_init();
  Backend * dbB = automerge_init();

  const char * requestA1 = "{\"actor\":\"111111\",\"seq\":1,\"time\":0,\"deps\":[],\"startOp\":1,\"ops\":[{\"action\":\"set\",\"obj\":\"_root\",\"key\":\"bird\",\"value\":\"magpie\",\"pred\":[]}]}";
  const char * requestA2 = "{\"actor\":\"111111\",\"seq\":2,\"time\":0,\"deps\":[],\"startOp\":2,\"ops\":[{\"action\":\"set\",\"obj\":\"_root\",\"key\":\"dog\",\"value\":\"mastiff\",\"pred\":[]}]}";
  const char * requestB1 = "{\"actor\":\"222222\",\"seq\":1,\"time\":0,\"deps\":[],\"startOp\":1,\"ops\":[{\"action\":\"set\",\"obj\":\"_root\",\"key\":\"bird\",\"value\":\"crow\",\"pred\":[]}]}";
  const char * requestB2 = "{\"actor\":\"222222\",\"seq\":2,\"time\":0,\"deps\":[],\"startOp\":2,\"ops\":[{\"action\":\"set\",\"obj\":\"_root\",\"key\":\"cat\",\"value\":\"tabby\",\"pred\":[]}]}";

  printf("*** requestA1 ***\n\n%s\n\n",requestA1);

  len = automerge_get_last_local_change(dbA);
  assert(len == -1);
  printf("*** last_local expected error string ** (%s)\n\n",automerge_error(dbA));

  len = automerge_apply_local_change(dbA, requestA1);
  assert(len <= BUFSIZE);
  automerge_read_json(dbA, buff);
  printf("*** patchA1 ***\n\n%s\n\n",buff);

  len = automerge_get_last_local_change(dbA);
  assert(len > 0);
  assert(len <= BUFSIZE);
  len = automerge_read_binary(dbA, buff);
  assert(len == 0);

  len = automerge_apply_local_change(dbA, "{}");
  assert(len == -1);
  printf("*** patchA2 expected error string ** (%s)\n\n",automerge_error(dbA));

  len = automerge_apply_local_change(dbA, requestA2);
  assert(len <= BUFSIZE);
  automerge_read_json(dbA, buff);
  printf("*** patchA2 ***\n\n%s\n\n",buff);

  len = automerge_apply_local_change(dbB, requestB1);
  assert(len <= BUFSIZE);
  automerge_read_json(dbB, buff);
  printf("*** patchB1 ***\n\n%s\n\n",buff);

  len = automerge_apply_local_change(dbB, requestB2);
  assert(len <= BUFSIZE);
  automerge_read_json(dbB, buff);
  printf("*** patchB2 ***\n\n%s\n\n",buff);

  printf("*** clone dbA -> dbC ***\n\n");
  Backend * dbC = automerge_clone(dbA);

  len = automerge_get_patch(dbA);
  assert(len <= BUFSIZE);
  automerge_read_json(dbA, buff);
  len = automerge_get_patch(dbC);
  assert(len <= BUFSIZE);
  automerge_read_json(dbC, buff2);
  // the json can serialize in different orders so I can't do a straight strcmp()
  printf("*** get_patch of dbA & dbC -- equal? *** --> %s\n\n",strlen(buff) == strlen(buff2) ? "true" : "false");
  assert(strlen(buff) == strlen(buff2));

  len = automerge_save(dbA);
  assert(len <= BUFSIZE);
  automerge_read_binary(dbA, buff2);
  printf("*** save dbA - %d bytes ***\n\n",len);

  printf("*** load the save into dbD ***\n\n");
  Backend * dbD = automerge_load(len, buff2);
  len = automerge_get_patch(dbD);
  assert(len <= BUFSIZE);
  automerge_read_json(dbD, buff2);
  printf("*** get_patch of dbA & dbD -- equal? *** --> %s\n\n",strlen(buff) == strlen(buff2) ? "true" : "false");
  assert(strlen(buff) == strlen(buff2));

  printf("*** copy changes from dbA to B ***\n\n");
  len = automerge_get_changes_for_actor(dbA,"111111");
  while (len > 0) {
    assert(len <= BUFSIZE);
    int nextlen = automerge_read_binary(dbA,buff);
    automerge_write_change(dbB,len,buff);

    // decode the change for debug
    // encode and decode could happen with either dbA or dbB,
    // however encode needs to be done against dbB instead of dbA
    // only because dbA is in the middle of iterating over some binary results
    // and needs to finish before queuing another
    automerge_decode_change(dbA,len,buff);
    automerge_read_json(dbA, buff2);
    printf("Change decoded to json -- %s\n",buff2);
    automerge_encode_change(dbB,buff2);
    automerge_read_binary(dbB,buff3);
    assert(memcmp(buff,buff3,len) == 0);

    len = nextlen;
  }
  automerge_apply_changes(dbB);

  printf("*** get head from dbB ***\n\n");
  int num_heads = 0;
  len = automerge_get_heads(dbB);
  while (len > 0) {
    assert(len == 32);
    int nextlen = automerge_read_binary(dbB,buff3 + (num_heads * 32));
    num_heads++;
    len = nextlen;
  }
  assert(num_heads == 2);
  len = automerge_get_changes(dbB,num_heads,buff3);
  assert(len == 0);

  printf("*** copy changes from dbB to A ***\n\n");
  len = automerge_get_changes_for_actor(dbB,"222222");
  while (len > 0) {
    assert(len <= BUFSIZE);
    int nextlen = automerge_read_binary(dbB,buff);
    automerge_write_change(dbA,len,buff);
    len = nextlen;
  }
  automerge_apply_changes(dbA);

  len = automerge_get_patch(dbA);
  assert(len <= BUFSIZE);
  automerge_read_json(dbA, buff);
  len = automerge_get_patch(dbB);
  assert(len <= BUFSIZE);
  automerge_read_json(dbB, buff2);
  printf("*** get_patch of dbA & dbB -- equal? *** --> %s\n\n",strlen(buff) == strlen(buff2) ? "true" : "false");
  assert(strlen(buff) == strlen(buff2));

  printf("*** copy changes from dbA to E using load ***\n\n");
  Backend * dbE = automerge_init();
  len = automerge_get_changes(dbA,0,NULL);
  while (len > 0) {
    assert(len <= BUFSIZE);
    int nextlen = automerge_read_binary(dbA,buff);
    automerge_write_change(dbE,len,buff);
    len = nextlen;
  }
  automerge_load_changes(dbE);

  len = automerge_get_patch(dbA);
  assert(len <= BUFSIZE);
  automerge_read_json(dbA, buff);
  len = automerge_get_patch(dbE);
  assert(len <= BUFSIZE);
  automerge_read_json(dbE, buff2);
  printf("*** get_patch of dbA & dbE -- equal? *** --> %s\n\n",strlen(buff) == strlen(buff2) ? "true" : "false");
  assert(strlen(buff) == strlen(buff2));

  len = automerge_get_missing_deps(dbE, num_heads, buff3);
  automerge_read_json(dbE, buff); // [] - nothing missing
  assert(strlen(buff) == 2);

  test_sync();

  printf("free resources\n");
  automerge_free(dbA);
  automerge_free(dbB);
  automerge_free(dbC);
  automerge_free(dbD);
  automerge_free(dbE);

  printf("end\n");
}
189 automerge-c/automerge.h Normal file
@@ -0,0 +1,189 @@
#ifndef automerge_h
#define automerge_h

/* Warning, this file is autogenerated by cbindgen. Don't modify this manually. */

#include <stdint.h>
#include <stdbool.h>

typedef struct Backend Backend;

typedef struct SyncState SyncState;

/**
 * # Safety
 * This must be called with a valid backend pointer
 */
intptr_t automerge_apply_changes(Backend *backend);

/**
 * # Safety
 * This must be called with a valid backend pointer
 * request must be a valid pointer pointing to a cstring
 */
intptr_t automerge_apply_local_change(Backend *backend, const char *request);

/**
 * # Safety
 * This must be called with a valid backend pointer
 */
Backend *automerge_clone(Backend *backend);

/**
 * # Safety
 * This must be called with a valid pointer to a change and the correct len
 */
intptr_t automerge_decode_change(Backend *backend, uintptr_t len, const uint8_t *change);

/**
 * # Safety
 * `encoded_state_[ptr|len]` must be the address & length of a byte array
 * Returns an opaque pointer to a SyncState
 * panics (segfault?) if the buffer was invalid
 */
SyncState *automerge_decode_sync_state(const uint8_t *encoded_state_ptr, uintptr_t encoded_state_len);

/**
 * # Safety
 * This must be called with a valid pointer to a json string of a change
 */
intptr_t automerge_encode_change(Backend *backend, const char *change);

/**
 * # Safety
 * Must be called with a valid backend pointer
 * sync_state must be a valid pointer to a SyncState
 * Returns an `isize` indicating the length of the binary message
 * (-1 if there was an error)
 */
intptr_t automerge_encode_sync_state(Backend *backend, SyncState *sync_state);

/**
 * # Safety
 * This must be called with a valid backend pointer
 */
const char *automerge_error(Backend *backend);

/**
 * # Safety
 * This must be called with a valid backend pointer
 */
void automerge_free(Backend *backend);

/**
 * # Safety
 * Must be called with a valid backend pointer
 * sync_state must be a valid pointer to a SyncState
 * Returns an `isize` indicating the length of the binary message
 * (-1 if there was an error, 0 if there is no message)
 */
intptr_t automerge_generate_sync_message(Backend *backend, SyncState *sync_state);

/**
 * # Safety
 * This must be called with a valid backend pointer
 * binary must be a valid pointer to len bytes
 */
intptr_t automerge_get_changes(Backend *backend, uintptr_t len, const uint8_t *binary);

/**
 * # Safety
 * `backend` and `other` must be valid pointers to Backends
 */
intptr_t automerge_get_changes_added(Backend *backend, Backend *other);

/**
 * # Safety
 * This must be called with a valid backend pointer
 */
intptr_t automerge_get_changes_for_actor(Backend *backend, const char *actor);

/**
 * # Safety
 * This must be called with a valid backend pointer
 */
intptr_t automerge_get_heads(Backend *backend);

/**
 * # Safety
 * This must be called with a valid pointer to a backend
 * the automerge api changed to return a change and a patch
 * this C api was not designed to return mixed values so I borrowed the
 * get_last_local_change call from the javascript api to solve the same problem
 */
intptr_t automerge_get_last_local_change(Backend *backend);

/**
 * # Safety
 * This must be called with a valid backend pointer
 * binary must be a valid pointer to len bytes
 */
intptr_t automerge_get_missing_deps(Backend *backend, uintptr_t len, const uint8_t *binary);

/**
 * # Safety
 * This must be called with a valid backend pointer
 */
intptr_t automerge_get_patch(Backend *backend);

Backend *automerge_init(void);

/**
 * # Safety
 * data pointer must be a valid pointer to len bytes
 */
Backend *automerge_load(uintptr_t len, const uint8_t *data);

/**
 * # Safety
 * This must be called with a valid backend pointer
 */
intptr_t automerge_load_changes(Backend *backend);

/**
 * # Safety
 *
 * This must be called with a valid backend pointer
 * the buffer must be a valid pointer pointing to at least as much space as was
 * required by the previous binary result call
 */
intptr_t automerge_read_binary(Backend *backend, uint8_t *buffer);

/**
 * # Safety
 * This must be called with a valid backend pointer
 * and buffer must be a valid pointer of at least the number of bytes returned by the previous
 * call that generated a json result
 */
intptr_t automerge_read_json(Backend *backend, char *buffer);

/**
 * # Safety
 * Must be called with a valid backend pointer
 * sync_state must be a valid pointer to a SyncState
 * `encoded_msg_[ptr|len]` must be the address & length of a byte array
 */
intptr_t automerge_receive_sync_message(Backend *backend, SyncState *sync_state, const uint8_t *encoded_msg_ptr, uintptr_t encoded_msg_len);

/**
 * # Safety
 * This must be called with a valid backend pointer
 */
intptr_t automerge_save(Backend *backend);

/**
 * # Safety
 * sync_state must be a valid pointer to a SyncState
 */
void automerge_sync_state_free(SyncState *sync_state);

SyncState *automerge_sync_state_init(void);

/**
 * # Safety
 * This must be called with a valid backend pointer
 * change must point to a valid memory location with at least len bytes
 */
void automerge_write_change(Backend *backend, uintptr_t len, const uint8_t *change);

#endif /* automerge_h */
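Most of these entry points share one convention: a negative return means failure, and automerge_error retrieves the message. A small sketch of the idiom, mirroring what automerge.c above does with a deliberately invalid request:

Backend *db = automerge_init();
// "{}" is not a valid change request, so this returns -1...
intptr_t len = automerge_apply_local_change(db, "{}");
if (len == -1) {
    // ...and the backend's last error message explains why (NULL if no error).
    printf("error: %s\n", automerge_error(db));
}
automerge_free(db);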
16 automerge-c/build.rs Normal file
@@ -0,0 +1,16 @@
extern crate cbindgen;

use std::{env, path::PathBuf};

fn main() {
    let crate_dir = PathBuf::from(
        env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR env var is not defined"),
    );

    let config = cbindgen::Config::from_file("cbindgen.toml")
        .expect("Unable to find cbindgen.toml configuration file");

    if let Ok(writer) = cbindgen::generate_with_config(&crate_dir, config) {
        writer.write_to_file(crate_dir.join("automerge.h"));
    }
}
8 automerge-c/cbindgen.toml Normal file
@@ -0,0 +1,8 @@
include_guard = "automerge_h"
autogen_warning = "/* Warning, this file is autogenerated by cbindgen. Don't modify this manually. */"
language = "C"
includes = []
sys_includes = ["stdint.h", "stdbool.h"]
no_includes = true
line_length = 140
573 automerge-c/src/lib.rs Normal file
@@ -0,0 +1,573 @@
extern crate automerge_backend;
extern crate errno;
extern crate libc;
extern crate serde;

use core::fmt::Debug;
use std::{
    convert::TryInto,
    ffi::{CStr, CString},
    ops::{Deref, DerefMut},
    os::raw::c_char,
    ptr,
};

use automerge_backend::{AutomergeError, Change};
use automerge_protocol as amp;
use automerge_protocol::ChangeHash;
use errno::{set_errno, Errno};
use serde::ser::Serialize;

#[derive(Clone)]
pub struct Backend {
    handle: automerge_backend::Backend,
    text: Option<String>,
    last_local_change: Option<Change>,
    binary: Vec<Vec<u8>>,
    queue: Option<Vec<Vec<u8>>>,
    error: Option<CString>,
}

struct BinaryResults(Result<Vec<Vec<u8>>, AutomergeError>);

impl Deref for Backend {
    type Target = automerge_backend::Backend;

    fn deref(&self) -> &Self::Target {
        &self.handle
    }
}

unsafe fn from_buf_raw<T>(ptr: *const T, elts: usize) -> Vec<T> {
    let mut dst = Vec::with_capacity(elts);
    dst.set_len(elts);
    ptr::copy(ptr, dst.as_mut_ptr(), elts);
    dst
}

fn err<T, V: Debug>(result: Result<T, V>) -> Result<T, String> {
    match result {
        Ok(val) => Ok(val),
        Err(err) => Err(format!("{:?}", err)),
    }
}

impl Backend {
    fn init(handle: automerge_backend::Backend) -> Backend {
        Backend {
            handle,
            text: None,
            last_local_change: None,
            binary: Vec::new(),
            queue: None,
            error: None,
        }
    }

    fn handle_result(&mut self, result: Result<isize, String>) -> isize {
        match result {
            Ok(len) => {
                self.error = None;
                len
            }
            Err(err) => self.handle_error(err),
        }
    }

    fn generate_json<T: Serialize>(&mut self, val: Result<T, AutomergeError>) -> isize {
        let result = err(val)
            .and_then(|val| err(serde_json::to_string(&val)))
            .map(|text| {
                let len = (text.len() + 1) as isize;
                self.text = Some(text);
                len
            });
        self.handle_result(result)
    }

    fn handle_binary(&mut self, b: Result<Vec<u8>, AutomergeError>) -> isize {
        let result = err(b).map(|bin| {
            let len = bin.len();
            self.binary = vec![bin];
            len as isize
        });
        self.handle_result(result)
    }

    fn handle_ok(&mut self) -> isize {
        self.error = None;
        0
    }

    fn handle_error<E: Debug>(&mut self, err: E) -> isize {
        // in theory - if an error string had embedded nulls
        // we could get error = None and -1
        self.error = CString::new(format!("{:?}", err)).ok();
        -1
    }

    fn handle_binaries(&mut self, b: BinaryResults) -> isize {
        let result = err(b.0).map(|bin| {
            self.error = None;
            if !bin.is_empty() {
                let len = bin[0].len();
                self.binary = bin;
                self.binary.reverse();
                len as isize
            } else {
                0
            }
        });
        self.handle_result(result)
    }
}

impl DerefMut for Backend {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.handle
    }
}

impl From<Backend> for *mut Backend {
    fn from(b: Backend) -> Self {
        Box::into_raw(Box::new(b))
    }
}

impl From<Vec<&Change>> for BinaryResults {
    fn from(changes: Vec<&Change>) -> Self {
        BinaryResults(Ok(changes.iter().map(|b| b.raw_bytes().into()).collect()))
    }
}

impl From<Result<Vec<&Change>, AutomergeError>> for BinaryResults {
    fn from(result: Result<Vec<&Change>, AutomergeError>) -> Self {
        BinaryResults(result.map(|changes| changes.iter().map(|b| b.raw_bytes().into()).collect()))
    }
}

impl From<Vec<ChangeHash>> for BinaryResults {
    fn from(heads: Vec<ChangeHash>) -> Self {
        BinaryResults(Ok(heads.iter().map(|head| head.0.to_vec()).collect()))
    }
}

/*
init => automerge_init
clone => automerge_clone
free => automerge_free
save => automerge_save
load => automerge_load
applyLocalChange => automerge_apply_local_change
getPatch => automerge_get_patch
applyChanges => automerge_apply_changes
loadChanges => automerge_load_changes
getChangesForActor => automerge_get_changes_for_actor
getChanges => automerge_get_changes
getMissingDeps => automerge_get_missing_deps
*/
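The comment above maps the JavaScript backend API onto these exported symbols. As rough orientation before the definitions, a typical document lifecycle from the C side looks like this (a condensed, hypothetical sketch of what automerge-c/automerge.c above exercises; `request` stands for a change-request JSON string):

Backend *db = automerge_init();

intptr_t len = automerge_apply_local_change(db, request); // patch length, or -1 on error
char patch[4096];                // assumed >= len
automerge_read_json(db, patch);  // copy out the patch generated above

len = automerge_save(db);        // serialize the whole document
uint8_t saved[4096];             // assumed >= len
automerge_read_binary(db, saved);

Backend *copy = automerge_load(len, saved); // rebuild a backend from the bytes
automerge_free(copy);
automerge_free(db);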
#[no_mangle]
pub extern "C" fn automerge_init() -> *mut Backend {
    Backend::init(automerge_backend::Backend::new()).into()
}

/// # Safety
/// This must be called with a valid backend pointer
#[no_mangle]
pub unsafe extern "C" fn automerge_free(backend: *mut Backend) {
    let backend: Backend = *Box::from_raw(backend);
    drop(backend)
}

/// # Safety
/// This must be called with a valid backend pointer
/// request must be a valid pointer pointing to a cstring
#[no_mangle]
pub unsafe extern "C" fn automerge_apply_local_change(
    backend: *mut Backend,
    request: *const c_char,
) -> isize {
    let request: &CStr = CStr::from_ptr(request);
    let request = request.to_string_lossy();
    let request: Result<amp::Change, _> = serde_json::from_str(&request);
    match request {
        Ok(request) => {
            let result = (*backend).apply_local_change(request);
            match result {
                Ok((patch, change)) => {
                    (*backend).last_local_change = Some(change);
                    (*backend).generate_json(Ok(patch))
                }
                Err(err) => (*backend).handle_error(err),
            }
        }
        Err(err) => (*backend).handle_error(err),
    }
}

/// # Safety
/// This must be called with a valid backend pointer
/// change must point to a valid memory location with at least len bytes
#[no_mangle]
pub unsafe extern "C" fn automerge_write_change(
    backend: *mut Backend,
    len: usize,
    change: *const u8,
) {
    let bytes = from_buf_raw(change, len);
    if let Some(ref mut queue) = (*backend).queue {
        queue.push(bytes)
    } else {
        (*backend).queue = Some(vec![bytes])
    }
}

/// # Safety
/// This must be called with a valid backend pointer
#[no_mangle]
pub unsafe extern "C" fn automerge_apply_changes(backend: *mut Backend) -> isize {
    match (*backend).queue.take() {
        Some(changes) => {
            let changes = changes
                .iter()
                .map(|c| Change::from_bytes(c.to_vec()).unwrap())
                .collect();
            let patch = (*backend).apply_changes(changes);
            (*backend).generate_json(patch)
        }
        None => (*backend).handle_error("no changes queued"),
    }
}

/// # Safety
/// This must be called with a valid backend pointer
#[no_mangle]
pub unsafe extern "C" fn automerge_get_patch(backend: *mut Backend) -> isize {
    let patch = (*backend).get_patch();
    (*backend).generate_json(patch)
}

/// # Safety
/// This must be called with a valid backend pointer
#[no_mangle]
pub unsafe extern "C" fn automerge_load_changes(backend: *mut Backend) -> isize {
    if let Some(changes) = (*backend).queue.take() {
        let changes = changes
            .iter()
            .map(|c| Change::from_bytes(c.to_vec()).unwrap())
            .collect();
        if (*backend).load_changes(changes).is_ok() {
            return (*backend).handle_ok();
        }
    }
    (*backend).handle_error("no changes queued")
}

/// # Safety
/// This must be called with a valid backend pointer
#[no_mangle]
pub unsafe extern "C" fn automerge_clone(backend: *mut Backend) -> *mut Backend {
    (*backend).clone().into()
}

/// # Safety
/// This must be called with a valid backend pointer
#[no_mangle]
pub unsafe extern "C" fn automerge_save(backend: *mut Backend) -> isize {
    let data = (*backend).save();
    (*backend).handle_binary(data)
}

/// # Safety
/// data pointer must be a valid pointer to len bytes
#[no_mangle]
pub unsafe extern "C" fn automerge_load(len: usize, data: *const u8) -> *mut Backend {
    let bytes = from_buf_raw(data, len);
    let result = automerge_backend::Backend::load(bytes);
    if let Ok(backend) = result {
        Backend::init(backend).into()
    } else {
        set_errno(Errno(1));
        ptr::null_mut()
    }
}

/// # Safety
/// This must be called with a valid backend pointer
#[no_mangle]
pub unsafe extern "C" fn automerge_get_changes_for_actor(
    backend: *mut Backend,
    actor: *const c_char,
) -> isize {
    let actor: &CStr = CStr::from_ptr(actor);
    let actor = actor.to_string_lossy();
    match actor.as_ref().try_into() {
        Ok(actor) => {
            let changes = (*backend).get_changes_for_actor_id(&actor);
            (*backend).handle_binaries(changes.into())
        }
        Err(err) => (*backend).handle_error(err),
    }
}

/// # Safety
/// This must be called with a valid pointer to a change and the correct len
#[no_mangle]
pub unsafe extern "C" fn automerge_decode_change(
    backend: *mut Backend,
    len: usize,
    change: *const u8,
) -> isize {
    let bytes = from_buf_raw(change, len);
    let change = Change::from_bytes(bytes).unwrap();
    (*backend).generate_json(Ok(change.decode()))
}

/// # Safety
/// This must be called with a valid pointer to a json string of a change
#[no_mangle]
pub unsafe extern "C" fn automerge_encode_change(
    backend: *mut Backend,
    change: *const c_char,
) -> isize {
    let change: &CStr = CStr::from_ptr(change);
    let change = change.to_string_lossy();
    let uncomp_change: amp::Change = serde_json::from_str(&change).unwrap();
    let change: Change = uncomp_change.try_into().unwrap();
    (*backend).handle_binary(Ok(change.raw_bytes().into()))
}

/// # Safety
/// This must be called with a valid pointer to a backend
/// the automerge api changed to return a change and a patch
/// this C api was not designed to return mixed values so I borrowed the
/// get_last_local_change call from the javascript api to solve the same problem
#[no_mangle]
pub unsafe extern "C" fn automerge_get_last_local_change(backend: *mut Backend) -> isize {
    match (*backend).last_local_change.as_ref() {
        Some(change) => (*backend).handle_binary(Ok(change.raw_bytes().into())),
        None => (*backend).handle_error("no last change"),
    }
}

/// # Safety
/// This must be called with a valid backend pointer
#[no_mangle]
pub unsafe extern "C" fn automerge_get_heads(backend: *mut Backend) -> isize {
    let heads = (*backend).get_heads();
    (*backend).handle_binaries(heads.into())
}

/// # Safety
/// This must be called with a valid backend pointer
/// binary must be a valid pointer to len bytes
#[no_mangle]
pub unsafe extern "C" fn automerge_get_changes(
    backend: *mut Backend,
    len: usize,
    binary: *const u8,
) -> isize {
    let mut have_deps = Vec::new();
    for i in 0..len {
        have_deps.push(
            from_buf_raw(binary.offset(i as isize * 32), 32)
                .as_slice()
                .try_into()
                .unwrap(),
        )
    }
    let changes = (*backend).get_changes(&have_deps);
    (*backend).handle_binaries(Ok(changes).into())
}
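automerge_get_changes expects `len` 32-byte change hashes packed back to back, which is exactly the layout automerge_get_heads produces. A sketch condensed from automerge.c above:

// Collect the current heads (32-byte hashes) into one flat buffer...
uint8_t heads[BUFSIZE];
int num_heads = 0;
intptr_t len = automerge_get_heads(db);
while (len > 0) {
    assert(len == 32);
    len = automerge_read_binary(db, heads + (num_heads * 32));
    num_heads++;
}
// ...then ask for every change made after those heads (none, in this case).
len = automerge_get_changes(db, num_heads, heads);
assert(len == 0);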
/// # Safety
/// `backend` and `other` must be valid pointers to Backends
#[no_mangle]
pub unsafe extern "C" fn automerge_get_changes_added(
    backend: *mut Backend,
    other: *mut Backend,
) -> isize {
    let changes = (*backend).get_changes_added(&*other);
    (*backend).handle_binaries(Ok(changes).into())
}

/// # Safety
/// This must be called with a valid backend pointer
/// binary must be a valid pointer to len bytes
#[no_mangle]
pub unsafe extern "C" fn automerge_get_missing_deps(
    backend: *mut Backend,
    len: usize,
    binary: *const u8,
) -> isize {
    let mut heads = Vec::new();
    for i in 0..len {
        heads.push(
            from_buf_raw(binary.offset(i as isize * 32), 32)
                .as_slice()
                .try_into()
                .unwrap(),
        )
    }
    let missing = (*backend).get_missing_deps(&heads);
    (*backend).generate_json(Ok(missing))
}

/// # Safety
/// This must be called with a valid backend pointer
#[no_mangle]
pub unsafe extern "C" fn automerge_error(backend: *mut Backend) -> *const c_char {
    (*backend)
        .error
        .as_ref()
        .map(|e| e.as_ptr())
        .unwrap_or_else(|| ptr::null_mut())
}

/// # Safety
/// This must be called with a valid backend pointer
/// and buffer must be a valid pointer of at least the number of bytes returned by the previous
/// call that generated a json result
#[no_mangle]
pub unsafe extern "C" fn automerge_read_json(backend: *mut Backend, buffer: *mut c_char) -> isize {
    if let Some(text) = &(*backend).text {
        let len = text.len();
        buffer.copy_from(text.as_ptr().cast(), len);
        (*buffer.add(len)) = 0; // null terminate
        (*backend).text = None;
        0
    } else {
        (*buffer) = 0;
        (*backend).handle_error("no json to be read")
    }
}
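Note that generate_json reports `text.len() + 1`, so the length returned by a JSON-producing call already accounts for the NUL terminator appended here. Typical usage from C, as in automerge.c above:

intptr_t len = automerge_get_patch(db); // length of the patch JSON, incl. NUL
assert(len <= BUFSIZE);
automerge_read_json(db, buff); // consumes the pending JSON result
printf("patch: %s\n", buff);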
/// # Safety
///
/// This must be called with a valid backend pointer
/// the buffer must be a valid pointer pointing to at least as much space as was
/// required by the previous binary result call
#[no_mangle]
pub unsafe extern "C" fn automerge_read_binary(backend: *mut Backend, buffer: *mut u8) -> isize {
    if let Some(bin) = (*backend).binary.pop() {
        let len = bin.len();
        buffer.copy_from(bin.as_ptr(), len);
        if let Some(next) = (*backend).binary.last() {
            next.len() as isize
        } else {
            0
        }
    } else {
        (*backend).handle_error("no binary to be read")
    }
}
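Because results are popped one at a time and each call returns the size of the next pending item (0 once the queue is drained), callers read binary results in a loop. Condensed from automerge.c above:

intptr_t len = automerge_get_changes_for_actor(db, "111111"); // size of first change
while (len > 0) {
    intptr_t nextlen = automerge_read_binary(db, buff); // copy one change, peek at next
    automerge_write_change(other, len, buff);           // queue it on another backend
    len = nextlen;
}
automerge_apply_changes(other); // apply everything queued above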
#[derive(Debug)]
pub struct SyncState {
    handle: automerge_backend::SyncState,
}

impl From<SyncState> for *mut SyncState {
    fn from(s: SyncState) -> Self {
        Box::into_raw(Box::new(s))
    }
}

/// # Safety
/// Must be called with a valid backend pointer
/// sync_state must be a valid pointer to a SyncState
/// `encoded_msg_[ptr|len]` must be the address & length of a byte array
// Returns an `isize` indicating the length of the patch as a JSON string
// (-1 if there was an error, 0 if there is no patch)
#[no_mangle]
pub unsafe extern "C" fn automerge_receive_sync_message(
    backend: *mut Backend,
    sync_state: &mut SyncState,
    encoded_msg_ptr: *const u8,
    encoded_msg_len: usize,
) -> isize {
    let slice = std::slice::from_raw_parts(encoded_msg_ptr, encoded_msg_len);
    let decoded = automerge_backend::SyncMessage::decode(slice);
    let msg = match decoded {
        Ok(msg) => msg,
        Err(e) => {
            return (*backend).handle_error(e);
        }
    };
    let patch = (*backend).receive_sync_message(&mut sync_state.handle, msg);
    if let Ok(None) = patch {
        0
    } else {
        (*backend).generate_json(patch)
    }
}

/// # Safety
/// Must be called with a valid backend pointer
/// sync_state must be a valid pointer to a SyncState
/// Returns an `isize` indicating the length of the binary message
/// (-1 if there was an error, 0 if there is no message)
#[no_mangle]
pub unsafe extern "C" fn automerge_generate_sync_message(
    backend: *mut Backend,
    sync_state: &mut SyncState,
) -> isize {
    let msg = (*backend).generate_sync_message(&mut sync_state.handle);
    if let Some(msg) = msg {
        (*backend).handle_binary(msg.encode().or(Err(AutomergeError::EncodeFailed)))
    } else {
        0
    }
}
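These two calls form the inner step of the sync protocol: each peer repeatedly generates a message for the other and feeds received messages back in, until automerge_generate_sync_message returns 0 on both sides. One half-round from automerge.c's test_sync_basic, as a sketch:

SyncState *ssA = automerge_sync_state_init();
SyncState *ssB = automerge_sync_state_init();

// A tells B what it has and needs; B ingests the message.
intptr_t len = automerge_generate_sync_message(dbA, ssA);
automerge_read_binary(dbA, buff);
automerge_receive_sync_message(dbB, ssB, buff, len);
// Repeat in both directions until both peers generate a 0-length message.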
#[no_mangle]
pub extern "C" fn automerge_sync_state_init() -> *mut SyncState {
    let state = SyncState {
        handle: automerge_backend::SyncState::default(),
    };
    state.into()
}

/// # Safety
/// Must be called with a valid backend pointer
/// sync_state must be a valid pointer to a SyncState
/// Returns an `isize` indicating the length of the binary message
/// (-1 if there was an error)
#[no_mangle]
pub unsafe extern "C" fn automerge_encode_sync_state(
    backend: *mut Backend,
    sync_state: &mut SyncState,
) -> isize {
    (*backend).handle_binary(
        sync_state
            .handle
            .encode()
            .or(Err(AutomergeError::EncodeFailed)),
    )
}

/// # Safety
/// `encoded_state_[ptr|len]` must be the address & length of a byte array
/// Returns an opaque pointer to a SyncState
/// panics (segfault?) if the buffer was invalid
#[no_mangle]
pub unsafe extern "C" fn automerge_decode_sync_state(
    encoded_state_ptr: *const u8,
    encoded_state_len: usize,
) -> *mut SyncState {
    let slice = std::slice::from_raw_parts(encoded_state_ptr, encoded_state_len);
    let decoded_state = automerge_backend::SyncState::decode(slice);
    // TODO: Is there a way to avoid `unwrap` here?
    let state = decoded_state.unwrap();
    let state = SyncState { handle: state };
    state.into()
}
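Encode and decode let a peer persist its sync state across restarts instead of starting a conversation from scratch. The save-and-restore step from test_sync_encode_decode, as a sketch:

// Persist the per-peer sync state...
intptr_t n = automerge_encode_sync_state(db, ss);
automerge_read_binary(db, state_buff);
// ...and later rebuild a SyncState from the saved bytes.
ss = automerge_decode_sync_state(state_buff, n);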
/// # Safety
/// sync_state must be a valid pointer to a SyncState
#[no_mangle]
pub unsafe extern "C" fn automerge_sync_state_free(sync_state: *mut SyncState) {
    let sync_state: SyncState = *Box::from_raw(sync_state);
    drop(sync_state);
}
28 automerge-cli/Cargo.toml Normal file
@@ -0,0 +1,28 @@
[package]
name = "automerge-cli"
version = "0.1.0"
authors = ["Alex Good <alex@memoryandthought.me>"]
edition = "2018"

[[bin]]
name = "automerge"
path = "src/main.rs"
bench = false
doc = false

[dependencies]
clap = "3.0.0-beta.2"
serde_json = "^1.0"
anyhow = "1.0"
atty = "0.2"
thiserror = "1.0.16"
combine = "4.5.2"
maplit = "1.0.2"
colored_json = "2.1.0"

automerge-backend = { path = "../automerge-backend" }
automerge-frontend = { path = "../automerge-frontend" }
automerge-protocol = { path = "../automerge-protocol" }

[dev-dependencies]
duct = "0.13"
@@ -1,4 +1,5 @@
-use automerge as am;
+use automerge_backend as amb;
+use automerge_frontend as amf;
 use combine::{parser::char as charparser, EasyParser, ParseError, Parser};
 use thiserror::Error;

@@ -14,7 +15,12 @@ pub enum ChangeError {
     #[error("Error loading changes: {:?}", source)]
     ErrApplyingInitialChanges {
         #[source]
-        source: am::AutomergeError,
+        source: amb::AutomergeError,
     },
+    #[error("Some changes were invalid: {:?}", source)]
+    InvalidChangeRequest {
+        #[from]
+        source: amf::InvalidChangeRequest,
+    },
     #[error("Error writing changes to output file: {:?}", source)]
     ErrWritingChanges {

@@ -134,7 +140,7 @@ where
         op_parser()
             .skip(charparser::spaces())
             .skip(charparser::string("$"))
-            .and(path_segment_parser(am::Path::root())),
+            .and(path_segment_parser(amf::Path::root())),
     )
     .skip(charparser::spaces())
     .then(|(operation, path)| {

@@ -169,17 +175,29 @@ pub fn change(
    mut writer: impl std::io::Write,
    script: &str,
) -> Result<(), ChangeError> {
    let mut backend = amb::Backend::new();
    let mut buf: Vec<u8> = Vec::new();
    reader
        .read_to_end(&mut buf)
        .map_err(|e| ChangeError::ErrReadingChanges { source: e })?;
    let backend = am::Automerge::load(&buf)
    let changes = amb::Change::load_document(&buf)
        .map_err(|e| ChangeError::ErrApplyingInitialChanges { source: e })?;
    let mut frontend = amf::Frontend::new();
    let patch = backend
        .apply_changes(changes)
        .map_err(|e| ChangeError::ErrApplyingInitialChanges { source: e })?;
    // This unwrap should be fine, we've generated the patch ourselves, if it's invalid then
    // there's no way for the user to recover
    frontend.apply_patch(patch).unwrap();
    let local_change = parse_change_script(script)?;
    let ((), new_changes) = frontend.change::<_, _, amf::InvalidChangeRequest>(None, |d| {
        d.add_change(local_change)?;
        Ok(())
    })?;
    if let Some(c) = new_changes {
        // The user can't do anything to recover if this fails so we unwrap
        backend.apply_local_change(c).unwrap();
    }
    let change_bytes = backend.save().unwrap();
    writer
        .write_all(&change_bytes)
@@ -1,8 +1,7 @@
-use automerge as am;
+use automerge_backend as amb;
+use automerge_protocol as amp;
 use thiserror::Error;

-use crate::{color_json::print_colored_json, SkipVerifyFlag};
-
 #[derive(Error, Debug)]
 pub enum ExamineError {
     #[error("Error reading change file: {:?}", source)]

@@ -13,7 +12,7 @@ pub enum ExamineError {
     #[error("Error loading changes: {:?}", source)]
     ApplyingInitialChanges {
         #[source]
-        source: am::AutomergeError,
+        source: amb::AutomergeError,
     },
     #[error("Error writing to output: {:?}", source)]
     WritingToOutput {

@@ -22,29 +21,21 @@ pub enum ExamineError {
     },
 }

-pub(crate) fn examine(
+pub fn examine(
     mut input: impl std::io::Read,
     mut output: impl std::io::Write,
-    skip: SkipVerifyFlag,
     is_tty: bool,
 ) -> Result<(), ExamineError> {
     let mut buf: Vec<u8> = Vec::new();
     input
         .read_to_end(&mut buf)
         .map_err(|e| ExamineError::ReadingChanges { source: e })?;
-    let doc = skip
-        .load(&buf)
+    let changes = amb::Change::load_document(&buf)
         .map_err(|e| ExamineError::ApplyingInitialChanges { source: e })?;
-    let uncompressed_changes: Vec<_> = doc
-        .get_changes(&[])
-        .unwrap()
-        .iter()
-        .map(|c| c.decode())
-        .collect();
+    let uncompressed_changes: Vec<amp::Change> = changes.iter().map(|c| c.decode()).collect();
     if is_tty {
         let json_changes = serde_json::to_value(uncompressed_changes).unwrap();
-        print_colored_json(&json_changes).unwrap();
-        writeln!(output).unwrap();
+        colored_json::write_colored_json(&json_changes, &mut output).unwrap();
     } else {
         let json_changes = serde_json::to_string_pretty(&uncompressed_changes).unwrap();
         output
93 automerge-cli/src/export.rs Normal file
@@ -0,0 +1,93 @@
use anyhow::Result;

fn get_state_json(input_data: Vec<u8>) -> Result<serde_json::Value> {
    let mut backend = automerge_backend::Backend::new();
    let changes = automerge_backend::Change::load_document(&input_data)?;
    let patch = backend.apply_changes(changes)?;

    let mut frontend = automerge_frontend::Frontend::new();
    frontend.apply_patch(patch)?;

    Ok(frontend.state().to_json())
}

pub fn export_json(
    mut changes_reader: impl std::io::Read,
    mut writer: impl std::io::Write,
    is_tty: bool,
) -> Result<()> {
    let mut input_data = vec![];
    changes_reader.read_to_end(&mut input_data)?;

    let state_json = get_state_json(input_data)?;
    if is_tty {
        colored_json::write_colored_json(&state_json, &mut writer).unwrap()
    } else {
        writeln!(
            writer,
            "{}",
            serde_json::to_string_pretty(&state_json).unwrap()
        )?;
    }
    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn cli_export_with_empty_input() {
        assert_eq!(get_state_json(vec![]).unwrap(), serde_json::json!({}))
    }

    #[test]
    fn cli_export_with_flat_map() {
        let initial_state_json: serde_json::Value =
            serde_json::from_str(r#"{"sparrows": 15.0}"#).unwrap();
        let value: automerge_frontend::Value =
            automerge_frontend::Value::from_json(&initial_state_json);

        let (_, initial_change) =
            automerge_frontend::Frontend::new_with_initial_state(value).unwrap();
        let mut backend = automerge_backend::Backend::new();
        backend.apply_local_change(initial_change).unwrap();

        let change_bytes = backend.save().unwrap();
        assert_eq!(
            get_state_json(change_bytes).unwrap(),
            serde_json::json!({"sparrows": 15.0})
        )
    }

    #[test]
    fn cli_export_with_nested_map() {
        let initial_state_json: serde_json::Value = serde_json::from_str(
            r#"{
            "birds": {
                "wrens": 3.0,
                "sparrows": 15.0
            }
        }"#,
        )
        .unwrap();
        let value: automerge_frontend::Value =
            automerge_frontend::Value::from_json(&initial_state_json);

        let (_, initial_change) =
            automerge_frontend::Frontend::new_with_initial_state(value).unwrap();
        let mut backend = automerge_backend::Backend::new();
        backend.apply_local_change(initial_change).unwrap();

        let change_bytes = backend.save().unwrap();
        assert_eq!(
            get_state_json(change_bytes).unwrap(),
            serde_json::json!({
                "birds": {
                    "wrens": 3.0,
                    "sparrows": 15.0
                }
            })
        )
    }
}
23 automerge-cli/src/import.rs Normal file
@@ -0,0 +1,23 @@
use anyhow::Result;
use automerge_backend::Backend;
use automerge_frontend::{Frontend, Value};

fn initialize_from_json(json_value: &serde_json::Value) -> Result<Vec<u8>> {
    let value: Value = Value::from_json(json_value);

    let (_, initial_change) = Frontend::new_with_initial_state(value)?;
    let mut backend = Backend::new();
    backend.apply_local_change(initial_change)?;

    Ok(backend.save()?)
}

pub fn import_json(mut reader: impl std::io::Read, mut writer: impl std::io::Write) -> Result<()> {
    let mut buffer = String::new();
    reader.read_to_string(&mut buffer)?;

    let json_value: serde_json::Value = serde_json::from_str(&buffer)?;
    let changes_bytes = initialize_from_json(&json_value)?;
    writer.write_all(&changes_bytes)?;
    Ok(())
}
182
automerge-cli/src/main.rs
Normal file
182
automerge-cli/src/main.rs
Normal file
|
@ -0,0 +1,182 @@
|
|||
use std::{fs::File, path::PathBuf, str::FromStr};
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use clap::Clap;
|
||||
|
||||
mod change;
|
||||
mod examine;
|
||||
mod export;
|
||||
mod import;
|
||||
|
||||
#[derive(Debug, Clap)]
|
||||
#[clap(about = "Automerge CLI")]
|
||||
struct Opts {
|
||||
#[clap(subcommand)]
|
||||
cmd: Command,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum ExportFormat {
|
||||
Json,
|
||||
Toml,
|
||||
}
|
||||
|
||||
impl FromStr for ExportFormat {
|
||||
type Err = anyhow::Error;
|
||||
|
||||
fn from_str(input: &str) -> Result<ExportFormat> {
|
||||
match input {
|
||||
"json" => Ok(ExportFormat::Json),
|
||||
"toml" => Ok(ExportFormat::Toml),
|
||||
_ => Err(anyhow!("Invalid export format: {}", input)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clap)]
|
||||
enum Command {
|
||||
/// Output current state of an Automerge document in a specified format
|
||||
Export {
|
||||
/// Format for output: json, toml
|
||||
#[clap(long, short, default_value = "json")]
|
||||
format: ExportFormat,
|
||||
|
||||
/// Path that contains Automerge changes
|
||||
#[clap(parse(from_os_str))]
|
||||
changes_file: Option<PathBuf>,
|
||||
},
|
||||
|
||||
Import {
|
||||
/// Format for input: json, toml
|
||||
#[clap(long, short, default_value = "json")]
|
||||
format: ExportFormat,
|
||||
|
||||
#[clap(parse(from_os_str))]
|
||||
input_file: Option<PathBuf>,
|
||||
|
||||
/// Path to write Automerge changes to
|
||||
#[clap(parse(from_os_str), long("out"), short('o'))]
|
||||
changes_file: Option<PathBuf>,
|
||||
},
|
||||
|
||||
/// Read an automerge document from a file or stdin, perform a change on it and write a new
|
||||
/// document to stdout or the specified output file.
|
||||
Change {
|
||||
/// The change script to perform. Change scripts have the form <command> <path> [<JSON value>].
|
||||
/// The possible commands are 'set', 'insert', 'delete', and 'increment'.
|
||||
///
|
||||
/// Paths look like this: $["mapkey"][0]. They always lways start with a '$', then each
|
||||
/// subsequent segment of the path is either a string in double quotes to index a key in a
|
||||
/// map, or an integer index to address an array element.
|
||||
///
|
||||
/// Examples
|
||||
///
|
||||
/// ## set
|
||||
///
|
||||
/// > automerge change 'set $["someobject"] {"items": []}' somefile
|
||||
///
|
||||
/// ## insert
|
||||
///
|
||||
/// > automerge change 'insert $["someobject"]["items"][0] "item1"' somefile
|
||||
///
|
||||
/// ## increment
|
||||
///
|
||||
/// > automerge change 'increment $["mycounter"]'
|
||||
///
|
||||
/// ## delete
|
||||
///
|
||||
/// > automerge change 'delete $["someobject"]["items"]' somefile
|
||||
script: String,
|
||||
|
||||
/// The file to change, if omitted will assume stdin
|
||||
#[clap(parse(from_os_str))]
|
||||
input_file: Option<PathBuf>,
|
||||
|
||||
/// Path to write Automerge changes to, if omitted will write to stdout
|
||||
#[clap(parse(from_os_str), long("out"), short('o'))]
|
||||
output_file: Option<PathBuf>,
|
||||
},
|
||||
|
||||
/// Read an automerge document and print a JSON representation of the changes in it to stdout
|
||||
Examine { input_file: Option<PathBuf> },
|
||||
}
|
||||
|
||||
fn open_file_or_stdin(maybe_path: Option<PathBuf>) -> Result<Box<dyn std::io::Read>> {
|
||||
if atty::is(atty::Stream::Stdin) {
|
||||
if let Some(path) = maybe_path {
|
||||
Ok(Box::new(File::open(&path).unwrap()))
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"Must provide file path if not providing input via stdin"
|
||||
))
|
||||
}
|
||||
} else {
|
||||
Ok(Box::new(std::io::stdin()))
|
||||
}
|
||||
}
|
||||
|
||||
fn create_file_or_stdout(maybe_path: Option<PathBuf>) -> Result<Box<dyn std::io::Write>> {
|
||||
if atty::is(atty::Stream::Stdout) {
|
        if let Some(path) = maybe_path {
            Ok(Box::new(File::create(&path).unwrap()))
        } else {
            Err(anyhow!("Must provide file path if not piping to stdout"))
        }
    } else {
        Ok(Box::new(std::io::stdout()))
    }
}

fn main() -> Result<()> {
    let opts = Opts::parse();
    match opts.cmd {
        Command::Export {
            changes_file,
            format,
        } => match format {
            ExportFormat::Json => {
                let mut in_buffer = open_file_or_stdin(changes_file)?;
                export::export_json(
                    &mut in_buffer,
                    &mut std::io::stdout(),
                    atty::is(atty::Stream::Stdout),
                )
            }
            ExportFormat::Toml => unimplemented!(),
        },

        Command::Import {
            format,
            input_file,
            changes_file,
        } => match format {
            ExportFormat::Json => {
                let mut out_buffer = create_file_or_stdout(changes_file)?;
                let mut in_buffer = open_file_or_stdin(input_file)?;
                import::import_json(&mut in_buffer, &mut out_buffer)
            }
            ExportFormat::Toml => unimplemented!(),
        },
        Command::Change {
            input_file,
            output_file,
            script,
        } => {
            let in_buffer = open_file_or_stdin(input_file)?;
            let mut out_buffer = create_file_or_stdout(output_file)?;
            change::change(in_buffer, &mut out_buffer, script.as_str())
                .map_err(|e| anyhow::format_err!("Unable to make changes: {:?}", e))
        }
        Command::Examine { input_file } => {
            let in_buffer = open_file_or_stdin(input_file)?;
            let out_buffer = std::io::stdout();
            match examine::examine(in_buffer, out_buffer, atty::is(atty::Stream::Stdout)) {
                Ok(()) => {}
                Err(e) => {
                    eprintln!("Error: {:?}", e);
                }
            }
            Ok(())
        }
    }
}

@@ -2,45 +2,45 @@ use std::env;

use duct::cmd;

// #[test]
// fn import_stdin() {
//     let bin = env!("CARGO_BIN_EXE_automerge");
//     let initial_state_json = serde_json::json!({
//         "birds": {
//             "wrens": 3.0,
//             "sparrows": 15.0
//         }
//     });
//     let json_bytes = serde_json::to_string_pretty(&initial_state_json).unwrap();
#[test]
fn import_stdin() {
    let bin = env!("CARGO_BIN_EXE_automerge");
    let initial_state_json = serde_json::json!({
        "birds": {
            "wrens": 3.0,
            "sparrows": 15.0
        }
    });
    let json_bytes = serde_json::to_string_pretty(&initial_state_json).unwrap();

//     let no_pipe_no_file = cmd!(bin, "import").stdin_bytes(json_bytes.clone()).run();
    let no_pipe_no_file = cmd!(bin, "import").stdin_bytes(json_bytes.clone()).run();

//     assert!(no_pipe_no_file.is_err());
    assert!(no_pipe_no_file.is_err());

//     let pipe_no_file = cmd!(bin, "import")
//         .stdin_bytes(json_bytes.clone())
//         .stdout_capture()
//         .run();
    let pipe_no_file = cmd!(bin, "import")
        .stdin_bytes(json_bytes.clone())
        .stdout_capture()
        .run();
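    // capturing stdout means the child process is not attached to a terminal,
    // so the CLI's atty check passes without an --out file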

//     assert!(pipe_no_file.is_ok());
    assert!(pipe_no_file.is_ok());

//     let mut temp_file = std::env::temp_dir();
//     temp_file.push("import_test.mpl");
//     let no_pipe_file = cmd!(bin, "import", "--out", &temp_file)
//         .stdin_bytes(json_bytes)
//         .run();
    let mut temp_file = std::env::temp_dir();
    temp_file.push("import_test.mpl");
    let no_pipe_file = cmd!(bin, "import", "--out", &temp_file)
        .stdin_bytes(json_bytes)
        .run();

//     assert!(no_pipe_file.is_ok());
//     std::fs::remove_file(temp_file).unwrap();
// }
    assert!(no_pipe_file.is_ok());
    std::fs::remove_file(temp_file).unwrap();
}

// #[test]
// fn export_stdout() {
//     let bin = env!("CARGO_BIN_EXE_automerge");
//     let no_pipe_no_file = cmd!(bin, "export").stdout_capture().run();
#[test]
fn export_stdout() {
    let bin = env!("CARGO_BIN_EXE_automerge");
    let no_pipe_no_file = cmd!(bin, "export").stdout_capture().run();

//     assert!(no_pipe_no_file.is_err());
// }
    assert!(no_pipe_no_file.is_err());
}

#[test]
fn import_export_isomorphic() {

@@ -61,7 +61,6 @@ fn import_export_isomorphic() {
    assert_eq!(stdout, json_bytes);
}

/*
#[test]
fn import_change_export() {
    let bin = env!("CARGO_BIN_EXE_automerge");

@@ -90,4 +89,3 @@ fn import_change_export() {
    });
    assert_eq!(result, expected);
}
*/

1
automerge-frontend/.gitignore
vendored
Normal file

@@ -0,0 +1 @@
target/*

48
automerge-frontend/Cargo.toml
Normal file

@@ -0,0 +1,48 @@
[package]
name = "automerge-frontend"
version = "0.1.0"
authors = ["Alex Good <alex@memoryandthought.me>"]
edition = "2018"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
bench = false

[dependencies]
automerge-protocol = { path = "../automerge-protocol" }
futures = "0.3.4"
serde = { version = "^1.0", features=["derive"] }
serde_json = "^1.0"
uuid = { version = "^0.8.2", features=["v4"] }
maplit = "1.0.2"
thiserror = "1.0.16"
im-rc = "15.0.0"
unicode-segmentation = "1.7.1"
arbitrary = { version = "1", features = ["derive"], optional = true }
smol_str = "0.1.18"

[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dependencies]
getrandom = { version = "0.2.2", features=["js"] }
uuid = { version = "0.8.1", features = ["wasm-bindgen", "v4", "serde"] }

[dev-dependencies]
automerge-backend = { path = "../automerge-backend" }
criterion = "0.3.3"
rand = "0.8.2"
env_logger = "0.8.3"
log = "0.4.14"
wasm-bindgen-test = "0.3.22"
pretty_assertions = "0.7.1"

[[bench]]
name = "statetree_apply_diff"
harness = false

[[bench]]
name = "change"
harness = false

[features]
default = ["std"]
derive-arbitrary = ["arbitrary", "smol_str/arbitrary"]
std = []

4
automerge-frontend/README.md
Normal file

@@ -0,0 +1,4 @@
# Automerge Frontend

This is an implementation of the "frontend" of the automerge data structure. It
is designed to be used on the UI thread of a user-facing application.

41
automerge-frontend/benches/change.rs
Normal file

@@ -0,0 +1,41 @@
use automerge_frontend::{Frontend, InvalidChangeRequest, LocalChange, Path, Value};
use criterion::{black_box, criterion_group, criterion_main, BatchSize, Criterion};
use rand::{thread_rng, Rng};
use smol_str::SmolStr;
use unicode_segmentation::UnicodeSegmentation;

pub fn insert_long_string(c: &mut Criterion) {
    c.bench_function("Frontend::change insert long string", move |b| {
        b.iter_batched(
            || {
                let doc = Frontend::new();
                let random_string: SmolStr = thread_rng()
                    .sample_iter(&rand::distributions::Alphanumeric)
                    .take(6000)
                    .map(char::from)
                    .collect();
                (doc, random_string)
            },
            |(mut doc, string)| {
                #[allow(clippy::unit_arg)]
                black_box({
                    doc.change::<_, _, InvalidChangeRequest>(None, |d| {
                        d.add_change(LocalChange::set(
                            Path::root().key("text"),
                            Value::Text(string.graphemes(true).map(|s| s.into()).collect()),
                        ))
                    })
                    .unwrap()
                })
            },
            BatchSize::SmallInput,
        )
    });
}

criterion_group! {
    name = frontend_benches;
    config = Criterion::default().sample_size(10);
    targets = insert_long_string,
}
criterion_main!(frontend_benches);

133
automerge-frontend/benches/statetree_apply_diff.rs
Normal file

@@ -0,0 +1,133 @@
use amp::RootDiff;
use automerge_frontend::Frontend;
use automerge_protocol as amp;
use criterion::{black_box, criterion_group, criterion_main, BatchSize, Criterion};
use maplit::hashmap;

pub fn sequential_inserts_in_multiple_patches(c: &mut Criterion) {
    let actor_id = amp::ActorId::random();
    let make_list_opid = actor_id.op_id_at(1);
    let mut patches: Vec<amp::Patch> = vec![amp::Patch {
        actor: None,
        seq: None,
        clock: hashmap! {actor_id.clone() => 1},
        deps: Vec::new(),
        max_op: 1,
        pending_changes: 0,
        diffs: RootDiff {
            props: hashmap! {
                "text".into() => hashmap!{
                    make_list_opid.clone() => amp::Diff::Text(amp::TextDiff{
                        object_id: make_list_opid.clone().into(),
                        edits: Vec::new(),
                    }),
                }
            },
        },
    }];
    for index in 0..6000 {
        let op_num = index + 2;
        let this_op_id = actor_id.op_id_at(op_num as u64);
        patches.push(amp::Patch {
            actor: None,
            seq: None,
            clock: hashmap! {actor_id.clone() => op_num as u64},
            deps: Vec::new(),
            max_op: op_num as u64,
            pending_changes: 0,
            diffs: RootDiff {
                props: hashmap! {
                    "text".into() => hashmap!{
                        make_list_opid.clone() => amp::Diff::Text(amp::TextDiff{
                            object_id: make_list_opid.clone().into(),
                            edits: vec![amp::DiffEdit::SingleElementInsert{
                                index,
                                elem_id: this_op_id.clone().into(),
                                op_id: this_op_id.clone(),
                                value: amp::Diff::Value(amp::ScalarValue::Str("c".into())),
                            }],
                        })
                    }
                },
            },
        });
    }
    c.bench_function(
        "StateTreeValue::apply_diff sequential text inserts across multiple patches",
        move |b| {
            b.iter_batched(
                || {
                    let doc = Frontend::new();
                    (doc, patches.clone())
                },
                |(mut doc, patches)| {
                    #[allow(clippy::unit_arg)]
                    black_box({
                        for patch in patches.into_iter() {
                            doc.apply_patch(patch).unwrap();
                        }
                        doc
                    })
                },
                BatchSize::SmallInput,
            )
        },
    );
}

pub fn sequential_inserts_in_single_patch(c: &mut Criterion) {
    let actor_id = amp::ActorId::random();
    let make_list_opid = actor_id.op_id_at(1);
    let mut edits: Vec<amp::DiffEdit> = Vec::new();
    for index in 0..6000 {
        let op_num = index + 2;
        let this_op_id = actor_id.op_id_at(op_num as u64);
        edits.push(amp::DiffEdit::SingleElementInsert {
            index,
            elem_id: this_op_id.clone().into(),
            op_id: this_op_id.clone(),
            value: amp::Diff::Value(amp::ScalarValue::Str("c".into())),
        });
    }
    let patch: amp::Patch = amp::Patch {
        actor: None,
        seq: None,
        clock: hashmap! {actor_id => 1},
        deps: Vec::new(),
        max_op: 1,
        pending_changes: 0,
        diffs: RootDiff {
            props: hashmap! {
                "text".into() => hashmap!{
                    make_list_opid.clone() => amp::Diff::Text(amp::TextDiff{
                        object_id: make_list_opid.into(),
                        edits,
                    }),
                }
            },
        },
    };
    c.bench_function(
        "StateTreeValue::apply_diff sequential text inserts in a single patch",
        move |b| {
            b.iter_batched(
                || patch.clone(),
                |patch| {
                    #[allow(clippy::unit_arg)]
                    black_box({
                        let mut doc = Frontend::new();
                        doc.apply_patch(patch).unwrap()
                    })
                },
                BatchSize::SmallInput,
            )
        },
    );
}

criterion_group! {
    name = benches;
    config = Criterion::default().sample_size(10);
    targets = sequential_inserts_in_multiple_patches, sequential_inserts_in_single_patch,
}
criterion_main!(benches);

117
automerge-frontend/src/error.rs
Normal file

@@ -0,0 +1,117 @@
use std::{error::Error, fmt};

use automerge_protocol as amp;
use automerge_protocol::ObjectId;
use thiserror::Error;

use crate::{value::Value, Path};

#[derive(Debug, PartialEq)]
pub enum AutomergeFrontendError {
    InvalidChangeRequest,
    MissingObjectError(ObjectId),
    NoSuchPathError(Path),
    PathIsNotCounter,
    CannotOverwriteCounter,
    MismatchedSequenceNumber,
    InvalidActorIdString(String),
}

impl fmt::Display for AutomergeFrontendError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}

impl From<automerge_protocol::error::InvalidActorId> for AutomergeFrontendError {
    fn from(e: automerge_protocol::error::InvalidActorId) -> AutomergeFrontendError {
        AutomergeFrontendError::InvalidActorIdString(e.0)
    }
}

impl Error for AutomergeFrontendError {}

#[derive(Debug, PartialEq)]
pub enum InvalidInitialStateError {
    InitialStateMustBeMap,
}

impl fmt::Display for InvalidInitialStateError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}

impl Error for InvalidInitialStateError {}

//TODO Most of these errors should have paths associated with them to make it
//easier to understand where things are going wrong
#[derive(Error, Debug, PartialEq)]
pub enum InvalidPatch {
    #[error("Mismatched sequence number, expected: {expected} but got {actual}")]
    MismatchedSequenceNumber { expected: u64, actual: u64 },
    #[error("Received a diff inserting a non text object in a text object. Target object id was {object_id}, diff was {diff:?}")]
    InsertNonTextInTextObject {
        object_id: ObjectId,
        diff: amp::Diff,
    },
    #[error(
        "Received a diff for a character in a text object which created more than one character"
    )]
    InsertMultipleCharsInTextChar,
    #[error("Received a diff which had multiple values for a key in a table. Table id was {table_id}, diff was {diff:?}")]
    ConflictsReceivedForTableKey { table_id: ObjectId, diff: amp::Diff },
    #[error("Patch contained a diff which expected object with ID {object_id:?} to be {patch_expected_type:?} but we think it is {actual_type:?}")]
    MismatchingObjectType {
        object_id: ObjectId,
        patch_expected_type: Option<amp::ObjType>,
        actual_type: Option<amp::ObjType>,
    },
    #[error("Patch referenced an object id {patch_expected_id:?} at a path where we expected {actual_id:?}")]
    MismatchingObjectIDs {
        patch_expected_id: Option<ObjectId>,
        actual_id: ObjectId,
    },
    #[error("Patch attempted to reference an index which did not exist for object {object_id}")]
    InvalidIndex { object_id: ObjectId, index: usize },
    #[error("The patch tried to create an object but specified no value for the new object")]
    DiffCreatedObjectWithNoValue,
    #[error("The patch contained a diff with a list edit which referenced the '_head' of a list, rather than a specific element ID")]
    DiffEditWithHeadElemId,
    #[error("Value diff containing cursor")]
    ValueDiffContainedCursor,
}

#[derive(Error, Debug, PartialEq)]
pub enum InvalidChangeRequest {
    #[error("attempted to set the value of {path:?}, which is not allowed because that value is a counter")]
    CannotOverwriteCounter { path: Path },
    #[error("attempted an operation on a path that does not exist: {path:?}")]
    NoSuchPathError { path: Path },
    #[error("attempted to set a non map object {value:?} as the root")]
    CannotSetNonMapObjectAsRoot { value: Value },
    #[error("attempted to increment an object which is not a counter at {path:?}")]
    IncrementForNonCounterObject { path: Path },
    #[error("attempted to insert using a path which does not end in an index: {path:?}")]
    InsertWithNonSequencePath { path: Path },
    #[error("attempted to insert into an object which is not a sequence at {path:?}")]
    InsertForNonSequenceObject { path: Path },
    #[error("attempted to insert past the end of a sequence, path was {path:?}, max length of sequence is {sequence_length}")]
    InsertPastEndOfSequence { path: Path, sequence_length: u64 },
    #[error("attempted to insert something into a text object which is not a character, object: {object:?}")]
    InsertNonTextInTextObject { path: Path, object: Value },
    #[error("attempted to delete root object")]
    CannotDeleteRootObject,
    #[error("Attempted to access a missing index")]
    MissingIndexError {
        #[from]
        source: MissingIndexError,
    },
}

#[derive(Error, Debug, PartialEq)]
#[error("Attempted to access index {missing_index} in a collection with max index: {size_of_collection}")]
pub struct MissingIndexError {
    pub missing_index: usize,
    pub size_of_collection: usize,
}

523
automerge-frontend/src/lib.rs
Normal file

@@ -0,0 +1,523 @@
use automerge_protocol as amp;
use automerge_protocol::{ActorId, ChangeHash, ObjectId, Op, OpId, Patch};

mod error;
mod mutation;
mod path;
mod state_tree;
mod value;

use std::{collections::HashMap, convert::TryFrom, error::Error, fmt::Debug};

pub use error::{
    AutomergeFrontendError, InvalidChangeRequest, InvalidInitialStateError, InvalidPatch,
};
pub use mutation::{LocalChange, MutableDocument};
pub use path::Path;
use path::PathElement;
use state_tree::ResolvedPath;
pub use value::{Conflicts, Cursor, Primitive, Value};

/// Tracks the possible states of the frontend
///
/// What does this mean and why do we need it? The reason the frontend/backend
/// split exists in the first place is that we want to quickly apply local
/// changes (local in this sense means something like "on the UI thread") on a
/// low latency local cache whilst also shipping those same changes off to a
/// backend, which can reconcile them with historical changes and new changes
/// received over the network - work which may be more compute intensive and
/// so have too high a latency to be acceptable on the UI thread.
///
/// This frontend/backend split implies that we need to optimistically apply
/// local changes somehow. In order to do this we immediately apply changes to
/// a copy of the local state (state being an instance of [StateTree]) and
/// add the sequence number of the new change to a list of in flight requests.
/// In detail the logic looks like this:
///
/// When we receive a patch from the backend:
/// 1. Check that if the patch is for our actor ID then the sequence number of
///    the patch is the same as the sequence number of the oldest in flight
///    request.
/// 2. Apply the patch to the `reconciled_state` of the current state
/// 3. If there are no in flight requests remaining then transition from
///    the `WaitingForInFlightRequests` state to the `Reconciled` state,
///    moving the `reconciled_state` into the `Reconciled` enum branch
#[derive(Clone, Debug)]
enum FrontendState {
    /// The backend is processing some requests so we need to keep an optimistic version of the
    /// state.
    WaitingForInFlightRequests {
        /// The sequence numbers of in flight changes.
        in_flight_requests: Vec<u64>,
        /// The root state that the backend tracks.
        reconciled_root_state: state_tree::StateTree,
        /// The optimistic version of the root state that the user manipulates.
        optimistically_updated_root_state: state_tree::StateTree,
        /// A flag to track whether this state has seen a patch from the backend that represented
        /// changes from another actor.
        ///
        /// If this is true then our optimistic state will not equal the reconciled state so we may
        /// need to do extra work when moving to the reconciled state.
        seen_non_local_patch: bool,
        /// The maximum operation observed.
        max_op: u64,
    },
    /// The backend has processed all changes and we no longer wait for anything.
    Reconciled {
        /// The root state that the backend tracks.
        reconciled_root_state: state_tree::StateTree,
        /// A copy of the reconciled root state that we keep to be able to undo changes a user
        /// makes when changing the state.
        reconciled_root_state_copy_for_rollback: state_tree::StateTree,
        /// The maximum operation observed.
        max_op: u64,
        /// The dependencies of the last received patch.
        deps_of_last_received_patch: Vec<ChangeHash>,
    },
}
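
// The doc comment above compresses to a small state machine. A rough,
// self-contained model of it (a sketch with invented names, not crate code):
//
//     #[derive(Debug, PartialEq)]
//     enum Mode {
//         Reconciled,
//         Waiting { in_flight: Vec<u64> },
//     }
//
//     fn on_local_change(mode: &mut Mode, seq: u64) {
//         // a local change is applied optimistically and its seq queued
//         match mode {
//             Mode::Reconciled => *mode = Mode::Waiting { in_flight: vec![seq] },
//             Mode::Waiting { in_flight } => in_flight.push(seq),
//         }
//     }
//
//     fn on_backend_ack(mode: &mut Mode, patch_seq: u64) {
//         // a patch for our own actor must match the oldest queued seq;
//         // draining the queue returns us to the reconciled state
//         if let Mode::Waiting { in_flight } = mode {
//             assert_eq!(in_flight.first(), Some(&patch_seq));
//             in_flight.remove(0);
//             if in_flight.is_empty() {
//                 *mode = Mode::Reconciled;
//             }
//         }
//     }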

impl FrontendState {
    /// Apply a patch received from the backend to this frontend state in
    /// place, transitioning between `WaitingForInFlightRequests` and
    /// `Reconciled` as required
    fn apply_remote_patch(
        &mut self,
        self_actor: &ActorId,
        patch: Patch,
    ) -> Result<(), InvalidPatch> {
        match self {
            FrontendState::WaitingForInFlightRequests {
                in_flight_requests,
                reconciled_root_state,
                optimistically_updated_root_state,
                seen_non_local_patch,
                max_op: _,
            } => {
                let mut new_in_flight_requests = in_flight_requests.clone();
                // If the actor ID and seq exist then this patch corresponds
                // to a local change (i.e it came from Backend::apply_local_change)
                // so we don't need to apply it, we just need to remove it from
                // the in_flight_requests vector
                let mut is_local = false;
                if let (Some(patch_actor), Some(patch_seq)) = (&patch.actor, patch.seq) {
                    // If this is a local change corresponding to our actor then we
                    // need to match it against in flight requests
                    if self_actor == patch_actor {
                        // Check that if the patch is for our actor ID then it is not
                        // out of order
                        if new_in_flight_requests[0] != patch_seq {
                            return Err(InvalidPatch::MismatchedSequenceNumber {
                                expected: new_in_flight_requests[0],
                                actual: patch_seq,
                            });
                        }
                        is_local = true;
                        // unwrap should be fine here as `in_flight_requests` should never have zero length
                        // because we transition to reconciled state when that happens
                        let (_, remaining_requests) = new_in_flight_requests.split_first().unwrap();
                        new_in_flight_requests = remaining_requests.iter().copied().collect();
                    }
                }
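                // validate the whole diff against the current tree before
                // mutating anything, so a bad patch cannot leave the state
                // half-applied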
                let checked_diff = reconciled_root_state.check_diff(patch.diffs)?;

                reconciled_root_state.apply_diff(checked_diff);
                if new_in_flight_requests.is_empty() {
                    if *seen_non_local_patch {
                        *optimistically_updated_root_state = reconciled_root_state.clone();
                    }
                    *self = FrontendState::Reconciled {
                        reconciled_root_state: std::mem::take(reconciled_root_state),
                        reconciled_root_state_copy_for_rollback: std::mem::take(
                            optimistically_updated_root_state,
                        ),
                        max_op: patch.max_op,
                        deps_of_last_received_patch: patch.deps,
                    }
                } else {
                    *in_flight_requests = new_in_flight_requests;
                    *seen_non_local_patch = *seen_non_local_patch || !is_local;
                    // don't update max_op as we have progressed since then
                }
                Ok(())
            }
            FrontendState::Reconciled {
                reconciled_root_state,
                reconciled_root_state_copy_for_rollback,
                max_op,
                deps_of_last_received_patch,
            } => {
                let checked_diff = reconciled_root_state.check_diff(patch.diffs)?;

                reconciled_root_state.apply_diff(checked_diff.clone());
                // quicker and cheaper to apply the diff again than to clone the large root state
                reconciled_root_state_copy_for_rollback.apply_diff(checked_diff);
                *max_op = patch.max_op;
                *deps_of_last_received_patch = patch.deps;
                Ok(())
            }
        }
    }

    fn get_object_id(&self, path: &Path) -> Option<ObjectId> {
        self.resolve_path(path).and_then(|r| r.object_id())
    }

    fn get_value(&self, path: &Path) -> Option<Value> {
        self.resolve_path(path).map(|r| r.default_value())
    }

    fn resolve_path(&self, path: &Path) -> Option<ResolvedPath> {
        let root = match self {
            FrontendState::WaitingForInFlightRequests {
                optimistically_updated_root_state,
                ..
            } => optimistically_updated_root_state,
            FrontendState::Reconciled {
                reconciled_root_state,
                ..
            } => reconciled_root_state,
        };
        root.resolve_path(path)
    }

    /// Optimistically apply a local change. The change closure will be passed
    /// a `MutableDocument` which it can use to query the document state and
    /// make changes. It can also throw an error of type `E`. If an error is
    /// thrown in the closure no changes are made and the error is returned.
    pub fn optimistically_apply_change<F, O, E>(
        &mut self,
        actor: &ActorId,
        change_closure: F,
        seq: u64,
    ) -> Result<OptimisticChangeResult<O>, E>
    where
        E: Error,
        F: FnOnce(&mut dyn MutableDocument) -> Result<O, E>,
    {
        match self {
            FrontendState::WaitingForInFlightRequests {
                in_flight_requests,
                reconciled_root_state: _,
                optimistically_updated_root_state,
                seen_non_local_patch: _,
                max_op,
            } => {
                let mut mutation_tracker = mutation::MutationTracker::new(
                    optimistically_updated_root_state,
                    *max_op,
                    actor.clone(),
                );
                // TODO: somehow handle rolling back the optimistic state if the closure gives an
                // error
                let result = match change_closure(&mut mutation_tracker) {
                    Ok(result) => result,
                    Err(e) => {
                        // reset the original state
                        mutation_tracker.rollback();
                        return Err(e);
                    }
                };
                *max_op = mutation_tracker.max_op;
                let ops = mutation_tracker.ops();
                if !ops.is_empty() {
                    // we actually have made a change so expect it to be sent to the backend
                    in_flight_requests.push(seq);
                }

                Ok(OptimisticChangeResult {
                    ops,
                    deps: Vec::new(),
                    closure_result: result,
                })
            }
            FrontendState::Reconciled {
                reconciled_root_state,
                reconciled_root_state_copy_for_rollback,
                max_op,
                deps_of_last_received_patch,
            } => {
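                // run the closure against the rollback copy first; only if it
                // actually produces ops is that copy promoted to be the new
                // optimistic state below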
                let mut mutation_tracker = mutation::MutationTracker::new(
                    reconciled_root_state_copy_for_rollback,
                    *max_op,
                    actor.clone(),
                );
                let result = match change_closure(&mut mutation_tracker) {
                    Ok(result) => result,
                    Err(e) => {
                        // reset the original state
                        mutation_tracker.rollback();
                        return Err(e);
                    }
                };
                *max_op = mutation_tracker.max_op;
                let ops = mutation_tracker.ops();
                let in_flight_requests = vec![seq];
                let deps = deps_of_last_received_patch.clone();
                if !ops.is_empty() {
                    *self = FrontendState::WaitingForInFlightRequests {
                        in_flight_requests,
                        optimistically_updated_root_state: std::mem::take(
                            reconciled_root_state_copy_for_rollback,
                        ),
                        seen_non_local_patch: false,
                        reconciled_root_state: std::mem::take(reconciled_root_state),
                        max_op: *max_op,
                    }
                } else {
                    // the old and new states should be equal since we have no operations
                    debug_assert_eq!(
                        *reconciled_root_state_copy_for_rollback,
                        *reconciled_root_state
                    );
                    // we can remain in the reconciled frontend state since we didn't make a change
                };
                Ok(OptimisticChangeResult {
                    ops,
                    deps,
                    closure_result: result,
                })
            }
        }
    }

    fn in_flight_requests(&self) -> Vec<u64> {
        match self {
            FrontendState::WaitingForInFlightRequests {
                in_flight_requests, ..
            } => in_flight_requests.clone(),
            _ => Vec::new(),
        }
    }

    fn max_op(&self) -> u64 {
        match self {
            FrontendState::WaitingForInFlightRequests { max_op, .. } => *max_op,
            FrontendState::Reconciled { max_op, .. } => *max_op,
        }
    }

    fn value(&self) -> Value {
        match self {
            FrontendState::WaitingForInFlightRequests {
                optimistically_updated_root_state,
                ..
            } => optimistically_updated_root_state.value(),
            FrontendState::Reconciled {
                reconciled_root_state,
                ..
            } => reconciled_root_state.value(),
        }
    }
}

pub struct Frontend {
    pub actor_id: ActorId,
    pub seq: u64,
    /// The current state of the frontend, see the description of
    /// `FrontendState` for details.
    state: FrontendState,
    /// A cache of the value of this frontend
    cached_value: Option<Value>,
    /// A function for generating timestamps
    timestamper: Box<dyn Fn() -> Option<i64>>,
}

impl Debug for Frontend {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
        let Frontend {
            actor_id,
            seq,
            state,
            cached_value,
            timestamper: _,
        } = self;
        {
            let mut builder = f.debug_struct("Frontend");
            let _ = builder.field("actor_id", &actor_id);
            let _ = builder.field("seq", &seq);
            let _ = builder.field("state", &state);
            let _ = builder.field("cached_value", &cached_value);
            builder.finish()
        }
    }
}

#[cfg(feature = "std")]
impl Default for Frontend {
    fn default() -> Self {
        Self::new()
    }
}

impl Frontend {
    #[cfg(feature = "std")]
    pub fn new() -> Self {
        let system_time = || {
            std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .ok()
                .and_then(|d| i64::try_from(d.as_millis()).ok())
        };
        Self::new_with_timestamper(Box::new(system_time))
    }

    #[cfg(feature = "std")]
    pub fn new_with_actor_id(actor_id: &[u8]) -> Self {
        let system_time = || {
            std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .ok()
                .and_then(|d| i64::try_from(d.as_millis()).ok())
        };
        Self::new_with_timestamper_and_actor_id(Box::new(system_time), actor_id)
    }

    pub fn new_with_timestamper(t: Box<dyn Fn() -> Option<i64>>) -> Self {
        Self::new_with_timestamper_and_actor_id(t, uuid::Uuid::new_v4().as_bytes())
    }

    pub fn new_with_timestamper_and_actor_id(
        t: Box<dyn Fn() -> Option<i64>>,
        actor_id: &[u8],
    ) -> Self {
        let root_state = state_tree::StateTree::new();
        Frontend {
            actor_id: ActorId::from_bytes(actor_id),
            seq: 0,
            state: FrontendState::Reconciled {
                reconciled_root_state: root_state.clone(),
                reconciled_root_state_copy_for_rollback: root_state,
                max_op: 0,
                deps_of_last_received_patch: Vec::new(),
            },
            cached_value: None,
            timestamper: t,
        }
    }

    #[cfg(feature = "std")]
    pub fn new_with_initial_state(
        initial_state: Value,
    ) -> Result<(Self, amp::Change), InvalidInitialStateError> {
        match &initial_state {
            Value::Map(kvs) => {
                let mut front = Frontend::new();
                let (init_ops, _) =
                    kvs.iter()
                        .fold((Vec::new(), 1), |(mut ops, max_op), (k, v)| {
                            let (more_ops, max_op) = value::value_to_op_requests(
                                &front.actor_id,
                                max_op,
                                ObjectId::Root,
                                &amp::Key::Map(k.clone()),
                                v,
                                false,
                            );
                            ops.extend(more_ops);
                            (ops, max_op)
                        });

                let init_change_request = amp::Change {
                    actor_id: front.actor_id.clone(),
                    start_op: 1,
                    time: (front.timestamper)().unwrap_or(0),
                    seq: 1,
                    message: Some("Initialization".to_string()),
                    hash: None,
                    deps: Vec::new(),
                    operations: init_ops,
                    extra_bytes: Vec::new(),
                };
                // Unwrap here is fine because it should be impossible to
                // cause an error applying a local change from a `Value`. If
                // that happens it is our error, not the user's.
                front.change(Some("initialization".into()), |doc| {
                    doc.add_change(LocalChange::set(Path::root(), initial_state))
                        .map_err(|_| InvalidInitialStateError::InitialStateMustBeMap)
                })?;
                Ok((front, init_change_request))
            }
            _ => Err(InvalidInitialStateError::InitialStateMustBeMap),
        }
    }

    pub fn state(&mut self) -> &Value {
        if let Some(ref v) = self.cached_value {
            v
        } else {
            let value = self.state.value();
            self.cached_value = Some(value);
            self.cached_value.as_ref().unwrap()
        }
    }

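    /// Run `change_closure` against the current state and, if it produced any
    /// operations, bump `seq` and return those operations as an `amp::Change`
    /// ready to be sent to the backend (`None` if nothing changed).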
    pub fn change<F, O, E>(
        &mut self,
        message: Option<String>,
        change_closure: F,
    ) -> Result<(O, Option<amp::Change>), E>
    where
        E: Error,
        F: FnOnce(&mut dyn MutableDocument) -> Result<O, E>,
    {
        let start_op = self.state.max_op() + 1;
        let change_result =
            self.state
                .optimistically_apply_change(&self.actor_id, change_closure, self.seq + 1)?;
        self.cached_value = None;
        if !change_result.ops.is_empty() {
            self.seq += 1;
            let change = amp::Change {
                start_op,
                actor_id: self.actor_id.clone(),
                seq: self.seq,
                time: (self.timestamper)().unwrap_or(0),
                message,
                hash: None,
                deps: change_result.deps,
                operations: change_result.ops,
                extra_bytes: Vec::new(),
            };
            Ok((change_result.closure_result, Some(change)))
        } else {
            Ok((change_result.closure_result, None))
        }
    }

    pub fn apply_patch(&mut self, patch: Patch) -> Result<(), InvalidPatch> {
        self.cached_value = None;
        if let Some(seq) = patch.clock.get(&self.actor_id) {
            if *seq > self.seq {
                self.seq = *seq;
            }
        }
        self.state.apply_remote_patch(&self.actor_id, patch)?;
        Ok(())
    }

    pub fn get_object_id(&self, path: &Path) -> Option<ObjectId> {
        self.state.get_object_id(path)
    }

    pub fn in_flight_requests(&self) -> Vec<u64> {
        self.state.in_flight_requests()
    }

    /// Gets the set of values for `path`, returns None if the path does not
    /// exist
    pub fn get_conflicts(&self, path: &Path) -> Option<HashMap<OpId, Value>> {
        self.state.resolve_path(path).map(|o| o.values())
    }

    /// Returns the value given by path, if it exists
    pub fn get_value(&self, path: &Path) -> Option<Value> {
        self.state.get_value(path)
    }
}
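
// For orientation, a typical round trip through this API, mirroring the
// `benches/change.rs` example above (a hypothetical sketch, not crate code):
//
//     use automerge_frontend::{Frontend, InvalidChangeRequest, LocalChange, Path, Value};
//     use unicode_segmentation::UnicodeSegmentation;
//
//     let mut doc = Frontend::new();
//     let (_, change) = doc
//         .change::<_, _, InvalidChangeRequest>(Some("init".into()), |d| {
//             d.add_change(LocalChange::set(
//                 Path::root().key("text"),
//                 Value::Text("hi".graphemes(true).map(|s| s.into()).collect()),
//             ))
//         })
//         .unwrap();
//     assert!(change.is_some()); // ops were produced, so a change is emitted
//     // the backend's eventual patch comes back in through `apply_patch`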

struct OptimisticChangeResult<O> {
    ops: Vec<Op>,
    deps: Vec<ChangeHash>,
    closure_result: O,
}

697
automerge-frontend/src/mutation.rs
Normal file

@@ -0,0 +1,697 @@
use automerge_protocol as amp;
use unicode_segmentation::UnicodeSegmentation;

use crate::{
    error::InvalidChangeRequest,
    state_tree::{
        LocalOperationResult, MultiGrapheme, MultiValue, ResolvedPath, ResolvedPathMut,
        SetOrInsertPayload, StateTree,
    },
    value::{Cursor, Primitive, Value},
    Path, PathElement,
};

pub trait MutableDocument {
    fn value_at_path(&self, path: &Path) -> Option<Value>;
    fn cursor_to_path(&self, path: &Path) -> Option<Cursor>;
    fn add_change(&mut self, change: LocalChange) -> Result<(), InvalidChangeRequest>;
}

#[derive(Debug, PartialEq, Clone)]
pub enum LocalOperation {
    Set(Value),
    Delete,
    Increment(i64),
    Insert(Value),
    InsertMany(Vec<Value>),
}

#[derive(Debug, PartialEq, Clone)]
pub struct LocalChange {
    path: Path,
    operation: LocalOperation,
}

impl LocalChange {
    /// Set the value at `path` to `value`
    pub fn set<TV>(path: Path, value: TV) -> LocalChange
    where
        TV: Into<Value>,
    {
        LocalChange {
            path,
            operation: LocalOperation::Set(value.into()),
        }
    }

    /// Delete the entry at `path`
    pub fn delete(path: Path) -> LocalChange {
        LocalChange {
            path,
            operation: LocalOperation::Delete,
        }
    }

    /// Increment the counter at `path` by 1
    pub fn increment(path: Path) -> LocalChange {
        LocalChange {
            path,
            operation: LocalOperation::Increment(1),
        }
    }

    /// Increment the counter at path by a (possibly negative) amount `by`
    pub fn increment_by(path: Path, by: i64) -> LocalChange {
        LocalChange {
            path,
            operation: LocalOperation::Increment(by),
        }
    }

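    /// Insert `value` into the sequence or text object at `path.parent()`, at
    /// the index given by the final element of `path`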
    pub fn insert(path: Path, value: Value) -> LocalChange {
        LocalChange {
            path,
            operation: LocalOperation::Insert(value),
        }
    }

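    /// Insert each of `values`, in order, starting at the index given by the
    /// final element of `path`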
    pub fn insert_many(path: Path, values: Vec<Value>) -> LocalChange {
        LocalChange {
            path,
            operation: LocalOperation::InsertMany(values),
        }
    }
}

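/// Enough information to undo each kind of local operation, captured so that
/// `MutationTracker::rollback` can restore the state tree if the change
/// closure fails partway through.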
enum LocalOperationForRollback {
|
||||
Set { old: Option<MultiValue> },
|
||||
SetList { old: MultiValue },
|
||||
SetText { old: MultiGrapheme },
|
||||
Delete { old: MultiValue },
|
||||
DeleteText { old: MultiGrapheme },
|
||||
Insert,
|
||||
InsertMany { count: usize },
|
||||
Increment { by: i64 },
|
||||
}
|
||||
|
||||
/// `MutationTracker` is used as the context in which a mutation closure is
|
||||
/// applied. The mutation tracker implements `MutableDocument`, which is how it
|
||||
/// captures the changes that the mutation closure is making.
|
||||
///
|
||||
/// For each operation in the mutation closure the `MutationTracker` generates
|
||||
/// a diff and immediately applies it to the `StateTree` it is constructed
|
||||
/// with. It also adds the change to a set of operations. This set of operations
|
||||
/// is used to generate a `ChangeRequest` once the closure is completed.
|
||||
pub struct MutationTracker<'a> {
|
||||
state: &'a mut StateTree,
|
||||
ops: Vec<amp::Op>,
|
||||
copies_for_rollback: Vec<(Path, LocalOperationForRollback)>,
|
||||
pub max_op: u64,
|
||||
actor_id: amp::ActorId,
|
||||
}
|
||||
|
||||
impl<'a> MutationTracker<'a> {
|
||||
pub(crate) fn new(state_tree: &'a mut StateTree, max_op: u64, actor_id: amp::ActorId) -> Self {
|
||||
Self {
|
||||
state: state_tree,
|
||||
ops: Vec::new(),
|
||||
copies_for_rollback: Vec::new(),
|
||||
max_op,
|
||||
actor_id,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn ops(self) -> Vec<amp::Op> {
|
||||
self.ops
|
||||
}
|
||||
|
||||
/// If the `value` is a map, individually assign each k,v in it to a key in
|
||||
/// the root object
|
||||
fn wrap_root_assignment(&mut self, value: Value) -> Result<(), InvalidChangeRequest> {
|
||||
match value {
|
||||
Value::Map(kvs) => {
|
||||
for (k, v) in kvs.iter() {
|
||||
self.add_change(LocalChange::set(Path::root().key(k.clone()), v.clone()))?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(InvalidChangeRequest::CannotSetNonMapObjectAsRoot {
|
||||
value: value.clone(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn apply_state_change(&mut self, change: LocalOperationResult) {
|
||||
self.max_op += change.new_ops.len() as u64;
|
||||
self.ops.extend(change.new_ops);
|
||||
}
|
||||
|
||||
fn insert_helper<I>(&mut self, path: &Path, values: I) -> Result<(), InvalidChangeRequest>
|
||||
where
|
||||
I: ExactSizeIterator<Item = Value>,
|
||||
{
|
||||
if let Some(name) = path.name() {
|
||||
let index = match name {
|
||||
PathElement::Index(i) => i,
|
||||
_ => {
|
||||
return Err(InvalidChangeRequest::InsertWithNonSequencePath {
|
||||
path: path.clone(),
|
||||
})
|
||||
}
|
||||
};
|
||||
if let Some(parent) = self.state.resolve_path_mut(&path.parent()) {
|
||||
match parent {
|
||||
ResolvedPathMut::List(mut list_target) => {
|
||||
let payload = SetOrInsertPayload {
|
||||
start_op: self.max_op + 1,
|
||||
actor: &self.actor_id.clone(),
|
||||
value: values,
|
||||
};
|
||||
let res = list_target.insert_many(*index, payload)?;
|
||||
self.apply_state_change(res);
|
||||
}
|
||||
ResolvedPathMut::Text(mut text_target) => {
|
||||
let mut chars = Vec::with_capacity(values.len());
|
||||
for value in values {
|
||||
match value {
|
||||
Value::Primitive(Primitive::Str(s)) => {
|
||||
if s.graphemes(true).count() == 1 {
|
||||
chars.push(s.clone())
|
||||
} else {
|
||||
return Err(
|
||||
InvalidChangeRequest::InsertNonTextInTextObject {
|
||||
path: path.clone(),
|
||||
object: Value::Primitive(Primitive::Str(s)),
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
return Err(InvalidChangeRequest::InsertNonTextInTextObject {
|
||||
path: path.clone(),
|
||||
object: value.clone(),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
let payload = SetOrInsertPayload {
|
||||
start_op: self.max_op + 1,
|
||||
actor: &self.actor_id.clone(),
|
||||
value: chars.into_iter(),
|
||||
};
|
||||
let res = text_target.insert_many(*index, payload)?;
|
||||
self.apply_state_change(res);
|
||||
}
|
||||
_ => return Err(InvalidChangeRequest::NoSuchPathError { path: path.clone() }),
|
||||
};
|
||||
Ok(())
|
||||
} else {
|
||||
Err(InvalidChangeRequest::InsertForNonSequenceObject { path: path.clone() })
|
||||
}
|
||||
} else {
|
||||
Err(InvalidChangeRequest::NoSuchPathError { path: path.clone() })
|
||||
}
|
||||
}
|
||||
|
||||
/// Undo the operations applied to this document.
|
||||
///
|
||||
/// This is used in the case of an error to undo the already applied changes.
|
||||
pub fn rollback(self) {
|
||||
for (path, op) in self.copies_for_rollback.into_iter().rev() {
|
||||
match op {
|
||||
LocalOperationForRollback::Set { old } => {
|
||||
if let Some(key) = path.name() {
|
||||
if let Some(parent) = self.state.resolve_path_mut(&path.parent()) {
|
||||
match (key, parent) {
|
||||
(PathElement::Key(key), ResolvedPathMut::Root(mut map)) => {
|
||||
map.rollback_set(key.clone(), old)
|
||||
}
|
||||
(PathElement::Key(key), ResolvedPathMut::Map(mut map)) => {
|
||||
map.rollback_set(key.clone(), old)
|
||||
}
|
||||
(PathElement::Key(key), ResolvedPathMut::Table(mut table)) => {
|
||||
table.rollback_set(key.clone(), old)
|
||||
}
|
||||
(PathElement::Key(_), ResolvedPathMut::List(_))
|
||||
| (PathElement::Key(_), ResolvedPathMut::Text(_))
|
||||
| (PathElement::Key(_), ResolvedPathMut::Character(_))
|
||||
| (PathElement::Key(_), ResolvedPathMut::Counter(_))
|
||||
| (PathElement::Key(_), ResolvedPathMut::Primitive(_)) => {
|
||||
unreachable!("found non object with key")
|
||||
}
|
||||
(PathElement::Index(_), ResolvedPathMut::List(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Text(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Root(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Map(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Table(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Character(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Counter(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Primitive(_)) => {
|
||||
unreachable!("found index element while rolling back a set")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
LocalOperationForRollback::SetList { old } => {
|
||||
if let Some(key) = path.name() {
|
||||
if let Some(parent) = self.state.resolve_path_mut(&path.parent()) {
|
||||
match (key, parent) {
|
||||
(PathElement::Key(_), _) => {
|
||||
unreachable!("found key element while rolling back a setlist")
|
||||
}
|
||||
(PathElement::Index(i), ResolvedPathMut::List(mut list)) => {
|
||||
list.rollback_set(*i as usize, old)
|
||||
}
|
||||
(PathElement::Index(_), ResolvedPathMut::Text(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Root(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Map(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Table(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Character(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Counter(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Primitive(_)) => {
|
||||
unreachable!("found non list with index")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
LocalOperationForRollback::SetText { old } => {
|
||||
if let Some(key) = path.name() {
|
||||
if let Some(parent) = self.state.resolve_path_mut(&path.parent()) {
|
||||
match (key, parent) {
|
||||
(PathElement::Key(_), _) => {
|
||||
unreachable!("found key element while rolling back a settext")
|
||||
}
|
||||
(PathElement::Index(i), ResolvedPathMut::Text(mut text)) => {
|
||||
text.rollback_set(*i as usize, old)
|
||||
}
|
||||
(PathElement::Index(_), ResolvedPathMut::List(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Root(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Map(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Table(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Character(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Counter(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Primitive(_)) => {
|
||||
unreachable!("found non text with index")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
LocalOperationForRollback::Delete { old } => {
|
||||
if let Some(key) = path.name() {
|
||||
if let Some(parent) = self.state.resolve_path_mut(&path.parent()) {
|
||||
match (key, parent) {
|
||||
(PathElement::Key(key), ResolvedPathMut::Root(mut map)) => {
|
||||
map.rollback_delete(key.clone(), old)
|
||||
}
|
||||
(PathElement::Key(key), ResolvedPathMut::Map(mut map)) => {
|
||||
map.rollback_delete(key.clone(), old)
|
||||
}
|
||||
(PathElement::Key(key), ResolvedPathMut::Table(mut table)) => {
|
||||
table.rollback_delete(key.clone(), old)
|
||||
}
|
||||
(PathElement::Key(_), ResolvedPathMut::List(_))
|
||||
| (PathElement::Key(_), ResolvedPathMut::Text(_))
|
||||
| (PathElement::Key(_), ResolvedPathMut::Character(_))
|
||||
| (PathElement::Key(_), ResolvedPathMut::Counter(_))
|
||||
| (PathElement::Key(_), ResolvedPathMut::Primitive(_)) => {
|
||||
unreachable!("found non object with key")
|
||||
}
|
||||
(PathElement::Index(i), ResolvedPathMut::List(mut list)) => {
|
||||
list.rollback_delete(*i as usize, old)
|
||||
}
|
||||
(PathElement::Index(_), ResolvedPathMut::Text(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Root(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Map(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Table(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Character(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Counter(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Primitive(_)) => {
|
||||
unreachable!("found non list with index")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
LocalOperationForRollback::DeleteText { old } => {
|
||||
if let Some(key) = path.name() {
|
||||
if let Some(parent) = self.state.resolve_path_mut(&path.parent()) {
|
||||
match (key, parent) {
|
||||
(PathElement::Key(_), ResolvedPathMut::Root(_))
|
||||
| (PathElement::Key(_), ResolvedPathMut::Map(_))
|
||||
| (PathElement::Key(_), ResolvedPathMut::Table(_))
|
||||
| (PathElement::Key(_), ResolvedPathMut::List(_))
|
||||
| (PathElement::Key(_), ResolvedPathMut::Text(_))
|
||||
| (PathElement::Key(_), ResolvedPathMut::Character(_))
|
||||
| (PathElement::Key(_), ResolvedPathMut::Counter(_))
|
||||
| (PathElement::Key(_), ResolvedPathMut::Primitive(_)) => {
|
||||
unreachable!("found key for SetText")
|
||||
}
|
||||
(PathElement::Index(i), ResolvedPathMut::Text(mut text)) => {
|
||||
text.rollback_delete(*i as usize, old)
|
||||
}
|
||||
(PathElement::Index(_), ResolvedPathMut::List(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Root(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Map(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Table(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Character(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Counter(_))
|
||||
| (PathElement::Index(_), ResolvedPathMut::Primitive(_)) => {
|
||||
unreachable!("found non text with index")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
LocalOperationForRollback::Insert => {
|
||||
if let Some(PathElement::Index(index)) = path.name() {
|
||||
if let Some(parent) = self.state.resolve_path_mut(&path.parent()) {
|
||||
match parent {
|
||||
ResolvedPathMut::List(mut list) => {
|
||||
list.rollback_insert(*index as usize)
|
||||
}
|
||||
ResolvedPathMut::Text(mut text) => {
|
||||
text.rollback_insert(*index as usize)
|
||||
}
|
||||
ResolvedPathMut::Root(_)
|
||||
| ResolvedPathMut::Map(_)
|
||||
| ResolvedPathMut::Table(_)
|
||||
| ResolvedPathMut::Character(_)
|
||||
| ResolvedPathMut::Counter(_)
|
||||
| ResolvedPathMut::Primitive(_) => {
|
||||
unreachable!("Found non list object in rollback insert")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
LocalOperationForRollback::InsertMany { count } => {
|
||||
if let Some(PathElement::Index(index)) = path.name() {
|
||||
if let Some(parent) = self.state.resolve_path_mut(&path.parent()) {
|
||||
match parent {
|
||||
ResolvedPathMut::List(mut list) => {
|
||||
for _ in 0..count {
|
||||
list.rollback_insert(*index as usize)
|
||||
}
|
||||
}
|
||||
ResolvedPathMut::Text(mut text) => {
|
||||
for _ in 0..count {
|
||||
text.rollback_insert(*index as usize)
|
||||
}
|
||||
}
|
||||
ResolvedPathMut::Root(_)
|
||||
| ResolvedPathMut::Map(_)
|
||||
| ResolvedPathMut::Table(_)
|
||||
| ResolvedPathMut::Character(_)
|
||||
| ResolvedPathMut::Counter(_)
|
||||
| ResolvedPathMut::Primitive(_) => {
|
||||
unreachable!("Found non list object in rollback insert")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
LocalOperationForRollback::Increment { by } => {
|
||||
if path.name().is_some() {
|
||||
if let Some(ResolvedPathMut::Counter(mut counter)) =
|
||||
self.state.resolve_path_mut(&path)
|
||||
{
|
||||
counter.rollback_increment(by)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> MutableDocument for MutationTracker<'a> {
|
||||
fn value_at_path(&self, path: &Path) -> Option<Value> {
|
||||
self.state.resolve_path(path).map(|r| r.default_value())
|
||||
}
|
||||
|
||||
fn cursor_to_path(&self, path: &Path) -> Option<Cursor> {
|
||||
if let Some(PathElement::Index(i)) = path.name() {
|
||||
if let Some(parent) = self.state.resolve_path(&path.parent()) {
|
||||
match parent {
|
||||
ResolvedPath::List(list_target) => list_target.get_cursor(*i).ok(),
|
||||
ResolvedPath::Text(text_target) => text_target.get_cursor(*i).ok(),
|
||||
_ => None,
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn add_change(&mut self, change: LocalChange) -> Result<(), InvalidChangeRequest> {
|
||||
match change.operation {
|
||||
LocalOperation::Set(value) => {
|
||||
//TODO double resolving is ugly here
|
||||
if let Some(ResolvedPath::Counter(_)) = self.state.resolve_path(&change.path) {
|
||||
return Err(InvalidChangeRequest::CannotOverwriteCounter { path: change.path });
|
||||
};
|
||||
if let Some(name) = change.path.name() {
|
||||
if let Some(parent) = self.state.resolve_path_mut(&change.path.parent()) {
|
||||
let (rollback_op, res) = match (name, parent) {
|
||||
(
|
||||
PathElement::Key(ref k),
|
||||
ResolvedPathMut::Root(ref mut root_target),
|
||||
) => {
|
||||
let payload = SetOrInsertPayload {
|
||||
start_op: self.max_op + 1,
|
||||
actor: &self.actor_id.clone(),
|
||||
value,
|
||||
};
|
||||
let (old, res) = root_target.set_key(k.clone(), payload);
|
||||
Ok((LocalOperationForRollback::Set { old }, res))
|
||||
}
|
||||
(PathElement::Key(ref k), ResolvedPathMut::Map(ref mut maptarget)) => {
|
||||
let payload = SetOrInsertPayload {
|
||||
start_op: self.max_op + 1,
|
||||
actor: &self.actor_id.clone(),
|
||||
value,
|
||||
};
|
||||
let (old, res) = maptarget.set_key(k.clone(), payload);
|
||||
Ok((LocalOperationForRollback::Set { old }, res))
|
||||
}
|
||||
(
|
||||
PathElement::Key(ref k),
|
||||
ResolvedPathMut::Table(ref mut tabletarget),
|
||||
) => {
|
||||
let payload = SetOrInsertPayload {
|
||||
start_op: self.max_op + 1,
|
||||
actor: &self.actor_id.clone(),
|
||||
value,
|
||||
};
|
||||
let (old, res) = tabletarget.set_key(k.clone(), payload);
|
||||
Ok((LocalOperationForRollback::Set { old }, res))
|
||||
}
|
||||
// In this case we are trying to modify a key in something which is not
|
||||
// an object or a table, so the path does not exist
|
||||
(PathElement::Key(_), _) => {
|
||||
Err(InvalidChangeRequest::NoSuchPathError {
|
||||
path: change.path.clone(),
|
||||
})
|
||||
}
|
||||
(PathElement::Index(i), ResolvedPathMut::List(ref mut list_target)) => {
|
||||
let payload = SetOrInsertPayload {
|
||||
start_op: self.max_op + 1,
|
||||
actor: &self.actor_id.clone(),
|
||||
value,
|
||||
};
|
||||
let (old, res) = list_target.set(*i, payload)?;
|
||||
Ok((LocalOperationForRollback::SetList { old }, res))
|
||||
}
|
||||
(PathElement::Index(i), ResolvedPathMut::Text(ref mut text)) => {
|
||||
match value {
|
||||
Value::Primitive(Primitive::Str(s)) => {
|
||||
if s.graphemes(true).count() == 1 {
|
||||
                                            let payload = SetOrInsertPayload {
                                                start_op: self.max_op + 1,
                                                actor: &self.actor_id.clone(),
                                                value: s,
                                            };
                                            let (old, res) = text.set(*i, payload)?;
                                            Ok((LocalOperationForRollback::SetText { old }, res))
                                        } else {
                                            Err(InvalidChangeRequest::InsertNonTextInTextObject {
                                                path: change.path.clone(),
                                                object: Value::Primitive(Primitive::Str(s)),
                                            })
                                        }
                                    }
                                    _ => Err(InvalidChangeRequest::InsertNonTextInTextObject {
                                        path: change.path.clone(),
                                        object: value.clone(),
                                    }),
                                }
                            }
                            (PathElement::Index(_), _) => {
                                Err(InvalidChangeRequest::InsertWithNonSequencePath {
                                    path: change.path.clone(),
                                })
                            }
                        }?;

                        self.copies_for_rollback.push((change.path, rollback_op));
                        self.apply_state_change(res);
                        Ok(())
                    } else {
                        Err(InvalidChangeRequest::NoSuchPathError { path: change.path })
                    }
                } else {
                    self.wrap_root_assignment(value)
                }
            }
            LocalOperation::Delete => {
                if let Some(name) = change.path.name() {
                    if let Some(pr) = self.state.resolve_path_mut(&change.path.parent()) {
                        let (rollback_op, state_change) = match pr {
                            ResolvedPathMut::Counter(_) => {
                                return Err(InvalidChangeRequest::NoSuchPathError {
                                    path: change.path,
                                })
                            }
                            ResolvedPathMut::List(mut l) => match name {
                                PathElement::Index(i) => {
                                    let (old, res) = l.remove(*i)?;
                                    (LocalOperationForRollback::Delete { old }, res)
                                }
                                _ => {
                                    return Err(InvalidChangeRequest::NoSuchPathError {
                                        path: change.path,
                                    })
                                }
                            },
                            ResolvedPathMut::Text(mut t) => match name {
                                PathElement::Index(i) => {
                                    let (old, res) = t.remove(*i)?;
                                    (LocalOperationForRollback::DeleteText { old }, res)
                                }
                                _ => {
                                    return Err(InvalidChangeRequest::NoSuchPathError {
                                        path: change.path,
                                    })
                                }
                            },
                            ResolvedPathMut::Primitive(_) => {
                                return Err(InvalidChangeRequest::NoSuchPathError {
                                    path: change.path,
                                })
                            }
                            ResolvedPathMut::Map(mut m) => match name {
                                PathElement::Key(k) => {
                                    let (old, res) = m.delete_key(k).ok_or_else(|| {
                                        InvalidChangeRequest::NoSuchPathError {
                                            path: change.path.clone(),
                                        }
                                    })?;
                                    (LocalOperationForRollback::Delete { old }, res)
                                }
                                _ => {
                                    return Err(InvalidChangeRequest::NoSuchPathError {
                                        path: change.path,
                                    })
                                }
                            },
                            ResolvedPathMut::Table(mut t) => match name {
                                PathElement::Key(k) => {
                                    let (old, res) = t.delete_key(k).ok_or_else(|| {
                                        InvalidChangeRequest::NoSuchPathError {
                                            path: change.path.clone(),
                                        }
                                    })?;
                                    (LocalOperationForRollback::Delete { old }, res)
                                }
                                _ => {
                                    return Err(InvalidChangeRequest::NoSuchPathError {
                                        path: change.path,
                                    })
                                }
                            },
                            ResolvedPathMut::Character(_) => {
                                return Err(InvalidChangeRequest::NoSuchPathError {
                                    path: change.path,
                                })
                            }
                            ResolvedPathMut::Root(mut r) => match name {
                                PathElement::Key(k) => {
                                    let (old, res) = r.delete_key(k).ok_or_else(|| {
                                        InvalidChangeRequest::NoSuchPathError {
                                            path: change.path.clone(),
                                        }
                                    })?;
                                    (LocalOperationForRollback::Delete { old }, res)
                                }
                                _ => {
                                    return Err(InvalidChangeRequest::NoSuchPathError {
                                        path: change.path,
                                    })
                                }
                            },
                        };
                        self.copies_for_rollback.push((change.path, rollback_op));
                        self.apply_state_change(state_change);
                        Ok(())
                    } else {
                        Err(InvalidChangeRequest::NoSuchPathError { path: change.path })
                    }
                } else {
                    Err(InvalidChangeRequest::CannotDeleteRootObject)
                }
            }
            LocalOperation::Increment(by) => {
                if change.path.name().is_some() {
                    if let Some(pr) = self.state.resolve_path_mut(&change.path) {
                        match pr {
                            ResolvedPathMut::Counter(mut counter_target) => {
                                let res = counter_target.increment(by);
                                self.copies_for_rollback.push((
                                    change.path,
                                    LocalOperationForRollback::Increment { by },
                                ));
                                self.apply_state_change(res);
                                Ok(())
                            }
                            _ => Err(InvalidChangeRequest::IncrementForNonCounterObject {
                                path: change.path.clone(),
                            }),
                        }
                    } else {
                        Err(InvalidChangeRequest::NoSuchPathError { path: change.path })
                    }
                } else {
                    Err(InvalidChangeRequest::IncrementForNonCounterObject {
                        path: change.path.clone(),
                    })
                }
            }
            LocalOperation::Insert(value) => {
                match self.insert_helper(&change.path, std::iter::once(value)) {
                    Ok(()) => {
                        self.copies_for_rollback
                            .push((change.path, LocalOperationForRollback::Insert));
                        Ok(())
                    }
                    Err(e) => Err(e),
                }
            }
            LocalOperation::InsertMany(values) => {
                let count = values.len();
                match self.insert_helper(&change.path, values.into_iter()) {
                    Ok(()) => {
                        self.copies_for_rollback
                            .push((change.path, LocalOperationForRollback::InsertMany { count }));
                        Ok(())
                    }
                    Err(e) => Err(e),
                }
            }
        }
    }
}
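Editor's note: every arm above (Set, Delete, Increment, Insert, InsertMany) pushes a LocalOperationForRollback onto copies_for_rollback before the state change is applied, so a change that fails part-way can be undone in reverse order. A minimal, self-contained sketch of that undo-stack pattern with toy types follows; none of these names are automerge API.

use std::collections::HashMap;

// The inverse operation recorded before each mutation.
enum Rollback {
    Set { key: String, old: Option<i32> },
}

struct Doc {
    data: HashMap<String, i32>,
    rollbacks: Vec<Rollback>,
}

impl Doc {
    fn set(&mut self, key: &str, value: i32) {
        // Record the old value (if any) before overwriting it.
        let old = self.data.insert(key.to_string(), value);
        self.rollbacks.push(Rollback::Set {
            key: key.to_string(),
            old,
        });
    }

    fn roll_back(&mut self) {
        // Undo in reverse order of application.
        while let Some(op) = self.rollbacks.pop() {
            match op {
                Rollback::Set { key, old } => match old {
                    Some(v) => {
                        self.data.insert(key, v);
                    }
                    None => {
                        self.data.remove(&key);
                    }
                },
            }
        }
    }
}

fn main() {
    let mut doc = Doc { data: HashMap::new(), rollbacks: Vec::new() };
    doc.set("a", 1);
    doc.set("a", 2);
    doc.roll_back();
    assert!(doc.data.get("a").is_none());
}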
60
automerge-frontend/src/path.rs
Normal file
@@ -0,0 +1,60 @@
use std::fmt;

use smol_str::SmolStr;

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub(crate) enum PathElement {
    Key(SmolStr),
    Index(u32),
}

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Path(Vec<PathElement>);

impl Path {
    pub fn root() -> Path {
        Path(Vec::new())
    }

    pub fn index(mut self, index: u32) -> Self {
        self.0.push(PathElement::Index(index));
        self
    }

    pub fn key<S: Into<SmolStr>>(mut self, key: S) -> Path {
        self.0.push(PathElement::Key(key.into()));
        self
    }

    pub fn parent(&self) -> Self {
        if self.0.is_empty() {
            Path(Vec::new())
        } else {
            let mut new_path = self.0.clone();
            new_path.pop();
            Path(new_path)
        }
    }

    /// Get the final component of the path, if any
    pub(crate) fn name(&self) -> Option<&PathElement> {
        self.0.last()
    }

    pub(crate) fn elements(self) -> Vec<PathElement> {
        self.0
    }

    pub(crate) fn is_root(&self) -> bool {
        self.0.is_empty()
    }
}

impl fmt::Display for PathElement {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            PathElement::Key(k) => write!(f, "{}", k),
            PathElement::Index(i) => write!(f, "{}", i),
        }
    }
}
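Editor's note: since path.rs arrives as a new file, a short illustrative snippet (not part of the diff) may help reviewers see how the builder composes. It uses only the methods and derives defined above, written as a crate-internal test since name() and is_root() are pub(crate).

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn builds_and_inspects_paths() {
        // Builder calls append one element each and return the path by value.
        let path = Path::root().key("todos").index(0).key("title");
        assert_eq!(path.parent(), Path::root().key("todos").index(0));
        assert!(Path::root().is_root());
        assert!(path.name().is_some()); // final component is Key("title")
    }
}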
511
automerge-frontend/src/state_tree/diffable_sequence.rs
Normal file
@@ -0,0 +1,511 @@
use std::collections::HashSet;

use amp::OpId;
use automerge_protocol as amp;

use super::{MultiGrapheme, MultiValue};
use crate::error::InvalidPatch;

pub(super) trait DiffableValue: Sized + Default {
    fn check_construct(
        opid: &amp::OpId,
        diff: &amp::Diff,
        parent_object_id: &amp::ObjectId,
    ) -> Result<(), InvalidPatch>;

    fn construct(opid: amp::OpId, diff: amp::Diff) -> Self;

    fn check_diff(
        &self,
        opid: &amp::OpId,
        diff: &amp::Diff,
        parent_object_id: &amp::ObjectId,
    ) -> Result<(), InvalidPatch>;

    fn apply_diff(&mut self, opid: amp::OpId, diff: amp::Diff);

    fn apply_diff_iter<I>(&mut self, diff: &mut I)
    where
        I: Iterator<Item = (amp::OpId, amp::Diff)>;

    fn default_opid(&self) -> amp::OpId;

    fn only_for_opid(&self, opid: amp::OpId) -> Option<Self>;

    fn add_values_from(&mut self, other: Self);
}

impl DiffableValue for MultiGrapheme {
    fn check_construct(
        opid: &amp::OpId,
        diff: &amp::Diff,
        parent_object_id: &amp::ObjectId,
    ) -> Result<(), InvalidPatch> {
        MultiGrapheme::check_new_from_diff(opid, diff, parent_object_id)
    }

    fn construct(opid: amp::OpId, diff: amp::Diff) -> Self {
        MultiGrapheme::new_from_diff(opid, diff)
    }

    fn check_diff(
        &self,
        opid: &amp::OpId,
        diff: &amp::Diff,
        parent_object_id: &amp::ObjectId,
    ) -> Result<(), InvalidPatch> {
        MultiGrapheme::check_diff(self, opid, diff, parent_object_id)
    }

    fn apply_diff(&mut self, opid: amp::OpId, diff: amp::Diff) {
        MultiGrapheme::apply_diff(self, opid, diff)
    }

    fn apply_diff_iter<I>(&mut self, diff: &mut I)
    where
        I: Iterator<Item = (amp::OpId, amp::Diff)>,
    {
        self.apply_diff_iter(diff)
        //MultiGrapheme::apply_diff_iter(self, diff)
    }

    fn default_opid(&self) -> amp::OpId {
        self.default_opid().clone()
    }

    fn only_for_opid(&self, opid: amp::OpId) -> Option<MultiGrapheme> {
        self.only_for_opid(opid)
    }

    fn add_values_from(&mut self, other: MultiGrapheme) {
        self.add_values_from(other)
    }
}

impl DiffableValue for MultiValue {
    fn check_construct(
        opid: &amp::OpId,
        diff: &amp::Diff,
        _parent_object_id: &amp::ObjectId,
    ) -> Result<(), InvalidPatch> {
        MultiValue::check_new_from_diff(opid, diff)
    }

    fn construct(opid: amp::OpId, diff: amp::Diff) -> Self {
        MultiValue::new_from_diff(opid, diff)
    }

    fn check_diff(
        &self,
        opid: &amp::OpId,
        diff: &amp::Diff,
        _parent_object_id: &amp::ObjectId,
    ) -> Result<(), InvalidPatch> {
        self.check_diff(opid, diff)
    }

    fn apply_diff(&mut self, opid: amp::OpId, diff: amp::Diff) {
        self.apply_diff(opid, diff)
    }

    fn apply_diff_iter<I>(&mut self, diff: &mut I)
    where
        I: Iterator<Item = (amp::OpId, amp::Diff)>,
    {
        self.apply_diff_iter(diff)
    }

    fn default_opid(&self) -> amp::OpId {
        self.default_opid()
    }

    fn only_for_opid(&self, opid: amp::OpId) -> Option<MultiValue> {
        self.only_for_opid(opid)
    }

    fn add_values_from(&mut self, other: MultiValue) {
        self.add_values_from(other)
    }
}

#[derive(Clone, Debug, PartialEq)]
struct SequenceElement<T>
where
    T: DiffableValue,
    T: Clone,
    T: PartialEq,
{
    opid: OpId,
    value: SequenceValue<T>,
}

impl<T> SequenceElement<T>
where
    T: Clone,
    T: DiffableValue,
    T: PartialEq,
{
    fn original(value: T) -> Self {
        Self {
            opid: value.default_opid(),
            value: SequenceValue::Original(value),
        }
    }

    fn new(value: T) -> Self {
        Self {
            opid: value.default_opid(),
            value: SequenceValue::New(value),
        }
    }
}

#[derive(Clone, Debug, PartialEq)]
pub(super) struct DiffableSequence<T>
where
    T: DiffableValue,
    T: Clone,
    T: PartialEq,
{
    // stores the opid that created the element and the diffable value
    underlying: Box<im_rc::Vector<SequenceElement<T>>>,
}

impl<T> DiffableSequence<T>
where
    T: Clone,
    T: DiffableValue,
    T: PartialEq,
{
    pub fn new() -> DiffableSequence<T> {
        DiffableSequence {
            underlying: Box::new(im_rc::Vector::new()),
        }
    }

    pub(super) fn new_from<I>(i: I) -> DiffableSequence<T>
    where
        I: IntoIterator<Item = T>,
    {
        DiffableSequence {
            underlying: Box::new(i.into_iter().map(SequenceElement::original).collect()),
        }
    }

    pub fn check_diff(
        &self,
        object_id: &amp::ObjectId,
        edits: &[amp::DiffEdit],
    ) -> Result<(), InvalidPatch> {
        let mut size = self.underlying.len();
        for edit in edits {
            match edit {
                amp::DiffEdit::Remove { index, count } => {
                    let index = *index as usize;
                    let count = *count as usize;
                    if index >= size {
                        return Err(InvalidPatch::InvalidIndex {
                            object_id: object_id.clone(),
                            index,
                        });
                    }
                    if index + count > size {
                        return Err(InvalidPatch::InvalidIndex {
                            object_id: object_id.clone(),
                            index: size,
                        });
                    }
                    size -= count;
                }
                amp::DiffEdit::SingleElementInsert {
                    index,
                    elem_id: _,
                    op_id,
                    value,
                } => {
                    T::check_construct(op_id, value, object_id)?;
                    if *index as usize > size {
                        return Err(InvalidPatch::InvalidIndex {
                            object_id: object_id.clone(),
                            index: *index as usize,
                        });
                    }
                    size += 1;
                }
                amp::DiffEdit::MultiElementInsert(amp::MultiElementInsert {
                    elem_id,
                    values,
                    index,
                }) => {
                    let index = *index as usize;
                    if index > size {
                        return Err(InvalidPatch::InvalidIndex {
                            index,
                            object_id: object_id.clone(),
                        });
                    }
                    for (i, value) in values.iter().enumerate() {
                        let opid = elem_id.as_opid().unwrap().increment_by(i as u64);
                        T::check_construct(&opid, &amp::Diff::Value(value.clone()), object_id)?;
                    }
                    size += values.len();
                }
                amp::DiffEdit::Update {
                    index,
                    value: _,
                    op_id: _,
                } => {
                    // TODO: handle updates after things like inserts shifting them
                    if *index as usize >= size {
                        return Err(InvalidPatch::InvalidIndex {
                            index: *index as usize,
                            object_id: object_id.clone(),
                        });
                    }

                    // if let Some((_id, elem)) = self.underlying.get(*index as usize) {
                    //     elem.check_diff(op_id, value)?;
                    // } else {
                    // }
                }
            };
        }

        Ok(())
    }

    pub fn apply_diff(&mut self, _object_id: &amp::ObjectId, edits: Vec<amp::DiffEdit>) {
        let mut changed_indices = HashSet::new();
        for edit in edits {
            match edit {
                amp::DiffEdit::Remove { index, count } => {
                    let index = index as usize;
                    let count = count as usize;
                    self.underlying.slice(index..(index + count));

                    for i in changed_indices.clone().iter() {
                        // if the index is to the right of that being removed we need to shift it
                        if *i >= index as u64 {
                            // we don't need to keep the old value
                            changed_indices.remove(i);
                            // but if the value is not in the removed range then we need to add the
                            // updated value in again
                            if *i >= (index + count) as u64 {
                                changed_indices.insert(*i - count as u64);
                            }
                        }
                    }
                }
                amp::DiffEdit::SingleElementInsert {
                    index,
                    elem_id: _,
                    op_id,
                    value,
                } => {
                    let node = T::construct(op_id, value);
                    if (index as usize) == self.underlying.len() {
                        self.underlying.push_back(SequenceElement::new(node));
                    } else {
                        self.underlying
                            .insert(index as usize, SequenceElement::new(node));
                    };
                    changed_indices.insert(index);
                }
                amp::DiffEdit::MultiElementInsert(amp::MultiElementInsert {
                    elem_id,
                    values,
                    index,
                }) => {
                    let index = index as usize;
                    // building an intermediate vector can be better than just inserting
                    // TODO: only do this if there are a certain (to be worked out) number of
                    // values
                    // TODO: if all inserts are at the end then use push_back
                    let mut intermediate = im_rc::Vector::new();
                    for (i, value) in values.iter().enumerate() {
                        let opid = elem_id.as_opid().unwrap().increment_by(i as u64);
                        let mv = T::construct(opid, amp::Diff::Value(value.clone()));
                        intermediate.push_back(SequenceElement::new(mv));
                    }
                    let right = self.underlying.split_off(index);
                    self.underlying.append(intermediate);
                    self.underlying.append(right);
                    for i in index..(index + values.len()) {
                        changed_indices.insert(i as u64);
                    }
                }
                amp::DiffEdit::Update {
                    index,
                    value,
                    op_id,
                } => {
                    if let Some(v) = self.underlying.get_mut(index as usize) {
                        v.value.apply_diff(op_id, value);
                    }
                    changed_indices.insert(index);
                }
            };
        }

        for i in changed_indices {
            if let Some(u) = self.underlying.get_mut(i as usize) {
                u.value.finish()
            }
        }

        debug_assert!(
            self.underlying
                .iter()
                .all(|u| matches!(u.value, SequenceValue::Original(_))),
            "diffable sequence apply_diff_iter didn't call finish on all values"
        );
    }

    pub(super) fn remove(&mut self, index: usize) -> T {
        match self.underlying.remove(index).value {
            SequenceValue::Original(t) => t,
            _ => unreachable!(),
        }
    }

    pub(super) fn len(&self) -> usize {
        self.underlying.len()
    }

    pub(super) fn set(&mut self, index: usize, value: T) -> T {
        let elem_id = self
            .underlying
            .get(index)
            .map(|existing| existing.opid.clone())
            .expect("Failed to get existing index in set");
        self.underlying
            .set(
                index,
                SequenceElement {
                    opid: elem_id,
                    value: SequenceValue::Original(value),
                },
            )
            .value
            .get()
            .clone()
    }

    pub(super) fn get(&self, index: usize) -> Option<(&OpId, &T)> {
        self.underlying.get(index).map(|e| (&e.opid, e.value.get()))
    }

    pub(super) fn get_mut(&mut self, index: usize) -> Option<(&mut OpId, &mut T)> {
        self.underlying
            .get_mut(index)
            .map(|e| (&mut e.opid, e.value.get_mut()))
    }

    pub(super) fn insert(&mut self, index: usize, value: T) {
        self.underlying
            .insert(index, SequenceElement::original(value))
    }

    pub(super) fn iter(&self) -> impl std::iter::Iterator<Item = &T> {
        // Making this unwrap safe is the entire point of this data structure
        self.underlying.iter().map(|i| i.value.get())
    }
}

#[derive(Clone, Debug, PartialEq)]
enum SequenceValue<T>
where
    T: DiffableValue,
{
    Original(T),
    New(T),
    Updated { original: T, updates: Vec<T> },
}

impl<T> SequenceValue<T>
where
    T: DiffableValue,
    T: Clone,
{
    fn finish(&mut self) {
        match self {
            SequenceValue::Original(_) => { // do nothing, this is the finished state
            }
            SequenceValue::New(v) => *self = SequenceValue::Original(std::mem::take(v)),
            SequenceValue::Updated { updates, .. } => {
                let initial_update = updates.remove(0);
                let t =
                    std::mem::take(updates)
                        .into_iter()
                        .fold(initial_update, |mut acc, elem| {
                            acc.add_values_from(elem);
                            acc
                        });
                *self = SequenceValue::Original(t)
            }
        }
    }

    fn get(&self) -> &T {
        match self {
            SequenceValue::Original(v) => v,
            _ => unreachable!(),
        }
    }

    fn get_mut(&mut self) -> &mut T {
        match self {
            SequenceValue::Original(v) => v,
            _ => unreachable!(),
        }
    }

    fn apply_diff(&mut self, opid: amp::OpId, diff: amp::Diff) {
        match self {
            SequenceValue::Original(v) => {
                let updated = if let Some(mut existing) = v.only_for_opid(opid.clone()) {
                    existing.apply_diff(opid, diff);
                    existing
                } else {
                    T::construct(opid, diff)
                };
                *self = SequenceValue::Updated {
                    original: std::mem::take(v),
                    updates: vec![updated],
                };
            }
            SequenceValue::New(v) => {
                let updated = if let Some(mut existing) = v.only_for_opid(opid.clone()) {
                    existing.apply_diff(opid, diff);
                    existing
                } else {
                    T::construct(opid, diff)
                };
                *self = SequenceValue::Updated {
                    original: v.clone(),
                    updates: vec![std::mem::take(v), updated],
                };
            }
            SequenceValue::Updated { original, updates } => {
                let updated = if let Some(mut update) = updates
                    .get(1..)
                    .and_then(|i| i.iter().find_map(|v| v.only_for_opid(opid.clone())))
                {
                    update.apply_diff(opid, diff);
                    update
                } else if let Some(mut initial) =
                    updates.get(0).and_then(|u| u.only_for_opid(opid.clone()))
                {
                    initial.apply_diff(opid, diff);
                    initial
                } else if let Some(mut original) = original.only_for_opid(opid.clone()) {
                    original.apply_diff(opid, diff);
                    original
                } else {
                    T::construct(opid, diff)
                };
                updates.push(updated);
            }
        }
    }
}
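Editor's note: the invariant behind the unreachable!() arms above is that every element is back in the Original state between patches; apply_diff moves touched elements through New/Updated and then calls finish() on every changed index. A minimal, self-contained sketch of that lifecycle with a toy value type (illustrative only, not automerge API):

struct Toy(Vec<u32>);

enum Cell {
    Original(Toy),
    Updated { original: Toy, updates: Vec<Toy> },
}

impl Cell {
    // Mirrors SequenceValue::finish: once the whole patch has been applied,
    // fold all pending updates back into a single Original value.
    fn finish(&mut self) {
        if let Cell::Updated { updates, .. } = self {
            let mut acc = updates.remove(0);
            for u in std::mem::take(updates) {
                acc.0.extend(u.0); // stands in for DiffableValue::add_values_from
            }
            *self = Cell::Original(acc);
        }
    }
}

fn main() {
    let mut cell = Cell::Updated {
        original: Toy(vec![1]),
        updates: vec![Toy(vec![2]), Toy(vec![3])],
    };
    cell.finish();
    match cell {
        Cell::Original(t) => assert_eq!(t.0, vec![2, 3]),
        Cell::Updated { .. } => unreachable!(),
    }
}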
876
automerge-frontend/src/state_tree/mod.rs
Normal file
@@ -0,0 +1,876 @@
use std::{collections::HashMap, convert::TryInto};

use amp::{ElementId, SortedVec};
use automerge_protocol as amp;
use automerge_protocol::RootDiff;
use diffable_sequence::DiffableSequence;
use multivalue::NewValueRequest;
use smol_str::SmolStr;

use crate::{error, Path, PathElement, Primitive, Value};

mod diffable_sequence;
mod multivalue;
mod resolved_path;

pub use multivalue::{MultiGrapheme, MultiValue};
pub(crate) use resolved_path::SetOrInsertPayload;
pub use resolved_path::{ResolvedPath, ResolvedPathMut};

#[derive(Debug, PartialEq, Clone, Default)]
pub struct CheckedRootDiff(RootDiff);

/// Represents the result of running a local operation (i.e one that happens within the frontend
/// before any interaction with a backend).
pub(crate) struct LocalOperationResult {
    /// Any operations which need to be sent to the backend to reconcile this change
    pub new_ops: Vec<amp::Op>,
}

#[derive(Debug, Clone, PartialEq)]
pub(crate) struct StateTree {
    root_props: HashMap<SmolStr, MultiValue>,
    cursors: Cursors,
}

impl Default for StateTree {
    fn default() -> Self {
        Self {
            root_props: HashMap::new(),
            cursors: Cursors::new(),
        }
    }
}

impl StateTree {
    pub fn new() -> StateTree {
        StateTree {
            root_props: HashMap::new(),
            cursors: Cursors::new(),
        }
    }

    pub fn check_diff(&self, diff: amp::RootDiff) -> Result<CheckedRootDiff, error::InvalidPatch> {
        for (prop, prop_diff) in &diff.props {
            let mut diff_iter = prop_diff.iter();
            match diff_iter.next() {
                None => {
                    // all ok here
                }
                Some((opid, diff)) => {
                    match self.root_props.get(prop) {
                        Some(n) => n.check_diff(opid, diff)?,
                        None => {
                            MultiValue::check_new_from_diff(opid, diff)?;
                        }
                    };
                    // TODO: somehow get this working
                    // self.root_props
                    //     .get(prop)
                    //     .unwrap()
                    //     .check_diff_iter(&mut diff_iter)?;
                }
            }
        }
        Ok(CheckedRootDiff(diff))
    }

    pub fn apply_diff(&mut self, diff: CheckedRootDiff) {
        for (prop, prop_diff) in diff.0.props {
            let mut diff_iter = prop_diff.into_iter();
            match diff_iter.next() {
                None => {
                    self.root_props.remove(&prop);
                }
                Some((opid, diff)) => {
                    match self.root_props.get_mut(&prop) {
                        Some(n) => n.apply_diff(opid, diff),
                        None => {
                            let value = MultiValue::new_from_diff(opid.clone(), diff);
                            self.root_props.insert(prop.clone(), value);
                        }
                    };
                    self.root_props
                        .get_mut(&prop)
                        .unwrap()
                        .apply_diff_iter(&mut diff_iter);
                }
            }
        }
    }

    fn remove(&mut self, k: &str) -> Option<MultiValue> {
        self.root_props.remove(k)
    }

    fn get(&self, k: &str) -> Option<&MultiValue> {
        self.root_props.get(k)
    }

    pub(crate) fn resolve_path<'a>(
        &'a self,
        path: &Path,
    ) -> Option<resolved_path::ResolvedPath<'a>> {
        if path.is_root() {
            return Some(ResolvedPath::new_root(self));
        }
        let mut stack = path.clone().elements();
        stack.reverse();

        if let Some(PathElement::Key(k)) = stack.pop() {
            let o = self.root_props.get(&k)?;

            o.resolve_path(stack, amp::ObjectId::Root, amp::Key::Map(k))
        } else {
            None
        }
    }

    pub(crate) fn resolve_path_mut<'a>(
        &'a mut self,
        path: &Path,
    ) -> Option<resolved_path::ResolvedPathMut<'a>> {
        if path.is_root() {
            return Some(ResolvedPathMut::new_root(self));
        }
        let mut stack = path.clone().elements();
        stack.reverse();

        if let Some(PathElement::Key(k)) = stack.pop() {
            let o = self.root_props.get_mut(&k)?;

            o.resolve_path_mut(stack, amp::ObjectId::Root, amp::Key::Map(k))
        } else {
            None
        }
    }

    pub fn value(&self) -> Value {
        let mut m = HashMap::new();
        for (k, v) in &self.root_props {
            m.insert(k.clone(), v.default_value());
        }
        Value::Map(m)
    }
}

/// A node in the state tree is either a leaf node containing a scalarvalue,
/// or an internal composite type (e.g a Map or a List)
#[derive(Debug, Clone, PartialEq)]
enum StateTreeValue {
    Leaf(Primitive),
    Composite(StateTreeComposite),
}

impl Default for StateTreeValue {
    fn default() -> Self {
        Self::Leaf(Primitive::Null)
    }
}

#[derive(Debug, Clone, PartialEq)]
enum StateTreeComposite {
    Map(StateTreeMap),
    Table(StateTreeTable),
    Text(StateTreeText),
    List(StateTreeList),
}

impl StateTreeComposite {
    fn check_diff(&self, diff: &amp::Diff) -> Result<(), error::InvalidPatch> {
        if diff.object_id() != Some(self.object_id()) {
            return Err(error::InvalidPatch::MismatchingObjectIDs {
                patch_expected_id: diff.object_id(),
                actual_id: self.object_id(),
            });
        };
        match (diff, self) {
            (
                amp::Diff::Map(amp::MapDiff {
                    props: prop_diffs,
                    object_id: _,
                }),
                StateTreeComposite::Map(map),
            ) => map.check_diff(prop_diffs),
            (
                amp::Diff::Table(amp::TableDiff {
                    props: prop_diffs,
                    object_id: _,
                }),
                StateTreeComposite::Table(table),
            ) => table.check_diff(prop_diffs),
            (
                amp::Diff::List(amp::ListDiff {
                    edits,
                    object_id: _,
                }),
                StateTreeComposite::List(list),
            ) => list.check_diff(edits),
            (
                amp::Diff::Text(amp::TextDiff {
                    edits,
                    object_id: _,
                }),
                StateTreeComposite::Text(text),
            ) => text.check_diff(edits),
            // TODO throw an error
            (amp::Diff::Value(..), _) => unreachable!(),
            // TODO throw an error
            (amp::Diff::Cursor(..), _) => unreachable!(),
            (amp::Diff::Map(_), _)
            | (amp::Diff::Table(_), _)
            | (amp::Diff::List(_), _)
            | (amp::Diff::Text(_), _) => Err(error::InvalidPatch::MismatchingObjectType {
                object_id: self.object_id(),
                patch_expected_type: diff.object_type(),
                actual_type: Some(self.obj_type()),
            }),
        }
    }

    fn apply_diff(&mut self, diff: amp::Diff) {
        match (diff, self) {
            (
                amp::Diff::Map(amp::MapDiff {
                    props: prop_diffs,
                    object_id: _,
                }),
                StateTreeComposite::Map(map),
            ) => map.apply_diff(prop_diffs),
            (
                amp::Diff::Table(amp::TableDiff {
                    props: prop_diffs,
                    object_id: _,
                }),
                StateTreeComposite::Table(table),
            ) => table.apply_diff(prop_diffs),
            (
                amp::Diff::List(amp::ListDiff {
                    edits,
                    object_id: _,
                }),
                StateTreeComposite::List(list),
            ) => list.apply_diff(edits),
            (
                amp::Diff::Text(amp::TextDiff {
                    edits,
                    object_id: _,
                }),
                StateTreeComposite::Text(text),
            ) => text.apply_diff(edits),
            // TODO throw an error
            (amp::Diff::Value(..), _) => unreachable!(),
            // TODO throw an error
            (amp::Diff::Cursor(..), _) => unreachable!(),
            (amp::Diff::Map(_), _)
            | (amp::Diff::Table(_), _)
            | (amp::Diff::List(_), _)
            | (amp::Diff::Text(_), _) => unreachable!(),
        }
    }

    fn obj_type(&self) -> amp::ObjType {
        match self {
            Self::Map(..) => amp::ObjType::Map,
            Self::Table(..) => amp::ObjType::Table,
            Self::Text(..) => amp::ObjType::Text,
            Self::List(..) => amp::ObjType::List,
        }
    }

    fn object_id(&self) -> amp::ObjectId {
        match self {
            Self::Map(StateTreeMap { object_id, .. }) => object_id.clone(),
            Self::Table(StateTreeTable { object_id, .. }) => object_id.clone(),
            Self::Text(StateTreeText { object_id, .. }) => object_id.clone(),
            Self::List(StateTreeList { object_id, .. }) => object_id.clone(),
        }
    }

    fn realise_value(&self) -> Value {
        match self {
            Self::Map(StateTreeMap { props, .. }) => Value::Map(
                props
                    .iter()
                    .map(|(k, v)| (k.clone(), v.default_value()))
                    .collect(),
            ),
            Self::Table(StateTreeTable { props, .. }) => Value::Table(
                props
                    .iter()
                    .map(|(k, v)| (k.clone(), v.default_value()))
                    .collect(),
            ),
            Self::List(StateTreeList {
                elements: elems, ..
            }) => Value::Sequence(elems.iter().map(|e| e.default_value()).collect()),
            Self::Text(StateTreeText { graphemes, .. }) => Value::Text(
                graphemes
                    .iter()
                    .map(|c| c.default_grapheme().clone())
                    .collect(),
            ),
        }
    }

    fn resolve_path(&self, path: Vec<PathElement>) -> Option<ResolvedPath> {
        match self {
            Self::Map(map) => map.resolve_path(path),
            Self::Table(table) => table.resolve_path(path),
            Self::List(list) => list.resolve_path(path),
            Self::Text(text) => text.resolve_path(path),
        }
    }

    fn resolve_path_mut(&mut self, path: Vec<PathElement>) -> Option<ResolvedPathMut> {
        match self {
            Self::Map(map) => map.resolve_path_mut(path),
            Self::Table(table) => table.resolve_path_mut(path),
            Self::List(list) => list.resolve_path_mut(path),
            Self::Text(text) => text.resolve_path_mut(path),
        }
    }
}

impl StateTreeValue {
    fn check_new_from_diff(diff: &amp::Diff) -> Result<(), error::InvalidPatch> {
        match diff {
            amp::Diff::Value(v) => match v {
                amp::ScalarValue::Bytes(_)
                | amp::ScalarValue::Str(_)
                | amp::ScalarValue::Int(_)
                | amp::ScalarValue::Uint(_)
                | amp::ScalarValue::F64(_)
                | amp::ScalarValue::Counter(_)
                | amp::ScalarValue::Timestamp(_)
                | amp::ScalarValue::Boolean(_)
                | amp::ScalarValue::Null => Ok(()),
                amp::ScalarValue::Cursor(..) => Err(error::InvalidPatch::ValueDiffContainedCursor),
            },
            amp::Diff::Map(_)
            | amp::Diff::Table(_)
            | amp::Diff::List(_)
            | amp::Diff::Text(_)
            | amp::Diff::Cursor(_) => Ok(()),
        }
    }

    fn new_from_diff(diff: amp::Diff) -> StateTreeValue {
        match diff {
            amp::Diff::Value(v) => {
                let value = match v {
                    amp::ScalarValue::Bytes(b) => Primitive::Bytes(b),
                    amp::ScalarValue::Str(s) => Primitive::Str(s),
                    amp::ScalarValue::Int(i) => Primitive::Int(i),
                    amp::ScalarValue::Uint(u) => Primitive::Uint(u),
                    amp::ScalarValue::F64(f) => Primitive::F64(f),
                    amp::ScalarValue::Counter(i) => Primitive::Counter(i),
                    amp::ScalarValue::Timestamp(i) => Primitive::Timestamp(i),
                    amp::ScalarValue::Boolean(b) => Primitive::Boolean(b),
                    amp::ScalarValue::Null => Primitive::Null,
                    amp::ScalarValue::Cursor(..) => {
                        unreachable!("value diff contained a cursor")
                    }
                };
                StateTreeValue::Leaf(value)
            }
            amp::Diff::Map(amp::MapDiff { object_id, props }) => {
                let mut map = StateTreeMap {
                    object_id,
                    props: HashMap::new(),
                };
                map.apply_diff(props);
                StateTreeValue::Composite(StateTreeComposite::Map(map))
            }
            amp::Diff::Table(amp::TableDiff { object_id, props }) => {
                let mut table = StateTreeTable {
                    object_id,
                    props: HashMap::new(),
                };
                table.apply_diff(props);
                StateTreeValue::Composite(StateTreeComposite::Table(table))
            }
            amp::Diff::List(amp::ListDiff { object_id, edits }) => {
                let mut list = StateTreeList {
                    object_id,
                    elements: DiffableSequence::new(),
                };
                list.apply_diff(edits);
                StateTreeValue::Composite(StateTreeComposite::List(list))
            }
            amp::Diff::Text(amp::TextDiff { object_id, edits }) => {
                let mut text = StateTreeText {
                    object_id,
                    graphemes: DiffableSequence::new(),
                };
                text.apply_diff(edits);
                StateTreeValue::Composite(StateTreeComposite::Text(text))
            }

            amp::Diff::Cursor(ref c) => StateTreeValue::Leaf(c.into()),
        }
    }

    fn realise_value(&self) -> Value {
        match self {
            StateTreeValue::Leaf(p) => p.clone().into(),
            StateTreeValue::Composite(composite) => composite.realise_value(),
        }
    }
}

#[derive(Debug, Clone, PartialEq)]
struct StateTreeMap {
    object_id: amp::ObjectId,
    props: HashMap<SmolStr, MultiValue>,
}

impl StateTreeMap {
    fn check_diff(
        &self,
        prop_diffs: &HashMap<SmolStr, HashMap<amp::OpId, amp::Diff>>,
    ) -> Result<(), error::InvalidPatch> {
        for (prop, prop_diff) in prop_diffs {
            let mut diff_iter = prop_diff.iter();
            match diff_iter.next() {
                None => {}
                Some((opid, diff)) => {
                    match self.props.get(prop) {
                        Some(n) => n.check_diff(opid, diff)?,
                        None => {
                            MultiValue::check_new_from_diff(opid, diff)?;
                        }
                    };
                    // TODO: get this working
                    // self.props
                    //     .get(prop)
                    //     .unwrap()
                    //     .check_diff_iter(&mut diff_iter)?;
                }
            }
        }
        Ok(())
    }

    fn apply_diff(&mut self, prop_diffs: HashMap<SmolStr, HashMap<amp::OpId, amp::Diff>>) {
        for (prop, prop_diff) in prop_diffs {
            let mut diff_iter = prop_diff.into_iter();
            match diff_iter.next() {
                None => {
                    self.props.remove(&prop);
                }
                Some((opid, diff)) => {
                    match self.props.get_mut(&prop) {
                        Some(n) => n.apply_diff(opid, diff),
                        None => {
                            let value = MultiValue::new_from_diff(opid.clone(), diff);
                            self.props.insert(prop.clone(), value);
                        }
                    };
                    self.props
                        .get_mut(&prop)
                        .unwrap()
                        .apply_diff_iter(&mut diff_iter);
                }
            }
        }
    }

    pub fn pred_for_key(&self, key: &str) -> SortedVec<amp::OpId> {
        self.props
            .get(key)
            .map(|v| vec![v.default_opid()].into())
            .unwrap_or_else(SortedVec::new)
    }

    pub(crate) fn resolve_path(&self, mut path: Vec<PathElement>) -> Option<ResolvedPath> {
        if let Some(PathElement::Key(key)) = path.pop() {
            self.props
                .get(&key)?
                .resolve_path(path, self.object_id.clone(), amp::Key::Map(key))
        } else {
            None
        }
    }

    pub(crate) fn resolve_path_mut(
        &mut self,
        mut path: Vec<PathElement>,
    ) -> Option<ResolvedPathMut> {
        if let Some(PathElement::Key(key)) = path.pop() {
            self.props.get_mut(&key)?.resolve_path_mut(
                path,
                self.object_id.clone(),
                amp::Key::Map(key),
            )
        } else {
            None
        }
    }
}

#[derive(Debug, Clone, PartialEq)]
struct StateTreeTable {
    object_id: amp::ObjectId,
    props: HashMap<SmolStr, MultiValue>,
}

impl StateTreeTable {
    fn check_diff(
        &self,
        prop_diffs: &HashMap<SmolStr, HashMap<amp::OpId, amp::Diff>>,
    ) -> Result<(), error::InvalidPatch> {
        for (prop, prop_diff) in prop_diffs {
            let mut diff_iter = prop_diff.iter();
            match diff_iter.next() {
                None => {}
                Some((opid, diff)) => {
                    match self.props.get(prop) {
                        Some(n) => n.check_diff(opid, diff)?,
                        None => {
                            MultiValue::check_new_from_diff(opid, diff)?;
                        }
                    };
                    // TODO: get this working
                    // self.props
                    //     .get(prop)
                    //     .unwrap()
                    //     .check_diff_iter(&mut diff_iter)?;
                }
            }
        }
        Ok(())
    }

    fn apply_diff(&mut self, prop_diffs: HashMap<SmolStr, HashMap<amp::OpId, amp::Diff>>) {
        for (prop, prop_diff) in prop_diffs {
            let mut diff_iter = prop_diff.into_iter();
            match diff_iter.next() {
                None => {
                    self.props.remove(&prop);
                }
                Some((opid, diff)) => {
                    match self.props.get_mut(&prop) {
                        Some(n) => n.apply_diff(opid, diff),
                        None => {
                            let value = MultiValue::new_from_diff(opid.clone(), diff);
                            self.props.insert(prop.clone(), value);
                        }
                    };
                    self.props
                        .get_mut(&prop)
                        .unwrap()
                        .apply_diff_iter(&mut diff_iter);
                }
            }
        }
    }

    pub fn pred_for_key(&self, key: &str) -> SortedVec<amp::OpId> {
        self.props
            .get(key)
            .map(|v| vec![v.default_opid()].into())
            .unwrap_or_else(SortedVec::new)
    }

    pub(crate) fn resolve_path(&self, mut path: Vec<PathElement>) -> Option<ResolvedPath> {
        if let Some(PathElement::Key(key)) = path.pop() {
            self.props
                .get(&key)?
                .resolve_path(path, self.object_id.clone(), amp::Key::Map(key))
        } else {
            None
        }
    }

    pub(crate) fn resolve_path_mut(
        &mut self,
        mut path: Vec<PathElement>,
    ) -> Option<ResolvedPathMut> {
        if let Some(PathElement::Key(key)) = path.pop() {
            self.props.get_mut(&key)?.resolve_path_mut(
                path,
                self.object_id.clone(),
                amp::Key::Map(key),
            )
        } else {
            None
        }
    }
}

#[derive(Debug, Clone, PartialEq)]
struct StateTreeText {
    object_id: amp::ObjectId,
    graphemes: DiffableSequence<MultiGrapheme>,
}

impl StateTreeText {
    fn remove(&mut self, index: usize) -> Result<MultiGrapheme, error::MissingIndexError> {
        if index >= self.graphemes.len() {
            Err(error::MissingIndexError {
                missing_index: index,
                size_of_collection: self.graphemes.len(),
            })
        } else {
            let old = self.graphemes.remove(index);
            Ok(old)
        }
    }

    fn set(
        &mut self,
        index: usize,
        value: MultiGrapheme,
    ) -> Result<MultiGrapheme, error::MissingIndexError> {
        if self.graphemes.len() > index {
            let old = self.graphemes.set(index, value);
            Ok(old)
        } else {
            Err(error::MissingIndexError {
                missing_index: index,
                size_of_collection: self.graphemes.len(),
            })
        }
    }

    pub(crate) fn elem_at(
        &self,
        index: usize,
    ) -> Result<(&amp::OpId, &SmolStr), error::MissingIndexError> {
        self.graphemes
            .get(index)
            .map(|mc| (mc.0, mc.1.default_grapheme()))
            .ok_or_else(|| error::MissingIndexError {
                missing_index: index,
                size_of_collection: self.graphemes.len(),
            })
    }

    fn insert(
        &mut self,
        index: usize,
        value: MultiGrapheme,
    ) -> Result<(), error::MissingIndexError> {
        self.insert_many(index, std::iter::once(value))
    }

    fn insert_many<I>(&mut self, index: usize, values: I) -> Result<(), error::MissingIndexError>
    where
        I: IntoIterator<Item = MultiGrapheme>,
    {
        if index > self.graphemes.len() {
            Err(error::MissingIndexError {
                missing_index: index,
                size_of_collection: self.graphemes.len(),
            })
        } else {
            for (i, grapheme) in values.into_iter().enumerate() {
                self.graphemes.insert(index + i, grapheme);
            }
            Ok(())
        }
    }

    fn check_diff(&self, edits: &[amp::DiffEdit]) -> Result<(), error::InvalidPatch> {
        self.graphemes.check_diff(&self.object_id, edits)?;
        Ok(())
    }

    fn apply_diff(&mut self, edits: Vec<amp::DiffEdit>) {
        self.graphemes.apply_diff(&self.object_id, edits)
    }

    pub fn pred_for_index(&self, index: u32) -> SortedVec<amp::OpId> {
        self.graphemes
            .get(index.try_into().unwrap())
            .map(|v| vec![v.1.default_opid().clone()].into())
            .unwrap_or_else(SortedVec::new)
    }

    pub(crate) fn resolve_path(&self, mut path: Vec<PathElement>) -> Option<ResolvedPath> {
        if let Some(PathElement::Index(i)) = path.pop() {
            if path.is_empty() {
                self.graphemes.get(i as usize)?.1.resolve_path(path)
            } else {
                None
            }
        } else {
            None
        }
    }

    pub(crate) fn resolve_path_mut(
        &mut self,
        mut path: Vec<PathElement>,
    ) -> Option<ResolvedPathMut> {
        if let Some(PathElement::Index(i)) = path.pop() {
            if path.is_empty() {
                self.graphemes.get_mut(i as usize)?.1.resolve_path_mut(path)
            } else {
                None
            }
        } else {
            None
        }
    }
}

#[derive(Debug, Clone, PartialEq)]
struct StateTreeList {
    object_id: amp::ObjectId,
    elements: DiffableSequence<MultiValue>,
}

impl StateTreeList {
    fn remove(&mut self, index: usize) -> Result<MultiValue, error::MissingIndexError> {
        if index >= self.elements.len() {
            Err(error::MissingIndexError {
                missing_index: index,
                size_of_collection: self.elements.len(),
            })
        } else {
            let old = self.elements.remove(index);
            Ok(old)
        }
    }

    fn set(
        &mut self,
        index: usize,
        value: MultiValue,
    ) -> Result<MultiValue, error::MissingIndexError> {
        if self.elements.len() > index {
            let old = self.elements.set(index, value);
            Ok(old)
        } else {
            Err(error::MissingIndexError {
                missing_index: index,
                size_of_collection: self.elements.len(),
            })
        }
    }

    fn insert(&mut self, index: usize, value: MultiValue) -> Result<(), error::MissingIndexError> {
        self.insert_many(index, std::iter::once(value))
    }

    fn insert_many<I>(&mut self, index: usize, values: I) -> Result<(), error::MissingIndexError>
    where
        I: IntoIterator<Item = MultiValue>,
    {
        if index > self.elements.len() {
            Err(error::MissingIndexError {
                missing_index: index,
                size_of_collection: self.elements.len(),
            })
        } else {
            for (i, value) in values.into_iter().enumerate() {
                self.elements.insert(index + i, value);
            }
            Ok(())
        }
    }

    fn check_diff(&self, edits: &[amp::DiffEdit]) -> Result<(), error::InvalidPatch> {
        self.elements.check_diff(&self.object_id, edits)
    }

    fn apply_diff(&mut self, edits: Vec<amp::DiffEdit>) {
        self.elements.apply_diff(&self.object_id, edits);
    }

    pub fn pred_for_index(&self, index: u32) -> SortedVec<amp::OpId> {
        self.elements
            .get(index.try_into().unwrap())
            .map(|v| vec![v.1.default_opid()].into())
            .unwrap_or_else(SortedVec::new)
    }

    pub(crate) fn elem_at(
        &self,
        index: usize,
    ) -> Result<(&amp::OpId, &MultiValue), error::MissingIndexError> {
        self.elements
            .get(index)
            .ok_or_else(|| error::MissingIndexError {
                missing_index: index,
                size_of_collection: self.elements.len(),
            })
    }

    pub(crate) fn resolve_path(&self, mut path: Vec<PathElement>) -> Option<ResolvedPath> {
        if let Some(PathElement::Index(i)) = path.pop() {
            let elem_id = self
                .elem_at(i as usize)
                .ok()
                .map(|(e, _)| e.into())
                .unwrap_or(ElementId::Head);
            self.elements.get(i as usize)?.1.resolve_path(
                path,
                self.object_id.clone(),
                amp::Key::Seq(elem_id),
            )
        } else {
            None
        }
    }

    pub(crate) fn resolve_path_mut(
        &mut self,
        mut path: Vec<PathElement>,
    ) -> Option<ResolvedPathMut> {
        if let Some(PathElement::Index(i)) = path.pop() {
            let elem_id = self
                .elem_at(i as usize)
                .ok()
                .map(|(e, _)| e.into())
                .unwrap_or(ElementId::Head);
            self.elements.get_mut(i as usize)?.1.resolve_path_mut(
                path,
                self.object_id.clone(),
                amp::Key::Seq(elem_id),
            )
        } else {
            None
        }
    }
}

pub fn random_op_id() -> amp::OpId {
    amp::OpId::new(1, &amp::ActorId::random())
}

#[derive(Clone, Debug, PartialEq)]
struct CursorState {
    referring_object_id: amp::ObjectId,
    referring_key: amp::Key,
    referred_object_id: amp::ObjectId,
    referred_opid: amp::OpId,
    index: usize,
}

#[derive(Debug, PartialEq, Clone)]
struct Cursors(HashMap<amp::ObjectId, Vec<CursorState>>);

impl Cursors {
    fn new() -> Cursors {
        Cursors(HashMap::new())
    }

    fn new_from(cursor: CursorState) -> Cursors {
        Cursors(maplit::hashmap! {
            cursor.referred_object_id.clone() => vec![cursor],
        })
    }

    fn extend(&mut self, other: Cursors) {
        for (k, v) in other.0 {
            if let Some(c1) = self.0.get_mut(&k) {
                c1.extend(v)
            } else {
                self.0.insert(k, v);
            }
        }
    }
}
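Editor's note: StateTree splits patch handling into a fallible check_diff that returns a CheckedRootDiff witness and an infallible apply_diff that consumes it, so only validated patches can reach the mutating step. A minimal, self-contained sketch of that check-then-apply shape with hypothetical names (illustrative only, not automerge API):

// The witness newtype can only be obtained from check(), so apply() is
// infallible by construction.
struct Patch(Vec<u32>);
struct CheckedPatch(Patch);

fn check(patch: Patch) -> Result<CheckedPatch, String> {
    if patch.0.iter().all(|n| *n < 100) {
        Ok(CheckedPatch(patch))
    } else {
        Err("patch failed validation".to_string())
    }
}

fn apply(state: &mut Vec<u32>, patch: CheckedPatch) {
    // No error path here: validity was established in check().
    state.extend(patch.0 .0);
}

fn main() {
    let mut state = Vec::new();
    let checked = check(Patch(vec![1, 2, 3])).expect("valid patch");
    apply(&mut state, checked);
    assert_eq!(state, vec![1, 2, 3]);
}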
740
automerge-frontend/src/state_tree/multivalue.rs
Normal file
@@ -0,0 +1,740 @@
use std::{cmp::Ordering, collections::HashMap, iter::Iterator};
|
||||
|
||||
use amp::SortedVec;
|
||||
use automerge_protocol as amp;
|
||||
use smol_str::SmolStr;
|
||||
use unicode_segmentation::UnicodeSegmentation;
|
||||
|
||||
use super::{
|
||||
CursorState, Cursors, DiffableSequence, ResolvedPath, ResolvedPathMut, StateTreeComposite,
|
||||
StateTreeList, StateTreeMap, StateTreeTable, StateTreeText, StateTreeValue,
|
||||
};
|
||||
use crate::{
|
||||
error,
|
||||
path::PathElement,
|
||||
value::{Primitive, Value},
|
||||
};
|
||||
|
||||
pub(crate) struct NewValueRequest<'a, 'c> {
|
||||
pub(crate) actor: &'a amp::ActorId,
|
||||
pub(crate) start_op: u64,
|
||||
pub(crate) key: amp::Key,
|
||||
pub(crate) value: Value,
|
||||
pub(crate) parent_obj: &'c amp::ObjectId,
|
||||
pub(crate) insert: bool,
|
||||
pub(crate) pred: SortedVec<amp::OpId>,
|
||||
}
|
||||
|
||||
/// A set of conflicting values for the same key, indexed by OpID
|
||||
#[derive(Debug, Clone, PartialEq, Default)]
|
||||
pub struct MultiValue {
|
||||
winning_value: (amp::OpId, StateTreeValue),
|
||||
conflicts: HashMap<amp::OpId, StateTreeValue>,
|
||||
}
|
||||
|
||||
impl MultiValue {
|
||||
pub fn check_new_from_diff(
|
||||
_opid: &::OpId,
|
||||
diff: &::Diff,
|
||||
) -> Result<(), error::InvalidPatch> {
|
||||
StateTreeValue::check_new_from_diff(diff)
|
||||
}
|
||||
|
||||
pub fn new_from_diff(opid: amp::OpId, diff: amp::Diff) -> MultiValue {
|
||||
let value = StateTreeValue::new_from_diff(diff);
|
||||
MultiValue {
|
||||
winning_value: (opid, value),
|
||||
conflicts: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn from_statetree_value(
|
||||
statetree_val: StateTreeValue,
|
||||
opid: amp::OpId,
|
||||
) -> MultiValue {
|
||||
MultiValue {
|
||||
winning_value: (opid, statetree_val),
|
||||
conflicts: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn new_from_value_2(req: NewValueRequest) -> NewValue {
|
||||
Self::new_from_value(
|
||||
req.actor,
|
||||
req.start_op,
|
||||
req.parent_obj.clone(),
|
||||
req.key,
|
||||
req.value,
|
||||
req.insert,
|
||||
req.pred,
|
||||
)
|
||||
}
|
||||
|
||||
pub(super) fn new_from_value(
|
||||
actor: &::ActorId,
|
||||
start_op: u64,
|
||||
parent_id: amp::ObjectId,
|
||||
key: amp::Key,
|
||||
value: Value,
|
||||
insert: bool,
|
||||
pred: SortedVec<amp::OpId>,
|
||||
) -> NewValue {
|
||||
NewValueContext {
|
||||
start_op,
|
||||
actor,
|
||||
key,
|
||||
insert,
|
||||
pred,
|
||||
parent_obj: &parent_id,
|
||||
}
|
||||
.create(value)
|
||||
}
|
||||
|
||||
pub(super) fn check_diff(
|
||||
&self,
|
||||
opid: &::OpId,
|
||||
diff: &::Diff,
|
||||
) -> Result<(), error::InvalidPatch> {
|
||||
self.check_diff_iter(&mut std::iter::once((opid, diff)))
|
||||
}
|
||||
|
||||
pub(super) fn check_diff_iter<'a, 'b, I>(&self, diff: &mut I) -> Result<(), error::InvalidPatch>
|
||||
where
|
||||
I: Iterator<Item = (&'a amp::OpId, &'b amp::Diff)>,
|
||||
{
|
||||
for (opid, subdiff) in diff {
|
||||
if let Some(existing_value) = self.get(opid) {
|
||||
match existing_value {
|
||||
StateTreeValue::Leaf(_) => {
|
||||
StateTreeValue::check_new_from_diff(subdiff)?;
|
||||
}
|
||||
StateTreeValue::Composite(composite) => {
|
||||
composite.check_diff(subdiff)?;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
StateTreeValue::check_new_from_diff(subdiff)?;
|
||||
};
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(super) fn apply_diff(&mut self, opid: amp::OpId, diff: amp::Diff) {
|
||||
self.apply_diff_iter(&mut std::iter::once((opid, diff)))
|
||||
}
|
||||
|
||||
pub(super) fn apply_diff_iter<I>(&mut self, diff: &mut I)
|
||||
where
|
||||
I: Iterator<Item = (amp::OpId, amp::Diff)>,
|
||||
{
|
||||
for (opid, subdiff) in diff {
|
||||
if let Some(existing_value) = self.get_mut(&opid) {
|
||||
match existing_value {
|
||||
StateTreeValue::Leaf(_) => {
|
||||
let value = StateTreeValue::new_from_diff(subdiff);
|
||||
self.update(&opid, value)
|
||||
}
|
||||
StateTreeValue::Composite(composite) => {
|
||||
composite.apply_diff(subdiff);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let value = StateTreeValue::new_from_diff(subdiff);
|
||||
self.update(&opid, value)
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
fn get(&self, opid: &::OpId) -> Option<&StateTreeValue> {
|
||||
if opid == &self.winning_value.0 {
|
||||
Some(&self.winning_value.1)
|
||||
} else {
|
||||
self.conflicts.get(opid)
|
||||
}
|
||||
}
|
||||
|
||||
fn get_mut(&mut self, opid: &::OpId) -> Option<&mut StateTreeValue> {
|
||||
if opid == &self.winning_value.0 {
|
||||
Some(&mut self.winning_value.1)
|
||||
} else {
|
||||
self.conflicts.get_mut(opid)
|
||||
}
|
||||
}
|
||||
|
||||
fn update(&mut self, opid: &::OpId, value: StateTreeValue) {
|
||||
if *opid >= self.winning_value.0 {
|
||||
self.conflicts
|
||||
.insert(self.winning_value.0.clone(), self.winning_value.1.clone());
|
||||
self.winning_value.0 = opid.clone();
|
||||
self.winning_value.1 = value;
|
||||
} else {
|
||||
self.conflicts.insert(opid.clone(), value);
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn default_statetree_value(&self) -> &StateTreeValue {
|
||||
&self.winning_value.1
|
||||
}
|
||||
|
||||
pub(super) fn default_statetree_value_mut(&mut self) -> &mut StateTreeValue {
|
||||
&mut self.winning_value.1
|
||||
}
|
||||
|
||||
pub(super) fn default_value(&self) -> Value {
|
||||
self.winning_value.1.realise_value()
|
||||
}
|
||||
|
||||
pub(super) fn default_opid(&self) -> amp::OpId {
|
||||
self.winning_value.0.clone()
|
||||
}
|
||||
|
||||
fn iter(&self) -> impl std::iter::Iterator<Item = (&::OpId, &StateTreeValue)> {
|
||||
std::iter::once((&(self.winning_value).0, &(self.winning_value.1)))
|
||||
.chain(self.conflicts.iter())
|
||||
}
|
||||
|
||||
pub(super) fn realise_values(&self) -> std::collections::HashMap<amp::OpId, Value> {
|
||||
self.iter()
|
||||
.map(|(opid, v)| (opid.clone(), v.realise_value()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub(crate) fn resolve_path(
|
||||
&self,
|
||||
path: Vec<PathElement>,
|
||||
parent_object_id: amp::ObjectId,
|
||||
key: amp::Key,
|
||||
) -> Option<ResolvedPath> {
|
||||
if path.is_empty() {
|
||||
if let StateTreeValue::Leaf(Primitive::Counter(_)) = self.winning_value.1 {
|
||||
return Some(ResolvedPath::new_counter(parent_object_id, key, self));
|
||||
} else if let StateTreeValue::Leaf(_) = self.winning_value.1 {
|
||||
return Some(ResolvedPath::new_primitive(self));
|
||||
}
|
||||
|
||||
if let StateTreeValue::Composite(composite) = &self.winning_value.1 {
|
||||
match composite {
|
||||
StateTreeComposite::Map(map) => {
|
||||
return Some(ResolvedPath::new_map(self, map.object_id.clone()))
|
||||
}
|
||||
StateTreeComposite::Table(table) => {
|
||||
return Some(ResolvedPath::new_table(self, table.object_id.clone()))
|
||||
}
|
||||
StateTreeComposite::Text(text) => {
|
||||
return Some(ResolvedPath::new_text(self, text.object_id.clone()))
|
||||
}
|
||||
StateTreeComposite::List(list) => {
|
||||
return Some(ResolvedPath::new_list(self, list.object_id.clone()))
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if let StateTreeValue::Composite(ref composite) = self.winning_value.1 {
|
||||
return composite.resolve_path(path);
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
pub(crate) fn resolve_path_mut(
|
||||
&mut self,
|
||||
path: Vec<PathElement>,
|
||||
parent_object_id: amp::ObjectId,
|
||||
key: amp::Key,
|
||||
) -> Option<ResolvedPathMut> {
|
||||
if path.is_empty() {
|
||||
if let StateTreeValue::Leaf(Primitive::Counter(_)) = self.winning_value.1 {
|
||||
return Some(ResolvedPathMut::new_counter(parent_object_id, key, self));
|
||||
} else if let StateTreeValue::Leaf(_) = self.winning_value.1 {
|
||||
return Some(ResolvedPathMut::new_primitive(self));
|
||||
}
|
||||
|
||||
if let StateTreeValue::Composite(composite) = &self.winning_value.1 {
|
||||
match composite {
|
||||
StateTreeComposite::Map(map) => {
|
||||
let oid = map.object_id.clone();
|
||||
return Some(ResolvedPathMut::new_map(self, oid));
|
||||
}
|
||||
StateTreeComposite::Table(table) => {
|
||||
let oid = table.object_id.clone();
|
||||
return Some(ResolvedPathMut::new_table(self, oid));
|
||||
}
|
||||
StateTreeComposite::Text(text) => {
|
||||
let oid = text.object_id.clone();
|
||||
return Some(ResolvedPathMut::new_text(self, oid));
|
||||
}
|
||||
StateTreeComposite::List(list) => {
|
||||
let oid = list.object_id.clone();
|
||||
return Some(ResolvedPathMut::new_list(self, oid));
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if let StateTreeValue::Composite(ref mut composite) = self.winning_value.1 {
|
||||
return composite.resolve_path_mut(path);
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
pub(super) fn only_for_opid(&self, opid: amp::OpId) -> Option<MultiValue> {
|
||||
if opid == self.winning_value.0 {
|
||||
Some(MultiValue {
|
||||
winning_value: self.winning_value.clone(),
|
||||
conflicts: HashMap::new(),
|
||||
})
|
||||
} else {
|
||||
self.conflicts.get(&opid).map(|value| MultiValue {
|
||||
winning_value: (opid.clone(), value.clone()),
|
||||
conflicts: HashMap::new(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn add_values_from(&mut self, other: MultiValue) {
|
||||
for (opid, value) in other.iter() {
|
||||
match opid.cmp(&self.winning_value.0) {
|
||||
Ordering::Greater => {
|
||||
let mut temp = (opid.clone(), value.clone());
|
||||
                    std::mem::swap(&mut temp, &mut self.winning_value);
                    self.conflicts.insert(temp.0, temp.1);
                }
                Ordering::Less => {
                    self.conflicts.insert(opid.clone(), value.clone());
                }
                Ordering::Equal => {}
            }
        }
    }
}

#[derive(Debug)]
pub(super) struct NewValue {
    value: StateTreeValue,
    opid: amp::OpId,
    ops: Vec<amp::Op>,
    new_cursors: Cursors,
    max_op: u64,
}

impl NewValue {
    pub(super) fn max_op(&self) -> u64 {
        self.max_op
    }

    pub(super) fn finish(self) -> (MultiValue, Vec<amp::Op>, Cursors) {
        (
            MultiValue::from_statetree_value(self.value, self.opid),
            self.ops,
            self.new_cursors,
        )
    }
}

/// This struct exists to constrain the values of a text type to just containing
/// sequences of grapheme clusters
#[derive(Debug, Clone, PartialEq, Default)]
pub struct MultiGrapheme {
    winning_value: (amp::OpId, SmolStr),
    conflicts: HashMap<amp::OpId, SmolStr>,
}

impl MultiGrapheme {
    pub(super) fn new_from_grapheme_cluster(opid: amp::OpId, s: SmolStr) -> MultiGrapheme {
        debug_assert_eq!(s.graphemes(true).count(), 1);
        MultiGrapheme {
            winning_value: (opid, s),
            conflicts: HashMap::new(),
        }
    }

    pub(super) fn check_new_from_diff(
        _opid: &amp::OpId,
        diff: &amp::Diff,
        parent_object_id: &amp::ObjectId,
    ) -> Result<(), error::InvalidPatch> {
        match diff {
            amp::Diff::Value(amp::ScalarValue::Str(s)) => {
                if s.graphemes(true).count() != 1 {
                    return Err(error::InvalidPatch::InsertNonTextInTextObject {
                        object_id: parent_object_id.clone(),
                        diff: diff.clone(),
                    });
                } else {
                    s
                }
            }
            _ => {
                return Err(error::InvalidPatch::InsertNonTextInTextObject {
                    object_id: parent_object_id.clone(),
                    diff: diff.clone(),
                });
            }
        };
        Ok(())
    }

    pub(super) fn new_from_diff(opid: amp::OpId, diff: amp::Diff) -> MultiGrapheme {
        let winning_value = match diff {
            amp::Diff::Value(amp::ScalarValue::Str(s)) => s,
            _ => unreachable!("insert non text in text object"),
        };
        MultiGrapheme {
            winning_value: (opid, winning_value),
            conflicts: HashMap::new(),
        }
    }

    pub(super) fn check_diff(
        &self,
        opid: &amp::OpId,
        diff: &amp::Diff,
        parent_object_id: &amp::ObjectId,
    ) -> Result<(), error::InvalidPatch> {
        self.check_diff_iter(&mut std::iter::once((opid, diff)), parent_object_id)
    }

    pub(super) fn check_diff_iter<'a, 'b, I>(
        &self,
        diff: &mut I,
        parent_object_id: &amp::ObjectId,
    ) -> Result<(), error::InvalidPatch>
    where
        I: Iterator<Item = (&'a amp::OpId, &'b amp::Diff)>,
    {
        for (_opid, subdiff) in diff {
            match subdiff {
                amp::Diff::Value(amp::ScalarValue::Str(s)) => {
                    if s.graphemes(true).count() != 1 {
                        return Err(error::InvalidPatch::InsertNonTextInTextObject {
                            object_id: parent_object_id.clone(),
                            diff: subdiff.clone(),
                        });
                    }
                }
                _ => {
                    return Err(error::InvalidPatch::InsertNonTextInTextObject {
                        object_id: parent_object_id.clone(),
                        diff: subdiff.clone(),
                    });
                }
            }
        }
        Ok(())
    }

    pub(super) fn apply_diff(&mut self, opid: amp::OpId, diff: amp::Diff) {
        self.apply_diff_iter(&mut std::iter::once((opid, diff)))
    }

    pub(super) fn apply_diff_iter<I>(&mut self, diff: &mut I)
    where
        I: Iterator<Item = (amp::OpId, amp::Diff)>,
    {
        for (opid, subdiff) in diff {
            match subdiff {
                amp::Diff::Value(amp::ScalarValue::Str(s)) => {
                    self.update(&opid, s);
                }
                _ => unreachable!("insert non text in text object"),
            }
        }
    }

    fn update(&mut self, key: &amp::OpId, value: SmolStr) {
        match key.cmp(&self.winning_value.0) {
            Ordering::Equal => {
                self.winning_value.1 = value;
            }
            Ordering::Greater => {
                self.conflicts
                    .insert(self.winning_value.0.clone(), self.winning_value.1.clone());
                self.winning_value.0 = key.clone();
                self.winning_value.1 = value;
            }
            Ordering::Less => {
                self.conflicts.insert(key.clone(), value);
            }
        }
    }

    pub(super) fn default_grapheme(&self) -> &SmolStr {
        &self.winning_value.1
    }

    pub fn default_opid(&self) -> &amp::OpId {
        &self.winning_value.0
    }

    fn iter(&self) -> impl std::iter::Iterator<Item = (&amp::OpId, &SmolStr)> {
        std::iter::once((&(self.winning_value).0, &(self.winning_value.1)))
            .chain(self.conflicts.iter())
    }

    pub(super) fn realise_values(&self) -> std::collections::HashMap<amp::OpId, Value> {
        self.iter()
            .map(|(opid, v)| (opid.clone(), Value::Primitive(Primitive::Str(v.to_owned()))))
            .collect()
    }

    pub(super) fn only_for_opid(&self, opid: amp::OpId) -> Option<MultiGrapheme> {
        if opid == self.winning_value.0 {
            Some(MultiGrapheme {
                winning_value: self.winning_value.clone(),
                conflicts: HashMap::new(),
            })
        } else {
            self.conflicts.get(&opid).map(|value| MultiGrapheme {
                winning_value: (opid, value.clone()),
                conflicts: HashMap::new(),
            })
        }
    }

    pub(super) fn add_values_from(&mut self, other: MultiGrapheme) {
        for (opid, value) in other.iter() {
            match opid.cmp(&self.winning_value.0) {
                Ordering::Greater => {
                    let mut temp = (opid.clone(), value.to_owned());
                    std::mem::swap(&mut temp, &mut self.winning_value);
                    self.conflicts.insert(temp.0, temp.1);
                }
                Ordering::Less => {
                    self.conflicts.insert(opid.clone(), value.to_owned());
                }
                Ordering::Equal => {}
            }
        }
    }

    pub(crate) fn resolve_path(&self, path: Vec<PathElement>) -> Option<ResolvedPath> {
        if path.is_empty() {
            Some(ResolvedPath::new_character(self))
        } else {
            None
        }
    }

    pub(crate) fn resolve_path_mut(&mut self, path: Vec<PathElement>) -> Option<ResolvedPathMut> {
        if path.is_empty() {
            Some(ResolvedPathMut::new_character(self))
        } else {
            None
        }
    }
}
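
// Illustrative sketch (not part of the original diff): how `update` resolves
// concurrent writes to the same grapheme slot. The write with the greater
// OpId wins and losers are kept in `conflicts`; this assumes `amp::OpId`
// orders by counter first, as the `cmp` calls above rely on.
#[cfg(test)]
mod multigrapheme_conflict_sketch {
    use super::*;

    #[test]
    fn greater_opid_wins_lesser_becomes_conflict() {
        let actor = amp::ActorId::random();
        let mut g = MultiGrapheme::new_from_grapheme_cluster(actor.op_id_at(1), "a".into());
        // A later op replaces the winning value...
        g.update(&actor.op_id_at(2), "b".into());
        assert_eq!(g.default_grapheme(), "b");
        // ...while an earlier one is only recorded as a conflict.
        g.update(&actor.op_id_at(1), "c".into());
        assert_eq!(g.default_grapheme(), "b");
        // One winner plus one conflicting value are visible.
        assert_eq!(g.realise_values().len(), 2);
    }
}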
|
||||
#[derive(Clone)]
|
||||
pub(crate) struct NewValueContext<'a, O>
|
||||
where
|
||||
O: Into<amp::ObjectId>,
|
||||
O: Clone,
|
||||
{
|
||||
pub(crate) actor: &'a amp::ActorId,
|
||||
pub(crate) start_op: u64,
|
||||
pub(crate) key: amp::Key,
|
||||
pub(crate) parent_obj: O,
|
||||
pub(crate) insert: bool,
|
||||
pub(crate) pred: SortedVec<amp::OpId>,
|
||||
}
|
||||
|
||||
impl<'a, O> NewValueContext<'a, O>
|
||||
where
|
||||
O: Into<amp::ObjectId>,
|
||||
O: Clone,
|
||||
{
|
||||
fn create(self, value: Value) -> NewValue {
|
||||
match value {
|
||||
Value::Map(props) => self.new_map_or_table(props, amp::MapType::Map),
|
||||
Value::Table(props) => self.new_map_or_table(props, amp::MapType::Table),
|
||||
Value::Sequence(values) => self.new_list(values),
|
||||
Value::Text(graphemes) => self.new_text(graphemes),
|
||||
Value::Primitive(p) => self.new_primitive(p),
|
||||
}
|
||||
}
|
||||
|
||||
fn new_map_or_table(
|
||||
self,
|
||||
props: std::collections::HashMap<SmolStr, Value>,
|
||||
map_type: amp::MapType,
|
||||
) -> NewValue {
|
||||
let make_op_id = amp::OpId(self.start_op, self.actor.clone());
|
||||
let make_op = amp::Op {
|
||||
action: amp::OpType::Make(amp::ObjType::from(map_type)),
|
||||
obj: self.parent_obj.clone().into(),
|
||||
key: self.key.clone(),
|
||||
insert: self.insert,
|
||||
pred: self.pred,
|
||||
};
|
||||
// for each prop we add at least one op
|
||||
let mut ops = Vec::with_capacity(props.len() + 1);
|
||||
ops.push(make_op);
|
||||
let mut current_max_op = self.start_op;
|
||||
let mut cursors = Cursors::new();
|
||||
let mut result_props: HashMap<SmolStr, MultiValue> = HashMap::with_capacity(props.len());
|
||||
for (prop, value) in props {
|
||||
let context = NewValueContext {
|
||||
actor: self.actor,
|
||||
parent_obj: &make_op_id,
|
||||
start_op: current_max_op + 1,
|
||||
key: amp::Key::Map(prop.clone()),
|
||||
pred: SortedVec::new(),
|
||||
insert: false,
|
||||
};
|
||||
let next_value = context.create(value);
|
||||
current_max_op = next_value.max_op;
|
||||
let (multivalue, new_ops, new_cursors) = next_value.finish();
|
||||
cursors.extend(new_cursors);
|
||||
ops.extend(new_ops);
|
||||
result_props.insert(prop, multivalue);
|
||||
}
|
||||
let map = match map_type {
|
||||
amp::MapType::Map => StateTreeComposite::Map(StateTreeMap {
|
||||
object_id: make_op_id.clone().into(),
|
||||
props: result_props,
|
||||
}),
|
||||
amp::MapType::Table => StateTreeComposite::Table(StateTreeTable {
|
||||
object_id: make_op_id.clone().into(),
|
||||
props: result_props,
|
||||
}),
|
||||
};
|
||||
let value = StateTreeValue::Composite(map);
|
||||
NewValue {
|
||||
value,
|
||||
opid: make_op_id,
|
||||
max_op: current_max_op,
|
||||
new_cursors: cursors,
|
||||
ops,
|
||||
}
|
||||
}
|
||||
|
||||
fn new_list(self, values: Vec<Value>) -> NewValue {
|
||||
let make_list_opid = amp::OpId::new(self.start_op, self.actor);
|
||||
let make_op = amp::Op {
|
||||
action: amp::OpType::Make(amp::ObjType::List),
|
||||
obj: self.parent_obj.into(),
|
||||
key: self.key.clone(),
|
||||
insert: self.insert,
|
||||
pred: self.pred,
|
||||
};
|
||||
// for each value we add at least one op
|
||||
let mut ops = Vec::with_capacity(values.len() + 1);
|
||||
ops.push(make_op);
|
||||
let mut current_max_op = self.start_op;
|
||||
let mut cursors = Cursors::new();
|
||||
let mut result_elems: Vec<MultiValue> = Vec::with_capacity(values.len());
|
||||
let mut last_elemid = amp::ElementId::Head;
|
||||
for value in values {
|
||||
let elem_opid = self.actor.op_id_at(current_max_op + 1);
|
||||
let context = NewValueContext {
|
||||
start_op: current_max_op + 1,
|
||||
pred: SortedVec::new(),
|
||||
insert: true,
|
||||
key: amp::Key::Seq(last_elemid),
|
||||
actor: self.actor,
|
||||
parent_obj: make_list_opid.clone(),
|
||||
};
|
||||
last_elemid = elem_opid.clone().into();
|
||||
let next_value = context.create(value);
|
||||
current_max_op = next_value.max_op;
|
||||
let (multivalue, new_ops, new_cursors) = next_value.finish();
|
||||
cursors.extend(new_cursors);
|
||||
ops.extend(new_ops);
|
||||
result_elems.push(multivalue);
|
||||
}
|
||||
let list = StateTreeComposite::List(StateTreeList {
|
||||
object_id: make_list_opid.clone().into(),
|
||||
elements: DiffableSequence::new_from(result_elems),
|
||||
});
|
||||
let value = StateTreeValue::Composite(list);
|
||||
NewValue {
|
||||
value,
|
||||
opid: make_list_opid,
|
||||
max_op: current_max_op,
|
||||
new_cursors: cursors,
|
||||
ops,
|
||||
}
|
||||
}
|
||||
|
||||
fn new_text(self, graphemes: Vec<SmolStr>) -> NewValue {
|
||||
let make_text_opid = self.actor.op_id_at(self.start_op);
|
||||
let make_op = amp::Op {
|
||||
action: amp::OpType::Make(amp::ObjType::Text),
|
||||
obj: self.parent_obj.into(),
|
||||
key: self.key.clone(),
|
||||
insert: self.insert,
|
||||
pred: self.pred,
|
||||
};
|
||||
// for each value we add at least one op
|
||||
let mut ops = Vec::with_capacity(graphemes.len() + 1);
|
||||
ops.push(make_op);
|
||||
let mut current_max_op = self.start_op;
|
||||
let mut last_elemid = amp::ElementId::Head;
|
||||
let mut multigraphemes: Vec<MultiGrapheme> = Vec::with_capacity(graphemes.len());
|
||||
for grapheme in graphemes.iter() {
|
||||
current_max_op += 1;
|
||||
let opid = self.actor.op_id_at(current_max_op);
|
||||
let op = amp::Op {
|
||||
action: amp::OpType::Set(amp::ScalarValue::Str(grapheme.clone())),
|
||||
obj: make_text_opid.clone().into(),
|
||||
key: amp::Key::Seq(last_elemid),
|
||||
insert: true,
|
||||
pred: SortedVec::new(),
|
||||
};
|
||||
multigraphemes.push(MultiGrapheme::new_from_grapheme_cluster(
|
||||
opid.clone(),
|
||||
grapheme.clone(),
|
||||
));
|
||||
ops.push(op);
|
||||
last_elemid = opid.clone().into();
|
||||
}
|
||||
let seq = DiffableSequence::new_from(multigraphemes);
|
||||
let text = StateTreeComposite::Text(StateTreeText {
|
||||
object_id: make_text_opid.clone().into(),
|
||||
graphemes: seq,
|
||||
});
|
||||
let value = StateTreeValue::Composite(text);
|
||||
NewValue {
|
||||
value,
|
||||
opid: make_text_opid,
|
||||
ops,
|
||||
new_cursors: Cursors::new(),
|
||||
max_op: current_max_op,
|
||||
}
|
||||
}
|
||||
|
||||
fn new_primitive(self, primitive: Primitive) -> NewValue {
|
||||
let new_cursors = match primitive {
|
||||
Primitive::Cursor(ref c) => Cursors::new_from(CursorState {
|
||||
index: c.index as usize,
|
||||
referring_object_id: self.parent_obj.clone().into(),
|
||||
referring_key: self.key.clone(),
|
||||
referred_opid: c.elem_opid.clone(),
|
||||
referred_object_id: c.object.clone(),
|
||||
}),
|
||||
_ => Cursors::new(),
|
||||
};
|
||||
let value = match &primitive {
|
||||
Primitive::Bytes(b) => amp::ScalarValue::Bytes(b.clone()),
|
||||
Primitive::Str(s) => amp::ScalarValue::Str(s.clone()),
|
||||
Primitive::Int(i) => amp::ScalarValue::Int(*i),
|
||||
Primitive::Uint(u) => amp::ScalarValue::Uint(*u),
|
||||
Primitive::F64(f) => amp::ScalarValue::F64(*f),
|
||||
Primitive::Counter(i) => amp::ScalarValue::Counter(*i),
|
||||
Primitive::Timestamp(t) => amp::ScalarValue::Timestamp(*t),
|
||||
Primitive::Boolean(b) => amp::ScalarValue::Boolean(*b),
|
||||
Primitive::Cursor(c) => amp::ScalarValue::Cursor(c.elem_opid.clone()),
|
||||
Primitive::Null => amp::ScalarValue::Null,
|
||||
};
|
||||
let opid = self.actor.op_id_at(self.start_op);
|
||||
NewValue {
|
||||
value: StateTreeValue::Leaf(primitive),
|
||||
opid,
|
||||
ops: vec![amp::Op {
|
||||
action: amp::OpType::Set(value),
|
||||
obj: self.parent_obj.into(),
|
||||
key: self.key,
|
||||
insert: self.insert,
|
||||
pred: self.pred.clone(),
|
||||
}],
|
||||
max_op: self.start_op,
|
||||
new_cursors,
|
||||
}
|
||||
}
|
||||
}
|
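
// Illustrative sketch (not part of the original diff) of how a context is
// assembled by the calling code; the field values here are assumptions:
//
//     let ctx = NewValueContext {
//         actor: &actor,
//         start_op: 1,
//         key: amp::Key::Map("text".into()),
//         parent_obj: amp::ObjectId::Root,
//         insert: false,
//         pred: SortedVec::new(),
//     };
//     let new_value = ctx.create(Value::Text(vec!["a".into()]));
//     // Op 1 makes the text object, op 2 inserts the grapheme, so:
//     assert_eq!(new_value.max_op(), 2);
//     let (_multivalue, ops, _cursors) = new_value.finish();
//     assert_eq!(ops.len(), 2);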
1040
automerge-frontend/src/state_tree/resolved_path.rs
Normal file
File diff suppressed because it is too large
Load diff
424
automerge-frontend/src/value.rs
Normal file
@@ -0,0 +1,424 @@
use std::{borrow::Cow, collections::HashMap};

use amp::SortedVec;
use automerge_protocol as amp;
use serde::Serialize;
use smol_str::SmolStr;

use crate::path::PathElement;

#[derive(Serialize, Clone, Debug, PartialEq)]
pub struct Conflicts(HashMap<amp::OpId, Value>);

impl From<HashMap<amp::OpId, Value>> for Conflicts {
    fn from(hmap: HashMap<amp::OpId, Value>) -> Self {
        Conflicts(hmap)
    }
}

#[derive(Serialize, Clone, Debug, PartialEq)]
#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))]
#[serde(untagged)]
pub enum Value {
    Map(HashMap<SmolStr, Value>),
    Table(HashMap<SmolStr, Value>),
    Sequence(Vec<Value>),
    /// Sequence of grapheme clusters
    Text(Vec<SmolStr>),
    Primitive(Primitive),
}

#[derive(Serialize, Clone, Debug, PartialEq)]
#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))]
pub enum Primitive {
    Bytes(Vec<u8>),
    Str(SmolStr),
    Int(i64),
    Uint(u64),
    F64(f64),
    Counter(i64),
    Timestamp(i64),
    Boolean(bool),
    Cursor(Cursor),
    Null,
}

#[derive(Serialize, Clone, Debug, PartialEq)]
#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))]
pub struct Cursor {
    pub index: u32,
    pub(crate) object: amp::ObjectId,
    pub(crate) elem_opid: amp::OpId,
}

impl Cursor {
    pub fn new(index: u32, obj: amp::ObjectId, op: amp::OpId) -> Cursor {
        Cursor {
            index,
            object: obj,
            elem_opid: op,
        }
    }
}

impl From<Cursor> for Value {
    fn from(c: Cursor) -> Self {
        Value::Primitive(Primitive::Cursor(c))
    }
}

impl From<&Primitive> for amp::ScalarValue {
    fn from(p: &Primitive) -> Self {
        match p {
            Primitive::Bytes(b) => amp::ScalarValue::Bytes(b.clone()),
            Primitive::Str(s) => amp::ScalarValue::Str(s.clone()),
            Primitive::Int(i) => amp::ScalarValue::Int(*i),
            Primitive::Uint(u) => amp::ScalarValue::Uint(*u),
            Primitive::F64(f) => amp::ScalarValue::F64(*f),
            Primitive::Counter(i) => amp::ScalarValue::Counter(*i),
            Primitive::Timestamp(i) => amp::ScalarValue::Timestamp(*i),
            Primitive::Boolean(b) => amp::ScalarValue::Boolean(*b),
            Primitive::Null => amp::ScalarValue::Null,
            Primitive::Cursor(c) => amp::ScalarValue::Cursor(c.elem_opid.clone()),
        }
    }
}

impl From<Primitive> for Value {
    fn from(p: Primitive) -> Self {
        Value::Primitive(p)
    }
}

impl From<&str> for Value {
    fn from(s: &str) -> Self {
        Value::Primitive(Primitive::Str(SmolStr::new(s)))
    }
}

impl From<&amp::CursorDiff> for Primitive {
    fn from(diff: &amp::CursorDiff) -> Self {
        Primitive::Cursor(Cursor {
            index: diff.index,
            object: diff.object_id.clone(),
            elem_opid: diff.elem_id.clone(),
        })
    }
}

impl From<char> for Value {
    fn from(c: char) -> Value {
        Value::Primitive(Primitive::Str(SmolStr::new(c.to_string())))
    }
}

impl<T> From<Vec<T>> for Value
where
    T: Into<Value>,
{
    fn from(v: Vec<T>) -> Self {
        Value::Sequence(v.into_iter().map(|t| t.into()).collect())
    }
}

impl From<i64> for Value {
    fn from(v: i64) -> Self {
        Value::Primitive(Primitive::Int(v))
    }
}

impl<T, K> From<HashMap<K, T>> for Value
where
    T: Into<Value>,
    K: AsRef<str>,
{
    fn from(h: HashMap<K, T>) -> Self {
        Value::Map(
            h.into_iter()
                .map(|(k, v)| (SmolStr::new(k), v.into()))
                .collect(),
        )
    }
}

impl AsRef<Value> for Value {
    fn as_ref(&self) -> &Value {
        self
    }
}

impl Value {
    pub fn from_json(json: &serde_json::Value) -> Value {
        match json {
            serde_json::Value::Object(kvs) => {
                let result: HashMap<SmolStr, Value> = kvs
                    .iter()
                    .map(|(k, v)| (SmolStr::new(k), Value::from_json(v)))
                    .collect();
                Value::Map(result)
            }
            serde_json::Value::Array(vs) => {
                Value::Sequence(vs.iter().map(Value::from_json).collect())
            }
            serde_json::Value::String(s) => Value::Primitive(Primitive::Str(SmolStr::new(s))),
            serde_json::Value::Number(n) => {
                Value::Primitive(Primitive::F64(n.as_f64().unwrap_or(0.0)))
            }
            serde_json::Value::Bool(b) => Value::Primitive(Primitive::Boolean(*b)),
            serde_json::Value::Null => Value::Primitive(Primitive::Null),
        }
    }

    pub fn to_json(&self) -> serde_json::Value {
        match self {
            Value::Map(map) => {
                let result: serde_json::map::Map<String, serde_json::Value> = map
                    .iter()
                    .map(|(k, v)| (k.to_string(), v.to_json()))
                    .collect();
                serde_json::Value::Object(result)
            }
            Value::Table(map) => {
                let result: serde_json::map::Map<String, serde_json::Value> = map
                    .iter()
                    .map(|(k, v)| (k.to_string(), v.to_json()))
                    .collect();
                serde_json::Value::Object(result)
            }
            Value::Sequence(elements) => {
                serde_json::Value::Array(elements.iter().map(|v| v.to_json()).collect())
            }
            Value::Text(graphemes) => serde_json::Value::String(graphemes.join("")),
            Value::Primitive(v) => match v {
                Primitive::F64(n) => serde_json::Value::Number(
                    serde_json::Number::from_f64(*n).unwrap_or_else(|| serde_json::Number::from(0)),
                ),
                Primitive::Uint(n) => serde_json::Value::Number(serde_json::Number::from(*n)),
                Primitive::Int(n) => serde_json::Value::Number(serde_json::Number::from(*n)),
                Primitive::Bytes(b) => serde_json::Value::Array(
                    b.iter()
                        .map(|byte| serde_json::Value::Number(serde_json::Number::from(*byte)))
                        .collect(),
                ),
                Primitive::Str(s) => serde_json::Value::String(s.to_string()),
                Primitive::Boolean(b) => serde_json::Value::Bool(*b),
                Primitive::Counter(c) => serde_json::Value::Number(serde_json::Number::from(*c)),
                Primitive::Timestamp(t) => serde_json::Value::Number(serde_json::Number::from(*t)),
                Primitive::Null => serde_json::Value::Null,
                Primitive::Cursor(c) => {
                    serde_json::Value::Number(serde_json::Number::from(c.index))
                }
            },
        }
    }

    pub fn get_value(&self, path: crate::Path) -> Option<Cow<'_, Self>> {
        let mut path_elements = path.elements();
        path_elements.reverse();
        self.get_value_rev_path(path_elements)
    }

    fn get_value_rev_path(&self, mut rev_path: Vec<PathElement>) -> Option<Cow<'_, Self>> {
        if let Some(element) = rev_path.pop() {
            match (self, element) {
                (Value::Map(m), PathElement::Key(k)) => {
                    m.get(&k).and_then(|v| v.get_value_rev_path(rev_path))
                }
                (Value::Table(m), PathElement::Key(k)) => {
                    m.get(&k).and_then(|v| v.get_value_rev_path(rev_path))
                }
                (Value::Sequence(s), PathElement::Index(i)) => s
                    .get(i as usize)
                    .and_then(|v| v.get_value_rev_path(rev_path)),
                (Value::Text(t), PathElement::Index(i)) => t
                    .get(i as usize)
                    .map(|v| Cow::Owned(Value::Primitive(Primitive::Str(v.clone())))),
                (Value::Map(_), PathElement::Index(_))
                | (Value::Table(_), PathElement::Index(_))
                | (Value::Sequence(_), PathElement::Key(_))
                | (Value::Text(_), PathElement::Key(_))
                | (Value::Primitive(_), PathElement::Key(_))
                | (Value::Primitive(_), PathElement::Index(_)) => None,
            }
        } else {
            Some(Cow::Borrowed(self))
        }
    }
}

/// Convert a value to a vector of op requests that will create said value.
///
/// # Arguments
///
/// * actor - The actor who is creating this value
/// * start_op - The start op which will be used to generate element IDs
/// * parent_object - The ID of the "parent" object, i.e. the object that will
///   contain the newly created object
/// * key - The property that the newly created object will populate
///   within the parent object.
/// * insert - Whether the op that creates this value should be an insert
///
/// Returns a vector of the op requests which will create this value
pub(crate) fn value_to_op_requests(
    actor: &amp::ActorId,
    start_op: u64,
    parent_object: amp::ObjectId,
    key: &amp::Key,
    v: &Value,
    insert: bool,
) -> (Vec<amp::Op>, u64) {
    match v {
        Value::Sequence(vs) => {
            let list_op = amp::OpId(start_op, actor.clone());
            let make_op = amp::Op {
                action: amp::OpType::Make(amp::ObjType::List),
                obj: parent_object,
                key: key.clone(),
                insert,
                pred: SortedVec::new(),
            };
            let mut op_num = start_op + 1;
            let mut result = vec![make_op];
            let mut last_elemid = amp::ElementId::Head;
            for v in vs.iter() {
                let (child_requests, new_op_num) = value_to_op_requests(
                    actor,
                    op_num,
                    amp::ObjectId::from(list_op.clone()),
                    &last_elemid.clone().into(),
                    v,
                    true,
                );
                last_elemid = amp::OpId::new(op_num, actor).into();
                op_num = new_op_num;
                result.extend(child_requests);
            }
            (result, op_num)
        }
        Value::Text(chars) => {
            let make_text_op = amp::OpId(start_op, actor.clone());
            let make_op = amp::Op {
                action: amp::OpType::Make(amp::ObjType::Text),
                obj: parent_object,
                key: key.clone(),
                insert,
                pred: SortedVec::new(),
            };
            let mut insert_ops: Vec<amp::Op> = Vec::new();
            let mut last_elemid = amp::ElementId::Head;
            let mut op_num = start_op + 1;
            for c in chars.iter() {
                insert_ops.push(amp::Op {
                    action: amp::OpType::Set(amp::ScalarValue::Str(c.clone())),
                    obj: amp::ObjectId::from(make_text_op.clone()),
                    key: last_elemid.clone().into(),
                    insert: true,
                    pred: SortedVec::new(),
                });
                last_elemid = amp::OpId::new(op_num, actor).into();
                op_num += 1;
            }
            let mut ops = vec![make_op];
            ops.extend(insert_ops.into_iter());
            (ops, op_num)
        }
        Value::Map(kvs) => {
            let make_op_id = amp::OpId::new(start_op, actor);
            let make_op = amp::Op {
                action: amp::OpType::Make(amp::ObjType::Map),
                obj: parent_object,
                key: key.clone(),
                insert,
                pred: SortedVec::new(),
            };
            let mut op_num = start_op + 1;
            let mut result = vec![make_op];
            for (key, v) in kvs.iter() {
                let (child_requests, new_op_num) = value_to_op_requests(
                    actor,
                    op_num,
                    amp::ObjectId::from(make_op_id.clone()),
                    &amp::Key::from(key.as_str()),
                    v,
                    false,
                );
                op_num = new_op_num;
                result.extend(child_requests);
            }
            (result, op_num)
        }
        Value::Table(kvs) => {
            let make_op_id = amp::OpId::new(start_op, actor);
            let make_op = amp::Op {
                action: amp::OpType::Make(amp::ObjType::Table),
                obj: parent_object,
                key: key.clone(),
                insert,
                pred: SortedVec::new(),
            };
            let mut op_num = start_op + 1;
            let mut result = vec![make_op];
            for (key, v) in kvs.iter() {
                let (child_requests, new_op_num) = value_to_op_requests(
                    actor,
                    op_num,
                    amp::ObjectId::from(make_op_id.clone()),
                    &amp::Key::from(key.as_str()),
                    v,
                    false,
                );
                op_num = new_op_num;
                result.extend(child_requests);
            }
            (result, op_num)
        }
        Value::Primitive(prim_value) => {
            let ops = vec![amp::Op {
                action: amp::OpType::Set(prim_value.into()),
                obj: parent_object,
                key: key.clone(),
                insert,
                pred: SortedVec::new(),
            }];
            (ops, start_op + 1)
        }
    }
}
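
// Worked example (added commentary): for `Value::Sequence(vec!["a".into()])`
// with `start_op = 1`, op 1 makes the list object and op 2 sets the single
// element, so the call returns those two ops plus the next free op number, 3.
// Nested values recurse the same way, threading `op_num` through each child.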

#[cfg(test)]
mod tests {
    use maplit::hashmap;
    use pretty_assertions::assert_eq;

    use super::*;
    use crate::Path;

    #[test]
    fn get_value() {
        let v = Value::Map(hashmap! {
            "hello".into() => Value::Primitive(Primitive::Str("world".into())),
            "again".into() => Value::Sequence(vec![Value::Primitive(Primitive::Int(2))])
        });

        assert_eq!(v.get_value(Path::root()), Some(Cow::Borrowed(&v)));
        assert_eq!(
            v.get_value(Path::root().key("hello")),
            Some(Cow::Borrowed(&Value::Primitive(Primitive::Str(
                "world".into()
            ))))
        );
        assert_eq!(v.get_value(Path::root().index(0)), None);
        assert_eq!(
            v.get_value(Path::root().key("again")),
            Some(Cow::Borrowed(&Value::Sequence(vec![Value::Primitive(
                Primitive::Int(2)
            )])))
        );
        assert_eq!(
            v.get_value(Path::root().key("again").index(0)),
            Some(Cow::Borrowed(&Value::Primitive(Primitive::Int(2))))
        );
        assert_eq!(v.get_value(Path::root().key("again").index(1)), None);
    }
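
    // A sketch test (added, assumes `to_json` as defined above): Text values
    // serialise by joining their grapheme clusters into one JSON string.
    #[test]
    fn text_to_json_joins_graphemes() {
        let v = Value::Text(vec!["é".into(), "b".into()]);
        assert_eq!(v.to_json(), serde_json::Value::String("éb".into()));
    }

    // A sketch test (added) for the op numbering described in the comment
    // after `value_to_op_requests`: one make op plus one set op for a
    // single-element list, and the next free op number is returned.
    #[test]
    fn value_to_op_requests_numbers_ops_sequentially() {
        let actor = amp::ActorId::random();
        let (ops, next_op) = value_to_op_requests(
            &actor,
            1,
            amp::ObjectId::Root,
            &amp::Key::from("birds"),
            &Value::Sequence(vec!["a".into()]),
            false,
        );
        assert_eq!(ops.len(), 2);
        assert_eq!(next_op, 3);
    }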
}
1047
automerge-frontend/tests/test_apply_patch.rs
Normal file
File diff suppressed because it is too large
Load diff
732
automerge-frontend/tests/test_backend_concurrency.rs
Normal file
@@ -0,0 +1,732 @@
use amp::{RootDiff, SortedVec};
use automerge_backend::Backend;
use automerge_frontend::{
    Frontend, InvalidChangeRequest, InvalidPatch, LocalChange, Path, Primitive, Value,
};
use automerge_protocol as amp;
use maplit::hashmap;
use pretty_assertions::assert_eq;

fn random_op_id() -> amp::OpId {
    amp::OpId::new(1, &amp::ActorId::random())
}

#[test]
fn use_version_and_sequence_number_from_backend() {
    let mut doc = Frontend::new();
    let remote_actor1 = amp::ActorId::random();
    let remote_actor2 = amp::ActorId::random();

    // This is a remote patch
    let patch = amp::Patch {
        actor: None,
        seq: None,
        clock: hashmap! {
            doc.actor_id.clone() => 4,
            remote_actor1 => 11,
            remote_actor2 => 41,
        },
        deps: Vec::new(),
        diffs: RootDiff {
            props: hashmap! {
                "blackbirds".into() => hashmap!{
                    random_op_id() => amp::Diff::Value(amp::ScalarValue::F64(24.0))
                }
            },
        },
        max_op: 4,
        pending_changes: 0,
    };

    // There were no in flight requests so the doc state should be reconciled
    // and should reflect the above patch
    doc.apply_patch(patch).unwrap();

    // Now apply a local patch, this will move the doc into the "waiting for
    // in flight requests" state, which should reflect the change just made.
    let req = doc
        .change::<_, _, InvalidChangeRequest>(None, |doc| {
            doc.add_change(LocalChange::set(
                Path::root().key("partridges"),
                Value::Primitive(Primitive::Int(1)),
            ))?;
            Ok(())
        })
        .unwrap()
        .1
        .unwrap();

    let expected_change_request = amp::Change {
        actor_id: doc.actor_id,
        seq: 5,
        start_op: 5,
        time: req.time,
        message: None,
        hash: None,
        deps: Vec::new(),
        operations: vec![amp::Op {
            action: amp::OpType::Set(amp::ScalarValue::Int(1)),
            obj: amp::ObjectId::Root,
            key: "partridges".into(),
            insert: false,
            pred: SortedVec::new(),
        }],
        extra_bytes: Vec::new(),
    };

    assert_eq!(req, expected_change_request);
}

#[test]
fn remove_pending_requests_once_handled() {
    let mut doc = Frontend::new();

    // First we add two local changes
    let _req1 = doc
        .change::<_, _, InvalidChangeRequest>(None, |doc| {
            doc.add_change(LocalChange::set(
                Path::root().key("blackbirds"),
                Primitive::Int(24),
            ))?;
            Ok(())
        })
        .unwrap()
        .1
        .unwrap();

    let _req2 = doc
        .change::<_, _, InvalidChangeRequest>(None, |doc| {
            doc.add_change(LocalChange::set(
                Path::root().key("partridges"),
                Primitive::Int(1),
            ))?;
            Ok(())
        })
        .unwrap()
        .1
        .unwrap();

    // The doc is waiting for those changes to be applied
    assert_eq!(doc.in_flight_requests(), vec![1, 2]);

    // Apply a patch corresponding (via actor ID and seq) to the first change
    doc.apply_patch(amp::Patch {
        actor: Some(doc.actor_id.clone()),
        seq: Some(1),
        clock: hashmap! {
            doc.actor_id.clone() => 1,
        },
        max_op: 4,
        pending_changes: 0,
        deps: Vec::new(),
        diffs: RootDiff {
            props: hashmap! {
                "blackbirds".into() => hashmap!{
                    random_op_id() => amp::Diff::Value(amp::ScalarValue::Int(24))
                }
            },
        },
    })
    .unwrap();

    // The doc state should still reflect both local changes as we're still
    // waiting for the last in flight request to be fulfilled
    assert_eq!(
        doc.state(),
        &Into::<Value>::into(hashmap! {
            "blackbirds".to_string() => Primitive::Int(24),
            "partridges".to_string() => Primitive::Int(1),
        })
    );
    assert_eq!(doc.in_flight_requests(), vec![2]);

    // Apply a patch corresponding (via actor ID and seq) to the second change
    doc.apply_patch(amp::Patch {
        actor: Some(doc.actor_id.clone()),
        seq: Some(2),
        clock: hashmap! {
            doc.actor_id.clone() => 2,
        },
        max_op: 5,
        pending_changes: 0,
        deps: Vec::new(),
        diffs: RootDiff {
            props: hashmap! {
                "partridges".into() => hashmap!{
                    random_op_id() => amp::Diff::Value(amp::ScalarValue::Int(1))
                }
            },
        },
    })
    .unwrap();

    // The doc state should have switched to reconciled
    assert!(doc.in_flight_requests().is_empty());

    // The doc state should still reflect the local changes as they have now
    // been reconciled
    assert_eq!(
        doc.state(),
        &Into::<Value>::into(hashmap! {
            "blackbirds".to_string() => Primitive::Int(24),
            "partridges".to_string() => Primitive::Int(1),
        })
    );

    assert_eq!(doc.seq, 2);
}
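
// A sketch of the frontend request lifecycle the two tests above exercise
// (inferred from the assertions, not an authoritative diagram):
//
//   change()                          -> push seq onto in_flight_requests
//   apply_patch(actor: ours, seq: n)  -> pop the matching head of the queue
//   queue empty                       -> doc state is reconciled with backend
//
// Remote patches (actor/seq of None) leave the queue untouched, as the next
// test shows.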

#[test]
fn leave_request_queue_unchanged_on_remote_changes() {
    let remote = amp::ActorId::random();
    let mut doc = Frontend::new();
    // Enqueue a local change, moving the document into the "waiting for in
    // flight requests" state
    let _req1 = doc
        .change::<_, _, InvalidChangeRequest>(None, |doc| {
            doc.add_change(LocalChange::set(
                Path::root().key("blackbirds"),
                Primitive::Int(24),
            ))?;
            Ok(())
        })
        .unwrap()
        .1
        .unwrap();

    // The document is now waiting for the above request
    assert_eq!(doc.in_flight_requests(), vec![1]);

    // Apply a remote patch (due to actor ID and seq missing)
    doc.apply_patch(amp::Patch {
        actor: None,
        seq: None,
        max_op: 10,
        pending_changes: 0,
        clock: hashmap! {
            remote.clone() => 1,
        },
        deps: Vec::new(),
        diffs: RootDiff {
            props: hashmap! {
                "pheasants".into() => hashmap!{
                    random_op_id() => amp::Diff::Value(amp::ScalarValue::Int(2))
                }
            },
        },
    })
    .unwrap();

    // The doc state should reflect the outstanding in flight request and not
    // the remote patch (because we're still waiting for in flight requests)
    assert_eq!(
        doc.state(),
        &Into::<Value>::into(hashmap! {
            "blackbirds".to_string() => Primitive::Int(24),
        })
    );
    assert_eq!(doc.in_flight_requests(), vec![1]);

    // Now apply a patch corresponding to the outstanding in flight request
    doc.apply_patch(amp::Patch {
        actor: Some(doc.actor_id.clone()),
        seq: Some(1),
        clock: hashmap! {
            doc.actor_id.clone() => 2,
            remote => 1,
        },
        max_op: 11,
        pending_changes: 0,
        deps: Vec::new(),
        diffs: RootDiff {
            props: hashmap! {
                "blackbirds".into() => hashmap!{
                    random_op_id() => amp::Diff::Value(amp::ScalarValue::Int(24))
                }
            },
        },
    })
    .unwrap();

    // The doc state should now reflect both the local and remote changes
    // as the doc is now reconciled (all in flight requests have received a
    // patch)
    assert_eq!(
        doc.state(),
        &Into::<Value>::into(hashmap! {
            "blackbirds".to_string() => Primitive::Int(24),
            "pheasants".to_string() => Primitive::Int(2),
        })
    );

    assert!(doc.in_flight_requests().is_empty());
    assert_eq!(doc.seq, 2);
}

#[test]
fn dont_allow_out_of_order_request_patches() {
    let mut doc = Frontend::new();
    let _req1 = doc
        .change::<_, _, InvalidChangeRequest>(None, |doc| {
            doc.add_change(LocalChange::set(
                Path::root().key("blackbirds"),
                Primitive::Int(24),
            ))?;
            Ok(())
        })
        .unwrap()
        .1
        .unwrap();

    let result = doc.apply_patch(amp::Patch {
        actor: Some(doc.actor_id.clone()),
        seq: Some(2),
        max_op: 8,
        pending_changes: 0,
        clock: hashmap! {
            doc.actor_id.clone() => 2,
        },
        deps: Vec::new(),
        diffs: RootDiff {
            props: hashmap! {
                "partridges".into() => hashmap!{
                    random_op_id() => amp::Diff::Value(amp::ScalarValue::Int(1))
                }
            },
        },
    });

    assert_eq!(
        result,
        Err(InvalidPatch::MismatchedSequenceNumber {
            expected: 1,
            actual: 2
        })
    );
}

#[test]
fn handle_concurrent_insertions_into_lists() {
    let mut doc = Frontend::new();
    let _req1 = doc
        .change::<_, _, InvalidChangeRequest>(None, |doc| {
            doc.add_change(LocalChange::set(
                Path::root().key("birds"),
                vec!["goldfinch"],
            ))?;
            Ok(())
        })
        .unwrap()
        .1
        .unwrap();

    let birds_id = doc.get_object_id(&Path::root().key("birds")).unwrap();

    // Apply the corresponding backend patch for the above state, document
    // should be reconciled after this
    doc.apply_patch(amp::Patch {
        actor: Some(doc.actor_id.clone()),
        seq: Some(1),
        max_op: 1,
        pending_changes: 0,
        clock: hashmap! {
            doc.actor_id.clone() => 1,
        },
        deps: Vec::new(),
        diffs: RootDiff {
            props: hashmap! {
                "birds".into() => hashmap!{
                    doc.actor_id.op_id_at(1) => amp::Diff::List(amp::ListDiff{
                        object_id: birds_id.clone(),
                        edits: vec![amp::DiffEdit::SingleElementInsert{
                            index: 0,
                            elem_id: doc.actor_id.op_id_at(1).into(),
                            op_id: doc.actor_id.op_id_at(1),
                            value: amp::Diff::Value("goldfinch".into()),
                        }],
                    })
                }
            },
        },
    })
    .unwrap();

    assert_eq!(
        doc.state(),
        &Into::<Value>::into(hashmap! {"birds".to_string() => vec!["goldfinch"]})
    );
    assert!(doc.in_flight_requests().is_empty());

    // Now add another change which updates the same list, this results in an
    // in flight request
    let _req2 = doc
        .change::<_, _, InvalidChangeRequest>(None, |doc| {
            doc.add_change(LocalChange::insert(
                Path::root().key("birds").index(0),
                "chaffinch".into(),
            ))?;
            doc.add_change(LocalChange::insert(
                Path::root().key("birds").index(2),
                "greenfinch".into(),
            ))?;
            Ok(())
        })
        .unwrap()
        .1
        .unwrap();

    assert_eq!(
        doc.state(),
        &Into::<Value>::into(
            hashmap! {"birds".to_string() => vec!["chaffinch", "goldfinch", "greenfinch"]}
        )
    );

    let remote = amp::ActorId::random();

    // Apply a patch which does not take effect because we're still waiting
    // for the in flight requests to be responded to
    doc.apply_patch(amp::Patch {
        clock: hashmap! {
            doc.actor_id.clone() => 1,
            remote.clone() => 1,
        },
        max_op: 3,
        pending_changes: 0,
        actor: None,
        seq: None,
        deps: Vec::new(),
        diffs: RootDiff {
            props: hashmap! {
                "birds".into() => hashmap!{
                    doc.actor_id.op_id_at(1) => amp::Diff::List(amp::ListDiff{
                        object_id: birds_id.clone(),
                        edits: vec![amp::DiffEdit::SingleElementInsert{
                            index: 1,
                            elem_id: remote.op_id_at(1).into(),
                            op_id: doc.actor_id.op_id_at(1),
                            value: amp::Diff::Value("bullfinch".into()),
                        }],
                    })
                }
            },
        },
    })
    .unwrap();

    // Check that the doc state hasn't been updated yet
    assert_eq!(
        doc.state(),
        &Into::<Value>::into(
            hashmap! {"birds".to_string() => vec!["chaffinch", "goldfinch", "greenfinch"]}
        )
    );

    // Now apply a patch acknowledging the in flight request
    doc.apply_patch(amp::Patch {
        actor: Some(doc.actor_id.clone()),
        seq: Some(2),
        max_op: 3,
        pending_changes: 0,
        clock: hashmap! {
            doc.actor_id.clone() => 2,
            remote => 1,
        },
        deps: Vec::new(),
        diffs: RootDiff {
            props: hashmap! {
                "birds".into() => hashmap!{
                    doc.actor_id.op_id_at(1) => amp::Diff::List(amp::ListDiff{
                        object_id: birds_id,
                        edits: vec![
                            amp::DiffEdit::SingleElementInsert {
                                index: 0,
                                elem_id: doc.actor_id.op_id_at(2).into(),
                                op_id: doc.actor_id.op_id_at(2),
                                value: amp::Diff::Value("chaffinch".into()),
                            },
                            amp::DiffEdit::SingleElementInsert{
                                index: 2,
                                elem_id: doc.actor_id.op_id_at(3).into(),
                                op_id: doc.actor_id.op_id_at(3),
                                value: amp::Diff::Value("greenfinch".into()),
                            },
                        ],
                    })
                }
            },
        },
    })
    .unwrap();

    assert!(doc.in_flight_requests().is_empty());
    assert_eq!(
        doc.state(),
        &Into::<Value>::into(
            hashmap! {"birds".to_string() => vec!["chaffinch", "goldfinch", "greenfinch", "bullfinch"]}
        )
    )
}

#[test]
fn allow_interleaving_of_patches_and_changes() {
    let mut doc = Frontend::new();
    let req1 = doc
        .change::<_, _, InvalidChangeRequest>(None, |doc| {
            doc.add_change(LocalChange::set(
                Path::root().key("number"),
                Primitive::Int(1),
            ))?;
            Ok(())
        })
        .unwrap()
        .1
        .unwrap();

    let req2 = doc
        .change::<_, _, InvalidChangeRequest>(None, |doc| {
            doc.add_change(LocalChange::set(
                Path::root().key("number"),
                Primitive::Int(2),
            ))?;
            Ok(())
        })
        .unwrap()
        .1
        .unwrap();

    assert_eq!(
        req1,
        amp::Change {
            actor_id: doc.actor_id.clone(),
            seq: 1,
            start_op: 1,
            message: None,
            hash: None,
            time: req1.time,
            deps: Vec::new(),
            operations: vec![amp::Op {
                action: amp::OpType::Set(amp::ScalarValue::Int(1)),
                obj: amp::ObjectId::Root,
                key: "number".into(),
                insert: false,
                pred: SortedVec::new(),
            }],
            extra_bytes: Vec::new(),
        }
    );

    assert_eq!(
        req2,
        amp::Change {
            actor_id: doc.actor_id.clone(),
            seq: 2,
            start_op: 2,
            message: None,
            hash: None,
            time: req2.time,
            deps: Vec::new(),
            operations: vec![amp::Op {
                action: amp::OpType::Set(amp::ScalarValue::Int(2)),
                obj: amp::ObjectId::Root,
                key: "number".into(),
                insert: false,
                pred: vec![doc.actor_id.op_id_at(1)].into(),
            }],
            extra_bytes: Vec::new(),
        }
    );

    let mut backend = Backend::new();
    let (patch1, _) = backend.apply_local_change(req1).unwrap();
    doc.apply_patch(patch1).unwrap();

    let req3 = doc
        .change::<_, _, InvalidChangeRequest>(None, |doc| {
            doc.add_change(LocalChange::set(
                Path::root().key("number"),
                Primitive::Int(3),
            ))?;
            Ok(())
        })
        .unwrap()
        .1
        .unwrap();

    assert_eq!(
        req3,
        amp::Change {
            actor_id: doc.actor_id.clone(),
            seq: 3,
            start_op: 3,
            message: None,
            hash: None,
            time: req3.time,
            deps: Vec::new(),
            operations: vec![amp::Op {
                action: amp::OpType::Set(amp::ScalarValue::Int(3)),
                obj: amp::ObjectId::Root,
                key: "number".into(),
                insert: false,
                pred: vec![doc.actor_id.op_id_at(2)].into(),
            }],
            extra_bytes: Vec::new(),
        }
    );
}

//it('deps are filled in if the frontend does not have the latest patch', () => {
//const actor1 = uuid(), actor2 = uuid()
//const [doc1, change1] = Frontend.change(Frontend.init(actor1), doc => doc.number = 1)
//const [state1, patch1, binChange1] = Backend.applyLocalChange(Backend.init(), change1)

//const [state1a, patch1a] = Backend.applyChanges(Backend.init(), [binChange1])
//const doc1a = Frontend.applyPatch(Frontend.init(actor2), patch1a)
//const [doc2, change2] = Frontend.change(doc1a, doc => doc.number = 2)
//const [doc3, change3] = Frontend.change(doc2, doc => doc.number = 3)
//assert.deepStrictEqual(change2, {
//actor: actor2, seq: 1, startOp: 2, deps: [decodeChange(binChange1).hash], time: change2.time, message: '', ops: [
//{obj: '_root', action: 'set', key: 'number', insert: false, value: 2, pred: [`1@${actor1}`]}
//]
//})
//assert.deepStrictEqual(change3, {
//actor: actor2, seq: 2, startOp: 3, deps: [], time: change3.time, message: '', ops: [
//{obj: '_root', action: 'set', key: 'number', insert: false, value: 3, pred: [`2@${actor2}`]}
//]
//})

//const [state2, patch2, binChange2] = Backend.applyLocalChange(state1a, change2)
//const [state3, patch3, binChange3] = Backend.applyLocalChange(state2, change3)
//assert.deepStrictEqual(decodeChange(binChange2).deps, [decodeChange(binChange1).hash])
//assert.deepStrictEqual(decodeChange(binChange3).deps, [decodeChange(binChange2).hash])
//assert.deepStrictEqual(patch1a.deps, [decodeChange(binChange1).hash])
//assert.deepStrictEqual(patch2.deps, [])

//const doc2a = Frontend.applyPatch(doc3, patch2)
//const doc3a = Frontend.applyPatch(doc2a, patch3)
//const [doc4, change4] = Frontend.change(doc3a, doc => doc.number = 4)
//assert.deepStrictEqual(change4, {
//actor: actor2, seq: 3, startOp: 4, time: change4.time, message: '', deps: [], ops: [
//{obj: '_root', action: 'set', key: 'number', insert: false, value: 4, pred: [`3@${actor2}`]}
//]
//})
//const [state4, patch4, binChange4] = Backend.applyLocalChange(state3, change4)
//assert.deepStrictEqual(decodeChange(binChange4).deps, [decodeChange(binChange3).hash])
//})
#[test]
fn test_deps_are_filled_in_if_frontend_does_not_have_latest_patch() {
    let (doc, change1) =
        Frontend::new_with_initial_state(hashmap! {"number" => Primitive::Int(1)}.into()).unwrap();

    let mut backend1 = Backend::new();
    let (_, binchange1) = backend1.apply_local_change(change1).unwrap();

    let mut doc2 = Frontend::new();
    let mut backend2 = Backend::new();
    let patch1 = backend2.apply_changes(vec![binchange1.clone()]).unwrap();
    doc2.apply_patch(patch1.clone()).unwrap();

    let change2 = doc2
        .change::<_, _, InvalidChangeRequest>(None, |d| {
            d.add_change(LocalChange::set(
                Path::root().key("number"),
                Primitive::Int(2),
            ))?;
            Ok(())
        })
        .unwrap()
        .1
        .unwrap();

    let change3 = doc2
        .change::<_, _, InvalidChangeRequest>(None, |d| {
            d.add_change(LocalChange::set(
                Path::root().key("number"),
                Primitive::Int(3),
            ))?;
            Ok(())
        })
        .unwrap()
        .1
        .unwrap();

    let expected_change2 = amp::Change {
        actor_id: doc2.actor_id.clone(),
        start_op: 2,
        seq: 1,
        time: change2.time,
        message: None,
        hash: None,
        deps: vec![binchange1.hash],
        operations: vec![amp::Op {
            action: amp::OpType::Set(amp::ScalarValue::from(2)),
            obj: amp::ObjectId::Root,
            key: "number".into(),
            insert: false,
            pred: vec![doc.actor_id.op_id_at(1)].into(),
        }],
        extra_bytes: Vec::new(),
    };
    assert_eq!(change2, expected_change2);

    let expected_change3 = amp::Change {
        actor_id: doc2.actor_id.clone(),
        start_op: 3,
        seq: 2,
        time: change3.time,
        message: None,
        hash: None,
        deps: Vec::new(),
        operations: vec![amp::Op {
            action: amp::OpType::Set(amp::ScalarValue::from(3)),
            obj: amp::ObjectId::Root,
            key: "number".into(),
            insert: false,
            pred: vec![doc2.actor_id.op_id_at(2)].into(),
        }],
        extra_bytes: Vec::new(),
    };
    assert_eq!(change3, expected_change3);

    let (patch2, binchange2) = backend2.apply_local_change(change2).unwrap();
    let (patch3, binchange3) = backend2.apply_local_change(change3).unwrap();

    assert_eq!(binchange2.deps, vec![binchange1.hash]);
    assert_eq!(binchange3.deps, vec![binchange2.hash]);
    assert_eq!(patch1.deps, vec![binchange1.hash]);
    assert_eq!(patch2.deps, Vec::new());

    doc2.apply_patch(patch2).unwrap();
    doc2.apply_patch(patch3).unwrap();

    let change4 = doc2
        .change::<_, _, InvalidChangeRequest>(None, |d| {
            d.add_change(LocalChange::set(
                Path::root().key("number"),
                Primitive::Int(4),
            ))?;
            Ok(())
        })
        .unwrap()
        .1
        .unwrap();

    let expected_change4 = amp::Change {
        actor_id: doc2.actor_id.clone(),
        start_op: 4,
        seq: 3,
        time: change4.time,
        message: None,
        hash: None,
        deps: Vec::new(),
        operations: vec![amp::Op {
            action: amp::OpType::Set(amp::ScalarValue::from(4)),
            obj: amp::ObjectId::Root,
            key: "number".into(),
            insert: false,
            pred: vec![doc2.actor_id.op_id_at(3)].into(),
        }],
        extra_bytes: Vec::new(),
    };
    assert_eq!(change4, expected_change4);
}
291
automerge-frontend/tests/test_cursor.rs
Normal file
@@ -0,0 +1,291 @@
use std::convert::TryInto;

use amp::RootDiff;
use automerge_backend::Backend;
use automerge_frontend::{Frontend, InvalidChangeRequest, LocalChange, Path, Primitive, Value};
use automerge_protocol as amp;
use maplit::hashmap;
use unicode_segmentation::UnicodeSegmentation;

#[test]
fn test_allow_cursor_on_list_element() {
    let _ = env_logger::builder().is_test(true).try_init().unwrap();
    let mut frontend = Frontend::new();
    let change = frontend
        .change::<_, _, InvalidChangeRequest>(None, |d| {
            d.add_change(LocalChange::set(Path::root().key("list"), vec![1, 2, 3]))?;
            let cursor = d
                .cursor_to_path(&Path::root().key("list").index(1))
                .unwrap();
            d.add_change(LocalChange::set(Path::root().key("cursor"), cursor))?;
            Ok(())
        })
        .unwrap()
        .1
        .unwrap();
    let mut backend = Backend::new();
    backend
        .apply_changes(vec![change.try_into().unwrap()])
        .unwrap();

    let mut backend2 = Backend::new();
    backend2
        .apply_changes(backend.get_changes(&[]).into_iter().cloned().collect())
        .unwrap();
    let mut frontend2 = Frontend::new();
    frontend2
        .apply_patch(backend2.get_patch().unwrap())
        .unwrap();
    let index_value = frontend2.get_value(&Path::root().key("cursor")).unwrap();
    if let Value::Primitive(Primitive::Cursor(c)) = index_value {
        assert_eq!(c.index, 1)
    } else {
        panic!("value was not a cursor");
    }
}

#[test]
fn test_allow_cursor_on_text_element() {
    let mut frontend = Frontend::new();
    let change = frontend
        .change::<_, _, InvalidChangeRequest>(None, |d| {
            d.add_change(LocalChange::set(
                Path::root().key("list"),
                Value::Text("123".graphemes(true).map(|s| s.into()).collect()),
            ))?;
            let cursor = d
                .cursor_to_path(&Path::root().key("list").index(1))
                .unwrap();
            d.add_change(LocalChange::set(Path::root().key("cursor"), cursor))?;
            Ok(())
        })
        .unwrap()
        .1
        .unwrap();
    let mut backend = Backend::new();
    backend
        .apply_changes(vec![change.try_into().unwrap()])
        .unwrap();

    let mut backend2 = Backend::new();
    backend2
        .apply_changes(backend.get_changes(&[]).into_iter().cloned().collect())
        .unwrap();
    let mut frontend2 = Frontend::new();
    frontend2
        .apply_patch(backend2.get_patch().unwrap())
        .unwrap();
    let index_value = frontend2.get_value(&Path::root().key("cursor")).unwrap();
    if let Value::Primitive(Primitive::Cursor(c)) = index_value {
        assert_eq!(c.index, 1)
    } else {
        panic!("value was not a cursor");
    }
}

#[test]
fn test_do_not_allow_index_past_end_of_list() {
    let mut frontend = Frontend::new();
    frontend
        .change::<_, _, InvalidChangeRequest>(None, |d| {
            d.add_change(LocalChange::set(
                Path::root().key("list"),
                Value::Text("123".graphemes(true).map(|s| s.into()).collect()),
            ))?;
            let cursor = d.cursor_to_path(&Path::root().key("list").index(10));
            assert_eq!(cursor, None);
            Ok(())
        })
        .unwrap();
}

// #[test]
// fn test_updates_cursor_during_change_function() {
//     let mut frontend = Frontend::new();
//     frontend
//         .change::<_, _, InvalidChangeRequest>(None, |d| {
//             d.add_change(LocalChange::set(
//                 Path::root().key("list"),
//                 Value::Text("123".graphemes(true).map(|s| s.into()).collect()),
//             ))?;
//             let cursor = d
//                 .cursor_to_path(&Path::root().key("list").index(1))
//                 .unwrap();
//             d.add_change(LocalChange::set(Path::root().key("cursor"), cursor))?;
//             let cursor_the_second = d.value_at_path(&Path::root().key("cursor"));
//             if let Some(Value::Primitive(Primitive::Cursor(c))) = cursor_the_second {
//                 assert_eq!(c.index, 1);
//             } else {
//                 panic!("Cursor the second not found");
//             }

//             d.add_change(LocalChange::insert(
//                 Path::root().key("list").index(0),
//                 Value::Primitive(Primitive::Str("0".into())),
//             ))?;
//             let cursor_the_third = d.value_at_path(&Path::root().key("cursor"));
//             if let Some(Value::Primitive(Primitive::Cursor(c))) = cursor_the_third {
//                 assert_eq!(c.index, 2);
//             } else {
//                 panic!("Cursor the third not found");
//             }
//             Ok(())
//         })
//         .unwrap();
// }

#[test]
fn test_set_cursor_to_new_element_in_diff() {
    let mut frontend = Frontend::new();
    let actor = frontend.actor_id.clone();
    let patch1 = amp::Patch {
        actor: Some(actor.clone()),
        deps: Vec::new(),
        seq: Some(1),
        clock: hashmap! {actor.clone() => 1},
        max_op: 3,
        pending_changes: 0,
        diffs: RootDiff {
            props: hashmap! {
                "list".into() => hashmap!{
                    actor.op_id_at(1) => amp::Diff::List(amp::ListDiff{
                        object_id: actor.op_id_at(1).into(),
                        edits: vec![
                            amp::DiffEdit::SingleElementInsert{
                                index: 0,
                                elem_id: actor.op_id_at(2).into(),
                                op_id: actor.op_id_at(2),
                                value: amp::Diff::Value("one".into()),
                            },
                            amp::DiffEdit::SingleElementInsert{
                                index: 1,
                                elem_id: actor.op_id_at(3).into(),
                                op_id: actor.op_id_at(3),
                                value: amp::Diff::Value("two".into()),
                            },
                        ],
                    }),
                },
                "cursor".into() => hashmap!{
                    actor.op_id_at(4) => amp::Diff::Cursor(amp::CursorDiff{
                        elem_id: actor.op_id_at(3),
                        index: 1,
                        object_id: actor.op_id_at(1).into(),
                    })
                },
            },
        },
    };

    frontend.apply_patch(patch1).unwrap();
    let patch2 = amp::Patch {
        actor: Some(actor.clone()),
        deps: Vec::new(),
        seq: Some(2),
        clock: hashmap! {actor.clone() => 2},
        max_op: 5,
        pending_changes: 0,
        diffs: RootDiff {
            props: hashmap! {
                "cursor".into() => hashmap!{
                    actor.op_id_at(4) => amp::Diff::Cursor(amp::CursorDiff{
                        elem_id: actor.op_id_at(2),
                        index: 0,
                        object_id: actor.op_id_at(1).into(),
                    })
                }
            },
        },
    };
    frontend.apply_patch(patch2).unwrap();

    frontend
        .change::<_, _, InvalidChangeRequest>(None, |doc| {
            doc.add_change(LocalChange::insert(
                Path::root().key("list").index(1),
                "three".into(),
            ))?;
            let cursor = doc.value_at_path(&Path::root().key("cursor")).unwrap();
            match cursor {
                Value::Primitive(Primitive::Cursor(c)) => assert_eq!(c.index, 0),
                _ => panic!("Cursor value was not a cursor"),
            }
            Ok(())
        })
        .unwrap();
}
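
// Note (added commentary, inferred from the patches above): patch2 re-points
// the cursor at the element created by op 2, which sits at index 0, so the
// subsequent local insert at index 1 does not shift it and the closure
// observes index 0.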

// #[test]
// fn test_set_cursor_to_new_element_in_local_change() {
//     let mut frontend = Frontend::new();
//     frontend
//         .change::<_, _, InvalidChangeRequest>(None, |d| {
//             d.add_change(LocalChange::set(
//                 Path::root().key("list"),
//                 Value::Text("123".graphemes(true).map(|s| s.into()).collect()),
//             ))?;
//             let cursor = d
//                 .cursor_to_path(&Path::root().key("list").index(1))
//                 .unwrap();
//             d.add_change(LocalChange::set(Path::root().key("cursor"), cursor))?;
//             let cursor_the_second = d.value_at_path(&Path::root().key("cursor"));
//             if let Some(Value::Primitive(Primitive::Cursor(c))) = cursor_the_second {
//                 assert_eq!(c.index, 1);
//             } else {
//                 panic!("Cursor the second not found");
//             }

//             d.add_change(LocalChange::insert(
//                 Path::root().key("list").index(0),
//                 Value::Primitive(Primitive::Str("0".into())),
//             ))?;
//             d.add_change(LocalChange::insert(
//                 Path::root().key("list").index(0),
//                 Value::Primitive(Primitive::Str("1".into())),
//             ))?;
//             let cursor = d
//                 .cursor_to_path(&Path::root().key("list").index(2))
//                 .unwrap();
//             d.add_change(LocalChange::set(Path::root().key("cursor"), cursor))?;
//             d.add_change(LocalChange::insert(
//                 Path::root().key("list").index(4),
//                 "2".into(),
//             ))?;
//             let cursor_the_third = d.value_at_path(&Path::root().key("cursor"));
//             if let Some(Value::Primitive(Primitive::Cursor(c))) = cursor_the_third {
//                 assert_eq!(c.index, 3);
//             } else {
//                 panic!("Cursor the third not found");
//             }
//             Ok(())
//         })
//         .unwrap();
// }
#[test]
fn test_delete_cursor_and_adding_again() {
    let mut frontend = Frontend::new();
    frontend
        .change::<_, _, InvalidChangeRequest>(None, |d| {
            d.add_change(LocalChange::set(
                Path::root().key("list"),
                Value::Text("123".graphemes(true).map(|s| s.into()).collect()),
            ))?;
            let cursor = d
                .cursor_to_path(&Path::root().key("list").index(1))
                .unwrap();
            d.add_change(LocalChange::set(Path::root().key("cursor"), cursor.clone()))?;
            d.add_change(LocalChange::delete(Path::root().key("cursor")))?;
            d.add_change(LocalChange::set(Path::root().key("cursor"), cursor))?;

            let cursor_value = d.value_at_path(&Path::root().key("cursor"));
            if let Some(Value::Primitive(Primitive::Cursor(c))) = cursor_value {
                assert_eq!(c.index, 1);
            } else {
                panic!("Cursor not found after re-adding it");
            }
            Ok(())
        })
        .unwrap();
}

// TODO: test removing a cursor
Some files were not shown because too many files have changed in this diff