Compare commits: `main...attribute-` (55 commits)
| SHA1 |
|---|
| 4e304d11c6 |
| 08e6a86f28 |
| 979b9fd362 |
| c149da3a6d |
| af02ba6b86 |
| 657bd22d61 |
| 2663e0315c |
| bebd310ab6 |
| bc98b1ecc9 |
| 84619d8331 |
| 5d4e1f0c42 |
| 25afa0b12b |
| 0cf54c36a8 |
| 99b1127f5c |
| ae87d7bc00 |
| ce9771b29c |
| e00797c512 |
| 57a0f62b75 |
| a0f78561c4 |
| ff1a20c626 |
| b14d874dfc |
| aad4852e30 |
| 63b4c96e71 |
| 1b1d50dfaf |
| d02737ad12 |
| 8f4c1fc209 |
| 304195d720 |
| b81e0fd619 |
| 22b62b14b5 |
| cbf1ac03b2 |
| 4094e82f04 |
| 42446fa5c2 |
| 6d5f16c9cd |
| dbbdd616fd |
| 523af57a26 |
| d195a81d49 |
| 4c11c86532 |
| 42b6ffe9d8 |
| b21b59e6a1 |
| c1be06a6c7 |
| e07211278f |
| 3c3f411329 |
| 5aad691e31 |
| 872efc5756 |
| e37395f975 |
| a84fa64554 |
| a37d4a6870 |
| 5eb5714c13 |
| 4f9b95b5b8 |
| 36b4f08d20 |
| 015e8ce465 |
| ea2f29d681 |
| c8cd069e51 |
| 2ba2da95a8 |
| 561cad44e3 |
485 changed files with 20600 additions and 71475 deletions
`.github/workflows/advisory-cron.yaml` (2 changes)

```diff
@@ -1,4 +1,4 @@
-name: Advisories
+name: ci
 on:
   schedule:
     - cron: '0 18 * * *'
```
`.github/workflows/ci.yaml` (90 changes)

```diff
@@ -1,11 +1,11 @@
-name: CI
+name: ci
 on:
   push:
     branches:
-      - main
+      - experiment
   pull_request:
     branches:
-      - main
+      - experiment
 jobs:
   fmt:
     runs-on: ubuntu-latest
@@ -14,8 +14,7 @@ jobs:
       - uses: actions-rs/toolchain@v1
         with:
           profile: minimal
-          toolchain: 1.67.0
-          default: true
+          toolchain: stable
           components: rustfmt
       - uses: Swatinem/rust-cache@v1
      - run: ./scripts/ci/fmt
@@ -28,8 +27,7 @@ jobs:
       - uses: actions-rs/toolchain@v1
         with:
           profile: minimal
-          toolchain: 1.67.0
-          default: true
+          toolchain: stable
           components: clippy
       - uses: Swatinem/rust-cache@v1
       - run: ./scripts/ci/lint
@@ -42,14 +40,9 @@ jobs:
       - uses: actions-rs/toolchain@v1
         with:
           profile: minimal
-          toolchain: 1.67.0
-          default: true
+          toolchain: stable
       - uses: Swatinem/rust-cache@v1
-      - name: Build rust docs
-        run: ./scripts/ci/rust-docs
-        shell: bash
-      - name: Install doxygen
-        run: sudo apt-get install -y doxygen
+      - run: ./scripts/ci/docs
         shell: bash
 
   cargo-deny:
@@ -64,88 +57,40 @@ jobs:
       - uses: actions/checkout@v2
       - uses: EmbarkStudios/cargo-deny-action@v1
         with:
-          arguments: '--manifest-path ./rust/Cargo.toml'
           command: check ${{ matrix.checks }}
 
   wasm_tests:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
-      - name: Install wasm-bindgen-cli
-        run: cargo install wasm-bindgen-cli wasm-opt
-      - name: Install wasm32 target
-        run: rustup target add wasm32-unknown-unknown
+      - name: Install wasm-pack
+        run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
       - name: run tests
         run: ./scripts/ci/wasm_tests
-  deno_tests:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - uses: denoland/setup-deno@v1
-        with:
-          deno-version: v1.x
-      - name: Install wasm-bindgen-cli
-        run: cargo install wasm-bindgen-cli wasm-opt
-      - name: Install wasm32 target
-        run: rustup target add wasm32-unknown-unknown
-      - name: run tests
-        run: ./scripts/ci/deno_tests
-
-  js_fmt:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - name: install
-        run: yarn global add prettier
-      - name: format
-        run: prettier -c javascript/.prettierrc javascript
-
   js_tests:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
-      - name: Install wasm-bindgen-cli
-        run: cargo install wasm-bindgen-cli wasm-opt
-      - name: Install wasm32 target
-        run: rustup target add wasm32-unknown-unknown
+      - name: Install wasm-pack
+        run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
       - name: run tests
         run: ./scripts/ci/js_tests
 
-  cmake_build:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: nightly-2023-01-26
-          default: true
-      - uses: Swatinem/rust-cache@v1
-      - name: Install CMocka
-        run: sudo apt-get install -y libcmocka-dev
-      - name: Install/update CMake
-        uses: jwlawson/actions-setup-cmake@v1.12
-        with:
-          cmake-version: latest
-      - name: Install rust-src
-        run: rustup component add rust-src
-      - name: Build and test C bindings
-        run: ./scripts/ci/cmake-build Release Static
-        shell: bash
-
   linux:
     runs-on: ubuntu-latest
     strategy:
       matrix:
         toolchain:
-          - 1.67.0
+          - stable
+          - nightly
+    continue-on-error: ${{ matrix.toolchain == 'nightly' }}
     steps:
       - uses: actions/checkout@v2
       - uses: actions-rs/toolchain@v1
         with:
           profile: minimal
           toolchain: ${{ matrix.toolchain }}
-          default: true
       - uses: Swatinem/rust-cache@v1
       - run: ./scripts/ci/build-test
         shell: bash
@@ -157,8 +102,7 @@ jobs:
       - uses: actions-rs/toolchain@v1
         with:
           profile: minimal
-          toolchain: 1.67.0
-          default: true
+          toolchain: stable
       - uses: Swatinem/rust-cache@v1
       - run: ./scripts/ci/build-test
         shell: bash
@@ -170,8 +114,8 @@ jobs:
       - uses: actions-rs/toolchain@v1
         with:
           profile: minimal
-          toolchain: 1.67.0
-          default: true
+          toolchain: stable
       - uses: Swatinem/rust-cache@v1
       - run: ./scripts/ci/build-test
         shell: bash
```
`.github/workflows/docs.yaml` (52 changes, file deleted)

```diff
@@ -1,52 +0,0 @@
-on:
-  push:
-    branches:
-      - main
-
-name: Documentation
-
-jobs:
-  deploy-docs:
-    concurrency: deploy-docs
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-
-      - name: Toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
-
-      - name: Cache
-        uses: Swatinem/rust-cache@v1
-
-      - name: Clean docs dir
-        run: rm -rf docs
-        shell: bash
-
-      - name: Clean Rust docs dir
-        uses: actions-rs/cargo@v1
-        with:
-          command: clean
-          args: --manifest-path ./rust/Cargo.toml --doc
-
-      - name: Build Rust docs
-        uses: actions-rs/cargo@v1
-        with:
-          command: doc
-          args: --manifest-path ./rust/Cargo.toml --workspace --all-features --no-deps
-
-      - name: Move Rust docs
-        run: mkdir -p docs && mv rust/target/doc/* docs/.
-        shell: bash
-
-      - name: Configure root page
-        run: echo '<meta http-equiv="refresh" content="0; url=automerge">' > docs/index.html
-
-      - name: Deploy docs
-        uses: peaceiris/actions-gh-pages@v3
-        with:
-          github_token: ${{ secrets.GITHUB_TOKEN }}
-          publish_dir: ./docs
```
`.github/workflows/release.yaml` (214 changes, file deleted)

```diff
@@ -1,214 +0,0 @@
-name: Release
-on:
-  push:
-    branches:
-      - main
-
-jobs:
-  check_if_wasm_version_upgraded:
-    name: Check if WASM version has been upgraded
-    runs-on: ubuntu-latest
-    outputs:
-      wasm_version: ${{ steps.version-updated.outputs.current-package-version }}
-      wasm_has_updated: ${{ steps.version-updated.outputs.has-updated }}
-    steps:
-      - uses: JiPaix/package-json-updated-action@v1.0.5
-        id: version-updated
-        with:
-          path: rust/automerge-wasm/package.json
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-  publish-wasm:
-    name: Publish WASM package
-    runs-on: ubuntu-latest
-    needs:
-      - check_if_wasm_version_upgraded
-    # We create release only if the version in the package.json has been upgraded
-    if: needs.check_if_wasm_version_upgraded.outputs.wasm_has_updated == 'true'
-    steps:
-      - uses: actions/setup-node@v3
-        with:
-          node-version: '16.x'
-          registry-url: 'https://registry.npmjs.org'
-      - uses: denoland/setup-deno@v1
-      - uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-          ref: ${{ github.ref }}
-      - name: Get rid of local github workflows
-        run: rm -r .github/workflows
-      - name: Remove tmp_branch if it exists
-        run: git push origin :tmp_branch || true
-      - run: git checkout -b tmp_branch
-      - name: Install wasm-bindgen-cli
-        run: cargo install wasm-bindgen-cli wasm-opt
-      - name: Install wasm32 target
-        run: rustup target add wasm32-unknown-unknown
-      - name: run wasm js tests
-        id: wasm_js_tests
-        run: ./scripts/ci/wasm_tests
-      - name: run wasm deno tests
-        id: wasm_deno_tests
-        run: ./scripts/ci/deno_tests
-      - name: build release
-        id: build_release
-        run: |
-          npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm run release
-      - name: Collate deno release files
-        if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
-        run: |
-          mkdir $GITHUB_WORKSPACE/deno_wasm_dist
-          cp $GITHUB_WORKSPACE/rust/automerge-wasm/deno/* $GITHUB_WORKSPACE/deno_wasm_dist
-          cp $GITHUB_WORKSPACE/rust/automerge-wasm/index.d.ts $GITHUB_WORKSPACE/deno_wasm_dist
-          cp $GITHUB_WORKSPACE/rust/automerge-wasm/README.md $GITHUB_WORKSPACE/deno_wasm_dist
-          cp $GITHUB_WORKSPACE/rust/automerge-wasm/LICENSE $GITHUB_WORKSPACE/deno_wasm_dist
-          sed -i '1i /// <reference types="./index.d.ts" />' $GITHUB_WORKSPACE/deno_wasm_dist/automerge_wasm.js
-      - name: Create npm release
-        if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
-        run: |
-          if [ "$(npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm show . version)" = "$VERSION" ]; then
-            echo "This version is already published"
-            exit 0
-          fi
-          EXTRA_ARGS="--access public"
-          if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then
-            echo "Is pre-release version"
-            EXTRA_ARGS="$EXTRA_ARGS --tag next"
-          fi
-          if [ "$NODE_AUTH_TOKEN" = "" ]; then
-            echo "Can't publish on NPM, You need a NPM_TOKEN secret."
-            false
-          fi
-          npm publish $GITHUB_WORKSPACE/rust/automerge-wasm $EXTRA_ARGS
-        env:
-          NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
-          VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
-      - name: Commit wasm deno release files
-        run: |
-          git config --global user.name "actions"
-          git config --global user.email actions@github.com
-          git add $GITHUB_WORKSPACE/deno_wasm_dist
-          git commit -am "Add deno release files"
-          git push origin tmp_branch
-      - name: Tag wasm release
-        if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
-        uses: softprops/action-gh-release@v1
-        with:
-          name: Automerge Wasm v${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
-          tag_name: js/automerge-wasm-${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
-          target_commitish: tmp_branch
-          generate_release_notes: false
-          draft: false
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Remove tmp_branch
-        run: git push origin :tmp_branch
-  check_if_js_version_upgraded:
-    name: Check if JS version has been upgraded
-    runs-on: ubuntu-latest
-    outputs:
-      js_version: ${{ steps.version-updated.outputs.current-package-version }}
-      js_has_updated: ${{ steps.version-updated.outputs.has-updated }}
-    steps:
-      - uses: JiPaix/package-json-updated-action@v1.0.5
-        id: version-updated
-        with:
-          path: javascript/package.json
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-  publish-js:
-    name: Publish JS package
-    runs-on: ubuntu-latest
-    needs:
-      - check_if_js_version_upgraded
-      - check_if_wasm_version_upgraded
-      - publish-wasm
-    # We create release only if the version in the package.json has been upgraded and after the WASM release
-    if: |
-      (always() && ! cancelled()) &&
-      (needs.publish-wasm.result == 'success' || needs.publish-wasm.result == 'skipped') &&
-      needs.check_if_js_version_upgraded.outputs.js_has_updated == 'true'
-    steps:
-      - uses: actions/setup-node@v3
-        with:
-          node-version: '16.x'
-          registry-url: 'https://registry.npmjs.org'
-      - uses: denoland/setup-deno@v1
-      - uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-          ref: ${{ github.ref }}
-      - name: Get rid of local github workflows
-        run: rm -r .github/workflows
-      - name: Remove js_tmp_branch if it exists
-        run: git push origin :js_tmp_branch || true
-      - run: git checkout -b js_tmp_branch
-      - name: check js formatting
-        run: |
-          yarn global add prettier
-          prettier -c javascript/.prettierrc javascript
-      - name: run js tests
-        id: js_tests
-        run: |
-          cargo install wasm-bindgen-cli wasm-opt
-          rustup target add wasm32-unknown-unknown
-          ./scripts/ci/js_tests
-      - name: build js release
-        id: build_release
-        run: |
-          npm --prefix $GITHUB_WORKSPACE/javascript run build
-      - name: build js deno release
-        id: build_deno_release
-        run: |
-          VERSION=$WASM_VERSION npm --prefix $GITHUB_WORKSPACE/javascript run deno:build
-        env:
-          WASM_VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
-      - name: run deno tests
-        id: deno_tests
-        run: |
-          npm --prefix $GITHUB_WORKSPACE/javascript run deno:test
-      - name: Collate deno release files
-        if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
-        run: |
-          mkdir $GITHUB_WORKSPACE/deno_js_dist
-          cp $GITHUB_WORKSPACE/javascript/deno_dist/* $GITHUB_WORKSPACE/deno_js_dist
-      - name: Create npm release
-        if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
-        run: |
-          if [ "$(npm --prefix $GITHUB_WORKSPACE/javascript show . version)" = "$VERSION" ]; then
-            echo "This version is already published"
-            exit 0
-          fi
-          EXTRA_ARGS="--access public"
-          if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then
-            echo "Is pre-release version"
-            EXTRA_ARGS="$EXTRA_ARGS --tag next"
-          fi
-          if [ "$NODE_AUTH_TOKEN" = "" ]; then
-            echo "Can't publish on NPM, You need a NPM_TOKEN secret."
-            false
-          fi
-          npm publish $GITHUB_WORKSPACE/javascript $EXTRA_ARGS
-        env:
-          NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
-          VERSION: ${{ needs.check_if_js_version_upgraded.outputs.js_version }}
-      - name: Commit js deno release files
-        run: |
-          git config --global user.name "actions"
-          git config --global user.email actions@github.com
-          git add $GITHUB_WORKSPACE/deno_js_dist
-          git commit -am "Add deno js release files"
-          git push origin js_tmp_branch
-      - name: Tag JS release
-        if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
-        uses: softprops/action-gh-release@v1
-        with:
-          name: Automerge v${{ needs.check_if_js_version_upgraded.outputs.js_version }}
-          tag_name: js/automerge-${{ needs.check_if_js_version_upgraded.outputs.js_version }}
-          target_commitish: js_tmp_branch
-          generate_release_notes: false
-          draft: false
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Remove js_tmp_branch
-        run: git push origin :js_tmp_branch
```
`.gitignore` (4 changes)

```diff
@@ -1,6 +1,4 @@
+/target
 /.direnv
 perf.*
 /Cargo.lock
-build/
-.vim/*
-/target
```
`Cargo.toml` (workspace manifest)

```diff
@@ -1,17 +1,15 @@
 [workspace]
 members = [
   "automerge",
-  "automerge-c",
-  "automerge-cli",
-  "automerge-test",
   "automerge-wasm",
+  "automerge-cli",
   "edit-trace",
 ]
-resolver = "2"
 
 [profile.release]
+debug = true
 lto = true
-codegen-units = 1
+opt-level = 3
 
 [profile.bench]
 debug = true
```
`Makefile` (new file, 13 additions)

```diff
@@ -0,0 +1,13 @@
+rust:
+	cd automerge && cargo test
+
+wasm:
+	cd automerge-wasm && yarn
+	cd automerge-wasm && yarn build
+	cd automerge-wasm && yarn test
+	cd automerge-wasm && yarn link
+
+js: wasm
+	cd automerge-js && yarn
+	cd automerge-js && yarn link "automerge-wasm"
+	cd automerge-js && yarn test
```
`README.md` (188 changes)

````diff
@@ -1,147 +1,81 @@
-# Automerge
+# Automerge - NEXT
 
-<img src='./img/sign.svg' width='500' alt='Automerge logo' />
+This is pretty much a ground up rewrite of automerge-rs. The objective of this
+rewrite is to radically simplify the API. The end goal being to produce a library
+which is easy to work with both in Rust and from FFI.
 
-[](https://automerge.org/)
-[](https://automerge.org/automerge-rs/automerge/)
-[](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml)
-[](https://github.com/automerge/automerge-rs/actions/workflows/docs.yaml)
+## How?
 
-Automerge is a library which provides fast implementations of several different
-CRDTs, a compact compression format for these CRDTs, and a sync protocol for
-efficiently transmitting those changes over the network. The objective of the
-project is to support [local-first](https://www.inkandswitch.com/local-first/) applications in the same way that relational
-databases support server applications - by providing mechanisms for persistence
-which allow application developers to avoid thinking about hard distributed
-computing problems. Automerge aims to be PostgreSQL for your local-first app.
+The current iteration of automerge-rs is complicated to work with because it
+adopts the frontend/backend split architecture of the JS implementation. This
+architecture was necessary due to basic operations on the automerge opset being
+too slow to perform on the UI thread. Recently @orionz has been able to improve
+the performance to the point where the split is no longer necessary. This means
+we can adopt a much simpler mutable API.
 
-If you're looking for documentation on the JavaScript implementation take a look
-at https://automerge.org/docs/hello/. There are other implementations in both
-Rust and C, but they are earlier and don't have documentation yet. You can find
-them in `rust/automerge` and `rust/automerge-c` if you are comfortable
-reading the code and tests to figure out how to use them.
-
-If you're familiar with CRDTs and interested in the design of Automerge in
-particular take a look at https://automerge.org/docs/how-it-works/backend/
-
-Finally, if you want to talk to us about this project please [join the
-Slack](https://join.slack.com/t/automerge/shared_invite/zt-e4p3760n-kKh7r3KRH1YwwNfiZM8ktw)
+The architecture is now built around the `OpTree`. This is a data structure
+which supports efficiently inserting new operations and realising values of
+existing operations. Most interactions with the `OpTree` are in the form of
+implementations of `TreeQuery` - a trait which can be used to traverse the
+optree and producing state of some kind. User facing operations are exposed on
+an `Automerge` object, under the covers these operations typically instantiate
+some `TreeQuery` and run it over the `OpTree`.
 
 ## Status
 
-This project is formed of a core Rust implementation which is exposed via FFI in
-javascript+WASM, C, and soon other languages. Alex
-([@alexjg](https://github.com/alexjg/)]) is working full time on maintaining
-automerge, other members of Ink and Switch are also contributing time and there
-are several other maintainers. The focus is currently on shipping the new JS
-package. We expect to be iterating the API and adding new features over the next
-six months so there will likely be several major version bumps in all packages
-in that time.
+We have working code which passes all of the tests in the JS test suite. We're
+now working on writing a bunch more tests and cleaning up the API.
 
-In general we try and respect semver.
+## Development
 
-### JavaScript
+### Running CI
 
-A stable release of the javascript package is currently available as
-`@automerge/automerge@2.0.0` where. pre-release verisions of the `2.0.1` are
-available as `2.0.1-alpha.n`. `2.0.1*` packages are also available for Deno at
-https://deno.land/x/automerge
+The steps CI will run are all defined in `./scripts/ci`. Obviously CI will run
+everything when you submit a PR, but if you want to run everything locally
+before you push you can run `./scripts/ci/run` to run everything.
 
-### Rust
+### Running the JS tests
 
-The rust codebase is currently oriented around producing a performant backend
-for the Javascript wrapper and as such the API for Rust code is low level and
-not well documented. We will be returning to this over the next few months but
-for now you will need to be comfortable reading the tests and asking questions
-to figure out how to use it. If you are looking to build rust applications which
-use automerge you may want to look into
-[autosurgeon](https://github.com/alexjg/autosurgeon)
+You will need to have [node](https://nodejs.org/en/), [yarn](https://yarnpkg.com/getting-started/install), [rust](https://rustup.rs/) and [wasm-pack](https://rustwasm.github.io/wasm-pack/installer/) installed.
 
-## Repository Organisation
+To build and test the rust library:
 
-- `./rust` - the rust rust implementation and also the Rust components of
-  platform specific wrappers (e.g. `automerge-wasm` for the WASM API or
-  `automerge-c` for the C FFI bindings)
-- `./javascript` - The javascript library which uses `automerge-wasm`
-  internally but presents a more idiomatic javascript interface
-- `./scripts` - scripts which are useful to maintenance of the repository.
-  This includes the scripts which are run in CI.
-- `./img` - static assets for use in `.md` files
-
-## Building
-
-To build this codebase you will need:
-
-- `rust`
-- `node`
-- `yarn`
-- `cmake`
-- `cmocka`
-
-You will also need to install the following with `cargo install`
-
-- `wasm-bindgen-cli`
-- `wasm-opt`
-- `cargo-deny`
-
-And ensure you have added the `wasm32-unknown-unknown` target for rust cross-compilation.
-
-The various subprojects (the rust code, the wrapper projects) have their own
-build instructions, but to run the tests that will be run in CI you can run
-`./scripts/ci/run`.
-
-### For macOS
-
-These instructions worked to build locally on macOS 13.1 (arm64) as of
-Nov 29th 2022.
-
-```bash
-# clone the repo
-git clone https://github.com/automerge/automerge-rs
-cd automerge-rs
-
-# install rustup
-curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
-
-# install homebrew
-/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
-
-# install cmake, node, cmocka
-brew install cmake node cmocka
-
-# install yarn
-npm install --global yarn
-
-# install javascript dependencies
-yarn --cwd ./javascript
-
-# install rust dependencies
-cargo install wasm-bindgen-cli wasm-opt cargo-deny
-
-# get nightly rust to produce optimized automerge-c builds
-rustup toolchain install nightly
-rustup component add rust-src --toolchain nightly
-
-# add wasm target in addition to current architecture
-rustup target add wasm32-unknown-unknown
-
-# Run ci script
-./scripts/ci/run
+```shell
+$ cd automerge
+$ cargo test
 ```
 
-If your build fails to find `cmocka.h` you may need to teach it about homebrew's
-installation location:
+To build and test the wasm library:
 
-```
-export CPATH=/opt/homebrew/include
-export LIBRARY_PATH=/opt/homebrew/lib
-./scripts/ci/run
+```shell
+## setup
+$ cd automerge-wasm
+$ yarn
+
+## building or testing
+$ yarn build
+$ yarn test
+
+## without this the js library wont automatically use changes
+$ yarn link
+
+## cutting a release or doing benchmarking
+$ yarn release
+$ yarn opt ## or set `wasm-opt = false` in Cargo.toml on supported platforms (not arm64 osx)
 ```
 
-## Contributing
+And finally to test the js library. This is where most of the tests reside.
 
-Please try and split your changes up into relatively independent commits which
-change one subsystem at a time and add good commit messages which describe what
-the change is and why you're making it (err on the side of longer commit
-messages). `git blame` should give future maintainers a good idea of why
-something is the way it is.
+```shell
+## setup
+$ cd automerge-js
+$ yarn
+$ yarn link "automerge-wasm"
+
+## testing
+$ yarn test
+```
+
+## Benchmarking
+
+The `edit-trace` folder has the main code for running the edit trace benchmarking.
````
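The `How?` section added in this README describes the central pattern: user-facing operations instantiate some `TreeQuery` and run it over the `OpTree`. As a rough illustration of that pattern only, here is a minimal, self-contained Rust sketch; the type names, fields, and signatures are simplified assumptions for exposition, not the crate's actual internals.

```rust
/// An operation stored in the op tree (drastically simplified stand-in).
struct Op {
    key: String,
    value: String,
}

/// Whether traversal should continue after visiting an op.
enum QueryResult {
    Next,
    Finish,
}

/// The query trait: traversal calls `query_op` for each op visited,
/// and the query accumulates whatever state it needs.
trait TreeQuery {
    fn query_op(&mut self, op: &Op) -> QueryResult;
}

/// A query that resolves the last-written value for a key.
struct GetValue<'a> {
    key: &'a str,
    found: Option<String>,
}

impl<'a> TreeQuery for GetValue<'a> {
    fn query_op(&mut self, op: &Op) -> QueryResult {
        if op.key == self.key {
            self.found = Some(op.value.clone());
        }
        QueryResult::Next
    }
}

/// Stand-in for the op tree: runs a query over ops in insertion order.
struct OpTree {
    ops: Vec<Op>,
}

impl OpTree {
    fn search<Q: TreeQuery>(&self, query: &mut Q) {
        for op in &self.ops {
            if let QueryResult::Finish = query.query_op(op) {
                break;
            }
        }
    }
}

fn main() {
    let tree = OpTree {
        ops: vec![
            Op { key: "title".into(), value: "hello".into() },
            Op { key: "title".into(), value: "world".into() },
        ],
    };
    // A user-facing "get" would instantiate a query and run it over the tree.
    let mut q = GetValue { key: "title", found: None };
    tree.search(&mut q);
    assert_eq!(q.found.as_deref(), Some("world"));
}
```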
`TODO.md` (new file, 32 additions)

```diff
@@ -0,0 +1,32 @@
+### next steps:
+1. C API
+2. port rust command line tool
+3. fast load
+
+### ergonomics:
+1. value() -> () or something that into's a value
+
+### automerge:
+1. single pass (fast) load
+2. micro-patches / bare bones observation API / fully hydrated documents
+
+### future:
+1. handle columns with unknown data in and out
+2. branches with different indexes
+
+### Peritext
+1. add mark / remove mark -- type, start/end elemid (inclusive,exclusive)
+2. track any formatting ops that start or end on a character
+3. ops right before the character, ops right after that character
+4. query a single character - character, plus marks that start or end on that character
+   what is its current formatting,
+   what are the ops that include that in their span,
+   None = same as last time, Set( bold, italic ),
+   keep these on index
+5. op probably belongs with the start character - possible packed at the beginning or end of the list
+
+### maybe:
+1. tables
+
+### no:
+1. cursors
```
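Peritext item 1 in the TODO above mentions mark ops carrying a type plus start/end elemids with inclusive or exclusive boundaries. A hedged Rust sketch of one way such a mark might be modelled; all names here are illustrative assumptions, not the planned design.

```rust
/// Identifier of a character's insertion op (opaque here; assumed shape).
#[derive(Clone, Debug, PartialEq)]
struct ElemId(u64);

/// One end of a mark: anchored to an element, either including it or not.
/// An inclusive start means text inserted at the boundary inherits the mark;
/// an exclusive end means text inserted at the boundary does not.
#[derive(Clone, Debug)]
struct MarkBoundary {
    elem: ElemId,
    inclusive: bool,
}

/// A formatting mark such as bold or italic over a span of text.
#[derive(Clone, Debug)]
struct Mark {
    kind: String,        // e.g. "bold", "italic"
    start: MarkBoundary,
    end: MarkBoundary,
}

fn main() {
    // A bold span from element 3 (inclusive) up to element 9 (exclusive).
    let bold = Mark {
        kind: "bold".into(),
        start: MarkBoundary { elem: ElemId(3), inclusive: true },
        end: MarkBoundary { elem: ElemId(9), inclusive: false },
    };
    println!("{bold:?}");
}
```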
`automerge-cli/Cargo.lock` (new file, 857 additions, generated)

The new lockfile is machine-generated; the entries pinning third-party crates (adler, ansi_term, anyhow, atty, autocfg, bitflags, clap, flate2, serde, tracing, wasm-bindgen, and friends) are elided here. The two workspace packages it records:

```toml
[[package]]
name = "automerge"
version = "0.1.0"
dependencies = [
  "flate2",
  "fxhash",
  "hex",
  "itertools",
  "js-sys",
  "leb128",
  "nonzero_ext",
  "rand",
  "serde",
  "sha2",
  "smol_str",
  "thiserror",
  "tinyvec",
  "tracing",
  "unicode-segmentation",
  "uuid",
  "wasm-bindgen",
  "web-sys",
]

[[package]]
name = "automerge-cli"
version = "0.1.0"
dependencies = [
  "anyhow",
  "atty",
  "automerge",
  "clap",
  "colored_json",
  "combine",
  "duct",
  "maplit",
  "serde_json",
  "thiserror",
  "tracing-subscriber",
]
```
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wasm-bindgen-macro"
|
||||||
|
version = "0.2.79"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "2f4203d69e40a52ee523b2529a773d5ffc1dc0071801c87b3d270b471b80ed01"
|
||||||
|
dependencies = [
|
||||||
|
"quote",
|
||||||
|
"wasm-bindgen-macro-support",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wasm-bindgen-macro-support"
|
||||||
|
version = "0.2.79"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "bfa8a30d46208db204854cadbb5d4baf5fcf8071ba5bf48190c3e59937962ebc"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"syn",
|
||||||
|
"wasm-bindgen-backend",
|
||||||
|
"wasm-bindgen-shared",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wasm-bindgen-shared"
|
||||||
|
version = "0.2.79"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "3d958d035c4438e28c70e4321a2911302f10135ce78a9c7834c0cab4123d06a2"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "web-sys"
|
||||||
|
version = "0.3.56"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "c060b319f29dd25724f09a2ba1418f142f539b2be99fbf4d2d5a8f7330afb8eb"
|
||||||
|
dependencies = [
|
||||||
|
"js-sys",
|
||||||
|
"wasm-bindgen",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "winapi"
|
||||||
|
version = "0.3.9"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
|
||||||
|
dependencies = [
|
||||||
|
"winapi-i686-pc-windows-gnu",
|
||||||
|
"winapi-x86_64-pc-windows-gnu",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "winapi-i686-pc-windows-gnu"
|
||||||
|
version = "0.4.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "winapi-util"
|
||||||
|
version = "0.1.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
|
||||||
|
dependencies = [
|
||||||
|
"winapi",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "winapi-x86_64-pc-windows-gnu"
|
||||||
|
version = "0.4.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
||||||
|
|
automerge-cli/Cargo.toml
@@ -4,7 +4,6 @@ version = "0.1.0"
 authors = ["Alex Good <alex@memoryandthought.me>"]
 edition = "2018"
 license = "MIT"
-rust-version = "1.57.0"
 
 [[bin]]
 name = "automerge"
@@ -13,18 +12,17 @@ bench = false
 doc = false
 
 [dependencies]
-clap = {version = "~4", features = ["derive"]}
+clap = {version = "~3.1", features = ["derive"]}
 serde_json = "^1.0"
 anyhow = "1.0"
+atty = "^0.2"
 thiserror = "^1.0"
 combine = "^4.5"
 maplit = "^1.0"
+colored_json = "^2.1"
 tracing-subscriber = "~0.3"
 
 automerge = { path = "../automerge" }
-is-terminal = "0.4.1"
-termcolor = "1.1.3"
-serde = "1.0.150"
 
 [dev-dependencies]
 duct = "^0.13"
automerge-cli/src/examine.rs
@@ -1,8 +1,6 @@
 use automerge as am;
 use thiserror::Error;
 
-use crate::{color_json::print_colored_json, SkipVerifyFlag};
-
 #[derive(Error, Debug)]
 pub enum ExamineError {
     #[error("Error reading change file: {:?}", source)]
@@ -22,28 +20,21 @@ pub enum ExamineError {
     },
 }
 
-pub(crate) fn examine(
+pub fn examine(
     mut input: impl std::io::Read,
     mut output: impl std::io::Write,
-    skip: SkipVerifyFlag,
     is_tty: bool,
 ) -> Result<(), ExamineError> {
     let mut buf: Vec<u8> = Vec::new();
     input
         .read_to_end(&mut buf)
         .map_err(|e| ExamineError::ReadingChanges { source: e })?;
-    let doc = skip
-        .load(&buf)
+    let doc = am::Automerge::load(&buf)
         .map_err(|e| ExamineError::ApplyingInitialChanges { source: e })?;
-    let uncompressed_changes: Vec<_> = doc
-        .get_changes(&[])
-        .unwrap()
-        .iter()
-        .map(|c| c.decode())
-        .collect();
+    let uncompressed_changes: Vec<_> = doc.get_changes(&[]).iter().map(|c| c.decode()).collect();
     if is_tty {
         let json_changes = serde_json::to_value(uncompressed_changes).unwrap();
-        print_colored_json(&json_changes).unwrap();
+        colored_json::write_colored_json(&json_changes, &mut output).unwrap();
         writeln!(output).unwrap();
     } else {
         let json_changes = serde_json::to_string_pretty(&uncompressed_changes).unwrap();
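For readers tracking the JS half of this branch: the same decode-and-print flow is expressible there. A hedged sketch using only functions this diff adds in automerge-js/src/index.js (the module name "automerge-js" comes from its package.json; treat the wiring, not the CLI behaviour, as the point):

// Hedged sketch: JS analogue of examine() above, built from the
// automerge-js API added later in this diff (getAllChanges, decodeChange).
const automerge = require('automerge-js')

let doc = automerge.init()
doc = automerge.change(doc, 'add a bird', d => { d.birds = ['wren'] })

// Decode every change in the document and pretty-print it as JSON,
// mirroring what examine() writes to its output stream.
const decoded = automerge.getAllChanges(doc).map(c => automerge.decodeChange(c))
console.log(JSON.stringify(decoded, null, 2))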
automerge-cli/src/export.rs
@@ -1,14 +1,11 @@
 use anyhow::Result;
 use automerge as am;
-use automerge::ReadDoc;
-
-use crate::{color_json::print_colored_json, SkipVerifyFlag};
 
 pub(crate) fn map_to_json(doc: &am::Automerge, obj: &am::ObjId) -> serde_json::Value {
     let keys = doc.keys(obj);
     let mut map = serde_json::Map::new();
     for k in keys {
-        let val = doc.get(obj, &k);
+        let val = doc.value(obj, &k);
         match val {
             Ok(Some((am::Value::Object(o), exid)))
                 if o == am::ObjType::Map || o == am::ObjType::Table =>
@@ -31,7 +28,7 @@ fn list_to_json(doc: &am::Automerge, obj: &am::ObjId) -> serde_json::Value {
     let len = doc.length(obj);
     let mut array = Vec::new();
     for i in 0..len {
-        let val = doc.get(obj, i);
+        let val = doc.value(obj, i as usize);
         match val {
             Ok(Some((am::Value::Object(o), exid)))
                 if o == am::ObjType::Map || o == am::ObjType::Table =>
@@ -53,13 +50,11 @@ fn list_to_json(doc: &am::Automerge, obj: &am::ObjId) -> serde_json::Value {
 fn scalar_to_json(val: &am::ScalarValue) -> serde_json::Value {
     match val {
         am::ScalarValue::Str(s) => serde_json::Value::String(s.to_string()),
-        am::ScalarValue::Bytes(b) | am::ScalarValue::Unknown { bytes: b, .. } => {
-            serde_json::Value::Array(
-                b.iter()
-                    .map(|byte| serde_json::Value::Number((*byte).into()))
-                    .collect(),
-            )
-        }
+        am::ScalarValue::Bytes(b) => serde_json::Value::Array(
+            b.iter()
+                .map(|byte| serde_json::Value::Number((*byte).into()))
+                .collect(),
+        ),
         am::ScalarValue::Int(n) => serde_json::Value::Number((*n).into()),
         am::ScalarValue::Uint(n) => serde_json::Value::Number((*n).into()),
         am::ScalarValue::F64(n) => serde_json::Number::from_f64(*n)
@@ -72,23 +67,22 @@ fn scalar_to_json(val: &am::ScalarValue) -> serde_json::Value {
     }
 }
 
-fn get_state_json(input_data: Vec<u8>, skip: SkipVerifyFlag) -> Result<serde_json::Value> {
-    let doc = skip.load(&input_data).unwrap(); // FIXME
+fn get_state_json(input_data: Vec<u8>) -> Result<serde_json::Value> {
+    let doc = am::Automerge::load(&input_data).unwrap(); // FIXME
     Ok(map_to_json(&doc, &am::ObjId::Root))
 }
 
-pub(crate) fn export_json(
+pub fn export_json(
     mut changes_reader: impl std::io::Read,
     mut writer: impl std::io::Write,
-    skip: SkipVerifyFlag,
     is_tty: bool,
 ) -> Result<()> {
     let mut input_data = vec![];
     changes_reader.read_to_end(&mut input_data)?;
 
-    let state_json = get_state_json(input_data, skip)?;
+    let state_json = get_state_json(input_data)?;
     if is_tty {
-        print_colored_json(&state_json).unwrap();
+        colored_json::write_colored_json(&state_json, &mut writer).unwrap();
         writeln!(writer).unwrap();
     } else {
         writeln!(
@@ -107,10 +101,7 @@ mod tests {
 
     #[test]
     fn cli_export_with_empty_input() {
-        assert_eq!(
-            get_state_json(vec![], Default::default()).unwrap(),
-            serde_json::json!({})
-        )
+        assert_eq!(get_state_json(vec![]).unwrap(), serde_json::json!({}))
     }
 
     #[test]
@@ -124,7 +115,7 @@ mod tests {
         let mut backend = initialize_from_json(&initial_state_json).unwrap();
         let change_bytes = backend.save();
         assert_eq!(
-            get_state_json(change_bytes, Default::default()).unwrap(),
+            get_state_json(change_bytes).unwrap(),
             serde_json::json!({"sparrows": 15.0})
         )
     }
@@ -151,7 +142,7 @@ mod tests {
         */
         let change_bytes = backend.save();
         assert_eq!(
-            get_state_json(change_bytes, Default::default()).unwrap(),
+            get_state_json(change_bytes).unwrap(),
             serde_json::json!({
                 "birds": {
                     "wrens": 3.0,
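export_json() above boils down to load-the-bytes, walk-to-JSON; the JS API added later in this diff can express the same round trip. A hedged sketch (save/load/toJS as defined in automerge-js/src/index.js):

// Hedged sketch: the JS counterpart of get_state_json()/export_json().
const automerge = require('automerge-js')

let doc = automerge.init()
doc = automerge.change(doc, d => { d.sparrows = 15 })

const bytes = automerge.save(doc)      // compact byte encoding, like the CLI's input file
const restored = automerge.load(bytes) // equivalent of Automerge::load(&input_data)
console.log(JSON.stringify(automerge.toJS(restored))) // {"sparrows":15}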
automerge-cli/src/import.rs
@@ -22,31 +22,31 @@ fn import_map(
     for (key, value) in map {
         match value {
             serde_json::Value::Null => {
-                doc.put(obj, key, ())?;
+                doc.set(obj, key, ())?;
             }
             serde_json::Value::Bool(b) => {
-                doc.put(obj, key, *b)?;
+                doc.set(obj, key, *b)?;
             }
             serde_json::Value::String(s) => {
-                doc.put(obj, key, s)?;
+                doc.set(obj, key, s.as_ref())?;
             }
             serde_json::Value::Array(vec) => {
-                let id = doc.put_object(obj, key, am::ObjType::List)?;
+                let id = doc.set_object(obj, key, am::ObjType::List)?;
                 import_list(doc, &id, vec)?;
             }
             serde_json::Value::Number(n) => {
                 if let Some(m) = n.as_i64() {
-                    doc.put(obj, key, m)?;
+                    doc.set(obj, key, m)?;
                 } else if let Some(m) = n.as_u64() {
-                    doc.put(obj, key, m)?;
+                    doc.set(obj, key, m)?;
                 } else if let Some(m) = n.as_f64() {
-                    doc.put(obj, key, m)?;
+                    doc.set(obj, key, m)?;
                 } else {
                     anyhow::bail!("not a number");
                 }
             }
             serde_json::Value::Object(map) => {
-                let id = doc.put_object(obj, key, am::ObjType::Map)?;
+                let id = doc.set_object(obj, key, am::ObjType::Map)?;
                 import_map(doc, &id, map)?;
             }
         }
@@ -68,7 +68,7 @@ fn import_list(
                 doc.insert(obj, i, *b)?;
             }
             serde_json::Value::String(s) => {
-                doc.insert(obj, i, s)?;
+                doc.insert(obj, i, s.as_ref())?;
             }
             serde_json::Value::Array(vec) => {
                 let id = doc.insert_object(obj, i, am::ObjType::List)?;
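import_map/import_list recursively copy a serde_json::Value tree into a document; from() in the JS code added later in this diff does the same job for plain objects. A hedged sketch:

// Hedged sketch: JS analogue of the Rust import path, using from()
// from automerge-js/src/index.js (init() plus Object.assign inside a change).
const automerge = require('automerge-js')

const doc = automerge.from({ birds: { wrens: 3.0 }, names: ['wren', 'sparrow'] })
const bytes = automerge.save(doc) // bytes a later `automerge export` could read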
automerge-cli/src/main.rs
@@ -1,15 +1,10 @@
 use std::{fs::File, path::PathBuf, str::FromStr};
 
 use anyhow::{anyhow, Result};
-use clap::{
-    builder::{BoolishValueParser, TypedValueParser, ValueParserFactory},
-    Parser,
-};
-use is_terminal::IsTerminal;
+use clap::Parser;
 
-mod color_json;
+//mod change;
 mod examine;
-mod examine_sync;
 mod export;
 mod import;
 mod merge;
@@ -21,50 +16,12 @@ struct Opts {
     cmd: Command,
 }
 
-#[derive(clap::ValueEnum, Clone, Debug)]
+#[derive(Debug)]
 enum ExportFormat {
     Json,
     Toml,
 }
 
-#[derive(Copy, Clone, Default, Debug)]
-pub(crate) struct SkipVerifyFlag(bool);
-
-impl SkipVerifyFlag {
-    fn load(&self, buf: &[u8]) -> Result<automerge::Automerge, automerge::AutomergeError> {
-        if self.0 {
-            automerge::Automerge::load(buf)
-        } else {
-            automerge::Automerge::load_unverified_heads(buf)
-        }
-    }
-}
-
-#[derive(Clone)]
-struct SkipVerifyFlagParser;
-impl ValueParserFactory for SkipVerifyFlag {
-    type Parser = SkipVerifyFlagParser;
-
-    fn value_parser() -> Self::Parser {
-        SkipVerifyFlagParser
-    }
-}
-
-impl TypedValueParser for SkipVerifyFlagParser {
-    type Value = SkipVerifyFlag;
-
-    fn parse_ref(
-        &self,
-        cmd: &clap::Command,
-        arg: Option<&clap::Arg>,
-        value: &std::ffi::OsStr,
-    ) -> Result<Self::Value, clap::Error> {
-        BoolishValueParser::new()
-            .parse_ref(cmd, arg, value)
-            .map(SkipVerifyFlag)
-    }
-}
-
 impl FromStr for ExportFormat {
     type Err = anyhow::Error;
 
@@ -86,15 +43,12 @@ enum Command {
         format: ExportFormat,
 
         /// Path that contains Automerge changes
+        #[clap(parse(from_os_str))]
         changes_file: Option<PathBuf>,
 
         /// The file to write to. If omitted assumes stdout
-        #[clap(long("out"), short('o'))]
+        #[clap(parse(from_os_str), long("out"), short('o'))]
         output_file: Option<PathBuf>,
-
-        /// Whether to verify the head hashes of a compressed document
-        #[clap(long, action = clap::ArgAction::SetFalse)]
-        skip_verifying_heads: SkipVerifyFlag,
     },
 
     Import {
@@ -102,37 +56,69 @@ enum Command {
         #[clap(long, short, default_value = "json")]
         format: ExportFormat,
 
+        #[clap(parse(from_os_str))]
         input_file: Option<PathBuf>,
 
         /// Path to write Automerge changes to
-        #[clap(long("out"), short('o'))]
+        #[clap(parse(from_os_str), long("out"), short('o'))]
        changes_file: Option<PathBuf>,
     },
 
-    /// Read an automerge document and print a JSON representation of the changes in it to stdout
-    Examine {
-        input_file: Option<PathBuf>,
-        skip_verifying_heads: SkipVerifyFlag,
-    },
+    /// Read an automerge document from a file or stdin, perform a change on it and write a new
+    /// document to stdout or the specified output file.
+    Change {
+        /// The change script to perform. Change scripts have the form <command> <path> [<JSON value>].
+        /// The possible commands are 'set', 'insert', 'delete', and 'increment'.
+        ///
+        /// Paths look like this: $["mapkey"][0]. They always start with a '$', then each
+        /// subsequent segment of the path is either a string in double quotes to index a key in a
+        /// map, or an integer index to address an array element.
+        ///
+        /// Examples
+        ///
+        /// ## set
+        ///
+        /// > automerge change 'set $["someobject"] {"items": []}' somefile
+        ///
+        /// ## insert
+        ///
+        /// > automerge change 'insert $["someobject"]["items"][0] "item1"' somefile
+        ///
+        /// ## increment
+        ///
+        /// > automerge change 'increment $["mycounter"]'
+        ///
+        /// ## delete
+        ///
+        /// > automerge change 'delete $["someobject"]["items"]' somefile
+        script: String,
+
+        /// The file to change, if omitted will assume stdin
+        #[clap(parse(from_os_str))]
+        input_file: Option<PathBuf>,
+
+        /// Path to write Automerge changes to, if omitted will write to stdout
+        #[clap(parse(from_os_str), long("out"), short('o'))]
+        output_file: Option<PathBuf>,
+    },
 
-    /// Read an automerge sync message and print a JSON representation of it
-    ExamineSync { input_file: Option<PathBuf> },
+    /// Read an automerge document and print a JSON representation of the changes in it to stdout
+    Examine { input_file: Option<PathBuf> },
 
     /// Read one or more automerge documents and output a merged, compacted version of them
     Merge {
         /// The file to write to. If omitted assumes stdout
-        #[clap(long("out"), short('o'))]
+        #[clap(parse(from_os_str), long("out"), short('o'))]
         output_file: Option<PathBuf>,
 
         /// The file(s) to compact. If empty assumes stdin
         input: Vec<PathBuf>,
     },
 }
 
 fn open_file_or_stdin(maybe_path: Option<PathBuf>) -> Result<Box<dyn std::io::Read>> {
-    if std::io::stdin().is_terminal() {
+    if atty::is(atty::Stream::Stdin) {
         if let Some(path) = maybe_path {
-            Ok(Box::new(File::open(path).unwrap()))
+            Ok(Box::new(File::open(&path).unwrap()))
         } else {
             Err(anyhow!(
                 "Must provide file path if not providing input via stdin"
@@ -144,9 +130,9 @@ fn open_file_or_stdin(maybe_path: Option<PathBuf>) -> Result<Box<dyn std::io::Re
 }
 
 fn create_file_or_stdout(maybe_path: Option<PathBuf>) -> Result<Box<dyn std::io::Write>> {
-    if std::io::stdout().is_terminal() {
+    if atty::is(atty::Stream::Stdout) {
         if let Some(path) = maybe_path {
-            Ok(Box::new(File::create(path).unwrap()))
+            Ok(Box::new(File::create(&path).unwrap()))
         } else {
             Err(anyhow!("Must provide file path if not piping to stdout"))
         }
@@ -163,22 +149,16 @@ fn main() -> Result<()> {
             changes_file,
             format,
             output_file,
-            skip_verifying_heads,
         } => {
             let output: Box<dyn std::io::Write> = if let Some(output_file) = output_file {
-                Box::new(File::create(output_file)?)
+                Box::new(File::create(&output_file)?)
             } else {
                 Box::new(std::io::stdout())
             };
             match format {
                 ExportFormat::Json => {
                     let mut in_buffer = open_file_or_stdin(changes_file)?;
-                    export::export_json(
-                        &mut in_buffer,
-                        output,
-                        skip_verifying_heads,
-                        std::io::stdout().is_terminal(),
-                    )
+                    export::export_json(&mut in_buffer, output, atty::is(atty::Stream::Stdout))
                 }
                 ExportFormat::Toml => unimplemented!(),
             }
@@ -195,30 +175,23 @@ fn main() -> Result<()> {
             }
             ExportFormat::Toml => unimplemented!(),
         },
-        Command::Examine {
-            input_file,
-            skip_verifying_heads,
-        } => {
-            let in_buffer = open_file_or_stdin(input_file)?;
-            let out_buffer = std::io::stdout();
-            match examine::examine(
-                in_buffer,
-                out_buffer,
-                skip_verifying_heads,
-                std::io::stdout().is_terminal(),
-            ) {
-                Ok(()) => {}
-                Err(e) => {
-                    eprintln!("Error: {:?}", e);
-                }
-            }
-            Ok(())
-        }
-        Command::ExamineSync { input_file } => {
+        Command::Change { ..
+            //input_file,
+            //output_file,
+            //script,
+        } => {
+            unimplemented!()
+            /*
+            let in_buffer = open_file_or_stdin(input_file)?;
+            let mut out_buffer = create_file_or_stdout(output_file)?;
+            change::change(in_buffer, &mut out_buffer, script.as_str())
+                .map_err(|e| anyhow::format_err!("Unable to make changes: {:?}", e))
+            */
+        }
+        Command::Examine { input_file } => {
             let in_buffer = open_file_or_stdin(input_file)?;
             let out_buffer = std::io::stdout();
-            match examine_sync::examine_sync(in_buffer, out_buffer, std::io::stdout().is_terminal())
-            {
+            match examine::examine(in_buffer, out_buffer, atty::is(atty::Stream::Stdout)) {
                 Ok(()) => {}
                 Err(e) => {
                     eprintln!("Error: {:?}", e);
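The Change command's help text documents set/insert/increment/delete scripts, but its arm is unimplemented on this branch. For orientation, the same four operations expressed through the JS proxies added later in this diff (a hedged sketch; splice, delete, and assignment come from proxies.js, increment from counter.js):

// Hedged sketch: the four change-script verbs via the automerge-js API.
const automerge = require('automerge-js')

let doc = automerge.from({ someobject: { items: [] }, mycounter: new automerge.Counter() })
doc = automerge.change(doc, d => {
  d.someobject.items.splice(0, 0, 'item1') // insert $["someobject"]["items"][0] "item1"
  d.mycounter.increment()                  // increment $["mycounter"]
  delete d.someobject.items                // delete $["someobject"]["items"]
  d.someobject = { items: [] }             // set $["someobject"] {"items": []}
})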
automerge-js/.gitignore (vendored, new file)
@@ -0,0 +1,2 @@
+/node_modules
+/yarn.lock
automerge-js/package.json (new file)
@@ -0,0 +1,18 @@
+{
+  "name": "automerge-js",
+  "version": "0.1.0",
+  "main": "src/index.js",
+  "license": "MIT",
+  "scripts": {
+    "test": "mocha --bail --full-trace"
+  },
+  "devDependencies": {
+    "mocha": "^9.1.1"
+  },
+  "dependencies": {
+    "automerge-wasm": "file:../automerge-wasm",
+    "fast-sha256": "^1.3.0",
+    "pako": "^2.0.4",
+    "uuid": "^8.3"
+  }
+}
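The test script above runs mocha against this package. A hedged sketch of the shape such a test might take (the file path automerge-js/test/basic.js is hypothetical; the API calls are the ones index.js exports below):

// Hedged sketch: a mocha test for the round trip through save/load.
const assert = require('assert')
const automerge = require('../src') // resolves to src/index.js

describe('automerge-js', () => {
  it('round-trips a document through save/load', () => {
    const doc = automerge.from({ hello: 'world' })
    const restored = automerge.load(automerge.save(doc))
    assert.deepStrictEqual(automerge.toJS(restored), { hello: 'world' })
  })
})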
automerge-js/src/constants.js (new file)
@@ -0,0 +1,18 @@
+// Properties of the document root object
+//const OPTIONS = Symbol('_options')   // object containing options passed to init()
+//const CACHE = Symbol('_cache')       // map from objectId to immutable object
+const STATE = Symbol('_state')         // the underlying automerge-wasm document handle
+const HEADS = Symbol('_heads')         // the heads of the document when a change began
+const OBJECT_ID = Symbol('_objectId')  // the object ID of the current object (string)
+const READ_ONLY = Symbol('_readOnly')  // whether this proxy may be mutated
+const FROZEN = Symbol('_frozen')       // whether the document is frozen mid-change
+
+// Properties of all Automerge objects
+//const OBJECT_ID = Symbol('_objectId') // the object ID of the current object (string)
+//const CONFLICTS = Symbol('_conflicts') // map or list (depending on object type) of conflicts
+//const CHANGE = Symbol('_change')     // the context object on proxy objects used in change callback
+//const ELEM_IDS = Symbol('_elemIds')  // list containing the element ID of each list element
+
+module.exports = {
+  STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN
+}
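These Symbol keys let the proxies hang metadata off user documents without colliding with ordinary string properties. A hedged sketch of how they are read back (the proxy handlers in proxies.js answer for them; the relative require path is assumed):

// Hedged sketch: Symbol-keyed metadata access on a document proxy.
const automerge = require('automerge-js')
const { STATE, OBJECT_ID } = require('./constants')

const doc = automerge.init()
console.log(doc[OBJECT_ID]) // "_root", the backing object's ID
console.log(doc[STATE])     // the underlying automerge-wasm document handle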
automerge-js/src/counter.js
@@ -1,16 +1,12 @@
-import { Automerge, type ObjID, type Prop } from "@automerge/automerge-wasm"
-
-import { COUNTER } from "./constants"
-
 /**
  * The most basic CRDT: an integer value that can be changed only by
  * incrementing and decrementing. Since addition of integers is commutative,
  * the value trivially converges.
  */
-export class Counter {
-  value: number
-
-  constructor(value?: number) {
+class Counter {
+  constructor(value) {
     this.value = value || 0
-    Reflect.defineProperty(this, COUNTER, { value: true })
+    Object.freeze(this)
   }
 
   /**
@@ -21,7 +17,7 @@ export class Counter {
    * concatenating it with another string, as in `x + ''`.
    * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf
    */
-  valueOf(): number {
+  valueOf() {
     return this.value
   }
 
@@ -30,7 +26,7 @@ export class Counter {
    * this method is called e.g. when you do `['value: ', x].join('')` or when
    * you use string interpolation: `value: ${x}`.
    */
-  toString(): string {
+  toString() {
     return this.valueOf().toString()
   }
 
@@ -38,7 +34,7 @@ export class Counter {
   * Returns the counter value, so that a JSON serialization of an Automerge
   * document represents the counter simply as an integer.
   */
-  toJSON(): number {
+  toJSON() {
     return this.value
   }
 }
 
@@ -48,32 +44,13 @@ export class Counter {
  * callback.
  */
 class WriteableCounter extends Counter {
-  context: Automerge
-  path: Prop[]
-  objectId: ObjID
-  key: Prop
-
-  constructor(
-    value: number,
-    context: Automerge,
-    path: Prop[],
-    objectId: ObjID,
-    key: Prop
-  ) {
-    super(value)
-    this.context = context
-    this.path = path
-    this.objectId = objectId
-    this.key = key
-  }
-
   /**
    * Increases the value of the counter by `delta`. If `delta` is not given,
    * increases the value of the counter by 1.
    */
-  increment(delta: number): number {
-    delta = typeof delta === "number" ? delta : 1
-    this.context.increment(this.objectId, this.key, delta)
+  increment(delta) {
+    delta = typeof delta === 'number' ? delta : 1
+    this.context.inc(this.objectId, this.key, delta)
     this.value += delta
     return this.value
   }
 
@@ -82,8 +59,8 @@ class WriteableCounter extends Counter {
   * Decreases the value of the counter by `delta`. If `delta` is not given,
   * decreases the value of the counter by 1.
   */
-  decrement(delta: number): number {
-    return this.increment(typeof delta === "number" ? -delta : -1)
+  decrement(delta) {
+    return this.increment(typeof delta === 'number' ? -delta : -1)
   }
 }
 
@@ -93,15 +70,15 @@ class WriteableCounter extends Counter {
  * `objectId` is the ID of the object containing the counter, and `key` is
  * the property name (key in map, or index in list) where the counter is
  * located.
  */
-export function getWriteableCounter(
-  value: number,
-  context: Automerge,
-  path: Prop[],
-  objectId: ObjID,
-  key: Prop
-): WriteableCounter {
-  return new WriteableCounter(value, context, path, objectId, key)
-}
+function getWriteableCounter(value, context, path, objectId, key) {
+  const instance = Object.create(WriteableCounter.prototype)
+  instance.value = value
+  instance.context = context
+  instance.path = path
+  instance.objectId = objectId
+  instance.key = key
+  return instance
+}
 
-//module.exports = { Counter, getWriteableCounter }
+module.exports = { Counter, getWriteableCounter }
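A hedged usage sketch for the classes above: inside a change callback the proxies hand out a WriteableCounter, so increments are recorded as counter operations rather than plain overwrites:

// Hedged sketch: reading and mutating a Counter through the proxy layer.
const automerge = require('automerge-js')

let doc = automerge.from({ visits: new automerge.Counter(10) })
doc = automerge.change(doc, d => {
  d.visits.increment()   // +1 (delta defaults to 1)
  d.visits.increment(4)  // +4
  d.visits.decrement(2)  // -2
})
console.log(`${doc.visits}`) // "13", via Counter.toString()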
automerge-js/src/index.js (new file)
@@ -0,0 +1,372 @@
+const AutomergeWASM = require("automerge-wasm")
+const uuid = require('./uuid')
+
+let { rootProxy, listProxy, textProxy, mapProxy } = require("./proxies")
+let { Counter } = require("./counter")
+let { Text } = require("./text")
+let { Int, Uint, Float64 } = require("./numbers")
+let { STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN } = require("./constants")
+
+function init(actor) {
+  if (typeof actor != 'string') {
+    actor = null
+  }
+  const state = AutomergeWASM.create(actor)
+  return rootProxy(state, true);
+}
+
+function clone(doc) {
+  const state = doc[STATE].clone()
+  return rootProxy(state, true);
+}
+
+function free(doc) {
+  return doc[STATE].free()
+}
+
+function from(data, actor) {
+  let doc1 = init(actor)
+  let doc2 = change(doc1, (d) => Object.assign(d, data))
+  return doc2
+}
+
+function change(doc, options, callback) {
+  if (callback === undefined) {
+    // FIXME implement options
+    callback = options
+    options = {}
+  }
+  if (typeof options === "string") {
+    options = { message: options }
+  }
+  if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") {
+    throw new RangeError("must be the document root");
+  }
+  if (doc[FROZEN] === true) {
+    throw new RangeError("Attempting to use an outdated Automerge document")
+  }
+  if (!!doc[HEADS] === true) {
+    throw new RangeError("Attempting to change an out of date document");
+  }
+  if (doc[READ_ONLY] === false) {
+    throw new RangeError("Calls to Automerge.change cannot be nested")
+  }
+  const state = doc[STATE]
+  const heads = state.getHeads()
+  try {
+    doc[HEADS] = heads
+    doc[FROZEN] = true
+    let root = rootProxy(state);
+    callback(root)
+    if (state.pendingOps() === 0) {
+      doc[FROZEN] = false
+      doc[HEADS] = undefined
+      return doc
+    } else {
+      state.commit(options.message, options.time)
+      return rootProxy(state, true);
+    }
+  } catch (e) {
+    //console.log("ERROR: ",e)
+    doc[FROZEN] = false
+    doc[HEADS] = undefined
+    state.rollback()
+    throw e
+  }
+}
+
+function emptyChange(doc, options) {
+  if (options === undefined) {
+    options = {}
+  }
+  if (typeof options === "string") {
+    options = { message: options }
+  }
+
+  if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") {
+    throw new RangeError("must be the document root");
+  }
+  if (doc[FROZEN] === true) {
+    throw new RangeError("Attempting to use an outdated Automerge document")
+  }
+  if (doc[READ_ONLY] === false) {
+    throw new RangeError("Calls to Automerge.change cannot be nested")
+  }
+
+  const state = doc[STATE]
+  state.commit(options.message, options.time)
+  return rootProxy(state, true);
+}
+
+function load(data, actor) {
+  const state = AutomergeWASM.loadDoc(data, actor)
+  return rootProxy(state, true);
+}
+
+function save(doc) {
+  const state = doc[STATE]
+  return state.save()
+}
+
+function merge(local, remote) {
+  if (local[HEADS] === true) {
+    throw new RangeError("Attempting to change an out of date document");
+  }
+  const localState = local[STATE]
+  const heads = localState.getHeads()
+  const remoteState = remote[STATE]
+  const changes = localState.getChangesAdded(remoteState)
+  localState.applyChanges(changes)
+  local[HEADS] = heads
+  return rootProxy(localState, true)
+}
+
+function getActorId(doc) {
+  const state = doc[STATE]
+  return state.getActorId()
+}
+
+function conflictAt(context, objectId, prop) {
+  let values = context.values(objectId, prop)
+  if (values.length <= 1) {
+    return
+  }
+  let result = {}
+  for (const conflict of values) {
+    const datatype = conflict[0]
+    const value = conflict[1]
+    switch (datatype) {
+      case "map":
+        result[value] = mapProxy(context, value, [ prop ], true)
+        break;
+      case "list":
+        result[value] = listProxy(context, value, [ prop ], true)
+        break;
+      case "text":
+        result[value] = textProxy(context, value, [ prop ], true)
+        break;
+      //case "table":
+      //case "cursor":
+      case "str":
+      case "uint":
+      case "int":
+      case "f64":
+      case "boolean":
+      case "bytes":
+      case "null":
+        result[conflict[2]] = value
+        break;
+      case "counter":
+        result[conflict[2]] = new Counter(value)
+        break;
+      case "timestamp":
+        result[conflict[2]] = new Date(value)
+        break;
+      default:
+        throw RangeError(`datatype ${datatype} unimplemented`)
+    }
+  }
+  return result
+}
+
+function getConflicts(doc, prop) {
+  const state = doc[STATE]
+  const objectId = doc[OBJECT_ID]
+  return conflictAt(state, objectId, prop)
+}
+
+function getLastLocalChange(doc) {
+  const state = doc[STATE]
+  try {
+    return state.getLastLocalChange()
+  } catch (e) {
+    return
+  }
+}
+
+function getObjectId(doc) {
+  return doc[OBJECT_ID]
+}
+
+function getChanges(oldState, newState) {
+  const o = oldState[STATE]
+  const n = newState[STATE]
+  const heads = oldState[HEADS]
+  return n.getChanges(heads || o.getHeads())
+}
+
+function getAllChanges(doc) {
+  const state = doc[STATE]
+  return state.getChanges([])
+}
+
+function applyChanges(doc, changes) {
+  if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") {
+    throw new RangeError("must be the document root");
+  }
+  if (doc[FROZEN] === true) {
+    throw new RangeError("Attempting to use an outdated Automerge document")
+  }
+  if (doc[READ_ONLY] === false) {
+    throw new RangeError("Calls to Automerge.change cannot be nested")
+  }
+  const state = doc[STATE]
+  const heads = state.getHeads()
+  state.applyChanges(changes)
+  doc[HEADS] = heads
+  return [rootProxy(state, true)];
+}
+
+function getHistory(doc) {
+  const actor = getActorId(doc)
+  const history = getAllChanges(doc)
+  return history.map((change, index) => ({
+      get change () {
+        return decodeChange(change)
+      },
+      get snapshot () {
+        const [state] = applyChanges(init(), history.slice(0, index + 1))
+        return state
+      }
+    })
+  )
+}
+
+function equals(val1, val2) {
+  if (!isObject(val1) || !isObject(val2)) return val1 === val2
+  const keys1 = Object.keys(val1).sort(), keys2 = Object.keys(val2).sort()
+  if (keys1.length !== keys2.length) return false
+  for (let i = 0; i < keys1.length; i++) {
+    if (keys1[i] !== keys2[i]) return false
+    if (!equals(val1[keys1[i]], val2[keys2[i]])) return false
+  }
+  return true
+}
+
+function encodeSyncMessage(msg) {
+  return AutomergeWASM.encodeSyncMessage(msg)
+}
+
+function decodeSyncMessage(msg) {
+  return AutomergeWASM.decodeSyncMessage(msg)
+}
+
+function encodeSyncState(state) {
+  return AutomergeWASM.encodeSyncState(AutomergeWASM.importSyncState(state))
+}
+
+function decodeSyncState(state) {
+  return AutomergeWASM.exportSyncState(AutomergeWASM.decodeSyncState(state))
+}
+
+function generateSyncMessage(doc, inState) {
+  const state = doc[STATE]
+  const syncState = AutomergeWASM.importSyncState(inState)
+  const message = state.generateSyncMessage(syncState)
+  const outState = AutomergeWASM.exportSyncState(syncState)
+  return [ outState, message ]
+}
+
+function receiveSyncMessage(doc, inState, message) {
+  const syncState = AutomergeWASM.importSyncState(inState)
+  if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") {
+    throw new RangeError("must be the document root");
+  }
+  if (doc[FROZEN] === true) {
+    throw new RangeError("Attempting to use an outdated Automerge document")
+  }
+  if (!!doc[HEADS] === true) {
+    throw new RangeError("Attempting to change an out of date document");
+  }
+  if (doc[READ_ONLY] === false) {
+    throw new RangeError("Calls to Automerge.change cannot be nested")
+  }
+  const state = doc[STATE]
+  const heads = state.getHeads()
+  state.receiveSyncMessage(syncState, message)
+  const outState = AutomergeWASM.exportSyncState(syncState)
+  doc[HEADS] = heads
+  return [rootProxy(state, true), outState, null];
+}
+
+function initSyncState() {
+  return AutomergeWASM.exportSyncState(AutomergeWASM.initSyncState())
+}
+
+function encodeChange(change) {
+  return AutomergeWASM.encodeChange(change)
+}
+
+function decodeChange(data) {
+  return AutomergeWASM.decodeChange(data)
+}
+
+function encodeSyncMessage(change) {
+  return AutomergeWASM.encodeSyncMessage(change)
+}
+
+function decodeSyncMessage(data) {
+  return AutomergeWASM.decodeSyncMessage(data)
+}
+
+function getMissingDeps(doc, heads) {
+  const state = doc[STATE]
+  return state.getMissingDeps(heads)
+}
+
+function getHeads(doc) {
+  const state = doc[STATE]
+  return doc[HEADS] || state.getHeads()
+}
+
+function dump(doc) {
+  const state = doc[STATE]
+  state.dump()
+}
+
+function toJS(doc) {
+  if (typeof doc === "object") {
+    if (doc instanceof Uint8Array) {
+      return doc
+    }
+    if (doc === null) {
+      return doc
+    }
+    if (doc instanceof Array) {
+      return doc.map((a) => toJS(a))
+    }
+    if (doc instanceof Text) {
+      return doc.map((a) => toJS(a))
+    }
+    let tmp = {}
+    for (const index in doc) {
+      tmp[index] = toJS(doc[index])
+    }
+    return tmp
+  } else {
+    return doc
+  }
+}
+
+module.exports = {
+  init, from, change, emptyChange, clone, free,
+  load, save, merge, getChanges, getAllChanges, applyChanges,
+  getLastLocalChange, getObjectId, getActorId, getConflicts,
+  encodeChange, decodeChange, equals, getHistory, getHeads, uuid,
+  generateSyncMessage, receiveSyncMessage, initSyncState,
+  decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState,
+  getMissingDeps,
+  dump, Text, Counter, Int, Uint, Float64, toJS,
+}
+
+// deprecated
+// Frontend, setDefaultBackend, Backend
+
+// more...
+/*
+for (let name of ['getObjectId', 'getObjectById',
+     'setActorId',
+     'Text', 'Table', 'Counter', 'Observable' ]) {
+  module.exports[name] = Frontend[name]
+}
+*/
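A hedged end-to-end sketch of the API above: two documents diverge and converge again through merge(), which applies exactly the changes the other side added:

// Hedged sketch: divergence and convergence with the functions defined above.
const automerge = require('automerge-js')

let doc1 = automerge.from({ wrens: 3 })
let doc2 = automerge.load(automerge.save(doc1)) // second actor, same history

doc1 = automerge.change(doc1, d => { d.owls = 2 })
doc2 = automerge.change(doc2, d => { d.sparrows = 1 })

doc1 = automerge.merge(doc1, doc2) // getChangesAdded + applyChanges under the hood
console.log(automerge.toJS(doc1))  // { wrens: 3, owls: 2, sparrows: 1 }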
automerge-js/src/numbers.js (new file)
@@ -0,0 +1,33 @@
+// Convenience classes to allow users to strictly specify the number type they want
+
+class Int {
+  constructor(value) {
+    if (!(Number.isInteger(value) && value <= Number.MAX_SAFE_INTEGER && value >= Number.MIN_SAFE_INTEGER)) {
+      throw new RangeError(`Value ${value} cannot be an integer`)
+    }
+    this.value = value
+    Object.freeze(this)
+  }
+}
+
+class Uint {
+  constructor(value) {
+    if (!(Number.isInteger(value) && value <= Number.MAX_SAFE_INTEGER && value >= 0)) {
+      throw new RangeError(`Value ${value} cannot be a uint`)
+    }
+    this.value = value
+    Object.freeze(this)
+  }
+}
+
+class Float64 {
+  constructor(value) {
+    if (typeof value !== 'number') {
+      throw new RangeError(`Value ${value} cannot be a float64`)
+    }
+    this.value = value || 0.0
+    Object.freeze(this)
+  }
+}
+
+module.exports = { Int, Uint, Float64 }
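A hedged sketch of why these wrappers exist: plain JS numbers are stored as "int" or "f64" by import_value() in proxies.js below, so the wrappers are the only way to force a particular scalar datatype on write:

// Hedged sketch: forcing a specific number datatype in a change.
const automerge = require('automerge-js')

let doc = automerge.init()
doc = automerge.change(doc, d => {
  d.count = -5                        // plain integer: stored as "int"
  d.ratio = 0.5                       // plain float: stored as "f64"
  d.age = new automerge.Uint(37)      // forced "uint"
  d.price = new automerge.Float64(3)  // forced "f64", even though integral
})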
623
automerge-js/src/proxies.js
Normal file
623
automerge-js/src/proxies.js
Normal file
|
|
@ -0,0 +1,623 @@
|
||||||
|
|
||||||
|
const AutomergeWASM = require("automerge-wasm")
|
||||||
|
const { Int, Uint, Float64 } = require("./numbers");
|
||||||
|
const { Counter, getWriteableCounter } = require("./counter");
|
||||||
|
const { Text } = require("./text");
|
||||||
|
const { STATE, HEADS, FROZEN, OBJECT_ID, READ_ONLY } = require("./constants")
|
||||||
|
|
||||||
|
function parseListIndex(key) {
|
||||||
|
if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10)
|
||||||
|
if (typeof key !== 'number') {
|
||||||
|
// throw new TypeError('A list index must be a number, but you passed ' + JSON.stringify(key))
|
||||||
|
return key
|
||||||
|
}
|
||||||
|
if (key < 0 || isNaN(key) || key === Infinity || key === -Infinity) {
|
||||||
|
throw new RangeError('A list index must be positive, but you passed ' + key)
|
||||||
|
}
|
||||||
|
return key
|
||||||
|
}
|
||||||
|
|
||||||
|
function valueAt(target, prop) {
|
||||||
|
const { context, objectId, path, readonly, heads} = target
|
||||||
|
let value = context.value(objectId, prop, heads)
|
||||||
|
if (value === undefined) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
const datatype = value[0]
|
||||||
|
const val = value[1]
|
||||||
|
switch (datatype) {
|
||||||
|
case undefined: return;
|
||||||
|
case "map": return mapProxy(context, val, [ ... path, prop ], readonly, heads);
|
||||||
|
case "list": return listProxy(context, val, [ ... path, prop ], readonly, heads);
|
||||||
|
case "text": return textProxy(context, val, [ ... path, prop ], readonly, heads);
|
||||||
|
//case "table":
|
||||||
|
//case "cursor":
|
||||||
|
case "str": return val;
|
||||||
|
case "uint": return val;
|
||||||
|
case "int": return val;
|
||||||
|
case "f64": return val;
|
||||||
|
case "boolean": return val;
|
||||||
|
case "null": return null;
|
||||||
|
case "bytes": return val;
|
||||||
|
case "timestamp": return val;
|
||||||
|
case "counter": {
|
||||||
|
if (readonly) {
|
||||||
|
return new Counter(val);
|
||||||
|
} else {
|
||||||
|
return getWriteableCounter(val, context, path, objectId, prop)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
throw RangeError(`datatype ${datatype} unimplemented`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function import_value(value) {
|
||||||
|
switch (typeof value) {
|
||||||
|
case 'object':
|
||||||
|
if (value == null) {
|
||||||
|
return [ null, "null"]
|
||||||
|
} else if (value instanceof Uint) {
|
||||||
|
return [ value.value, "uint" ]
|
||||||
|
} else if (value instanceof Int) {
|
||||||
|
return [ value.value, "int" ]
|
||||||
|
} else if (value instanceof Float64) {
|
||||||
|
return [ value.value, "f64" ]
|
||||||
|
} else if (value instanceof Counter) {
|
||||||
|
return [ value.value, "counter" ]
|
||||||
|
} else if (value instanceof Date) {
|
||||||
|
return [ value.getTime(), "timestamp" ]
|
||||||
|
} else if (value instanceof Uint8Array) {
|
||||||
|
return [ value, "bytes" ]
|
||||||
|
} else if (value instanceof Array) {
|
||||||
|
return [ value, "list" ]
|
||||||
|
} else if (value instanceof Text) {
|
||||||
|
return [ value, "text" ]
|
||||||
|
} else if (value[OBJECT_ID]) {
|
||||||
|
throw new RangeError('Cannot create a reference to an existing document object')
|
||||||
|
} else {
|
||||||
|
return [ value, "map" ]
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
case 'boolean':
|
||||||
|
return [ value, "boolean" ]
|
||||||
|
case 'number':
|
||||||
|
if (Number.isInteger(value)) {
|
||||||
|
return [ value, "int" ]
|
||||||
|
} else {
|
||||||
|
return [ value, "f64" ]
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
case 'string':
|
||||||
|
return [ value ]
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
throw new RangeError(`Unsupported type of value: ${typeof value}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const MapHandler = {
  get (target, key) {
    const { context, objectId, path, readonly, frozen, heads, cache } = target
    if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] }
    if (key === OBJECT_ID) return objectId
    if (key === READ_ONLY) return readonly
    if (key === FROZEN) return frozen
    if (key === HEADS) return heads
    if (key === STATE) return context;
    if (!cache[key]) {
      cache[key] = valueAt(target, key)
    }
    return cache[key]
  },

  set (target, key, val) {
    let { context, objectId, path, readonly, frozen } = target
    target.cache = {} // reset cache on set
    if (val && val[OBJECT_ID]) {
      throw new RangeError('Cannot create a reference to an existing document object')
    }
    if (key === FROZEN) {
      target.frozen = val
      return
    }
    if (key === HEADS) {
      target.heads = val
      return
    }
    let [ value, datatype ] = import_value(val)
    if (frozen) {
      throw new RangeError("Attempting to use an outdated Automerge document")
    }
    if (readonly) {
      throw new RangeError(`Object property "${key}" cannot be modified`)
    }
    switch (datatype) {
      case "list":
        const list = context.set_object(objectId, key, [])
        const proxyList = listProxy(context, list, [ ...path, key ], readonly);
        for (let i = 0; i < value.length; i++) {
          proxyList[i] = value[i]
        }
        break;
      case "text":
        const text = context.set_object(objectId, key, "", "text")
        const proxyText = textProxy(context, text, [ ...path, key ], readonly);
        for (let i = 0; i < value.length; i++) {
          proxyText[i] = value.get(i)
        }
        break;
      case "map":
        const map = context.set_object(objectId, key, {})
        const proxyMap = mapProxy(context, map, [ ...path, key ], readonly);
        for (const key in value) {
          proxyMap[key] = value[key]
        }
        break;
      default:
        context.set(objectId, key, value, datatype)
    }
    return true
  },

  deleteProperty (target, key) {
    const { context, objectId, path, readonly, frozen } = target
    target.cache = {} // reset cache on delete
    if (readonly) {
      throw new RangeError(`Object property "${key}" cannot be modified`)
    }
    context.del(objectId, key)
    return true
  },

  has (target, key) {
    const value = this.get(target, key)
    return value !== undefined
  },

  getOwnPropertyDescriptor (target, key) {
    const { context, objectId } = target
    const value = this.get(target, key)
    if (typeof value !== 'undefined') {
      return {
        configurable: true, enumerable: true, value
      }
    }
  },

  ownKeys (target) {
    const { context, objectId, heads } = target
    return context.keys(objectId, heads)
  },
}

const ListHandler = {
  get (target, index) {
    const { context, objectId, path, readonly, frozen, heads } = target
    index = parseListIndex(index)
    if (index === Symbol.hasInstance) { return (instance) => { return [].has(instance) } }
    if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] }
    if (index === OBJECT_ID) return objectId
    if (index === READ_ONLY) return readonly
    if (index === FROZEN) return frozen
    if (index === HEADS) return heads
    if (index === STATE) return context;
    if (index === 'length') return context.length(objectId, heads);
    if (index === Symbol.iterator) {
      let i = 0;
      return function *() {
        // FIXME - ugly
        let value = valueAt(target, i)
        while (value !== undefined) {
          yield value
          i += 1
          value = valueAt(target, i)
        }
      }
    }
    if (typeof index === 'number') {
      return valueAt(target, index)
    } else {
      return listMethods(target)[index]
    }
  },

  set (target, index, val) {
    let { context, objectId, path, readonly, frozen } = target
    index = parseListIndex(index)
    if (val && val[OBJECT_ID]) {
      throw new RangeError('Cannot create a reference to an existing document object')
    }
    if (index === FROZEN) {
      target.frozen = val
      return
    }
    if (index === HEADS) {
      target.heads = val
      return
    }
    if (typeof index == "string") {
      throw new RangeError('list index must be a number')
    }
    const [ value, datatype ] = import_value(val)
    if (frozen) {
      throw new RangeError("Attempting to use an outdated Automerge document")
    }
    if (readonly) {
      throw new RangeError(`Object property "${index}" cannot be modified`)
    }
    switch (datatype) {
      case "list":
        let list
        if (index >= context.length(objectId)) {
          list = context.insert_object(objectId, index, [])
        } else {
          list = context.set_object(objectId, index, [])
        }
        const proxyList = listProxy(context, list, [ ...path, index ], readonly);
        proxyList.splice(0, 0, ...value)
        break;
      case "text":
        let text
        if (index >= context.length(objectId)) {
          text = context.insert_object(objectId, index, "", "text")
        } else {
          text = context.set_object(objectId, index, "", "text")
        }
        const proxyText = textProxy(context, text, [ ...path, index ], readonly);
        proxyText.splice(0, 0, ...value)
        break;
      case "map":
        let map
        if (index >= context.length(objectId)) {
          map = context.insert_object(objectId, index, {})
        } else {
          map = context.set_object(objectId, index, {})
        }
        const proxyMap = mapProxy(context, map, [ ...path, index ], readonly);
        for (const key in value) {
          proxyMap[key] = value[key]
        }
        break;
      default:
        if (index >= context.length(objectId)) {
          context.insert(objectId, index, value, datatype)
        } else {
          context.set(objectId, index, value, datatype)
        }
    }
    return true
  },

  deleteProperty (target, index) {
    const { context, objectId } = target
    index = parseListIndex(index)
    if (context.value(objectId, index)[0] == "counter") {
      throw new TypeError('Unsupported operation: deleting a counter from a list')
    }
    context.del(objectId, index)
    return true
  },

  has (target, index) {
    const { context, objectId, heads } = target
    index = parseListIndex(index)
    if (typeof index === 'number') {
      return index < context.length(objectId, heads)
    }
    return index === 'length'
  },

  getOwnPropertyDescriptor (target, index) {
    const { context, objectId, path, readonly, frozen, heads } = target

    if (index === 'length') return { writable: true, value: context.length(objectId, heads) }
    if (index === OBJECT_ID) return { configurable: false, enumerable: false, value: objectId }

    index = parseListIndex(index)

    let value = valueAt(target, index)
    return { configurable: true, enumerable: true, value }
  },

  getPrototypeOf(target) { return Object.getPrototypeOf([]) },
  ownKeys (target) {
    const { context, objectId, heads } = target
    let keys = []
    // uncommenting this causes assert.deepEqual() to fail when comparing to a pojo array
    // but not uncommenting it causes for (i in list) {} to not enumerate values properly
    //for (let i = 0; i < target.context.length(objectId, heads); i++) { keys.push(i.toString()) }
    keys.push("length");
    return keys
  }
}

const TextHandler = Object.assign({}, ListHandler, {
  get (target, index) {
    // FIXME this is a one line change from ListHandler.get()
    const { context, objectId, path, readonly, frozen, heads } = target
    index = parseListIndex(index)
    if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] }
    if (index === Symbol.hasInstance) { return (instance) => { return [].has(instance) } }
    if (index === OBJECT_ID) return objectId
    if (index === READ_ONLY) return readonly
    if (index === FROZEN) return frozen
    if (index === HEADS) return heads
    if (index === STATE) return context;
    if (index === 'length') return context.length(objectId, heads);
    if (index === Symbol.iterator) {
      let i = 0;
      return function *() {
        let value = valueAt(target, i)
        while (value !== undefined) {
          yield value
          i += 1
          value = valueAt(target, i)
        }
      }
    }
    if (typeof index === 'number') {
      return valueAt(target, index)
    } else {
      return textMethods(target)[index] || listMethods(target)[index]
    }
  },
  getPrototypeOf(target) {
    return Object.getPrototypeOf(new Text())
  },
})

function mapProxy(context, objectId, path, readonly, heads) {
  return new Proxy({context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}, MapHandler)
}

function listProxy(context, objectId, path, readonly, heads) {
  let target = []
  Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}})
  return new Proxy(target, ListHandler)
}

function textProxy(context, objectId, path, readonly, heads) {
  let target = []
  Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}})
  return new Proxy(target, TextHandler)
}

function rootProxy(context, readonly) {
  return mapProxy(context, "_root", [], readonly)
}

function listMethods(target) {
  const { context, objectId, path, readonly, frozen, heads } = target
  const methods = {
    deleteAt(index, numDelete) {
      if (typeof numDelete === 'number') {
        context.splice(objectId, index, numDelete)
      } else {
        context.del(objectId, index)
      }
      return this
    },

    fill(val, start, end) {
      // FIXME — fixed here: the original read `valueAt(target, index)` before
      // `index` was defined; the value to store is the `val` argument itself
      const [ value, datatype ] = import_value(val)
      const length = context.length(objectId)
      for (let index = parseListIndex(start || 0); index < parseListIndex(end || length); index++) {
        context.set(objectId, index, value, datatype)
      }
      return this
    },

    indexOf(o, start = 0) {
      // FIXME
      const id = o[OBJECT_ID]
      if (id) {
        const list = context.getObject(objectId)
        for (let index = start; index < list.length; index++) {
          if (list[index][OBJECT_ID] === id) {
            return index
          }
        }
        return -1
      } else {
        return context.indexOf(objectId, o, start)
      }
    },

    insertAt(index, ...values) {
      this.splice(index, 0, ...values)
      return this
    },

    pop() {
      let length = context.length(objectId)
      if (length == 0) {
        return undefined
      }
      let last = valueAt(target, length - 1)
      context.del(objectId, length - 1)
      return last
    },

    push(...values) {
      let len = context.length(objectId)
      this.splice(len, 0, ...values)
      return context.length(objectId)
    },

    shift() {
      if (context.length(objectId) == 0) return
      const first = valueAt(target, 0)
      context.del(objectId, 0)
      return first
    },

    splice(index, del, ...vals) {
      index = parseListIndex(index)
      del = parseListIndex(del)
      for (let val of vals) {
        if (val && val[OBJECT_ID]) {
          throw new RangeError('Cannot create a reference to an existing document object')
        }
      }
      if (frozen) {
        throw new RangeError("Attempting to use an outdated Automerge document")
      }
      if (readonly) {
        throw new RangeError("Sequence object cannot be modified outside of a change block")
      }
      let result = []
      for (let i = 0; i < del; i++) {
        let value = valueAt(target, index)
        result.push(value)
        context.del(objectId, index)
      }
      const values = vals.map((val) => import_value(val))
      for (let [value, datatype] of values) {
        switch (datatype) {
          case "list":
            const list = context.insert_object(objectId, index, [])
            const proxyList = listProxy(context, list, [ ...path, index ], readonly);
            proxyList.splice(0, 0, ...value)
            break;
          case "text":
            const text = context.insert_object(objectId, index, "", "text")
            const proxyText = textProxy(context, text, [ ...path, index ], readonly);
            proxyText.splice(0, 0, ...value)
            break;
          case "map":
            const map = context.insert_object(objectId, index, {})
            const proxyMap = mapProxy(context, map, [ ...path, index ], readonly);
            for (const key in value) {
              proxyMap[key] = value[key]
            }
            break;
          default:
            context.insert(objectId, index, value, datatype)
        }
        index += 1
      }
      return result
    },

    unshift(...values) {
      this.splice(0, 0, ...values)
      return context.length(objectId)
    },

    entries() {
      let i = 0;
      const iterator = {
        next: () => {
          // fixed: the original never advanced `i`, so iteration never terminated
          let value = valueAt(target, i)
          if (value === undefined) {
            return { value: undefined, done: true }
          } else {
            return { value: [ i++, value ], done: false }
          }
        }
      }
      return iterator
    },

    keys() {
      let i = 0;
      let len = context.length(objectId, heads)
      const iterator = {
        next: () => {
          // fixed: the original always reported done: true
          let value = undefined
          if (i < len) { value = i; i++ }
          return { value, done: value === undefined }
        }
      }
      return iterator
    },

    values() {
      let i = 0;
      const iterator = {
        next: () => {
          // fixed: the original never advanced `i`, so iteration never terminated
          let value = valueAt(target, i++)
          if (value === undefined) {
            return { value: undefined, done: true }
          } else {
            return { value, done: false }
          }
        }
      }
      return iterator
    }
  }

  // Read-only methods that can delegate to the JavaScript built-in implementations
  // FIXME - super slow
  for (let method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes',
                      'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight',
                      'slice', 'some', 'toLocaleString', 'toString']) {
    methods[method] = (...args) => {
      const list = []
      while (true) {
        let value = valueAt(target, list.length)
        if (value == undefined) {
          break
        }
        list.push(value)
      }

      return list[method](...args)
    }
  }

  return methods
}

function textMethods(target) {
  const { context, objectId, path, readonly, frozen } = target
  const methods = {
    set (index, value) {
      return this[index] = value
    },
    get (index) {
      return this[index]
    },
    toString () {
      let str = ''
      let length = this.length
      for (let i = 0; i < length; i++) {
        const value = this.get(i)
        if (typeof value === 'string') str += value
      }
      return str
    },
    toSpans () {
      let spans = []
      let chars = ''
      let length = this.length
      for (let i = 0; i < length; i++) {
        const value = this[i]
        if (typeof value === 'string') {
          chars += value
        } else {
          if (chars.length > 0) {
            spans.push(chars)
            chars = ''
          }
          spans.push(value)
        }
      }
      if (chars.length > 0) {
        spans.push(chars)
      }
      return spans
    },
    toJSON () {
      return this.toString()
    }
  }
  return methods
}

module.exports = { rootProxy, textProxy, listProxy, mapProxy, MapHandler, ListHandler, TextHandler }

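For orientation (not part of the diff): every handler above follows the same shape — the proxy's target carries `{context, objectId, path, readonly, frozen, heads, cache}`, and each trap translates a plain property operation into a call against the backend context. A minimal, self-contained sketch of that routing pattern, where `TinyContext` is a hypothetical stand-in for the wasm context (its `set`/`value`/`keys` methods are assumptions modeled on the calls above):

// Orientation sketch: the trap-routing pattern used by MapHandler,
// with a hypothetical TinyContext in place of the wasm backend.
const TINY_STATE = Symbol('state')

const TinyHandler = {
  get (target, key) {
    if (key === TINY_STATE) return target.context
    return target.context.value(target.objectId, key)
  },
  set (target, key, val) {
    target.context.set(target.objectId, key, val)
    return true
  },
  ownKeys (target) {
    return target.context.keys(target.objectId)
  },
  getOwnPropertyDescriptor (target, key) {
    return { configurable: true, enumerable: true, value: this.get(target, key) }
  },
}

class TinyContext {
  constructor () { this.objects = { _root: new Map() } }
  set (objectId, key, value) { this.objects[objectId].set(key, value) }
  value (objectId, key) { return this.objects[objectId].get(key) }
  keys (objectId) { return [...this.objects[objectId].keys()] }
}

const doc = new Proxy({ context: new TinyContext(), objectId: '_root' }, TinyHandler)
doc.hello = 'world'
console.log(doc.hello)        // 'world'
console.log(Object.keys(doc)) // ['hello']

The real handlers add what the sketch omits: caching, read-only and frozen guards, and recursive proxy creation for nested maps, lists, and text.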
@@ -16,15 +16,11 @@
  * last sync to disk), and we fall back to sending the entire document in this case.
  */

-const Backend = null //require('./backend')
-const {
-  hexStringToBytes,
-  bytesToHexString,
-  Encoder,
-  Decoder,
-} = require("./encoding")
-const { decodeChangeMeta } = require("./columnar")
-const { copyObject } = require("./common")
+//const Backend = require('./backend')
+const Backend = {} //require('./backend')
+const { hexStringToBytes, bytesToHexString, Encoder, Decoder } = require('./encoding')
+const { decodeChangeMeta } = require('./columnar')
+const { copyObject } = require('../src/common')

 const HASH_SIZE = 32 // 256 bits = 32 bytes
 const MESSAGE_TYPE_SYNC = 0x42 // first byte of a sync message, for identification
@@ -33,8 +29,7 @@ const PEER_STATE_TYPE = 0x43 // first byte of an encoded peer state, for identif
 // These constants correspond to a 1% false positive rate. The values can be changed without
 // breaking compatibility of the network protocol, since the parameters used for a particular
 // Bloom filter are encoded in the wire format.
-const BITS_PER_ENTRY = 10,
-  NUM_PROBES = 7
+const BITS_PER_ENTRY = 10, NUM_PROBES = 7

 /**
  * A Bloom filter implementation that can be serialised to a byte array for transmission
@@ -42,15 +37,13 @@ const BITS_PER_ENTRY = 10,
  * so this implementation does not perform its own hashing.
  */
 class BloomFilter {
-  constructor(arg) {
+  constructor (arg) {
     if (Array.isArray(arg)) {
       // arg is an array of SHA256 hashes in hexadecimal encoding
       this.numEntries = arg.length
       this.numBitsPerEntry = BITS_PER_ENTRY
       this.numProbes = NUM_PROBES
-      this.bits = new Uint8Array(
-        Math.ceil((this.numEntries * this.numBitsPerEntry) / 8)
-      )
+      this.bits = new Uint8Array(Math.ceil(this.numEntries * this.numBitsPerEntry / 8))
       for (let hash of arg) this.addHash(hash)
     } else if (arg instanceof Uint8Array) {
       if (arg.byteLength === 0) {
@@ -63,12 +56,10 @@ class BloomFilter {
       this.numEntries = decoder.readUint32()
       this.numBitsPerEntry = decoder.readUint32()
       this.numProbes = decoder.readUint32()
-      this.bits = decoder.readRawBytes(
-        Math.ceil((this.numEntries * this.numBitsPerEntry) / 8)
-      )
+      this.bits = decoder.readRawBytes(Math.ceil(this.numEntries * this.numBitsPerEntry / 8))
       }
     } else {
-      throw new TypeError("invalid argument")
+      throw new TypeError('invalid argument')
     }
   }
@@ -96,32 +87,12 @@ class BloomFilter {
    * http://www.ccis.northeastern.edu/home/pete/pub/bloom-filters-verification.pdf
    */
   getProbes(hash) {
-    const hashBytes = hexStringToBytes(hash),
-      modulo = 8 * this.bits.byteLength
-    if (hashBytes.byteLength !== 32)
-      throw new RangeError(`Not a 256-bit hash: ${hash}`)
+    const hashBytes = hexStringToBytes(hash), modulo = 8 * this.bits.byteLength
+    if (hashBytes.byteLength !== 32) throw new RangeError(`Not a 256-bit hash: ${hash}`)
     // on the next three lines, the right shift means interpret value as unsigned
-    let x =
-      ((hashBytes[0] |
-        (hashBytes[1] << 8) |
-        (hashBytes[2] << 16) |
-        (hashBytes[3] << 24)) >>>
-        0) %
-      modulo
-    let y =
-      ((hashBytes[4] |
-        (hashBytes[5] << 8) |
-        (hashBytes[6] << 16) |
-        (hashBytes[7] << 24)) >>>
-        0) %
-      modulo
-    let z =
-      ((hashBytes[8] |
-        (hashBytes[9] << 8) |
-        (hashBytes[10] << 16) |
-        (hashBytes[11] << 24)) >>>
-        0) %
-      modulo
+    let x = ((hashBytes[0] | hashBytes[1] << 8 | hashBytes[2] << 16 | hashBytes[3] << 24) >>> 0) % modulo
+    let y = ((hashBytes[4] | hashBytes[5] << 8 | hashBytes[6] << 16 | hashBytes[7] << 24) >>> 0) % modulo
+    let z = ((hashBytes[8] | hashBytes[9] << 8 | hashBytes[10] << 16 | hashBytes[11] << 24) >>> 0) % modulo
     const probes = [x]
     for (let i = 1; i < this.numProbes; i++) {
       x = (x + y) % modulo
@@ -158,14 +129,12 @@ class BloomFilter {
  * Encodes a sorted array of SHA-256 hashes (as hexadecimal strings) into a byte array.
  */
 function encodeHashes(encoder, hashes) {
-  if (!Array.isArray(hashes)) throw new TypeError("hashes must be an array")
+  if (!Array.isArray(hashes)) throw new TypeError('hashes must be an array')
   encoder.appendUint32(hashes.length)
   for (let i = 0; i < hashes.length; i++) {
-    if (i > 0 && hashes[i - 1] >= hashes[i])
-      throw new RangeError("hashes must be sorted")
+    if (i > 0 && hashes[i - 1] >= hashes[i]) throw new RangeError('hashes must be sorted')
     const bytes = hexStringToBytes(hashes[i])
-    if (bytes.byteLength !== HASH_SIZE)
-      throw new TypeError("heads hashes must be 256 bits")
+    if (bytes.byteLength !== HASH_SIZE) throw new TypeError('heads hashes must be 256 bits')
     encoder.appendRawBytes(bytes)
   }
 }
@@ -175,8 +144,7 @@ function encodeHashes(encoder, hashes) {
  * array of hex strings.
  */
 function decodeHashes(decoder) {
-  let length = decoder.readUint32(),
-    hashes = []
+  let length = decoder.readUint32(), hashes = []
   for (let i = 0; i < length; i++) {
     hashes.push(bytesToHexString(decoder.readRawBytes(HASH_SIZE)))
   }
@@ -216,11 +184,11 @@ function decodeSyncMessage(bytes) {
   const heads = decodeHashes(decoder)
   const need = decodeHashes(decoder)
   const haveCount = decoder.readUint32()
-  let message = { heads, need, have: [], changes: [] }
+  let message = {heads, need, have: [], changes: []}
   for (let i = 0; i < haveCount; i++) {
     const lastSync = decodeHashes(decoder)
     const bloom = decoder.readPrefixedBytes(decoder)
-    message.have.push({ lastSync, bloom })
+    message.have.push({lastSync, bloom})
   }
   const changeCount = decoder.readUint32()
   for (let i = 0; i < changeCount; i++) {
@@ -267,7 +235,7 @@ function decodeSyncState(bytes) {
 function makeBloomFilter(backend, lastSync) {
   const newChanges = Backend.getChanges(backend, lastSync)
   const hashes = newChanges.map(change => decodeChangeMeta(change, true).hash)
-  return { lastSync, bloom: new BloomFilter(hashes).bytes }
+  return {lastSync, bloom: new BloomFilter(hashes).bytes}
 }

 /**
@@ -278,26 +246,20 @@ function makeBloomFilter(backend, lastSync) {
  */
 function getChangesToSend(backend, have, need) {
   if (have.length === 0) {
-    return need
-      .map(hash => Backend.getChangeByHash(backend, hash))
-      .filter(change => change !== undefined)
+    return need.map(hash => Backend.getChangeByHash(backend, hash)).filter(change => change !== undefined)
   }

-  let lastSyncHashes = {},
-    bloomFilters = []
+  let lastSyncHashes = {}, bloomFilters = []
   for (let h of have) {
     for (let hash of h.lastSync) lastSyncHashes[hash] = true
     bloomFilters.push(new BloomFilter(h.bloom))
   }

   // Get all changes that were added since the last sync
-  const changes = Backend.getChanges(backend, Object.keys(lastSyncHashes)).map(
-    change => decodeChangeMeta(change, true)
-  )
+  const changes = Backend.getChanges(backend, Object.keys(lastSyncHashes))
+    .map(change => decodeChangeMeta(change, true))

-  let changeHashes = {},
-    dependents = {},
-    hashesToSend = {}
+  let changeHashes = {}, dependents = {}, hashesToSend = {}
   for (let change of changes) {
     changeHashes[change.hash] = true
@@ -331,8 +293,7 @@ function getChangesToSend(backend, have, need) {
   let changesToSend = []
   for (let hash of need) {
     hashesToSend[hash] = true
-    if (!changeHashes[hash]) {
-      // Change is not among those returned by getMissingChanges()?
+    if (!changeHashes[hash]) { // Change is not among those returned by getMissingChanges()?
       const change = Backend.getChangeByHash(backend, hash)
       if (change) changesToSend.push(change)
     }
@@ -357,7 +318,7 @@ function initSyncState() {
 }

 function compareArrays(a, b) {
-  return a.length === b.length && a.every((v, i) => v === b[i])
+  return (a.length === b.length) && a.every((v, i) => v === b[i])
 }

 /**
@@ -369,19 +330,10 @@ function generateSyncMessage(backend, syncState) {
     throw new Error("generateSyncMessage called with no Automerge document")
   }
   if (!syncState) {
-    throw new Error(
-      "generateSyncMessage requires a syncState, which can be created with initSyncState()"
-    )
+    throw new Error("generateSyncMessage requires a syncState, which can be created with initSyncState()")
   }

-  let {
-    sharedHeads,
-    lastSentHeads,
-    theirHeads,
-    theirNeed,
-    theirHave,
-    sentHashes,
-  } = syncState
+  let { sharedHeads, lastSentHeads, theirHeads, theirNeed, theirHave, sentHashes } = syncState
   const ourHeads = Backend.getHeads(backend)

   // Hashes to explicitly request from the remote peer: any missing dependencies of unapplied
@@ -405,28 +357,18 @@ function generateSyncMessage(backend, syncState) {
     const lastSync = theirHave[0].lastSync
     if (!lastSync.every(hash => Backend.getChangeByHash(backend, hash))) {
       // we need to queue them to send us a fresh sync message, the one they sent is uninteligible so we don't know what they need
-      const resetMsg = {
-        heads: ourHeads,
-        need: [],
-        have: [{ lastSync: [], bloom: new Uint8Array(0) }],
-        changes: [],
-      }
+      const resetMsg = {heads: ourHeads, need: [], have: [{ lastSync: [], bloom: new Uint8Array(0) }], changes: []}
       return [syncState, encodeSyncMessage(resetMsg)]
     }
   }

   // XXX: we should limit ourselves to only sending a subset of all the messages, probably limited by a total message size
   // these changes should ideally be RLE encoded but we haven't implemented that yet.
-  let changesToSend =
-    Array.isArray(theirHave) && Array.isArray(theirNeed)
-      ? getChangesToSend(backend, theirHave, theirNeed)
-      : []
+  let changesToSend = Array.isArray(theirHave) && Array.isArray(theirNeed) ? getChangesToSend(backend, theirHave, theirNeed) : []

   // If the heads are equal, we're in sync and don't need to do anything further
-  const headsUnchanged =
-    Array.isArray(lastSentHeads) && compareArrays(ourHeads, lastSentHeads)
-  const headsEqual =
-    Array.isArray(theirHeads) && compareArrays(ourHeads, theirHeads)
+  const headsUnchanged = Array.isArray(lastSentHeads) && compareArrays(ourHeads, lastSentHeads)
+  const headsEqual = Array.isArray(theirHeads) && compareArrays(ourHeads, theirHeads)
   if (headsUnchanged && headsEqual && changesToSend.length === 0) {
     // no need to send a sync message if we know we're synced!
     return [syncState, null]
@@ -434,19 +376,12 @@ function generateSyncMessage(backend, syncState) {

   // TODO: this recomputes the SHA-256 hash of each change; we should restructure this to avoid the
   // unnecessary recomputation
-  changesToSend = changesToSend.filter(
-    change => !sentHashes[decodeChangeMeta(change, true).hash]
-  )
+  changesToSend = changesToSend.filter(change => !sentHashes[decodeChangeMeta(change, true).hash])

   // Regular response to a sync message: send any changes that the other node
   // doesn't have. We leave the "have" field empty because the previous message
   // generated by `syncStart` already indicated what changes we have.
-  const syncMessage = {
-    heads: ourHeads,
-    have: ourHave,
-    need: ourNeed,
-    changes: changesToSend,
-  }
+  const syncMessage = {heads: ourHeads, have: ourHave, need: ourNeed, changes: changesToSend}
   if (changesToSend.length > 0) {
     sentHashes = copyObject(sentHashes)
     for (const change of changesToSend) {
@@ -454,10 +389,7 @@ function generateSyncMessage(backend, syncState) {
     }
   }

-  syncState = Object.assign({}, syncState, {
-    lastSentHeads: ourHeads,
-    sentHashes,
-  })
+  syncState = Object.assign({}, syncState, {lastSentHeads: ourHeads, sentHashes})
   return [syncState, encodeSyncMessage(syncMessage)]
 }
@@ -475,14 +407,13 @@ function generateSyncMessage(backend, syncState) {
  * another peer, that means that peer had those changes, and therefore we now both know about them.
  */
 function advanceHeads(myOldHeads, myNewHeads, ourOldSharedHeads) {
-  const newHeads = myNewHeads.filter(head => !myOldHeads.includes(head))
-  const commonHeads = ourOldSharedHeads.filter(head =>
-    myNewHeads.includes(head)
-  )
+  const newHeads = myNewHeads.filter((head) => !myOldHeads.includes(head))
+  const commonHeads = ourOldSharedHeads.filter((head) => myNewHeads.includes(head))
   const advancedHeads = [...new Set([...newHeads, ...commonHeads])].sort()
   return advancedHeads
 }

 /**
  * Given a backend, a message message and the state of our peer, apply any changes, update what
  * we believe about the peer, and (if there were applied changes) produce a patch for the frontend
@@ -492,13 +423,10 @@ function receiveSyncMessage(backend, oldSyncState, binaryMessage) {
     throw new Error("generateSyncMessage called with no Automerge document")
   }
   if (!oldSyncState) {
-    throw new Error(
-      "generateSyncMessage requires a syncState, which can be created with initSyncState()"
-    )
+    throw new Error("generateSyncMessage requires a syncState, which can be created with initSyncState()")
   }

-  let { sharedHeads, lastSentHeads, sentHashes } = oldSyncState,
-    patch = null
+  let { sharedHeads, lastSentHeads, sentHashes } = oldSyncState, patch = null
   const message = decodeSyncMessage(binaryMessage)
   const beforeHeads = Backend.getHeads(backend)
@@ -507,27 +435,18 @@ function receiveSyncMessage(backend, oldSyncState, binaryMessage) {
   // changes without applying them. The set of changes may also be incomplete if the sender decided
   // to break a large set of changes into chunks.
   if (message.changes.length > 0) {
-    ;[backend, patch] = Backend.applyChanges(backend, message.changes)
-    sharedHeads = advanceHeads(
-      beforeHeads,
-      Backend.getHeads(backend),
-      sharedHeads
-    )
+    [backend, patch] = Backend.applyChanges(backend, message.changes)
+    sharedHeads = advanceHeads(beforeHeads, Backend.getHeads(backend), sharedHeads)
   }

   // If heads are equal, indicate we don't need to send a response message
-  if (
-    message.changes.length === 0 &&
-    compareArrays(message.heads, beforeHeads)
-  ) {
+  if (message.changes.length === 0 && compareArrays(message.heads, beforeHeads)) {
     lastSentHeads = message.heads
   }

   // If all of the remote heads are known to us, that means either our heads are equal, or we are
   // ahead of the remote peer. In this case, take the remote heads to be our shared heads.
-  const knownHeads = message.heads.filter(head =>
-    Backend.getChangeByHash(backend, head)
-  )
+  const knownHeads = message.heads.filter(head => Backend.getChangeByHash(backend, head))
   if (knownHeads.length === message.heads.length) {
     sharedHeads = message.heads
     // If the remote peer has lost all its data, reset our state to perform a full resync
@@ -549,18 +468,14 @@ function receiveSyncMessage(backend, oldSyncState, binaryMessage) {
     theirHave: message.have, // the information we need to calculate the changes they need
     theirHeads: message.heads,
     theirNeed: message.need,
-    sentHashes,
+    sentHashes
   }
   return [backend, syncState, patch]
 }

 module.exports = {
-  receiveSyncMessage,
-  generateSyncMessage,
-  encodeSyncMessage,
-  decodeSyncMessage,
-  initSyncState,
-  encodeSyncState,
-  decodeSyncState,
-  BloomFilter, // BloomFilter is a private API, exported only for testing purposes
+  receiveSyncMessage, generateSyncMessage,
+  encodeSyncMessage, decodeSyncMessage,
+  initSyncState, encodeSyncState, decodeSyncState,
+  BloomFilter // BloomFilter is a private API, exported only for testing purposes
 }
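For orientation (not part of the diff): the exported functions are meant to be driven in a loop, each peer alternately calling generateSyncMessage (which returns [syncState, message], with message null once there is nothing left to say) and feeding received messages to receiveSyncMessage (which returns [backend, syncState, patch]). A minimal two-peer sketch, assuming a real Backend is wired in (the module above stubs it out) and that the module resolves as './sync':

const { initSyncState, generateSyncMessage, receiveSyncMessage } = require('./sync')

// Run both peers until neither side has anything left to send.
function syncToQuiescence(backendA, backendB) {
  let stateA = initSyncState()
  let stateB = initSyncState()
  while (true) {
    let msgA, msgB
    ;[stateA, msgA] = generateSyncMessage(backendA, stateA)
    if (msgA) [backendB, stateB] = receiveSyncMessage(backendB, stateB, msgA) // patch ignored here
    ;[stateB, msgB] = generateSyncMessage(backendB, stateB)
    if (msgB) [backendA, stateA] = receiveSyncMessage(backendA, stateA, msgB)
    if (!msgA && !msgB) break // both peers report they are in sync
  }
  return [backendA, backendB]
}

In a real transport each message would cross the network; the Bloom filters in the "have" field keep the number of round trips low even when the peers' histories have diverged.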
automerge-js/src/text.js (new file, 132 lines)
@@ -0,0 +1,132 @@
const { OBJECT_ID } = require('./constants')
const { isObject } = require('../src/common')

class Text {
  constructor (text) {
    const instance = Object.create(Text.prototype)
    if (typeof text === 'string') {
      instance.elems = [...text]
    } else if (Array.isArray(text)) {
      instance.elems = text
    } else if (text === undefined) {
      instance.elems = []
    } else {
      throw new TypeError(`Unsupported initial value for Text: ${text}`)
    }
    return instance
  }

  get length () {
    return this.elems.length
  }

  get (index) {
    return this.elems[index]
  }

  getElemId (index) {
    return undefined
  }

  /**
   * Iterates over the text elements character by character, including any
   * inline objects.
   */
  [Symbol.iterator] () {
    let elems = this.elems, index = -1
    return {
      next () {
        index += 1
        if (index < elems.length) {
          return {done: false, value: elems[index]}
        } else {
          return {done: true}
        }
      }
    }
  }

  /**
   * Returns the content of the Text object as a simple string, ignoring any
   * non-character elements.
   */
  toString() {
    // Concatting to a string is faster than creating an array and then
    // .join()ing for small (<100KB) arrays.
    // https://jsperf.com/join-vs-loop-w-type-test
    let str = ''
    for (const elem of this.elems) {
      if (typeof elem === 'string') str += elem
    }
    return str
  }

  /**
   * Returns the content of the Text object as a sequence of strings,
   * interleaved with non-character elements.
   *
   * For example, the value ['a', 'b', {x: 3}, 'c', 'd'] has spans:
   * => ['ab', {x: 3}, 'cd']
   */
  toSpans() {
    let spans = []
    let chars = ''
    for (const elem of this.elems) {
      if (typeof elem === 'string') {
        chars += elem
      } else {
        if (chars.length > 0) {
          spans.push(chars)
          chars = ''
        }
        spans.push(elem)
      }
    }
    if (chars.length > 0) {
      spans.push(chars)
    }
    return spans
  }

  /**
   * Returns the content of the Text object as a simple string, so that the
   * JSON serialization of an Automerge document represents text nicely.
   */
  toJSON() {
    return this.toString()
  }

  /**
   * Updates the list item at position `index` to a new value `value`.
   */
  set (index, value) {
    this.elems[index] = value
  }

  /**
   * Inserts new list items `values` starting at position `index`.
   */
  insertAt(index, ...values) {
    this.elems.splice(index, 0, ...values)
  }

  /**
   * Deletes `numDelete` list items starting at position `index`.
   * If `numDelete` is not given, one item is deleted.
   */
  deleteAt(index, numDelete = 1) {
    this.elems.splice(index, numDelete)
  }
}

// Read-only methods that can delegate to the JavaScript built-in array
for (let method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes',
                    'indexOf', 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight',
                    'slice', 'some', 'toLocaleString']) {
  Text.prototype[method] = function (...args) {
    const array = [...this]
    return array[method](...args)
  }
}

module.exports = { Text }
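Usage sketch for the Text class above (standalone; it does not need an Automerge document):

const { Text } = require('./text')

const t = new Text('hello')
t.insertAt(5, ' ', 'w', 'o', 'r', 'l', 'd') // splice characters in
t.set(0, 'H')
console.log(t.toString())                 // 'Hello world'
t.insertAt(5, { bold: true })             // an inline non-character element
console.log(t.toSpans())                  // [ 'Hello', { bold: true }, ' world' ]
console.log(JSON.stringify({ text: t }))  // '{"text":"Hello world"}' via toJSON()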
automerge-js/src/uuid.js (new file, 16 lines)
@@ -0,0 +1,16 @@
const { v4: uuid } = require('uuid')

function defaultFactory() {
  return uuid().replace(/-/g, '')
}

let factory = defaultFactory

function makeUuid() {
  return factory()
}

makeUuid.setFactory = newFactory => { factory = newFactory }
makeUuid.reset = () => { factory = defaultFactory }

module.exports = makeUuid
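Usage sketch for the uuid module above; setFactory is the hook tests can use to make IDs deterministic:

const uuid = require('./uuid')

console.log(uuid())   // 32 lowercase hex chars (a v4 UUID with the dashes stripped)
let n = 0
uuid.setFactory(() => `actor${++n}`) // deterministic IDs for a test run
console.log(uuid())   // 'actor1'
uuid.reset()          // restore the random default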
automerge-js/test/basic_test.js (new file, 164 lines)
@@ -0,0 +1,164 @@
const assert = require('assert')
const util = require('util')
const Automerge = require('..')

describe('Automerge', () => {
  describe('basics', () => {
    it('should init clone and free', () => {
      let doc1 = Automerge.init()
      let doc2 = Automerge.clone(doc1);
    })

    it('handle basic set and read on root object', () => {
      let doc1 = Automerge.init()
      let doc2 = Automerge.change(doc1, (d) => {
        d.hello = "world"
        d.big = "little"
        d.zip = "zop"
        d.app = "dap"
        assert.deepEqual(d, { hello: "world", big: "little", zip: "zop", app: "dap" })
      })
      assert.deepEqual(doc2, { hello: "world", big: "little", zip: "zop", app: "dap" })
    })

    it('handle basic sets over many changes', () => {
      let doc1 = Automerge.init()
      let timestamp = new Date();
      let counter = new Automerge.Counter(100);
      let bytes = new Uint8Array([10,11,12]);
      let doc2 = Automerge.change(doc1, (d) => {
        d.hello = "world"
      })
      let doc3 = Automerge.change(doc2, (d) => {
        d.counter1 = counter
      })
      let doc4 = Automerge.change(doc3, (d) => {
        d.timestamp1 = timestamp
      })
      let doc5 = Automerge.change(doc4, (d) => {
        d.app = null
      })
      let doc6 = Automerge.change(doc5, (d) => {
        d.bytes1 = bytes
      })
      let doc7 = Automerge.change(doc6, (d) => {
        d.uint = new Automerge.Uint(1)
        d.int = new Automerge.Int(-1)
        d.float64 = new Automerge.Float64(5.5)
        d.number1 = 100
        d.number2 = -45.67
        d.true = true
        d.false = false
      })

      assert.deepEqual(doc7, { hello: "world", true: true, false: false, int: -1, uint: 1, float64: 5.5, number1: 100, number2: -45.67, counter1: counter, timestamp1: timestamp, bytes1: bytes, app: null })

      let changes = Automerge.getAllChanges(doc7)
      let t1 = Automerge.init()
      let [t2] = Automerge.applyChanges(t1, changes)
      assert.deepEqual(doc7, t2)
    })

    it('handle overwrites to values', () => {
      let doc1 = Automerge.init()
      let doc2 = Automerge.change(doc1, (d) => {
        d.hello = "world1"
      })
      let doc3 = Automerge.change(doc2, (d) => {
        d.hello = "world2"
      })
      let doc4 = Automerge.change(doc3, (d) => {
        d.hello = "world3"
      })
      let doc5 = Automerge.change(doc4, (d) => {
        d.hello = "world4"
      })
      assert.deepEqual(doc5, { hello: "world4" })
    })

    it('handle set with object value', () => {
      let doc1 = Automerge.init()
      let doc2 = Automerge.change(doc1, (d) => {
        d.subobj = { hello: "world", subsubobj: { zip: "zop" } }
      })
      assert.deepEqual(doc2, { subobj: { hello: "world", subsubobj: { zip: "zop" } } })
    })

    it('handle simple list creation', () => {
      let doc1 = Automerge.init()
      let doc2 = Automerge.change(doc1, (d) => d.list = [])
      assert.deepEqual(doc2, { list: [] })
    })

    it('handle simple lists', () => {
      let doc1 = Automerge.init()
      let doc2 = Automerge.change(doc1, (d) => {
        d.list = [ 1, 2, 3 ]
      })
      assert.deepEqual(doc2.list.length, 3)
      assert.deepEqual(doc2.list[0], 1)
      assert.deepEqual(doc2.list[1], 2)
      assert.deepEqual(doc2.list[2], 3)
      assert.deepEqual(doc2, { list: [1,2,3] })
      // assert.deepStrictEqual(Automerge.toJS(doc2), { list: [1,2,3] })

      let doc3 = Automerge.change(doc2, (d) => {
        d.list[1] = "a"
      })

      assert.deepEqual(doc3.list.length, 3)
      assert.deepEqual(doc3.list[0], 1)
      assert.deepEqual(doc3.list[1], "a")
      assert.deepEqual(doc3.list[2], 3)
      assert.deepEqual(doc3, { list: [1,"a",3] })
    })
    it('handle simple lists', () => {
      let doc1 = Automerge.init()
      let doc2 = Automerge.change(doc1, (d) => {
        d.list = [ 1, 2, 3 ]
      })
      let changes = Automerge.getChanges(doc1, doc2)
      let docB1 = Automerge.init()
      let [docB2] = Automerge.applyChanges(docB1, changes)
      assert.deepEqual(docB2, doc2);
    })
    it('handle text', () => {
      let doc1 = Automerge.init()
      let tmp = new Automerge.Text("hello")
      let doc2 = Automerge.change(doc1, (d) => {
        d.list = new Automerge.Text("hello")
        d.list.insertAt(2, "Z")
      })
      let changes = Automerge.getChanges(doc1, doc2)
      let docB1 = Automerge.init()
      let [docB2] = Automerge.applyChanges(docB1, changes)
      assert.deepEqual(docB2, doc2);
    })

    it('have many list methods', () => {
      let doc1 = Automerge.from({ list: [1,2,3] })
      assert.deepEqual(doc1, { list: [1,2,3] });
      let doc2 = Automerge.change(doc1, (d) => {
        d.list.splice(1,1,9,10)
      })
      assert.deepEqual(doc2, { list: [1,9,10,3] });
      let doc3 = Automerge.change(doc2, (d) => {
        d.list.push(11,12)
      })
      assert.deepEqual(doc3, { list: [1,9,10,3,11,12] });
      let doc4 = Automerge.change(doc3, (d) => {
        d.list.unshift(2,2)
      })
      assert.deepEqual(doc4, { list: [2,2,1,9,10,3,11,12] });
      let doc5 = Automerge.change(doc4, (d) => {
        d.list.shift()
      })
      assert.deepEqual(doc5, { list: [2,1,9,10,3,11,12] });
      let doc6 = Automerge.change(doc5, (d) => {
        d.list.insertAt(3,100,101)
      })
      assert.deepEqual(doc6, { list: [2,1,9,100,101,10,3,11,12] });
    })
  })
})
automerge-js/test/columnar_test.js (new file, 97 lines)
@@ -0,0 +1,97 @@
const assert = require('assert')
const { checkEncoded } = require('./helpers')
const Automerge = require('..')
const { encodeChange, decodeChange } = Automerge

describe('change encoding', () => {
  it('should encode text edits', () => {
    /*
    const change1 = {actor: 'aaaa', seq: 1, startOp: 1, time: 9, message: '', deps: [], ops: [
      {action: 'makeText', obj: '_root', key: 'text', insert: false, pred: []},
      {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'h', pred: []},
      {action: 'del', obj: '1@aaaa', elemId: '2@aaaa', insert: false, pred: ['2@aaaa']},
      {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'H', pred: []},
      {action: 'set', obj: '1@aaaa', elemId: '4@aaaa', insert: true, value: 'i', pred: []}
    ]}
    */
    const change1 = {actor: 'aaaa', seq: 1, startOp: 1, time: 9, message: null, deps: [], ops: [
      {action: 'makeText', obj: '_root', key: 'text', pred: []},
      {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'h', pred: []},
      {action: 'del', obj: '1@aaaa', elemId: '2@aaaa', pred: ['2@aaaa']},
      {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'H', pred: []},
      {action: 'set', obj: '1@aaaa', elemId: '4@aaaa', insert: true, value: 'i', pred: []}
    ]}
    checkEncoded(encodeChange(change1), [
      0x85, 0x6f, 0x4a, 0x83, // magic bytes
      0xe2, 0xbd, 0xfb, 0xf5, // checksum
      1, 94, 0, 2, 0xaa, 0xaa, // chunkType: change, length, deps, actor 'aaaa'
      1, 1, 9, 0, 0, // seq, startOp, time, message, actor list
      12, 0x01, 4, 0x02, 4, // column count, objActor, objCtr
      0x11, 8, 0x13, 7, 0x15, 8, // keyActor, keyCtr, keyStr
      0x34, 4, 0x42, 6, // insert, action
      0x56, 6, 0x57, 3, // valLen, valRaw
      0x70, 6, 0x71, 2, 0x73, 2, // predNum, predActor, predCtr
      0, 1, 4, 0, // objActor column: null, 0, 0, 0, 0
      0, 1, 4, 1, // objCtr column: null, 1, 1, 1, 1
      0, 2, 0x7f, 0, 0, 1, 0x7f, 0, // keyActor column: null, null, 0, null, 0
      0, 1, 0x7c, 0, 2, 0x7e, 4, // keyCtr column: null, 0, 2, 0, 4
      0x7f, 4, 0x74, 0x65, 0x78, 0x74, 0, 4, // keyStr column: 'text', null, null, null, null
      1, 1, 1, 2, // insert column: false, true, false, true, true
      0x7d, 4, 1, 3, 2, 1, // action column: makeText, set, del, set, set
      0x7d, 0, 0x16, 0, 2, 0x16, // valLen column: 0, 0x16, 0, 0x16, 0x16
      0x68, 0x48, 0x69, // valRaw column: 'h', 'H', 'i'
      2, 0, 0x7f, 1, 2, 0, // predNum column: 0, 0, 1, 0, 0
      0x7f, 0, // predActor column: 0
      0x7f, 2 // predCtr column: 2
    ])
    const decoded = decodeChange(encodeChange(change1))
    assert.deepStrictEqual(decoded, Object.assign({hash: decoded.hash}, change1))
  })

  // FIXME - skipping this b/c it was never implemented in the rust impl and isnt trivial
  /*
  it.skip('should require strict ordering of preds', () => {
    const change = new Uint8Array([
      133, 111, 74, 131, 31, 229, 112, 44, 1, 105, 1, 58, 30, 190, 100, 253, 180, 180, 66, 49, 126,
      81, 142, 10, 3, 35, 140, 189, 231, 34, 145, 57, 66, 23, 224, 149, 64, 97, 88, 140, 168, 194,
      229, 4, 244, 209, 58, 138, 67, 140, 1, 152, 236, 250, 2, 0, 1, 4, 55, 234, 66, 242, 8, 21, 11,
      52, 1, 66, 2, 86, 3, 87, 10, 112, 2, 113, 3, 115, 4, 127, 9, 99, 111, 109, 109, 111, 110, 86,
      97, 114, 1, 127, 1, 127, 166, 1, 52, 48, 57, 49, 52, 57, 52, 53, 56, 50, 127, 2, 126, 0, 1,
      126, 139, 1, 0
    ])
    assert.throws(() => { decodeChange(change) }, /operation IDs are not in ascending order/)
  })
  */

  describe('with trailing bytes', () => {
    let change = new Uint8Array([
      0x85, 0x6f, 0x4a, 0x83, // magic bytes
      0xb2, 0x98, 0x9e, 0xa9, // checksum
      1, 61, 0, 2, 0x12, 0x34, // chunkType: change, length, deps, actor '1234'
      1, 1, 252, 250, 220, 255, 5, // seq, startOp, time
      14, 73, 110, 105, 116, 105, 97, 108, 105, 122, 97, 116, 105, 111, 110, // message: 'Initialization'
      0, 6, // actor list, column count
      0x15, 3, 0x34, 1, 0x42, 2, // keyStr, insert, action
      0x56, 2, 0x57, 1, 0x70, 2, // valLen, valRaw, predNum
      0x7f, 1, 0x78, // keyStr: 'x'
      1, // insert: false
      0x7f, 1, // action: set
      0x7f, 19, // valLen: 1 byte of type uint
      1, // valRaw: 1
      0x7f, 0, // predNum: 0
      0, 1, 2, 3, 4, 5, 6, 7, 8, 9 // 10 trailing bytes
    ])

    it('should allow decoding and re-encoding', () => {
      // NOTE: This calls the JavaScript encoding and decoding functions, even when the WebAssembly
      // backend is loaded. Should the wasm backend export its own functions for testing?
      checkEncoded(change, encodeChange(decodeChange(change)))
    })

    it('should be preserved in document encoding', () => {
      const [doc] = Automerge.applyChanges(Automerge.init(), [change])
      const [reconstructed] = Automerge.getAllChanges(Automerge.load(Automerge.save(doc)))
      checkEncoded(change, reconstructed)
    })
  })
})
@@ -1,21 +1,16 @@
-import * as assert from "assert"
-import { Encoder } from "./legacy/encoding"
+const assert = require('assert')
+const { Encoder } = require('../src/encoding')
 
 // Assertion that succeeds if the first argument deepStrictEquals at least one of the
 // subsequent arguments (but we don't care which one)
-export function assertEqualsOneOf(actual, ...expected) {
+function assertEqualsOneOf(actual, ...expected) {
   assert(expected.length > 0)
   for (let i = 0; i < expected.length; i++) {
     try {
       assert.deepStrictEqual(actual, expected[i])
       return // if we get here without an exception, that means success
     } catch (e) {
-      if (e instanceof assert.AssertionError) {
-        if (!e.name.match(/^AssertionError/) || i === expected.length - 1)
-          throw e
-      } else {
-        throw e
-      }
+      if (!e.name.match(/^AssertionError/) || i === expected.length - 1) throw e
     }
   }
 }
 
@@ -24,13 +19,14 @@ export function assertEqualsOneOf(actual, ...expected) {
  * Asserts that the byte array maintained by `encoder` contains the same byte
  * sequence as the array `bytes`.
  */
-export function checkEncoded(encoder, bytes, detail?) {
-  const encoded = encoder instanceof Encoder ? encoder.buffer : encoder
+function checkEncoded(encoder, bytes, detail) {
+  const encoded = (encoder instanceof Encoder) ? encoder.buffer : encoder
   const expected = new Uint8Array(bytes)
-  const message =
-    (detail ? `${detail}: ` : "") + `${encoded} expected to equal ${expected}`
+  const message = (detail ? `${detail}: ` : '') + `${encoded} expected to equal ${expected}`
   assert(encoded.byteLength === expected.byteLength, message)
   for (let i = 0; i < encoded.byteLength; i++) {
     assert(encoded[i] === expected[i], message)
   }
 }
+
+module.exports = { assertEqualsOneOf, checkEncoded }
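For reference, a minimal usage sketch of these two helpers, assuming the branch's CommonJS layout (the byte values are arbitrary):

const { assertEqualsOneOf, checkEncoded } = require('./helpers')

// assertEqualsOneOf passes if the actual value deepStrictEquals any one of the
// candidates, which is useful when a merge may legitimately order concurrent
// insertions either way
assertEqualsOneOf('abcxyz', 'abcxyz', 'xyzabc')

// checkEncoded accepts either an Encoder or a plain byte array as its first
// argument and compares it byte-for-byte against the expected sequence
checkEncoded(new Uint8Array([1, 2, 3]), [1, 2, 3], 'bytes should match')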
1419 automerge-js/test/legacy_tests.js Normal file
File diff suppressed because it is too large
697 automerge-js/test/text_test.js Normal file
@@ -0,0 +1,697 @@
const assert = require('assert')
const Automerge = require('..')
const { assertEqualsOneOf } = require('./helpers')

function attributeStateToAttributes(accumulatedAttributes) {
  const attributes = {}
  Object.entries(accumulatedAttributes).forEach(([key, values]) => {
    if (values.length && values[0] !== null) {
      attributes[key] = values[0]
    }
  })
  return attributes
}

function isEquivalent(a, b) {
  const aProps = Object.getOwnPropertyNames(a)
  const bProps = Object.getOwnPropertyNames(b)

  if (aProps.length != bProps.length) {
    return false
  }

  for (let i = 0; i < aProps.length; i++) {
    const propName = aProps[i]
    if (a[propName] !== b[propName]) {
      return false
    }
  }

  return true
}

function isControlMarker(pseudoCharacter) {
  return typeof pseudoCharacter === 'object' && pseudoCharacter.attributes
}

function opFrom(text, attributes) {
  let op = { insert: text }
  if (Object.keys(attributes).length > 0) {
    op.attributes = attributes
  }
  return op
}

function accumulateAttributes(span, accumulatedAttributes) {
  Object.entries(span).forEach(([key, value]) => {
    if (!accumulatedAttributes[key]) {
      accumulatedAttributes[key] = []
    }
    if (value === null) {
      if (accumulatedAttributes[key].length === 0 || accumulatedAttributes[key] === null) {
        accumulatedAttributes[key].unshift(null)
      } else {
        accumulatedAttributes[key].shift()
      }
    } else {
      if (accumulatedAttributes[key][0] === null) {
        accumulatedAttributes[key].shift()
      } else {
        accumulatedAttributes[key].unshift(value)
      }
    }
  })
  return accumulatedAttributes
}

function automergeTextToDeltaDoc(text) {
  let ops = []
  let controlState = {}
  let currentString = ""
  let attributes = {}
  text.toSpans().forEach((span) => {
    if (isControlMarker(span)) {
      controlState = accumulateAttributes(span.attributes, controlState)
    } else {
      let next = attributeStateToAttributes(controlState)

      // if the next span has the same calculated attributes as the current span
      // don't bother outputting it as a separate span, just let it ride
      if (typeof span === 'string' && isEquivalent(next, attributes)) {
        currentString = currentString + span
        return
      }

      if (currentString) {
        ops.push(opFrom(currentString, attributes))
      }

      // If we've got a string, we might be able to concatenate it to another
      // same-attributed-string, so remember it and go to the next iteration.
      if (typeof span === 'string') {
        currentString = span
        attributes = next
      } else {
        // otherwise we have an embed "character" and should output it immediately.
        // embeds are always one-"character" in length.
        ops.push(opFrom(span, next))
        currentString = ''
        attributes = {}
      }
    }
  })

  // at the end, flush any accumulated string out
  if (currentString) {
    ops.push(opFrom(currentString, attributes))
  }

  return ops
}

function inverseAttributes(attributes) {
  let invertedAttributes = {}
  Object.keys(attributes).forEach((key) => {
    invertedAttributes[key] = null
  })
  return invertedAttributes
}

function applyDeleteOp(text, offset, op) {
  let length = op.delete
  while (length > 0) {
    if (isControlMarker(text.get(offset))) {
      offset += 1
    } else {
      // we need to not delete control characters, but we do delete embed characters
      text.deleteAt(offset, 1)
      length -= 1
    }
  }
  return [text, offset]
}

function applyRetainOp(text, offset, op) {
  let length = op.retain

  if (op.attributes) {
    text.insertAt(offset, { attributes: op.attributes })
    offset += 1
  }

  while (length > 0) {
    const char = text.get(offset)
    offset += 1
    if (!isControlMarker(char)) {
      length -= 1
    }
  }

  if (op.attributes) {
    text.insertAt(offset, { attributes: inverseAttributes(op.attributes) })
    offset += 1
  }

  return [text, offset]
}

function applyInsertOp(text, offset, op) {
  let originalOffset = offset

  if (typeof op.insert === 'string') {
    text.insertAt(offset, ...op.insert.split(''))
    offset += op.insert.length
  } else {
    // we have an embed or something similar
    text.insertAt(offset, op.insert)
    offset += 1
  }

  if (op.attributes) {
    text.insertAt(originalOffset, { attributes: op.attributes })
    offset += 1
  }
  if (op.attributes) {
    text.insertAt(offset, { attributes: inverseAttributes(op.attributes) })
    offset += 1
  }
  return [text, offset]
}

// XXX: uhhhhh, why can't I pass in text?
function applyDeltaDocToAutomergeText(delta, doc) {
  let offset = 0

  delta.forEach(op => {
    if (op.retain) {
      [, offset] = applyRetainOp(doc.text, offset, op)
    } else if (op.delete) {
      [, offset] = applyDeleteOp(doc.text, offset, op)
    } else if (op.insert) {
      [, offset] = applyInsertOp(doc.text, offset, op)
    }
  })
}

describe('Automerge.Text', () => {
  let s1, s2
  beforeEach(() => {
    s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text())
    s2 = Automerge.merge(Automerge.init(), s1)
  })

  it('should support insertion', () => {
    s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a'))
    assert.strictEqual(s1.text.length, 1)
    assert.strictEqual(s1.text.get(0), 'a')
    assert.strictEqual(s1.text.toString(), 'a')
    //assert.strictEqual(s1.text.getElemId(0), `2@${Automerge.getActorId(s1)}`)
  })

  it('should support deletion', () => {
    s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', 'b', 'c'))
    s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 1))
    assert.strictEqual(s1.text.length, 2)
    assert.strictEqual(s1.text.get(0), 'a')
    assert.strictEqual(s1.text.get(1), 'c')
    assert.strictEqual(s1.text.toString(), 'ac')
  })

  it("should support implicit and explicit deletion", () => {
    s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a", "b", "c"))
    s1 = Automerge.change(s1, doc => doc.text.deleteAt(1))
    s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 0))
    assert.strictEqual(s1.text.length, 2)
    assert.strictEqual(s1.text.get(0), "a")
    assert.strictEqual(s1.text.get(1), "c")
    assert.strictEqual(s1.text.toString(), "ac")
  })

  it('should handle concurrent insertion', () => {
    s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', 'b', 'c'))
    s2 = Automerge.change(s2, doc => doc.text.insertAt(0, 'x', 'y', 'z'))
    s1 = Automerge.merge(s1, s2)
    assert.strictEqual(s1.text.length, 6)
    assertEqualsOneOf(s1.text.toString(), 'abcxyz', 'xyzabc')
    assertEqualsOneOf(s1.text.join(''), 'abcxyz', 'xyzabc')
  })

  it('should handle text and other ops in the same change', () => {
    s1 = Automerge.change(s1, doc => {
      doc.foo = 'bar'
      doc.text.insertAt(0, 'a')
    })
    assert.strictEqual(s1.foo, 'bar')
    assert.strictEqual(s1.text.toString(), 'a')
    assert.strictEqual(s1.text.join(''), 'a')
  })

  it('should serialize to JSON as a simple string', () => {
    s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', '"', 'b'))
    assert.strictEqual(JSON.stringify(s1), '{"text":"a\\"b"}')
  })

  it('should allow modification before an object is assigned to a document', () => {
    s1 = Automerge.change(Automerge.init(), doc => {
      const text = new Automerge.Text()
      text.insertAt(0, 'a', 'b', 'c', 'd')
      text.deleteAt(2)
      doc.text = text
      assert.strictEqual(doc.text.toString(), 'abd')
      assert.strictEqual(doc.text.join(''), 'abd')
    })
    assert.strictEqual(s1.text.toString(), 'abd')
    assert.strictEqual(s1.text.join(''), 'abd')
  })

  it('should allow modification after an object is assigned to a document', () => {
    s1 = Automerge.change(Automerge.init(), doc => {
      const text = new Automerge.Text()
      doc.text = text
      doc.text.insertAt(0, 'a', 'b', 'c', 'd')
      doc.text.deleteAt(2)
      assert.strictEqual(doc.text.toString(), 'abd')
      assert.strictEqual(doc.text.join(''), 'abd')
    })
    assert.strictEqual(s1.text.join(''), 'abd')
  })

  it('should not allow modification outside of a change callback', () => {
    assert.throws(() => s1.text.insertAt(0, 'a'), /object cannot be modified outside of a change block/)
  })

  describe('with initial value', () => {
    it('should accept a string as initial value', () => {
      let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text('init'))
      assert.strictEqual(s1.text.length, 4)
      assert.strictEqual(s1.text.get(0), 'i')
      assert.strictEqual(s1.text.get(1), 'n')
      assert.strictEqual(s1.text.get(2), 'i')
      assert.strictEqual(s1.text.get(3), 't')
      assert.strictEqual(s1.text.toString(), 'init')
    })

    it('should accept an array as initial value', () => {
      let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text(['i', 'n', 'i', 't']))
      assert.strictEqual(s1.text.length, 4)
      assert.strictEqual(s1.text.get(0), 'i')
      assert.strictEqual(s1.text.get(1), 'n')
      assert.strictEqual(s1.text.get(2), 'i')
      assert.strictEqual(s1.text.get(3), 't')
      assert.strictEqual(s1.text.toString(), 'init')
    })

    it('should initialize text in Automerge.from()', () => {
      let s1 = Automerge.from({text: new Automerge.Text('init')})
      assert.strictEqual(s1.text.length, 4)
      assert.strictEqual(s1.text.get(0), 'i')
      assert.strictEqual(s1.text.get(1), 'n')
      assert.strictEqual(s1.text.get(2), 'i')
      assert.strictEqual(s1.text.get(3), 't')
      assert.strictEqual(s1.text.toString(), 'init')
    })

    it('should encode the initial value as a change', () => {
      const s1 = Automerge.from({text: new Automerge.Text('init')})
      const changes = Automerge.getAllChanges(s1)
      assert.strictEqual(changes.length, 1)
      const [s2] = Automerge.applyChanges(Automerge.init(), changes)
      assert.strictEqual(s2.text instanceof Automerge.Text, true)
      assert.strictEqual(s2.text.toString(), 'init')
      assert.strictEqual(s2.text.join(''), 'init')
    })

    it('should allow immediate access to the value', () => {
      Automerge.change(Automerge.init(), doc => {
        const text = new Automerge.Text('init')
        assert.strictEqual(text.length, 4)
        assert.strictEqual(text.get(0), 'i')
        assert.strictEqual(text.toString(), 'init')
        doc.text = text
        assert.strictEqual(doc.text.length, 4)
        assert.strictEqual(doc.text.get(0), 'i')
        assert.strictEqual(doc.text.toString(), 'init')
      })
    })

    it('should allow pre-assignment modification of the initial value', () => {
      let s1 = Automerge.change(Automerge.init(), doc => {
        const text = new Automerge.Text('init')
        text.deleteAt(3)
        assert.strictEqual(text.join(''), 'ini')
        doc.text = text
        assert.strictEqual(doc.text.join(''), 'ini')
        assert.strictEqual(doc.text.toString(), 'ini')
      })
      assert.strictEqual(s1.text.toString(), 'ini')
      assert.strictEqual(s1.text.join(''), 'ini')
    })

    it('should allow post-assignment modification of the initial value', () => {
      let s1 = Automerge.change(Automerge.init(), doc => {
        const text = new Automerge.Text('init')
        doc.text = text
        doc.text.deleteAt(0)
        doc.text.insertAt(0, 'I')
        assert.strictEqual(doc.text.join(''), 'Init')
        assert.strictEqual(doc.text.toString(), 'Init')
      })
      assert.strictEqual(s1.text.join(''), 'Init')
      assert.strictEqual(s1.text.toString(), 'Init')
    })
  })

  describe('non-textual control characters', () => {
    let s1
    beforeEach(() => {
      s1 = Automerge.change(Automerge.init(), doc => {
        doc.text = new Automerge.Text()
        doc.text.insertAt(0, 'a')
        doc.text.insertAt(1, { attribute: 'bold' })
      })
    })

    it('should allow fetching non-textual characters', () => {
      assert.deepEqual(s1.text.get(1), { attribute: 'bold' })
      //assert.strictEqual(s1.text.getElemId(1), `3@${Automerge.getActorId(s1)}`)
    })

    it('should include control characters in string length', () => {
      assert.strictEqual(s1.text.length, 2)
      assert.strictEqual(s1.text.get(0), 'a')
    })

    it('should exclude control characters from toString()', () => {
      assert.strictEqual(s1.text.toString(), 'a')
    })

    it('should allow control characters to be updated', () => {
      const s2 = Automerge.change(s1, doc => doc.text.get(1).attribute = 'italic')
      const s3 = Automerge.load(Automerge.save(s2))
      assert.strictEqual(s1.text.get(1).attribute, 'bold')
      assert.strictEqual(s2.text.get(1).attribute, 'italic')
      assert.strictEqual(s3.text.get(1).attribute, 'italic')
    })

    describe('spans interface to Text', () => {
      it('should return a simple string as a single span', () => {
        let s1 = Automerge.change(Automerge.init(), doc => {
          doc.text = new Automerge.Text('hello world')
        })
        assert.deepEqual(s1.text.toSpans(), ['hello world'])
      })
      it('should return an empty string as an empty array', () => {
        let s1 = Automerge.change(Automerge.init(), doc => {
          doc.text = new Automerge.Text()
        })
        assert.deepEqual(s1.text.toSpans(), [])
      })
      it('should split a span at a control character', () => {
        let s1 = Automerge.change(Automerge.init(), doc => {
          doc.text = new Automerge.Text('hello world')
          doc.text.insertAt(5, { attributes: { bold: true } })
        })
        assert.deepEqual(s1.text.toSpans(),
          ['hello', { attributes: { bold: true } }, ' world'])
      })
      it('should allow consecutive control characters', () => {
        let s1 = Automerge.change(Automerge.init(), doc => {
          doc.text = new Automerge.Text('hello world')
          doc.text.insertAt(5, { attributes: { bold: true } })
          doc.text.insertAt(6, { attributes: { italic: true } })
        })
        assert.deepEqual(s1.text.toSpans(),
          ['hello',
            { attributes: { bold: true } },
            { attributes: { italic: true } },
            ' world'
          ])
      })
      it('should allow non-consecutive control characters', () => {
        let s1 = Automerge.change(Automerge.init(), doc => {
          doc.text = new Automerge.Text('hello world')
          doc.text.insertAt(5, { attributes: { bold: true } })
          doc.text.insertAt(12, { attributes: { italic: true } })
        })
        assert.deepEqual(s1.text.toSpans(),
          ['hello',
            { attributes: { bold: true } },
            ' world',
            { attributes: { italic: true } }
          ])
      })

      it('should be convertible into a Quill delta', () => {
        let s1 = Automerge.change(Automerge.init(), doc => {
          doc.text = new Automerge.Text('Gandalf the Grey')
          doc.text.insertAt(0, { attributes: { bold: true } })
          doc.text.insertAt(7 + 1, { attributes: { bold: null } })
          doc.text.insertAt(12 + 2, { attributes: { color: '#cccccc' } })
        })

        let deltaDoc = automergeTextToDeltaDoc(s1.text)

        // From https://quilljs.com/docs/delta/
        let expectedDoc = [
          { insert: 'Gandalf', attributes: { bold: true } },
          { insert: ' the ' },
          { insert: 'Grey', attributes: { color: '#cccccc' } }
        ]

        assert.deepEqual(deltaDoc, expectedDoc)
      })

      it('should support embeds', () => {
        let s1 = Automerge.change(Automerge.init(), doc => {
          doc.text = new Automerge.Text('')
          doc.text.insertAt(0, { attributes: { link: 'https://quilljs.com' } })
          doc.text.insertAt(1, {
            image: 'https://quilljs.com/assets/images/icon.png'
          })
          doc.text.insertAt(2, { attributes: { link: null } })
        })

        let deltaDoc = automergeTextToDeltaDoc(s1.text)

        // From https://quilljs.com/docs/delta/
        let expectedDoc = [{
          // An image link
          insert: {
            image: 'https://quilljs.com/assets/images/icon.png'
          },
          attributes: {
            link: 'https://quilljs.com'
          }
        }]

        assert.deepEqual(deltaDoc, expectedDoc)
      })

      it('should handle concurrent overlapping spans', () => {
        let s1 = Automerge.change(Automerge.init(), doc => {
          doc.text = new Automerge.Text('Gandalf the Grey')
        })

        let s2 = Automerge.merge(Automerge.init(), s1)

        let s3 = Automerge.change(s1, doc => {
          doc.text.insertAt(8, { attributes: { bold: true } })
          doc.text.insertAt(16 + 1, { attributes: { bold: null } })
        })

        let s4 = Automerge.change(s2, doc => {
          doc.text.insertAt(0, { attributes: { bold: true } })
          doc.text.insertAt(11 + 1, { attributes: { bold: null } })
        })

        let merged = Automerge.merge(s3, s4)

        let deltaDoc = automergeTextToDeltaDoc(merged.text)

        // From https://quilljs.com/docs/delta/
        let expectedDoc = [
          { insert: 'Gandalf the Grey', attributes: { bold: true } },
        ]

        assert.deepEqual(deltaDoc, expectedDoc)
      })

      it('should handle debolding spans', () => {
        let s1 = Automerge.change(Automerge.init(), doc => {
          doc.text = new Automerge.Text('Gandalf the Grey')
        })

        let s2 = Automerge.merge(Automerge.init(), s1)

        let s3 = Automerge.change(s1, doc => {
          doc.text.insertAt(0, { attributes: { bold: true } })
          doc.text.insertAt(16 + 1, { attributes: { bold: null } })
        })

        let s4 = Automerge.change(s2, doc => {
          doc.text.insertAt(8, { attributes: { bold: null } })
          doc.text.insertAt(11 + 1, { attributes: { bold: true } })
        })

        let merged = Automerge.merge(s3, s4)

        let deltaDoc = automergeTextToDeltaDoc(merged.text)

        // From https://quilljs.com/docs/delta/
        let expectedDoc = [
          { insert: 'Gandalf ', attributes: { bold: true } },
          { insert: 'the' },
          { insert: ' Grey', attributes: { bold: true } },
        ]

        assert.deepEqual(deltaDoc, expectedDoc)
      })

      // xxx: how would this work for colors?
      it('should handle destyling across destyled spans', () => {
        let s1 = Automerge.change(Automerge.init(), doc => {
          doc.text = new Automerge.Text('Gandalf the Grey')
        })

        let s2 = Automerge.merge(Automerge.init(), s1)

        let s3 = Automerge.change(s1, doc => {
          doc.text.insertAt(0, { attributes: { bold: true } })
          doc.text.insertAt(16 + 1, { attributes: { bold: null } })
        })

        let s4 = Automerge.change(s2, doc => {
          doc.text.insertAt(8, { attributes: { bold: null } })
          doc.text.insertAt(11 + 1, { attributes: { bold: true } })
        })

        let merged = Automerge.merge(s3, s4)

        let final = Automerge.change(merged, doc => {
          doc.text.insertAt(3 + 1, { attributes: { bold: null } })
          doc.text.insertAt(doc.text.length, { attributes: { bold: true } })
        })

        let deltaDoc = automergeTextToDeltaDoc(final.text)

        // From https://quilljs.com/docs/delta/
        let expectedDoc = [
          { insert: 'Gan', attributes: { bold: true } },
          { insert: 'dalf the Grey' },
        ]

        assert.deepEqual(deltaDoc, expectedDoc)
      })

      it('should apply an insert', () => {
        let s1 = Automerge.change(Automerge.init(), doc => {
          doc.text = new Automerge.Text('Hello world')
        })

        const delta = [
          { retain: 6 },
          { insert: 'reader' },
          { delete: 5 }
        ]

        let s2 = Automerge.change(s1, doc => {
          applyDeltaDocToAutomergeText(delta, doc)
        })

        assert.strictEqual(s2.text.join(''), 'Hello reader')
      })

      it('should apply an insert with control characters', () => {
        let s1 = Automerge.change(Automerge.init(), doc => {
          doc.text = new Automerge.Text('Hello world')
        })

        const delta = [
          { retain: 6 },
          { insert: 'reader', attributes: { bold: true } },
          { delete: 5 },
          { insert: '!' }
        ]

        let s2 = Automerge.change(s1, doc => {
          applyDeltaDocToAutomergeText(delta, doc)
        })

        assert.strictEqual(s2.text.toString(), 'Hello reader!')
        assert.deepEqual(s2.text.toSpans(), [
          "Hello ",
          { attributes: { bold: true } },
          "reader",
          { attributes: { bold: null } },
          "!"
        ])
      })

      it('should account for control characters in retain/delete lengths', () => {
        let s1 = Automerge.change(Automerge.init(), doc => {
          doc.text = new Automerge.Text('Hello world')
          doc.text.insertAt(4, { attributes: { color: '#ccc' } })
          doc.text.insertAt(10, { attributes: { color: '#f00' } })
        })

        const delta = [
          { retain: 6 },
          { insert: 'reader', attributes: { bold: true } },
          { delete: 5 },
          { insert: '!' }
        ]

        let s2 = Automerge.change(s1, doc => {
          applyDeltaDocToAutomergeText(delta, doc)
        })

        assert.strictEqual(s2.text.toString(), 'Hello reader!')
        assert.deepEqual(s2.text.toSpans(), [
          "Hell",
          { attributes: { color: '#ccc'} },
          "o ",
          { attributes: { bold: true } },
          "reader",
          { attributes: { bold: null } },
          { attributes: { color: '#f00'} },
          "!"
        ])
      })

      it('should support embeds', () => {
        let s1 = Automerge.change(Automerge.init(), doc => {
          doc.text = new Automerge.Text('')
        })

        let deltaDoc = [{
          // An image link
          insert: {
            image: 'https://quilljs.com/assets/images/icon.png'
          },
          attributes: {
            link: 'https://quilljs.com'
          }
        }]

        let s2 = Automerge.change(s1, doc => {
          applyDeltaDocToAutomergeText(deltaDoc, doc)
        })

        assert.deepEqual(s2.text.toSpans(), [
          { attributes: { link: 'https://quilljs.com' } },
          { image: 'https://quilljs.com/assets/images/icon.png'},
          { attributes: { link: null } },
        ])
      })
    })
  })

  it('should support unicode when creating text', () => {
    s1 = Automerge.from({
      text: new Automerge.Text('🐦')
    })
    assert.strictEqual(s1.text.get(0), '🐦')
  })
})
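Taken together, automergeTextToDeltaDoc and applyDeltaDocToAutomergeText give a round trip between Automerge.Text and Quill's delta format. A minimal sketch of that loop, assuming the helper functions defined at the top of this file are in scope:

// Text -> Quill delta -> edit expressed as a delta -> back into the Text
let doc = Automerge.change(Automerge.init(), d => {
  d.text = new Automerge.Text('Hello world')
})

// Render the current text as a delta document
const rendered = automergeTextToDeltaDoc(doc.text) // [{ insert: 'Hello world' }]

// Apply an edit expressed as a delta back onto the CRDT
doc = Automerge.change(doc, d => {
  applyDeltaDocToAutomergeText([{ retain: 6 }, { insert: 'there ' }], d)
})
console.log(doc.text.toString()) // 'Hello there world'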
32 automerge-js/test/uuid_test.js Normal file
@@ -0,0 +1,32 @@
const assert = require('assert')
const Automerge = require('..')

const uuid = Automerge.uuid

describe('uuid', () => {
  afterEach(() => {
    uuid.reset()
  })

  describe('default implementation', () => {
    it('generates unique values', () => {
      assert.notEqual(uuid(), uuid())
    })
  })

  describe('custom implementation', () => {
    let counter

    function customUuid() {
      return `custom-uuid-${counter++}`
    }

    before(() => uuid.setFactory(customUuid))
    beforeEach(() => counter = 0)

    it('invokes the custom factory', () => {
      assert.equal(uuid(), 'custom-uuid-0')
      assert.equal(uuid(), 'custom-uuid-1')
    })
  })
})
@@ -1,6 +1,7 @@
 /node_modules
-/bundler
-/nodejs
-/deno
+/dev
+/node
+/web
+/target
 Cargo.lock
 yarn.lock
@@ -2,14 +2,13 @@
 [package]
 name = "automerge-wasm"
 description = "An js/wasm wrapper for the rust implementation of automerge-backend"
-repository = "https://github.com/automerge/automerge-rs"
-version = "0.1.0"
+# repository = "https://github.com/automerge/automerge-rs"
+version = "0.0.4"
 authors = ["Alex Good <alex@memoryandthought.me>","Orion Henry <orion@inkandswitch.com>", "Martin Kleppmann"]
 categories = ["wasm"]
 readme = "README.md"
 edition = "2021"
 license = "MIT"
-rust-version = "1.57.0"
 
 [lib]
 crate-type = ["cdylib","rlib"]
@@ -28,24 +27,23 @@ serde = "^1.0"
 serde_json = "^1.0"
 rand = { version = "^0.8.4" }
 getrandom = { version = "^0.2.2", features=["js"] }
-uuid = { version = "^1.2.1", features=["v4", "js", "serde"] }
-serde-wasm-bindgen = "0.4.3"
+uuid = { version = "^0.8.2", features=["v4", "wasm-bindgen", "serde"] }
+serde-wasm-bindgen = "0.1.3"
 serde_bytes = "0.11.5"
+unicode-segmentation = "1.7.1"
 hex = "^0.4.3"
 regex = "^1.5"
-itertools = "^0.10.3"
-thiserror = "^1.0.16"
 
 [dependencies.wasm-bindgen]
-version = "^0.2.83"
+version = "^0.2"
 #features = ["std"]
 features = ["serde-serialize", "std"]
 
 [package.metadata.wasm-pack.profile.release]
-# wasm-opt = false
+wasm-opt = true
 
 [package.metadata.wasm-pack.profile.profiling]
-wasm-opt = false
+wasm-opt = true
 
 # The `web-sys` crate allows you to interact with the various browser APIs,
 # like the DOM.
@@ -57,6 +55,5 @@ features = ["console"]
 
 [dev-dependencies]
 futures = "^0.1"
-proptest = { version = "^1.0.0", default-features = false, features = ["std"] }
 wasm-bindgen-futures = "^0.4"
 wasm-bindgen-test = "^0.3"
4 automerge-wasm/README.md Normal file
@@ -0,0 +1,4 @@
## Automerge WASM Low Level Interface

This is a low-level Automerge library, written in Rust, that exports a JavaScript API via WASM. This low-level API is the underpinning of the `automerge-js` library, which reimplements the Automerge API on top of these low-level functions.
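A sketch of what driving this low-level interface looks like, pieced together from the index.d.ts declarations later in this diff; the require path and the '_root' object ID are assumptions, not documented API:

const Automerge = require('automerge-wasm-pack') // assumed package entry point

const doc = Automerge.create()                    // new document, random actor
const text = doc.set_object('_root', 'text', '')  // create a text object at the root
doc.splice(text, 0, 0, 'hello world')             // splice characters into it
doc.commit('init')                                // commit the pending transaction

console.log(doc.text(text))                       // 'hello world'
const bytes = doc.save()                          // columnar document encoding
const copy = Automerge.loadDoc(bytes)             // reload from bytes
console.log(copy.text(text))                      // 'hello world'

doc.free(); copy.free()                           // wasm memory is freed explicitly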
15 automerge-wasm/attr_bug.js Normal file
File diff suppressed because one or more lines are too long
249 automerge-wasm/index.d.ts vendored Normal file
@@ -0,0 +1,249 @@

export type Actor = string;
export type ObjID = string;
export type Change = Uint8Array;
export type SyncMessage = Uint8Array;
export type Prop = string | number;
export type Hash = string;
export type Heads = Hash[];
export type Value = string | number | boolean | null | Date | Uint8Array
export type ObjType = string | Array | Object
export type FullValue =
  ["str", string] |
  ["int", number] |
  ["uint", number] |
  ["f64", number] |
  ["boolean", boolean] |
  ["timestamp", Date] |
  ["counter", number] |
  ["bytes", Uint8Array] |
  ["null", Uint8Array] |
  ["map", ObjID] |
  ["list", ObjID] |
  ["text", ObjID] |
  ["table", ObjID]

export enum ObjTypeName {
  list = "list",
  map = "map",
  table = "table",
  text = "text",
}

export type Datatype =
  "boolean" |
  "str" |
  "int" |
  "uint" |
  "f64" |
  "null" |
  "timestamp" |
  "counter" |
  "bytes" |
  "map" |
  "text" |
  "list";

export type DecodedSyncMessage = {
  heads: Heads,
  need: Heads,
  have: any[]
  changes: Change[]
}

export type DecodedChange = {
  actor: Actor,
  seq: number
  startOp: number,
  time: number,
  message: string | null,
  deps: Heads,
  hash: Hash,
  ops: Op[]
}

export type ChangeSetAddition = {
  actor: string,
  start: number,
  end: number,
}

export type ChangeSetDeletion = {
  actor: string,
  pos: number,
  val: string
}

export type ChangeSet = {
  add: ChangeSetAddition[],
  del: ChangeSetDeletion[]
}

export type Op = {
  action: string,
  obj: ObjID,
  key: string,
  value?: string | number | boolean,
  datatype?: string,
  pred: string[],
}

export function create(actor?: Actor): Automerge;
export function loadDoc(data: Uint8Array, actor?: Actor): Automerge;
export function encodeChange(change: DecodedChange): Change;
export function decodeChange(change: Change): DecodedChange;
export function initSyncState(): SyncState;
export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage;
export function decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage;
export function encodeSyncState(state: SyncState): Uint8Array;
export function decodeSyncState(data: Uint8Array): SyncState;

export class Automerge {
  // change state
  set(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): undefined;
  set_object(obj: ObjID, prop: Prop, value: ObjType): ObjID;
  insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): undefined;
  insert_object(obj: ObjID, index: number, value: ObjType): ObjID;
  push(obj: ObjID, value: Value, datatype?: Datatype): undefined;
  push_object(obj: ObjID, value: ObjType): ObjID;
  splice(obj: ObjID, start: number, delete_count: number, text?: string | Array<Value>): ObjID[] | undefined;
  inc(obj: ObjID, prop: Prop, value: number): void;
  del(obj: ObjID, prop: Prop): void;

  // returns a single value - if there is a conflict return the winner
  value(obj: ObjID, prop: any, heads?: Heads): FullValue | null;
  // return all values in case of a conflict
  values(obj: ObjID, arg: any, heads?: Heads): FullValue[];
  keys(obj: ObjID, heads?: Heads): string[];
  text(obj: ObjID, heads?: Heads): string;
  length(obj: ObjID, heads?: Heads): number;
  materialize(obj?: ObjID): any;

  // experimental spans api - unstable!
  mark(obj: ObjID, name: string, range: string, value: Value, datatype?: Datatype): void;
  unmark(obj: ObjID, mark: ObjID): void;
  spans(obj: ObjID): any;
  raw_spans(obj: ObjID): any;
  blame(obj: ObjID, baseline: Heads, changeset: Heads[]): ChangeSet[];
  attribute(obj: ObjID, baseline: Heads, changeset: Heads[]): ChangeSet[];
  attribute2(obj: ObjID, baseline: Heads, changeset: Heads[]): ChangeSet[];

  // transactions
  commit(message?: string, time?: number): Heads;
  merge(other: Automerge): ObjID[];
  getActorId(): Actor;
  pendingOps(): number;
  rollback(): number;

  // save and load to local store
  save(): Uint8Array;
  saveIncremental(): Uint8Array;
  loadIncremental(data: Uint8Array): ObjID[];

  // sync over network
  receiveSyncMessage(state: SyncState, message: SyncMessage): ObjID[];
  generateSyncMessage(state: SyncState): SyncMessage | null;

  // low level change functions
  applyChanges(changes: Change[]): ObjID[];
  getChanges(have_deps: Heads): Change[];
  getChangeByHash(hash: Hash): Change | null;
  getChangesAdded(other: Automerge): Change[];
  getHeads(): Heads;
  getLastLocalChange(): Change;
  getMissingDeps(heads?: Heads): Heads;

  // memory management
  free(): void;
  clone(actor?: string): Automerge;
  fork(actor?: string): Automerge;

  // dump internal state to console.log
  dump(): void;

  // dump internal state to a JS object
  toJS(): any;
}

export class SyncState {
  free(): void;
  clone(): SyncState;
  lastSentHeads: any;
  sentHashes: any;
  readonly sharedHeads: any;
}

export type InitInput = RequestInfo | URL | Response | BufferSource | WebAssembly.Module;

export interface InitOutput {
  readonly memory: WebAssembly.Memory;
  readonly __wbg_automerge_free: (a: number) => void;
  readonly automerge_new: (a: number, b: number, c: number) => void;
  readonly automerge_clone: (a: number, b: number, c: number, d: number) => void;
  readonly automerge_free: (a: number) => void;
  readonly automerge_pendingOps: (a: number) => number;
  readonly automerge_commit: (a: number, b: number, c: number, d: number, e: number) => number;
  readonly automerge_rollback: (a: number) => number;
  readonly automerge_keys: (a: number, b: number, c: number, d: number, e: number) => void;
  readonly automerge_text: (a: number, b: number, c: number, d: number, e: number) => void;
  readonly automerge_splice: (a: number, b: number, c: number, d: number, e: number, f: number, g: number) => void;
  readonly automerge_push: (a: number, b: number, c: number, d: number, e: number, f: number, g: number) => void;
  readonly automerge_insert: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number) => void;
  readonly automerge_set: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number) => void;
  readonly automerge_inc: (a: number, b: number, c: number, d: number, e: number, f: number) => void;
  readonly automerge_value: (a: number, b: number, c: number, d: number, e: number, f: number) => void;
  readonly automerge_values: (a: number, b: number, c: number, d: number, e: number, f: number) => void;
  readonly automerge_length: (a: number, b: number, c: number, d: number, e: number) => void;
  readonly automerge_del: (a: number, b: number, c: number, d: number, e: number) => void;
  readonly automerge_save: (a: number, b: number) => void;
  readonly automerge_saveIncremental: (a: number) => number;
  readonly automerge_loadIncremental: (a: number, b: number, c: number) => void;
  readonly automerge_applyChanges: (a: number, b: number, c: number) => void;
  readonly automerge_getChanges: (a: number, b: number, c: number) => void;
  readonly automerge_getChangesAdded: (a: number, b: number, c: number) => void;
  readonly automerge_getHeads: (a: number) => number;
  readonly automerge_getActorId: (a: number, b: number) => void;
  readonly automerge_getLastLocalChange: (a: number, b: number) => void;
  readonly automerge_dump: (a: number) => void;
  readonly automerge_getMissingDeps: (a: number, b: number, c: number) => void;
  readonly automerge_receiveSyncMessage: (a: number, b: number, c: number, d: number) => void;
  readonly automerge_generateSyncMessage: (a: number, b: number, c: number) => void;
  readonly automerge_toJS: (a: number) => number;
  readonly create: (a: number, b: number, c: number) => void;
  readonly loadDoc: (a: number, b: number, c: number, d: number) => void;
  readonly encodeChange: (a: number, b: number) => void;
  readonly decodeChange: (a: number, b: number) => void;
  readonly initSyncState: () => number;
  readonly importSyncState: (a: number, b: number) => void;
  readonly exportSyncState: (a: number) => number;
  readonly encodeSyncMessage: (a: number, b: number) => void;
  readonly decodeSyncMessage: (a: number, b: number) => void;
  readonly encodeSyncState: (a: number, b: number) => void;
  readonly decodeSyncState: (a: number, b: number) => void;
  readonly __wbg_list_free: (a: number) => void;
  readonly __wbg_map_free: (a: number) => void;
  readonly __wbg_text_free: (a: number) => void;
  readonly __wbg_table_free: (a: number) => void;
  readonly __wbg_syncstate_free: (a: number) => void;
  readonly syncstate_sharedHeads: (a: number) => number;
  readonly syncstate_lastSentHeads: (a: number) => number;
  readonly syncstate_set_lastSentHeads: (a: number, b: number, c: number) => void;
  readonly syncstate_set_sentHashes: (a: number, b: number, c: number) => void;
  readonly syncstate_clone: (a: number) => number;
  readonly __wbindgen_malloc: (a: number) => number;
  readonly __wbindgen_realloc: (a: number, b: number, c: number) => number;
  readonly __wbindgen_add_to_stack_pointer: (a: number) => number;
  readonly __wbindgen_free: (a: number, b: number) => void;
  readonly __wbindgen_exn_store: (a: number) => void;
}

/**
 * If `module_or_path` is {RequestInfo} or {URL}, makes a request and
 * for everything else, calls `WebAssembly.instantiate` directly.
 *
 * @param {InitInput | Promise<InitInput>} module_or_path
 *
 * @returns {Promise<InitOutput>}
 */

export default function init (module_or_path?: InitInput | Promise<InitInput>): Promise<InitOutput>;
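The sync declarations above compose into the usual two-peer exchange; a sketch of that loop, under the assumption that a null result from generateSyncMessage means the peers have converged, with transport kept in-process:

// docA and docB are Automerge instances obtained from create() or loadDoc()
const stateA = Automerge.initSyncState()
const stateB = Automerge.initSyncState()

let progress = true
while (progress) {
  progress = false
  const msgA = docA.generateSyncMessage(stateA)
  if (msgA !== null) {
    docB.receiveSyncMessage(stateB, msgA) // returns the ObjIDs that changed
    progress = true
  }
  const msgB = docB.generateSyncMessage(stateB)
  if (msgB !== null) {
    docA.receiveSyncMessage(stateA, msgB)
    progress = true
  }
}
// both documents now report the same getHeads()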
43 automerge-wasm/package.json Normal file
@@ -0,0 +1,43 @@
{
  "collaborators": [
    "Orion Henry <orion@inkandswitch.com>",
    "Alex Good <alex@memoryandthought.me>",
    "Martin Kleppmann"
  ],
  "name": "automerge-wasm-pack",
  "description": "wasm-bindgen bindings to the automerge rust implementation",
  "version": "0.0.23",
  "license": "MIT",
  "files": [
    "README.md",
    "package.json",
    "index.d.ts",
    "node/index.js",
    "node/index_bg.wasm",
    "web/index.js",
    "web/index_bg.wasm"
  ],
  "types": "index.d.ts",
  "module": "./web/index.js",
  "main": "./node/index.js",
  "scripts": {
    "build": "rimraf ./node && wasm-pack build --target nodejs --dev --out-name index -d node && cp index.d.ts node",
    "release-w": "rimraf ./web && wasm-pack build --target web --release --out-name index -d web && cp index.d.ts web",
    "release-n": "rimraf ./node && wasm-pack build --target nodejs --release --out-name index -d node && cp index.d.ts node",
    "release": "yarn release-w && yarn release-n",
    "test": "yarn build && ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts"
  },
  "dependencies": {},
  "devDependencies": {
    "@types/expect": "^24.3.0",
    "@types/jest": "^27.4.0",
    "@types/mocha": "^9.1.0",
    "@types/node": "^17.0.13",
    "fast-sha256": "^1.3.0",
    "mocha": "^9.1.3",
    "pako": "^2.0.4",
    "rimraf": "^3.0.2",
    "ts-mocha": "^9.0.2",
    "typescript": "^4.5.5"
  }
}
388 automerge-wasm/src/interop.rs Normal file
@@ -0,0 +1,388 @@
use automerge as am;
use automerge::transaction::Transactable;
use automerge::{Change, ChangeHash, Prop};
use js_sys::{Array, Object, Reflect, Uint8Array};
use std::collections::HashSet;
use std::fmt::Display;
use unicode_segmentation::UnicodeSegmentation;
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsCast;

use crate::{ObjId, ScalarValue, Value};

pub(crate) struct JS(pub JsValue);
pub(crate) struct AR(pub Array);

impl From<AR> for JsValue {
    fn from(ar: AR) -> Self {
        ar.0.into()
    }
}

impl From<JS> for JsValue {
    fn from(js: JS) -> Self {
        js.0
    }
}

impl From<am::sync::State> for JS {
    fn from(state: am::sync::State) -> Self {
        let shared_heads: JS = state.shared_heads.into();
        let last_sent_heads: JS = state.last_sent_heads.into();
        let their_heads: JS = state.their_heads.into();
        let their_need: JS = state.their_need.into();
        let sent_hashes: JS = state.sent_hashes.into();
        let their_have = if let Some(have) = &state.their_have {
            JsValue::from(AR::from(have.as_slice()).0)
        } else {
            JsValue::null()
        };
        let result: JsValue = Object::new().into();
        // we can unwrap here b/c we made the object and know its not frozen
        Reflect::set(&result, &"sharedHeads".into(), &shared_heads.0).unwrap();
        Reflect::set(&result, &"lastSentHeads".into(), &last_sent_heads.0).unwrap();
        Reflect::set(&result, &"theirHeads".into(), &their_heads.0).unwrap();
        Reflect::set(&result, &"theirNeed".into(), &their_need.0).unwrap();
        Reflect::set(&result, &"theirHave".into(), &their_have).unwrap();
        Reflect::set(&result, &"sentHashes".into(), &sent_hashes.0).unwrap();
        JS(result)
    }
}

impl From<Vec<ChangeHash>> for JS {
    fn from(heads: Vec<ChangeHash>) -> Self {
        let heads: Array = heads
            .iter()
            .map(|h| JsValue::from_str(&h.to_string()))
            .collect();
        JS(heads.into())
    }
}

impl From<HashSet<ChangeHash>> for JS {
    fn from(heads: HashSet<ChangeHash>) -> Self {
        let result: JsValue = Object::new().into();
        for key in &heads {
            Reflect::set(&result, &key.to_string().into(), &true.into()).unwrap();
        }
        JS(result)
    }
}

impl From<Option<Vec<ChangeHash>>> for JS {
    fn from(heads: Option<Vec<ChangeHash>>) -> Self {
        if let Some(v) = heads {
            let v: Array = v
                .iter()
                .map(|h| JsValue::from_str(&h.to_string()))
                .collect();
            JS(v.into())
        } else {
            JS(JsValue::null())
        }
    }
}

impl TryFrom<JS> for HashSet<ChangeHash> {
    type Error = JsValue;

    fn try_from(value: JS) -> Result<Self, Self::Error> {
        let mut result = HashSet::new();
        for key in Reflect::own_keys(&value.0)?.iter() {
            if let Some(true) = Reflect::get(&value.0, &key)?.as_bool() {
                result.insert(key.into_serde().map_err(to_js_err)?);
            }
        }
        Ok(result)
    }
}

impl TryFrom<JS> for Vec<ChangeHash> {
    type Error = JsValue;

    fn try_from(value: JS) -> Result<Self, Self::Error> {
        let value = value.0.dyn_into::<Array>()?;
        let value: Result<Vec<ChangeHash>, _> = value.iter().map(|j| j.into_serde()).collect();
        let value = value.map_err(to_js_err)?;
        Ok(value)
    }
}

impl From<JS> for Option<Vec<ChangeHash>> {
    fn from(value: JS) -> Self {
        let value = value.0.dyn_into::<Array>().ok()?;
        let value: Result<Vec<ChangeHash>, _> = value.iter().map(|j| j.into_serde()).collect();
        let value = value.ok()?;
        Some(value)
    }
}

impl TryFrom<JS> for Vec<Change> {
    type Error = JsValue;

    fn try_from(value: JS) -> Result<Self, Self::Error> {
        let value = value.0.dyn_into::<Array>()?;
        let changes: Result<Vec<Uint8Array>, _> = value.iter().map(|j| j.dyn_into()).collect();
        let changes = changes?;
        let changes: Result<Vec<Change>, _> = changes
            .iter()
            .map(|a| Change::try_from(a.to_vec()))
            .collect();
        let changes = changes.map_err(to_js_err)?;
        Ok(changes)
    }
}

impl TryFrom<JS> for am::sync::State {
    type Error = JsValue;

    fn try_from(value: JS) -> Result<Self, Self::Error> {
        let value = value.0;
        let shared_heads = js_get(&value, "sharedHeads")?.try_into()?;
        let last_sent_heads = js_get(&value, "lastSentHeads")?.try_into()?;
        let their_heads = js_get(&value, "theirHeads")?.into();
        let their_need = js_get(&value, "theirNeed")?.into();
        let their_have = js_get(&value, "theirHave")?.try_into()?;
        let sent_hashes = js_get(&value, "sentHashes")?.try_into()?;
        Ok(am::sync::State {
            shared_heads,
            last_sent_heads,
            their_heads,
            their_need,
            their_have,
            sent_hashes,
        })
    }
}

impl TryFrom<JS> for Option<Vec<am::sync::Have>> {
    type Error = JsValue;

    fn try_from(value: JS) -> Result<Self, Self::Error> {
        if value.0.is_null() {
            Ok(None)
        } else {
            Ok(Some(value.try_into()?))
        }
    }
}

impl TryFrom<JS> for Vec<am::sync::Have> {
    type Error = JsValue;

    fn try_from(value: JS) -> Result<Self, Self::Error> {
        let value = value.0.dyn_into::<Array>()?;
        let have: Result<Vec<am::sync::Have>, JsValue> = value
            .iter()
            .map(|s| {
                let last_sync = js_get(&s, "lastSync")?.try_into()?;
                let bloom = js_get(&s, "bloom")?.try_into()?;
                Ok(am::sync::Have { last_sync, bloom })
            })
            .collect();
        let have = have?;
        Ok(have)
    }
}

impl TryFrom<JS> for am::sync::BloomFilter {
    type Error = JsValue;

    fn try_from(value: JS) -> Result<Self, Self::Error> {
        let value: Uint8Array = value.0.dyn_into()?;
        let value = value.to_vec();
        let value = value.as_slice().try_into().map_err(to_js_err)?;
        Ok(value)
    }
}

impl From<&[ChangeHash]> for AR {
    fn from(value: &[ChangeHash]) -> Self {
        AR(value
            .iter()
            .map(|h| JsValue::from_str(&hex::encode(&h.0)))
            .collect())
    }
}

impl From<&[Change]> for AR {
    fn from(value: &[Change]) -> Self {
        let changes: Array = value
            .iter()
            .map(|c| Uint8Array::from(c.raw_bytes()))
            .collect();
        AR(changes)
    }
}

impl From<&[am::sync::Have]> for AR {
    fn from(value: &[am::sync::Have]) -> Self {
        AR(value
            .iter()
            .map(|have| {
                let last_sync: Array = have
                    .last_sync
                    .iter()
                    .map(|h| JsValue::from_str(&hex::encode(&h.0)))
                    .collect();
                // FIXME - the clone and the unwrap here shouldnt be needed - look at into_bytes()
                let bloom = Uint8Array::from(have.bloom.to_bytes().as_slice());
                let obj: JsValue = Object::new().into();
                // we can unwrap here b/c we created the object and know its not frozen
                Reflect::set(&obj, &"lastSync".into(), &last_sync.into()).unwrap();
                Reflect::set(&obj, &"bloom".into(), &bloom.into()).unwrap();
                obj
            })
            .collect())
    }
}

pub(crate) fn to_js_err<T: Display>(err: T) -> JsValue {
    js_sys::Error::new(&std::format!("{}", err)).into()
}

pub(crate) fn js_get<J: Into<JsValue>>(obj: J, prop: &str) -> Result<JS, JsValue> {
    Ok(JS(Reflect::get(&obj.into(), &prop.into())?))
}

pub(crate) fn js_set<V: Into<JsValue>>(obj: &JsValue, prop: &str, val: V) -> Result<bool, JsValue> {
    Reflect::set(obj, &prop.into(), &val.into())
}

pub(crate) fn to_prop(p: JsValue) -> Result<Prop, JsValue> {
    if let Some(s) = p.as_string() {
        Ok(Prop::Map(s))
    } else if let Some(n) = p.as_f64() {
        Ok(Prop::Seq(n as usize))
    } else {
        Err(to_js_err("prop must be a string or number"))
    }
}

pub(crate) fn to_objtype(
    value: &JsValue,
    datatype: &Option<String>,
) -> Option<(am::ObjType, Vec<(Prop, JsValue)>)> {
    match datatype.as_deref() {
        Some("map") => {
            let map = value.clone().dyn_into::<js_sys::Object>().ok()?;
            // FIXME unwrap
            let map = js_sys::Object::keys(&map)
                .iter()
                .zip(js_sys::Object::values(&map).iter())
                .map(|(key, val)| (key.as_string().unwrap().into(), val))
                .collect();
            Some((am::ObjType::Map, map))
        }
        Some("list") => {
            let list = value.clone().dyn_into::<js_sys::Array>().ok()?;
            let list = list
                .iter()
                .enumerate()
                .map(|(i, e)| (i.into(), e))
                .collect();
            Some((am::ObjType::List, list))
        }
        Some("text") => {
            let text = value.as_string()?;
            let text = text
                .graphemes(true)
                .enumerate()
                .map(|(i, ch)| (i.into(), ch.into()))
                .collect();
            Some((am::ObjType::Text, text))
        }
        Some(_) => None,
        None => {
            if let Ok(list) = value.clone().dyn_into::<js_sys::Array>() {
                let list = list
                    .iter()
                    .enumerate()
                    .map(|(i, e)| (i.into(), e))
                    .collect();
                Some((am::ObjType::List, list))
            } else if let Ok(map) = value.clone().dyn_into::<js_sys::Object>() {
                // FIXME unwrap
                let map = js_sys::Object::keys(&map)
                    .iter()
                    .zip(js_sys::Object::values(&map).iter())
                    .map(|(key, val)| (key.as_string().unwrap().into(), val))
                    .collect();
                Some((am::ObjType::Map, map))
            } else if let Some(text) = value.as_string() {
                let text = text
                    .graphemes(true)
                    .enumerate()
                    .map(|(i, ch)| (i.into(), ch.into()))
                    .collect();
                Some((am::ObjType::Text, text))
            } else {
                None
            }
        }
    }
}

pub(crate) fn get_heads(heads: Option<Array>) -> Option<Vec<ChangeHash>> {
    let heads = heads?;
    let heads: Result<Vec<ChangeHash>, _> = heads.iter().map(|j| j.into_serde()).collect();
|
||||||
|
heads.ok()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn get_js_heads(heads: JsValue) -> Result<Vec<ChangeHash>, JsValue> {
|
||||||
|
let heads = heads.dyn_into::<Array>()?;
|
||||||
|
heads
|
||||||
|
.iter()
|
||||||
|
.map(|j| j.into_serde())
|
||||||
|
.collect::<Result<Vec<_>, _>>()
|
||||||
|
.map_err(to_js_err)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn map_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue {
|
||||||
|
let keys = doc.keys(obj);
|
||||||
|
let map = Object::new();
|
||||||
|
for k in keys {
|
||||||
|
let val = doc.value(obj, &k);
|
||||||
|
match val {
|
||||||
|
Ok(Some((Value::Object(o), exid)))
|
||||||
|
if o == am::ObjType::Map || o == am::ObjType::Table =>
|
||||||
|
{
|
||||||
|
Reflect::set(&map, &k.into(), &map_to_js(doc, &exid)).unwrap();
|
||||||
|
}
|
||||||
|
Ok(Some((Value::Object(o), exid))) if o == am::ObjType::List => {
|
||||||
|
Reflect::set(&map, &k.into(), &list_to_js(doc, &exid)).unwrap();
|
||||||
|
}
|
||||||
|
Ok(Some((Value::Object(o), exid))) if o == am::ObjType::Text => {
|
||||||
|
Reflect::set(&map, &k.into(), &doc.text(&exid).unwrap().into()).unwrap();
|
||||||
|
}
|
||||||
|
Ok(Some((Value::Scalar(v), _))) => {
|
||||||
|
Reflect::set(&map, &k.into(), &ScalarValue(v).into()).unwrap();
|
||||||
|
}
|
||||||
|
_ => (),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
map.into()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn list_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue {
|
||||||
|
let len = doc.length(obj);
|
||||||
|
let array = Array::new();
|
||||||
|
for i in 0..len {
|
||||||
|
let val = doc.value(obj, i as usize);
|
||||||
|
match val {
|
||||||
|
Ok(Some((Value::Object(o), exid)))
|
||||||
|
if o == am::ObjType::Map || o == am::ObjType::Table =>
|
||||||
|
{
|
||||||
|
array.push(&map_to_js(doc, &exid));
|
||||||
|
}
|
||||||
|
Ok(Some((Value::Object(_), exid))) => {
|
||||||
|
array.push(&list_to_js(doc, &exid));
|
||||||
|
}
|
||||||
|
Ok(Some((Value::Scalar(v), _))) => {
|
||||||
|
array.push(&ScalarValue(v).into());
|
||||||
|
}
|
||||||
|
_ => (),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
array.into()
|
||||||
|
}
|
||||||
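For orientation: the `TryFrom` impls above pin down the plain-object shape a JS-side sync state must have before it can cross into Rust. A minimal sketch of that shape; every field value here is an illustrative placeholder, and hash strings are hex-encoded to match the `AR` conversions:

```ts
// Illustrative only — the field names come from the js_get() calls above.
const jsSyncState = {
  sharedHeads: [],    // hex-encoded change-hash strings
  lastSentHeads: [],
  theirHeads: null,   // a null here decodes to None on the Rust side
  theirNeed: null,
  theirHave: null,    // or an array of { lastSync: [...], bloom: Uint8Array }
  sentHashes: [],     // decoded by the sentHashes try_into above
}
```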
automerge-wasm/src/lib.rs (new file, 912 lines)
@@ -0,0 +1,912 @@
#![allow(clippy::unused_unit)]
use am::transaction::CommitOptions;
use am::transaction::Transactable;
use automerge as am;
use automerge::{Change, ObjId, Prop, Value, ROOT};
use js_sys::{Array, Object, Uint8Array};
use regex::Regex;
use std::convert::TryInto;
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsCast;

mod interop;
mod sync;
mod value;

use interop::{
    get_heads, get_js_heads, js_get, js_set, list_to_js, map_to_js, to_js_err, to_objtype, to_prop,
    AR, JS,
};
use sync::SyncState;
use value::{datatype, ScalarValue};

#[allow(unused_macros)]
macro_rules! log {
    ( $( $t:tt )* ) => {
        web_sys::console::log_1(&format!( $( $t )* ).into());
    };
}

#[cfg(feature = "wee_alloc")]
#[global_allocator]
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;

#[wasm_bindgen]
#[derive(Debug)]
pub struct Automerge(automerge::AutoCommit);

#[wasm_bindgen]
impl Automerge {
    pub fn new(actor: Option<String>) -> Result<Automerge, JsValue> {
        let mut automerge = automerge::AutoCommit::new();
        if let Some(a) = actor {
            let a = automerge::ActorId::from(hex::decode(a).map_err(to_js_err)?.to_vec());
            automerge.set_actor(a);
        }
        Ok(Automerge(automerge))
    }

    #[allow(clippy::should_implement_trait)]
    pub fn clone(&mut self, actor: Option<String>) -> Result<Automerge, JsValue> {
        if self.0.pending_ops() > 0 {
            self.0.commit();
        }
        let mut automerge = Automerge(self.0.clone());
        if let Some(s) = actor {
            let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec());
            automerge.0.set_actor(actor);
        }
        Ok(automerge)
    }

    #[allow(clippy::should_implement_trait)]
    pub fn fork(&mut self, actor: Option<String>) -> Result<Automerge, JsValue> {
        let mut automerge = Automerge(self.0.fork());
        if let Some(s) = actor {
            let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec());
            automerge.0.set_actor(actor);
        }
        Ok(automerge)
    }

    pub fn free(self) {}

    #[wasm_bindgen(js_name = pendingOps)]
    pub fn pending_ops(&self) -> JsValue {
        (self.0.pending_ops() as u32).into()
    }

    pub fn commit(&mut self, message: Option<String>, time: Option<f64>) -> JsValue {
        let mut commit_opts = CommitOptions::default();
        if let Some(message) = message {
            commit_opts.set_message(message);
        }
        if let Some(time) = time {
            commit_opts.set_time(time as i64);
        }
        let hash = self.0.commit_with(commit_opts);
        let result = Array::new();
        result.push(&JsValue::from_str(&hex::encode(&hash.0)));
        result.into()
    }

    pub fn merge(&mut self, other: &mut Automerge) -> Result<Array, JsValue> {
        let objs = self.0.merge(&mut other.0)?;
        let objs: Array = objs.iter().map(|o| JsValue::from(o.to_string())).collect();
        Ok(objs)
    }

    pub fn rollback(&mut self) -> f64 {
        self.0.rollback() as f64
    }

    pub fn keys(&mut self, obj: JsValue, heads: Option<Array>) -> Result<Array, JsValue> {
        let obj = self.import(obj)?;
        let result = if let Some(heads) = get_heads(heads) {
            self.0
                .keys_at(&obj, &heads)
                .map(|s| JsValue::from_str(&s))
                .collect()
        } else {
            self.0.keys(&obj).map(|s| JsValue::from_str(&s)).collect()
        };
        Ok(result)
    }

    pub fn text(&mut self, obj: JsValue, heads: Option<Array>) -> Result<String, JsValue> {
        let obj = self.import(obj)?;
        if let Some(heads) = get_heads(heads) {
            Ok(self.0.text_at(&obj, &heads)?)
        } else {
            Ok(self.0.text(&obj)?)
        }
    }

    pub fn splice(
        &mut self,
        obj: JsValue,
        start: f64,
        delete_count: f64,
        text: JsValue,
    ) -> Result<(), JsValue> {
        let obj = self.import(obj)?;
        let start = start as usize;
        let delete_count = delete_count as usize;
        let mut vals = vec![];
        if let Some(t) = text.as_string() {
            self.0.splice_text(&obj, start, delete_count, &t)?;
        } else {
            if let Ok(array) = text.dyn_into::<Array>() {
                for i in array.iter() {
                    let value = self
                        .import_scalar(&i, &None)
                        .ok_or_else(|| to_js_err("expected scalar"))?;
                    vals.push(value);
                }
            }
            self.0.splice(&obj, start, delete_count, vals.into_iter())?;
        }
        Ok(())
    }
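As a quick reference for `splice()` above, which the tests further down exercise heavily: the arguments are `(obj, start, deleteCount, text)`, where a string splices into a text object and an array of scalars splices element-wise. A small sketch (the `"notes"` key is illustrative):

```ts
import { create } from '..'

const doc = create()
const text = doc.set_object("_root", "notes", "hello world")
doc.splice(text, 5, 0, " little")   // insert " little" at index 5, delete nothing
console.log(doc.text(text))         // "hello little world"
```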
    pub fn push(&mut self, obj: JsValue, value: JsValue, datatype: JsValue) -> Result<(), JsValue> {
        let obj = self.import(obj)?;
        let value = self
            .import_scalar(&value, &datatype.as_string())
            .ok_or_else(|| to_js_err("invalid scalar value"))?;
        let index = self.0.length(&obj);
        self.0.insert(&obj, index, value)?;
        Ok(())
    }

    pub fn push_object(&mut self, obj: JsValue, value: JsValue) -> Result<Option<String>, JsValue> {
        let obj = self.import(obj)?;
        let (value, subvals) =
            to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?;
        let index = self.0.length(&obj);
        let opid = self.0.insert_object(&obj, index, value)?;
        self.subset(&opid, subvals)?;
        Ok(opid.to_string().into())
    }

    pub fn insert(
        &mut self,
        obj: JsValue,
        index: f64,
        value: JsValue,
        datatype: JsValue,
    ) -> Result<(), JsValue> {
        let obj = self.import(obj)?;
        let index = index as f64;
        let value = self
            .import_scalar(&value, &datatype.as_string())
            .ok_or_else(|| to_js_err("expected scalar value"))?;
        self.0.insert(&obj, index as usize, value)?;
        Ok(())
    }

    pub fn insert_object(
        &mut self,
        obj: JsValue,
        index: f64,
        value: JsValue,
    ) -> Result<Option<String>, JsValue> {
        let obj = self.import(obj)?;
        let index = index as f64;
        let (value, subvals) =
            to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?;
        let opid = self.0.insert_object(&obj, index as usize, value)?;
        self.subset(&opid, subvals)?;
        Ok(opid.to_string().into())
    }

    pub fn set(
        &mut self,
        obj: JsValue,
        prop: JsValue,
        value: JsValue,
        datatype: JsValue,
    ) -> Result<(), JsValue> {
        let obj = self.import(obj)?;
        let prop = self.import_prop(prop)?;
        let value = self
            .import_scalar(&value, &datatype.as_string())
            .ok_or_else(|| to_js_err("expected scalar value"))?;
        self.0.set(&obj, prop, value)?;
        Ok(())
    }

    pub fn make(
        &mut self,
        obj: JsValue,
        prop: JsValue,
        value: JsValue,
        _datatype: JsValue,
    ) -> Result<JsValue, JsValue> {
        // remove this
        am::log!("doc.make() is deprecated - please use doc.set_object() or doc.insert_object()");
        self.set_object(obj, prop, value)
    }

    pub fn set_object(
        &mut self,
        obj: JsValue,
        prop: JsValue,
        value: JsValue,
    ) -> Result<JsValue, JsValue> {
        let obj = self.import(obj)?;
        let prop = self.import_prop(prop)?;
        let (value, subvals) =
            to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?;
        let opid = self.0.set_object(&obj, prop, value)?;
        self.subset(&opid, subvals)?;
        Ok(opid.to_string().into())
    }

    fn subset(&mut self, obj: &am::ObjId, vals: Vec<(am::Prop, JsValue)>) -> Result<(), JsValue> {
        for (p, v) in vals {
            let (value, subvals) = self.import_value(&v, None)?;
            //let opid = self.0.set(id, p, value)?;
            let opid = match (p, value) {
                (Prop::Map(s), Value::Object(objtype)) => Some(self.0.set_object(obj, s, objtype)?),
                (Prop::Map(s), Value::Scalar(scalar)) => {
                    self.0.set(obj, s, scalar)?;
                    None
                }
                (Prop::Seq(i), Value::Object(objtype)) => {
                    Some(self.0.insert_object(obj, i, objtype)?)
                }
                (Prop::Seq(i), Value::Scalar(scalar)) => {
                    self.0.insert(obj, i, scalar)?;
                    None
                }
            };
            if let Some(opid) = opid {
                self.subset(&opid, subvals)?;
            }
        }
        Ok(())
    }

    pub fn inc(&mut self, obj: JsValue, prop: JsValue, value: JsValue) -> Result<(), JsValue> {
        let obj = self.import(obj)?;
        let prop = self.import_prop(prop)?;
        let value: f64 = value
            .as_f64()
            .ok_or_else(|| to_js_err("inc needs a numeric value"))?;
        self.0.inc(&obj, prop, value as i64)?;
        Ok(())
    }

    pub fn value(
        &mut self,
        obj: JsValue,
        prop: JsValue,
        heads: Option<Array>,
    ) -> Result<Option<Array>, JsValue> {
        let obj = self.import(obj)?;
        let result = Array::new();
        let prop = to_prop(prop);
        let heads = get_heads(heads);
        if let Ok(prop) = prop {
            let value = if let Some(h) = heads {
                self.0.value_at(&obj, prop, &h)?
            } else {
                self.0.value(&obj, prop)?
            };
            match value {
                Some((Value::Object(obj_type), obj_id)) => {
                    result.push(&obj_type.to_string().into());
                    result.push(&obj_id.to_string().into());
                    Ok(Some(result))
                }
                Some((Value::Scalar(value), _)) => {
                    result.push(&datatype(&value).into());
                    result.push(&ScalarValue(value).into());
                    Ok(Some(result))
                }
                None => Ok(None),
            }
        } else {
            Ok(None)
        }
    }

    pub fn values(
        &mut self,
        obj: JsValue,
        arg: JsValue,
        heads: Option<Array>,
    ) -> Result<Array, JsValue> {
        let obj = self.import(obj)?;
        let result = Array::new();
        let prop = to_prop(arg);
        if let Ok(prop) = prop {
            let values = if let Some(heads) = get_heads(heads) {
                self.0.values_at(&obj, prop, &heads)
            } else {
                self.0.values(&obj, prop)
            }
            .map_err(to_js_err)?;
            for value in values {
                match value {
                    (Value::Object(obj_type), obj_id) => {
                        let sub = Array::new();
                        sub.push(&obj_type.to_string().into());
                        sub.push(&obj_id.to_string().into());
                        result.push(&sub.into());
                    }
                    (Value::Scalar(value), id) => {
                        let sub = Array::new();
                        sub.push(&datatype(&value).into());
                        sub.push(&ScalarValue(value).into());
                        sub.push(&id.to_string().into());
                        result.push(&sub.into());
                    }
                }
            }
        }
        Ok(result)
    }

    pub fn length(&mut self, obj: JsValue, heads: Option<Array>) -> Result<f64, JsValue> {
        let obj = self.import(obj)?;
        if let Some(heads) = get_heads(heads) {
            Ok(self.0.length_at(&obj, &heads) as f64)
        } else {
            Ok(self.0.length(&obj) as f64)
        }
    }

    pub fn del(&mut self, obj: JsValue, prop: JsValue) -> Result<(), JsValue> {
        let obj = self.import(obj)?;
        let prop = to_prop(prop)?;
        self.0.del(&obj, prop).map_err(to_js_err)?;
        Ok(())
    }

    pub fn mark(
        &mut self,
        obj: JsValue,
        range: JsValue,
        name: JsValue,
        value: JsValue,
        datatype: JsValue,
    ) -> Result<(), JsValue> {
        let obj = self.import(obj)?;
        let re = Regex::new(r"([\[\(])(\d+)\.\.(\d+)([\)\]])").unwrap();
        let range = range.as_string().ok_or("range must be a string")?;
        let cap = re.captures_iter(&range).next().ok_or("range must be in the form of (start..end] or [start..end) etc... () for sticky, [] for normal")?;
        let start: usize = cap[2].parse().map_err(|_| to_js_err("invalid start"))?;
        let end: usize = cap[3].parse().map_err(|_| to_js_err("invalid end"))?;
        let start_sticky = &cap[1] == "(";
        let end_sticky = &cap[4] == ")";
        let name = name
            .as_string()
            .ok_or("invalid mark name")
            .map_err(to_js_err)?;
        let value = self
            .import_scalar(&value, &datatype.as_string())
            .ok_or_else(|| to_js_err("invalid value"))?;
        self.0
            .mark(&obj, start, start_sticky, end, end_sticky, &name, value)
            .map_err(to_js_err)?;
        Ok(())
    }
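The `range` argument to `mark()` above is a string matched by the regex: digits around `..`, wrapped in `[]` for normal bounds or `()` for sticky ones. A sketch of both forms (the mark names and values are illustrative):

```ts
import { create } from '..'

const doc = create()
const text = doc.set_object("_root", "notes", "hello world")
doc.mark(text, "[0..5]", "bold", true)      // normal bounds at both ends
doc.mark(text, "(6..11)", "italic", true)   // sticky bounds at both ends
console.log(doc.spans(text))                // text runs interleaved with mark lists
```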
    pub fn unmark(&mut self, obj: JsValue, mark: JsValue) -> Result<(), JsValue> {
        let obj = self.import(obj)?;
        let mark = self.import(mark)?;
        self.0.unmark(&obj, &mark).map_err(to_js_err)?;
        Ok(())
    }

    pub fn spans(&mut self, obj: JsValue) -> Result<JsValue, JsValue> {
        let obj = self.import(obj)?;
        let text = self.0.list(&obj).map_err(to_js_err)?;
        let spans = self.0.spans(&obj).map_err(to_js_err)?;
        let mut last_pos = 0;
        let result = Array::new();
        for s in spans {
            let marks = Array::new();
            for m in s.marks {
                let mark = Array::new();
                mark.push(&m.0.into());
                mark.push(&datatype(&m.1).into());
                mark.push(&ScalarValue(m.1).into());
                marks.push(&mark.into());
            }
            let text_span = &text[last_pos..s.pos]; //.slice(last_pos, s.pos);
            if !text_span.is_empty() {
                let t: String = text_span
                    .iter()
                    .filter_map(|(v, _)| v.as_string())
                    .collect();
                result.push(&t.into());
            }
            result.push(&marks);
            last_pos = s.pos;
            //let obj = Object::new().into();
            //js_set(&obj, "pos", s.pos as i32)?;
            //js_set(&obj, "marks", marks)?;
            //result.push(&obj.into());
        }
        let text_span = &text[last_pos..];
        if !text_span.is_empty() {
            let t: String = text_span
                .iter()
                .filter_map(|(v, _)| v.as_string())
                .collect();
            result.push(&t.into());
        }
        Ok(result.into())
    }

    pub fn raw_spans(&mut self, obj: JsValue) -> Result<Array, JsValue> {
        let obj = self.import(obj)?;
        let spans = self.0.raw_spans(&obj).map_err(to_js_err)?;
        let result = Array::new();
        for s in spans {
            result.push(&JsValue::from_serde(&s).map_err(to_js_err)?);
        }
        Ok(result)
    }

    pub fn blame(
        &mut self,
        obj: JsValue,
        baseline: JsValue,
        change_sets: JsValue,
    ) -> Result<Array, JsValue> {
        am::log!("doc.blame() is deprecated - please use doc.attribute()");
        self.attribute(obj, baseline, change_sets)
    }

    pub fn attribute(
        &mut self,
        obj: JsValue,
        baseline: JsValue,
        change_sets: JsValue,
    ) -> Result<Array, JsValue> {
        let obj = self.import(obj)?;
        let baseline = get_js_heads(baseline)?;
        let change_sets = change_sets.dyn_into::<Array>()?;
        let change_sets = change_sets
            .iter()
            .map(get_js_heads)
            .collect::<Result<Vec<_>, _>>()?;
        let result = self.0.attribute(&obj, &baseline, &change_sets)?;
        let result = result
            .into_iter()
            .map(|cs| {
                let add = cs
                    .add
                    .iter()
                    .map::<Result<JsValue, JsValue>, _>(|range| {
                        let r = Object::new();
                        js_set(&r, "start", range.start as f64)?;
                        js_set(&r, "end", range.end as f64)?;
                        Ok(JsValue::from(&r))
                    })
                    .collect::<Result<Vec<JsValue>, JsValue>>()?
                    .iter()
                    .collect::<Array>();
                let del = cs
                    .del
                    .iter()
                    .map::<Result<JsValue, JsValue>, _>(|d| {
                        let r = Object::new();
                        js_set(&r, "pos", d.0 as f64)?;
                        js_set(&r, "val", &d.1)?;
                        Ok(JsValue::from(&r))
                    })
                    .collect::<Result<Vec<JsValue>, JsValue>>()?
                    .iter()
                    .collect::<Array>();
                let obj = Object::new();
                js_set(&obj, "add", add)?;
                js_set(&obj, "del", del)?;
                Ok(obj.into())
            })
            .collect::<Result<Vec<JsValue>, JsValue>>()?
            .iter()
            .collect::<Array>();
        Ok(result)
    }

    pub fn attribute2(
        &mut self,
        obj: JsValue,
        baseline: JsValue,
        change_sets: JsValue,
    ) -> Result<Array, JsValue> {
        let obj = self.import(obj)?;
        let baseline = get_js_heads(baseline)?;
        let change_sets = change_sets.dyn_into::<Array>()?;
        let change_sets = change_sets
            .iter()
            .map(get_js_heads)
            .collect::<Result<Vec<_>, _>>()?;
        let result = self.0.attribute2(&obj, &baseline, &change_sets)?;
        let result = result
            .into_iter()
            .map(|cs| {
                let add = cs
                    .add
                    .iter()
                    .map::<Result<JsValue, JsValue>, _>(|a| {
                        let r = Object::new();
                        js_set(&r, "actor", &self.0.actor_to_str(a.actor))?;
                        js_set(&r, "start", a.range.start as f64)?;
                        js_set(&r, "end", a.range.end as f64)?;
                        Ok(JsValue::from(&r))
                    })
                    .collect::<Result<Vec<JsValue>, JsValue>>()?
                    .iter()
                    .collect::<Array>();
                let del = cs
                    .del
                    .iter()
                    .map::<Result<JsValue, JsValue>, _>(|d| {
                        let r = Object::new();
                        js_set(&r, "actor", &self.0.actor_to_str(d.actor))?;
                        js_set(&r, "pos", d.pos as f64)?;
                        js_set(&r, "val", &d.span)?;
                        Ok(JsValue::from(&r))
                    })
                    .collect::<Result<Vec<JsValue>, JsValue>>()?
                    .iter()
                    .collect::<Array>();
                let obj = Object::new();
                js_set(&obj, "add", add)?;
                js_set(&obj, "del", del)?;
                Ok(obj.into())
            })
            .collect::<Result<Vec<JsValue>, JsValue>>()?
            .iter()
            .collect::<Array>();
        Ok(result)
    }

    pub fn save(&mut self) -> Uint8Array {
        Uint8Array::from(self.0.save().as_slice())
    }

    #[wasm_bindgen(js_name = saveIncremental)]
    pub fn save_incremental(&mut self) -> Uint8Array {
        let bytes = self.0.save_incremental();
        Uint8Array::from(bytes.as_slice())
    }

    #[wasm_bindgen(js_name = loadIncremental)]
    pub fn load_incremental(&mut self, data: Uint8Array) -> Result<Array, JsValue> {
        let data = data.to_vec();
        let objs = self.0.load_incremental(&data).map_err(to_js_err)?;
        let objs: Array = objs.iter().map(|o| JsValue::from(o.to_string())).collect();
        Ok(objs)
    }

    #[wasm_bindgen(js_name = applyChanges)]
    pub fn apply_changes(&mut self, changes: JsValue) -> Result<Array, JsValue> {
        let changes: Vec<_> = JS(changes).try_into()?;
        let objs = self.0.apply_changes(changes).map_err(to_js_err)?;
        let objs: Array = objs.iter().map(|o| JsValue::from(o.to_string())).collect();
        Ok(objs)
    }

    #[wasm_bindgen(js_name = getChanges)]
    pub fn get_changes(&mut self, have_deps: JsValue) -> Result<Array, JsValue> {
        let deps: Vec<_> = JS(have_deps).try_into()?;
        let changes = self.0.get_changes(&deps);
        let changes: Array = changes
            .iter()
            .map(|c| Uint8Array::from(c.raw_bytes()))
            .collect();
        Ok(changes)
    }

    #[wasm_bindgen(js_name = getChangeByHash)]
    pub fn get_change_by_hash(&mut self, hash: JsValue) -> Result<JsValue, JsValue> {
        let hash = hash.into_serde().map_err(to_js_err)?;
        let change = self.0.get_change_by_hash(&hash);
        if let Some(c) = change {
            Ok(Uint8Array::from(c.raw_bytes()).into())
        } else {
            Ok(JsValue::null())
        }
    }

    #[wasm_bindgen(js_name = getChangesAdded)]
    pub fn get_changes_added(&mut self, other: &mut Automerge) -> Result<Array, JsValue> {
        let changes = self.0.get_changes_added(&mut other.0);
        let changes: Array = changes
            .iter()
            .map(|c| Uint8Array::from(c.raw_bytes()))
            .collect();
        Ok(changes)
    }

    #[wasm_bindgen(js_name = getHeads)]
    pub fn get_heads(&mut self) -> Array {
        let heads = self.0.get_heads();
        let heads: Array = heads
            .iter()
            .map(|h| JsValue::from_str(&hex::encode(&h.0)))
            .collect();
        heads
    }

    #[wasm_bindgen(js_name = getActorId)]
    pub fn get_actor_id(&mut self) -> String {
        let actor = self.0.get_actor();
        actor.to_string()
    }

    #[wasm_bindgen(js_name = getLastLocalChange)]
    pub fn get_last_local_change(&mut self) -> Result<Uint8Array, JsValue> {
        if let Some(change) = self.0.get_last_local_change() {
            Ok(Uint8Array::from(change.raw_bytes()))
        } else {
            Err(to_js_err("no local changes"))
        }
    }

    pub fn dump(&self) {
        self.0.dump()
    }

    #[wasm_bindgen(js_name = getMissingDeps)]
    pub fn get_missing_deps(&mut self, heads: Option<Array>) -> Result<Array, JsValue> {
        let heads = get_heads(heads).unwrap_or_default();
        let deps = self.0.get_missing_deps(&heads);
        let deps: Array = deps
            .iter()
            .map(|h| JsValue::from_str(&hex::encode(&h.0)))
            .collect();
        Ok(deps)
    }

    #[wasm_bindgen(js_name = receiveSyncMessage)]
    pub fn receive_sync_message(
        &mut self,
        state: &mut SyncState,
        message: Uint8Array,
    ) -> Result<Array, JsValue> {
        let message = message.to_vec();
        let message = am::sync::Message::decode(message.as_slice()).map_err(to_js_err)?;
        let objs = self
            .0
            .receive_sync_message(&mut state.0, message)
            .map_err(to_js_err)?;
        let objs: Array = objs.iter().map(|o| JsValue::from(o.to_string())).collect();
        Ok(objs)
    }

    #[wasm_bindgen(js_name = generateSyncMessage)]
    pub fn generate_sync_message(&mut self, state: &mut SyncState) -> Result<JsValue, JsValue> {
        if let Some(message) = self.0.generate_sync_message(&mut state.0) {
            Ok(Uint8Array::from(message.encode().as_slice()).into())
        } else {
            Ok(JsValue::null())
        }
    }

    #[wasm_bindgen(js_name = toJS)]
    pub fn to_js(&self) -> JsValue {
        map_to_js(&self.0, &ROOT)
    }

    pub fn materialize(&self, obj: JsValue) -> Result<JsValue, JsValue> {
        let obj = self.import(obj).unwrap_or(ROOT);
        match self.0.object_type(&obj) {
            Some(am::ObjType::Map) => Ok(map_to_js(&self.0, &obj)),
            Some(am::ObjType::List) => Ok(list_to_js(&self.0, &obj)),
            Some(am::ObjType::Text) => Ok(self.0.text(&obj)?.into()),
            Some(am::ObjType::Table) => Ok(map_to_js(&self.0, &obj)),
            None => Err(to_js_err(format!("invalid obj {}", obj))),
        }
    }

    fn import(&self, id: JsValue) -> Result<ObjId, JsValue> {
        if let Some(s) = id.as_string() {
            if let Some(post) = s.strip_prefix('/') {
                let mut obj = ROOT;
                let mut is_map = true;
                let parts = post.split('/');
                for prop in parts {
                    if prop.is_empty() {
                        break;
                    }
                    let val = if is_map {
                        self.0.value(obj, prop)?
                    } else {
                        self.0.value(obj, am::Prop::Seq(prop.parse().unwrap()))?
                    };
                    match val {
                        Some((am::Value::Object(am::ObjType::Map), id)) => {
                            is_map = true;
                            obj = id;
                        }
                        Some((am::Value::Object(am::ObjType::Table), id)) => {
                            is_map = true;
                            obj = id;
                        }
                        Some((am::Value::Object(_), id)) => {
                            is_map = false;
                            obj = id;
                        }
                        None => return Err(to_js_err(format!("invalid path '{}'", s))),
                        _ => return Err(to_js_err(format!("path '{}' is not an object", s))),
                    };
                }
                Ok(obj)
            } else {
                Ok(self.0.import(&s)?)
            }
        } else {
            Err(to_js_err("invalid objid"))
        }
    }
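Worth calling out from `import()` above: an object id may be given either as an opid string or as a `/`-separated path from the root, with numeric segments parsed as sequence indices. A sketch under that assumption (the `"config"`/`"theme"` keys are illustrative):

```ts
import { create } from '..'

const doc = create()
doc.set_object("_root", "config", { theme: { mode: "dark" } })
console.log(doc.value("/config/theme", "mode"))   // ["str", "dark"]
console.log(doc.materialize("/config"))           // { theme: { mode: "dark" } }
```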
    fn import_prop(&mut self, prop: JsValue) -> Result<Prop, JsValue> {
        if let Some(s) = prop.as_string() {
            Ok(s.into())
        } else if let Some(n) = prop.as_f64() {
            Ok((n as usize).into())
        } else {
            Err(to_js_err(format!("invalid prop {:?}", prop)))
        }
    }

    fn import_scalar(
        &mut self,
        value: &JsValue,
        datatype: &Option<String>,
    ) -> Option<am::ScalarValue> {
        match datatype.as_deref() {
            Some("boolean") => value.as_bool().map(am::ScalarValue::Boolean),
            Some("int") => value.as_f64().map(|v| am::ScalarValue::Int(v as i64)),
            Some("uint") => value.as_f64().map(|v| am::ScalarValue::Uint(v as u64)),
            Some("f64") => value.as_f64().map(am::ScalarValue::F64),
            Some("bytes") => Some(am::ScalarValue::Bytes(
                value.clone().dyn_into::<Uint8Array>().unwrap().to_vec(),
            )),
            Some("counter") => value.as_f64().map(|v| am::ScalarValue::counter(v as i64)),
            Some("timestamp") => value.as_f64().map(|v| am::ScalarValue::Timestamp(v as i64)),
            Some("null") => Some(am::ScalarValue::Null),
            Some(_) => None,
            None => {
                if value.is_null() {
                    Some(am::ScalarValue::Null)
                } else if let Some(b) = value.as_bool() {
                    Some(am::ScalarValue::Boolean(b))
                } else if let Some(s) = value.as_string() {
                    Some(am::ScalarValue::Str(s.into()))
                } else if let Some(n) = value.as_f64() {
                    if (n.round() - n).abs() < f64::EPSILON {
                        Some(am::ScalarValue::Int(n as i64))
                    } else {
                        Some(am::ScalarValue::F64(n))
                    }
                } else if let Ok(d) = value.clone().dyn_into::<js_sys::Date>() {
                    Some(am::ScalarValue::Timestamp(d.get_time() as i64))
                } else if let Ok(o) = &value.clone().dyn_into::<Uint8Array>() {
                    Some(am::ScalarValue::Bytes(o.to_vec()))
                } else {
                    None
                }
            }
        }
    }

    fn import_value(
        &mut self,
        value: &JsValue,
        datatype: Option<String>,
    ) -> Result<(Value, Vec<(Prop, JsValue)>), JsValue> {
        match self.import_scalar(value, &datatype) {
            Some(val) => Ok((val.into(), vec![])),
            None => {
                if let Some((o, subvals)) = to_objtype(value, &datatype) {
                    Ok((o.into(), subvals))
                } else {
                    web_sys::console::log_2(&"Invalid value".into(), value);
                    Err(to_js_err("invalid value"))
                }
            }
        }
    }
}

#[wasm_bindgen(js_name = create)]
pub fn init(actor: Option<String>) -> Result<Automerge, JsValue> {
    console_error_panic_hook::set_once();
    Automerge::new(actor)
}

#[wasm_bindgen(js_name = loadDoc)]
pub fn load(data: Uint8Array, actor: Option<String>) -> Result<Automerge, JsValue> {
    let data = data.to_vec();
    let mut automerge = am::AutoCommit::load(&data).map_err(to_js_err)?;
    if let Some(s) = actor {
        let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec());
        automerge.set_actor(actor);
    }
    Ok(Automerge(automerge))
}

#[wasm_bindgen(js_name = encodeChange)]
pub fn encode_change(change: JsValue) -> Result<Uint8Array, JsValue> {
    let change: am::ExpandedChange = change.into_serde().map_err(to_js_err)?;
    let change: Change = change.into();
    Ok(Uint8Array::from(change.raw_bytes()))
}

#[wasm_bindgen(js_name = decodeChange)]
pub fn decode_change(change: Uint8Array) -> Result<JsValue, JsValue> {
    let change = Change::from_bytes(change.to_vec()).map_err(to_js_err)?;
    let change: am::ExpandedChange = change.decode();
    JsValue::from_serde(&change).map_err(to_js_err)
}

#[wasm_bindgen(js_name = initSyncState)]
pub fn init_sync_state() -> SyncState {
    SyncState(am::sync::State::new())
}

// needed for compatibility with the automerge-js API
#[wasm_bindgen(js_name = importSyncState)]
pub fn import_sync_state(state: JsValue) -> Result<SyncState, JsValue> {
    Ok(SyncState(JS(state).try_into()?))
}

// needed for compatibility with the automerge-js API
#[wasm_bindgen(js_name = exportSyncState)]
pub fn export_sync_state(state: SyncState) -> JsValue {
    JS::from(state.0).into()
}

#[wasm_bindgen(js_name = encodeSyncMessage)]
pub fn encode_sync_message(message: JsValue) -> Result<Uint8Array, JsValue> {
    let heads = js_get(&message, "heads")?.try_into()?;
    let need = js_get(&message, "need")?.try_into()?;
    let changes = js_get(&message, "changes")?.try_into()?;
    let have = js_get(&message, "have")?.try_into()?;
    Ok(Uint8Array::from(
        am::sync::Message {
            heads,
            need,
            have,
            changes,
        }
        .encode()
        .as_slice(),
    ))
}

#[wasm_bindgen(js_name = decodeSyncMessage)]
pub fn decode_sync_message(msg: Uint8Array) -> Result<JsValue, JsValue> {
    let data = msg.to_vec();
    let msg = am::sync::Message::decode(&data).map_err(to_js_err)?;
    let heads = AR::from(msg.heads.as_slice());
    let need = AR::from(msg.need.as_slice());
    let changes = AR::from(msg.changes.as_slice());
    let have = AR::from(msg.have.as_slice());
    let obj = Object::new().into();
    js_set(&obj, "heads", heads)?;
    js_set(&obj, "need", need)?;
    js_set(&obj, "have", have)?;
    js_set(&obj, "changes", changes)?;
    Ok(obj)
}

#[wasm_bindgen(js_name = encodeSyncState)]
pub fn encode_sync_state(state: SyncState) -> Result<Uint8Array, JsValue> {
    let state = state.0;
    Ok(Uint8Array::from(state.encode().as_slice()))
}

#[wasm_bindgen(js_name = decodeSyncState)]
pub fn decode_sync_state(data: Uint8Array) -> Result<SyncState, JsValue> {
    SyncState::decode(data)
}
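Taken together, the sync entry points above compose into the usual two-peer loop. A minimal sketch with the transport omitted; the peer names are illustrative:

```ts
import { create, initSyncState } from '..'

const alice = create(), bob = create()
const aliceState = initSyncState(), bobState = initSyncState()

// one half-round: alice offers what she has, bob applies it
const msg = alice.generateSyncMessage(aliceState)   // Uint8Array, or null when in sync
if (msg !== null) {
  bob.receiveSyncMessage(bobState, msg)             // returns the ids of changed objects
}
```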
automerge-wasm/src/sync.rs
@@ -1,11 +1,11 @@
 use automerge as am;
 use automerge::ChangeHash;
 use js_sys::Uint8Array;
-use std::collections::{BTreeSet, HashMap};
+use std::collections::{HashMap, HashSet};
 use std::convert::TryInto;
 use wasm_bindgen::prelude::*;

-use crate::interop::{self, to_js_err, AR, JS};
+use crate::interop::{to_js_err, AR, JS};

 #[wasm_bindgen]
 #[derive(Debug)]
@@ -24,10 +24,7 @@ impl SyncState {
     }

     #[wasm_bindgen(setter, js_name = lastSentHeads)]
-    pub fn set_last_sent_heads(
-        &mut self,
-        heads: JsValue,
-    ) -> Result<(), interop::error::BadChangeHashes> {
+    pub fn set_last_sent_heads(&mut self, heads: JsValue) -> Result<(), JsValue> {
         let heads: Vec<ChangeHash> = JS(heads).try_into()?;
         self.0.last_sent_heads = heads;
         Ok(())
@@ -35,9 +32,8 @@ impl SyncState {

     #[wasm_bindgen(setter, js_name = sentHashes)]
     pub fn set_sent_hashes(&mut self, hashes: JsValue) -> Result<(), JsValue> {
-        let hashes_map: HashMap<ChangeHash, bool> =
-            serde_wasm_bindgen::from_value(hashes).map_err(to_js_err)?;
-        let hashes_set: BTreeSet<ChangeHash> = hashes_map.keys().cloned().collect();
+        let hashes_map: HashMap<ChangeHash, bool> = hashes.into_serde().map_err(to_js_err)?;
+        let hashes_set: HashSet<ChangeHash> = hashes_map.keys().cloned().collect();
         self.0.sent_hashes = hashes_set;
         Ok(())
     }
@@ -47,19 +43,10 @@ impl SyncState {
         SyncState(self.0.clone())
     }

-    pub(crate) fn decode(data: Uint8Array) -> Result<SyncState, DecodeSyncStateErr> {
+    pub(crate) fn decode(data: Uint8Array) -> Result<SyncState, JsValue> {
         let data = data.to_vec();
-        let s = am::sync::State::decode(&data)?;
+        let s = am::sync::State::decode(&data);
+        let s = s.map_err(to_js_err)?;
         Ok(SyncState(s))
     }
 }
-
-#[derive(Debug, thiserror::Error)]
-#[error(transparent)]
-pub struct DecodeSyncStateErr(#[from] automerge::sync::DecodeStateError);
-
-impl From<DecodeSyncStateErr> for JsValue {
-    fn from(e: DecodeSyncStateErr) -> Self {
-        JsValue::from(e.to_string())
-    }
-}
automerge-wasm/src/value.rs (new file, 36 lines)
@@ -0,0 +1,36 @@
use automerge as am;
use js_sys::Uint8Array;
use wasm_bindgen::prelude::*;

#[derive(Debug)]
pub struct ScalarValue(pub(crate) am::ScalarValue);

impl From<ScalarValue> for JsValue {
    fn from(val: ScalarValue) -> Self {
        match &val.0 {
            am::ScalarValue::Bytes(v) => Uint8Array::from(v.as_slice()).into(),
            am::ScalarValue::Str(v) => v.to_string().into(),
            am::ScalarValue::Int(v) => (*v as f64).into(),
            am::ScalarValue::Uint(v) => (*v as f64).into(),
            am::ScalarValue::F64(v) => (*v).into(),
            am::ScalarValue::Counter(v) => (f64::from(v)).into(),
            am::ScalarValue::Timestamp(v) => js_sys::Date::new(&(*v as f64).into()).into(),
            am::ScalarValue::Boolean(v) => (*v).into(),
            am::ScalarValue::Null => JsValue::null(),
        }
    }
}

pub(crate) fn datatype(s: &am::ScalarValue) -> String {
    match s {
        am::ScalarValue::Bytes(_) => "bytes".into(),
        am::ScalarValue::Str(_) => "str".into(),
        am::ScalarValue::Int(_) => "int".into(),
        am::ScalarValue::Uint(_) => "uint".into(),
        am::ScalarValue::F64(_) => "f64".into(),
        am::ScalarValue::Counter(_) => "counter".into(),
        am::ScalarValue::Timestamp(_) => "timestamp".into(),
        am::ScalarValue::Boolean(_) => "boolean".into(),
        am::ScalarValue::Null => "null".into(),
    }
}
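`datatype()` above is what gives `value()` its tagged-pair return shape, and the same tags are accepted as the optional datatype argument on the way in. A sketch of the round trip (the keys are illustrative):

```ts
import { create } from '..'

const doc = create()
doc.set("_root", "count", 10, "counter")   // force the counter encoding
doc.set("_root", "ratio", 0.5)             // no datatype: a non-integral number infers f64
console.log(doc.value("_root", "count"))   // ["counter", 10]
console.log(doc.value("_root", "ratio"))   // ["f64", 0.5]
doc.inc("_root", "count", 2)               // counters can then be incremented
```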
automerge-wasm/test/attribute.ts (new file, 189 lines)
@@ -0,0 +1,189 @@
import { describe, it } from 'mocha';
//@ts-ignore
import assert from 'assert'
//@ts-ignore
import { BloomFilter } from './helpers/sync'
import { create, loadDoc, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..'
import { DecodedSyncMessage, Hash } from '..'

describe('Automerge', () => {
  describe('attribute', () => {
    it('should be able to attribute text segments on change sets', () => {
      let doc1 = create()
      let text = doc1.set_object("_root", "notes","hello little world")
      let h1 = doc1.getHeads();

      let doc2 = doc1.fork();
      doc2.splice(text, 5, 7, " big");
      doc2.text(text)
      let h2 = doc2.getHeads();
      assert.deepEqual(doc2.text(text), "hello big world")

      let doc3 = doc1.fork();
      doc3.splice(text, 0, 0, "Well, ");
      let h3 = doc3.getHeads();
      assert.deepEqual(doc3.text(text), "Well, hello little world")

      doc1.merge(doc2)
      doc1.merge(doc3)
      assert.deepEqual(doc1.text(text), "Well, hello big world")
      let attribute = doc1.attribute(text, h1, [h2, h3])

      assert.deepEqual(attribute, [
        { add: [ { start: 11, end: 15 } ], del: [ { pos: 15, val: ' little' } ] },
        { add: [ { start: 0, end: 6 } ], del: [] }
      ])
    })

    it('should be able to handle complex attribute change sets', () => {
      let doc1 = create("aaaa")
      let text = doc1.set_object("_root", "notes","AAAAAA")
      let h1 = doc1.getHeads();

      let doc2 = doc1.fork("bbbb");
      doc2.splice(text, 0, 2, "BB");
      doc2.commit()
      doc2.splice(text, 2, 2, "BB");
      doc2.commit()
      doc2.splice(text, 6, 0, "BB");
      doc2.commit()
      let h2 = doc2.getHeads();
      assert.deepEqual(doc2.text(text), "BBBBAABB")

      let doc3 = doc1.fork("cccc");
      doc3.splice(text, 1, 1, "C");
      doc3.commit()
      doc3.splice(text, 3, 1, "C");
      doc3.commit()
      doc3.splice(text, 5, 1, "C");
      doc3.commit()
      let h3 = doc3.getHeads();
      // with tombstones it's
      // AC.AC.AC.
      assert.deepEqual(doc3.text(text), "ACACAC")

      doc1.merge(doc2)

      assert.deepEqual(doc1.attribute(text, h1, [h2]), [
        { add: [ {start:0, end: 4}, { start: 6, end: 8 } ], del: [ { pos: 4, val: 'AAAA' } ] },
      ])

      doc1.merge(doc3)

      assert.deepEqual(doc1.text(text), "BBBBCCACBB")

      // with tombstones it's
      // BBBB.C..C.AC.BB
      assert.deepEqual(doc1.attribute(text, h1, [h2,h3]), [
        { add: [ {start:0, end: 4}, { start: 8, end: 10 } ], del: [ { pos: 4, val: 'A' }, { pos: 5, val: 'AA' }, { pos: 6, val: 'A' } ] },
        { add: [ {start:4, end: 6}, { start: 7, end: 8 } ], del: [ { pos: 5, val: 'A' }, { pos: 6, val: 'A' }, { pos: 8, val: 'A' } ] }
      ])
    })

    it('should not include attribution of text that is inserted and deleted only within change sets', () => {
      let doc1 = create()
      let text = doc1.set_object("_root", "notes","hello little world")
      let h1 = doc1.getHeads();

      let doc2 = doc1.fork();
      doc2.splice(text, 5, 7, " big");
      doc2.splice(text, 9, 0, " bad");
      doc2.splice(text, 9, 4)
      doc2.text(text)
      let h2 = doc2.getHeads();
      assert.deepEqual(doc2.text(text), "hello big world")

      let doc3 = doc1.fork();
      doc3.splice(text, 0, 0, "Well, HI THERE");
      doc3.splice(text, 6, 8, "")
      let h3 = doc3.getHeads();
      assert.deepEqual(doc3.text(text), "Well, hello little world")

      doc1.merge(doc2)
      doc1.merge(doc3)
      assert.deepEqual(doc1.text(text), "Well, hello big world")
      let attribute = doc1.attribute(text, h1, [h2, h3])

      assert.deepEqual(attribute, [
        { add: [ { start: 11, end: 15 } ], del: [ { pos: 15, val: ' little' } ] },
        { add: [ { start: 0, end: 6 } ], del: [] }
      ])
    })

  })
  describe('attribute2', () => {
    it('should be able to attribute text segments on change sets', () => {
      let doc1 = create("aaaa")
      let text = doc1.set_object("_root", "notes","hello little world")
      let h1 = doc1.getHeads();

      let doc2 = doc1.fork("bbbb");
      doc2.splice(text, 5, 7, " big");
      doc2.text(text)
      let h2 = doc2.getHeads();
      assert.deepEqual(doc2.text(text), "hello big world")

      let doc3 = doc1.fork("cccc");
      doc3.splice(text, 0, 0, "Well, ");
      let doc4 = doc3.fork("dddd")
      doc4.splice(text, 0, 0, "Gee, ");
      let h3 = doc4.getHeads();
      assert.deepEqual(doc4.text(text), "Gee, Well, hello little world")

      doc1.merge(doc2)
      doc1.merge(doc4)
      assert.deepEqual(doc1.text(text), "Gee, Well, hello big world")
      let attribute = doc1.attribute2(text, h1, [h2, h3])

      assert.deepEqual(attribute, [
        { add: [ { actor: "bbbb", start: 16, end: 20 } ], del: [ { actor: "bbbb", pos: 20, val: ' little' } ] },
        { add: [ { actor: "dddd", start:0, end: 5 }, { actor: "cccc", start: 5, end: 11 } ], del: [] }
      ])
    })

    it('should not include attribution of text that is inserted and deleted only within change sets', () => {
      let doc1 = create("aaaa")
      let text = doc1.set_object("_root", "notes","hello little world")
      let h1 = doc1.getHeads();

      let doc2 = doc1.fork("bbbb");
      doc2.splice(text, 5, 7, " big");
      doc2.splice(text, 9, 0, " bad");
      doc2.splice(text, 9, 4)
      doc2.text(text)
      let h2 = doc2.getHeads();
      assert.deepEqual(doc2.text(text), "hello big world")

      let doc3 = doc1.fork("cccc");
      doc3.splice(text, 0, 0, "Well, HI THERE");
      doc3.splice(text, 6, 8, "")
      let h3 = doc3.getHeads();
      assert.deepEqual(doc3.text(text), "Well, hello little world")

      doc1.merge(doc2)
      doc1.merge(doc3)
      assert.deepEqual(doc1.text(text), "Well, hello big world")
      let attribute = doc1.attribute2(text, h1, [h2, h3])

      assert.deepEqual(attribute, [
        { add: [ { start: 11, end: 15, actor: "bbbb" } ], del: [ { pos: 15, val: ' little', actor: "bbbb" } ] },
        { add: [ { start: 0, end: 6, actor: "cccc" } ], del: [] }
      ])

      let h4 = doc1.getHeads()

      doc3.splice(text, 24, 0, "!!!")
      doc1.merge(doc3)

      let h5 = doc1.getHeads()

      assert.deepEqual(doc1.text(text), "Well, hello big world!!!")
      attribute = doc1.attribute2(text, h4, [h5])

      assert.deepEqual(attribute, [
        { add: [ { start: 21, end: 24, actor: "cccc" } ], del: [] },
        { add: [], del: [] }
      ])
    })
  })
})
automerge-wasm/test/helpers/columnar.js (new file, 1415 lines; diff suppressed because it is too large)
|
|
@ -1,5 +1,5 @@
|
||||||
function isObject(obj) {
|
function isObject(obj) {
|
||||||
return typeof obj === "object" && obj !== null
|
return typeof obj === 'object' && obj !== null
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
@ -20,11 +20,11 @@ function copyObject(obj) {
|
||||||
* with an actor ID, separated by an `@` sign) and returns an object `{counter, actorId}`.
|
* with an actor ID, separated by an `@` sign) and returns an object `{counter, actorId}`.
|
||||||
*/
|
*/
|
||||||
function parseOpId(opId) {
|
function parseOpId(opId) {
|
||||||
const match = /^(\d+)@(.*)$/.exec(opId || "")
|
const match = /^(\d+)@(.*)$/.exec(opId || '')
|
||||||
if (!match) {
|
if (!match) {
|
||||||
throw new RangeError(`Not a valid opId: ${opId}`)
|
throw new RangeError(`Not a valid opId: ${opId}`)
|
||||||
}
|
}
|
||||||
return { counter: parseInt(match[1], 10), actorId: match[2] }
|
return {counter: parseInt(match[1], 10), actorId: match[2]}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
@ -32,7 +32,7 @@ function parseOpId(opId) {
|
||||||
*/
|
*/
|
||||||
function equalBytes(array1, array2) {
|
function equalBytes(array1, array2) {
|
||||||
if (!(array1 instanceof Uint8Array) || !(array2 instanceof Uint8Array)) {
|
if (!(array1 instanceof Uint8Array) || !(array2 instanceof Uint8Array)) {
|
||||||
throw new TypeError("equalBytes can only compare Uint8Arrays")
|
throw new TypeError('equalBytes can only compare Uint8Arrays')
|
||||||
}
|
}
|
||||||
if (array1.byteLength !== array2.byteLength) return false
|
if (array1.byteLength !== array2.byteLength) return false
|
||||||
for (let i = 0; i < array1.byteLength; i++) {
|
for (let i = 0; i < array1.byteLength; i++) {
|
||||||
|
|
@ -41,19 +41,6 @@ function equalBytes(array1, array2) {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Creates an array containing the value `null` repeated `length` times.
|
|
||||||
*/
|
|
||||||
function createArrayOfNulls(length) {
|
|
||||||
const array = new Array(length)
|
|
||||||
for (let i = 0; i < length; i++) array[i] = null
|
|
||||||
return array
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
isObject,
|
isObject, copyObject, parseOpId, equalBytes
|
||||||
copyObject,
|
|
||||||
parseOpId,
|
|
||||||
equalBytes,
|
|
||||||
createArrayOfNulls,
|
|
||||||
}
|
}
|
||||||
|
|
@@ -6,7 +6,7 @@
 * https://github.com/anonyco/FastestSmallestTextEncoderDecoder
 */
const utf8encoder = new TextEncoder()
const utf8decoder = new TextDecoder('utf-8')

function stringToUtf8(string) {
  return utf8encoder.encode(string)
@@ -20,48 +20,30 @@ function utf8ToString(buffer) {
 * Converts a string consisting of hexadecimal digits into an Uint8Array.
 */
function hexStringToBytes(value) {
  if (typeof value !== 'string') {
    throw new TypeError('value is not a string')
  }
  if (!/^([0-9a-f][0-9a-f])*$/.test(value)) {
    throw new RangeError('value is not hexadecimal')
  }
  if (value === '') {
    return new Uint8Array(0)
  } else {
    return new Uint8Array(value.match(/../g).map(b => parseInt(b, 16)))
  }
}

const NIBBLE_TO_HEX = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f']
const BYTE_TO_HEX = new Array(256)
for (let i = 0; i < 256; i++) {
  BYTE_TO_HEX[i] = `${NIBBLE_TO_HEX[(i >>> 4) & 0xf]}${NIBBLE_TO_HEX[i & 0xf]}`
}

/**
 * Converts a Uint8Array into the equivalent hexadecimal string.
 */
function bytesToHexString(bytes) {
  let hex = '', len = bytes.byteLength
  for (let i = 0; i < len; i++) {
    hex += BYTE_TO_HEX[bytes[i]]
  }
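
// (Illustrative round trip, using the two functions above:
//   hexStringToBytes('00ff')                => Uint8Array [0x00, 0xff]
//   bytesToHexString(Uint8Array.of(0, 255)) => '00ff'
// The BYTE_TO_HEX table precomputes all 256 byte strings so the hot loop
// avoids a toString(16) call per byte.)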
@@ -113,17 +95,14 @@ class Encoder {
   * appends it to the buffer. Returns the number of bytes written.
   */
  appendUint32(value) {
    if (!Number.isInteger(value)) throw new RangeError('value is not an integer')
    if (value < 0 || value > 0xffffffff) throw new RangeError('number out of range')

    const numBytes = Math.max(1, Math.ceil((32 - Math.clz32(value)) / 7))
    if (this.offset + numBytes > this.buf.byteLength) this.grow()

    for (let i = 0; i < numBytes; i++) {
      this.buf[this.offset + i] = (value & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80)
      value >>>= 7 // zero-filling right shift
    }
    this.offset += numBytes
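
    // Aside, for illustration: LEB128 stores 7 value bits per byte, least
    // significant group first, with the top bit as a continuation flag.
    // For example 300 = 0b10_0101100 encodes as [0xac, 0x02]: 0x2c | 0x80
    // carries the low 7 bits, then 0x02 carries the rest. Math.clz32 counts
    // leading zero bits, so numBytes is ceil(significantBits / 7), minimum 1.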
@@ -136,19 +115,14 @@ class Encoder {
   * it to the buffer. Returns the number of bytes written.
   */
  appendInt32(value) {
    if (!Number.isInteger(value)) throw new RangeError('value is not an integer')
    if (value < -0x80000000 || value > 0x7fffffff) throw new RangeError('number out of range')

    const numBytes = Math.ceil((33 - Math.clz32(value >= 0 ? value : -value - 1)) / 7)
    if (this.offset + numBytes > this.buf.byteLength) this.grow()

    for (let i = 0; i < numBytes; i++) {
      this.buf[this.offset + i] = (value & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80)
      value >>= 7 // sign-propagating right shift
    }
    this.offset += numBytes
@@ -161,10 +135,9 @@ class Encoder {
   * (53 bits).
   */
  appendUint53(value) {
    if (!Number.isInteger(value)) throw new RangeError('value is not an integer')
    if (value < 0 || value > Number.MAX_SAFE_INTEGER) {
      throw new RangeError('number out of range')
    }
    const high32 = Math.floor(value / 0x100000000)
    const low32 = (value & 0xffffffff) >>> 0 // right shift to interpret as unsigned
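
    // Aside, for illustration: JavaScript numbers are IEEE 754 doubles, so
    // integers are only exact up to 2^53 - 1 (Number.MAX_SAFE_INTEGER).
    // The 53-bit methods split the value into two 32-bit halves, since JS
    // bitwise operators truncate to 32 bits, and hand them to the 64-bit
    // methods below.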
@@ -177,10 +150,9 @@ class Encoder {
   * (53 bits).
   */
  appendInt53(value) {
    if (!Number.isInteger(value)) throw new RangeError('value is not an integer')
    if (value < Number.MIN_SAFE_INTEGER || value > Number.MAX_SAFE_INTEGER) {
      throw new RangeError('number out of range')
    }
    const high32 = Math.floor(value / 0x100000000)
    const low32 = (value & 0xffffffff) >>> 0 // right shift to interpret as unsigned
@@ -195,10 +167,10 @@ class Encoder {
   */
  appendUint64(high32, low32) {
    if (!Number.isInteger(high32) || !Number.isInteger(low32)) {
      throw new RangeError('value is not an integer')
    }
    if (high32 < 0 || high32 > 0xffffffff || low32 < 0 || low32 > 0xffffffff) {
      throw new RangeError('number out of range')
    }
    if (high32 === 0) return this.appendUint32(low32)
@@ -208,12 +180,10 @@ class Encoder {
      this.buf[this.offset + i] = (low32 & 0x7f) | 0x80
      low32 >>>= 7 // zero-filling right shift
    }
    this.buf[this.offset + 4] = (low32 & 0x0f) | ((high32 & 0x07) << 4) | (numBytes === 5 ? 0x00 : 0x80)
    high32 >>>= 3
    for (let i = 5; i < numBytes; i++) {
      this.buf[this.offset + i] = (high32 & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80)
      high32 >>>= 7
    }
    this.offset += numBytes
@@ -230,35 +200,25 @@ class Encoder {
   */
  appendInt64(high32, low32) {
    if (!Number.isInteger(high32) || !Number.isInteger(low32)) {
      throw new RangeError('value is not an integer')
    }
    if (high32 < -0x80000000 || high32 > 0x7fffffff || low32 < -0x80000000 || low32 > 0xffffffff) {
      throw new RangeError('number out of range')
    }
    low32 >>>= 0 // interpret as unsigned
    if (high32 === 0 && low32 <= 0x7fffffff) return this.appendInt32(low32)
    if (high32 === -1 && low32 >= 0x80000000) return this.appendInt32(low32 - 0x100000000)

    const numBytes = Math.ceil((65 - Math.clz32(high32 >= 0 ? high32 : -high32 - 1)) / 7)
    if (this.offset + numBytes > this.buf.byteLength) this.grow()
    for (let i = 0; i < 4; i++) {
      this.buf[this.offset + i] = (low32 & 0x7f) | 0x80
      low32 >>>= 7 // zero-filling right shift
    }
    this.buf[this.offset + 4] = (low32 & 0x0f) | ((high32 & 0x07) << 4) | (numBytes === 5 ? 0x00 : 0x80)
    high32 >>= 3 // sign-propagating right shift
    for (let i = 5; i < numBytes; i++) {
      this.buf[this.offset + i] = (high32 & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80)
      high32 >>= 7
    }
    this.offset += numBytes
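
    // Aside, for illustration: the 64-bit value arrives as two 32-bit halves
    // because JS bitwise operators truncate to 32 bits. The first four output
    // bytes take 7 bits each from low32 (28 bits); the fifth byte packs
    // low32's remaining 4 bits with the low 3 bits of high32; any later
    // bytes continue with 7 bits of high32 at a time.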
@@ -283,7 +243,7 @@ class Encoder {
   * number of bytes appended.
   */
  appendRawString(value) {
    if (typeof value !== 'string') throw new TypeError('value is not a string')
    return this.appendRawBytes(stringToUtf8(value))
  }
@@ -302,7 +262,7 @@ class Encoder {
   * (where the length is encoded as an unsigned LEB128 integer).
   */
  appendPrefixedString(value) {
    if (typeof value !== 'string') throw new TypeError('value is not a string')
    this.appendPrefixedBytes(stringToUtf8(value))
    return this
  }
@@ -321,7 +281,8 @@ class Encoder {
   * Flushes any unwritten data to the buffer. Call this before reading from
   * the buffer constructed by this Encoder.
   */
  finish() {
  }
}

/**
@@ -360,7 +321,7 @@ class Decoder {
   */
  skip(bytes) {
    if (this.offset + bytes > this.buf.byteLength) {
      throw new RangeError('cannot skip beyond end of buffer')
    }
    this.offset += bytes
  }
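
  // Aside, for illustration: the read methods below mirror the append
  // methods above, consuming 7 value bits per byte until they reach a byte
  // with the continuation bit (0x80) clear. Once shift reaches 28, four
  // bytes are already consumed, so a fifth byte may only carry the top bits
  // of a 32-bit value; anything else is rejected as out of range.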
@@ -378,20 +339,18 @@ class Decoder {
   * Throws an exception if the value doesn't fit in a 32-bit unsigned int.
   */
  readUint32() {
    let result = 0, shift = 0
    while (this.offset < this.buf.byteLength) {
      const nextByte = this.buf[this.offset]
      if (shift === 28 && (nextByte & 0xf0) !== 0) { // more than 5 bytes, or value > 0xffffffff
        throw new RangeError('number out of range')
      }
      result = (result | (nextByte & 0x7f) << shift) >>> 0 // right shift to interpret value as unsigned
      shift += 7
      this.offset++
      if ((nextByte & 0x80) === 0) return result
    }
    throw new RangeError('buffer ended with incomplete number')
  }

  /**
@@ -399,17 +358,13 @@ class Decoder {
   * Throws an exception if the value doesn't fit in a 32-bit signed int.
   */
  readInt32() {
    let result = 0, shift = 0
    while (this.offset < this.buf.byteLength) {
      const nextByte = this.buf[this.offset]
      if ((shift === 28 && (nextByte & 0x80) !== 0) || // more than 5 bytes
          (shift === 28 && (nextByte & 0x40) === 0 && (nextByte & 0x38) !== 0) || // positive int > 0x7fffffff
          (shift === 28 && (nextByte & 0x40) !== 0 && (nextByte & 0x38) !== 0x38)) { // negative int < -0x80000000
        throw new RangeError('number out of range')
      }
      result |= (nextByte & 0x7f) << shift
      shift += 7
@@ -423,7 +378,7 @@ class Decoder {
        }
      }
    }
    throw new RangeError('buffer ended with incomplete number')
  }

  /**
@@ -434,7 +389,7 @@ class Decoder {
  readUint53() {
    const { low32, high32 } = this.readUint64()
    if (high32 < 0 || high32 > 0x1fffff) {
      throw new RangeError('number out of range')
    }
    return high32 * 0x100000000 + low32
  }
@@ -446,12 +401,8 @@ class Decoder {
   */
  readInt53() {
    const { low32, high32 } = this.readInt64()
    if (high32 < -0x200000 || (high32 === -0x200000 && low32 === 0) || high32 > 0x1fffff) {
      throw new RangeError('number out of range')
    }
    return high32 * 0x100000000 + low32
  }
@@ -463,12 +414,10 @@ class Decoder {
   * `{high32, low32}`.
   */
  readUint64() {
    let low32 = 0, high32 = 0, shift = 0
    while (this.offset < this.buf.byteLength && shift <= 28) {
      const nextByte = this.buf[this.offset]
      low32 = (low32 | (nextByte & 0x7f) << shift) >>> 0 // right shift to interpret value as unsigned
      if (shift === 28) {
        high32 = (nextByte & 0x70) >>> 4
      }
@@ -480,16 +429,15 @@ class Decoder {
    shift = 3
    while (this.offset < this.buf.byteLength) {
      const nextByte = this.buf[this.offset]
      if (shift === 31 && (nextByte & 0xfe) !== 0) { // more than 10 bytes, or value > 2^64 - 1
        throw new RangeError('number out of range')
      }
      high32 = (high32 | (nextByte & 0x7f) << shift) >>> 0
      shift += 7
      this.offset++
      if ((nextByte & 0x80) === 0) return { high32, low32 }
    }
    throw new RangeError('buffer ended with incomplete number')
  }

  /**
@@ -500,20 +448,17 @@ class Decoder {
   * sign of the `high32` half indicates the sign of the 64-bit number.
   */
  readInt64() {
    let low32 = 0, high32 = 0, shift = 0
    while (this.offset < this.buf.byteLength && shift <= 28) {
      const nextByte = this.buf[this.offset]
      low32 = (low32 | (nextByte & 0x7f) << shift) >>> 0 // right shift to interpret value as unsigned
      if (shift === 28) {
        high32 = (nextByte & 0x70) >>> 4
      }
      shift += 7
      this.offset++
      if ((nextByte & 0x80) === 0) {
        if ((nextByte & 0x40) !== 0) { // sign-extend negative integer
          if (shift < 32) low32 = (low32 | (-1 << shift)) >>> 0
          high32 |= -1 << Math.max(shift - 32, 0)
        }
@@ -527,20 +472,19 @@ class Decoder {
      // On the 10th byte there are only two valid values: all 7 value bits zero
      // (if the value is positive) or all 7 bits one (if the value is negative)
      if (shift === 31 && nextByte !== 0 && nextByte !== 0x7f) {
        throw new RangeError('number out of range')
      }
      high32 |= (nextByte & 0x7f) << shift
      shift += 7
      this.offset++
      if ((nextByte & 0x80) === 0) {
        if ((nextByte & 0x40) !== 0 && shift < 32) { // sign-extend negative integer
          high32 |= -1 << shift
        }
        return { high32, low32 }
      }
    }
    throw new RangeError('buffer ended with incomplete number')
  }

  /**
@@ -550,7 +494,7 @@ class Decoder {
  readRawBytes(length) {
    const start = this.offset
    if (start + length > this.buf.byteLength) {
      throw new RangeError('subarray exceeds buffer size')
    }
    this.offset += length
    return this.buf.subarray(start, this.offset)
@@ -615,7 +559,7 @@ class RLEEncoder extends Encoder {
  constructor(type) {
    super()
    this.type = type
    this.state = 'empty'
    this.lastValue = undefined
    this.count = 0
    this.literal = []
@@ -634,81 +578,76 @@ class RLEEncoder extends Encoder {
   */
  _appendValue(value, repetitions = 1) {
    if (repetitions <= 0) return
    if (this.state === 'empty') {
      this.state = (value === null ? 'nulls' : (repetitions === 1 ? 'loneValue' : 'repetition'))
      this.lastValue = value
      this.count = repetitions
    } else if (this.state === 'loneValue') {
      if (value === null) {
        this.flush()
        this.state = 'nulls'
        this.count = repetitions
      } else if (value === this.lastValue) {
        this.state = 'repetition'
        this.count = 1 + repetitions
      } else if (repetitions > 1) {
        this.flush()
        this.state = 'repetition'
        this.count = repetitions
        this.lastValue = value
      } else {
        this.state = 'literal'
        this.literal = [this.lastValue]
        this.lastValue = value
      }
    } else if (this.state === 'repetition') {
      if (value === null) {
        this.flush()
        this.state = 'nulls'
        this.count = repetitions
      } else if (value === this.lastValue) {
        this.count += repetitions
      } else if (repetitions > 1) {
        this.flush()
        this.state = 'repetition'
        this.count = repetitions
        this.lastValue = value
      } else {
        this.flush()
        this.state = 'loneValue'
        this.lastValue = value
      }
    } else if (this.state === 'literal') {
      if (value === null) {
        this.literal.push(this.lastValue)
        this.flush()
        this.state = 'nulls'
        this.count = repetitions
      } else if (value === this.lastValue) {
        this.flush()
        this.state = 'repetition'
        this.count = 1 + repetitions
      } else if (repetitions > 1) {
        this.literal.push(this.lastValue)
        this.flush()
        this.state = 'repetition'
        this.count = repetitions
        this.lastValue = value
      } else {
        this.literal.push(this.lastValue)
        this.lastValue = value
      }
    } else if (this.state === 'nulls') {
      if (value === null) {
        this.count += repetitions
      } else if (repetitions > 1) {
        this.flush()
        this.state = 'repetition'
        this.count = repetitions
        this.lastValue = value
      } else {
        this.flush()
        this.state = 'loneValue'
        this.lastValue = value
      }
    }
@@ -727,16 +666,13 @@ class RLEEncoder extends Encoder {
   */
  copyFrom(decoder, options = {}) {
    const { count, sumValues, sumShift } = options
    if (!(decoder instanceof RLEDecoder) || (decoder.type !== this.type)) {
      throw new TypeError('incompatible type of decoder')
    }
    let remaining = (typeof count === 'number' ? count : Number.MAX_SAFE_INTEGER)
    let nonNullValues = 0, sum = 0
    if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`)
    if (remaining === 0 || decoder.done) return sumValues ? {nonNullValues, sum} : {nonNullValues}

    // Copy a value so that we have a well-defined starting state. NB: when super.copyFrom() is
    // called by the DeltaEncoder subclass, the following calls to readValue() and appendValue()
@@ -748,101 +684,87 @@ class RLEEncoder extends Encoder {
      remaining -= numNulls
      decoder.count -= numNulls - 1
      this.appendValue(null, numNulls)
      if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`)
      if (remaining === 0 || decoder.done) return sumValues ? {nonNullValues, sum} : {nonNullValues}
      firstValue = decoder.readValue()
      if (firstValue === null) throw new RangeError('null run must be followed by non-null value')
    }
    this.appendValue(firstValue)
    remaining--
    nonNullValues++
    if (sumValues) sum += (sumShift ? (firstValue >>> sumShift) : firstValue)
    if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`)
    if (remaining === 0 || decoder.done) return sumValues ? {nonNullValues, sum} : {nonNullValues}

    // Copy data at the record level without expanding repetitions
    let firstRun = (decoder.count > 0)
    while (remaining > 0 && !decoder.done) {
      if (!firstRun) decoder.readRecord()
      const numValues = Math.min(decoder.count, remaining)
      decoder.count -= numValues

      if (decoder.state === 'literal') {
        nonNullValues += numValues
        for (let i = 0; i < numValues; i++) {
          if (decoder.done) throw new RangeError('incomplete literal')
          const value = decoder.readRawValue()
          if (value === decoder.lastValue) throw new RangeError('Repetition of values is not allowed in literal')
          decoder.lastValue = value
          this._appendValue(value)
          if (sumValues) sum += (sumShift ? (value >>> sumShift) : value)
        }
      } else if (decoder.state === 'repetition') {
        nonNullValues += numValues
        if (sumValues) sum += numValues * (sumShift ? (decoder.lastValue >>> sumShift) : decoder.lastValue)
        const value = decoder.lastValue
        this._appendValue(value)
        if (numValues > 1) {
          this._appendValue(value)
          if (this.state !== 'repetition') throw new RangeError(`Unexpected state ${this.state}`)
          this.count += numValues - 2
        }
      } else if (decoder.state === 'nulls') {
        this._appendValue(null)
        if (this.state !== 'nulls') throw new RangeError(`Unexpected state ${this.state}`)
        this.count += numValues - 1
      }

      firstRun = false
      remaining -= numValues
    }
    if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`)
    return sumValues ? {nonNullValues, sum} : {nonNullValues}
  }

  /**
   * Private method, do not call from outside the class.
   */
  flush() {
    if (this.state === 'loneValue') {
      this.appendInt32(-1)
      this.appendRawValue(this.lastValue)
    } else if (this.state === 'repetition') {
      this.appendInt53(this.count)
      this.appendRawValue(this.lastValue)
    } else if (this.state === 'literal') {
      this.appendInt53(-this.literal.length)
      for (let v of this.literal) this.appendRawValue(v)
    } else if (this.state === 'nulls') {
      this.appendInt32(0)
      this.appendUint53(this.count)
    }
    this.state = 'empty'
  }

  /**
   * Private method, do not call from outside the class.
   */
  appendRawValue(value) {
    if (this.type === 'int') {
      this.appendInt53(value)
    } else if (this.type === 'uint') {
      this.appendUint53(value)
    } else if (this.type === 'utf8') {
      this.appendPrefixedString(value)
    } else {
      throw new RangeError(`Unknown RLEEncoder datatype: ${this.type}`)
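
      // Aside, for illustration of the record format that flush() writes: a
      // positive int53 count followed by one value means that value repeats
      // count times; a negative count introduces a literal run of that many
      // distinct values; a zero marker followed by a uint53 is a run of
      // nulls. For type 'int', [5, 7] decodes to five 7s, [-2, 7, 8] to 7
      // then 8, and [0, 4] to four nulls.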
@@ -854,9 +776,9 @@ class RLEEncoder extends Encoder {
   * the buffer constructed by this Encoder.
   */
  finish() {
    if (this.state === 'literal') this.literal.push(this.lastValue)
    // Don't write anything if the only values we have seen are nulls
    if (this.state !== 'nulls' || this.offset > 0) this.flush()
  }
}

@@ -878,7 +800,7 @@ class RLEDecoder extends Decoder {
   * position, and true if we are at the end of the buffer.
   */
  get done() {
    return (this.count === 0) && (this.offset === this.buf.byteLength)
  }

  /**
@@ -899,10 +821,9 @@ class RLEDecoder extends Decoder {
    if (this.done) return null
    if (this.count === 0) this.readRecord()
    this.count -= 1
    if (this.state === 'literal') {
      const value = this.readRawValue()
      if (value === this.lastValue) throw new RangeError('Repetition of values is not allowed in literal')
      this.lastValue = value
      return value
    } else {
@@ -918,22 +839,20 @@ class RLEDecoder extends Decoder {
      if (this.count === 0) {
        this.count = this.readInt53()
        if (this.count > 0) {
          this.lastValue = (this.count <= numSkip) ? this.skipRawValues(1) : this.readRawValue()
          this.state = 'repetition'
        } else if (this.count < 0) {
          this.count = -this.count
          this.state = 'literal'
        } else { // this.count == 0
          this.count = this.readUint53()
          this.lastValue = null
          this.state = 'nulls'
        }
      }

      const consume = Math.min(numSkip, this.count)
      if (this.state === 'literal') this.skipRawValues(consume)
      numSkip -= consume
      this.count -= consume
    }
@@ -947,34 +866,23 @@ class RLEDecoder extends Decoder {
    this.count = this.readInt53()
    if (this.count > 1) {
      const value = this.readRawValue()
      if ((this.state === 'repetition' || this.state === 'literal') && this.lastValue === value) {
        throw new RangeError('Successive repetitions with the same value are not allowed')
      }
      this.state = 'repetition'
      this.lastValue = value
    } else if (this.count === 1) {
      throw new RangeError('Repetition count of 1 is not allowed, use a literal instead')
    } else if (this.count < 0) {
      this.count = -this.count
      if (this.state === 'literal') throw new RangeError('Successive literals are not allowed')
      this.state = 'literal'
    } else { // this.count == 0
      if (this.state === 'nulls') throw new RangeError('Successive null runs are not allowed')
      this.count = this.readUint53()
      if (this.count === 0) throw new RangeError('Zero-length null runs are not allowed')
      this.lastValue = null
      this.state = 'nulls'
    }
  }

@@ -983,11 +891,11 @@ class RLEDecoder extends Decoder {
   * Reads one value of the datatype configured on construction.
   */
  readRawValue() {
    if (this.type === 'int') {
      return this.readInt53()
    } else if (this.type === 'uint') {
      return this.readUint53()
    } else if (this.type === 'utf8') {
      return this.readPrefixedString()
    } else {
      throw new RangeError(`Unknown RLEDecoder datatype: ${this.type}`)
@@ -999,14 +907,14 @@ class RLEDecoder extends Decoder {
   * Skips over `num` values of the datatype configured on construction.
   */
  skipRawValues(num) {
    if (this.type === 'utf8') {
      for (let i = 0; i < num; i++) this.skip(this.readUint53())
    } else {
      while (num > 0 && this.offset < this.buf.byteLength) {
        if ((this.buf[this.offset] & 0x80) === 0) num--
        this.offset++
      }
      if (num > 0) throw new RangeError('cannot skip beyond end of buffer')
    }
  }
}

@@ -1023,7 +931,7 @@ class RLEDecoder extends Decoder {
 */
class DeltaEncoder extends RLEEncoder {
  constructor() {
    super('int')
    this.absoluteValue = 0
  }

@@ -1033,7 +941,7 @@ class DeltaEncoder extends RLEEncoder {
   */
  appendValue(value, repetitions = 1) {
    if (repetitions <= 0) return
    if (typeof value === 'number') {
      super.appendValue(value - this.absoluteValue, 1)
      this.absoluteValue = value
      if (repetitions > 1) super.appendValue(0, repetitions - 1)
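
      // Aside, for illustration: DeltaEncoder stores the difference from the
      // previous value and run-length encodes the deltas. The ascending
      // sequence 10, 11, 12, 13 becomes the deltas 10, 1, 1, 1, which the
      // RLE layer compresses to a 3-fold repetition of 1 after the initial 10.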
@@ -1049,29 +957,26 @@ class DeltaEncoder extends RLEEncoder {
   */
  copyFrom(decoder, options = {}) {
    if (options.sumValues) {
      throw new RangeError('unsupported options for DeltaEncoder.copyFrom()')
    }
    if (!(decoder instanceof DeltaDecoder)) {
      throw new TypeError('incompatible type of decoder')
    }

    let remaining = options.count
    if (remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${remaining} values`)
    if (remaining === 0 || decoder.done) return

    // Copy any null values, and the first non-null value, so that appendValue() computes the
    // difference between the encoder's last value and the decoder's first (absolute) value.
    let value = decoder.readValue(), nulls = 0
    this.appendValue(value)
    if (value === null) {
      nulls = decoder.count + 1
      if (remaining !== undefined && remaining < nulls) nulls = remaining
      decoder.count -= nulls - 1
      this.count += nulls - 1
      if (remaining > nulls && decoder.done) throw new RangeError(`cannot copy ${remaining} values`)
      if (remaining === nulls || decoder.done) return

      // The next value read is certain to be non-null because we're not at the end of the decoder,
@@ -1084,10 +989,7 @@ class DeltaEncoder extends RLEEncoder {
    // value, while subsequent values are relative. Thus, the sum of all of the (non-null) copied
    // values must equal the absolute value of the final element copied.
    if (remaining !== undefined) remaining -= nulls + 1
    const { nonNullValues, sum } = super.copyFrom(decoder, {count: remaining, sumValues: true})
    if (nonNullValues > 0) {
      this.absoluteValue = sum
      decoder.absoluteValue = sum
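      // Aside, for illustration: because the first copied value is absolute
      // and the rest are deltas, the running sum returned by RLEEncoder's
      // copyFrom equals the absolute value of the last element copied, which
      // is why encoder and decoder can both resync their absoluteValue here.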
@@ -1101,7 +1003,7 @@ class DeltaEncoder extends RLEEncoder {
 */
class DeltaDecoder extends RLEDecoder {
  constructor(buffer) {
    super('int', buffer)
    this.absoluteValue = 0
  }

@@ -1134,12 +1036,12 @@ class DeltaDecoder extends RLEDecoder {
    while (numSkip > 0 && !this.done) {
      if (this.count === 0) this.readRecord()
      const consume = Math.min(numSkip, this.count)
      if (this.state === 'literal') {
        for (let i = 0; i < consume; i++) {
          this.lastValue = this.readRawValue()
          this.absoluteValue += this.lastValue
        }
      } else if (this.state === 'repetition') {
        this.absoluteValue += consume * this.lastValue
      }
      numSkip -= consume
@@ -1188,13 +1090,12 @@ class BooleanEncoder extends Encoder {
   */
  copyFrom(decoder, options = {}) {
    if (!(decoder instanceof BooleanDecoder)) {
      throw new TypeError('incompatible type of decoder')
    }

    const { count } = options
    let remaining = (typeof count === 'number' ? count : Number.MAX_SAFE_INTEGER)
    if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`)
    if (remaining === 0 || decoder.done) return

    // Copy one value to bring decoder and encoder state into sync, then finish that value's repetitions
@@ -1207,8 +1108,7 @@ class BooleanEncoder extends Encoder {

    while (remaining > 0 && !decoder.done) {
      decoder.count = decoder.readUint53()
      if (decoder.count === 0) throw new RangeError('Zero-length runs are not allowed')
      decoder.lastValue = !decoder.lastValue
      this.appendUint53(this.count)

@@ -1219,8 +1119,7 @@ class BooleanEncoder extends Encoder {
      remaining -= numCopied
    }

    if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`)
  }

  /**
@@ -1252,7 +1151,7 @@ class BooleanDecoder extends Decoder {
   * position, and true if we are at the end of the buffer.
   */
  get done() {
    return (this.count === 0) && (this.offset === this.buf.byteLength)
  }

  /**
@@ -1275,7 +1174,7 @@ class BooleanDecoder extends Decoder {
      this.count = this.readUint53()
      this.lastValue = !this.lastValue
      if (this.count === 0 && !this.firstRun) {
        throw new RangeError('Zero-length runs are not allowed')
      }
      this.firstRun = false
    }
@@ -1291,8 +1190,7 @@ class BooleanDecoder extends Decoder {
      if (this.count === 0) {
        this.count = this.readUint53()
        this.lastValue = !this.lastValue
        if (this.count === 0) throw new RangeError('Zero-length runs are not allowed')
      }
      if (this.count < numSkip) {
        numSkip -= this.count
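      // Aside, for illustration: booleans are encoded purely as a sequence
      // of run lengths for alternating values, so false,false,true,true,true
      // becomes [2, 3]. (This assumes the usual convention, set up in the
      // constructor not shown in these hunks, that the first run counts
      // false values; a sequence starting with true then begins with a
      // zero-length run, which is why firstRun permits a count of 0.)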
@@ -1306,16 +1204,6 @@ class BooleanDecoder extends Decoder {
}

module.exports = {
  stringToUtf8, utf8ToString, hexStringToBytes, bytesToHexString,
  Encoder, Decoder, RLEEncoder, RLEDecoder, DeltaEncoder, DeltaDecoder, BooleanEncoder, BooleanDecoder
}
automerge-wasm/test/marks.ts (new file, +203 lines)
@@ -0,0 +1,203 @@
import { describe, it } from 'mocha';
//@ts-ignore
import assert from 'assert'
//@ts-ignore
import { create, loadDoc, Automerge, encodeChange, decodeChange } from '..'

describe('Automerge', () => {
  describe('marks', () => {
    it('should handle marks [..]', () => {
      let doc = create()
      let list = doc.set_object("_root", "list", "")
      doc.splice(list, 0, 0, "aaabbbccc")
      doc.mark(list, "[3..6]", "bold", true)
      let spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]);
      doc.insert(list, 6, "A")
      doc.insert(list, 3, "A")
      spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'aaaA', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'Accc' ]);
    })
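
    // Aside, as these tests suggest: "[a..b]" marks characters a through b-1
    // with non-sticky ends, so text inserted exactly at a boundary falls
    // outside the mark; the "(a..b)" form used further down is sticky, and
    // insertions at either boundary become part of the marked span.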
|
|
||||||
|
    it('should handle marks [..] at the beginning of a string', () => {
      let doc = create()
      let list = doc.set_object("_root", "list", "")
      doc.splice(list, 0, 0, "aaabbbccc")
      doc.mark(list, "[0..3]", "bold", true)
      let spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ [ [ 'bold', 'boolean', true ] ], 'aaa', [], 'bbbccc' ]);

      let doc2 = doc.fork()
      doc2.insert(list, 0, "A")
      doc2.insert(list, 4, "B")
      doc.merge(doc2)
      spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'A', [ [ 'bold', 'boolean', true ] ], 'aaa', [], 'Bbbbccc' ]);
    })

    it('should handle marks [..] with splice', () => {
      let doc = create()
      let list = doc.set_object("_root", "list", "")
      doc.splice(list, 0, 0, "aaabbbccc")
      doc.mark(list, "[0..3]", "bold", true)
      let spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ [ [ 'bold', 'boolean', true ] ], 'aaa', [], 'bbbccc' ]);

      let doc2 = doc.fork()
      doc2.splice(list, 0, 2, "AAA")
      doc2.splice(list, 4, 0, "BBB")
      doc.merge(doc2)
      spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'AAA', [ [ 'bold', 'boolean', true ] ], 'a', [], 'BBBbbbccc' ]);
    })

    it('should handle marks across multiple forks', () => {
      let doc = create()
      let list = doc.set_object("_root", "list", "")
      doc.splice(list, 0, 0, "aaabbbccc")
      doc.mark(list, "[0..3]", "bold", true)
      let spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ [ [ 'bold', 'boolean', true ] ], 'aaa', [], 'bbbccc' ]);

      let doc2 = doc.fork()
      doc2.splice(list, 1, 1, "Z") // replace 'aaa' with 'aZa' inside mark.

      let doc3 = doc.fork()
      doc3.insert(list, 0, "AAA") // should not be included in mark.

      doc.merge(doc2)
      doc.merge(doc3)

      spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'AAA', [ [ 'bold', 'boolean', true ] ], 'aZa', [], 'bbbccc' ]);
    })

    it('should handle marks with deleted ends [..]', () => {
      let doc = create()
      let list = doc.set_object("_root", "list", "")

      doc.splice(list, 0, 0, "aaabbbccc")
      doc.mark(list, "[3..6]", "bold", true)
      let spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]);
      doc.del(list, 5);
      doc.del(list, 5);
      doc.del(list, 2);
      doc.del(list, 2);
      spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'b', [], 'cc' ])
      doc.insert(list, 3, "A")
      doc.insert(list, 2, "A")
      spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'aaA', [ [ 'bold', 'boolean', true ] ], 'b', [], 'Acc' ])
    })

    it('should handle sticky marks (..)', () => {
      let doc = create()
      let list = doc.set_object("_root", "list", "")
      doc.splice(list, 0, 0, "aaabbbccc")
      doc.mark(list, "(3..6)", "bold", true)
      let spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]);
      doc.insert(list, 6, "A")
      doc.insert(list, 3, "A")
      spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'AbbbA', [], 'ccc' ]);
    })

    it('should handle sticky marks with deleted ends (..)', () => {
      let doc = create()
      let list = doc.set_object("_root", "list", "")
      doc.splice(list, 0, 0, "aaabbbccc")
      doc.mark(list, "(3..6)", "bold", true)
      let spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]);
      doc.del(list, 5);
      doc.del(list, 5);
      doc.del(list, 2);
      doc.del(list, 2);
      spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'b', [], 'cc' ])
      doc.insert(list, 3, "A")
      doc.insert(list, 2, "A")
      spans = doc.spans(list);
      assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ])

      // make sure save/load can handle marks

      let doc2 = loadDoc(doc.save())
      spans = doc2.spans(list);
      assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ])

      assert.deepStrictEqual(doc.getHeads(), doc2.getHeads())
      assert.deepStrictEqual(doc.save(), doc2.save())
    })

    it('should handle overlapping marks', () => {
      let doc: Automerge = create("aabbcc")
      let list = doc.set_object("_root", "list", "")
      doc.splice(list, 0, 0, "the quick fox jumps over the lazy dog")
      doc.mark(list, "[0..37]", "bold", true)
      doc.mark(list, "[4..19]", "itallic", true)
      doc.mark(list, "[10..13]", "comment", "foxes are my favorite animal!")
      doc.commit("marks");
      let spans = doc.spans(list);
      assert.deepStrictEqual(spans,
        [
          [ [ 'bold', 'boolean', true ] ],
          'the ',
          [ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ],
          'quick ',
          [
            [ 'bold', 'boolean', true ],
            [ 'comment', 'str', 'foxes are my favorite animal!' ],
            [ 'itallic', 'boolean', true ]
          ],
          'fox',
          [ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ],
          ' jumps',
          [ [ 'bold', 'boolean', true ] ],
          ' over the lazy dog',
          [],
        ]
      )
      let text = doc.text(list);
      assert.deepStrictEqual(text, "the quick fox jumps over the lazy dog");
      let raw_spans = doc.raw_spans(list);
      assert.deepStrictEqual(raw_spans,
        [
          { id: "39@aabbcc", start: 0, end: 37, type: 'bold', value: true },
          { id: "41@aabbcc", start: 4, end: 19, type: 'itallic', value: true },
          { id: "43@aabbcc", start: 10, end: 13, type: 'comment', value: 'foxes are my favorite animal!' }
        ]);

      doc.unmark(list, "41@aabbcc")
      raw_spans = doc.raw_spans(list);
      assert.deepStrictEqual(raw_spans,
        [
          { id: "39@aabbcc", start: 0, end: 37, type: 'bold', value: true },
          { id: "43@aabbcc", start: 10, end: 13, type: 'comment', value: 'foxes are my favorite animal!' }
        ]);
      // make sure encode/decode can handle marks
      doc.unmark(list, "39@aabbcc")
      raw_spans = doc.raw_spans(list);
      assert.deepStrictEqual(raw_spans,
        [
          { id: "43@aabbcc", start: 10, end: 13, type: 'comment', value: 'foxes are my favorite animal!' }
        ]);

      let all = doc.getChanges([])
      let decoded = all.map((c) => decodeChange(c))
      let encoded = decoded.map((c) => encodeChange(c))
      let doc2 = create();
      doc2.applyChanges(encoded)

      doc.dump()
      doc2.dump()
      assert.deepStrictEqual(doc.spans(list), doc2.spans(list))
      assert.deepStrictEqual(doc.save(), doc2.save())
    })
  })
})
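The `[..]` and `(..)` range strings in the tests above control whether a mark's endpoints absorb characters inserted exactly at the boundary. A minimal hedged sketch of how that syntax might map onto the `Transactable::mark` signature introduced in `automerge/src/autocommit.rs` below; it assumes `ExId` is exported from the crate root and that `text` already holds a populated text object.

// Hedged sketch: bracket syntax from the JS tests mapped onto the Rust
// mark(obj, start, expand_start, end, expand_end, mark, value) signature.
// Assumes `ExId` is re-exported and `text` is the id of an existing text object.
use automerge::transaction::Transactable;
use automerge::{AutomergeError, ExId, ScalarValue};

fn apply_marks<T: Transactable>(tx: &mut T, text: &ExId) -> Result<(), AutomergeError> {
    // "[0..3]": closed ends - characters inserted at positions 0 or 3
    // stay outside the mark, as the first test above expects.
    tx.mark(text, 0, false, 3, false, "bold", ScalarValue::Boolean(true))?;
    // "(3..6)": sticky ends - inserts at positions 3 and 6 are absorbed
    // into the mark, matching the sticky-marks tests.
    tx.mark(text, 3, true, 6, true, "bold", ScalarValue::Boolean(true))?;
    Ok(())
}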
1480  automerge-wasm/test/test.ts  Normal file
File diff suppressed because it is too large
@@ -11,9 +11,7 @@
     "paths": { "dev": ["*"]},
     "rootDir": "",
     "target": "es2016",
-    "types": ["mocha", "node"],
-    "typeRoots": ["./dev/index.d.ts"]
+    "typeRoots": ["./index.d.ts"]
   },
   "include": ["test/**/*.ts"],
-  "exclude": ["dist/**/*"]
+  "exclude": ["dist/**/*", "examples/**/*"]
 }
42  automerge/Cargo.toml  Normal file
@@ -0,0 +1,42 @@
[package]
name = "automerge"
version = "0.1.0"
edition = "2021"
license = "MIT"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[features]
optree-visualisation = ["dot"]
wasm = ["js-sys", "wasm-bindgen"]

[dependencies]
hex = "^0.4.3"
leb128 = "^0.2.5"
sha2 = "^0.10.0"
rand = { version = "^0.8.4" }
thiserror = "^1.0.16"
itertools = "^0.10.3"
flate2 = "^1.0.22"
nonzero_ext = "^0.2.0"
uuid = { version = "^0.8.2", features=["v4", "wasm-bindgen", "serde"] }
smol_str = "^0.1.21"
tracing = { version = "^0.1.29", features = ["log"] }
fxhash = "^0.2.1"
tinyvec = { version = "^1.5.1", features = ["alloc"] }
unicode-segmentation = "1.7.1"
serde = { version = "^1.0", features=["derive"] }
dot = { version = "0.1.4", optional = true }
js-sys = { version = "^0.3", optional = true }
wasm-bindgen = { version = "^0.2", optional = true }

[dependencies.web-sys]
version = "^0.3.55"
features = ["console"]

[dev-dependencies]
pretty_assertions = "1.0.0"
proptest = { version = "^1.0.0", default-features = false, features = ["std"] }
serde_json = { version = "^1.0.73", features=["float_roundtrip"], default-features=true }
maplit = { version = "^1.0" }
decorum = "0.3.1"
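Both features above are opt-in. A hedged sketch of code gated behind the `optree-visualisation` feature (the helper function is hypothetical; `visualise_optree` itself appears on the documents in this branch):

// Hedged sketch: `debug_tree` is an illustrative helper, not part of this diff.
// The cfg attribute matches the feature name declared in the Cargo.toml above.
#[cfg(feature = "optree-visualisation")]
fn debug_tree(doc: &automerge::Automerge) {
    // visualise_optree() is only compiled when the optional `dot` dependency is enabled
    println!("{}", doc.visualise_optree());
}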
@@ -2,7 +2,7 @@ use automerge::transaction::CommitOptions
 use automerge::transaction::Transactable;
 use automerge::AutomergeError;
 use automerge::ObjType;
-use automerge::{Automerge, ReadDoc, ROOT};
+use automerge::{Automerge, ROOT};
 
 // Based on https://automerge.github.io/docs/quickstart
 fn main() {
@@ -11,13 +11,13 @@ fn main() {
         .transact_with::<_, _, AutomergeError, _>(
             |_| CommitOptions::default().with_message("Add card".to_owned()),
             |tx| {
-                let cards = tx.put_object(ROOT, "cards", ObjType::List).unwrap();
+                let cards = tx.set_object(ROOT, "cards", ObjType::List).unwrap();
                 let card1 = tx.insert_object(&cards, 0, ObjType::Map)?;
-                tx.put(&card1, "title", "Rewrite everything in Clojure")?;
-                tx.put(&card1, "done", false)?;
+                tx.set(&card1, "title", "Rewrite everything in Clojure")?;
+                tx.set(&card1, "done", false)?;
                 let card2 = tx.insert_object(&cards, 0, ObjType::Map)?;
-                tx.put(&card2, "title", "Rewrite everything in Haskell")?;
-                tx.put(&card2, "done", false)?;
+                tx.set(&card2, "title", "Rewrite everything in Haskell")?;
+                tx.set(&card2, "done", false)?;
                 Ok((cards, card1))
             },
         )
@@ -33,7 +33,7 @@
     doc1.transact_with::<_, _, AutomergeError, _>(
         |_| CommitOptions::default().with_message("Mark card as done".to_owned()),
         |tx| {
-            tx.put(&card1, "done", true)?;
+            tx.set(&card1, "done", true)?;
             Ok(())
         },
     )
@@ -42,7 +42,7 @@
     doc2.transact_with::<_, _, AutomergeError, _>(
         |_| CommitOptions::default().with_message("Delete card".to_owned()),
         |tx| {
-            tx.delete(&cards, 0)?;
+            tx.del(&cards, 0)?;
             Ok(())
         },
     )
@@ -50,8 +50,8 @@
 
     doc1.merge(&mut doc2).unwrap();
 
-    for change in doc1.get_changes(&[]).unwrap() {
-        let length = doc1.length_at(&cards, &[change.hash()]);
+    for change in doc1.get_changes(&[]) {
+        let length = doc1.length_at(&cards, &[change.hash]);
         println!("{} {}", change.message().unwrap(), length);
     }
 }
455  automerge/src/autocommit.rs  Normal file
@@ -0,0 +1,455 @@
use crate::exid::ExId;
use crate::transaction::{CommitOptions, Transactable};
use crate::{
    query, sync, transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change,
    ChangeHash, Keys, KeysAt, ObjType, Prop, ScalarValue, Value,
};

/// An automerge document that automatically manages transactions.
#[derive(Debug, Clone)]
pub struct AutoCommit {
    doc: Automerge,
    transaction: Option<TransactionInner>,
}

impl Default for AutoCommit {
    fn default() -> Self {
        Self::new()
    }
}

impl AutoCommit {
    pub fn new() -> Self {
        Self {
            doc: Automerge::new(),
            transaction: None,
        }
    }

    // FIXME: temp
    pub fn actor_to_str(&self, actor: usize) -> String {
        self.doc.ops.m.actors.cache[actor].to_hex_string()
    }

    /// Get the inner document.
    #[doc(hidden)]
    pub fn document(&mut self) -> &Automerge {
        self.ensure_transaction_closed();
        &self.doc
    }

    pub fn with_actor(mut self, actor: ActorId) -> Self {
        self.ensure_transaction_closed();
        self.doc.set_actor(actor);
        self
    }

    pub fn set_actor(&mut self, actor: ActorId) -> &mut Self {
        self.ensure_transaction_closed();
        self.doc.set_actor(actor);
        self
    }

    pub fn get_actor(&self) -> &ActorId {
        self.doc.get_actor()
    }

    fn ensure_transaction_open(&mut self) {
        if self.transaction.is_none() {
            self.transaction = Some(self.doc.transaction_inner());
        }
    }

    pub fn fork(&mut self) -> Self {
        self.ensure_transaction_closed();
        Self {
            doc: self.doc.fork(),
            transaction: self.transaction.clone(),
        }
    }

    fn ensure_transaction_closed(&mut self) {
        if let Some(tx) = self.transaction.take() {
            tx.commit(&mut self.doc, None, None);
        }
    }

    pub fn load(data: &[u8]) -> Result<Self, AutomergeError> {
        let doc = Automerge::load(data)?;
        Ok(Self {
            doc,
            transaction: None,
        })
    }

    pub fn load_incremental(&mut self, data: &[u8]) -> Result<Vec<ExId>, AutomergeError> {
        self.ensure_transaction_closed();
        self.doc.load_incremental(data)
    }

    pub fn apply_changes(&mut self, changes: Vec<Change>) -> Result<Vec<ExId>, AutomergeError> {
        self.ensure_transaction_closed();
        self.doc.apply_changes(changes)
    }

    /// Takes all the changes in `other` which are not in `self` and applies them
    pub fn merge(&mut self, other: &mut Self) -> Result<Vec<ExId>, AutomergeError> {
        self.ensure_transaction_closed();
        other.ensure_transaction_closed();
        self.doc.merge(&mut other.doc)
    }

    pub fn save(&mut self) -> Vec<u8> {
        self.ensure_transaction_closed();
        self.doc.save()
    }

    // should this return an empty vec instead of None?
    pub fn save_incremental(&mut self) -> Vec<u8> {
        self.ensure_transaction_closed();
        self.doc.save_incremental()
    }

    pub fn get_missing_deps(&mut self, heads: &[ChangeHash]) -> Vec<ChangeHash> {
        self.ensure_transaction_closed();
        self.doc.get_missing_deps(heads)
    }

    pub fn get_last_local_change(&mut self) -> Option<&Change> {
        self.ensure_transaction_closed();
        self.doc.get_last_local_change()
    }

    pub fn get_changes(&mut self, have_deps: &[ChangeHash]) -> Vec<&Change> {
        self.ensure_transaction_closed();
        self.doc.get_changes(have_deps)
    }

    pub fn get_change_by_hash(&mut self, hash: &ChangeHash) -> Option<&Change> {
        self.ensure_transaction_closed();
        self.doc.get_change_by_hash(hash)
    }

    pub fn get_changes_added<'a>(&mut self, other: &'a mut Self) -> Vec<&'a Change> {
        self.ensure_transaction_closed();
        other.ensure_transaction_closed();
        self.doc.get_changes_added(&other.doc)
    }

    pub fn import(&self, s: &str) -> Result<ExId, AutomergeError> {
        self.doc.import(s)
    }

    pub fn dump(&self) {
        self.doc.dump()
    }

    pub fn generate_sync_message(&mut self, sync_state: &mut sync::State) -> Option<sync::Message> {
        self.ensure_transaction_closed();
        self.doc.generate_sync_message(sync_state)
    }

    pub fn receive_sync_message(
        &mut self,
        sync_state: &mut sync::State,
        message: sync::Message,
    ) -> Result<Vec<ExId>, AutomergeError> {
        self.ensure_transaction_closed();
        self.doc.receive_sync_message(sync_state, message)
    }

    #[cfg(feature = "optree-visualisation")]
    pub fn visualise_optree(&self) -> String {
        self.doc.visualise_optree()
    }

    /// Get the current heads of the document.
    ///
    /// This closes the transaction first, if one is in progress.
    pub fn get_heads(&mut self) -> Vec<ChangeHash> {
        self.ensure_transaction_closed();
        self.doc.get_heads()
    }

    pub fn commit(&mut self) -> ChangeHash {
        self.commit_with(CommitOptions::default())
    }

    /// Commit the current operations with some options.
    ///
    /// ```
    /// # use automerge::transaction::CommitOptions;
    /// # use automerge::transaction::Transactable;
    /// # use automerge::ROOT;
    /// # use automerge::AutoCommit;
    /// # use automerge::ObjType;
    /// # use std::time::SystemTime;
    /// let mut doc = AutoCommit::new();
    /// doc.set_object(&ROOT, "todos", ObjType::List).unwrap();
    /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as
    /// i64;
    /// doc.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now));
    /// ```
    pub fn commit_with(&mut self, options: CommitOptions) -> ChangeHash {
        // ensure that even no changes triggers a change
        self.ensure_transaction_open();
        let tx = self.transaction.take().unwrap();
        tx.commit(&mut self.doc, options.message, options.time)
    }

    pub fn rollback(&mut self) -> usize {
        self.transaction
            .take()
            .map(|tx| tx.rollback(&mut self.doc))
            .unwrap_or(0)
    }
}

impl Transactable for AutoCommit {
    fn pending_ops(&self) -> usize {
        self.transaction
            .as_ref()
            .map(|t| t.pending_ops())
            .unwrap_or(0)
    }

    // KeysAt::()
    // LenAt::()
    // PropAt::()
    // NthAt::()

    fn keys<O: AsRef<ExId>>(&self, obj: O) -> Keys {
        self.doc.keys(obj)
    }

    fn keys_at<O: AsRef<ExId>>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt {
        self.doc.keys_at(obj, heads)
    }

    fn length<O: AsRef<ExId>>(&self, obj: O) -> usize {
        self.doc.length(obj)
    }

    fn length_at<O: AsRef<ExId>>(&self, obj: O, heads: &[ChangeHash]) -> usize {
        self.doc.length_at(obj, heads)
    }

    fn object_type<O: AsRef<ExId>>(&self, obj: O) -> Option<ObjType> {
        self.doc.object_type(obj)
    }

    // set(obj, prop, value) - value can be scalar or objtype
    // del(obj, prop)
    // inc(obj, prop, value)
    // insert(obj, index, value)

    /// Set the value of property `P` to value `V` in object `obj`.
    ///
    /// # Returns
    ///
    /// The opid of the operation which was created, or None if this operation doesn't change the
    /// document or create a new object.
    ///
    /// # Errors
    ///
    /// This will return an error if
    /// - The object does not exist
    /// - The key is the wrong type for the object
    /// - The key does not exist in the object
    fn set<O: AsRef<ExId>, P: Into<Prop>, V: Into<ScalarValue>>(
        &mut self,
        obj: O,
        prop: P,
        value: V,
    ) -> Result<(), AutomergeError> {
        self.ensure_transaction_open();
        let tx = self.transaction.as_mut().unwrap();
        tx.set(&mut self.doc, obj.as_ref(), prop, value)
    }

    fn set_object<O: AsRef<ExId>, P: Into<Prop>>(
        &mut self,
        obj: O,
        prop: P,
        value: ObjType,
    ) -> Result<ExId, AutomergeError> {
        self.ensure_transaction_open();
        let tx = self.transaction.as_mut().unwrap();
        tx.set_object(&mut self.doc, obj.as_ref(), prop, value)
    }

    fn insert<O: AsRef<ExId>, V: Into<ScalarValue>>(
        &mut self,
        obj: O,
        index: usize,
        value: V,
    ) -> Result<(), AutomergeError> {
        self.ensure_transaction_open();
        let tx = self.transaction.as_mut().unwrap();
        tx.insert(&mut self.doc, obj.as_ref(), index, value)
    }

    #[allow(clippy::too_many_arguments)]
    fn mark<O: AsRef<ExId>>(
        &mut self,
        obj: O,
        start: usize,
        expand_start: bool,
        end: usize,
        expand_end: bool,
        mark: &str,
        value: ScalarValue,
    ) -> Result<(), AutomergeError> {
        self.ensure_transaction_open();
        let tx = self.transaction.as_mut().unwrap();
        tx.mark(
            &mut self.doc,
            obj,
            start,
            expand_start,
            end,
            expand_end,
            mark,
            value,
        )
    }

    fn unmark<O: AsRef<ExId>>(&mut self, obj: O, mark: O) -> Result<(), AutomergeError> {
        self.ensure_transaction_open();
        let tx = self.transaction.as_mut().unwrap();
        tx.unmark(&mut self.doc, obj, mark)
    }

    fn insert_object(
        &mut self,
        obj: &ExId,
        index: usize,
        value: ObjType,
    ) -> Result<ExId, AutomergeError> {
        self.ensure_transaction_open();
        let tx = self.transaction.as_mut().unwrap();
        tx.insert_object(&mut self.doc, obj, index, value)
    }

    fn inc<O: AsRef<ExId>, P: Into<Prop>>(
        &mut self,
        obj: O,
        prop: P,
        value: i64,
    ) -> Result<(), AutomergeError> {
        self.ensure_transaction_open();
        let tx = self.transaction.as_mut().unwrap();
        tx.inc(&mut self.doc, obj.as_ref(), prop, value)
    }

    fn del<O: AsRef<ExId>, P: Into<Prop>>(
        &mut self,
        obj: O,
        prop: P,
    ) -> Result<(), AutomergeError> {
        self.ensure_transaction_open();
        let tx = self.transaction.as_mut().unwrap();
        tx.del(&mut self.doc, obj.as_ref(), prop)
    }

    /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert
    /// the new elements
    fn splice<O: AsRef<ExId>, V: IntoIterator<Item = ScalarValue>>(
        &mut self,
        obj: O,
        pos: usize,
        del: usize,
        vals: V,
    ) -> Result<(), AutomergeError> {
        self.ensure_transaction_open();
        let tx = self.transaction.as_mut().unwrap();
        tx.splice(&mut self.doc, obj.as_ref(), pos, del, vals)
    }

    fn text<O: AsRef<ExId>>(&self, obj: O) -> Result<String, AutomergeError> {
        self.doc.text(obj)
    }

    fn text_at<O: AsRef<ExId>>(
        &self,
        obj: O,
        heads: &[ChangeHash],
    ) -> Result<String, AutomergeError> {
        self.doc.text_at(obj, heads)
    }

    fn list<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<(Value, ExId)>, AutomergeError> {
        self.doc.list(obj)
    }

    fn list_at<O: AsRef<ExId>>(
        &self,
        obj: O,
        heads: &[ChangeHash],
    ) -> Result<Vec<(Value, ExId)>, AutomergeError> {
        self.doc.list_at(obj, heads)
    }

    fn spans<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<query::Span>, AutomergeError> {
        self.doc.spans(obj)
    }

    fn raw_spans<O: AsRef<ExId>>(&self, obj: O) -> Result<Vec<query::SpanInfo>, AutomergeError> {
        self.doc.raw_spans(obj)
    }

    fn attribute<O: AsRef<ExId>>(
        &self,
        obj: O,
        baseline: &[ChangeHash],
        change_sets: &[Vec<ChangeHash>],
    ) -> Result<Vec<query::ChangeSet>, AutomergeError> {
        self.doc.attribute(obj, baseline, change_sets)
    }

    fn attribute2<O: AsRef<ExId>>(
        &self,
        obj: O,
        baseline: &[ChangeHash],
        change_sets: &[Vec<ChangeHash>],
    ) -> Result<Vec<query::ChangeSet2>, AutomergeError> {
        self.doc.attribute2(obj, baseline, change_sets)
    }

    // TODO - I need to return these OpId's here **only** to get
    // the legacy conflicts format of { [opid]: value }
    // Something better?
    fn value<O: AsRef<ExId>, P: Into<Prop>>(
        &self,
        obj: O,
        prop: P,
    ) -> Result<Option<(Value, ExId)>, AutomergeError> {
        self.doc.value(obj, prop)
    }

    fn value_at<O: AsRef<ExId>, P: Into<Prop>>(
        &self,
        obj: O,
        prop: P,
        heads: &[ChangeHash],
    ) -> Result<Option<(Value, ExId)>, AutomergeError> {
        self.doc.value_at(obj, prop, heads)
    }

    fn values<O: AsRef<ExId>, P: Into<Prop>>(
        &self,
        obj: O,
        prop: P,
    ) -> Result<Vec<(Value, ExId)>, AutomergeError> {
        self.doc.values(obj, prop)
    }

    fn values_at<O: AsRef<ExId>, P: Into<Prop>>(
        &self,
        obj: O,
        prop: P,
        heads: &[ChangeHash],
    ) -> Result<Vec<(Value, ExId)>, AutomergeError> {
        self.doc.values_at(obj, prop, heads)
    }
}
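A hedged usage sketch of the `AutoCommit` type defined above: mutating calls open a transaction via `ensure_transaction_open`, and calls like `save`/`get_heads` commit it first via `ensure_transaction_closed`, so the caller never manages transactions explicitly. The re-exports of `AutoCommit` and `ROOT` from the crate root are assumptions of this sketch.

use automerge::transaction::Transactable;
use automerge::{AutoCommit, AutomergeError, ROOT};

fn main() -> Result<(), AutomergeError> {
    let mut doc = AutoCommit::new();
    doc.set(ROOT, "hello", "world")?; // implicitly opens a transaction
    let bytes = doc.save();           // closes (commits) the transaction first
    let mut doc2 = AutoCommit::load(&bytes)?;
    assert_eq!(doc.get_heads(), doc2.get_heads());
    Ok(())
}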
1616  automerge/src/automerge.rs  Normal file
File diff suppressed because it is too large
997  automerge/src/change.rs  Normal file
@@ -0,0 +1,997 @@
use crate::columnar::{
    ChangeEncoder, ChangeIterator, ColumnEncoder, DepsIterator, DocChange, DocOp, DocOpEncoder,
    DocOpIterator, OperationIterator, COLUMN_TYPE_DEFLATE,
};
use crate::decoding;
use crate::decoding::{Decodable, InvalidChangeError};
use crate::encoding::{Encodable, DEFLATE_MIN_SIZE};
use crate::error::AutomergeError;
use crate::indexed_cache::IndexedCache;
use crate::legacy as amp;
use crate::transaction::TransactionInner;
use crate::types;
use crate::types::{ActorId, ElemId, Key, ObjId, Op, OpId, OpType};
use core::ops::Range;
use flate2::{
    bufread::{DeflateDecoder, DeflateEncoder},
    Compression,
};
use itertools::Itertools;
use sha2::Digest;
use sha2::Sha256;
use std::collections::{HashMap, HashSet};
use std::convert::TryInto;
use std::fmt::Debug;
use std::io::{Read, Write};
use std::num::NonZeroU64;
use tracing::instrument;

const MAGIC_BYTES: [u8; 4] = [0x85, 0x6f, 0x4a, 0x83];
const PREAMBLE_BYTES: usize = 8;
const HEADER_BYTES: usize = PREAMBLE_BYTES + 1;

const HASH_BYTES: usize = 32;
const BLOCK_TYPE_DOC: u8 = 0;
const BLOCK_TYPE_CHANGE: u8 = 1;
const BLOCK_TYPE_DEFLATE: u8 = 2;
const CHUNK_START: usize = 8;
const HASH_RANGE: Range<usize> = 4..8;
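The constants above pin down the chunk framing used throughout this file. A hedged, standalone sketch of that layout, mirroring what `encode_document` and `encode` below actually do, using only the `leb128` and `sha2` crates already imported here (`frame_chunk` itself is illustrative, not part of this diff):

// Hedged sketch: 4 magic bytes, a 4-byte checksum placeholder (HASH_RANGE),
// one block-type byte, a LEB128 body length, the body, then the first four
// bytes of the SHA-256 spliced back in over bytes 4..8.
fn frame_chunk(block_type: u8, body: &[u8]) -> Vec<u8> {
    use sha2::{Digest, Sha256};
    let mut bytes = vec![0x85, 0x6f, 0x4a, 0x83]; // MAGIC_BYTES
    bytes.extend([0, 0, 0, 0]); // checksum placeholder
    bytes.push(block_type); // BLOCK_TYPE_DOC / _CHANGE / _DEFLATE
    leb128::write::unsigned(&mut bytes, body.len() as u64).unwrap();
    bytes.extend(body);
    // the checksum covers everything after the 8-byte preamble (CHUNK_START)
    let hash = Sha256::digest(&bytes[8..]);
    bytes.splice(4..8, hash[0..4].iter().copied());
    bytes
}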
pub(crate) fn encode_document<'a, 'b>(
    heads: Vec<amp::ChangeHash>,
    changes: impl Iterator<Item = &'a Change>,
    doc_ops: impl Iterator<Item = (&'b ObjId, &'b Op)>,
    actors_index: &IndexedCache<ActorId>,
    props: &'a [String],
) -> Vec<u8> {
    let mut bytes: Vec<u8> = Vec::new();

    let actors_map = actors_index.encode_index();
    let actors = actors_index.sorted();

    /*
    // this assumes that all actor_ids referenced are seen in changes.actor_id which is true
    // so long as we have a full history
    let mut actors: Vec<_> = changes
        .iter()
        .map(|c| &c.actor)
        .unique()
        .sorted()
        .cloned()
        .collect();
    */

    let (change_bytes, change_info) = ChangeEncoder::encode_changes(changes, &actors);

    //let doc_ops = group_doc_ops(changes, &actors);

    let (ops_bytes, ops_info) = DocOpEncoder::encode_doc_ops(doc_ops, &actors_map, props);

    bytes.extend(MAGIC_BYTES);
    bytes.extend([0, 0, 0, 0]); // we don't know the hash yet so fill in a fake
    bytes.push(BLOCK_TYPE_DOC);

    let mut chunk = Vec::new();

    actors.len().encode_vec(&mut chunk);

    for a in actors.into_iter() {
        a.to_bytes().encode_vec(&mut chunk);
    }

    heads.len().encode_vec(&mut chunk);
    for head in heads.iter() {
        chunk.write_all(&head.0).unwrap();
    }

    chunk.extend(change_info);
    chunk.extend(ops_info);

    chunk.extend(change_bytes);
    chunk.extend(ops_bytes);

    leb128::write::unsigned(&mut bytes, chunk.len() as u64).unwrap();

    bytes.extend(&chunk);

    let hash_result = Sha256::digest(&bytes[CHUNK_START..bytes.len()]);

    bytes.splice(HASH_RANGE, hash_result[0..4].iter().copied());

    bytes
}

/// When encoding a change we take all the actor IDs referenced by a change and place them in an
/// array. The array has the actor who authored the change as the first element and all remaining
/// actors (i.e. those referenced in object IDs in the target of an operation or in the `pred` of
/// an operation) lexicographically ordered following the change author.
fn actor_ids_in_change(change: &amp::Change) -> Vec<amp::ActorId> {
    let mut other_ids: Vec<&amp::ActorId> = change
        .operations
        .iter()
        .flat_map(opids_in_operation)
        .filter(|a| *a != &change.actor_id)
        .unique()
        .collect();
    other_ids.sort();
    // Now prepend the change actor
    std::iter::once(&change.actor_id)
        .chain(other_ids.into_iter())
        .cloned()
        .collect()
}

fn opids_in_operation(op: &amp::Op) -> impl Iterator<Item = &amp::ActorId> {
    let obj_actor_id = match &op.obj {
        amp::ObjectId::Root => None,
        amp::ObjectId::Id(opid) => Some(opid.actor()),
    };
    let pred_ids = op.pred.iter().map(amp::OpId::actor);
    let key_actor = match &op.key {
        amp::Key::Seq(amp::ElementId::Id(i)) => Some(i.actor()),
        _ => None,
    };
    obj_actor_id
        .into_iter()
        .chain(key_actor.into_iter())
        .chain(pred_ids)
}

impl From<amp::Change> for Change {
    fn from(value: amp::Change) -> Self {
        encode(&value)
    }
}

impl From<&amp::Change> for Change {
    fn from(value: &amp::Change) -> Self {
        encode(value)
    }
}

fn encode(change: &amp::Change) -> Change {
    let mut deps = change.deps.clone();
    deps.sort_unstable();

    let mut chunk = encode_chunk(change, &deps);

    let mut bytes = Vec::with_capacity(MAGIC_BYTES.len() + 4 + chunk.bytes.len());

    bytes.extend(&MAGIC_BYTES);

    bytes.extend(vec![0, 0, 0, 0]); // we don't know the hash yet so fill in a fake

    bytes.push(BLOCK_TYPE_CHANGE);

    leb128::write::unsigned(&mut bytes, chunk.bytes.len() as u64).unwrap();

    let body_start = bytes.len();

    increment_range(&mut chunk.body, bytes.len());
    increment_range(&mut chunk.message, bytes.len());
    increment_range(&mut chunk.extra_bytes, bytes.len());
    increment_range_map(&mut chunk.ops, bytes.len());

    bytes.extend(&chunk.bytes);

    let hash_result = Sha256::digest(&bytes[CHUNK_START..bytes.len()]);
    let hash: amp::ChangeHash = hash_result[..].try_into().unwrap();

    bytes.splice(HASH_RANGE, hash_result[0..4].iter().copied());

    // any time I make changes to the encoder/decoder it's a good idea
    // to run it through a round trip to detect errors the tests might not
    // catch
    // let c0 = Change::from_bytes(bytes.clone()).unwrap();
    // std::assert_eq!(c1, c0);
    // perhaps we should add something like this to the test suite

    let bytes = ChangeBytes::Uncompressed(bytes);

    Change {
        bytes,
        body_start,
        hash,
        seq: change.seq,
        start_op: change.start_op,
        time: change.time,
        actors: chunk.actors,
        message: chunk.message,
        deps,
        ops: chunk.ops,
        extra_bytes: chunk.extra_bytes,
    }
}

struct ChunkIntermediate {
    bytes: Vec<u8>,
    body: Range<usize>,
    actors: Vec<ActorId>,
    message: Range<usize>,
    ops: HashMap<u32, Range<usize>>,
    extra_bytes: Range<usize>,
}

fn encode_chunk(change: &amp::Change, deps: &[amp::ChangeHash]) -> ChunkIntermediate {
    let mut bytes = Vec::new();

    // All these unwraps are okay because we're writing to an in-memory buffer so io errors should
    // not happen

    // encode deps
    deps.len().encode(&mut bytes).unwrap();
    for hash in deps.iter() {
        bytes.write_all(&hash.0).unwrap();
    }

    let actors = actor_ids_in_change(change);
    change.actor_id.to_bytes().encode(&mut bytes).unwrap();

    // encode seq, start_op, time, message
    change.seq.encode(&mut bytes).unwrap();
    change.start_op.encode(&mut bytes).unwrap();
    change.time.encode(&mut bytes).unwrap();
    let message = bytes.len() + 1;
    change.message.encode(&mut bytes).unwrap();
    let message = message..bytes.len();

    // encode ops into a side buffer - collect all other actors
    let (ops_buf, mut ops) = ColumnEncoder::encode_ops(&change.operations, &actors);

    // encode all other actors
    actors[1..].encode(&mut bytes).unwrap();

    // now we know how many bytes ops are offset by so we can adjust the ranges
    increment_range_map(&mut ops, bytes.len());

    // write out the ops

    bytes.write_all(&ops_buf).unwrap();

    // write out the extra bytes
    let extra_bytes = bytes.len()..(bytes.len() + change.extra_bytes.len());
    bytes.write_all(&change.extra_bytes).unwrap();
    let body = 0..bytes.len();

    ChunkIntermediate {
        bytes,
        body,
        actors,
        message,
        ops,
        extra_bytes,
    }
}

#[derive(PartialEq, Debug, Clone)]
enum ChangeBytes {
    Compressed {
        compressed: Vec<u8>,
        uncompressed: Vec<u8>,
    },
    Uncompressed(Vec<u8>),
}

impl ChangeBytes {
    fn uncompressed(&self) -> &[u8] {
        match self {
            ChangeBytes::Compressed { uncompressed, .. } => &uncompressed[..],
            ChangeBytes::Uncompressed(b) => &b[..],
        }
    }

    fn compress(&mut self, body_start: usize) {
        match self {
            ChangeBytes::Compressed { .. } => {}
            ChangeBytes::Uncompressed(uncompressed) => {
                if uncompressed.len() > DEFLATE_MIN_SIZE {
                    let mut result = Vec::with_capacity(uncompressed.len());
                    result.extend(&uncompressed[0..8]);
                    result.push(BLOCK_TYPE_DEFLATE);
                    let mut deflater =
                        DeflateEncoder::new(&uncompressed[body_start..], Compression::default());
                    let mut deflated = Vec::new();
                    let deflated_len = deflater.read_to_end(&mut deflated).unwrap();
                    leb128::write::unsigned(&mut result, deflated_len as u64).unwrap();
                    result.extend(&deflated[..]);
                    *self = ChangeBytes::Compressed {
                        compressed: result,
                        uncompressed: std::mem::take(uncompressed),
                    }
                }
            }
        }
    }

    fn raw(&self) -> &[u8] {
        match self {
            ChangeBytes::Compressed { compressed, .. } => &compressed[..],
            ChangeBytes::Uncompressed(b) => &b[..],
        }
    }
}

/// A change represents a group of operations performed by an actor.
#[derive(PartialEq, Debug, Clone)]
pub struct Change {
    bytes: ChangeBytes,
    body_start: usize,
    /// Hash of this change.
    pub hash: amp::ChangeHash,
    /// The index of this change in the changes from this actor.
    pub seq: u64,
    /// The start operation index. Starts at 1.
    pub start_op: NonZeroU64,
    /// The time that this change was committed.
    pub time: i64,
    /// The message of this change.
    message: Range<usize>,
    /// The actors referenced in this change.
    actors: Vec<ActorId>,
    /// The dependencies of this change.
    pub deps: Vec<amp::ChangeHash>,
    ops: HashMap<u32, Range<usize>>,
    extra_bytes: Range<usize>,
}

impl Change {
    pub fn actor_id(&self) -> &ActorId {
        &self.actors[0]
    }

    #[instrument(level = "debug", skip(bytes))]
    pub fn load_document(bytes: &[u8]) -> Result<Vec<Change>, AutomergeError> {
        load_blocks(bytes)
    }

    pub fn from_bytes(bytes: Vec<u8>) -> Result<Change, decoding::Error> {
        Change::try_from(bytes)
    }

    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    pub fn len(&self) -> usize {
        // TODO - this could be a lot more efficient
        self.iter_ops().count()
    }

    pub fn max_op(&self) -> u64 {
        self.start_op.get() + (self.len() as u64) - 1
    }

    pub fn message(&self) -> Option<String> {
        let m = &self.bytes.uncompressed()[self.message.clone()];
        if m.is_empty() {
            None
        } else {
            std::str::from_utf8(m).map(ToString::to_string).ok()
        }
    }

    pub fn decode(&self) -> amp::Change {
        amp::Change {
            start_op: self.start_op,
            seq: self.seq,
            time: self.time,
            hash: Some(self.hash),
            message: self.message(),
            actor_id: self.actors[0].clone(),
            deps: self.deps.clone(),
            operations: self
                .iter_ops()
                .map(|op| amp::Op {
                    action: op.action.clone(),
                    obj: op.obj.clone(),
                    key: op.key.clone(),
                    pred: op.pred.clone(),
                    insert: op.insert,
                })
                .collect(),
            extra_bytes: self.extra_bytes().into(),
        }
    }

    pub(crate) fn iter_ops(&self) -> OperationIterator {
        OperationIterator::new(self.bytes.uncompressed(), self.actors.as_slice(), &self.ops)
    }

    pub fn extra_bytes(&self) -> &[u8] {
        &self.bytes.uncompressed()[self.extra_bytes.clone()]
    }

    pub fn compress(&mut self) {
        self.bytes.compress(self.body_start);
    }

    pub fn raw_bytes(&self) -> &[u8] {
        self.bytes.raw()
    }
}
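A hedged round-trip sketch over the public surface of `Change` defined above; it assumes the `decoding` module is publicly exported and that `some_change` is any change produced by a document on this branch:

// Hedged sketch: compress a change, re-parse it from its raw bytes, and check
// that the content-addressed hash survives the round trip.
fn round_trip(mut some_change: automerge::Change) -> Result<(), automerge::decoding::Error> {
    some_change.compress(); // deflates the body when it exceeds DEFLATE_MIN_SIZE
    let restored = automerge::Change::from_bytes(some_change.raw_bytes().to_vec())?;
    assert_eq!(restored.hash, some_change.hash);
    Ok(())
}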
fn read_leb128(bytes: &mut &[u8]) -> Result<(usize, usize), decoding::Error> {
    let mut buf = &bytes[..];
    let val = leb128::read::unsigned(&mut buf)? as usize;
    let leb128_bytes = bytes.len() - buf.len();
    Ok((val, leb128_bytes))
}

fn read_slice<T: Decodable + Debug>(
    bytes: &[u8],
    cursor: &mut Range<usize>,
) -> Result<T, decoding::Error> {
    let mut view = &bytes[cursor.clone()];
    let init_len = view.len();
    let val = T::decode::<&[u8]>(&mut view).ok_or(decoding::Error::NoDecodedValue);
    let bytes_read = init_len - view.len();
    *cursor = (cursor.start + bytes_read)..cursor.end;
    val
}

fn slice_bytes(bytes: &[u8], cursor: &mut Range<usize>) -> Result<Range<usize>, decoding::Error> {
    let (val, len) = read_leb128(&mut &bytes[cursor.clone()])?;
    let start = cursor.start + len;
    let end = start + val;
    *cursor = end..cursor.end;
    Ok(start..end)
}

fn increment_range(range: &mut Range<usize>, len: usize) {
    range.end += len;
    range.start += len;
}

fn increment_range_map(ranges: &mut HashMap<u32, Range<usize>>, len: usize) {
    for range in ranges.values_mut() {
        increment_range(range, len);
    }
}

fn export_objid(id: &ObjId, actors: &IndexedCache<ActorId>) -> amp::ObjectId {
    if id == &ObjId::root() {
        amp::ObjectId::Root
    } else {
        export_opid(&id.0, actors).into()
    }
}

fn export_elemid(id: &ElemId, actors: &IndexedCache<ActorId>) -> amp::ElementId {
    if id == &types::HEAD {
        amp::ElementId::Head
    } else {
        export_opid(&id.0, actors).into()
    }
}

fn export_opid(id: &OpId, actors: &IndexedCache<ActorId>) -> amp::OpId {
    amp::OpId(id.0, actors.get(id.1).clone())
}

fn export_op(
    op: &Op,
    obj: &ObjId,
    actors: &IndexedCache<ActorId>,
    props: &IndexedCache<String>,
) -> amp::Op {
    let action = op.action.clone();
    let key = match &op.key {
        Key::Map(n) => amp::Key::Map(props.get(*n).clone().into()),
        Key::Seq(id) => amp::Key::Seq(export_elemid(id, actors)),
    };
    let obj = export_objid(obj, actors);
    let pred = op.pred.iter().map(|id| export_opid(id, actors)).collect();
    amp::Op {
        action,
        obj,
        insert: op.insert,
        pred,
        key,
    }
}

pub(crate) fn export_change(
    change: TransactionInner,
    actors: &IndexedCache<ActorId>,
    props: &IndexedCache<String>,
) -> Change {
    amp::Change {
        actor_id: actors.get(change.actor).clone(),
        seq: change.seq,
        start_op: change.start_op,
        time: change.time,
        deps: change.deps,
        message: change.message,
        hash: change.hash,
        operations: change
            .operations
            .iter()
            .map(|(obj, op)| export_op(op, obj, actors, props))
            .collect(),
        extra_bytes: change.extra_bytes,
    }
    .into()
}

impl TryFrom<Vec<u8>> for Change {
    type Error = decoding::Error;

    fn try_from(bytes: Vec<u8>) -> Result<Self, Self::Error> {
        let (chunktype, body) = decode_header_without_hash(&bytes)?;
        let bytes = if chunktype == BLOCK_TYPE_DEFLATE {
            decompress_chunk(0..PREAMBLE_BYTES, body, bytes)?
        } else {
            ChangeBytes::Uncompressed(bytes)
        };

        let (chunktype, hash, body) = decode_header(bytes.uncompressed())?;

        if chunktype != BLOCK_TYPE_CHANGE {
            return Err(decoding::Error::WrongType {
                expected_one_of: vec![BLOCK_TYPE_CHANGE],
                found: chunktype,
            });
        }

        let body_start = body.start;
        let mut cursor = body;

        let deps = decode_hashes(bytes.uncompressed(), &mut cursor)?;

        let actor =
            ActorId::from(&bytes.uncompressed()[slice_bytes(bytes.uncompressed(), &mut cursor)?]);
        let seq = read_slice(bytes.uncompressed(), &mut cursor)?;
        let start_op = read_slice(bytes.uncompressed(), &mut cursor)?;
        let time = read_slice(bytes.uncompressed(), &mut cursor)?;
        let message = slice_bytes(bytes.uncompressed(), &mut cursor)?;

        let actors = decode_actors(bytes.uncompressed(), &mut cursor, Some(actor))?;

        let ops_info = decode_column_info(bytes.uncompressed(), &mut cursor, false)?;
        let ops = decode_columns(&mut cursor, &ops_info);

        Ok(Change {
            bytes,
            body_start,
            hash,
            seq,
            start_op,
            time,
            actors,
            message,
            deps,
            ops,
            extra_bytes: cursor,
        })
    }
}

fn decompress_chunk(
    preamble: Range<usize>,
    body: Range<usize>,
    compressed: Vec<u8>,
) -> Result<ChangeBytes, decoding::Error> {
    let mut decoder = DeflateDecoder::new(&compressed[body]);
    let mut decompressed = Vec::new();
    decoder.read_to_end(&mut decompressed)?;
    let mut result = Vec::with_capacity(decompressed.len() + preamble.len());
    result.extend(&compressed[preamble]);
    result.push(BLOCK_TYPE_CHANGE);
    leb128::write::unsigned::<Vec<u8>>(&mut result, decompressed.len() as u64).unwrap();
    result.extend(decompressed);
    Ok(ChangeBytes::Compressed {
        uncompressed: result,
        compressed,
    })
}

fn decode_hashes(
    bytes: &[u8],
    cursor: &mut Range<usize>,
) -> Result<Vec<amp::ChangeHash>, decoding::Error> {
    let num_hashes = read_slice(bytes, cursor)?;
    let mut hashes = Vec::with_capacity(num_hashes);
    for _ in 0..num_hashes {
        let hash = cursor.start..(cursor.start + HASH_BYTES);
        *cursor = hash.end..cursor.end;
        hashes.push(
            bytes
                .get(hash)
                .ok_or(decoding::Error::NotEnoughBytes)?
                .try_into()
                .map_err(InvalidChangeError::from)?,
        );
    }
    Ok(hashes)
}

fn decode_actors(
    bytes: &[u8],
    cursor: &mut Range<usize>,
    first: Option<ActorId>,
) -> Result<Vec<ActorId>, decoding::Error> {
    let num_actors: usize = read_slice(bytes, cursor)?;
    let mut actors = Vec::with_capacity(num_actors + 1);
    if let Some(actor) = first {
        actors.push(actor);
    }
    for _ in 0..num_actors {
        actors.push(ActorId::from(
            bytes
                .get(slice_bytes(bytes, cursor)?)
                .ok_or(decoding::Error::NotEnoughBytes)?,
        ));
    }
    Ok(actors)
}

fn decode_column_info(
    bytes: &[u8],
    cursor: &mut Range<usize>,
    allow_compressed_column: bool,
) -> Result<Vec<(u32, usize)>, decoding::Error> {
    let num_columns = read_slice(bytes, cursor)?;
    let mut columns = Vec::with_capacity(num_columns);
    let mut last_id = 0;
    for _ in 0..num_columns {
        let id: u32 = read_slice(bytes, cursor)?;
        if (id & !COLUMN_TYPE_DEFLATE) <= (last_id & !COLUMN_TYPE_DEFLATE) {
            return Err(decoding::Error::ColumnsNotInAscendingOrder {
                last: last_id,
                found: id,
            });
        }
        if id & COLUMN_TYPE_DEFLATE != 0 && !allow_compressed_column {
            return Err(decoding::Error::ChangeContainedCompressedColumns);
        }
        last_id = id;
        let length = read_slice(bytes, cursor)?;
        columns.push((id, length));
    }
    Ok(columns)
}

fn decode_columns(
    cursor: &mut Range<usize>,
    columns: &[(u32, usize)],
) -> HashMap<u32, Range<usize>> {
    let mut ops = HashMap::new();
    for (id, length) in columns {
        let start = cursor.start;
        let end = start + length;
        *cursor = end..cursor.end;
        ops.insert(*id, start..end);
    }
    ops
}

fn decode_header(bytes: &[u8]) -> Result<(u8, amp::ChangeHash, Range<usize>), decoding::Error> {
    let (chunktype, body) = decode_header_without_hash(bytes)?;

    let calculated_hash = Sha256::digest(&bytes[PREAMBLE_BYTES..]);

    let checksum = &bytes[4..8];
    if checksum != &calculated_hash[0..4] {
        return Err(decoding::Error::InvalidChecksum {
            found: checksum.try_into().unwrap(),
            calculated: calculated_hash[0..4].try_into().unwrap(),
        });
    }

    let hash = calculated_hash[..]
        .try_into()
        .map_err(InvalidChangeError::from)?;

    Ok((chunktype, hash, body))
}

fn decode_header_without_hash(bytes: &[u8]) -> Result<(u8, Range<usize>), decoding::Error> {
    if bytes.len() <= HEADER_BYTES {
        return Err(decoding::Error::NotEnoughBytes);
    }

    if bytes[0..4] != MAGIC_BYTES {
        return Err(decoding::Error::WrongMagicBytes);
    }

    let (val, len) = read_leb128(&mut &bytes[HEADER_BYTES..])?;
    let body = (HEADER_BYTES + len)..(HEADER_BYTES + len + val);
    if bytes.len() != body.end {
        return Err(decoding::Error::WrongByteLength {
            expected: body.end,
            found: bytes.len(),
        });
    }

    let chunktype = bytes[PREAMBLE_BYTES];

    Ok((chunktype, body))
}

fn load_blocks(bytes: &[u8]) -> Result<Vec<Change>, AutomergeError> {
    let mut changes = Vec::new();
    for slice in split_blocks(bytes)? {
        decode_block(slice, &mut changes)?;
    }
    Ok(changes)
}

fn split_blocks(bytes: &[u8]) -> Result<Vec<&[u8]>, decoding::Error> {
    // split off all valid blocks - ignore the rest if it's corrupted or truncated
    let mut blocks = Vec::new();
    let mut cursor = bytes;
    while let Some(block) = pop_block(cursor)? {
        blocks.push(&cursor[block.clone()]);
        if cursor.len() <= block.end {
            break;
        }
        cursor = &cursor[block.end..];
    }
    Ok(blocks)
}

fn pop_block(bytes: &[u8]) -> Result<Option<Range<usize>>, decoding::Error> {
    if bytes.len() < 4 || bytes[0..4] != MAGIC_BYTES {
        // not reporting error here - file got corrupted?
        return Ok(None);
    }
    let (val, len) = read_leb128(
        &mut bytes
            .get(HEADER_BYTES..)
            .ok_or(decoding::Error::NotEnoughBytes)?,
    )?;
    // val is arbitrary so it could overflow
    let end = (HEADER_BYTES + len)
        .checked_add(val)
        .ok_or(decoding::Error::Overflow)?;
    if end > bytes.len() {
        // not reporting error here - file got truncated?
        return Ok(None);
    }
    Ok(Some(0..end))
}

fn decode_block(bytes: &[u8], changes: &mut Vec<Change>) -> Result<(), decoding::Error> {
    match bytes[PREAMBLE_BYTES] {
        BLOCK_TYPE_DOC => {
            changes.extend(decode_document(bytes)?);
            Ok(())
        }
        BLOCK_TYPE_CHANGE | BLOCK_TYPE_DEFLATE => {
            changes.push(Change::try_from(bytes.to_vec())?);
            Ok(())
        }
        found => Err(decoding::Error::WrongType {
            expected_one_of: vec![BLOCK_TYPE_DOC, BLOCK_TYPE_CHANGE, BLOCK_TYPE_DEFLATE],
            found,
        }),
    }
}

fn decode_document(bytes: &[u8]) -> Result<Vec<Change>, decoding::Error> {
    let (chunktype, _hash, mut cursor) = decode_header(bytes)?;

    // chunktype == 0 is a document, chunktype == 1 is a change
    if chunktype > 0 {
        return Err(decoding::Error::WrongType {
            expected_one_of: vec![0],
            found: chunktype,
        });
    }

    let actors = decode_actors(bytes, &mut cursor, None)?;

    let heads = decode_hashes(bytes, &mut cursor)?;

    let changes_info = decode_column_info(bytes, &mut cursor, true)?;
    let ops_info = decode_column_info(bytes, &mut cursor, true)?;

    let changes_data = decode_columns(&mut cursor, &changes_info);
    let mut doc_changes = ChangeIterator::new(bytes, &changes_data).collect::<Vec<_>>();
    let doc_changes_deps = DepsIterator::new(bytes, &changes_data);

    let doc_changes_len = doc_changes.len();

    let ops_data = decode_columns(&mut cursor, &ops_info);
    let doc_ops: Vec<_> = DocOpIterator::new(bytes, &actors, &ops_data).collect();

    group_doc_change_and_doc_ops(&mut doc_changes, doc_ops, &actors)?;

    let uncompressed_changes =
        doc_changes_to_uncompressed_changes(doc_changes.into_iter(), &actors);

    let changes = compress_doc_changes(uncompressed_changes, doc_changes_deps, doc_changes_len)
        .ok_or(decoding::Error::NoDocChanges)?;

    let mut calculated_heads = HashSet::new();
    for change in &changes {
        for dep in &change.deps {
            calculated_heads.remove(dep);
        }
        calculated_heads.insert(change.hash);
    }

    if calculated_heads != heads.into_iter().collect::<HashSet<_>>() {
        return Err(decoding::Error::MismatchedHeads);
    }

    Ok(changes)
}

fn compress_doc_changes(
    uncompressed_changes: impl Iterator<Item = amp::Change>,
    doc_changes_deps: impl Iterator<Item = Vec<usize>>,
    num_changes: usize,
) -> Option<Vec<Change>> {
    let mut changes: Vec<Change> = Vec::with_capacity(num_changes);

    // fill out the hashes as we go
    for (deps, mut uncompressed_change) in doc_changes_deps.zip_eq(uncompressed_changes) {
        for idx in deps {
            uncompressed_change.deps.push(changes.get(idx)?.hash);
        }
        changes.push(uncompressed_change.into());
    }

    Some(changes)
}

fn group_doc_change_and_doc_ops(
    changes: &mut [DocChange],
    mut ops: Vec<DocOp>,
    actors: &[ActorId],
) -> Result<(), decoding::Error> {
    let mut changes_by_actor: HashMap<usize, Vec<usize>> = HashMap::new();

    for (i, change) in changes.iter().enumerate() {
        let actor_change_index = changes_by_actor.entry(change.actor).or_default();
        if change.seq != (actor_change_index.len() + 1) as u64 {
            return Err(decoding::Error::ChangeDecompressFailed(
                "Doc Seq Invalid".into(),
            ));
        }
        if change.actor >= actors.len() {
            return Err(decoding::Error::ChangeDecompressFailed(
                "Doc Actor Invalid".into(),
            ));
        }
        actor_change_index.push(i);
    }

    let mut op_by_id = HashMap::new();
    ops.iter().enumerate().for_each(|(i, op)| {
        op_by_id.insert((op.ctr, op.actor), i);
    });

    for i in 0..ops.len() {
        let op = ops[i].clone(); // this is safe - avoid borrow checker issues
        //let id = (op.ctr, op.actor);
        //op_by_id.insert(id, i);
        for succ in &op.succ {
            if let Some(index) = op_by_id.get(succ) {
                ops[*index].pred.push((op.ctr, op.actor));
            } else {
                let key = if op.insert {
                    amp::OpId(op.ctr, actors[op.actor].clone()).into()
                } else {
                    op.key.clone()
                };
                let del = DocOp {
                    actor: succ.1,
                    ctr: succ.0,
                    action: OpType::Del,
                    obj: op.obj.clone(),
                    key,
                    succ: Vec::new(),
                    pred: vec![(op.ctr, op.actor)],
                    insert: false,
                };
                op_by_id.insert(*succ, ops.len());
                ops.push(del);
            }
        }
    }

    for op in ops {
        // binary search for our change
        let actor_change_index = changes_by_actor.entry(op.actor).or_default();
        let mut left = 0;
        let mut right = actor_change_index.len();
        while left < right {
            let seq = (left + right) / 2;
            if changes[actor_change_index[seq]].max_op < op.ctr {
                left = seq + 1;
            } else {
                right = seq;
            }
        }
        if left >= actor_change_index.len() {
            return Err(decoding::Error::ChangeDecompressFailed(
                "Doc MaxOp Invalid".into(),
            ));
        }
        changes[actor_change_index[left]].ops.push(op);
    }

    changes
        .iter_mut()
        .for_each(|change| change.ops.sort_unstable());

    Ok(())
}

fn doc_changes_to_uncompressed_changes<'a>(
    changes: impl Iterator<Item = DocChange> + 'a,
    actors: &'a [ActorId],
) -> impl Iterator<Item = amp::Change> + 'a {
    changes.map(move |change| amp::Change {
        // we've already confirmed that all change.actors are valid
        actor_id: actors[change.actor].clone(),
        seq: change.seq,
        time: change.time,
        // SAFETY: this unwrap is safe as we always add 1
        start_op: NonZeroU64::new(change.max_op - change.ops.len() as u64 + 1).unwrap(),
        hash: None,
        message: change.message,
        operations: change
            .ops
            .into_iter()
            .map(|op| amp::Op {
                action: op.action.clone(),
                insert: op.insert,
                key: op.key,
                obj: op.obj,
                // we've already confirmed that all op.actors are valid
                pred: pred_into(op.pred.into_iter(), actors),
            })
            .collect(),
        deps: Vec::new(),
        extra_bytes: change.extra_bytes,
    })
}

fn pred_into(
    pred: impl Iterator<Item = (u64, usize)>,
    actors: &[ActorId],
) -> amp::SortedVec<amp::OpId> {
    pred.map(|(ctr, actor)| amp::OpId(ctr, actors[actor].clone()))
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use crate::legacy as amp;
|
||||||
|
#[test]
|
||||||
|
fn mismatched_head_repro_one() {
|
||||||
|
let op_json = serde_json::json!({
|
||||||
|
"ops": [
|
||||||
|
{
|
||||||
|
"action": "del",
|
||||||
|
"obj": "1@1485eebc689d47efbf8b892e81653eb3",
|
||||||
|
"elemId": "3164@0dcdf83d9594477199f80ccd25e87053",
|
||||||
|
"pred": [
|
||||||
|
"3164@0dcdf83d9594477199f80ccd25e87053"
|
||||||
|
],
|
||||||
|
"insert": false
|
||||||
|
},
|
||||||
|
],
|
||||||
|
"actor": "e63cf5ed1f0a4fb28b2c5bc6793b9272",
|
||||||
|
"hash": "e7fd5c02c8fdd2cdc3071ce898a5839bf36229678af3b940f347da541d147ae2",
|
||||||
|
"seq": 1,
|
||||||
|
"startOp": 3179,
|
||||||
|
"time": 1634146652,
|
||||||
|
"message": null,
|
||||||
|
"deps": [
|
||||||
|
"2603cded00f91e525507fc9e030e77f9253b239d90264ee343753efa99e3fec1"
|
||||||
|
]
|
||||||
|
});
|
||||||
|
|
||||||
|
let change: amp::Change = serde_json::from_value(op_json).unwrap();
|
||||||
|
let expected_hash: super::amp::ChangeHash =
|
||||||
|
"4dff4665d658a28bb6dcace8764eb35fa8e48e0a255e70b6b8cbf8e8456e5c50"
|
||||||
|
.parse()
|
||||||
|
.unwrap();
|
||||||
|
let encoded: super::Change = change.into();
|
||||||
|
assert_eq!(encoded.hash, expected_hash);
|
||||||
|
}
|
||||||
|
}
|
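The heads check in decode_document above is the key invariant: a document's heads are exactly the change hashes that no later change names as a dependency. A hedged, self-contained sketch of that calculation (the Hash32 type and the (hash, deps) input shape are illustrative stand-ins, not the crate's real types):

use std::collections::HashSet;

type Hash32 = [u8; 4]; // stand-in for a real 32-byte change hash

/// Changes arrive with every dependency preceding its dependents, so
/// removing each change's deps and then inserting its own hash leaves
/// exactly the "heads": hashes nothing else depends on.
fn calculate_heads(changes: &[(Hash32, Vec<Hash32>)]) -> HashSet<Hash32> {
    let mut heads = HashSet::new();
    for (hash, deps) in changes {
        for dep in deps {
            heads.remove(dep);
        }
        heads.insert(*hash);
    }
    heads
}

fn main() {
    let a = [1, 0, 0, 0];
    let b = [2, 0, 0, 0];
    let c = [3, 0, 0, 0];
    // b and c both depend on a; nothing depends on b or c.
    let changes = vec![(a, vec![]), (b, vec![a]), (c, vec![a])];
    assert_eq!(calculate_heads(&changes), HashSet::from([b, c]));
}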
52 automerge/src/clock.rs Normal file

@@ -0,0 +1,52 @@
use crate::types::OpId;
use fxhash::FxBuildHasher;
use std::cmp;
use std::collections::HashMap;

#[derive(Debug, Clone, PartialEq)]
pub(crate) struct Clock(HashMap<usize, u64, FxBuildHasher>);

impl Clock {
    pub fn new() -> Self {
        Clock(Default::default())
    }

    pub fn include(&mut self, key: usize, n: u64) {
        self.0
            .entry(key)
            .and_modify(|m| *m = cmp::max(n, *m))
            .or_insert(n);
    }

    pub fn covers(&self, id: &OpId) -> bool {
        if let Some(val) = self.0.get(&id.1) {
            val >= &id.0
        } else {
            false
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn covers() {
        let mut clock = Clock::new();

        clock.include(1, 20);
        clock.include(2, 10);

        assert!(clock.covers(&OpId(10, 1)));
        assert!(clock.covers(&OpId(20, 1)));
        assert!(!clock.covers(&OpId(30, 1)));

        assert!(clock.covers(&OpId(5, 2)));
        assert!(clock.covers(&OpId(10, 2)));
        assert!(!clock.covers(&OpId(15, 2)));

        assert!(!clock.covers(&OpId(1, 3)));
        assert!(!clock.covers(&OpId(100, 3)));
    }
}
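Clock is a per-actor high-water mark: covers(&OpId(counter, actor)) asks whether an op happened at or before the recorded maximum counter for that actor. A hedged sketch of how such a clock might be built up while walking change metadata (the (actor, max_op) input shape is illustrative, not the crate's change type):

use std::cmp;
use std::collections::HashMap;

/// Fold (actor index, highest op counter) observations into a
/// vector-clock-like map, mirroring Clock::include above.
fn build_clock(observed: &[(usize, u64)]) -> HashMap<usize, u64> {
    let mut clock: HashMap<usize, u64> = HashMap::new();
    for &(actor, max_op) in observed {
        clock
            .entry(actor)
            .and_modify(|m| *m = cmp::max(max_op, *m))
            .or_insert(max_op);
    }
    clock
}

fn main() {
    // actor 0 seen up to op 7, then op 3 out of order; actor 1 up to op 2
    let clock = build_clock(&[(0, 7), (1, 2), (0, 3)]);
    assert_eq!(clock[&0], 7); // include() keeps the maximum
    assert_eq!(clock[&1], 2);
}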
1384 automerge/src/columnar.rs Normal file
File diff suppressed because it is too large
@@ -52,60 +52,7 @@ pub enum Error {
     Io(#[from] io::Error),
 }
 
-impl PartialEq<Error> for Error {
-    fn eq(&self, other: &Error) -> bool {
-        match (self, other) {
-            (
-                Self::WrongType {
-                    expected_one_of: l_expected_one_of,
-                    found: l_found,
-                },
-                Self::WrongType {
-                    expected_one_of: r_expected_one_of,
-                    found: r_found,
-                },
-            ) => l_expected_one_of == r_expected_one_of && l_found == r_found,
-            (Self::BadChangeFormat(l0), Self::BadChangeFormat(r0)) => l0 == r0,
-            (
-                Self::WrongByteLength {
-                    expected: l_expected,
-                    found: l_found,
-                },
-                Self::WrongByteLength {
-                    expected: r_expected,
-                    found: r_found,
-                },
-            ) => l_expected == r_expected && l_found == r_found,
-            (
-                Self::ColumnsNotInAscendingOrder {
-                    last: l_last,
-                    found: l_found,
-                },
-                Self::ColumnsNotInAscendingOrder {
-                    last: r_last,
-                    found: r_found,
-                },
-            ) => l_last == r_last && l_found == r_found,
-            (
-                Self::InvalidChecksum {
-                    found: l_found,
-                    calculated: l_calculated,
-                },
-                Self::InvalidChecksum {
-                    found: r_found,
-                    calculated: r_calculated,
-                },
-            ) => l_found == r_found && l_calculated == r_calculated,
-            (Self::InvalidChange(l0), Self::InvalidChange(r0)) => l0 == r0,
-            (Self::ChangeDecompressFailed(l0), Self::ChangeDecompressFailed(r0)) => l0 == r0,
-            (Self::Leb128(_l0), Self::Leb128(_r0)) => true,
-            (Self::Io(l0), Self::Io(r0)) => l0.kind() == r0.kind(),
-            _ => core::mem::discriminant(self) == core::mem::discriminant(other),
-        }
-    }
-}
-
-#[derive(thiserror::Error, PartialEq, Debug)]
+#[derive(thiserror::Error, Debug)]
 pub enum InvalidChangeError {
     #[error("Change contained an operation with action 'set' which did not have a 'value'")]
     SetOpWithoutValue,
@@ -125,13 +72,13 @@ pub enum InvalidChangeError {
 
 #[derive(Clone, Debug)]
 pub(crate) struct Decoder<'a> {
-    pub(crate) offset: usize,
-    pub(crate) last_read: usize,
+    pub offset: usize,
+    pub last_read: usize,
     data: Cow<'a, [u8]>,
 }
 
 impl<'a> Decoder<'a> {
-    pub(crate) fn new(data: Cow<'a, [u8]>) -> Self {
+    pub fn new(data: Cow<'a, [u8]>) -> Self {
         Decoder {
             offset: 0,
             last_read: 0,
@@ -139,7 +86,7 @@ impl<'a> Decoder<'a> {
         }
     }
 
-    pub(crate) fn read<T: Decodable + Debug>(&mut self) -> Result<T, Error> {
+    pub fn read<T: Decodable + Debug>(&mut self) -> Result<T, Error> {
         let mut buf = &self.data[self.offset..];
         let init_len = buf.len();
         let val = T::decode::<&[u8]>(&mut buf).ok_or(Error::NoDecodedValue)?;
@@ -153,7 +100,7 @@ impl<'a> Decoder<'a> {
         }
     }
 
-    pub(crate) fn read_bytes(&mut self, index: usize) -> Result<&[u8], Error> {
+    pub fn read_bytes(&mut self, index: usize) -> Result<&[u8], Error> {
         if self.offset + index > self.data.len() {
             Err(Error::TryingToReadPastEnd)
         } else {
@@ -164,7 +111,7 @@ impl<'a> Decoder<'a> {
         }
     }
 
-    pub(crate) fn done(&self) -> bool {
+    pub fn done(&self) -> bool {
         self.offset >= self.data.len()
     }
 }
@@ -212,7 +159,7 @@ impl<'a> Iterator for BooleanDecoder<'a> {
 /// See discussion on [`crate::encoding::RleEncoder`] for the format data is stored in.
 #[derive(Debug)]
 pub(crate) struct RleDecoder<'a, T> {
-    pub(crate) decoder: Decoder<'a>,
+    pub decoder: Decoder<'a>,
     last_value: Option<T>,
     count: isize,
     literal: bool,
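The Decoder changed above is a plain cursor: read() hands a shrinking &[u8] to a Decodable, then advances offset by however many bytes were consumed (init_len - buf.len()). A hedged minimal sketch of the same cursor pattern using the leb128 crate directly (the crate the encoders below also use; the Cursor struct itself is illustrative, not the crate's Decoder):

// requires the leb128 crate, the same one this repo depends on
struct Cursor<'a> {
    data: &'a [u8],
    offset: usize,
}

impl<'a> Cursor<'a> {
    /// Read one unsigned LEB128 value and advance past the bytes it used,
    /// mirroring how Decoder::read computes `init_len - buf.len()`.
    fn read_u64(&mut self) -> Option<u64> {
        let mut buf = &self.data[self.offset..];
        let init_len = buf.len();
        let val = leb128::read::unsigned(&mut buf).ok()?;
        self.offset += init_len - buf.len();
        Some(val)
    }

    fn done(&self) -> bool {
        self.offset >= self.data.len()
    }
}

fn main() {
    // 624485 encodes as the classic three-byte LEB128 example: E5 8E 26
    let bytes = [0xE5, 0x8E, 0x26, 0x07];
    let mut cur = Cursor { data: &bytes, offset: 0 };
    assert_eq!(cur.read_u64(), Some(624485));
    assert_eq!(cur.read_u64(), Some(7));
    assert!(cur.done());
}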
383 automerge/src/encoding.rs Normal file

@@ -0,0 +1,383 @@
use core::fmt::Debug;
use std::{
    io,
    io::{Read, Write},
    mem,
    num::NonZeroU64,
};

use flate2::{bufread::DeflateEncoder, Compression};
use smol_str::SmolStr;

use crate::columnar::COLUMN_TYPE_DEFLATE;
use crate::ActorId;

pub(crate) const DEFLATE_MIN_SIZE: usize = 256;

/// The error type for encoding operations.
#[derive(Debug, thiserror::Error)]
pub enum Error {
    #[error(transparent)]
    Io(#[from] io::Error),
}

/// Encodes booleans by storing the count of the same value.
///
/// The sequence of numbers describes the count of false values on even indices (0-indexed) and the
/// count of true values on odd indices (0-indexed).
///
/// Counts are encoded as usize.
pub(crate) struct BooleanEncoder {
    buf: Vec<u8>,
    last: bool,
    count: usize,
}

impl BooleanEncoder {
    pub fn new() -> BooleanEncoder {
        BooleanEncoder {
            buf: Vec::new(),
            last: false,
            count: 0,
        }
    }

    pub fn append(&mut self, value: bool) {
        if value == self.last {
            self.count += 1;
        } else {
            self.count.encode(&mut self.buf).ok();
            self.last = value;
            self.count = 1;
        }
    }

    pub fn finish(mut self, col: u32) -> ColData {
        if self.count > 0 {
            self.count.encode(&mut self.buf).ok();
        }
        ColData::new(col, self.buf)
    }
}

/// Encodes integers as the change since the previous value.
///
/// The initial value is 0 encoded as u64. Deltas are encoded as i64.
///
/// Run length encoding is then applied to the resulting sequence.
pub(crate) struct DeltaEncoder {
    rle: RleEncoder<i64>,
    absolute_value: u64,
}

impl DeltaEncoder {
    pub fn new() -> DeltaEncoder {
        DeltaEncoder {
            rle: RleEncoder::new(),
            absolute_value: 0,
        }
    }

    pub fn append_value(&mut self, value: u64) {
        self.rle
            .append_value(value as i64 - self.absolute_value as i64);
        self.absolute_value = value;
    }

    pub fn append_null(&mut self) {
        self.rle.append_null();
    }

    pub fn finish(self, col: u32) -> ColData {
        self.rle.finish(col)
    }
}

enum RleState<T> {
    Empty,
    NullRun(usize),
    LiteralRun(T, Vec<T>),
    LoneVal(T),
    Run(T, usize),
}

/// Encodes data in run lengh encoding format. This is very efficient for long repeats of data
///
/// There are 3 types of 'run' in this encoder:
/// - a normal run (compresses repeated values)
/// - a null run (compresses repeated nulls)
/// - a literal run (no compression)
///
/// A normal run consists of the length of the run (encoded as an i64) followed by the encoded value that this run contains.
///
/// A null run consists of a zero value (encoded as an i64) followed by the length of the null run (encoded as a usize).
///
/// A literal run consists of the **negative** length of the run (encoded as an i64) followed by the values in the run.
///
/// Therefore all the types start with an encoded i64, the value of which determines the type of the following data.
pub(crate) struct RleEncoder<T>
where
    T: Encodable + PartialEq + Clone,
{
    buf: Vec<u8>,
    state: RleState<T>,
}

impl<T> RleEncoder<T>
where
    T: Encodable + PartialEq + Clone,
{
    pub fn new() -> RleEncoder<T> {
        RleEncoder {
            buf: Vec::new(),
            state: RleState::Empty,
        }
    }

    pub fn finish(mut self, col: u32) -> ColData {
        match self.take_state() {
            // this covers `only_nulls`
            RleState::NullRun(size) => {
                if !self.buf.is_empty() {
                    self.flush_null_run(size);
                }
            }
            RleState::LoneVal(value) => self.flush_lit_run(vec![value]),
            RleState::Run(value, len) => self.flush_run(&value, len),
            RleState::LiteralRun(last, mut run) => {
                run.push(last);
                self.flush_lit_run(run);
            }
            RleState::Empty => {}
        }
        ColData::new(col, self.buf)
    }

    fn flush_run(&mut self, val: &T, len: usize) {
        self.encode(&(len as i64));
        self.encode(val);
    }

    fn flush_null_run(&mut self, len: usize) {
        self.encode::<i64>(&0);
        self.encode(&len);
    }

    fn flush_lit_run(&mut self, run: Vec<T>) {
        self.encode(&-(run.len() as i64));
        for val in run {
            self.encode(&val);
        }
    }

    fn take_state(&mut self) -> RleState<T> {
        let mut state = RleState::Empty;
        mem::swap(&mut self.state, &mut state);
        state
    }

    pub fn append_null(&mut self) {
        self.state = match self.take_state() {
            RleState::Empty => RleState::NullRun(1),
            RleState::NullRun(size) => RleState::NullRun(size + 1),
            RleState::LoneVal(other) => {
                self.flush_lit_run(vec![other]);
                RleState::NullRun(1)
            }
            RleState::Run(other, len) => {
                self.flush_run(&other, len);
                RleState::NullRun(1)
            }
            RleState::LiteralRun(last, mut run) => {
                run.push(last);
                self.flush_lit_run(run);
                RleState::NullRun(1)
            }
        }
    }

    pub fn append_value(&mut self, value: T) {
        self.state = match self.take_state() {
            RleState::Empty => RleState::LoneVal(value),
            RleState::LoneVal(other) => {
                if other == value {
                    RleState::Run(value, 2)
                } else {
                    let mut v = Vec::with_capacity(2);
                    v.push(other);
                    RleState::LiteralRun(value, v)
                }
            }
            RleState::Run(other, len) => {
                if other == value {
                    RleState::Run(other, len + 1)
                } else {
                    self.flush_run(&other, len);
                    RleState::LoneVal(value)
                }
            }
            RleState::LiteralRun(last, mut run) => {
                if last == value {
                    self.flush_lit_run(run);
                    RleState::Run(value, 2)
                } else {
                    run.push(last);
                    RleState::LiteralRun(value, run)
                }
            }
            RleState::NullRun(size) => {
                self.flush_null_run(size);
                RleState::LoneVal(value)
            }
        }
    }

    fn encode<V>(&mut self, val: &V)
    where
        V: Encodable,
    {
        val.encode(&mut self.buf).ok();
    }
}

pub(crate) trait Encodable {
    fn encode_with_actors_to_vec(&self, actors: &mut [ActorId]) -> io::Result<Vec<u8>> {
        let mut buf = Vec::new();
        self.encode_with_actors(&mut buf, actors)?;
        Ok(buf)
    }

    fn encode_with_actors<R: Write>(&self, buf: &mut R, _actors: &[ActorId]) -> io::Result<usize> {
        self.encode(buf)
    }

    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize>;

    fn encode_vec(&self, buf: &mut Vec<u8>) -> usize {
        self.encode(buf).unwrap()
    }
}

impl Encodable for SmolStr {
    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        let bytes = self.as_bytes();
        let head = bytes.len().encode(buf)?;
        buf.write_all(bytes)?;
        Ok(head + bytes.len())
    }
}

impl Encodable for String {
    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        let bytes = self.as_bytes();
        let head = bytes.len().encode(buf)?;
        buf.write_all(bytes)?;
        Ok(head + bytes.len())
    }
}

impl Encodable for Option<String> {
    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        if let Some(s) = self {
            s.encode(buf)
        } else {
            0.encode(buf)
        }
    }
}

impl Encodable for u64 {
    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        leb128::write::unsigned(buf, *self)
    }
}

impl Encodable for NonZeroU64 {
    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        leb128::write::unsigned(buf, self.get())
    }
}

impl Encodable for f64 {
    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        let bytes = self.to_le_bytes();
        buf.write_all(&bytes)?;
        Ok(bytes.len())
    }
}

impl Encodable for f32 {
    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        let bytes = self.to_le_bytes();
        buf.write_all(&bytes)?;
        Ok(bytes.len())
    }
}

impl Encodable for i64 {
    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        leb128::write::signed(buf, *self)
    }
}

impl Encodable for usize {
    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        (*self as u64).encode(buf)
    }
}

impl Encodable for u32 {
    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        u64::from(*self).encode(buf)
    }
}

impl Encodable for i32 {
    fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        i64::from(*self).encode(buf)
    }
}

#[derive(Debug)]
pub(crate) struct ColData {
    pub col: u32,
    pub data: Vec<u8>,
    #[cfg(debug_assertions)]
    has_been_deflated: bool,
}

impl ColData {
    pub fn new(col_id: u32, data: Vec<u8>) -> ColData {
        ColData {
            col: col_id,
            data,
            #[cfg(debug_assertions)]
            has_been_deflated: false,
        }
    }

    pub fn encode_col_len<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
        let mut len = 0;
        if !self.data.is_empty() {
            len += self.col.encode(buf)?;
            len += self.data.len().encode(buf)?;
        }
        Ok(len)
    }

    pub fn deflate(&mut self) {
        #[cfg(debug_assertions)]
        {
            debug_assert!(!self.has_been_deflated);
            self.has_been_deflated = true;
        }
        if self.data.len() > DEFLATE_MIN_SIZE {
            let mut deflated = Vec::new();
            let mut deflater = DeflateEncoder::new(&self.data[..], Compression::default());
            //This unwrap should be okay as we're reading and writing to in memory buffers
            deflater.read_to_end(&mut deflated).unwrap();
            self.col |= COLUMN_TYPE_DEFLATE;
            self.data = deflated;
        }
    }
}
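The run-length format documented on RleEncoder is easy to check by hand: a positive leading i64 means a repeat run, zero means a null run, negative means a literal run. A hedged, self-contained sketch that writes those three run types with the leb128 crate and asserts the resulting bytes (the column framing from ColData is deliberately omitted; this is just the run payloads):

// requires the leb128 crate, as used by the Encodable impls above
fn main() {
    let mut buf: Vec<u8> = Vec::new();

    // Normal run: 3 copies of the value 7 -> length 3 (i64), then the value.
    leb128::write::signed(&mut buf, 3).unwrap();
    leb128::write::unsigned(&mut buf, 7).unwrap();

    // Null run: 0 (i64), then the null count.
    leb128::write::signed(&mut buf, 0).unwrap();
    leb128::write::unsigned(&mut buf, 2).unwrap();

    // Literal run of [9, 8]: negative length -2 (i64), then each value.
    leb128::write::signed(&mut buf, -2).unwrap();
    leb128::write::unsigned(&mut buf, 9).unwrap();
    leb128::write::unsigned(&mut buf, 8).unwrap();

    // 3, 7, 0, 2, 9, 8 each fit in one LEB128 byte; -2 encodes as 0x7e.
    assert_eq!(buf, vec![0x03, 0x07, 0x00, 0x02, 0x7e, 0x09, 0x08]);
}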
62 automerge/src/error.rs Normal file

@@ -0,0 +1,62 @@
use crate::types::{ActorId, ScalarValue};
use crate::value::DataType;
use crate::{decoding, encoding};
use thiserror::Error;

#[derive(Error, Debug)]
pub enum AutomergeError {
    #[error("invalid opid format `{0}`")]
    InvalidOpId(String),
    #[error("obj id not from this document `{0}`")]
    ForeignObjId(String),
    #[error("there was an encoding problem: {0}")]
    Encoding(#[from] encoding::Error),
    #[error("there was a decoding problem: {0}")]
    Decoding(#[from] decoding::Error),
    #[error("key must not be an empty string")]
    EmptyStringKey,
    #[error("invalid seq {0}")]
    InvalidSeq(u64),
    #[error("index {0} is out of bounds")]
    InvalidIndex(usize),
    #[error("duplicate seq {0} found for actor {1}")]
    DuplicateSeqNumber(u64, ActorId),
    #[error("generic automerge error")]
    Fail,
}

#[cfg(feature = "wasm")]
impl From<AutomergeError> for wasm_bindgen::JsValue {
    fn from(err: AutomergeError) -> Self {
        js_sys::Error::new(&std::format!("{}", err)).into()
    }
}

#[derive(Error, Debug)]
#[error("Invalid actor ID: {0}")]
pub struct InvalidActorId(pub String);

#[derive(Error, Debug, PartialEq)]
#[error("Invalid scalar value, expected {expected} but received {unexpected}")]
pub(crate) struct InvalidScalarValue {
    pub raw_value: ScalarValue,
    pub datatype: DataType,
    pub unexpected: String,
    pub expected: String,
}

#[derive(Error, Debug, PartialEq)]
#[error("Invalid change hash slice: {0:?}")]
pub struct InvalidChangeHashSlice(pub Vec<u8>);

#[derive(Error, Debug, PartialEq)]
#[error("Invalid object ID: {0}")]
pub struct InvalidObjectId(pub String);

#[derive(Error, Debug)]
#[error("Invalid element ID: {0}")]
pub struct InvalidElementId(pub String);

#[derive(Error, Debug)]
#[error("Invalid OpID: {0}")]
pub struct InvalidOpId(pub String);
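The #[from] attributes on Encoding/Decoding are what let the lower-level errors bubble up through `?`. A hedged minimal sketch of the same thiserror pattern (the TopError type is illustrative, not the crate's):

// requires the thiserror crate, as used throughout error.rs
use thiserror::Error;

#[derive(Error, Debug)]
enum TopError {
    // #[from] generates From<std::io::Error> for TopError,
    // so `?` on an io::Result converts automatically.
    #[error("there was an io problem: {0}")]
    Io(#[from] std::io::Error),
}

fn read_file(path: &str) -> Result<String, TopError> {
    let s = std::fs::read_to_string(path)?; // io::Error -> TopError::Io
    Ok(s)
}

fn main() {
    if let Err(e) = read_file("/definitely/not/here") {
        println!("{}", e); // uses the #[error(...)] display string
    }
}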
82 automerge/src/exid.rs Normal file

@@ -0,0 +1,82 @@
use crate::ActorId;
use serde::Serialize;
use serde::Serializer;
use std::cmp::{Ord, Ordering};
use std::fmt;
use std::hash::{Hash, Hasher};

#[derive(Debug, Clone)]
pub enum ExId {
    Root,
    Id(u64, ActorId, usize),
}

impl PartialEq for ExId {
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (ExId::Root, ExId::Root) => true,
            (ExId::Id(ctr1, actor1, _), ExId::Id(ctr2, actor2, _))
                if ctr1 == ctr2 && actor1 == actor2 =>
            {
                true
            }
            _ => false,
        }
    }
}

impl Eq for ExId {}

impl fmt::Display for ExId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ExId::Root => write!(f, "_root"),
            ExId::Id(ctr, actor, _) => write!(f, "{}@{}", ctr, actor),
        }
    }
}

impl Hash for ExId {
    fn hash<H: Hasher>(&self, state: &mut H) {
        match self {
            ExId::Root => 0.hash(state),
            ExId::Id(ctr, actor, _) => {
                ctr.hash(state);
                actor.hash(state);
            }
        }
    }
}

impl Ord for ExId {
    fn cmp(&self, other: &Self) -> Ordering {
        match (self, other) {
            (ExId::Root, ExId::Root) => Ordering::Equal,
            (ExId::Root, _) => Ordering::Less,
            (_, ExId::Root) => Ordering::Greater,
            (ExId::Id(c1, a1, _), ExId::Id(c2, a2, _)) if c1 == c2 => a2.cmp(a1),
            (ExId::Id(c1, _, _), ExId::Id(c2, _, _)) => c1.cmp(c2),
        }
    }
}

impl PartialOrd for ExId {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Serialize for ExId {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(self.to_string().as_str())
    }
}

impl AsRef<ExId> for ExId {
    fn as_ref(&self) -> &ExId {
        self
    }
}
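Note the ordering rule in the Ord impl: Root sorts before everything, IDs order by counter first, and on equal counters the actor comparison is reversed (a2.cmp(a1)). A hedged standalone sketch of the same rule with plain tuples, so the tie-break direction is visible (the (u64, &str) id shape is illustrative):

use std::cmp::Ordering;

/// (counter, actor) ids ordered like ExId: counter ascending,
/// equal counters broken by *descending* actor.
fn cmp_id(a: &(u64, &str), b: &(u64, &str)) -> Ordering {
    match a.0.cmp(&b.0) {
        Ordering::Equal => b.1.cmp(a.1), // reversed, as in a2.cmp(a1)
        other => other,
    }
}

fn main() {
    let mut ids = vec![(2, "aa"), (1, "bb"), (2, "bb")];
    ids.sort_by(cmp_id);
    // counter 1 first; at counter 2 the *larger* actor sorts first
    assert_eq!(ids, vec![(1, "bb"), (2, "bb"), (2, "aa")]);
}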
@@ -5,7 +5,7 @@ use std::ops::Index;
 
 #[derive(Debug, Clone)]
 pub(crate) struct IndexedCache<T> {
-    pub(crate) cache: Vec<T>,
+    pub cache: Vec<T>,
     lookup: HashMap<T, usize>,
 }
 
@@ -22,14 +22,14 @@ impl<T> IndexedCache<T>
 where
     T: Clone + Eq + Hash + Ord,
 {
-    pub(crate) fn new() -> Self {
+    pub fn new() -> Self {
         IndexedCache {
             cache: Default::default(),
             lookup: Default::default(),
         }
     }
 
-    pub(crate) fn cache(&mut self, item: T) -> usize {
+    pub fn cache(&mut self, item: T) -> usize {
         if let Some(n) = self.lookup.get(&item) {
             *n
         } else {
@@ -40,37 +40,32 @@ where
         }
     }
 
-    pub(crate) fn lookup(&self, item: &T) -> Option<usize> {
+    pub fn lookup(&self, item: &T) -> Option<usize> {
         self.lookup.get(item).cloned()
     }
 
-    #[allow(dead_code)]
-    pub(crate) fn len(&self) -> usize {
+    pub fn len(&self) -> usize {
         self.cache.len()
     }
 
-    pub(crate) fn get(&self, index: usize) -> &T {
+    pub fn get(&self, index: usize) -> &T {
         &self.cache[index]
     }
 
-    pub(crate) fn safe_get(&self, index: usize) -> Option<&T> {
-        self.cache.get(index)
-    }
-
     /// Remove the last inserted entry into this cache.
     /// This is safe to do as it does not require reshuffling other entries.
     ///
     /// # Panics
     ///
     /// Panics on an empty cache.
-    pub(crate) fn remove_last(&mut self) -> T {
+    pub fn remove_last(&mut self) -> T {
         let last = self.cache.len() - 1;
         let t = self.cache.remove(last);
         self.lookup.remove(&t);
         t
     }
 
-    pub(crate) fn sorted(&self) -> IndexedCache<T> {
+    pub fn sorted(&self) -> IndexedCache<T> {
         let mut sorted = Self::new();
         self.cache.iter().sorted().cloned().for_each(|item| {
             let n = sorted.cache.len();
@@ -80,26 +75,7 @@ where
         sorted
     }
 
-    /// Create a vector from positions in this index to positions in an equivalent sorted index
-    ///
-    /// This is useful primarily when encoding an `IndexedCache<ActorId>` in the document format.
-    /// In this case we encode the actors in sorted order in the document and all ops reference the
-    /// offset into this sorted actor array. But the `IndexedCache<ActorId>` we have in the
-    /// application does not contain actors in sorted order because we add them as we encounter
-    /// them, so we must map from the actor IDs in the application to the actor IDs in the document
-    /// format
-    ///
-    /// # Examples
-    ///
-    /// ```rust,ignore
-    /// let idx: IndexedCache<String> = IndexedCache::new();
-    /// let first_idx = idx.cache("b"); // first_idx is `0`
-    /// let second_idx = idx.cache("a"); // second_idx i `1`
-    /// let encoded = idx.encode_index();
-    /// // first_idx (0) maps to `1` whilst second_idx (1) maps to `0` because "a" < "b"
-    /// assert_eq!(encoded, vec![1,0])
-    /// ```
-    pub(crate) fn encode_index(&self) -> Vec<usize> {
+    pub fn encode_index(&self) -> Vec<usize> {
         let sorted: Vec<_> = self.cache.iter().sorted().cloned().collect();
         self.cache
             .iter()
@@ -123,15 +99,3 @@ impl<T> Index<usize> for IndexedCache<T> {
         &self.cache[i]
     }
 }
-
-impl<A: Hash + Eq + Clone> FromIterator<A> for IndexedCache<A> {
-    fn from_iter<T: IntoIterator<Item = A>>(iter: T) -> Self {
-        let mut cache = Vec::new();
-        let mut lookup = HashMap::new();
-        for (index, elem) in iter.into_iter().enumerate() {
-            cache.push(elem.clone());
-            lookup.insert(elem, index);
-        }
-        Self { cache, lookup }
-    }
-}
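The doc comment removed above is worth keeping in mind: encode_index maps insertion-order positions to positions in the sorted cache, which is how actor references get rewritten into the document format's sorted actor array. A hedged standalone sketch of that mapping over a plain Vec<String> instead of the crate's IndexedCache, using itertools as the original does:

// requires the itertools crate (the source of the sorted() calls above)
use itertools::Itertools;

/// For each item in insertion order, find its position in sorted order.
fn encode_index(cache: &[String]) -> Vec<usize> {
    let sorted: Vec<_> = cache.iter().sorted().cloned().collect();
    cache
        .iter()
        .map(|a| sorted.iter().position(|r| r == a).unwrap())
        .collect()
}

fn main() {
    let cache = vec!["b".to_string(), "a".to_string()];
    // "b" was inserted first (index 0) but sorts second (index 1), and vice versa
    assert_eq!(encode_index(&cache), vec![1, 0]);
}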
@@ -1,17 +1,13 @@
+use crate::op_set::B;
 use crate::{query, Automerge};
 
-/// An iterator over the keys of an object
-///
-/// This is returned by [`crate::ReadDoc::keys`] and method. The returned item is either
-/// the keys of a map, or the encoded element IDs of a sequence.
-#[derive(Debug)]
 pub struct Keys<'a, 'k> {
-    keys: Option<query::Keys<'k>>,
+    keys: Option<query::Keys<'k, B>>,
     doc: &'a Automerge,
 }
 
 impl<'a, 'k> Keys<'a, 'k> {
-    pub(crate) fn new(doc: &'a Automerge, keys: Option<query::Keys<'k>>) -> Self {
+    pub(crate) fn new(doc: &'a Automerge, keys: Option<query::Keys<'k, B>>) -> Self {
         Self { keys, doc }
     }
 }
@@ -1,17 +1,13 @@
+use crate::op_set::B;
 use crate::{query, Automerge};
 
-/// An iterator over the keys of an object at a particular point in history
-///
-/// This is returned by [`crate::ReadDoc::keys_at`] method. The returned item is either the keys of a map,
-/// or the encoded element IDs of a sequence.
-#[derive(Debug)]
 pub struct KeysAt<'a, 'k> {
-    keys: Option<query::KeysAt<'k>>,
+    keys: Option<query::KeysAt<'k, B>>,
     doc: &'a Automerge,
 }
 
 impl<'a, 'k> KeysAt<'a, 'k> {
-    pub(crate) fn new(doc: &'a Automerge, keys: Option<query::KeysAt<'k>>) -> Self {
+    pub(crate) fn new(doc: &'a Automerge, keys: Option<query::KeysAt<'k, B>>) -> Self {
         Self { keys, doc }
    }
 }
@@ -132,7 +132,7 @@ impl Key {
     }
 }
 
-#[derive(Debug, Default, Clone, Eq, PartialEq, Serialize)]
+#[derive(Debug, Default, Clone, PartialEq, Serialize)]
 #[serde(transparent)]
 pub struct SortedVec<T>(Vec<T>);
 
@@ -157,7 +157,7 @@ impl<T> SortedVec<T> {
         self.0.get_mut(index)
     }
 
-    pub fn iter(&self) -> std::slice::Iter<'_, T> {
+    pub fn iter(&self) -> impl Iterator<Item = &T> {
         self.0.iter()
     }
 }
@@ -216,8 +216,8 @@ pub struct Op {
 impl Op {
     pub fn primitive_value(&self) -> Option<ScalarValue> {
         match &self.action {
-            OpType::Put(v) => Some(v.clone()),
-            OpType::Increment(i) => Some(ScalarValue::Int(*i)),
+            OpType::Set(v) => Some(v.clone()),
+            OpType::Inc(i) => Some(ScalarValue::Int(*i)),
             _ => None,
         }
     }
@@ -9,7 +9,7 @@ impl Serialize for ChangeHash {
     where
         S: Serializer,
     {
-        hex::encode(self.0).serialize(serializer)
+        hex::encode(&self.0).serialize(serializer)
     }
 }
 
@@ -19,7 +19,7 @@ impl Serialize for Op {
         }
 
         let numerical_datatype = match &self.action {
-            OpType::Put(value) => value.as_numerical_datatype(),
+            OpType::Set(value) => value.as_numerical_datatype(),
             _ => None,
         };
 
@@ -47,9 +47,14 @@ impl Serialize for Op {
             op.serialize_field("datatype", &datatype)?;
         }
         match &self.action {
-            OpType::Increment(n) => op.serialize_field("value", &n)?,
-            OpType::Put(ScalarValue::Counter(c)) => op.serialize_field("value", &c.start)?,
-            OpType::Put(value) => op.serialize_field("value", &value)?,
+            OpType::Inc(n) => op.serialize_field("value", &n)?,
+            OpType::Set(value) => op.serialize_field("value", &value)?,
+            OpType::MarkBegin(m) => {
+                op.serialize_field("name", &m.name)?;
+                op.serialize_field("expand", &m.expand)?;
+                op.serialize_field("value", &m.value)?;
+            }
+            OpType::MarkEnd(s) => op.serialize_field("expand", &s)?,
             _ => {}
         }
         op.serialize_field("pred", &self.pred)?;
@@ -71,6 +76,8 @@ pub(crate) enum RawOpType {
     Del,
     Inc,
     Set,
+    MarkBegin,
+    MarkEnd,
 }
 
 impl Serialize for RawOpType {
@@ -86,6 +93,8 @@ impl Serialize for RawOpType {
             RawOpType::Del => "del",
             RawOpType::Inc => "inc",
             RawOpType::Set => "set",
+            RawOpType::MarkBegin => "mark_begin",
+            RawOpType::MarkEnd => "mark_end",
         };
         serializer.serialize_str(s)
     }
@@ -117,6 +126,8 @@ impl<'de> Deserialize<'de> for RawOpType {
             "del" => Ok(RawOpType::Del),
             "inc" => Ok(RawOpType::Inc),
             "set" => Ok(RawOpType::Set),
+            "mark_begin" => Ok(RawOpType::MarkBegin),
+            "mark_end" => Ok(RawOpType::MarkEnd),
             other => Err(Error::unknown_variant(other, VARIANTS)),
         }
     }
@@ -132,7 +143,7 @@ impl<'de> Deserialize<'de> for Op {
     impl<'de> Visitor<'de> for OperationVisitor {
         type Value = Op;
 
-        fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
             formatter.write_str("An operation object")
         }
 
@@ -188,7 +199,31 @@ impl<'de> Deserialize<'de> for Op {
                 RawOpType::MakeTable => OpType::Make(ObjType::Table),
                 RawOpType::MakeList => OpType::Make(ObjType::List),
                 RawOpType::MakeText => OpType::Make(ObjType::Text),
-                RawOpType::Del => OpType::Delete,
+                RawOpType::Del => OpType::Del,
+                RawOpType::MarkBegin => {
+                    let name = name.ok_or_else(|| Error::missing_field("mark(name)"))?;
+                    let expand = expand.unwrap_or(false);
+                    let value = if let Some(datatype) = datatype {
+                        let raw_value = value
+                            .ok_or_else(|| Error::missing_field("value"))?
+                            .unwrap_or(ScalarValue::Null);
+                        raw_value.as_datatype(datatype).map_err(|e| {
+                            Error::invalid_value(
+                                Unexpected::Other(e.unexpected.as_str()),
+                                &e.expected.as_str(),
+                            )
+                        })?
+                    } else {
+                        value
+                            .ok_or_else(|| Error::missing_field("value"))?
+                            .unwrap_or(ScalarValue::Null)
+                    };
+                    OpType::mark(name, expand, value)
+                }
+                RawOpType::MarkEnd => {
+                    let expand = expand.unwrap_or(true);
+                    OpType::MarkEnd(expand)
+                }
                 RawOpType::Set => {
                     let value = if let Some(datatype) = datatype {
                         let raw_value = value
@@ -205,20 +240,17 @@ impl<'de> Deserialize<'de> for Op {
                             .ok_or_else(|| Error::missing_field("value"))?
                             .unwrap_or(ScalarValue::Null)
                     };
-                    OpType::Put(value)
+                    OpType::Set(value)
                 }
                 RawOpType::Inc => match value.flatten() {
-                    Some(ScalarValue::Int(n)) => Ok(OpType::Increment(n)),
-                    Some(ScalarValue::Uint(n)) => Ok(OpType::Increment(n as i64)),
-                    Some(ScalarValue::F64(n)) => Ok(OpType::Increment(n as i64)),
-                    Some(ScalarValue::Counter(n)) => Ok(OpType::Increment(n.into())),
-                    Some(ScalarValue::Timestamp(n)) => Ok(OpType::Increment(n)),
+                    Some(ScalarValue::Int(n)) => Ok(OpType::Inc(n)),
+                    Some(ScalarValue::Uint(n)) => Ok(OpType::Inc(n as i64)),
+                    Some(ScalarValue::F64(n)) => Ok(OpType::Inc(n as i64)),
+                    Some(ScalarValue::Counter(n)) => Ok(OpType::Inc(n.into())),
+                    Some(ScalarValue::Timestamp(n)) => Ok(OpType::Inc(n)),
                     Some(ScalarValue::Bytes(s)) => {
                         Err(Error::invalid_value(Unexpected::Bytes(&s), &"a number"))
                     }
-                    Some(ScalarValue::Unknown { bytes, .. }) => {
-                        Err(Error::invalid_value(Unexpected::Bytes(&bytes), &"a number"))
-                    }
                     Some(ScalarValue::Str(s)) => {
                         Err(Error::invalid_value(Unexpected::Str(&s), &"a number"))
                     }
@@ -270,7 +302,7 @@ mod tests {
                 "pred": []
             }),
             expected: Ok(Op {
-                action: OpType::Put(ScalarValue::Uint(123)),
+                action: OpType::Set(ScalarValue::Uint(123)),
                 obj: ObjectId::Root,
                 key: "somekey".into(),
                 insert: false,
@@ -288,7 +320,7 @@ mod tests {
                 "pred": []
             }),
             expected: Ok(Op {
-                action: OpType::Put(ScalarValue::Int(-123)),
+                action: OpType::Set(ScalarValue::Int(-123)),
                 obj: ObjectId::Root,
                 key: "somekey".into(),
                 insert: false,
@@ -306,7 +338,7 @@ mod tests {
                 "pred": []
             }),
             expected: Ok(Op {
-                action: OpType::Put(ScalarValue::F64(-123.0)),
+                action: OpType::Set(ScalarValue::F64(-123.0)),
                 obj: ObjectId::Root,
                 key: "somekey".into(),
                 insert: false,
@@ -323,7 +355,7 @@ mod tests {
                 "pred": []
             }),
             expected: Ok(Op {
-                action: OpType::Put(ScalarValue::Str("somestring".into())),
+                action: OpType::Set(ScalarValue::Str("somestring".into())),
                 obj: ObjectId::Root,
                 key: "somekey".into(),
                 insert: false,
@@ -340,7 +372,7 @@ mod tests {
                 "pred": []
             }),
             expected: Ok(Op {
-                action: OpType::Put(ScalarValue::F64(1.23)),
+                action: OpType::Set(ScalarValue::F64(1.23)),
                 obj: ObjectId::Root,
                 key: "somekey".into(),
                 insert: false,
@@ -357,7 +389,7 @@ mod tests {
                 "pred": []
             }),
             expected: Ok(Op {
-                action: OpType::Put(ScalarValue::Boolean(true)),
+                action: OpType::Set(ScalarValue::Boolean(true)),
                 obj: ObjectId::Root,
                 key: "somekey".into(),
                 insert: false,
@@ -386,7 +418,7 @@ mod tests {
                 "pred": []
             }),
             expected: Ok(Op {
-                action: OpType::Put(ScalarValue::Counter(123.into())),
+                action: OpType::Set(ScalarValue::Counter(123.into())),
                 obj: ObjectId::Root,
                 key: "somekey".into(),
                 insert: false,
@@ -434,7 +466,7 @@ mod tests {
                 "pred": []
             }),
             expected: Ok(Op {
-                action: OpType::Increment(12),
+                action: OpType::Inc(12),
                 obj: ObjectId::Root,
                 key: "somekey".into(),
                 insert: false,
@@ -451,7 +483,7 @@ mod tests {
                 "pred": []
             }),
             expected: Ok(Op {
-                action: OpType::Increment(12),
+                action: OpType::Inc(12),
                 obj: ObjectId::Root,
                 key: "somekey".into(),
                 insert: false,
@@ -478,7 +510,7 @@ mod tests {
                 "pred": []
             }),
             expected: Ok(Op {
-                action: OpType::Put(ScalarValue::Null),
+                action: OpType::Set(ScalarValue::Null),
                 obj: ObjectId::Root,
                 key: "somekey".into(),
                 insert: false,
@@ -556,7 +588,7 @@ mod tests {
     #[test]
     fn test_serialize_key() {
         let map_key = Op {
-            action: OpType::Increment(12),
+            action: OpType::Inc(12),
             obj: ObjectId::Root,
             key: "somekey".into(),
             insert: false,
@@ -567,7 +599,7 @@ mod tests {
         assert_eq!(json.as_object().unwrap().get("key"), Some(&expected));
 
         let elemid_key = Op {
-            action: OpType::Increment(12),
+            action: OpType::Inc(12),
             obj: ObjectId::Root,
             key: OpId::from_str("1@7ef48769b04d47e9a88e98a134d62716")
                 .unwrap()
@@ -584,35 +616,35 @@ mod tests {
     fn test_round_trips() {
         let testcases = vec![
             Op {
-                action: OpType::Put(ScalarValue::Uint(12)),
+                action: OpType::Set(ScalarValue::Uint(12)),
                 obj: ObjectId::Root,
                 key: "somekey".into(),
                 insert: false,
                 pred: SortedVec::new(),
             },
             Op {
-                action: OpType::Increment(12),
+                action: OpType::Inc(12),
                 obj: ObjectId::from_str("1@7ef48769b04d47e9a88e98a134d62716").unwrap(),
                 key: "somekey".into(),
                 insert: false,
                 pred: SortedVec::new(),
             },
             Op {
-                action: OpType::Put(ScalarValue::Uint(12)),
+                action: OpType::Set(ScalarValue::Uint(12)),
                 obj: ObjectId::from_str("1@7ef48769b04d47e9a88e98a134d62716").unwrap(),
                 key: "somekey".into(),
                 insert: false,
                 pred: vec![OpId::from_str("1@7ef48769b04d47e9a88e98a134d62716").unwrap()].into(),
             },
             Op {
-                action: OpType::Increment(12),
+                action: OpType::Inc(12),
                 obj: ObjectId::Root,
                 key: "somekey".into(),
                 insert: false,
                 pred: SortedVec::new(),
             },
             Op {
-                action: OpType::Put("seomthing".into()),
+                action: OpType::Set("seomthing".into()),
                 obj: ObjectId::from_str("1@7ef48769b04d47e9a88e98a134d62716").unwrap(),
                 key: OpId::from_str("1@7ef48769b04d47e9a88e98a134d62716")
                     .unwrap()
@@ -15,9 +15,11 @@ impl Serialize for OpType {
             OpType::Make(ObjType::Table) => RawOpType::MakeTable,
             OpType::Make(ObjType::List) => RawOpType::MakeList,
             OpType::Make(ObjType::Text) => RawOpType::MakeText,
-            OpType::Delete => RawOpType::Del,
-            OpType::Increment(_) => RawOpType::Inc,
-            OpType::Put(_) => RawOpType::Set,
+            OpType::MarkBegin(_) => RawOpType::MarkBegin,
+            OpType::MarkEnd(_) => RawOpType::MarkEnd,
+            OpType::Del => RawOpType::Del,
+            OpType::Inc(_) => RawOpType::Inc,
+            OpType::Set(_) => RawOpType::Set,
         };
         raw_type.serialize(serializer)
     }
@@ -12,7 +12,7 @@ impl<'de> Deserialize<'de> for ScalarValue {
     impl<'de> de::Visitor<'de> for ValueVisitor {
         type Value = ScalarValue;
 
-        fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
             formatter.write_str("a number, string, bool, or null")
         }
 
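The Op serializer changed above uses serde's SerializeStruct with a variable field count and action-dependent fields, which is the standard way to emit this kind of conditional JSON. A hedged, self-contained sketch of that pattern (the Action/Op types here are illustrative, not the crate's legacy types):

// requires serde and serde_json, both already dependencies of this repo
use serde::ser::{Serialize, SerializeStruct, Serializer};

enum Action {
    Set(i64),
    Del,
}

struct Op {
    action: Action,
    key: String,
}

impl Serialize for Op {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        // Field count varies with the action, as in the diff above.
        let fields = match self.action {
            Action::Set(_) => 3,
            Action::Del => 2,
        };
        let mut op = serializer.serialize_struct("Op", fields)?;
        op.serialize_field("key", &self.key)?;
        match &self.action {
            Action::Set(v) => {
                op.serialize_field("action", "set")?;
                op.serialize_field("value", v)?;
            }
            Action::Del => op.serialize_field("action", "del")?,
        }
        op.end()
    }
}

fn main() {
    let op = Op { action: Action::Set(7), key: "k".into() };
    assert_eq!(
        serde_json::to_string(&op).unwrap(),
        r#"{"key":"k","action":"set","value":7}"#
    );
}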
64 automerge/src/lib.rs Normal file

@@ -0,0 +1,64 @@
#[macro_export]
macro_rules! log {
    ( $( $t:tt )* ) => {
        {
            use $crate::__log;
            __log!( $( $t )* );
        }
    }
}

#[cfg(all(feature = "wasm", target_family = "wasm"))]
#[macro_export]
macro_rules! __log {
    ( $( $t:tt )* ) => {
        web_sys::console::log_1(&format!( $( $t )* ).into());
    }
}

#[cfg(not(all(feature = "wasm", target_family = "wasm")))]
#[macro_export]
macro_rules! __log {
    ( $( $t:tt )* ) => {
        println!( $( $t )* );
    }
}

mod autocommit;
mod automerge;
mod change;
mod clock;
mod columnar;
mod decoding;
mod encoding;
mod error;
mod exid;
mod indexed_cache;
mod keys;
mod keys_at;
mod legacy;
mod op_set;
mod op_tree;
mod query;
pub mod sync;
pub mod transaction;
mod types;
mod value;
#[cfg(feature = "optree-visualisation")]
mod visualisation;

pub use crate::automerge::Automerge;
pub use autocommit::AutoCommit;
pub use change::Change;
pub use decoding::Error as DecodingError;
pub use decoding::InvalidChangeError;
pub use encoding::Error as EncodingError;
pub use error::AutomergeError;
pub use exid::ExId as ObjId;
pub use keys::Keys;
pub use keys_at::KeysAt;
pub use legacy::Change as ExpandedChange;
pub use types::{ActorId, ChangeHash, ObjType, OpType, Prop};
pub use value::{ScalarValue, Value};

pub const ROOT: ObjId = ObjId::Root;
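The two cfg-gated __log! definitions give a single log! front end that goes to the browser console under wasm and to stdout everywhere else. A hedged sketch of the same dispatch trick, trimmed so it compiles as a standalone program (the wasm arm here just tags the line instead of calling web_sys, so the example runs anywhere):

// Same shape as lib.rs: one front-end macro, two cfg-gated backends.
macro_rules! log {
    ( $( $t:tt )* ) => {{
        __log!( $( $t )* );
    }}
}

// In the real crate this arm forwards to web_sys::console::log_1.
#[cfg(target_family = "wasm")]
macro_rules! __log {
    ( $( $t:tt )* ) => {
        println!("[wasm] {}", format!( $( $t )* ));
    }
}

#[cfg(not(target_family = "wasm"))]
macro_rules! __log {
    ( $( $t:tt )* ) => {
        println!( $( $t )* );
    }
}

fn main() {
    let n = 3;
    log!("applied {} changes", n); // prints "applied 3 changes" on native targets
}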
192
automerge/src/op_set.rs
Normal file
192
automerge/src/op_set.rs
Normal file
|
|
@ -0,0 +1,192 @@
use crate::clock::Clock;
use crate::indexed_cache::IndexedCache;
use crate::op_tree::OpTreeInternal;
use crate::query::{self, TreeQuery};
use crate::types::{ActorId, Key, ObjId, Op, OpId, OpType};
use crate::ObjType;
use fxhash::FxBuildHasher;
use std::cmp::Ordering;
use std::collections::HashMap;

pub(crate) const B: usize = 16;
pub(crate) type OpSet = OpSetInternal<B>;

#[derive(Debug, Clone, PartialEq)]
pub(crate) struct OpSetInternal<const B: usize> {
    trees: HashMap<ObjId, (ObjType, OpTreeInternal<B>), FxBuildHasher>,
    length: usize,
    pub m: OpSetMetadata,
}

impl<const B: usize> OpSetInternal<B> {
    pub fn new() -> Self {
        let mut trees: HashMap<_, _, _> = Default::default();
        trees.insert(ObjId::root(), (ObjType::Map, Default::default()));
        OpSetInternal {
            trees,
            length: 0,
            m: OpSetMetadata {
                actors: IndexedCache::new(),
                props: IndexedCache::new(),
            },
        }
    }

    pub fn iter(&self) -> Iter<'_, B> {
        let mut objs: Vec<_> = self.trees.keys().collect();
        objs.sort_by(|a, b| self.m.lamport_cmp(a.0, b.0));
        Iter {
            inner: self,
            index: 0,
            sub_index: 0,
            objs,
        }
    }

    pub fn keys(&self, obj: ObjId) -> Option<query::Keys<B>> {
        if let Some((_typ, tree)) = self.trees.get(&obj) {
            tree.keys()
        } else {
            None
        }
    }

    pub fn keys_at(&self, obj: ObjId, clock: Clock) -> Option<query::KeysAt<B>> {
        if let Some((_typ, tree)) = self.trees.get(&obj) {
            tree.keys_at(clock)
        } else {
            None
        }
    }

    pub fn search<Q>(&self, obj: &ObjId, query: Q) -> Q
    where
        Q: TreeQuery<B>,
    {
        if let Some((_typ, tree)) = self.trees.get(obj) {
            tree.search(query, &self.m)
        } else {
            query
        }
    }

    pub fn replace<F>(&mut self, obj: &ObjId, index: usize, f: F)
    where
        F: FnMut(&mut Op),
    {
        if let Some((_typ, tree)) = self.trees.get_mut(obj) {
            tree.replace(index, f)
        }
    }

    pub fn remove(&mut self, obj: &ObjId, index: usize) -> Op {
        // this happens on rollback - be sure to go back to the old state
        let (_typ, tree) = self.trees.get_mut(obj).unwrap();
        self.length -= 1;
        let op = tree.remove(index);
        if let OpType::Make(_) = &op.action {
            self.trees.remove(&op.id.into());
        }
        op
    }

    pub fn insert(&mut self, index: usize, obj: &ObjId, element: Op) {
        if let OpType::Make(typ) = element.action {
            self.trees
                .insert(element.id.into(), (typ, Default::default()));
        }

        if let Some((_typ, tree)) = self.trees.get_mut(obj) {
            tree.insert(index, element);
            self.length += 1;
        }
    }

    pub fn object_type(&self, id: &ObjId) -> Option<ObjType> {
        self.trees.get(id).map(|(typ, _)| *typ)
    }

    #[cfg(feature = "optree-visualisation")]
    pub fn visualise(&self) -> String {
        let mut out = Vec::new();
        let graph = super::visualisation::GraphVisualisation::construct(&self.trees, &self.m);
        dot::render(&graph, &mut out).unwrap();
        String::from_utf8_lossy(&out[..]).to_string()
    }
}

impl<const B: usize> Default for OpSetInternal<B> {
    fn default() -> Self {
        Self::new()
    }
}

impl<'a, const B: usize> IntoIterator for &'a OpSetInternal<B> {
    type Item = (&'a ObjId, &'a Op);

    type IntoIter = Iter<'a, B>;

    fn into_iter(self) -> Self::IntoIter {
        let mut objs: Vec<_> = self.trees.keys().collect();
        objs.sort_by(|a, b| self.m.lamport_cmp(a.0, b.0));
        Iter {
            inner: self,
            index: 0,
            objs,
            sub_index: 0,
        }
    }
}

pub(crate) struct Iter<'a, const B: usize> {
    inner: &'a OpSetInternal<B>,
    index: usize,
    objs: Vec<&'a ObjId>,
    sub_index: usize,
}

impl<'a, const B: usize> Iterator for Iter<'a, B> {
    type Item = (&'a ObjId, &'a Op);

    fn next(&mut self) -> Option<Self::Item> {
        let mut result = None;
        for obj in self.objs.iter().skip(self.index) {
            let (_typ, tree) = self.inner.trees.get(obj)?;
            result = tree.get(self.sub_index).map(|op| (*obj, op));
            if result.is_some() {
                self.sub_index += 1;
                break;
            } else {
                self.index += 1;
                self.sub_index = 0;
            }
        }
        result
    }
}

#[derive(Clone, Debug, PartialEq)]
pub(crate) struct OpSetMetadata {
    pub actors: IndexedCache<ActorId>,
    pub props: IndexedCache<String>,
}

impl OpSetMetadata {
    pub fn key_cmp(&self, left: &Key, right: &Key) -> Ordering {
        match (left, right) {
            (Key::Map(a), Key::Map(b)) => self.props[*a].cmp(&self.props[*b]),
            _ => panic!("can only compare map keys"),
        }
    }

    pub fn lamport_cmp(&self, left: OpId, right: OpId) -> Ordering {
        match (left, right) {
            (OpId(0, _), OpId(0, _)) => Ordering::Equal,
            (OpId(0, _), OpId(_, _)) => Ordering::Less,
            (OpId(_, _), OpId(0, _)) => Ordering::Greater,
            (OpId(a, x), OpId(b, y)) if a == b => self.actors[x].cmp(&self.actors[y]),
            (OpId(a, _), OpId(b, _)) => a.cmp(&b),
        }
    }
}
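`lamport_cmp` is the ordering the object iteration above relies on: ops compare by counter first, ties break on the actors' ids (looked up through the `IndexedCache`, not the raw indices), and `OpId(0, _)` is a root sentinel that sorts before everything. A standalone model of the counter/actor rule, not the crate's types:

use std::cmp::Ordering;

#[derive(Clone, Copy)]
struct MiniOpId(u64, usize); // (counter, index into the actors table)

fn lamport_cmp(actors: &[&str], a: MiniOpId, b: MiniOpId) -> Ordering {
    match (a, b) {
        // Equal counters: fall back to comparing the actual actor ids.
        (MiniOpId(x, p), MiniOpId(y, q)) if x == y => actors[p].cmp(actors[q]),
        // Otherwise the higher counter is the later op.
        (MiniOpId(x, _), MiniOpId(y, _)) => x.cmp(&y),
    }
}

fn main() {
    let actors = ["02ef21", "f3a999"]; // hypothetical sorted actor ids
    assert_eq!(lamport_cmp(&actors, MiniOpId(1, 1), MiniOpId(2, 0)), Ordering::Less);
    assert_eq!(lamport_cmp(&actors, MiniOpId(2, 0), MiniOpId(2, 1)), Ordering::Less);
}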
@@ -5,20 +5,175 @@ use std::{
 };
 
 pub(crate) use crate::op_set::OpSetMetadata;
-use crate::query::{ChangeVisibility, Index, QueryResult, TreeQuery};
-use crate::types::Op;
-
-pub(crate) const B: usize = 16;
+use crate::types::{Op, OpId};
+use crate::{
+    clock::Clock,
+    query::{self, Index, QueryResult, TreeQuery},
+};
+use std::collections::HashSet;
+
+#[allow(dead_code)]
+pub(crate) type OpTree = OpTreeInternal<16>;
 
 #[derive(Clone, Debug)]
-pub(crate) struct OpTreeNode {
-    pub(crate) children: Vec<OpTreeNode>,
-    pub(crate) elements: Vec<usize>,
-    pub(crate) index: Index,
-    pub(crate) length: usize,
+pub(crate) struct OpTreeInternal<const B: usize> {
+    pub(crate) root_node: Option<OpTreeNode<B>>,
 }
 
-impl OpTreeNode {
-    pub(crate) fn new() -> Self {
+#[derive(Clone, Debug)]
+pub(crate) struct OpTreeNode<const B: usize> {
+    pub(crate) elements: Vec<Op>,
+    pub(crate) children: Vec<OpTreeNode<B>>,
+    pub index: Index,
+    length: usize,
+}
+
+impl<const B: usize> OpTreeInternal<B> {
+    /// Construct a new, empty, sequence.
+    pub fn new() -> Self {
+        Self { root_node: None }
+    }
+
+    /// Get the length of the sequence.
+    pub fn len(&self) -> usize {
+        self.root_node.as_ref().map_or(0, |n| n.len())
+    }
+
+    pub fn keys(&self) -> Option<query::Keys<B>> {
+        self.root_node.as_ref().map(query::Keys::new)
+    }
+
+    pub fn keys_at(&self, clock: Clock) -> Option<query::KeysAt<B>> {
+        self.root_node
+            .as_ref()
+            .map(|root| query::KeysAt::new(root, clock))
+    }
+
+    pub fn search<Q>(&self, mut query: Q, m: &OpSetMetadata) -> Q
+    where
+        Q: TreeQuery<B>,
+    {
+        self.root_node
+            .as_ref()
+            .map(|root| match query.query_node_with_metadata(root, m) {
+                QueryResult::Descend => root.search(&mut query, m),
+                _ => true,
+            });
+        query
+    }
+
+    /// Create an iterator through the sequence.
+    pub fn iter(&self) -> Iter<'_, B> {
+        Iter {
+            inner: self,
+            index: 0,
+        }
+    }
+
+    /// Insert the `element` into the sequence at `index`.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `index > len`.
+    pub fn insert(&mut self, index: usize, element: Op) {
+        let old_len = self.len();
+        if let Some(root) = self.root_node.as_mut() {
+            #[cfg(debug_assertions)]
+            root.check();
+
+            if root.is_full() {
+                let original_len = root.len();
+                let new_root = OpTreeNode::new();
+
+                // move new_root to root position
+                let old_root = mem::replace(root, new_root);
+
+                root.length += old_root.len();
+                root.index = old_root.index.clone();
+                root.children.push(old_root);
+                root.split_child(0);
+
+                assert_eq!(original_len, root.len());
+
+                // after splitting the root has one element and two children, find which child the
+                // index is in
+                let first_child_len = root.children[0].len();
+                let (child, insertion_index) = if first_child_len < index {
+                    (&mut root.children[1], index - (first_child_len + 1))
+                } else {
+                    (&mut root.children[0], index)
+                };
+                root.length += 1;
+                root.index.insert(&element);
+                child.insert_into_non_full_node(insertion_index, element)
+            } else {
+                root.insert_into_non_full_node(index, element)
+            }
+        } else {
+            let mut root = OpTreeNode::new();
+            root.insert_into_non_full_node(index, element);
+            self.root_node = Some(root)
+        }
+        assert_eq!(self.len(), old_len + 1, "{:#?}", self);
+    }
+
+    /// Get the `element` at `index` in the sequence.
+    pub fn get(&self, index: usize) -> Option<&Op> {
+        self.root_node.as_ref().and_then(|n| n.get(index))
+    }
+
+    // this replaces get_mut() because it allows the indexes to update correctly
+    pub fn replace<F>(&mut self, index: usize, mut f: F)
+    where
+        F: FnMut(&mut Op),
+    {
+        if self.len() > index {
+            let op = self.get(index).unwrap();
+            let mut new_op = op.clone();
+            f(&mut new_op);
+            self.set(index, new_op);
+        }
+    }
+
+    /// Removes the element at `index` from the sequence.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `index` is out of bounds.
+    pub fn remove(&mut self, index: usize) -> Op {
+        if let Some(root) = self.root_node.as_mut() {
+            #[cfg(debug_assertions)]
+            let len = root.check();
+            let old = root.remove(index);
+
+            if root.elements.is_empty() {
+                if root.is_leaf() {
+                    self.root_node = None;
+                } else {
+                    self.root_node = Some(root.children.remove(0));
+                }
+            }
+
+            #[cfg(debug_assertions)]
+            debug_assert_eq!(len, self.root_node.as_ref().map_or(0, |r| r.check()) + 1);
+            old
+        } else {
+            panic!("remove from empty tree")
+        }
+    }
+
+    /// Update the `element` at `index` in the sequence, returning the old value.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `index > len`
+    pub fn set(&mut self, index: usize, element: Op) -> Op {
+        self.root_node.as_mut().unwrap().set(index, element)
+    }
+}
+
+impl<const B: usize> OpTreeNode<B> {
+    fn new() -> Self {
         Self {
             elements: Vec::new(),
             children: Vec::new(),
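Worked numbers for the root-split path above, assuming B = 16 as elsewhere in this diff: a node is full at 2 * B - 1 = 31 elements, and splitting promotes the median element into the fresh root, leaving two children of B - 1 = 15 each.

const B: usize = 16;

fn main() {
    let full = 2 * B - 1; // the is_full() threshold: 31 elements
    let per_half = B - 1; // each side keeps 15 after the split
    assert_eq!(full, per_half * 2 + 1); // exactly one median moves up
}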
@@ -27,77 +182,31 @@ impl OpTreeNode {
         }
     }
 
-    fn search_element<'a, 'b: 'a, Q>(
-        &'b self,
-        query: &mut Q,
-        m: &OpSetMetadata,
-        ops: &'a [Op],
-        index: usize,
-    ) -> bool
-    where
-        Q: TreeQuery<'a>,
-    {
-        if let Some(e) = self.elements.get(index) {
-            if query.query_element_with_metadata(&ops[*e], m) == QueryResult::Finish {
-                return true;
-            }
-        }
-        false
-    }
-
-    pub(crate) fn search<'a, 'b: 'a, Q>(
-        &'b self,
-        query: &mut Q,
-        m: &OpSetMetadata,
-        ops: &'a [Op],
-        mut skip: Option<usize>,
-    ) -> bool
-    where
-        Q: TreeQuery<'a>,
+    pub fn search<Q>(&self, query: &mut Q, m: &OpSetMetadata) -> bool
+    where
+        Q: TreeQuery<B>,
     {
         if self.is_leaf() {
-            for e in self.elements.iter().skip(skip.unwrap_or(0)) {
-                if query.query_element_with_metadata(&ops[*e], m) == QueryResult::Finish {
+            for e in &self.elements {
+                if query.query_element_with_metadata(e, m) == QueryResult::Finish {
                     return true;
                 }
             }
             false
         } else {
             for (child_index, child) in self.children.iter().enumerate() {
-                match skip {
-                    Some(n) if n > child.len() => {
-                        skip = Some(n - child.len() - 1);
-                    }
-                    Some(n) if n == child.len() => {
-                        skip = Some(0); // important to not be None so we never call query_node again
-                        if self.search_element(query, m, ops, child_index) {
+                match query.query_node_with_metadata(child, m) {
+                    QueryResult::Descend => {
+                        if child.search(query, m) {
                             return true;
                         }
                     }
-                    Some(n) => {
-                        if child.search(query, m, ops, Some(n)) {
-                            return true;
-                        }
-                        skip = Some(0); // important to not be None so we never call query_node again
-                        if self.search_element(query, m, ops, child_index) {
-                            return true;
-                        }
-                    }
-                    None => {
-                        // descend and try find it
-                        match query.query_node_with_metadata(child, m, ops) {
-                            QueryResult::Descend => {
-                                if child.search(query, m, ops, None) {
-                                    return true;
-                                }
-                            }
-                            QueryResult::Finish => return true,
-                            QueryResult::Next => (),
-                            QueryResult::Skip(_) => panic!("had skip from non-root node"),
-                        }
-                        if self.search_element(query, m, ops, child_index) {
-                            return true;
-                        }
-                    }
+                    QueryResult::Finish => return true,
+                    QueryResult::Next => (),
+                }
+                if let Some(e) = self.elements.get(child_index) {
+                    if query.query_element_with_metadata(e, m) == QueryResult::Finish {
+                        return true;
+                    }
                 }
             }
         }
@@ -105,26 +214,26 @@ impl OpTreeNode {
         }
     }
 
-    pub(crate) fn len(&self) -> usize {
+    pub fn len(&self) -> usize {
         self.length
     }
 
-    fn reindex(&mut self, ops: &[Op]) {
+    fn reindex(&mut self) {
         let mut index = Index::new();
         for c in &self.children {
             index.merge(&c.index);
         }
-        for i in &self.elements {
-            index.insert(&ops[*i]);
+        for e in &self.elements {
+            index.insert(e);
         }
         self.index = index
     }
 
-    pub(crate) fn is_leaf(&self) -> bool {
+    fn is_leaf(&self) -> bool {
         self.children.is_empty()
     }
 
-    pub(crate) fn is_full(&self) -> bool {
+    fn is_full(&self) -> bool {
         self.elements.len() >= 2 * B - 1
     }
@@ -139,13 +248,13 @@ impl OpTreeNode {
                 cumulative_len += child.len() + 1;
             }
         }
-        panic!("index {} not found in node with len {}", index, self.len())
+        panic!("index not found in node")
     }
 
-    pub(crate) fn insert_into_non_full_node(&mut self, index: usize, element: usize, ops: &[Op]) {
+    fn insert_into_non_full_node(&mut self, index: usize, element: Op) {
         assert!(!self.is_full());
 
-        self.index.insert(&ops[element]);
+        self.index.insert(&element);
 
         if self.is_leaf() {
             self.length += 1;
@@ -155,14 +264,14 @@ impl OpTreeNode {
             let child = &mut self.children[child_index];
 
             if child.is_full() {
-                self.split_child(child_index, ops);
+                self.split_child(child_index);
 
                 // child structure has changed so we need to find the index again
                 let (child_index, sub_index) = self.find_child_index(index);
                 let child = &mut self.children[child_index];
-                child.insert_into_non_full_node(sub_index, element, ops);
+                child.insert_into_non_full_node(sub_index, element);
             } else {
-                child.insert_into_non_full_node(sub_index, element, ops);
+                child.insert_into_non_full_node(sub_index, element);
             }
             self.length += 1;
         }
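A minimal sketch of a query the search protocol above can drive (hypothetical, not a query shipped in this diff): `query_node` sees each child with its cached index and decides whether to descend, and `query_element` sees leaf elements and the separator elements between children.

use crate::op_tree::OpTreeNode;
use crate::query::{QueryResult, TreeQuery};
use crate::types::Op;

struct CountOps {
    n: usize,
}

impl<const B: usize> TreeQuery<B> for CountOps {
    // Take whole subtrees from the cached length instead of descending.
    fn query_node(&mut self, child: &OpTreeNode<B>) -> QueryResult {
        self.n += child.len();
        QueryResult::Next
    }

    // Separator elements (and leaf elements) are counted one at a time.
    fn query_element(&mut self, _element: &Op) -> QueryResult {
        self.n += 1;
        QueryResult::Next
    }
}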
@@ -170,7 +279,7 @@ impl OpTreeNode {
 
     // A utility function to split the child `full_child_index` of this node
     // Note that `full_child_index` must be full when this function is called.
-    pub(crate) fn split_child(&mut self, full_child_index: usize, ops: &[Op]) {
+    fn split_child(&mut self, full_child_index: usize) {
         let original_len_self = self.len();
 
         let full_child = &mut self.children[full_child_index];
@@ -204,8 +313,8 @@ impl OpTreeNode {
 
         let full_child_len = full_child.len();
 
-        full_child.reindex(ops);
-        successor_sibling.reindex(ops);
+        full_child.reindex();
+        successor_sibling.reindex();
 
         self.children
             .insert(full_child_index + 1, successor_sibling);
@@ -217,37 +326,32 @@ impl OpTreeNode {
         assert_eq!(original_len_self, self.len());
     }
 
-    fn remove_from_leaf(&mut self, index: usize) -> usize {
+    fn remove_from_leaf(&mut self, index: usize) -> Op {
         self.length -= 1;
         self.elements.remove(index)
     }
 
-    fn remove_element_from_non_leaf(
-        &mut self,
-        index: usize,
-        element_index: usize,
-        ops: &[Op],
-    ) -> usize {
+    fn remove_element_from_non_leaf(&mut self, index: usize, element_index: usize) -> Op {
         self.length -= 1;
         if self.children[element_index].elements.len() >= B {
             let total_index = self.cumulative_index(element_index);
             // recursively delete index - 1 in predecessor_node
-            let predecessor = self.children[element_index].remove(index - 1 - total_index, ops);
+            let predecessor = self.children[element_index].remove(index - 1 - total_index);
             // replace element with that one
             mem::replace(&mut self.elements[element_index], predecessor)
         } else if self.children[element_index + 1].elements.len() >= B {
             // recursively delete index + 1 in successor_node
             let total_index = self.cumulative_index(element_index + 1);
-            let successor = self.children[element_index + 1].remove(index + 1 - total_index, ops);
+            let successor = self.children[element_index + 1].remove(index + 1 - total_index);
             // replace element with that one
             mem::replace(&mut self.elements[element_index], successor)
         } else {
             let middle_element = self.elements.remove(element_index);
             let successor_child = self.children.remove(element_index + 1);
-            self.children[element_index].merge(middle_element, successor_child, ops);
+            self.children[element_index].merge(middle_element, successor_child);
 
             let total_index = self.cumulative_index(element_index);
-            self.children[element_index].remove(index - total_index, ops)
+            self.children[element_index].remove(index - total_index)
         }
     }
@@ -258,12 +362,7 @@ impl OpTreeNode {
             .sum()
     }
 
-    fn remove_from_internal_child(
-        &mut self,
-        index: usize,
-        mut child_index: usize,
-        ops: &[Op],
-    ) -> usize {
+    fn remove_from_internal_child(&mut self, index: usize, mut child_index: usize) -> Op {
         if self.children[child_index].elements.len() < B
             && if child_index > 0 {
                 self.children[child_index - 1].elements.len() < B
@@ -287,14 +386,14 @@ impl OpTreeNode {
                 let successor = self.children.remove(child_index);
                 child_index -= 1;
 
-                self.children[child_index].merge(middle, successor, ops);
+                self.children[child_index].merge(middle, successor);
             } else {
                 let middle = self.elements.remove(child_index);
 
                 // use the sucessor sibling
                 let successor = self.children.remove(child_index + 1);
 
-                self.children[child_index].merge(middle, successor, ops);
+                self.children[child_index].merge(middle, successor);
             }
         } else if self.children[child_index].elements.len() < B {
             if child_index > 0
@@ -306,16 +405,12 @@ impl OpTreeNode {
                 let last_element = self.children[child_index - 1].elements.pop().unwrap();
                 assert!(!self.children[child_index - 1].elements.is_empty());
                 self.children[child_index - 1].length -= 1;
-                self.children[child_index - 1]
-                    .index
-                    .remove(&ops[last_element]);
+                self.children[child_index - 1].index.remove(&last_element);
 
                 let parent_element =
                     mem::replace(&mut self.elements[child_index - 1], last_element);
 
-                self.children[child_index]
-                    .index
-                    .insert(&ops[parent_element]);
+                self.children[child_index].index.insert(&parent_element);
                 self.children[child_index]
                     .elements
                     .insert(0, parent_element);
@@ -323,10 +418,10 @@ impl OpTreeNode {
 
                 if let Some(last_child) = self.children[child_index - 1].children.pop() {
                     self.children[child_index - 1].length -= last_child.len();
-                    self.children[child_index - 1].reindex(ops);
+                    self.children[child_index - 1].reindex();
                     self.children[child_index].length += last_child.len();
                     self.children[child_index].children.insert(0, last_child);
-                    self.children[child_index].reindex(ops);
+                    self.children[child_index].reindex();
                 }
             } else if self
                 .children
@@ -334,9 +429,7 @@ impl OpTreeNode {
                 .map_or(false, |c| c.elements.len() >= B)
             {
                 let first_element = self.children[child_index + 1].elements.remove(0);
-                self.children[child_index + 1]
-                    .index
-                    .remove(&ops[first_element]);
+                self.children[child_index + 1].index.remove(&first_element);
                 self.children[child_index + 1].length -= 1;
 
                 assert!(!self.children[child_index + 1].elements.is_empty());
@@ -344,39 +437,37 @@ impl OpTreeNode {
                 let parent_element = mem::replace(&mut self.elements[child_index], first_element);
 
                 self.children[child_index].length += 1;
-                self.children[child_index]
-                    .index
-                    .insert(&ops[parent_element]);
+                self.children[child_index].index.insert(&parent_element);
                 self.children[child_index].elements.push(parent_element);
 
                 if !self.children[child_index + 1].is_leaf() {
                     let first_child = self.children[child_index + 1].children.remove(0);
                     self.children[child_index + 1].length -= first_child.len();
-                    self.children[child_index + 1].reindex(ops);
+                    self.children[child_index + 1].reindex();
                     self.children[child_index].length += first_child.len();
 
                     self.children[child_index].children.push(first_child);
-                    self.children[child_index].reindex(ops);
+                    self.children[child_index].reindex();
                 }
             }
         }
         self.length -= 1;
         let total_index = self.cumulative_index(child_index);
-        self.children[child_index].remove(index - total_index, ops)
+        self.children[child_index].remove(index - total_index)
     }
 
-    pub(crate) fn check(&self) -> usize {
+    fn check(&self) -> usize {
         let l = self.elements.len() + self.children.iter().map(|c| c.check()).sum::<usize>();
         assert_eq!(self.len(), l, "{:#?}", self);
 
         l
     }
 
-    pub(crate) fn remove(&mut self, index: usize, ops: &[Op]) -> usize {
+    pub fn remove(&mut self, index: usize) -> Op {
         let original_len = self.len();
         if self.is_leaf() {
             let v = self.remove_from_leaf(index);
-            self.index.remove(&ops[v]);
+            self.index.remove(&v);
             assert_eq!(original_len, self.len() + 1);
             debug_assert_eq!(self.check(), self.len());
             v
@@ -393,16 +484,15 @@ impl OpTreeNode {
                     let v = self.remove_element_from_non_leaf(
                         index,
                         min(child_index, self.elements.len() - 1),
-                        ops,
                     );
-                    self.index.remove(&ops[v]);
+                    self.index.remove(&v);
                     assert_eq!(original_len, self.len() + 1);
                     debug_assert_eq!(self.check(), self.len());
                     return v;
                 }
                 Ordering::Greater => {
-                    let v = self.remove_from_internal_child(index, child_index, ops);
-                    self.index.remove(&ops[v]);
+                    let v = self.remove_from_internal_child(index, child_index);
+                    self.index.remove(&v);
                     assert_eq!(original_len, self.len() + 1);
                     debug_assert_eq!(self.check(), self.len());
                     return v;
@@ -419,8 +509,8 @@ impl OpTreeNode {
             }
         }
     }
 
-    fn merge(&mut self, middle: usize, successor_sibling: OpTreeNode, ops: &[Op]) {
-        self.index.insert(&ops[middle]);
+    fn merge(&mut self, middle: Op, successor_sibling: OpTreeNode<B>) {
+        self.index.insert(&middle);
         self.index.merge(&successor_sibling.index);
         self.elements.push(middle);
         self.elements.extend(successor_sibling.elements);
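Worked numbers for the rebalancing above, again assuming B = 16: a delete only descends into a child that keeps at least B elements, so an underfull child (B - 1 elements) is first topped up by borrowing from a sibling, or merged with the separator and its sibling into one node of (B - 1) + 1 + (B - 1) = 2 * B - 1 elements, which is exactly the is_full() threshold and why merge() ends with assert!(self.is_full()).

const B: usize = 16;

fn main() {
    // child + promoted separator + sibling after a merge:
    assert_eq!((B - 1) + 1 + (B - 1), 2 * B - 1); // 31, a full node
}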
@@ -429,50 +519,47 @@ impl OpTreeNode {
         assert!(self.is_full());
     }
 
-    /// Update the operation at the given index using the provided function.
-    ///
-    /// This handles updating the indices after the update.
-    pub(crate) fn update<'a>(
-        &mut self,
-        index: usize,
-        vis: ChangeVisibility<'a>,
-    ) -> ChangeVisibility<'a> {
+    pub fn set(&mut self, index: usize, element: Op) -> Op {
         if self.is_leaf() {
-            self.index.change_vis(vis)
+            let old_element = self.elements.get_mut(index).unwrap();
+            self.index.replace(old_element, &element);
+            mem::replace(old_element, element)
         } else {
             let mut cumulative_len = 0;
-            let len = self.len();
-            for (_child_index, child) in self.children.iter_mut().enumerate() {
+            for (child_index, child) in self.children.iter_mut().enumerate() {
                 match (cumulative_len + child.len()).cmp(&index) {
                     Ordering::Less => {
                         cumulative_len += child.len() + 1;
                     }
                     Ordering::Equal => {
-                        return self.index.change_vis(vis);
+                        let old_element = self.elements.get_mut(child_index).unwrap();
+                        self.index.replace(old_element, &element);
+                        return mem::replace(old_element, element);
                     }
                     Ordering::Greater => {
-                        let vis = child.update(index - cumulative_len, vis);
-                        return self.index.change_vis(vis);
+                        let old_element = child.set(index - cumulative_len, element.clone());
+                        self.index.replace(&old_element, &element);
+                        return old_element;
                     }
                 }
             }
-            panic!("Invalid index to set: {} but len was {}", index, len)
+            panic!("Invalid index to set: {} but len was {}", index, self.len())
         }
     }
 
-    pub(crate) fn last(&self) -> usize {
+    pub fn last(&self) -> &Op {
         if self.is_leaf() {
             // node is never empty so this is safe
-            *self.elements.last().unwrap()
+            self.elements.last().unwrap()
         } else {
             // if not a leaf then there is always at least one child
             self.children.last().unwrap().last()
         }
     }
 
-    pub(crate) fn get(&self, index: usize) -> Option<usize> {
+    pub fn get(&self, index: usize) -> Option<&Op> {
         if self.is_leaf() {
-            return self.elements.get(index).copied();
+            return self.elements.get(index);
         } else {
             let mut cumulative_len = 0;
             for (child_index, child) in self.children.iter().enumerate() {
@@ -480,7 +567,7 @@ impl OpTreeNode {
                     Ordering::Less => {
                         cumulative_len += child.len() + 1;
                     }
-                    Ordering::Equal => return self.elements.get(child_index).copied(),
+                    Ordering::Equal => return self.elements.get(child_index),
                     Ordering::Greater => {
                         return child.get(index - cumulative_len);
                     }
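A worked example of the cumulative_len arithmetic that get() and set() above share: each child covers a run of tree positions, and the separator element stored after child i occupies one position of its own.

fn main() {
    let child_lens = [3usize, 2]; // two children with 3 and 2 elements
    let mut cumulative = 0;
    for (i, len) in child_lens.iter().enumerate() {
        println!("child {i} covers positions {}..{}", cumulative, cumulative + len);
        println!("separator {i} sits at position {}", cumulative + len);
        cumulative += len + 1; // the Ordering::Less arm above
    }
    // Positions map as: child0 -> 0..3, separator0 -> 3, child1 -> 4..6.
}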
@@ -490,3 +577,110 @@ impl OpTreeNode {
         None
     }
 }
+
+impl<const B: usize> Default for OpTreeInternal<B> {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+impl<const B: usize> PartialEq for OpTreeInternal<B> {
+    fn eq(&self, other: &Self) -> bool {
+        self.len() == other.len() && self.iter().zip(other.iter()).all(|(a, b)| a == b)
+    }
+}
+
+impl<'a, const B: usize> IntoIterator for &'a OpTreeInternal<B> {
+    type Item = &'a Op;
+
+    type IntoIter = Iter<'a, B>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        Iter {
+            inner: self,
+            index: 0,
+        }
+    }
+}
+
+pub(crate) struct Iter<'a, const B: usize> {
+    inner: &'a OpTreeInternal<B>,
+    index: usize,
+}
+
+impl<'a, const B: usize> Iterator for Iter<'a, B> {
+    type Item = &'a Op;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.index += 1;
+        self.inner.get(self.index - 1)
+    }
+
+    fn nth(&mut self, n: usize) -> Option<Self::Item> {
+        self.index += n + 1;
+        self.inner.get(self.index - 1)
+    }
+}
+
+#[derive(Debug, Clone, PartialEq)]
+struct CounterData {
+    pos: usize,
+    val: i64,
+    succ: HashSet<OpId>,
+    op: Op,
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::legacy as amp;
+    use crate::types::{Op, OpId};
+
+    use super::*;
+
+    fn op() -> Op {
+        let zero = OpId(0, 0);
+        Op {
+            id: zero,
+            action: amp::OpType::Set(0.into()),
+            key: zero.into(),
+            succ: vec![],
+            pred: vec![],
+            insert: false,
+        }
+    }
+
+    #[test]
+    fn insert() {
+        let mut t = OpTree::new();
+
+        t.insert(0, op());
+        t.insert(1, op());
+        t.insert(0, op());
+        t.insert(0, op());
+        t.insert(0, op());
+        t.insert(3, op());
+        t.insert(4, op());
+    }
+
+    #[test]
+    fn insert_book() {
+        let mut t = OpTree::new();
+
+        for i in 0..100 {
+            t.insert(i % 2, op());
+        }
+    }
+
+    #[test]
+    fn insert_book_vec() {
+        let mut t = OpTree::new();
+        let mut v = Vec::new();
+
+        for i in 0..100 {
+            t.insert(i % 3, op());
+            v.insert(i % 3, op());
+
+            assert_eq!(v, t.iter().cloned().collect::<Vec<_>>())
+        }
+    }
+}
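The nth override in the iterator above matters because adaptors like Iterator::skip are built on nth: each skip becomes one indexed get() on the tree instead of n repeated next() calls. A standalone illustration of the contract nth must keep (consume n + 1 items and yield the last of them):

fn main() {
    let v = [10, 20, 30, 40, 50];
    let mut it = v.iter();
    assert_eq!(it.nth(2), Some(&30)); // jumps straight to index 2
    assert_eq!(it.next(), Some(&40)); // everything before it is consumed
}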
288
automerge/src/query.rs
Normal file

@@ -0,0 +1,288 @@
use crate::exid::ExId;
use crate::op_tree::{OpSetMetadata, OpTreeNode};
use crate::types::{Clock, Counter, ElemId, Op, OpId, OpType, ScalarValue};
use fxhash::FxBuildHasher;
use serde::Serialize;
use std::cmp::Ordering;
use std::collections::{HashMap, HashSet};
use std::fmt::Debug;

mod attribute;
mod attribute2;
mod insert;
mod keys;
mod keys_at;
mod len;
mod len_at;
mod list_vals;
mod list_vals_at;
mod nth;
mod nth_at;
mod opid;
mod prop;
mod prop_at;
mod raw_spans;
mod seek_op;
mod spans;

pub(crate) use attribute::{Attribute, ChangeSet};
pub(crate) use attribute2::{Attribute2, ChangeSet2};
pub(crate) use insert::InsertNth;
pub(crate) use keys::Keys;
pub(crate) use keys_at::KeysAt;
pub(crate) use len::Len;
pub(crate) use len_at::LenAt;
pub(crate) use list_vals::ListVals;
pub(crate) use list_vals_at::ListValsAt;
pub(crate) use nth::Nth;
pub(crate) use nth_at::NthAt;
pub(crate) use opid::OpIdSearch;
pub(crate) use prop::Prop;
pub(crate) use prop_at::PropAt;
pub(crate) use raw_spans::RawSpans;
pub(crate) use seek_op::SeekOp;
pub(crate) use spans::{Span, Spans};

#[derive(Serialize, Debug, Clone, PartialEq)]
pub struct SpanInfo {
    pub id: ExId,
    pub start: usize,
    pub end: usize,
    #[serde(rename = "type")]
    pub span_type: String,
    pub value: ScalarValue,
}

#[derive(Debug, Clone, PartialEq)]
pub(crate) struct CounterData {
    pos: usize,
    val: i64,
    succ: HashSet<OpId>,
    op: Op,
}

pub(crate) trait TreeQuery<const B: usize> {
    #[inline(always)]
    fn query_node_with_metadata(
        &mut self,
        child: &OpTreeNode<B>,
        _m: &OpSetMetadata,
    ) -> QueryResult {
        self.query_node(child)
    }

    fn query_node(&mut self, _child: &OpTreeNode<B>) -> QueryResult {
        QueryResult::Descend
    }

    #[inline(always)]
    fn query_element_with_metadata(&mut self, element: &Op, _m: &OpSetMetadata) -> QueryResult {
        self.query_element(element)
    }

    fn query_element(&mut self, _element: &Op) -> QueryResult {
        panic!("invalid element query")
    }
}

#[derive(Debug, Clone, PartialEq)]
pub(crate) enum QueryResult {
    Next,
    Descend,
    Finish,
}

#[derive(Clone, Debug, PartialEq)]
pub(crate) struct Index {
    pub visible: HashMap<ElemId, usize, FxBuildHasher>,
    /// Set of opids found in this node and below.
    pub ops: HashSet<OpId, FxBuildHasher>,
}

impl Index {
    pub fn new() -> Self {
        Index {
            visible: Default::default(),
            ops: Default::default(),
        }
    }

    /// Get the number of visible elements in this index.
    pub fn visible_len(&self) -> usize {
        self.visible.len()
    }

    pub fn has_visible(&self, e: &Option<ElemId>) -> bool {
        if let Some(seen) = e {
            self.visible.contains_key(seen)
        } else {
            false
        }
    }

    pub fn replace(&mut self, old: &Op, new: &Op) {
        if old.id != new.id {
            self.ops.remove(&old.id);
            self.ops.insert(new.id);
        }

        assert!(new.key == old.key);

        match (new.visible(), old.visible(), new.elemid()) {
            (false, true, Some(elem)) => match self.visible.get(&elem).copied() {
                Some(n) if n == 1 => {
                    self.visible.remove(&elem);
                }
                Some(n) => {
                    self.visible.insert(elem, n - 1);
                }
                None => panic!("remove overun in index"),
            },
            (true, false, Some(elem)) => match self.visible.get(&elem).copied() {
                Some(n) => {
                    self.visible.insert(elem, n + 1);
                }
                None => {
                    self.visible.insert(elem, 1);
                }
            },
            _ => {}
        }
    }

    pub fn insert(&mut self, op: &Op) {
        self.ops.insert(op.id);
        if op.visible() {
            if let Some(elem) = op.elemid() {
                match self.visible.get(&elem).copied() {
                    Some(n) => {
                        self.visible.insert(elem, n + 1);
                    }
                    None => {
                        self.visible.insert(elem, 1);
                    }
                }
            }
        }
    }

    pub fn remove(&mut self, op: &Op) {
        self.ops.remove(&op.id);
        if op.visible() {
            if let Some(elem) = op.elemid() {
                match self.visible.get(&elem).copied() {
                    Some(n) if n == 1 => {
                        self.visible.remove(&elem);
                    }
                    Some(n) => {
                        self.visible.insert(elem, n - 1);
                    }
                    None => panic!("remove overun in index"),
                }
            }
        }
    }

    pub fn merge(&mut self, other: &Index) {
        for id in &other.ops {
            self.ops.insert(*id);
        }
        for (elem, n) in other.visible.iter() {
            match self.visible.get(elem).cloned() {
                None => {
                    self.visible.insert(*elem, 1);
                }
                Some(m) => {
                    self.visible.insert(*elem, m + n);
                }
            }
        }
    }
}

impl Default for Index {
    fn default() -> Self {
        Self::new()
    }
}

#[derive(Debug, Clone, PartialEq, Default)]
pub(crate) struct VisWindow {
    counters: HashMap<OpId, CounterData>,
}

impl VisWindow {
    fn visible_at(&mut self, op: &Op, pos: usize, clock: &Clock) -> bool {
        if !clock.covers(&op.id) {
            return false;
        }

        let mut visible = false;
        match op.action {
            OpType::Set(ScalarValue::Counter(Counter { start, .. })) => {
                self.counters.insert(
                    op.id,
                    CounterData {
                        pos,
                        val: start,
                        succ: op.succ.iter().cloned().collect(),
                        op: op.clone(),
                    },
                );
                if !op.succ.iter().any(|i| clock.covers(i)) {
                    visible = true;
                }
            }
            OpType::Inc(inc_val) => {
                for id in &op.pred {
                    // pred is always before op.id so we can see them
                    if let Some(mut entry) = self.counters.get_mut(id) {
                        entry.succ.remove(&op.id);
                        entry.val += inc_val;
                        entry.op.action = OpType::Set(ScalarValue::counter(entry.val));
                        if !entry.succ.iter().any(|i| clock.covers(i)) {
                            visible = true;
                        }
                    }
                }
            }
            _ => {
                if !op.succ.iter().any(|i| clock.covers(i)) {
                    visible = true;
                }
            }
        };
        visible
    }

    pub fn seen_op(&self, op: &Op, pos: usize) -> Vec<(usize, Op)> {
        let mut result = vec![];
        for pred in &op.pred {
            if let Some(entry) = self.counters.get(pred) {
                result.push((entry.pos, entry.op.clone()));
            }
        }
        if result.is_empty() {
            vec![(pos, op.clone())]
        } else {
            result
        }
    }
}

pub(crate) fn binary_search_by<F, const B: usize>(node: &OpTreeNode<B>, f: F) -> usize
where
    F: Fn(&Op) -> Ordering,
{
    let mut right = node.len();
    let mut left = 0;
    while left < right {
        let seq = (left + right) / 2;
        if f(node.get(seq).unwrap()) == Ordering::Less {
            left = seq + 1;
        } else {
            right = seq;
        }
    }
    left
}
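binary_search_by above is a lower-bound search over one node's elements: it returns the first position whose op is not Less under the caller's ordering, or the node's length if there is none. A standalone model of the same loop:

use std::cmp::Ordering;

fn lower_bound(xs: &[i32], target: i32) -> usize {
    let (mut left, mut right) = (0, xs.len());
    while left < right {
        let seq = (left + right) / 2;
        if xs[seq].cmp(&target) == Ordering::Less {
            left = seq + 1;
        } else {
            right = seq;
        }
    }
    left // first index with xs[index] >= target
}

fn main() {
    assert_eq!(lower_bound(&[1, 3, 3, 5], 3), 1);
    assert_eq!(lower_bound(&[1, 3, 3, 5], 4), 3);
    assert_eq!(lower_bound(&[1, 3, 3, 5], 9), 4);
}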
128
automerge/src/query/attribute.rs
Normal file

@@ -0,0 +1,128 @@
use crate::clock::Clock;
use crate::query::{OpSetMetadata, QueryResult, TreeQuery};
use crate::types::{ElemId, Op};
use std::fmt::Debug;
use std::ops::Range;

#[derive(Debug, Clone, PartialEq)]
pub(crate) struct Attribute<const B: usize> {
    pos: usize,
    seen: usize,
    last_seen: Option<ElemId>,
    baseline: Clock,
    pub change_sets: Vec<ChangeSet>,
}

#[derive(Debug, Clone, PartialEq)]
pub struct ChangeSet {
    clock: Clock,
    next_add: Option<Range<usize>>,
    next_del: Option<(usize, String)>,
    pub add: Vec<Range<usize>>,
    pub del: Vec<(usize, String)>,
}

impl From<Clock> for ChangeSet {
    fn from(clock: Clock) -> Self {
        ChangeSet {
            clock,
            next_add: None,
            next_del: None,
            add: Vec::new(),
            del: Vec::new(),
        }
    }
}

impl ChangeSet {
    fn cut_add(&mut self) {
        if let Some(add) = self.next_add.take() {
            self.add.push(add)
        }
    }

    fn cut_del(&mut self) {
        if let Some(del) = self.next_del.take() {
            self.del.push(del)
        }
    }
}

impl<const B: usize> Attribute<B> {
    pub fn new(baseline: Clock, change_sets: Vec<Clock>) -> Self {
        Attribute {
            pos: 0,
            seen: 0,
            last_seen: None,
            baseline,
            change_sets: change_sets.into_iter().map(|c| c.into()).collect(),
        }
    }

    fn update_add(&mut self, element: &Op) {
        let baseline = self.baseline.covers(&element.id);
        for cs in &mut self.change_sets {
            if !baseline && cs.clock.covers(&element.id) {
                // is part of the change_set
                if let Some(range) = &mut cs.next_add {
                    range.end += 1;
                } else {
                    cs.next_add = Some(Range {
                        start: self.seen,
                        end: self.seen + 1,
                    });
                }
            } else {
                cs.cut_add();
            }
            cs.cut_del();
        }
    }

    // id is in baseline
    // succ is not in baseline but is in cs

    fn update_del(&mut self, element: &Op) {
        let baseline = self.baseline.covers(&element.id);
        for cs in &mut self.change_sets {
            if baseline && element.succ.iter().any(|id| cs.clock.covers(id)) {
                // was deleted by change set
                if let Some(s) = element.as_string() {
                    if let Some((_, span)) = &mut cs.next_del {
                        span.push_str(&s);
                    } else {
                        cs.next_del = Some((self.seen, s))
                    }
                }
            } else {
                //cs.cut_del();
            }
            //cs.cut_add();
        }
    }

    pub fn finish(&mut self) {
        for cs in &mut self.change_sets {
            cs.cut_add();
            cs.cut_del();
        }
    }
}

impl<const B: usize> TreeQuery<B> for Attribute<B> {
    fn query_element_with_metadata(&mut self, element: &Op, _m: &OpSetMetadata) -> QueryResult {
        if element.insert {
            self.last_seen = None;
        }
        if self.last_seen.is_none() && element.visible() {
            self.update_add(element);
            self.seen += 1;
            self.last_seen = element.elemid();
        }
        if !element.succ.is_empty() {
            self.update_del(element);
        }
        self.pos += 1;
        QueryResult::Next
    }
}
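What a finished ChangeSet reports, illustrated standalone with hypothetical values (not crate types): add holds half-open index ranges into the visible sequence that the change set inserted relative to the baseline clock, and del holds (position, deleted-text) pairs, exactly as accumulated by cut_add and cut_del above.

use std::ops::Range;

fn main() {
    // Hypothetical attribution: the change set inserted five characters at
    // visible position 4 and deleted three baseline characters at position 0.
    let add: Vec<Range<usize>> = vec![4..9];
    let del: Vec<(usize, String)> = vec![(0, "hi ".to_string())];
    assert_eq!(add[0].len(), 5); // five inserted characters
    assert_eq!(del[0].1.len(), 3); // three deleted characters
}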
172
automerge/src/query/attribute2.rs
Normal file

@@ -0,0 +1,172 @@
use crate::clock::Clock;
use crate::query::{OpSetMetadata, QueryResult, TreeQuery};
use crate::types::{ElemId, Op};
use std::fmt::Debug;
use std::ops::Range;

#[derive(Debug, Clone, PartialEq)]
pub(crate) struct Attribute2<const B: usize> {
    pos: usize,
    seen: usize,
    last_seen: Option<ElemId>,
    baseline: Clock,
    pub change_sets: Vec<ChangeSet2>,
}

#[derive(Debug, Clone, PartialEq)]
pub struct ChangeSet2 {
    clock: Clock,
    next_add: Option<CS2Add>,
    next_del: Option<CS2Del>,
    pub add: Vec<CS2Add>,
    pub del: Vec<CS2Del>,
}

#[derive(Debug, Clone, PartialEq)]
pub struct CS2Add {
    pub actor: usize,
    pub range: Range<usize>,
}

#[derive(Debug, Clone, PartialEq)]
pub struct CS2Del {
    pub pos: usize,
    pub actor: usize,
    pub span: String,
}

impl From<Clock> for ChangeSet2 {
    fn from(clock: Clock) -> Self {
        ChangeSet2 {
            clock,
            next_add: None,
            next_del: None,
            add: Vec::new(),
            del: Vec::new(),
        }
    }
}

impl ChangeSet2 {
    fn cut_add(&mut self) {
        if let Some(add) = self.next_add.take() {
            self.add.push(add)
        }
    }

    fn cut_del(&mut self) {
        if let Some(del) = self.next_del.take() {
            self.del.push(del)
        }
    }
}

impl<const B: usize> Attribute2<B> {
    pub fn new(baseline: Clock, change_sets: Vec<Clock>) -> Self {
        Attribute2 {
            pos: 0,
            seen: 0,
            last_seen: None,
            baseline,
            change_sets: change_sets.into_iter().map(|c| c.into()).collect(),
        }
    }

    fn update_add(&mut self, element: &Op) {
        let baseline = self.baseline.covers(&element.id);
        for cs in &mut self.change_sets {
            if !baseline && cs.clock.covers(&element.id) {
                // is part of the change_set
                if let Some(CS2Add { range, actor }) = &mut cs.next_add {
                    if *actor == element.id.actor() {
                        range.end += 1;
                    } else {
                        cs.cut_add();
                        cs.next_add = Some(CS2Add {
                            actor: element.id.actor(),
                            range: Range {
                                start: self.seen,
                                end: self.seen + 1,
                            },
                        });
                    }
                } else {
                    cs.next_add = Some(CS2Add {
                        actor: element.id.actor(),
                        range: Range {
                            start: self.seen,
                            end: self.seen + 1,
                        },
                    });
                }
            } else {
                cs.cut_add();
            }
            cs.cut_del();
        }
    }

    // id is in baseline
    // succ is not in baseline but is in cs

    fn update_del(&mut self, element: &Op) {
        if !self.baseline.covers(&element.id) {
            return;
        }
        for cs in &mut self.change_sets {
            let succ: Vec<_> = element
                .succ
                .iter()
                .filter(|id| cs.clock.covers(id))
                .collect();
            // was deleted by change set
            if let Some(suc) = succ.get(0) {
                if let Some(s) = element.as_string() {
                    if let Some(CS2Del { actor, span, .. }) = &mut cs.next_del {
                        if suc.actor() == *actor {
                            span.push_str(&s);
                        } else {
                            cs.cut_del();
                            cs.next_del = Some(CS2Del {
                                pos: self.seen,
                                actor: suc.actor(),
                                span: s,
                            })
                        }
                    } else {
                        cs.next_del = Some(CS2Del {
                            pos: self.seen,
                            actor: suc.actor(),
                            span: s,
                        })
                    }
                }
            }
        }
    }

    pub fn finish(&mut self) {
        for cs in &mut self.change_sets {
            cs.cut_add();
            cs.cut_del();
        }
    }
}

impl<const B: usize> TreeQuery<B> for Attribute2<B> {
    fn query_element_with_metadata(&mut self, element: &Op, _m: &OpSetMetadata) -> QueryResult {
        if element.insert {
            self.last_seen = None;
        }
        if self.last_seen.is_none() && element.visible() {
            self.update_add(element);
            self.seen += 1;
            self.last_seen = element.elemid();
        }
        if !element.succ.is_empty() {
            self.update_del(element);
        }
        self.pos += 1;
        QueryResult::Next
    }
}
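The difference from Attribute is the extra actor bookkeeping: Attribute2 splits a run whenever the acting actor changes, so each CS2Add range (and each CS2Del span) is attributed to a single actor index rather than to the change set as a whole. A standalone sketch of that run-splitting rule:

fn main() {
    let ops = [(0usize, 'a'), (0, 'b'), (1, 'c')]; // (actor index, inserted char)
    let mut runs: Vec<(usize, std::ops::Range<usize>)> = Vec::new();
    for (i, (actor, _ch)) in ops.iter().enumerate() {
        match runs.last_mut() {
            // Same actor and contiguous position: extend the current run.
            Some((a, r)) if *a == *actor && r.end == i => r.end += 1,
            // Actor changed (or a gap): start a new run, like cut_add() above.
            _ => runs.push((*actor, i..i + 1)),
        }
    }
    assert_eq!(runs, vec![(0, 0..2), (1, 2..3)]);
}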
|
|
@ -1,7 +1,7 @@
|
||||||
use crate::error::AutomergeError;
|
use crate::error::AutomergeError;
|
||||||
use crate::op_tree::OpTreeNode;
|
use crate::op_tree::OpTreeNode;
|
||||||
use crate::query::{OpTree, QueryResult, TreeQuery};
|
use crate::query::{QueryResult, TreeQuery};
|
||||||
use crate::types::{ElemId, Key, ListEncoding, Op, HEAD};
|
use crate::types::{ElemId, Key, Op, HEAD};
|
||||||
use std::fmt::Debug;
|
use std::fmt::Debug;
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
#[derive(Debug, Clone, PartialEq)]
|
||||||
|
|
@ -10,31 +10,27 @@ pub(crate) struct InsertNth {
|
||||||
target: usize,
|
target: usize,
|
||||||
/// the number of visible operations seen
|
/// the number of visible operations seen
|
||||||
seen: usize,
|
seen: usize,
|
||||||
last_width: usize,
|
|
||||||
encoding: ListEncoding,
|
|
||||||
//pub pos: usize,
|
//pub pos: usize,
|
||||||
/// the number of operations (including non-visible) that we have seen
|
/// the number of operations (including non-visible) that we have seen
|
||||||
n: usize,
|
n: usize,
|
||||||
valid: Option<usize>,
|
valid: Option<usize>,
|
||||||
/// last_seen is the target elemid of the last `seen` operation.
|
/// last_seen is the target elemid of the last `seen` operation.
|
||||||
/// It is used to avoid double counting visible elements (which arise through conflicts) that are split across nodes.
|
/// It is used to avoid double counting visible elements (which arise through conflicts) that are split across nodes.
|
||||||
last_seen: Option<Key>,
|
last_seen: Option<ElemId>,
|
||||||
last_insert: Option<ElemId>,
|
last_insert: Option<ElemId>,
|
||||||
last_valid_insert: Option<Key>,
|
last_valid_insert: Option<ElemId>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl InsertNth {
|
impl InsertNth {
|
||||||
pub(crate) fn new(target: usize, encoding: ListEncoding) -> Self {
|
pub fn new(target: usize) -> Self {
|
||||||
let (valid, last_valid_insert) = if target == 0 {
|
let (valid, last_valid_insert) = if target == 0 {
|
||||||
(Some(0), Some(Key::Seq(HEAD)))
|
(Some(0), Some(HEAD))
|
||||||
} else {
|
} else {
|
||||||
(None, None)
|
(None, None)
|
||||||
};
|
};
|
||||||
InsertNth {
|
InsertNth {
|
||||||
target,
|
target,
|
||||||
seen: 0,
|
seen: 0,
|
||||||
last_width: 0,
|
|
||||||
encoding,
|
|
||||||
n: 0,
|
n: 0,
|
||||||
valid,
|
valid,
|
||||||
last_seen: None,
|
last_seen: None,
|
||||||
|
|
@@ -43,41 +39,34 @@ impl InsertNth {
         }
     }

-    pub(crate) fn pos(&self) -> usize {
+    pub fn pos(&self) -> usize {
         self.valid.unwrap_or(self.n)
     }

-    pub(crate) fn key(&self) -> Result<Key, AutomergeError> {
-        self.last_valid_insert
-            .ok_or(AutomergeError::InvalidIndex(self.target))
+    pub fn key(&self) -> Result<Key, AutomergeError> {
+        Ok(self
+            .last_valid_insert
+            .ok_or(AutomergeError::InvalidIndex(self.target))?
+            .into())
+        //if self.target == 0 {
+        /*
+        if self.last_insert.is_none() {
+            Ok(HEAD.into())
+        } else if self.seen == self.target && self.last_insert.is_some() {
+            Ok(Key::Seq(self.last_insert.unwrap()))
+        } else {
+            Err(AutomergeError::InvalidIndex(self.target))
+        }
+        */
     }
 }
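Both versions of `key()` boil down to the same `Option` to `Result` conversion; the difference is only the stored type. A self-contained sketch with stub types (the real `Key`, `ElemId`, and `AutomergeError` live in the crate's `types` and `error` modules):

#[derive(Debug, Clone, Copy, PartialEq)]
struct ElemId(u64);

#[derive(Debug, Clone, Copy, PartialEq)]
enum Key {
    Seq(ElemId),
}

impl From<ElemId> for Key {
    fn from(e: ElemId) -> Self {
        Key::Seq(e)
    }
}

#[derive(Debug)]
enum AutomergeError {
    InvalidIndex(usize),
}

// main: the anchor is already a ready-made Key
fn key_on_main(last: Option<Key>, target: usize) -> Result<Key, AutomergeError> {
    last.ok_or(AutomergeError::InvalidIndex(target))
}

// branch: the anchor is an ElemId, converted on the way out
fn key_on_branch(last: Option<ElemId>, target: usize) -> Result<Key, AutomergeError> {
    Ok(last.ok_or(AutomergeError::InvalidIndex(target))?.into())
}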
-impl<'a> TreeQuery<'a> for InsertNth {
+impl<const B: usize> TreeQuery<B> for InsertNth {
-    fn equiv(&mut self, other: &Self) -> bool {
-        self.pos() == other.pos() && self.key() == other.key()
-    }
-
-    fn can_shortcut_search(&mut self, tree: &'a OpTree) -> bool {
-        if let Some((index, pos)) = &tree.last_insert {
-            if let Some(op) = tree.internal.get(*pos) {
-                if *index + op.width(self.encoding) == self.target {
-                    self.valid = Some(*pos + 1);
-                    self.last_valid_insert = Some(op.elemid_or_key());
-                    return true;
-                }
-            }
-        }
-        false
-    }
-
-    fn query_node(&mut self, child: &OpTreeNode, ops: &[Op]) -> QueryResult {
+    fn query_node(&mut self, child: &OpTreeNode<B>) -> QueryResult {
         // if this node has some visible elements then we may find our target within
-        let mut num_vis = child.index.visible_len(self.encoding);
-        if let Some(last_seen) = self.last_seen {
-            if child.index.has_visible(&last_seen) {
-                num_vis -= 1;
-            }
-        }
+        let mut num_vis = child.index.visible_len();
+        if child.index.has_visible(&self.last_seen) {
+            num_vis -= 1;
+        }

         if self.seen + num_vis >= self.target {
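The `can_shortcut_search` method on main (the `-` lines) is an append fast path: the tree caches where the last insert landed, so inserting directly after it skips the whole B-tree descent; the branch, which predates this cache, pays the full search on every insert. A rough standalone sketch, with `Tree` and `OpStub` as hypothetical stand-ins:

struct OpStub {
    width: usize, // 1 for a list element; more for multi-unit text ops
}

struct Tree {
    ops: Vec<OpStub>,
    last_insert: Option<(usize, usize)>, // (visible index, position in ops)
}

// Returns the ready-made insert position when the target index lands
// directly after the cached last insert.
fn can_shortcut(tree: &Tree, target: usize) -> Option<usize> {
    let (index, pos) = tree.last_insert?;
    let op = tree.ops.get(pos)?;
    (index + op.width == target).then(|| pos + 1)
}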
@@ -94,9 +83,9 @@ impl<'a> TreeQuery<'a> for InsertNth {
         // - the insert was at a previous node and this is a long run of overwrites so last_seen should already be set correctly
         // - the visible op is in this node and the elemid references it so it can be set here
         // - the visible op is in a future node and so it will be counted as seen there
-        let last_elemid = ops[child.last()].elemid_or_key();
+        let last_elemid = child.last().elemid();
         if child.index.has_visible(&last_elemid) {
-            self.last_seen = Some(last_elemid);
+            self.last_seen = last_elemid;
         }
         QueryResult::Next
     }
@@ -110,13 +99,16 @@ impl<'a> TreeQuery<'a> for InsertNth {
             self.last_seen = None;
             self.last_insert = element.elemid();
         }
+        if self.valid.is_some() && element.valid_mark_anchor() {
+            self.last_valid_insert = element.elemid();
+            self.valid = None;
+        }
         if self.last_seen.is_none() && element.visible() {
             if self.seen >= self.target {
                 return QueryResult::Finish;
             }
-            self.last_width = element.width(self.encoding);
-            self.seen += self.last_width;
-            self.last_seen = Some(element.elemid_or_key());
+            self.seen += 1;
+            self.last_seen = element.elemid();
             self.last_valid_insert = self.last_seen
         }
         self.n += 1;
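Main's `width`/`encoding` pair (the `-` lines) makes list indices unit-aware: a plain list element always counts as 1, while a text op may span several code units, which is why main accumulates `last_width` instead of a bare `+= 1`. A simplified sketch; the variants below are assumptions for illustration, not the crate's exact `ListEncoding` definition:

enum ListEncoding {
    List,
    Utf8,
    Utf16,
}

fn width(encoding: &ListEncoding, text: &str) -> usize {
    match encoding {
        ListEncoding::List => 1,                            // one element, one unit
        ListEncoding::Utf8 => text.len(),                   // utf-8 bytes
        ListEncoding::Utf16 => text.encode_utf16().count(), // utf-16 code units
    }
}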
54 automerge/src/query/keys.rs (new file)
@@ -0,0 +1,54 @@
+use crate::op_tree::OpTreeNode;
+use crate::types::Key;
+use std::fmt::Debug;
+
+#[derive(Debug)]
+pub(crate) struct Keys<'a, const B: usize> {
+    index: usize,
+    last_key: Option<Key>,
+    index_back: usize,
+    last_key_back: Option<Key>,
+    root_child: &'a OpTreeNode<B>,
+}
+
+impl<'a, const B: usize> Keys<'a, B> {
+    pub(crate) fn new(root_child: &'a OpTreeNode<B>) -> Self {
+        Self {
+            index: 0,
+            last_key: None,
+            index_back: root_child.len(),
+            last_key_back: None,
+            root_child,
+        }
+    }
+}
+
+impl<'a, const B: usize> Iterator for Keys<'a, B> {
+    type Item = Key;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        for i in self.index..self.index_back {
+            let op = self.root_child.get(i)?;
+            self.index += 1;
+            if Some(op.key) != self.last_key && op.visible() {
+                self.last_key = Some(op.key);
+                return Some(op.key);
+            }
+        }
+        None
+    }
+}
+
+impl<'a, const B: usize> DoubleEndedIterator for Keys<'a, B> {
+    fn next_back(&mut self) -> Option<Self::Item> {
+        for i in (self.index..self.index_back).rev() {
+            let op = self.root_child.get(i)?;
+            self.index_back -= 1;
+            if Some(op.key) != self.last_key_back && op.visible() {
+                self.last_key_back = Some(op.key);
+                return Some(op.key);
+            }
+        }
+        None
+    }
+}
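The iterator relies on ops being stored sorted by key, with conflicting writes to the same key adjacent; skipping runs that repeat `last_key` therefore yields each visible key exactly once. The same pattern in a tiny self-contained form, with plain integers standing in for `Key`:

// Each pair is (key, visible). Conflicting ops on one key sit next to
// each other; only the first visible op of a run produces the key.
fn visible_keys(ops: &[(u32, bool)]) -> Vec<u32> {
    let mut out = Vec::new();
    let mut last_key = None;
    for &(key, visible) in ops {
        if Some(key) != last_key && visible {
            last_key = Some(key);
            out.push(key);
        }
    }
    out
}

// visible_keys(&[(1, true), (1, true), (2, false), (3, true)]) == vec![1, 3]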
@@ -1,59 +1,59 @@
-use crate::op_tree::OpTreeInternal;
+use crate::op_tree::OpTreeNode;
 use crate::query::VisWindow;
 use crate::types::{Clock, Key};
 use std::fmt::Debug;

 #[derive(Debug)]
-pub(crate) struct KeysAt<'a> {
+pub(crate) struct KeysAt<'a, const B: usize> {
     clock: Clock,
     window: VisWindow,
     index: usize,
     last_key: Option<Key>,
     index_back: usize,
     last_key_back: Option<Key>,
-    op_tree: &'a OpTreeInternal,
+    root_child: &'a OpTreeNode<B>,
 }

-impl<'a> KeysAt<'a> {
-    pub(crate) fn new(op_tree: &'a OpTreeInternal, clock: Clock) -> Self {
+impl<'a, const B: usize> KeysAt<'a, B> {
+    pub(crate) fn new(root_child: &'a OpTreeNode<B>, clock: Clock) -> Self {
         Self {
             clock,
             window: VisWindow::default(),
             index: 0,
             last_key: None,
-            index_back: op_tree.len(),
+            index_back: root_child.len(),
             last_key_back: None,
-            op_tree,
+            root_child,
         }
     }
 }

-impl<'a> Iterator for KeysAt<'a> {
+impl<'a, const B: usize> Iterator for KeysAt<'a, B> {
     type Item = Key;

     fn next(&mut self) -> Option<Self::Item> {
-        for i in self.index..self.index_back {
-            let op = self.op_tree.get(i)?;
+        for i in self.index..self.root_child.len() {
+            let op = self.root_child.get(i)?;
             let visible = self.window.visible_at(op, i, &self.clock);
             self.index += 1;
-            if Some(op.elemid_or_key()) != self.last_key && visible {
-                self.last_key = Some(op.elemid_or_key());
-                return Some(op.elemid_or_key());
+            if Some(op.key) != self.last_key && visible {
+                self.last_key = Some(op.key);
+                return Some(op.key);
             }
         }
         None
     }
 }

-impl<'a> DoubleEndedIterator for KeysAt<'a> {
+impl<'a, const B: usize> DoubleEndedIterator for KeysAt<'a, B> {
     fn next_back(&mut self) -> Option<Self::Item> {
         for i in self.index..self.index_back {
-            let op = self.op_tree.get(i)?;
+            let op = self.root_child.get(i)?;
             let visible = self.window.visible_at(op, i, &self.clock);
             self.index_back -= 1;
-            if Some(op.elemid_or_key()) != self.last_key_back && visible {
-                self.last_key_back = Some(op.elemid_or_key());
-                return Some(op.elemid_or_key());
+            if Some(op.key) != self.last_key_back && visible {
+                self.last_key_back = Some(op.key);
+                return Some(op.key);
             }
         }
         None
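`visible_at` is the historical counterpart of `visible()`: it asks whether an op was visible at the document state described by a clock. A rough, hypothetical model of that question (not the real `VisWindow` implementation; a clock is treated here as a map from actor to the highest op counter it covers):

use std::collections::HashMap;

struct ClockStub(HashMap<u64, u64>);

impl ClockStub {
    fn covers(&self, actor: u64, counter: u64) -> bool {
        self.0.get(&actor).map_or(false, |&max| counter <= max)
    }
}

// An op counts as visible at `clock` when the clock includes the op itself
// and includes none of its successors (the ops that overwrite or delete it).
fn visible_at(op: (u64, u64), succ: &[(u64, u64)], clock: &ClockStub) -> bool {
    clock.covers(op.0, op.1) && !succ.iter().any(|&(a, c)| clock.covers(a, c))
}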
21 automerge/src/query/len.rs (new file)
@@ -0,0 +1,21 @@
+use crate::op_tree::OpTreeNode;
+use crate::query::{QueryResult, TreeQuery};
+use std::fmt::Debug;
+
+#[derive(Debug, Clone, PartialEq)]
+pub(crate) struct Len {
+    pub len: usize,
+}
+
+impl Len {
+    pub fn new() -> Self {
+        Len { len: 0 }
+    }
+}
+
+impl<const B: usize> TreeQuery<B> for Len {
+    fn query_node(&mut self, child: &OpTreeNode<B>) -> QueryResult {
+        self.len = child.index.visible_len();
+        QueryResult::Finish
+    }
+}
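Why `Len` can finish at the root: every op-tree node maintains an index that already records how many visible elements sit beneath it, so the query returns in O(1) without visiting a single element. A sketch with stub types, not automerge's real index:

enum QueryResultStub {
    Next,
    Finish,
}

struct NodeStub {
    visible_len: usize, // maintained incrementally on every insert/delete
}

struct LenStub {
    len: usize,
}

impl LenStub {
    fn query_node(&mut self, root: &NodeStub) -> QueryResultStub {
        self.len = root.visible_len; // read straight from the index
        QueryResultStub::Finish      // stop before descending anywhere
    }
}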
@@ -1,39 +1,37 @@
 use crate::query::{QueryResult, TreeQuery, VisWindow};
-use crate::types::{Clock, ElemId, ListEncoding, Op};
+use crate::types::{Clock, ElemId, Op};
 use std::fmt::Debug;

 #[derive(Debug, Clone, PartialEq)]
 pub(crate) struct LenAt {
-    pub(crate) len: usize,
+    pub len: usize,
     clock: Clock,
     pos: usize,
-    encoding: ListEncoding,
     last: Option<ElemId>,
     window: VisWindow,
 }

 impl LenAt {
-    pub(crate) fn new(clock: Clock, encoding: ListEncoding) -> Self {
+    pub fn new(clock: Clock) -> Self {
         LenAt {
             clock,
             pos: 0,
             len: 0,
-            encoding,
             last: None,
             window: Default::default(),
         }
     }
 }

-impl<'a> TreeQuery<'a> for LenAt {
-    fn query_element(&mut self, op: &'a Op) -> QueryResult {
+impl<const B: usize> TreeQuery<B> for LenAt {
+    fn query_element(&mut self, op: &Op) -> QueryResult {
         if op.insert {
             self.last = None;
         }
         let elem = op.elemid();
         let visible = self.window.visible_at(op, self.pos, &self.clock);
         if elem != self.last && visible {
-            self.len += op.width(self.encoding);
+            self.len += 1;
             self.last = elem;
         }
         self.pos += 1;
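Unlike `Len`, `LenAt` cannot read a precomputed answer, because visibility at an old clock is not indexed; it replays every op against the clock, one `query_element` call at a time. A compact, hypothetical model of that replay, with each op reduced to `(elemid, visible_at_clock)` and ops for one element kept adjacent:

fn len_at(ops: &[(u64, bool)]) -> usize {
    let mut len = 0;
    let mut last: Option<u64> = None;
    for &(elem, visible) in ops {
        // count each element once, on its first op that was visible at the clock
        if Some(elem) != last && visible {
            len += 1;
            last = Some(elem);
        }
    }
    len
}

// e.g. len_at(&[(1, true), (1, true), (2, false), (3, true)]) == 2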
Some files were not shown because too many files have changed in this diff.