diff --git a/.github/workflows/advisory-cron.yaml b/.github/workflows/advisory-cron.yaml index 31bac5a3..90923191 100644 --- a/.github/workflows/advisory-cron.yaml +++ b/.github/workflows/advisory-cron.yaml @@ -1,4 +1,4 @@ -name: Advisories +name: ci on: schedule: - cron: '0 18 * * *' diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8519ac5e..9a9753d0 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1,11 +1,11 @@ -name: CI +name: ci on: push: branches: - - main + - experiment pull_request: branches: - - main + - experiment jobs: fmt: runs-on: ubuntu-latest @@ -14,8 +14,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.67.0 - default: true + toolchain: stable components: rustfmt - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/fmt @@ -28,8 +27,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.67.0 - default: true + toolchain: stable components: clippy - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/lint @@ -42,14 +40,9 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.67.0 - default: true + toolchain: stable - uses: Swatinem/rust-cache@v1 - - name: Build rust docs - run: ./scripts/ci/rust-docs - shell: bash - - name: Install doxygen - run: sudo apt-get install -y doxygen + - run: ./scripts/ci/docs shell: bash cargo-deny: @@ -64,88 +57,40 @@ jobs: - uses: actions/checkout@v2 - uses: EmbarkStudios/cargo-deny-action@v1 with: - arguments: '--manifest-path ./rust/Cargo.toml' command: check ${{ matrix.checks }} wasm_tests: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Install wasm-bindgen-cli - run: cargo install wasm-bindgen-cli wasm-opt - - name: Install wasm32 target - run: rustup target add wasm32-unknown-unknown + - name: Install wasm-pack + run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh - name: run tests run: ./scripts/ci/wasm_tests - deno_tests: - runs-on: ubuntu-latest - 
steps: - - uses: actions/checkout@v2 - - uses: denoland/setup-deno@v1 - with: - deno-version: v1.x - - name: Install wasm-bindgen-cli - run: cargo install wasm-bindgen-cli wasm-opt - - name: Install wasm32 target - run: rustup target add wasm32-unknown-unknown - - name: run tests - run: ./scripts/ci/deno_tests - - js_fmt: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: install - run: yarn global add prettier - - name: format - run: prettier -c javascript/.prettierrc javascript js_tests: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Install wasm-bindgen-cli - run: cargo install wasm-bindgen-cli wasm-opt - - name: Install wasm32 target - run: rustup target add wasm32-unknown-unknown + - name: Install wasm-pack + run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh - name: run tests run: ./scripts/ci/js_tests - cmake_build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions-rs/toolchain@v1 - with: - profile: minimal - toolchain: nightly-2023-01-26 - default: true - - uses: Swatinem/rust-cache@v1 - - name: Install CMocka - run: sudo apt-get install -y libcmocka-dev - - name: Install/update CMake - uses: jwlawson/actions-setup-cmake@v1.12 - with: - cmake-version: latest - - name: Install rust-src - run: rustup component add rust-src - - name: Build and test C bindings - run: ./scripts/ci/cmake-build Release Static - shell: bash - linux: runs-on: ubuntu-latest strategy: matrix: toolchain: - - 1.67.0 + - stable + - nightly + continue-on-error: ${{ matrix.toolchain == 'nightly' }} steps: - uses: actions/checkout@v2 - uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: ${{ matrix.toolchain }} - default: true - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/build-test shell: bash @@ -157,8 +102,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.67.0 - default: true + toolchain: stable - uses: Swatinem/rust-cache@v1 - run: 
./scripts/ci/build-test shell: bash @@ -170,8 +114,8 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.67.0 - default: true + toolchain: stable - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/build-test shell: bash + diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml deleted file mode 100644 index b501d526..00000000 --- a/.github/workflows/docs.yaml +++ /dev/null @@ -1,52 +0,0 @@ -on: - push: - branches: - - main - -name: Documentation - -jobs: - deploy-docs: - concurrency: deploy-docs - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - - name: Toolchain - uses: actions-rs/toolchain@v1 - with: - profile: minimal - toolchain: stable - override: true - - - name: Cache - uses: Swatinem/rust-cache@v1 - - - name: Clean docs dir - run: rm -rf docs - shell: bash - - - name: Clean Rust docs dir - uses: actions-rs/cargo@v1 - with: - command: clean - args: --manifest-path ./rust/Cargo.toml --doc - - - name: Build Rust docs - uses: actions-rs/cargo@v1 - with: - command: doc - args: --manifest-path ./rust/Cargo.toml --workspace --all-features --no-deps - - - name: Move Rust docs - run: mkdir -p docs && mv rust/target/doc/* docs/. 
- shell: bash - - - name: Configure root page - run: echo '' > docs/index.html - - - name: Deploy docs - uses: peaceiris/actions-gh-pages@v3 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./docs diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml deleted file mode 100644 index 762671ff..00000000 --- a/.github/workflows/release.yaml +++ /dev/null @@ -1,214 +0,0 @@ -name: Release -on: - push: - branches: - - main - -jobs: - check_if_wasm_version_upgraded: - name: Check if WASM version has been upgraded - runs-on: ubuntu-latest - outputs: - wasm_version: ${{ steps.version-updated.outputs.current-package-version }} - wasm_has_updated: ${{ steps.version-updated.outputs.has-updated }} - steps: - - uses: JiPaix/package-json-updated-action@v1.0.5 - id: version-updated - with: - path: rust/automerge-wasm/package.json - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - publish-wasm: - name: Publish WASM package - runs-on: ubuntu-latest - needs: - - check_if_wasm_version_upgraded - # We create release only if the version in the package.json has been upgraded - if: needs.check_if_wasm_version_upgraded.outputs.wasm_has_updated == 'true' - steps: - - uses: actions/setup-node@v3 - with: - node-version: '16.x' - registry-url: 'https://registry.npmjs.org' - - uses: denoland/setup-deno@v1 - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - ref: ${{ github.ref }} - - name: Get rid of local github workflows - run: rm -r .github/workflows - - name: Remove tmp_branch if it exists - run: git push origin :tmp_branch || true - - run: git checkout -b tmp_branch - - name: Install wasm-bindgen-cli - run: cargo install wasm-bindgen-cli wasm-opt - - name: Install wasm32 target - run: rustup target add wasm32-unknown-unknown - - name: run wasm js tests - id: wasm_js_tests - run: ./scripts/ci/wasm_tests - - name: run wasm deno tests - id: wasm_deno_tests - run: ./scripts/ci/deno_tests - - name: build release - id: build_release - run: | - npm 
--prefix $GITHUB_WORKSPACE/rust/automerge-wasm run release - - name: Collate deno release files - if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success' - run: | - mkdir $GITHUB_WORKSPACE/deno_wasm_dist - cp $GITHUB_WORKSPACE/rust/automerge-wasm/deno/* $GITHUB_WORKSPACE/deno_wasm_dist - cp $GITHUB_WORKSPACE/rust/automerge-wasm/index.d.ts $GITHUB_WORKSPACE/deno_wasm_dist - cp $GITHUB_WORKSPACE/rust/automerge-wasm/README.md $GITHUB_WORKSPACE/deno_wasm_dist - cp $GITHUB_WORKSPACE/rust/automerge-wasm/LICENSE $GITHUB_WORKSPACE/deno_wasm_dist - sed -i '1i /// ' $GITHUB_WORKSPACE/deno_wasm_dist/automerge_wasm.js - - name: Create npm release - if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success' - run: | - if [ "$(npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm show . version)" = "$VERSION" ]; then - echo "This version is already published" - exit 0 - fi - EXTRA_ARGS="--access public" - if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then - echo "Is pre-release version" - EXTRA_ARGS="$EXTRA_ARGS --tag next" - fi - if [ "$NODE_AUTH_TOKEN" = "" ]; then - echo "Can't publish on NPM, You need a NPM_TOKEN secret." 
- false - fi - npm publish $GITHUB_WORKSPACE/rust/automerge-wasm $EXTRA_ARGS - env: - NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}} - VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }} - - name: Commit wasm deno release files - run: | - git config --global user.name "actions" - git config --global user.email actions@github.com - git add $GITHUB_WORKSPACE/deno_wasm_dist - git commit -am "Add deno release files" - git push origin tmp_branch - - name: Tag wasm release - if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success' - uses: softprops/action-gh-release@v1 - with: - name: Automerge Wasm v${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }} - tag_name: js/automerge-wasm-${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }} - target_commitish: tmp_branch - generate_release_notes: false - draft: false - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Remove tmp_branch - run: git push origin :tmp_branch - check_if_js_version_upgraded: - name: Check if JS version has been upgraded - runs-on: ubuntu-latest - outputs: - js_version: ${{ steps.version-updated.outputs.current-package-version }} - js_has_updated: ${{ steps.version-updated.outputs.has-updated }} - steps: - - uses: JiPaix/package-json-updated-action@v1.0.5 - id: version-updated - with: - path: javascript/package.json - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - publish-js: - name: Publish JS package - runs-on: ubuntu-latest - needs: - - check_if_js_version_upgraded - - check_if_wasm_version_upgraded - - publish-wasm - # We create release only if the version in the package.json has been upgraded and after the WASM release - if: | - (always() && ! 
cancelled()) && - (needs.publish-wasm.result == 'success' || needs.publish-wasm.result == 'skipped') && - needs.check_if_js_version_upgraded.outputs.js_has_updated == 'true' - steps: - - uses: actions/setup-node@v3 - with: - node-version: '16.x' - registry-url: 'https://registry.npmjs.org' - - uses: denoland/setup-deno@v1 - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - ref: ${{ github.ref }} - - name: Get rid of local github workflows - run: rm -r .github/workflows - - name: Remove js_tmp_branch if it exists - run: git push origin :js_tmp_branch || true - - run: git checkout -b js_tmp_branch - - name: check js formatting - run: | - yarn global add prettier - prettier -c javascript/.prettierrc javascript - - name: run js tests - id: js_tests - run: | - cargo install wasm-bindgen-cli wasm-opt - rustup target add wasm32-unknown-unknown - ./scripts/ci/js_tests - - name: build js release - id: build_release - run: | - npm --prefix $GITHUB_WORKSPACE/javascript run build - - name: build js deno release - id: build_deno_release - run: | - VERSION=$WASM_VERSION npm --prefix $GITHUB_WORKSPACE/javascript run deno:build - env: - WASM_VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }} - - name: run deno tests - id: deno_tests - run: | - npm --prefix $GITHUB_WORKSPACE/javascript run deno:test - - name: Collate deno release files - if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success' - run: | - mkdir $GITHUB_WORKSPACE/deno_js_dist - cp $GITHUB_WORKSPACE/javascript/deno_dist/* $GITHUB_WORKSPACE/deno_js_dist - - name: Create npm release - if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success' - run: | - if [ "$(npm --prefix $GITHUB_WORKSPACE/javascript show . 
version)" = "$VERSION" ]; then - echo "This version is already published" - exit 0 - fi - EXTRA_ARGS="--access public" - if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then - echo "Is pre-release version" - EXTRA_ARGS="$EXTRA_ARGS --tag next" - fi - if [ "$NODE_AUTH_TOKEN" = "" ]; then - echo "Can't publish on NPM, You need a NPM_TOKEN secret." - false - fi - npm publish $GITHUB_WORKSPACE/javascript $EXTRA_ARGS - env: - NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}} - VERSION: ${{ needs.check_if_js_version_upgraded.outputs.js_version }} - - name: Commit js deno release files - run: | - git config --global user.name "actions" - git config --global user.email actions@github.com - git add $GITHUB_WORKSPACE/deno_js_dist - git commit -am "Add deno js release files" - git push origin js_tmp_branch - - name: Tag JS release - if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success' - uses: softprops/action-gh-release@v1 - with: - name: Automerge v${{ needs.check_if_js_version_upgraded.outputs.js_version }} - tag_name: js/automerge-${{ needs.check_if_js_version_upgraded.outputs.js_version }} - target_commitish: js_tmp_branch - generate_release_notes: false - draft: false - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Remove js_tmp_branch - run: git push origin :js_tmp_branch diff --git a/.gitignore b/.gitignore index f77865d0..95d3d639 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,4 @@ +/target /.direnv perf.* /Cargo.lock -build/ -.vim/* -/target diff --git a/rust/Cargo.toml b/Cargo.toml similarity index 63% rename from rust/Cargo.toml rename to Cargo.toml index 5d29fc9f..e1941120 100644 --- a/rust/Cargo.toml +++ b/Cargo.toml @@ -1,17 +1,15 @@ [workspace] members = [ "automerge", - "automerge-c", - "automerge-cli", - "automerge-test", "automerge-wasm", + "automerge-cli", "edit-trace", ] -resolver = "2" [profile.release] +debug = true lto = true -codegen-units = 1 +opt-level = 3 
[profile.bench] -debug = true \ No newline at end of file +debug = true diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..9f8db2d1 --- /dev/null +++ b/Makefile @@ -0,0 +1,13 @@ +rust: + cd automerge && cargo test + +wasm: + cd automerge-wasm && yarn + cd automerge-wasm && yarn build + cd automerge-wasm && yarn test + cd automerge-wasm && yarn link + +js: wasm + cd automerge-js && yarn + cd automerge-js && yarn link "automerge-wasm" + cd automerge-js && yarn test diff --git a/README.md b/README.md index ad174da4..e7a277a8 100644 --- a/README.md +++ b/README.md @@ -1,147 +1,81 @@ -# Automerge +# Automerge - NEXT -Automerge logo +This is pretty much a ground up rewrite of automerge-rs. The objective of this +rewrite is to radically simplify the API. The end goal being to produce a library +which is easy to work with both in Rust and from FFI. -[![homepage](https://img.shields.io/badge/homepage-published-informational)](https://automerge.org/) -[![main docs](https://img.shields.io/badge/docs-main-informational)](https://automerge.org/automerge-rs/automerge/) -[![ci](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml/badge.svg)](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml) -[![docs](https://github.com/automerge/automerge-rs/actions/workflows/docs.yaml/badge.svg)](https://github.com/automerge/automerge-rs/actions/workflows/docs.yaml) +## How? -Automerge is a library which provides fast implementations of several different -CRDTs, a compact compression format for these CRDTs, and a sync protocol for -efficiently transmitting those changes over the network. The objective of the -project is to support [local-first](https://www.inkandswitch.com/local-first/) applications in the same way that relational -databases support server applications - by providing mechanisms for persistence -which allow application developers to avoid thinking about hard distributed -computing problems. 
Automerge aims to be PostgreSQL for your local-first app. +The current iteration of automerge-rs is complicated to work with because it +adopts the frontend/backend split architecture of the JS implementation. This +architecture was necessary due to basic operations on the automerge opset being +too slow to perform on the UI thread. Recently @orionz has been able to improve +the performance to the point where the split is no longer necessary. This means +we can adopt a much simpler mutable API. -If you're looking for documentation on the JavaScript implementation take a look -at https://automerge.org/docs/hello/. There are other implementations in both -Rust and C, but they are earlier and don't have documentation yet. You can find -them in `rust/automerge` and `rust/automerge-c` if you are comfortable -reading the code and tests to figure out how to use them. - -If you're familiar with CRDTs and interested in the design of Automerge in -particular take a look at https://automerge.org/docs/how-it-works/backend/ - -Finally, if you want to talk to us about this project please [join the -Slack](https://join.slack.com/t/automerge/shared_invite/zt-e4p3760n-kKh7r3KRH1YwwNfiZM8ktw) +The architecture is now built around the `OpTree`. This is a data structure +which supports efficiently inserting new operations and realising values of +existing operations. Most interactions with the `OpTree` are in the form of +implementations of `TreeQuery` - a trait which can be used to traverse the +optree and producing state of some kind. User facing operations are exposed on +an `Automerge` object, under the covers these operations typically instantiate +some `TreeQuery` and run it over the `OpTree`. ## Status -This project is formed of a core Rust implementation which is exposed via FFI in -javascript+WASM, C, and soon other languages. 
Alex -([@alexjg](https://github.com/alexjg/)]) is working full time on maintaining -automerge, other members of Ink and Switch are also contributing time and there -are several other maintainers. The focus is currently on shipping the new JS -package. We expect to be iterating the API and adding new features over the next -six months so there will likely be several major version bumps in all packages -in that time. +We have working code which passes all of the tests in the JS test suite. We're +now working on writing a bunch more tests and cleaning up the API. -In general we try and respect semver. +## Development -### JavaScript +### Running CI -A stable release of the javascript package is currently available as -`@automerge/automerge@2.0.0` where. pre-release verisions of the `2.0.1` are -available as `2.0.1-alpha.n`. `2.0.1*` packages are also available for Deno at -https://deno.land/x/automerge +The steps CI will run are all defined in `./scripts/ci`. Obviously CI will run +everything when you submit a PR, but if you want to run everything locally +before you push you can run `./scripts/ci/run` to run everything. -### Rust +### Running the JS tests -The rust codebase is currently oriented around producing a performant backend -for the Javascript wrapper and as such the API for Rust code is low level and -not well documented. We will be returning to this over the next few months but -for now you will need to be comfortable reading the tests and asking questions -to figure out how to use it. If you are looking to build rust applications which -use automerge you may want to look into -[autosurgeon](https://github.com/alexjg/autosurgeon) +You will need to have [node](https://nodejs.org/en/), [yarn](https://yarnpkg.com/getting-started/install), [rust](https://rustup.rs/) and [wasm-pack](https://rustwasm.github.io/wasm-pack/installer/) installed. 
-## Repository Organisation +To build and test the rust library: -- `./rust` - the rust rust implementation and also the Rust components of - platform specific wrappers (e.g. `automerge-wasm` for the WASM API or - `automerge-c` for the C FFI bindings) -- `./javascript` - The javascript library which uses `automerge-wasm` - internally but presents a more idiomatic javascript interface -- `./scripts` - scripts which are useful to maintenance of the repository. - This includes the scripts which are run in CI. -- `./img` - static assets for use in `.md` files - -## Building - -To build this codebase you will need: - -- `rust` -- `node` -- `yarn` -- `cmake` -- `cmocka` - -You will also need to install the following with `cargo install` - -- `wasm-bindgen-cli` -- `wasm-opt` -- `cargo-deny` - -And ensure you have added the `wasm32-unknown-unknown` target for rust cross-compilation. - -The various subprojects (the rust code, the wrapper projects) have their own -build instructions, but to run the tests that will be run in CI you can run -`./scripts/ci/run`. - -### For macOS - -These instructions worked to build locally on macOS 13.1 (arm64) as of -Nov 29th 2022. 
- -```bash -# clone the repo -git clone https://github.com/automerge/automerge-rs -cd automerge-rs - -# install rustup -curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh - -# install homebrew -/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" - -# install cmake, node, cmocka -brew install cmake node cmocka - -# install yarn -npm install --global yarn - -# install javascript dependencies -yarn --cwd ./javascript - -# install rust dependencies -cargo install wasm-bindgen-cli wasm-opt cargo-deny - -# get nightly rust to produce optimized automerge-c builds -rustup toolchain install nightly -rustup component add rust-src --toolchain nightly - -# add wasm target in addition to current architecture -rustup target add wasm32-unknown-unknown - -# Run ci script -./scripts/ci/run +```shell + $ cd automerge + $ cargo test ``` -If your build fails to find `cmocka.h` you may need to teach it about homebrew's -installation location: +To build and test the wasm library: -``` -export CPATH=/opt/homebrew/include -export LIBRARY_PATH=/opt/homebrew/lib -./scripts/ci/run +```shell + ## setup + $ cd automerge-wasm + $ yarn + + ## building or testing + $ yarn build + $ yarn test + + ## without this the js library wont automatically use changes + $ yarn link + + ## cutting a release or doing benchmarking + $ yarn release + $ yarn opt ## or set `wasm-opt = false` in Cargo.toml on supported platforms (not arm64 osx) ``` -## Contributing +And finally to test the js library. This is where most of the tests reside. -Please try and split your changes up into relatively independent commits which -change one subsystem at a time and add good commit messages which describe what -the change is and why you're making it (err on the side of longer commit -messages). `git blame` should give future maintainers a good idea of why -something is the way it is. 
+```shell + ## setup + $ cd automerge-js + $ yarn + $ yarn link "automerge-wasm" + + ## testing + $ yarn test +``` + +## Benchmarking + +The `edit-trace` folder has the main code for running the edit trace benchmarking. diff --git a/TODO.md b/TODO.md new file mode 100644 index 00000000..646c0c20 --- /dev/null +++ b/TODO.md @@ -0,0 +1,32 @@ +### next steps: + 1. C API + 2. port rust command line tool + 3. fast load + +### ergonomics: + 1. value() -> () or something that into's a value + +### automerge: + 1. single pass (fast) load + 2. micro-patches / bare bones observation API / fully hydrated documents + +### future: + 1. handle columns with unknown data in and out + 2. branches with different indexes + +### Peritext + 1. add mark / remove mark -- type, start/end elemid (inclusive,exclusive) + 2. track any formatting ops that start or end on a character + 3. ops right before the character, ops right after that character + 4. query a single character - character, plus marks that start or end on that character + what is its current formatting, + what are the ops that include that in their span, + None = same as last time, Set( bold, italic ), + keep these on index + 5. op probably belongs with the start character - possible packed at the beginning or end of the list + +### maybe: + 1. tables + +### no: + 1. cursors diff --git a/rust/automerge-cli/.gitignore b/automerge-cli/.gitignore similarity index 100% rename from rust/automerge-cli/.gitignore rename to automerge-cli/.gitignore diff --git a/automerge-cli/Cargo.lock b/automerge-cli/Cargo.lock new file mode 100644 index 00000000..a330ee89 --- /dev/null +++ b/automerge-cli/Cargo.lock @@ -0,0 +1,857 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "ansi_term" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" +dependencies = [ + "winapi", +] + +[[package]] +name = "anyhow" +version = "1.0.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "159bb86af3a200e19a068f4224eae4c8bb2d0fa054c7e5d1cacd5cef95e684cd" + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi", + "libc", + "winapi", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "automerge" +version = "0.1.0" +dependencies = [ + "flate2", + "fxhash", + "hex", + "itertools", + "js-sys", + "leb128", + "nonzero_ext", + "rand", + "serde", + "sha2", + "smol_str", + "thiserror", + "tinyvec", + "tracing", + "unicode-segmentation", + "uuid", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "automerge-cli" +version = "0.1.0" +dependencies = [ + "anyhow", + "atty", + "automerge", + "clap", + "colored_json", + "combine", + "duct", + "maplit", + "serde_json", + "thiserror", + "tracing-subscriber", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "block-buffer" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324" +dependencies = [ + "generic-array", +] + +[[package]] +name = "bumpalo" +version = "3.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899" + +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + +[[package]] +name = "bytes" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "clap" +version = "3.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced1892c55c910c1219e98d6fc8d71f6bddba7905866ce740066d8bfea859312" +dependencies = [ + "atty", + "bitflags", + "clap_derive", + "indexmap", + "lazy_static", + "os_str_bytes", + "strsim", + "termcolor", + "textwrap", +] + +[[package]] +name = "clap_derive" +version = "3.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da95d038ede1a964ce99f49cbe27a7fb538d1da595e4b4f70b8c8f338d17bf16" +dependencies = [ + "heck", + "proc-macro-error", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "colored_json" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd32eb54d016e203b7c2600e3a7802c75843a92e38ccc4869aefeca21771a64" +dependencies = [ + "ansi_term", + "atty", + "libc", + "serde", + "serde_json", +] + +[[package]] +name = "combine" +version = "4.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"50b727aacc797f9fc28e355d21f34709ac4fc9adecfe470ad07b8f4464f53062" +dependencies = [ + "bytes", + "memchr", +] + +[[package]] +name = "cpufeatures" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469" +dependencies = [ + "libc", +] + +[[package]] +name = "crc32fast" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crypto-common" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57952ca27b5e3606ff4dd79b0020231aaf9d6aa76dc05fd30137538c50bd3ce8" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "digest" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506" +dependencies = [ + "block-buffer", + "crypto-common", +] + +[[package]] +name = "duct" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fc6a0a59ed0888e0041cf708e66357b7ae1a82f1c67247e1f93b5e0818f7d8d" +dependencies = [ + "libc", + "once_cell", + "os_pipe", + "shared_child", +] + +[[package]] +name = "either" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" + +[[package]] +name = "flate2" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f" +dependencies = [ + "cfg-if", + "crc32fast", + "libc", + "miniz_oxide", +] + +[[package]] +name = "fxhash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" +dependencies = [ + "byteorder", +] + +[[package]] +name = "generic-array" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd48d33ec7f05fbfa152300fdad764757cbded343c1aa1cff2fbaf4134851803" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d39cd93900197114fa1fcb7ae84ca742095eed9442088988ae74fa744e930e77" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi", + "wasm-bindgen", +] + +[[package]] +name = "hashbrown" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" + +[[package]] +name = "heck" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "indexmap" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282a6247722caba404c065016bbfa522806e51714c34f5dfc3e4a3a46fcb4223" +dependencies = [ + "autocfg", + "hashbrown", +] + +[[package]] +name = "itertools" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" + +[[package]] +name = "js-sys" +version = "0.3.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a38fc24e30fd564ce974c02bf1d337caddff65be6cc4735a1f7eab22a7440f04" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "leb128" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" + +[[package]] +name = "libc" +version = "0.2.119" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bf2e165bb3457c8e098ea76f3e3bc9db55f87aa90d52d0e6be741470916aaa4" + +[[package]] +name = "log" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "maplit" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" + +[[package]] +name = "memchr" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" + +[[package]] +name = "miniz_oxide" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" +dependencies = [ + "adler", + "autocfg", +] + +[[package]] +name = "nonzero_ext" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"44a1290799eababa63ea60af0cbc3f03363e328e58f32fb0294798ed3e85f444" + +[[package]] +name = "once_cell" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5" + +[[package]] +name = "os_pipe" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb233f06c2307e1f5ce2ecad9f8121cffbbee2c95428f44ea85222e460d0d213" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "os_str_bytes" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e22443d1643a904602595ba1cd8f7d896afe56d26712531c5ff73a15b2fbf64" +dependencies = [ + "memchr", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c" + +[[package]] +name = "ppv-lite86" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" + +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2", + "quote", + "syn", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2", + "quote", + "version_check", +] + +[[package]] +name = "proc-macro2" +version = "1.0.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029" +dependencies = [ + "unicode-xid", +] + +[[package]] +name = 
"quote" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" +dependencies = [ + "getrandom", +] + +[[package]] +name = "ryu" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" + +[[package]] +name = "serde" +version = "1.0.136" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.136" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha2" +version = "0.10.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "55deaec60f81eefe3cce0dc50bda92d6d8e88f2a27df7c5033b42afeb1ed2676" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sharded-slab" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shared_child" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6be9f7d5565b1483af3e72975e2dee33879b3b86bd48c0929fccf6585d79e65a" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "smallvec" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" + +[[package]] +name = "smol_str" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61d15c83e300cce35b7c8cd39ff567c1ef42dde6d4a1a38dbdbf9a59902261bd" +dependencies = [ + "serde", +] + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "syn" +version = "1.0.86" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + +[[package]] +name = "termcolor" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "textwrap" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb" + +[[package]] +name = "thiserror" +version = "1.0.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thread_local" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" +dependencies = [ + "once_cell", +] + +[[package]] +name = "tinyvec" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c1c1d5a42b6245520c249549ec267180beaffcc0615401ac8e31853d4b6d8d2" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" + +[[package]] +name = "tracing" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6c650a8ef0cd2dd93736f033d21cbd1224c5a967aa0c258d00fcf7dafef9b9f" +dependencies = [ + "cfg-if", + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8276d9a4a3a558d7b7ad5303ad50b53d58264641b82914b7ada36bd762e7a716" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"03cfcb51380632a72d3111cb8d3447a8d908e577d31beeac006f836383d29a23" +dependencies = [ + "lazy_static", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6923477a48e41c1951f1999ef8bb5a3023eb723ceadafe78ffb65dc366761e3" +dependencies = [ + "lazy_static", + "log", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e0ab7bdc962035a87fba73f3acca9b8a8d0034c2e6f60b84aeaaddddc155dce" +dependencies = [ + "ansi_term", + "sharded-slab", + "smallvec", + "thread_local", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "typenum" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" + +[[package]] +name = "unicode-segmentation" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" + +[[package]] +name = "unicode-xid" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" + +[[package]] +name = "uuid" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" +dependencies = [ + "getrandom", + "serde", +] + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "wasi" 
+version = "0.10.2+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" + +[[package]] +name = "wasm-bindgen" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25f1af7423d8588a3d840681122e72e6a24ddbcb3f0ec385cac0d12d24256c06" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b21c0df030f5a177f3cba22e9bc4322695ec43e7257d865302900290bcdedca" +dependencies = [ + "bumpalo", + "lazy_static", + "log", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f4203d69e40a52ee523b2529a773d5ffc1dc0071801c87b3d270b471b80ed01" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa8a30d46208db204854cadbb5d4baf5fcf8071ba5bf48190c3e59937962ebc" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d958d035c4438e28c70e4321a2911302f10135ce78a9c7834c0cab4123d06a2" + +[[package]] +name = "web-sys" +version = "0.3.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c060b319f29dd25724f09a2ba1418f142f539b2be99fbf4d2d5a8f7330afb8eb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/rust/automerge-cli/Cargo.toml b/automerge-cli/Cargo.toml similarity index 76% rename from rust/automerge-cli/Cargo.toml rename to automerge-cli/Cargo.toml index 430090a6..38dec0e6 100644 --- a/rust/automerge-cli/Cargo.toml +++ b/automerge-cli/Cargo.toml @@ -4,7 +4,6 @@ version = "0.1.0" authors = ["Alex Good "] edition = "2018" license = "MIT" -rust-version = "1.57.0" [[bin]] name = "automerge" @@ -13,18 +12,17 @@ bench = false doc = false [dependencies] -clap = {version = "~4", features = ["derive"]} +clap = {version = "~3.1", features = ["derive"]} serde_json = "^1.0" anyhow = "1.0" +atty = "^0.2" thiserror = "^1.0" combine = "^4.5" maplit = "^1.0" +colored_json = "^2.1" tracing-subscriber = "~0.3" automerge = { path = "../automerge" } -is-terminal = "0.4.1" -termcolor = "1.1.3" -serde = "1.0.150" [dev-dependencies] duct = "^0.13" diff --git a/rust/automerge-cli/IDEAS.md b/automerge-cli/IDEAS.md similarity index 100% rename from rust/automerge-cli/IDEAS.md rename to automerge-cli/IDEAS.md diff --git a/rust/automerge-cli/src/change.rs b/automerge-cli/src/change.rs similarity index 100% rename from rust/automerge-cli/src/change.rs rename to 
automerge-cli/src/change.rs diff --git a/rust/automerge-cli/src/examine.rs b/automerge-cli/src/examine.rs similarity index 77% rename from rust/automerge-cli/src/examine.rs rename to automerge-cli/src/examine.rs index 0ee102fb..010fa0f1 100644 --- a/rust/automerge-cli/src/examine.rs +++ b/automerge-cli/src/examine.rs @@ -1,8 +1,6 @@ use automerge as am; use thiserror::Error; -use crate::{color_json::print_colored_json, SkipVerifyFlag}; - #[derive(Error, Debug)] pub enum ExamineError { #[error("Error reading change file: {:?}", source)] @@ -22,28 +20,21 @@ pub enum ExamineError { }, } -pub(crate) fn examine( +pub fn examine( mut input: impl std::io::Read, mut output: impl std::io::Write, - skip: SkipVerifyFlag, is_tty: bool, ) -> Result<(), ExamineError> { let mut buf: Vec = Vec::new(); input .read_to_end(&mut buf) .map_err(|e| ExamineError::ReadingChanges { source: e })?; - let doc = skip - .load(&buf) + let doc = am::Automerge::load(&buf) .map_err(|e| ExamineError::ApplyingInitialChanges { source: e })?; - let uncompressed_changes: Vec<_> = doc - .get_changes(&[]) - .unwrap() - .iter() - .map(|c| c.decode()) - .collect(); + let uncompressed_changes: Vec<_> = doc.get_changes(&[]).iter().map(|c| c.decode()).collect(); if is_tty { let json_changes = serde_json::to_value(uncompressed_changes).unwrap(); - print_colored_json(&json_changes).unwrap(); + colored_json::write_colored_json(&json_changes, &mut output).unwrap(); writeln!(output).unwrap(); } else { let json_changes = serde_json::to_string_pretty(&uncompressed_changes).unwrap(); diff --git a/rust/automerge-cli/src/export.rs b/automerge-cli/src/export.rs similarity index 81% rename from rust/automerge-cli/src/export.rs rename to automerge-cli/src/export.rs index 45f39101..7b0be98e 100644 --- a/rust/automerge-cli/src/export.rs +++ b/automerge-cli/src/export.rs @@ -1,14 +1,11 @@ use anyhow::Result; use automerge as am; -use automerge::ReadDoc; - -use crate::{color_json::print_colored_json, SkipVerifyFlag}; 
pub(crate) fn map_to_json(doc: &am::Automerge, obj: &am::ObjId) -> serde_json::Value { let keys = doc.keys(obj); let mut map = serde_json::Map::new(); for k in keys { - let val = doc.get(obj, &k); + let val = doc.value(obj, &k); match val { Ok(Some((am::Value::Object(o), exid))) if o == am::ObjType::Map || o == am::ObjType::Table => @@ -31,7 +28,7 @@ fn list_to_json(doc: &am::Automerge, obj: &am::ObjId) -> serde_json::Value { let len = doc.length(obj); let mut array = Vec::new(); for i in 0..len { - let val = doc.get(obj, i); + let val = doc.value(obj, i as usize); match val { Ok(Some((am::Value::Object(o), exid))) if o == am::ObjType::Map || o == am::ObjType::Table => @@ -53,13 +50,11 @@ fn list_to_json(doc: &am::Automerge, obj: &am::ObjId) -> serde_json::Value { fn scalar_to_json(val: &am::ScalarValue) -> serde_json::Value { match val { am::ScalarValue::Str(s) => serde_json::Value::String(s.to_string()), - am::ScalarValue::Bytes(b) | am::ScalarValue::Unknown { bytes: b, .. } => { - serde_json::Value::Array( - b.iter() - .map(|byte| serde_json::Value::Number((*byte).into())) - .collect(), - ) - } + am::ScalarValue::Bytes(b) => serde_json::Value::Array( + b.iter() + .map(|byte| serde_json::Value::Number((*byte).into())) + .collect(), + ), am::ScalarValue::Int(n) => serde_json::Value::Number((*n).into()), am::ScalarValue::Uint(n) => serde_json::Value::Number((*n).into()), am::ScalarValue::F64(n) => serde_json::Number::from_f64(*n) @@ -72,23 +67,22 @@ fn scalar_to_json(val: &am::ScalarValue) -> serde_json::Value { } } -fn get_state_json(input_data: Vec, skip: SkipVerifyFlag) -> Result { - let doc = skip.load(&input_data).unwrap(); // FIXME +fn get_state_json(input_data: Vec) -> Result { + let doc = am::Automerge::load(&input_data).unwrap(); // FIXME Ok(map_to_json(&doc, &am::ObjId::Root)) } -pub(crate) fn export_json( +pub fn export_json( mut changes_reader: impl std::io::Read, mut writer: impl std::io::Write, - skip: SkipVerifyFlag, is_tty: bool, ) -> Result<()> { 
let mut input_data = vec![]; changes_reader.read_to_end(&mut input_data)?; - let state_json = get_state_json(input_data, skip)?; + let state_json = get_state_json(input_data)?; if is_tty { - print_colored_json(&state_json).unwrap(); + colored_json::write_colored_json(&state_json, &mut writer).unwrap(); writeln!(writer).unwrap(); } else { writeln!( @@ -107,10 +101,7 @@ mod tests { #[test] fn cli_export_with_empty_input() { - assert_eq!( - get_state_json(vec![], Default::default()).unwrap(), - serde_json::json!({}) - ) + assert_eq!(get_state_json(vec![]).unwrap(), serde_json::json!({})) } #[test] @@ -124,7 +115,7 @@ mod tests { let mut backend = initialize_from_json(&initial_state_json).unwrap(); let change_bytes = backend.save(); assert_eq!( - get_state_json(change_bytes, Default::default()).unwrap(), + get_state_json(change_bytes).unwrap(), serde_json::json!({"sparrows": 15.0}) ) } @@ -151,7 +142,7 @@ mod tests { */ let change_bytes = backend.save(); assert_eq!( - get_state_json(change_bytes, Default::default()).unwrap(), + get_state_json(change_bytes).unwrap(), serde_json::json!({ "birds": { "wrens": 3.0, diff --git a/rust/automerge-cli/src/import.rs b/automerge-cli/src/import.rs similarity index 87% rename from rust/automerge-cli/src/import.rs rename to automerge-cli/src/import.rs index a9556071..9f9a3210 100644 --- a/rust/automerge-cli/src/import.rs +++ b/automerge-cli/src/import.rs @@ -22,31 +22,31 @@ fn import_map( for (key, value) in map { match value { serde_json::Value::Null => { - doc.put(obj, key, ())?; + doc.set(obj, key, ())?; } serde_json::Value::Bool(b) => { - doc.put(obj, key, *b)?; + doc.set(obj, key, *b)?; } serde_json::Value::String(s) => { - doc.put(obj, key, s)?; + doc.set(obj, key, s.as_ref())?; } serde_json::Value::Array(vec) => { - let id = doc.put_object(obj, key, am::ObjType::List)?; + let id = doc.set_object(obj, key, am::ObjType::List)?; import_list(doc, &id, vec)?; } serde_json::Value::Number(n) => { if let Some(m) = n.as_i64() { - 
doc.put(obj, key, m)?; + doc.set(obj, key, m)?; } else if let Some(m) = n.as_u64() { - doc.put(obj, key, m)?; + doc.set(obj, key, m)?; } else if let Some(m) = n.as_f64() { - doc.put(obj, key, m)?; + doc.set(obj, key, m)?; } else { anyhow::bail!("not a number"); } } serde_json::Value::Object(map) => { - let id = doc.put_object(obj, key, am::ObjType::Map)?; + let id = doc.set_object(obj, key, am::ObjType::Map)?; import_map(doc, &id, map)?; } } @@ -68,7 +68,7 @@ fn import_list( doc.insert(obj, i, *b)?; } serde_json::Value::String(s) => { - doc.insert(obj, i, s)?; + doc.insert(obj, i, s.as_ref())?; } serde_json::Value::Array(vec) => { let id = doc.insert_object(obj, i, am::ObjType::List)?; diff --git a/rust/automerge-cli/src/main.rs b/automerge-cli/src/main.rs similarity index 60% rename from rust/automerge-cli/src/main.rs rename to automerge-cli/src/main.rs index 8f3f816d..ffc13012 100644 --- a/rust/automerge-cli/src/main.rs +++ b/automerge-cli/src/main.rs @@ -1,15 +1,10 @@ use std::{fs::File, path::PathBuf, str::FromStr}; use anyhow::{anyhow, Result}; -use clap::{ - builder::{BoolishValueParser, TypedValueParser, ValueParserFactory}, - Parser, -}; -use is_terminal::IsTerminal; +use clap::Parser; -mod color_json; +//mod change; mod examine; -mod examine_sync; mod export; mod import; mod merge; @@ -21,50 +16,12 @@ struct Opts { cmd: Command, } -#[derive(clap::ValueEnum, Clone, Debug)] +#[derive(Debug)] enum ExportFormat { Json, Toml, } -#[derive(Copy, Clone, Default, Debug)] -pub(crate) struct SkipVerifyFlag(bool); - -impl SkipVerifyFlag { - fn load(&self, buf: &[u8]) -> Result { - if self.0 { - automerge::Automerge::load(buf) - } else { - automerge::Automerge::load_unverified_heads(buf) - } - } -} - -#[derive(Clone)] -struct SkipVerifyFlagParser; -impl ValueParserFactory for SkipVerifyFlag { - type Parser = SkipVerifyFlagParser; - - fn value_parser() -> Self::Parser { - SkipVerifyFlagParser - } -} - -impl TypedValueParser for SkipVerifyFlagParser { - type Value = 
SkipVerifyFlag; - - fn parse_ref( - &self, - cmd: &clap::Command, - arg: Option<&clap::Arg>, - value: &std::ffi::OsStr, - ) -> Result { - BoolishValueParser::new() - .parse_ref(cmd, arg, value) - .map(SkipVerifyFlag) - } -} - impl FromStr for ExportFormat { type Err = anyhow::Error; @@ -86,15 +43,12 @@ enum Command { format: ExportFormat, /// Path that contains Automerge changes + #[clap(parse(from_os_str))] changes_file: Option, /// The file to write to. If omitted assumes stdout - #[clap(long("out"), short('o'))] + #[clap(parse(from_os_str), long("out"), short('o'))] output_file: Option, - - /// Whether to verify the head hashes of a compressed document - #[clap(long, action = clap::ArgAction::SetFalse)] - skip_verifying_heads: SkipVerifyFlag, }, Import { @@ -102,37 +56,69 @@ enum Command { #[clap(long, short, default_value = "json")] format: ExportFormat, + #[clap(parse(from_os_str))] input_file: Option, /// Path to write Automerge changes to - #[clap(long("out"), short('o'))] + #[clap(parse(from_os_str), long("out"), short('o'))] changes_file: Option, }, - /// Read an automerge document and print a JSON representation of the changes in it to stdout - Examine { + /// Read an automerge document from a file or stdin, perform a change on it and write a new + /// document to stdout or the specified output file. + Change { + /// The change script to perform. Change scripts have the form []. + /// The possible commands are 'set', 'insert', 'delete', and 'increment'. + /// + /// Paths look like this: $["mapkey"][0]. They always lways start with a '$', then each + /// subsequent segment of the path is either a string in double quotes to index a key in a + /// map, or an integer index to address an array element. 
+ /// + /// Examples + /// + /// ## set + /// + /// > automerge change 'set $["someobject"] {"items": []}' somefile + /// + /// ## insert + /// + /// > automerge change 'insert $["someobject"]["items"][0] "item1"' somefile + /// + /// ## increment + /// + /// > automerge change 'increment $["mycounter"]' + /// + /// ## delete + /// + /// > automerge change 'delete $["someobject"]["items"]' somefile + script: String, + + /// The file to change, if omitted will assume stdin + #[clap(parse(from_os_str))] input_file: Option, - skip_verifying_heads: SkipVerifyFlag, + + /// Path to write Automerge changes to, if omitted will write to stdout + #[clap(parse(from_os_str), long("out"), short('o'))] + output_file: Option, }, - /// Read an automerge sync messaage and print a JSON representation of it - ExamineSync { input_file: Option }, + /// Read an automerge document and print a JSON representation of the changes in it to stdout + Examine { input_file: Option }, /// Read one or more automerge documents and output a merged, compacted version of them Merge { /// The file to write to. If omitted assumes stdout - #[clap(long("out"), short('o'))] + #[clap(parse(from_os_str), long("out"), short('o'))] output_file: Option, - /// The file(s) to compact. 
If empty assumes stdin input: Vec, }, } fn open_file_or_stdin(maybe_path: Option) -> Result> { - if std::io::stdin().is_terminal() { + if atty::is(atty::Stream::Stdin) { if let Some(path) = maybe_path { - Ok(Box::new(File::open(path).unwrap())) + Ok(Box::new(File::open(&path).unwrap())) } else { Err(anyhow!( "Must provide file path if not providing input via stdin" @@ -144,9 +130,9 @@ fn open_file_or_stdin(maybe_path: Option) -> Result) -> Result> { - if std::io::stdout().is_terminal() { + if atty::is(atty::Stream::Stdout) { if let Some(path) = maybe_path { - Ok(Box::new(File::create(path).unwrap())) + Ok(Box::new(File::create(&path).unwrap())) } else { Err(anyhow!("Must provide file path if not piping to stdout")) } @@ -163,22 +149,16 @@ fn main() -> Result<()> { changes_file, format, output_file, - skip_verifying_heads, } => { let output: Box = if let Some(output_file) = output_file { - Box::new(File::create(output_file)?) + Box::new(File::create(&output_file)?) } else { Box::new(std::io::stdout()) }; match format { ExportFormat::Json => { let mut in_buffer = open_file_or_stdin(changes_file)?; - export::export_json( - &mut in_buffer, - output, - skip_verifying_heads, - std::io::stdout().is_terminal(), - ) + export::export_json(&mut in_buffer, output, atty::is(atty::Stream::Stdout)) } ExportFormat::Toml => unimplemented!(), } @@ -195,30 +175,23 @@ fn main() -> Result<()> { } ExportFormat::Toml => unimplemented!(), }, - Command::Examine { - input_file, - skip_verifying_heads, + Command::Change { .. 
+ //input_file, + //output_file, + //script, } => { + unimplemented!() +/* let in_buffer = open_file_or_stdin(input_file)?; - let out_buffer = std::io::stdout(); - match examine::examine( - in_buffer, - out_buffer, - skip_verifying_heads, - std::io::stdout().is_terminal(), - ) { - Ok(()) => {} - Err(e) => { - eprintln!("Error: {:?}", e); - } - } - Ok(()) + let mut out_buffer = create_file_or_stdout(output_file)?; + change::change(in_buffer, &mut out_buffer, script.as_str()) + .map_err(|e| anyhow::format_err!("Unable to make changes: {:?}", e)) +*/ } - Command::ExamineSync { input_file } => { + Command::Examine { input_file } => { let in_buffer = open_file_or_stdin(input_file)?; let out_buffer = std::io::stdout(); - match examine_sync::examine_sync(in_buffer, out_buffer, std::io::stdout().is_terminal()) - { + match examine::examine(in_buffer, out_buffer, atty::is(atty::Stream::Stdout)) { Ok(()) => {} Err(e) => { eprintln!("Error: {:?}", e); diff --git a/rust/automerge-cli/src/merge.rs b/automerge-cli/src/merge.rs similarity index 100% rename from rust/automerge-cli/src/merge.rs rename to automerge-cli/src/merge.rs diff --git a/rust/automerge-cli/tests/integration.rs b/automerge-cli/tests/integration.rs similarity index 100% rename from rust/automerge-cli/tests/integration.rs rename to automerge-cli/tests/integration.rs diff --git a/automerge-js/.gitignore b/automerge-js/.gitignore new file mode 100644 index 00000000..5add9449 --- /dev/null +++ b/automerge-js/.gitignore @@ -0,0 +1,2 @@ +/node_modules +/yarn.lock diff --git a/automerge-js/package.json b/automerge-js/package.json new file mode 100644 index 00000000..17018429 --- /dev/null +++ b/automerge-js/package.json @@ -0,0 +1,18 @@ +{ + "name": "automerge-js", + "version": "0.1.0", + "main": "src/index.js", + "license": "MIT", + "scripts": { + "test": "mocha --bail --full-trace" + }, + "devDependencies": { + "mocha": "^9.1.1" + }, + "dependencies": { + "automerge-wasm": "file:../automerge-wasm", + "fast-sha256": 
"^1.3.0", + "pako": "^2.0.4", + "uuid": "^8.3" + } +} diff --git a/rust/automerge-wasm/test/helpers/columnar.js b/automerge-js/src/columnar.js similarity index 100% rename from rust/automerge-wasm/test/helpers/columnar.js rename to automerge-js/src/columnar.js diff --git a/rust/automerge-wasm/test/helpers/common.js b/automerge-js/src/common.js similarity index 100% rename from rust/automerge-wasm/test/helpers/common.js rename to automerge-js/src/common.js diff --git a/automerge-js/src/constants.js b/automerge-js/src/constants.js new file mode 100644 index 00000000..ea92228c --- /dev/null +++ b/automerge-js/src/constants.js @@ -0,0 +1,18 @@ +// Properties of the document root object +//const OPTIONS = Symbol('_options') // object containing options passed to init() +//const CACHE = Symbol('_cache') // map from objectId to immutable object +const STATE = Symbol('_state') // object containing metadata about current state (e.g. sequence numbers) +const HEADS = Symbol('_heads') // object containing metadata about current state (e.g. sequence numbers) +const OBJECT_ID = Symbol('_objectId') // object containing metadata about current state (e.g. sequence numbers) +const READ_ONLY = Symbol('_readOnly') // object containing metadata about current state (e.g. sequence numbers) +const FROZEN = Symbol('_frozen') // object containing metadata about current state (e.g. 
sequence numbers) + +// Properties of all Automerge objects +//const OBJECT_ID = Symbol('_objectId') // the object ID of the current object (string) +//const CONFLICTS = Symbol('_conflicts') // map or list (depending on object type) of conflicts +//const CHANGE = Symbol('_change') // the context object on proxy objects used in change callback +//const ELEM_IDS = Symbol('_elemIds') // list containing the element ID of each list element + +module.exports = { + STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN +} diff --git a/javascript/src/counter.ts b/automerge-js/src/counter.js similarity index 63% rename from javascript/src/counter.ts rename to automerge-js/src/counter.js index 88adb840..1ea56479 100644 --- a/javascript/src/counter.ts +++ b/automerge-js/src/counter.js @@ -1,16 +1,12 @@ -import { Automerge, type ObjID, type Prop } from "@automerge/automerge-wasm" -import { COUNTER } from "./constants" /** * The most basic CRDT: an integer value that can be changed only by * incrementing and decrementing. Since addition of integers is commutative, * the value trivially converges. */ -export class Counter { - value: number - - constructor(value?: number) { +class Counter { + constructor(value) { this.value = value || 0 - Reflect.defineProperty(this, COUNTER, { value: true }) + Object.freeze(this) } /** @@ -21,7 +17,7 @@ export class Counter { * concatenating it with another string, as in `x + ''`. * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf */ - valueOf(): number { + valueOf() { return this.value } @@ -30,7 +26,7 @@ export class Counter { * this method is called e.g. when you do `['value: ', x].join('')` or when * you use string interpolation: `value: ${x}`. */ - toString(): string { + toString() { return this.valueOf().toString() } @@ -38,7 +34,7 @@ export class Counter { * Returns the counter value, so that a JSON serialization of an Automerge * document represents the counter simply as an integer. 
*/ - toJSON(): number { + toJSON() { return this.value } } @@ -48,32 +44,13 @@ export class Counter { * callback. */ class WriteableCounter extends Counter { - context: Automerge - path: Prop[] - objectId: ObjID - key: Prop - - constructor( - value: number, - context: Automerge, - path: Prop[], - objectId: ObjID, - key: Prop - ) { - super(value) - this.context = context - this.path = path - this.objectId = objectId - this.key = key - } - /** * Increases the value of the counter by `delta`. If `delta` is not given, * increases the value of the counter by 1. */ - increment(delta: number): number { - delta = typeof delta === "number" ? delta : 1 - this.context.increment(this.objectId, this.key, delta) + increment(delta) { + delta = typeof delta === 'number' ? delta : 1 + this.context.inc(this.objectId, this.key, delta) this.value += delta return this.value } @@ -82,8 +59,8 @@ class WriteableCounter extends Counter { * Decreases the value of the counter by `delta`. If `delta` is not given, * decreases the value of the counter by 1. */ - decrement(delta: number): number { - return this.increment(typeof delta === "number" ? -delta : -1) + decrement(delta) { + return this.inc(typeof delta === 'number' ? -delta : -1) } } @@ -93,15 +70,15 @@ class WriteableCounter extends Counter { * `objectId` is the ID of the object containing the counter, and `key` is * the property name (key in map, or index in list) where the counter is * located. 
- */ -export function getWriteableCounter( - value: number, - context: Automerge, - path: Prop[], - objectId: ObjID, - key: Prop -): WriteableCounter { - return new WriteableCounter(value, context, path, objectId, key) +*/ +function getWriteableCounter(value, context, path, objectId, key) { + const instance = Object.create(WriteableCounter.prototype) + instance.value = value + instance.context = context + instance.path = path + instance.objectId = objectId + instance.key = key + return instance } -//module.exports = { Counter, getWriteableCounter } +module.exports = { Counter, getWriteableCounter } diff --git a/rust/automerge-wasm/test/helpers/encoding.js b/automerge-js/src/encoding.js similarity index 100% rename from rust/automerge-wasm/test/helpers/encoding.js rename to automerge-js/src/encoding.js diff --git a/automerge-js/src/index.js b/automerge-js/src/index.js new file mode 100644 index 00000000..326fc967 --- /dev/null +++ b/automerge-js/src/index.js @@ -0,0 +1,372 @@ +const AutomergeWASM = require("automerge-wasm") +const uuid = require('./uuid') + +let { rootProxy, listProxy, textProxy, mapProxy } = require("./proxies") +let { Counter } = require("./counter") +let { Text } = require("./text") +let { Int, Uint, Float64 } = require("./numbers") +let { STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN } = require("./constants") + +function init(actor) { + if (typeof actor != 'string') { + actor = null + } + const state = AutomergeWASM.create(actor) + return rootProxy(state, true); +} + +function clone(doc) { + const state = doc[STATE].clone() + return rootProxy(state, true); +} + +function free(doc) { + return doc[STATE].free() +} + +function from(data, actor) { + let doc1 = init(actor) + let doc2 = change(doc1, (d) => Object.assign(d, data)) + return doc2 +} + +function change(doc, options, callback) { + if (callback === undefined) { + // FIXME implement options + callback = options + options = {} + } + if (typeof options === "string") { + options = { message: 
options } + } + if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") { + throw new RangeError("must be the document root"); + } + if (doc[FROZEN] === true) { + throw new RangeError("Attempting to use an outdated Automerge document") + } + if (!!doc[HEADS] === true) { + throw new RangeError("Attempting to change an out of date document"); + } + if (doc[READ_ONLY] === false) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const state = doc[STATE] + const heads = state.getHeads() + try { + doc[HEADS] = heads + doc[FROZEN] = true + let root = rootProxy(state); + callback(root) + if (state.pendingOps() === 0) { + doc[FROZEN] = false + doc[HEADS] = undefined + return doc + } else { + state.commit(options.message, options.time) + return rootProxy(state, true); + } + } catch (e) { + //console.log("ERROR: ",e) + doc[FROZEN] = false + doc[HEADS] = undefined + state.rollback() + throw e + } +} + +function emptyChange(doc, options) { + if (options === undefined) { + options = {} + } + if (typeof options === "string") { + options = { message: options } + } + + if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") { + throw new RangeError("must be the document root"); + } + if (doc[FROZEN] === true) { + throw new RangeError("Attempting to use an outdated Automerge document") + } + if (doc[READ_ONLY] === false) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + + const state = doc[STATE] + state.commit(options.message, options.time) + return rootProxy(state, true); +} + +function load(data, actor) { + const state = AutomergeWASM.loadDoc(data, actor) + return rootProxy(state, true); +} + +function save(doc) { + const state = doc[STATE] + return state.save() +} + +function merge(local, remote) { + if (local[HEADS] === true) { + throw new RangeError("Attempting to change an out of date document"); + } + const localState = local[STATE] + const heads = localState.getHeads() + 
const remoteState = remote[STATE] + const changes = localState.getChangesAdded(remoteState) + localState.applyChanges(changes) + local[HEADS] = heads + return rootProxy(localState, true) +} + +function getActorId(doc) { + const state = doc[STATE] + return state.getActorId() +} + +function conflictAt(context, objectId, prop) { + let values = context.values(objectId, prop) + if (values.length <= 1) { + return + } + let result = {} + for (const conflict of values) { + const datatype = conflict[0] + const value = conflict[1] + switch (datatype) { + case "map": + result[value] = mapProxy(context, value, [ prop ], true) + break; + case "list": + result[value] = listProxy(context, value, [ prop ], true) + break; + case "text": + result[value] = textProxy(context, value, [ prop ], true) + break; + //case "table": + //case "cursor": + case "str": + case "uint": + case "int": + case "f64": + case "boolean": + case "bytes": + case "null": + result[conflict[2]] = value + break; + case "counter": + result[conflict[2]] = new Counter(value) + break; + case "timestamp": + result[conflict[2]] = new Date(value) + break; + default: + throw RangeError(`datatype ${datatype} unimplemented`) + } + } + return result +} + +function getConflicts(doc, prop) { + const state = doc[STATE] + const objectId = doc[OBJECT_ID] + return conflictAt(state, objectId, prop) +} + +function getLastLocalChange(doc) { + const state = doc[STATE] + try { + return state.getLastLocalChange() + } catch (e) { + return + } +} + +function getObjectId(doc) { + return doc[OBJECT_ID] +} + +function getChanges(oldState, newState) { + const o = oldState[STATE] + const n = newState[STATE] + const heads = oldState[HEADS] + return n.getChanges(heads || o.getHeads()) +} + +function getAllChanges(doc) { + const state = doc[STATE] + return state.getChanges([]) +} + +function applyChanges(doc, changes) { + if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") { + throw new RangeError("must be the 
document root"); + } + if (doc[FROZEN] === true) { + throw new RangeError("Attempting to use an outdated Automerge document") + } + if (doc[READ_ONLY] === false) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const state = doc[STATE] + const heads = state.getHeads() + state.applyChanges(changes) + doc[HEADS] = heads + return [rootProxy(state, true)]; +} + +function getHistory(doc) { + const actor = getActorId(doc) + const history = getAllChanges(doc) + return history.map((change, index) => ({ + get change () { + return decodeChange(change) + }, + get snapshot () { + const [state] = applyChanges(init(), history.slice(0, index + 1)) + return state + } + }) + ) +} + +function equals() { + if (!isObject(val1) || !isObject(val2)) return val1 === val2 + const keys1 = Object.keys(val1).sort(), keys2 = Object.keys(val2).sort() + if (keys1.length !== keys2.length) return false + for (let i = 0; i < keys1.length; i++) { + if (keys1[i] !== keys2[i]) return false + if (!equals(val1[keys1[i]], val2[keys2[i]])) return false + } + return true +} + +function encodeSyncMessage(msg) { + return AutomergeWASM.encodeSyncMessage(msg) +} + +function decodeSyncMessage(msg) { + return AutomergeWASM.decodeSyncMessage(msg) +} + +function encodeSyncState(state) { + return AutomergeWASM.encodeSyncState(AutomergeWASM.importSyncState(state)) +} + +function decodeSyncState(state) { + return AutomergeWASM.exportSyncState(AutomergeWASM.decodeSyncState(state)) +} + +function generateSyncMessage(doc, inState) { + const state = doc[STATE] + const syncState = AutomergeWASM.importSyncState(inState) + const message = state.generateSyncMessage(syncState) + const outState = AutomergeWASM.exportSyncState(syncState) + return [ outState, message ] +} + +function receiveSyncMessage(doc, inState, message) { + const syncState = AutomergeWASM.importSyncState(inState) + if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") { + throw new RangeError("must be 
the document root"); + } + if (doc[FROZEN] === true) { + throw new RangeError("Attempting to use an outdated Automerge document") + } + if (!!doc[HEADS] === true) { + throw new RangeError("Attempting to change an out of date document"); + } + if (doc[READ_ONLY] === false) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const state = doc[STATE] + const heads = state.getHeads() + state.receiveSyncMessage(syncState, message) + const outState = AutomergeWASM.exportSyncState(syncState) + doc[HEADS] = heads + return [rootProxy(state, true), outState, null]; +} + +function initSyncState() { + return AutomergeWASM.exportSyncState(AutomergeWASM.initSyncState(change)) +} + +function encodeChange(change) { + return AutomergeWASM.encodeChange(change) +} + +function decodeChange(data) { + return AutomergeWASM.decodeChange(data) +} + +function encodeSyncMessage(change) { + return AutomergeWASM.encodeSyncMessage(change) +} + +function decodeSyncMessage(data) { + return AutomergeWASM.decodeSyncMessage(data) +} + +function getMissingDeps(doc, heads) { + const state = doc[STATE] + return state.getMissingDeps(heads) +} + +function getHeads(doc) { + const state = doc[STATE] + return doc[HEADS] || state.getHeads() +} + +function dump(doc) { + const state = doc[STATE] + state.dump() +} + +function toJS(doc) { + if (typeof doc === "object") { + if (doc instanceof Uint8Array) { + return doc + } + if (doc === null) { + return doc + } + if (doc instanceof Array) { + return doc.map((a) => toJS(a)) + } + if (doc instanceof Text) { + return doc.map((a) => toJS(a)) + } + let tmp = {} + for (index in doc) { + tmp[index] = toJS(doc[index]) + } + return tmp + } else { + return doc + } +} + +module.exports = { + init, from, change, emptyChange, clone, free, + load, save, merge, getChanges, getAllChanges, applyChanges, + getLastLocalChange, getObjectId, getActorId, getConflicts, + encodeChange, decodeChange, equals, getHistory, getHeads, uuid, + generateSyncMessage, 
receiveSyncMessage, initSyncState, + decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState, + getMissingDeps, + dump, Text, Counter, Int, Uint, Float64, toJS, +} + +// depricated +// Frontend, setDefaultBackend, Backend + +// more... +/* +for (let name of ['getObjectId', 'getObjectById', + 'setActorId', + 'Text', 'Table', 'Counter', 'Observable' ]) { + module.exports[name] = Frontend[name] +} +*/ diff --git a/automerge-js/src/numbers.js b/automerge-js/src/numbers.js new file mode 100644 index 00000000..1ee22dee --- /dev/null +++ b/automerge-js/src/numbers.js @@ -0,0 +1,33 @@ +// Convience classes to allow users to stricly specify the number type they want + +class Int { + constructor(value) { + if (!(Number.isInteger(value) && value <= Number.MAX_SAFE_INTEGER && value >= Number.MIN_SAFE_INTEGER)) { + throw new RangeError(`Value ${value} cannot be a uint`) + } + this.value = value + Object.freeze(this) + } +} + +class Uint { + constructor(value) { + if (!(Number.isInteger(value) && value <= Number.MAX_SAFE_INTEGER && value >= 0)) { + throw new RangeError(`Value ${value} cannot be a uint`) + } + this.value = value + Object.freeze(this) + } +} + +class Float64 { + constructor(value) { + if (typeof value !== 'number') { + throw new RangeError(`Value ${value} cannot be a float64`) + } + this.value = value || 0.0 + Object.freeze(this) + } +} + +module.exports = { Int, Uint, Float64 } diff --git a/automerge-js/src/proxies.js b/automerge-js/src/proxies.js new file mode 100644 index 00000000..f9e27855 --- /dev/null +++ b/automerge-js/src/proxies.js @@ -0,0 +1,623 @@ + +const AutomergeWASM = require("automerge-wasm") +const { Int, Uint, Float64 } = require("./numbers"); +const { Counter, getWriteableCounter } = require("./counter"); +const { Text } = require("./text"); +const { STATE, HEADS, FROZEN, OBJECT_ID, READ_ONLY } = require("./constants") + +function parseListIndex(key) { + if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10) 
+ if (typeof key !== 'number') { + // throw new TypeError('A list index must be a number, but you passed ' + JSON.stringify(key)) + return key + } + if (key < 0 || isNaN(key) || key === Infinity || key === -Infinity) { + throw new RangeError('A list index must be positive, but you passed ' + key) + } + return key +} + +function valueAt(target, prop) { + const { context, objectId, path, readonly, heads} = target + let value = context.value(objectId, prop, heads) + if (value === undefined) { + return + } + const datatype = value[0] + const val = value[1] + switch (datatype) { + case undefined: return; + case "map": return mapProxy(context, val, [ ... path, prop ], readonly, heads); + case "list": return listProxy(context, val, [ ... path, prop ], readonly, heads); + case "text": return textProxy(context, val, [ ... path, prop ], readonly, heads); + //case "table": + //case "cursor": + case "str": return val; + case "uint": return val; + case "int": return val; + case "f64": return val; + case "boolean": return val; + case "null": return null; + case "bytes": return val; + case "timestamp": return val; + case "counter": { + if (readonly) { + return new Counter(val); + } else { + return getWriteableCounter(val, context, path, objectId, prop) + } + } + default: + throw RangeError(`datatype ${datatype} unimplemented`) + } +} + +function import_value(value) { + switch (typeof value) { + case 'object': + if (value == null) { + return [ null, "null"] + } else if (value instanceof Uint) { + return [ value.value, "uint" ] + } else if (value instanceof Int) { + return [ value.value, "int" ] + } else if (value instanceof Float64) { + return [ value.value, "f64" ] + } else if (value instanceof Counter) { + return [ value.value, "counter" ] + } else if (value instanceof Date) { + return [ value.getTime(), "timestamp" ] + } else if (value instanceof Uint8Array) { + return [ value, "bytes" ] + } else if (value instanceof Array) { + return [ value, "list" ] + } else if (value 
instanceof Text) { + return [ value, "text" ] + } else if (value[OBJECT_ID]) { + throw new RangeError('Cannot create a reference to an existing document object') + } else { + return [ value, "map" ] + } + break; + case 'boolean': + return [ value, "boolean" ] + case 'number': + if (Number.isInteger(value)) { + return [ value, "int" ] + } else { + return [ value, "f64" ] + } + break; + case 'string': + return [ value ] + break; + default: + throw new RangeError(`Unsupported type of value: ${typeof value}`) + } +} + +const MapHandler = { + get (target, key) { + const { context, objectId, path, readonly, frozen, heads, cache } = target + if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] } + if (key === OBJECT_ID) return objectId + if (key === READ_ONLY) return readonly + if (key === FROZEN) return frozen + if (key === HEADS) return heads + if (key === STATE) return context; + if (!cache[key]) { + cache[key] = valueAt(target, key) + } + return cache[key] + }, + + set (target, key, val) { + let { context, objectId, path, readonly, frozen} = target + target.cache = {} // reset cache on set + if (val && val[OBJECT_ID]) { + throw new RangeError('Cannot create a reference to an existing document object') + } + if (key === FROZEN) { + target.frozen = val + return + } + if (key === HEADS) { + target.heads = val + return + } + let [ value, datatype ] = import_value(val) + if (frozen) { + throw new RangeError("Attempting to use an outdated Automerge document") + } + if (readonly) { + throw new RangeError(`Object property "${key}" cannot be modified`) + } + switch (datatype) { + case "list": + const list = context.set_object(objectId, key, []) + const proxyList = listProxy(context, list, [ ... path, key ], readonly ); + for (let i = 0; i < value.length; i++) { + proxyList[i] = value[i] + } + break; + case "text": + const text = context.set_object(objectId, key, "", "text") + const proxyText = textProxy(context, text, [ ... 
path, key ], readonly ); + for (let i = 0; i < value.length; i++) { + proxyText[i] = value.get(i) + } + break; + case "map": + const map = context.set_object(objectId, key, {}) + const proxyMap = mapProxy(context, map, [ ... path, key ], readonly ); + for (const key in value) { + proxyMap[key] = value[key] + } + break; + default: + context.set(objectId, key, value, datatype) + } + return true + }, + + deleteProperty (target, key) { + const { context, objectId, path, readonly, frozen } = target + target.cache = {} // reset cache on delete + if (readonly) { + throw new RangeError(`Object property "${key}" cannot be modified`) + } + context.del(objectId, key) + return true + }, + + has (target, key) { + const value = this.get(target, key) + return value !== undefined + }, + + getOwnPropertyDescriptor (target, key) { + const { context, objectId } = target + const value = this.get(target, key) + if (typeof value !== 'undefined') { + return { + configurable: true, enumerable: true, value + } + } + }, + + ownKeys (target) { + const { context, objectId, heads} = target + return context.keys(objectId, heads) + }, +} + + +const ListHandler = { + get (target, index) { + const {context, objectId, path, readonly, frozen, heads } = target + index = parseListIndex(index) + if (index === Symbol.hasInstance) { return (instance) => { return [].has(instance) } } + if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] } + if (index === OBJECT_ID) return objectId + if (index === READ_ONLY) return readonly + if (index === FROZEN) return frozen + if (index === HEADS) return heads + if (index === STATE) return context; + if (index === 'length') return context.length(objectId, heads); + if (index === Symbol.iterator) { + let i = 0; + return function *() { + // FIXME - ugly + let value = valueAt(target, i) + while (value !== undefined) { + yield value + i += 1 + value = valueAt(target, i) + } + } + } + if (typeof index === 'number') { + return valueAt(target, index) + } else 
{ + return listMethods(target)[index] + } + }, + + set (target, index, val) { + let {context, objectId, path, readonly, frozen } = target + index = parseListIndex(index) + if (val && val[OBJECT_ID]) { + throw new RangeError('Cannot create a reference to an existing document object') + } + if (index === FROZEN) { + target.frozen = val + return + } + if (index === HEADS) { + target.heads = val + return + } + if (typeof index == "string") { + throw new RangeError('list index must be a number') + } + const [ value, datatype] = import_value(val) + if (frozen) { + throw new RangeError("Attempting to use an outdated Automerge document") + } + if (readonly) { + throw new RangeError(`Object property "${index}" cannot be modified`) + } + switch (datatype) { + case "list": + let list + if (index >= context.length(objectId)) { + list = context.insert_object(objectId, index, []) + } else { + list = context.set_object(objectId, index, []) + } + const proxyList = listProxy(context, list, [ ... path, index ], readonly); + proxyList.splice(0,0,...value) + break; + case "text": + let text + if (index >= context.length(objectId)) { + text = context.insert_object(objectId, index, "", "text") + } else { + text = context.set_object(objectId, index, "", "text") + } + const proxyText = textProxy(context, text, [ ... path, index ], readonly); + proxyText.splice(0,0,...value) + break; + case "map": + let map + if (index >= context.length(objectId)) { + map = context.insert_object(objectId, index, {}) + } else { + map = context.set_object(objectId, index, {}) + } + const proxyMap = mapProxy(context, map, [ ... 
path, index ], readonly); + for (const key in value) { + proxyMap[key] = value[key] + } + break; + default: + if (index >= context.length(objectId)) { + context.insert(objectId, index, value, datatype) + } else { + context.set(objectId, index, value, datatype) + } + } + return true + }, + + deleteProperty (target, index) { + const {context, objectId} = target + index = parseListIndex(index) + if (context.value(objectId, index)[0] == "counter") { + throw new TypeError('Unsupported operation: deleting a counter from a list') + } + context.del(objectId, index) + return true + }, + + has (target, index) { + const {context, objectId, heads} = target + index = parseListIndex(index) + if (typeof index === 'number') { + return index < context.length(objectId, heads) + } + return index === 'length' + }, + + getOwnPropertyDescriptor (target, index) { + const {context, objectId, path, readonly, frozen, heads} = target + + if (index === 'length') return {writable: true, value: context.length(objectId, heads) } + if (index === OBJECT_ID) return {configurable: false, enumerable: false, value: objectId} + + index = parseListIndex(index) + + let value = valueAt(target, index) + return { configurable: true, enumerable: true, value } + }, + + getPrototypeOf(target) { return Object.getPrototypeOf([]) }, + ownKeys (target) { + const {context, objectId, heads } = target + let keys = [] + // uncommenting this causes assert.deepEqual() to fail when comparing to a pojo array + // but not uncommenting it causes for (i in list) {} to not enumerate values properly + //for (let i = 0; i < target.context.length(objectId, heads); i++) { keys.push(i.toString()) } + keys.push("length"); + return keys + } +} + +const TextHandler = Object.assign({}, ListHandler, { + get (target, index) { + // FIXME this is a one line change from ListHandler.get() + const {context, objectId, path, readonly, frozen, heads } = target + index = parseListIndex(index) + if (index === Symbol.toStringTag) { return 
target[Symbol.toStringTag] } + if (index === Symbol.hasInstance) { return (instance) => { return [].has(instance) } } + if (index === OBJECT_ID) return objectId + if (index === READ_ONLY) return readonly + if (index === FROZEN) return frozen + if (index === HEADS) return heads + if (index === STATE) return context; + if (index === 'length') return context.length(objectId, heads); + if (index === Symbol.iterator) { + let i = 0; + return function *() { + let value = valueAt(target, i) + while (value !== undefined) { + yield value + i += 1 + value = valueAt(target, i) + } + } + } + if (typeof index === 'number') { + return valueAt(target, index) + } else { + return textMethods(target)[index] || listMethods(target)[index] + } + }, + getPrototypeOf(target) { + return Object.getPrototypeOf(new Text()) + }, +}) + +function mapProxy(context, objectId, path, readonly, heads) { + return new Proxy({context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}, MapHandler) +} + +function listProxy(context, objectId, path, readonly, heads) { + let target = [] + Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) + return new Proxy(target, ListHandler) +} + +function textProxy(context, objectId, path, readonly, heads) { + let target = [] + Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) + return new Proxy(target, TextHandler) +} + +function rootProxy(context, readonly) { + return mapProxy(context, "_root", [], readonly) +} + +function listMethods(target) { + const {context, objectId, path, readonly, frozen, heads} = target + const methods = { + deleteAt(index, numDelete) { + if (typeof numDelete === 'number') { + context.splice(objectId, index, numDelete) + } else { + context.del(objectId, index) + } + return this + }, + + fill(val, start, end) { + // FIXME + let list = context.getObject(objectId) + let [value, datatype] = valueAt(target, index) + for 
(let index = parseListIndex(start || 0); index < parseListIndex(end || list.length); index++) { + context.set(objectId, index, value, datatype) + } + return this + }, + + indexOf(o, start = 0) { + // FIXME + const id = o[OBJECT_ID] + if (id) { + const list = context.getObject(objectId) + for (let index = start; index < list.length; index++) { + if (list[index][OBJECT_ID] === id) { + return index + } + } + return -1 + } else { + return context.indexOf(objectId, o, start) + } + }, + + insertAt(index, ...values) { + this.splice(index, 0, ...values) + return this + }, + + pop() { + let length = context.length(objectId) + if (length == 0) { + return undefined + } + let last = valueAt(target, length - 1) + context.del(objectId, length - 1) + return last + }, + + push(...values) { + let len = context.length(objectId) + this.splice(len, 0, ...values) + return context.length(objectId) + }, + + shift() { + if (context.length(objectId) == 0) return + const first = valueAt(target, 0) + context.del(objectId, 0) + return first + }, + + splice(index, del, ...vals) { + index = parseListIndex(index) + del = parseListIndex(del) + for (let val of vals) { + if (val && val[OBJECT_ID]) { + throw new RangeError('Cannot create a reference to an existing document object') + } + } + if (frozen) { + throw new RangeError("Attempting to use an outdated Automerge document") + } + if (readonly) { + throw new RangeError("Sequence object cannot be modified outside of a change block") + } + let result = [] + for (let i = 0; i < del; i++) { + let value = valueAt(target, index) + result.push(value) + context.del(objectId, index) + } + const values = vals.map((val) => import_value(val)) + for (let [value,datatype] of values) { + switch (datatype) { + case "list": + const list = context.insert_object(objectId, index, []) + const proxyList = listProxy(context, list, [ ... 
path, index ], readonly); + proxyList.splice(0,0,...value) + break; + case "text": + const text = context.insert_object(objectId, index, "", "text") + const proxyText = textProxy(context, text, [ ... path, index ], readonly); + proxyText.splice(0,0,...value) + break; + case "map": + const map = context.insert_object(objectId, index, {}) + const proxyMap = mapProxy(context, map, [ ... path, index ], readonly); + for (const key in value) { + proxyMap[key] = value[key] + } + break; + default: + context.insert(objectId, index, value, datatype) + } + index += 1 + } + return result + }, + + unshift(...values) { + this.splice(0, 0, ...values) + return context.length(objectId) + }, + + entries() { + let i = 0; + const iterator = { + next: () => { + let value = valueAt(target, i) + if (value === undefined) { + return { value: undefined, done: true } + } else { + return { value: [ i, value ], done: false } + } + } + } + return iterator + }, + + keys() { + let i = 0; + let len = context.length(objectId, heads) + const iterator = { + next: () => { + let value = undefined + if (i < len) { value = i; i++ } + return { value, done: true } + } + } + return iterator + }, + + values() { + let i = 0; + const iterator = { + next: () => { + let value = valueAt(target, i) + if (value === undefined) { + return { value: undefined, done: true } + } else { + return { value, done: false } + } + } + } + return iterator + } + } + + // Read-only methods that can delegate to the JavaScript built-in implementations + // FIXME - super slow + for (let method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes', + 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight', + 'slice', 'some', 'toLocaleString', 'toString']) { + methods[method] = (...args) => { + const list = [] + while (true) { + let value = valueAt(target, list.length) + if (value == undefined) { + break + } + list.push(value) + } + + return list[method](...args) + } + } + + return methods +} + +function 
textMethods(target) { + const {context, objectId, path, readonly, frozen} = target + const methods = { + set (index, value) { + return this[index] = value + }, + get (index) { + return this[index] + }, + toString () { + let str = '' + let length = this.length + for (let i = 0; i < length; i++) { + const value = this.get(i) + if (typeof value === 'string') str += value + } + return str + }, + toSpans () { + let spans = [] + let chars = '' + let length = this.length + for (let i = 0; i < length; i++) { + const value = this[i] + if (typeof value === 'string') { + chars += value + } else { + if (chars.length > 0) { + spans.push(chars) + chars = '' + } + spans.push(value) + } + } + if (chars.length > 0) { + spans.push(chars) + } + return spans + }, + toJSON () { + return this.toString() + } + } + return methods +} + + +module.exports = { rootProxy, textProxy, listProxy, mapProxy, MapHandler, ListHandler, TextHandler } diff --git a/javascript/test/legacy/sync.js b/automerge-js/src/sync.js similarity index 80% rename from javascript/test/legacy/sync.js rename to automerge-js/src/sync.js index 233c4292..2ae3f4e4 100644 --- a/javascript/test/legacy/sync.js +++ b/automerge-js/src/sync.js @@ -16,15 +16,11 @@ * last sync to disk), and we fall back to sending the entire document in this case. 
*/ -const Backend = null //require('./backend') -const { - hexStringToBytes, - bytesToHexString, - Encoder, - Decoder, -} = require("./encoding") -const { decodeChangeMeta } = require("./columnar") -const { copyObject } = require("./common") +//const Backend = require('./backend') +const Backend = {} //require('./backend') +const { hexStringToBytes, bytesToHexString, Encoder, Decoder } = require('./encoding') +const { decodeChangeMeta } = require('./columnar') +const { copyObject } = require('../src/common') const HASH_SIZE = 32 // 256 bits = 32 bytes const MESSAGE_TYPE_SYNC = 0x42 // first byte of a sync message, for identification @@ -33,8 +29,7 @@ const PEER_STATE_TYPE = 0x43 // first byte of an encoded peer state, for identif // These constants correspond to a 1% false positive rate. The values can be changed without // breaking compatibility of the network protocol, since the parameters used for a particular // Bloom filter are encoded in the wire format. -const BITS_PER_ENTRY = 10, - NUM_PROBES = 7 +const BITS_PER_ENTRY = 10, NUM_PROBES = 7 /** * A Bloom filter implementation that can be serialised to a byte array for transmission @@ -42,15 +37,13 @@ const BITS_PER_ENTRY = 10, * so this implementation does not perform its own hashing. 
*/ class BloomFilter { - constructor(arg) { + constructor (arg) { if (Array.isArray(arg)) { // arg is an array of SHA256 hashes in hexadecimal encoding this.numEntries = arg.length this.numBitsPerEntry = BITS_PER_ENTRY this.numProbes = NUM_PROBES - this.bits = new Uint8Array( - Math.ceil((this.numEntries * this.numBitsPerEntry) / 8) - ) + this.bits = new Uint8Array(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) for (let hash of arg) this.addHash(hash) } else if (arg instanceof Uint8Array) { if (arg.byteLength === 0) { @@ -63,12 +56,10 @@ class BloomFilter { this.numEntries = decoder.readUint32() this.numBitsPerEntry = decoder.readUint32() this.numProbes = decoder.readUint32() - this.bits = decoder.readRawBytes( - Math.ceil((this.numEntries * this.numBitsPerEntry) / 8) - ) + this.bits = decoder.readRawBytes(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) } } else { - throw new TypeError("invalid argument") + throw new TypeError('invalid argument') } } @@ -96,32 +87,12 @@ class BloomFilter { * http://www.ccis.northeastern.edu/home/pete/pub/bloom-filters-verification.pdf */ getProbes(hash) { - const hashBytes = hexStringToBytes(hash), - modulo = 8 * this.bits.byteLength - if (hashBytes.byteLength !== 32) - throw new RangeError(`Not a 256-bit hash: ${hash}`) + const hashBytes = hexStringToBytes(hash), modulo = 8 * this.bits.byteLength + if (hashBytes.byteLength !== 32) throw new RangeError(`Not a 256-bit hash: ${hash}`) // on the next three lines, the right shift means interpret value as unsigned - let x = - ((hashBytes[0] | - (hashBytes[1] << 8) | - (hashBytes[2] << 16) | - (hashBytes[3] << 24)) >>> - 0) % - modulo - let y = - ((hashBytes[4] | - (hashBytes[5] << 8) | - (hashBytes[6] << 16) | - (hashBytes[7] << 24)) >>> - 0) % - modulo - let z = - ((hashBytes[8] | - (hashBytes[9] << 8) | - (hashBytes[10] << 16) | - (hashBytes[11] << 24)) >>> - 0) % - modulo + let x = ((hashBytes[0] | hashBytes[1] << 8 | hashBytes[2] << 16 | hashBytes[3] << 24) >>> 0) % 
modulo + let y = ((hashBytes[4] | hashBytes[5] << 8 | hashBytes[6] << 16 | hashBytes[7] << 24) >>> 0) % modulo + let z = ((hashBytes[8] | hashBytes[9] << 8 | hashBytes[10] << 16 | hashBytes[11] << 24) >>> 0) % modulo const probes = [x] for (let i = 1; i < this.numProbes; i++) { x = (x + y) % modulo @@ -158,14 +129,12 @@ class BloomFilter { * Encodes a sorted array of SHA-256 hashes (as hexadecimal strings) into a byte array. */ function encodeHashes(encoder, hashes) { - if (!Array.isArray(hashes)) throw new TypeError("hashes must be an array") + if (!Array.isArray(hashes)) throw new TypeError('hashes must be an array') encoder.appendUint32(hashes.length) for (let i = 0; i < hashes.length; i++) { - if (i > 0 && hashes[i - 1] >= hashes[i]) - throw new RangeError("hashes must be sorted") + if (i > 0 && hashes[i - 1] >= hashes[i]) throw new RangeError('hashes must be sorted') const bytes = hexStringToBytes(hashes[i]) - if (bytes.byteLength !== HASH_SIZE) - throw new TypeError("heads hashes must be 256 bits") + if (bytes.byteLength !== HASH_SIZE) throw new TypeError('heads hashes must be 256 bits') encoder.appendRawBytes(bytes) } } @@ -175,8 +144,7 @@ function encodeHashes(encoder, hashes) { * array of hex strings. 
*/ function decodeHashes(decoder) { - let length = decoder.readUint32(), - hashes = [] + let length = decoder.readUint32(), hashes = [] for (let i = 0; i < length; i++) { hashes.push(bytesToHexString(decoder.readRawBytes(HASH_SIZE))) } @@ -216,11 +184,11 @@ function decodeSyncMessage(bytes) { const heads = decodeHashes(decoder) const need = decodeHashes(decoder) const haveCount = decoder.readUint32() - let message = { heads, need, have: [], changes: [] } + let message = {heads, need, have: [], changes: []} for (let i = 0; i < haveCount; i++) { const lastSync = decodeHashes(decoder) const bloom = decoder.readPrefixedBytes(decoder) - message.have.push({ lastSync, bloom }) + message.have.push({lastSync, bloom}) } const changeCount = decoder.readUint32() for (let i = 0; i < changeCount; i++) { @@ -267,7 +235,7 @@ function decodeSyncState(bytes) { function makeBloomFilter(backend, lastSync) { const newChanges = Backend.getChanges(backend, lastSync) const hashes = newChanges.map(change => decodeChangeMeta(change, true).hash) - return { lastSync, bloom: new BloomFilter(hashes).bytes } + return {lastSync, bloom: new BloomFilter(hashes).bytes} } /** @@ -278,26 +246,20 @@ function makeBloomFilter(backend, lastSync) { */ function getChangesToSend(backend, have, need) { if (have.length === 0) { - return need - .map(hash => Backend.getChangeByHash(backend, hash)) - .filter(change => change !== undefined) + return need.map(hash => Backend.getChangeByHash(backend, hash)).filter(change => change !== undefined) } - let lastSyncHashes = {}, - bloomFilters = [] + let lastSyncHashes = {}, bloomFilters = [] for (let h of have) { for (let hash of h.lastSync) lastSyncHashes[hash] = true bloomFilters.push(new BloomFilter(h.bloom)) } // Get all changes that were added since the last sync - const changes = Backend.getChanges(backend, Object.keys(lastSyncHashes)).map( - change => decodeChangeMeta(change, true) - ) + const changes = Backend.getChanges(backend, Object.keys(lastSyncHashes)) + 
.map(change => decodeChangeMeta(change, true)) - let changeHashes = {}, - dependents = {}, - hashesToSend = {} + let changeHashes = {}, dependents = {}, hashesToSend = {} for (let change of changes) { changeHashes[change.hash] = true @@ -331,8 +293,7 @@ function getChangesToSend(backend, have, need) { let changesToSend = [] for (let hash of need) { hashesToSend[hash] = true - if (!changeHashes[hash]) { - // Change is not among those returned by getMissingChanges()? + if (!changeHashes[hash]) { // Change is not among those returned by getMissingChanges()? const change = Backend.getChangeByHash(backend, hash) if (change) changesToSend.push(change) } @@ -357,7 +318,7 @@ function initSyncState() { } function compareArrays(a, b) { - return a.length === b.length && a.every((v, i) => v === b[i]) + return (a.length === b.length) && a.every((v, i) => v === b[i]) } /** @@ -369,19 +330,10 @@ function generateSyncMessage(backend, syncState) { throw new Error("generateSyncMessage called with no Automerge document") } if (!syncState) { - throw new Error( - "generateSyncMessage requires a syncState, which can be created with initSyncState()" - ) + throw new Error("generateSyncMessage requires a syncState, which can be created with initSyncState()") } - let { - sharedHeads, - lastSentHeads, - theirHeads, - theirNeed, - theirHave, - sentHashes, - } = syncState + let { sharedHeads, lastSentHeads, theirHeads, theirNeed, theirHave, sentHashes } = syncState const ourHeads = Backend.getHeads(backend) // Hashes to explicitly request from the remote peer: any missing dependencies of unapplied @@ -405,28 +357,18 @@ function generateSyncMessage(backend, syncState) { const lastSync = theirHave[0].lastSync if (!lastSync.every(hash => Backend.getChangeByHash(backend, hash))) { // we need to queue them to send us a fresh sync message, the one they sent is uninteligible so we don't know what they need - const resetMsg = { - heads: ourHeads, - need: [], - have: [{ lastSync: [], bloom: new 
Uint8Array(0) }], - changes: [], - } + const resetMsg = {heads: ourHeads, need: [], have: [{ lastSync: [], bloom: new Uint8Array(0) }], changes: []} return [syncState, encodeSyncMessage(resetMsg)] } } // XXX: we should limit ourselves to only sending a subset of all the messages, probably limited by a total message size // these changes should ideally be RLE encoded but we haven't implemented that yet. - let changesToSend = - Array.isArray(theirHave) && Array.isArray(theirNeed) - ? getChangesToSend(backend, theirHave, theirNeed) - : [] + let changesToSend = Array.isArray(theirHave) && Array.isArray(theirNeed) ? getChangesToSend(backend, theirHave, theirNeed) : [] // If the heads are equal, we're in sync and don't need to do anything further - const headsUnchanged = - Array.isArray(lastSentHeads) && compareArrays(ourHeads, lastSentHeads) - const headsEqual = - Array.isArray(theirHeads) && compareArrays(ourHeads, theirHeads) + const headsUnchanged = Array.isArray(lastSentHeads) && compareArrays(ourHeads, lastSentHeads) + const headsEqual = Array.isArray(theirHeads) && compareArrays(ourHeads, theirHeads) if (headsUnchanged && headsEqual && changesToSend.length === 0) { // no need to send a sync message if we know we're synced! return [syncState, null] @@ -434,19 +376,12 @@ function generateSyncMessage(backend, syncState) { // TODO: this recomputes the SHA-256 hash of each change; we should restructure this to avoid the // unnecessary recomputation - changesToSend = changesToSend.filter( - change => !sentHashes[decodeChangeMeta(change, true).hash] - ) + changesToSend = changesToSend.filter(change => !sentHashes[decodeChangeMeta(change, true).hash]) // Regular response to a sync message: send any changes that the other node // doesn't have. We leave the "have" field empty because the previous message // generated by `syncStart` already indicated what changes we have. 
- const syncMessage = { - heads: ourHeads, - have: ourHave, - need: ourNeed, - changes: changesToSend, - } + const syncMessage = {heads: ourHeads, have: ourHave, need: ourNeed, changes: changesToSend} if (changesToSend.length > 0) { sentHashes = copyObject(sentHashes) for (const change of changesToSend) { @@ -454,10 +389,7 @@ function generateSyncMessage(backend, syncState) { } } - syncState = Object.assign({}, syncState, { - lastSentHeads: ourHeads, - sentHashes, - }) + syncState = Object.assign({}, syncState, {lastSentHeads: ourHeads, sentHashes}) return [syncState, encodeSyncMessage(syncMessage)] } @@ -475,14 +407,13 @@ function generateSyncMessage(backend, syncState) { * another peer, that means that peer had those changes, and therefore we now both know about them. */ function advanceHeads(myOldHeads, myNewHeads, ourOldSharedHeads) { - const newHeads = myNewHeads.filter(head => !myOldHeads.includes(head)) - const commonHeads = ourOldSharedHeads.filter(head => - myNewHeads.includes(head) - ) + const newHeads = myNewHeads.filter((head) => !myOldHeads.includes(head)) + const commonHeads = ourOldSharedHeads.filter((head) => myNewHeads.includes(head)) const advancedHeads = [...new Set([...newHeads, ...commonHeads])].sort() return advancedHeads } + /** * Given a backend, a message message and the state of our peer, apply any changes, update what * we believe about the peer, and (if there were applied changes) produce a patch for the frontend @@ -492,13 +423,10 @@ function receiveSyncMessage(backend, oldSyncState, binaryMessage) { throw new Error("generateSyncMessage called with no Automerge document") } if (!oldSyncState) { - throw new Error( - "generateSyncMessage requires a syncState, which can be created with initSyncState()" - ) + throw new Error("generateSyncMessage requires a syncState, which can be created with initSyncState()") } - let { sharedHeads, lastSentHeads, sentHashes } = oldSyncState, - patch = null + let { sharedHeads, lastSentHeads, sentHashes } = 
oldSyncState, patch = null const message = decodeSyncMessage(binaryMessage) const beforeHeads = Backend.getHeads(backend) @@ -507,27 +435,18 @@ function receiveSyncMessage(backend, oldSyncState, binaryMessage) { // changes without applying them. The set of changes may also be incomplete if the sender decided // to break a large set of changes into chunks. if (message.changes.length > 0) { - ;[backend, patch] = Backend.applyChanges(backend, message.changes) - sharedHeads = advanceHeads( - beforeHeads, - Backend.getHeads(backend), - sharedHeads - ) + [backend, patch] = Backend.applyChanges(backend, message.changes) + sharedHeads = advanceHeads(beforeHeads, Backend.getHeads(backend), sharedHeads) } // If heads are equal, indicate we don't need to send a response message - if ( - message.changes.length === 0 && - compareArrays(message.heads, beforeHeads) - ) { + if (message.changes.length === 0 && compareArrays(message.heads, beforeHeads)) { lastSentHeads = message.heads } // If all of the remote heads are known to us, that means either our heads are equal, or we are // ahead of the remote peer. In this case, take the remote heads to be our shared heads. 
- const knownHeads = message.heads.filter(head => - Backend.getChangeByHash(backend, head) - ) + const knownHeads = message.heads.filter(head => Backend.getChangeByHash(backend, head)) if (knownHeads.length === message.heads.length) { sharedHeads = message.heads // If the remote peer has lost all its data, reset our state to perform a full resync @@ -549,18 +468,14 @@ function receiveSyncMessage(backend, oldSyncState, binaryMessage) { theirHave: message.have, // the information we need to calculate the changes they need theirHeads: message.heads, theirNeed: message.need, - sentHashes, + sentHashes } return [backend, syncState, patch] } module.exports = { - receiveSyncMessage, - generateSyncMessage, - encodeSyncMessage, - decodeSyncMessage, - initSyncState, - encodeSyncState, - decodeSyncState, - BloomFilter, // BloomFilter is a private API, exported only for testing purposes + receiveSyncMessage, generateSyncMessage, + encodeSyncMessage, decodeSyncMessage, + initSyncState, encodeSyncState, decodeSyncState, + BloomFilter // BloomFilter is a private API, exported only for testing purposes } diff --git a/automerge-js/src/text.js b/automerge-js/src/text.js new file mode 100644 index 00000000..a7f442fe --- /dev/null +++ b/automerge-js/src/text.js @@ -0,0 +1,132 @@ +const { OBJECT_ID } = require('./constants') +const { isObject } = require('../src/common') + +class Text { + constructor (text) { + const instance = Object.create(Text.prototype) + if (typeof text === 'string') { + instance.elems = [...text] + } else if (Array.isArray(text)) { + instance.elems = text + } else if (text === undefined) { + instance.elems = [] + } else { + throw new TypeError(`Unsupported initial value for Text: ${text}`) + } + return instance + } + + get length () { + return this.elems.length + } + + get (index) { + return this.elems[index] + } + + getElemId (index) { + return undefined + } + + /** + * Iterates over the text elements character by character, including any + * inline objects. 
+ */ + [Symbol.iterator] () { + let elems = this.elems, index = -1 + return { + next () { + index += 1 + if (index < elems.length) { + return {done: false, value: elems[index]} + } else { + return {done: true} + } + } + } + } + + /** + * Returns the content of the Text object as a simple string, ignoring any + * non-character elements. + */ + toString() { + // Concatting to a string is faster than creating an array and then + // .join()ing for small (<100KB) arrays. + // https://jsperf.com/join-vs-loop-w-type-test + let str = '' + for (const elem of this.elems) { + if (typeof elem === 'string') str += elem + } + return str + } + + /** + * Returns the content of the Text object as a sequence of strings, + * interleaved with non-character elements. + * + * For example, the value ['a', 'b', {x: 3}, 'c', 'd'] has spans: + * => ['ab', {x: 3}, 'cd'] + */ + toSpans() { + let spans = [] + let chars = '' + for (const elem of this.elems) { + if (typeof elem === 'string') { + chars += elem + } else { + if (chars.length > 0) { + spans.push(chars) + chars = '' + } + spans.push(elem) + } + } + if (chars.length > 0) { + spans.push(chars) + } + return spans + } + + /** + * Returns the content of the Text object as a simple string, so that the + * JSON serialization of an Automerge document represents text nicely. + */ + toJSON() { + return this.toString() + } + + /** + * Updates the list item at position `index` to a new value `value`. + */ + set (index, value) { + this.elems[index] = value + } + + /** + * Inserts new list items `values` starting at position `index`. + */ + insertAt(index, ...values) { + this.elems.splice(index, 0, ... values) + } + + /** + * Deletes `numDelete` list items starting at position `index`. + * if `numDelete` is not given, one item is deleted. 
+ */ + deleteAt(index, numDelete = 1) { + this.elems.splice(index, numDelete) + } +} + +// Read-only methods that can delegate to the JavaScript built-in array +for (let method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes', + 'indexOf', 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight', + 'slice', 'some', 'toLocaleString']) { + Text.prototype[method] = function (...args) { + const array = [...this] + return array[method](...args) + } +} + +module.exports = { Text } diff --git a/automerge-js/src/uuid.js b/automerge-js/src/uuid.js new file mode 100644 index 00000000..42a8cc6e --- /dev/null +++ b/automerge-js/src/uuid.js @@ -0,0 +1,16 @@ +const { v4: uuid } = require('uuid') + +function defaultFactory() { + return uuid().replace(/-/g, '') +} + +let factory = defaultFactory + +function makeUuid() { + return factory() +} + +makeUuid.setFactory = newFactory => { factory = newFactory } +makeUuid.reset = () => { factory = defaultFactory } + +module.exports = makeUuid diff --git a/automerge-js/test/basic_test.js b/automerge-js/test/basic_test.js new file mode 100644 index 00000000..68d2fecf --- /dev/null +++ b/automerge-js/test/basic_test.js @@ -0,0 +1,164 @@ + +const assert = require('assert') +const util = require('util') +const Automerge = require('..') + +describe('Automerge', () => { + describe('basics', () => { + it('should init clone and free', () => { + let doc1 = Automerge.init() + let doc2 = Automerge.clone(doc1); + }) + + it('handle basic set and read on root object', () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, (d) => { + d.hello = "world" + d.big = "little" + d.zip = "zop" + d.app = "dap" + assert.deepEqual(d, { hello: "world", big: "little", zip: "zop", app: "dap" }) + }) + assert.deepEqual(doc2, { hello: "world", big: "little", zip: "zop", app: "dap" }) + }) + + it('handle basic sets over many changes', () => { + let doc1 = Automerge.init() + let timestamp = new Date(); + let counter = new 
Automerge.Counter(100); + let bytes = new Uint8Array([10,11,12]); + let doc2 = Automerge.change(doc1, (d) => { + d.hello = "world" + }) + let doc3 = Automerge.change(doc2, (d) => { + d.counter1 = counter + }) + let doc4 = Automerge.change(doc3, (d) => { + d.timestamp1 = timestamp + }) + let doc5 = Automerge.change(doc4, (d) => { + d.app = null + }) + let doc6 = Automerge.change(doc5, (d) => { + d.bytes1 = bytes + }) + let doc7 = Automerge.change(doc6, (d) => { + d.uint = new Automerge.Uint(1) + d.int = new Automerge.Int(-1) + d.float64 = new Automerge.Float64(5.5) + d.number1 = 100 + d.number2 = -45.67 + d.true = true + d.false = false + }) + + assert.deepEqual(doc7, { hello: "world", true: true, false: false, int: -1, uint: 1, float64: 5.5, number1: 100, number2: -45.67, counter1: counter, timestamp1: timestamp, bytes1: bytes, app: null }) + + let changes = Automerge.getAllChanges(doc7) + let t1 = Automerge.init() + ;let [t2] = Automerge.applyChanges(t1, changes) + assert.deepEqual(doc7,t2) + }) + + it('handle overwrites to values', () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, (d) => { + d.hello = "world1" + }) + let doc3 = Automerge.change(doc2, (d) => { + d.hello = "world2" + }) + let doc4 = Automerge.change(doc3, (d) => { + d.hello = "world3" + }) + let doc5 = Automerge.change(doc4, (d) => { + d.hello = "world4" + }) + assert.deepEqual(doc5, { hello: "world4" } ) + }) + + it('handle set with object value', () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, (d) => { + d.subobj = { hello: "world", subsubobj: { zip: "zop" } } + }) + assert.deepEqual(doc2, { subobj: { hello: "world", subsubobj: { zip: "zop" } } }) + }) + + it('handle simple list creation', () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, (d) => d.list = []) + assert.deepEqual(doc2, { list: []}) + }) + + it('handle simple lists', () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, (d) => { + d.list = [ 
1, 2, 3 ] + }) + assert.deepEqual(doc2.list.length, 3) + assert.deepEqual(doc2.list[0], 1) + assert.deepEqual(doc2.list[1], 2) + assert.deepEqual(doc2.list[2], 3) + assert.deepEqual(doc2, { list: [1,2,3] }) + // assert.deepStrictEqual(Automerge.toJS(doc2), { list: [1,2,3] }) + + let doc3 = Automerge.change(doc2, (d) => { + d.list[1] = "a" + }) + + assert.deepEqual(doc3.list.length, 3) + assert.deepEqual(doc3.list[0], 1) + assert.deepEqual(doc3.list[1], "a") + assert.deepEqual(doc3.list[2], 3) + assert.deepEqual(doc3, { list: [1,"a",3] }) + }) + it('handle simple lists', () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, (d) => { + d.list = [ 1, 2, 3 ] + }) + let changes = Automerge.getChanges(doc1, doc2) + let docB1 = Automerge.init() + ;let [docB2] = Automerge.applyChanges(docB1, changes) + assert.deepEqual(docB2, doc2); + }) + it('handle text', () => { + let doc1 = Automerge.init() + let tmp = new Automerge.Text("hello") + let doc2 = Automerge.change(doc1, (d) => { + d.list = new Automerge.Text("hello") + d.list.insertAt(2,"Z") + }) + let changes = Automerge.getChanges(doc1, doc2) + let docB1 = Automerge.init() + ;let [docB2] = Automerge.applyChanges(docB1, changes) + assert.deepEqual(docB2, doc2); + }) + + it('have many list methods', () => { + let doc1 = Automerge.from({ list: [1,2,3] }) + assert.deepEqual(doc1, { list: [1,2,3] }); + let doc2 = Automerge.change(doc1, (d) => { + d.list.splice(1,1,9,10) + }) + assert.deepEqual(doc2, { list: [1,9,10,3] }); + let doc3 = Automerge.change(doc2, (d) => { + d.list.push(11,12) + }) + assert.deepEqual(doc3, { list: [1,9,10,3,11,12] }); + let doc4 = Automerge.change(doc3, (d) => { + d.list.unshift(2,2) + }) + assert.deepEqual(doc4, { list: [2,2,1,9,10,3,11,12] }); + let doc5 = Automerge.change(doc4, (d) => { + d.list.shift() + }) + assert.deepEqual(doc5, { list: [2,1,9,10,3,11,12] }); + let doc6 = Automerge.change(doc5, (d) => { + d.list.insertAt(3,100,101) + }) + assert.deepEqual(doc6, { list: 
[2,1,9,100,101,10,3,11,12] }); + }) + }) +}) diff --git a/automerge-js/test/columnar_test.js b/automerge-js/test/columnar_test.js new file mode 100644 index 00000000..8cbe1482 --- /dev/null +++ b/automerge-js/test/columnar_test.js @@ -0,0 +1,97 @@ +const assert = require('assert') +const { checkEncoded } = require('./helpers') +const Automerge = require('..') +const { encodeChange, decodeChange } = Automerge + +describe('change encoding', () => { + it('should encode text edits', () => { + /* + const change1 = {actor: 'aaaa', seq: 1, startOp: 1, time: 9, message: '', deps: [], ops: [ + {action: 'makeText', obj: '_root', key: 'text', insert: false, pred: []}, + {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'h', pred: []}, + {action: 'del', obj: '1@aaaa', elemId: '2@aaaa', insert: false, pred: ['2@aaaa']}, + {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'H', pred: []}, + {action: 'set', obj: '1@aaaa', elemId: '4@aaaa', insert: true, value: 'i', pred: []} + ]} + */ + const change1 = {actor: 'aaaa', seq: 1, startOp: 1, time: 9, message: null, deps: [], ops: [ + {action: 'makeText', obj: '_root', key: 'text', pred: []}, + {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'h', pred: []}, + {action: 'del', obj: '1@aaaa', elemId: '2@aaaa', pred: ['2@aaaa']}, + {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'H', pred: []}, + {action: 'set', obj: '1@aaaa', elemId: '4@aaaa', insert: true, value: 'i', pred: []} + ]} + checkEncoded(encodeChange(change1), [ + 0x85, 0x6f, 0x4a, 0x83, // magic bytes + 0xe2, 0xbd, 0xfb, 0xf5, // checksum + 1, 94, 0, 2, 0xaa, 0xaa, // chunkType: change, length, deps, actor 'aaaa' + 1, 1, 9, 0, 0, // seq, startOp, time, message, actor list + 12, 0x01, 4, 0x02, 4, // column count, objActor, objCtr + 0x11, 8, 0x13, 7, 0x15, 8, // keyActor, keyCtr, keyStr + 0x34, 4, 0x42, 6, // insert, action + 0x56, 6, 0x57, 3, // valLen, valRaw + 0x70, 6, 0x71, 2, 0x73, 2, // 
predNum, predActor, predCtr + 0, 1, 4, 0, // objActor column: null, 0, 0, 0, 0 + 0, 1, 4, 1, // objCtr column: null, 1, 1, 1, 1 + 0, 2, 0x7f, 0, 0, 1, 0x7f, 0, // keyActor column: null, null, 0, null, 0 + 0, 1, 0x7c, 0, 2, 0x7e, 4, // keyCtr column: null, 0, 2, 0, 4 + 0x7f, 4, 0x74, 0x65, 0x78, 0x74, 0, 4, // keyStr column: 'text', null, null, null, null + 1, 1, 1, 2, // insert column: false, true, false, true, true + 0x7d, 4, 1, 3, 2, 1, // action column: makeText, set, del, set, set + 0x7d, 0, 0x16, 0, 2, 0x16, // valLen column: 0, 0x16, 0, 0x16, 0x16 + 0x68, 0x48, 0x69, // valRaw column: 'h', 'H', 'i' + 2, 0, 0x7f, 1, 2, 0, // predNum column: 0, 0, 1, 0, 0 + 0x7f, 0, // predActor column: 0 + 0x7f, 2 // predCtr column: 2 + ]) + const decoded = decodeChange(encodeChange(change1)) + assert.deepStrictEqual(decoded, Object.assign({hash: decoded.hash}, change1)) + }) + + // FIXME - skipping this b/c it was never implemented in the rust impl and isnt trivial +/* + it.skip('should require strict ordering of preds', () => { + const change = new Uint8Array([ + 133, 111, 74, 131, 31, 229, 112, 44, 1, 105, 1, 58, 30, 190, 100, 253, 180, 180, 66, 49, 126, + 81, 142, 10, 3, 35, 140, 189, 231, 34, 145, 57, 66, 23, 224, 149, 64, 97, 88, 140, 168, 194, + 229, 4, 244, 209, 58, 138, 67, 140, 1, 152, 236, 250, 2, 0, 1, 4, 55, 234, 66, 242, 8, 21, 11, + 52, 1, 66, 2, 86, 3, 87, 10, 112, 2, 113, 3, 115, 4, 127, 9, 99, 111, 109, 109, 111, 110, 86, + 97, 114, 1, 127, 1, 127, 166, 1, 52, 48, 57, 49, 52, 57, 52, 53, 56, 50, 127, 2, 126, 0, 1, + 126, 139, 1, 0 + ]) + assert.throws(() => { decodeChange(change) }, /operation IDs are not in ascending order/) + }) +*/ + + describe('with trailing bytes', () => { + let change = new Uint8Array([ + 0x85, 0x6f, 0x4a, 0x83, // magic bytes + 0xb2, 0x98, 0x9e, 0xa9, // checksum + 1, 61, 0, 2, 0x12, 0x34, // chunkType: change, length, deps, actor '1234' + 1, 1, 252, 250, 220, 255, 5, // seq, startOp, time + 14, 73, 110, 105, 116, 105, 97, 108, 105, 
122, 97, 116, 105, 111, 110, // message: 'Initialization' + 0, 6, // actor list, column count + 0x15, 3, 0x34, 1, 0x42, 2, // keyStr, insert, action + 0x56, 2, 0x57, 1, 0x70, 2, // valLen, valRaw, predNum + 0x7f, 1, 0x78, // keyStr: 'x' + 1, // insert: false + 0x7f, 1, // action: set + 0x7f, 19, // valLen: 1 byte of type uint + 1, // valRaw: 1 + 0x7f, 0, // predNum: 0 + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 // 10 trailing bytes + ]) + + it('should allow decoding and re-encoding', () => { + // NOTE: This calls the JavaScript encoding and decoding functions, even when the WebAssembly + // backend is loaded. Should the wasm backend export its own functions for testing? + checkEncoded(change, encodeChange(decodeChange(change))) + }) + + it('should be preserved in document encoding', () => { + const [doc] = Automerge.applyChanges(Automerge.init(), [change]) + const [reconstructed] = Automerge.getAllChanges(Automerge.load(Automerge.save(doc))) + checkEncoded(change, reconstructed) + }) + }) +}) diff --git a/javascript/test/helpers.ts b/automerge-js/test/helpers.js similarity index 56% rename from javascript/test/helpers.ts rename to automerge-js/test/helpers.js index df76e558..c3fc52ae 100644 --- a/javascript/test/helpers.ts +++ b/automerge-js/test/helpers.js @@ -1,21 +1,16 @@ -import * as assert from "assert" -import { Encoder } from "./legacy/encoding" +const assert = require('assert') +const { Encoder } = require('../src/encoding') // Assertion that succeeds if the first argument deepStrictEquals at least one of the // subsequent arguments (but we don't care which one) -export function assertEqualsOneOf(actual, ...expected) { +function assertEqualsOneOf(actual, ...expected) { assert(expected.length > 0) for (let i = 0; i < expected.length; i++) { try { assert.deepStrictEqual(actual, expected[i]) return // if we get here without an exception, that means success } catch (e) { - if (e instanceof assert.AssertionError) { - if (!e.name.match(/^AssertionError/) || i === 
expected.length - 1) - throw e - } else { - throw e - } + if (!e.name.match(/^AssertionError/) || i === expected.length - 1) throw e } } } @@ -24,13 +19,14 @@ export function assertEqualsOneOf(actual, ...expected) { * Asserts that the byte array maintained by `encoder` contains the same byte * sequence as the array `bytes`. */ -export function checkEncoded(encoder, bytes, detail?) { - const encoded = encoder instanceof Encoder ? encoder.buffer : encoder +function checkEncoded(encoder, bytes, detail) { + const encoded = (encoder instanceof Encoder) ? encoder.buffer : encoder const expected = new Uint8Array(bytes) - const message = - (detail ? `${detail}: ` : "") + `${encoded} expected to equal ${expected}` + const message = (detail ? `${detail}: ` : '') + `${encoded} expected to equal ${expected}` assert(encoded.byteLength === expected.byteLength, message) for (let i = 0; i < encoded.byteLength; i++) { assert(encoded[i] === expected[i], message) } } + +module.exports = { assertEqualsOneOf, checkEncoded } diff --git a/automerge-js/test/legacy_tests.js b/automerge-js/test/legacy_tests.js new file mode 100644 index 00000000..76348d06 --- /dev/null +++ b/automerge-js/test/legacy_tests.js @@ -0,0 +1,1419 @@ +const assert = require('assert') +//const Automerge = process.env.TEST_DIST === '1' ? 
require('../dist/automerge') : require('../src/automerge') +const Automerge = require('../src') +const { assertEqualsOneOf } = require('./helpers') +const { decodeChange } = require('../src/columnar') +//const { decodeChange } = Automerge + +const UUID_PATTERN = /^[0-9a-f]{32}$/ +const OPID_PATTERN = /^[0-9]+@[0-9a-f]{32}$/ + +// CORE FEATURES +// +// TODO - Cursors +// TODO - Tables +// TODO - on-pass load() & reconstruct change from opset +// TODO - micro-patches (needed for fully hydrated object in js) +// TODO - valueAt(heads) / GC +// +// AUTOMERGE UNSUPPORTED +// +// TODO - patchCallback + + +describe('Automerge', () => { + describe('initialization ', () => { + it('should initially be an empty map', () => { + const doc = Automerge.init() + assert.deepStrictEqual(doc, {}) + }) + + it('should allow instantiating from an existing object', () => { + const initialState = { birds: { wrens: 3, magpies: 4 } } + const doc = Automerge.from(initialState) + assert.deepStrictEqual(doc, initialState) + }) + + it('should allow merging of an object initialized with `from`', () => { + let doc1 = Automerge.from({ cards: [] }) + let doc2 = Automerge.merge(Automerge.init(), doc1) + assert.deepStrictEqual(doc2, { cards: [] }) + }) + + it('should allow passing an actorId when instantiating from an existing object', () => { + const actorId = '1234' + let doc = Automerge.from({ foo: 1 }, actorId) + assert.strictEqual(Automerge.getActorId(doc), '1234') + }) + + it('accepts an empty object as initial state', () => { + const doc = Automerge.from({}) + assert.deepStrictEqual(doc, {}) + }) + + it('accepts an array as initial state, but converts it to an object', () => { + const doc = Automerge.from(['a', 'b', 'c']) + assert.deepStrictEqual(doc, { '0': 'a', '1': 'b', '2': 'c' }) + }) + + it('accepts strings as initial values, but treats them as an array of characters', () => { + const doc = Automerge.from('abc') + assert.deepStrictEqual(doc, { '0': 'a', '1': 'b', '2': 'c' }) + }) + + 
it('ignores numbers provided as initial values', () => { + const doc = Automerge.from(123) + assert.deepStrictEqual(doc, {}) + }) + + it('ignores booleans provided as initial values', () => { + const doc1 = Automerge.from(false) + assert.deepStrictEqual(doc1, {}) + const doc2 = Automerge.from(true) + assert.deepStrictEqual(doc2, {}) + }) + }) + + describe('sequential use', () => { + let s1, s2 + beforeEach(() => { + s1 = Automerge.init() + }) + + it('should not mutate objects', () => { + s2 = Automerge.change(s1, doc => doc.foo = 'bar') + assert.strictEqual(s1.foo, undefined) + assert.strictEqual(s2.foo, 'bar') + }) + + it('changes should be retrievable', () => { + const change1 = Automerge.getLastLocalChange(s1) + s2 = Automerge.change(s1, doc => doc.foo = 'bar') + const change2 = Automerge.getLastLocalChange(s2) + assert.strictEqual(change1, undefined) + const change = decodeChange(change2) + assert.deepStrictEqual(change, { + actor: change.actor, deps: [], seq: 1, startOp: 1, + hash: change.hash, message: '', time: change.time, + ops: [{obj: '_root', key: 'foo', action: 'set', insert: false, value: 'bar', pred: []}] + }) + }) + + it('should not register any conflicts on repeated assignment', () => { + assert.strictEqual(Automerge.getConflicts(s1, 'foo'), undefined) + s1 = Automerge.change(s1, 'change', doc => doc.foo = 'one') + assert.strictEqual(Automerge.getConflicts(s1, 'foo'), undefined) + s1 = Automerge.change(s1, 'change', doc => doc.foo = 'two') + assert.strictEqual(Automerge.getConflicts(s1, 'foo'), undefined) + }) + + describe('changes', () => { + it('should group several changes', () => { + s2 = Automerge.change(s1, 'change message', doc => { + doc.first = 'one' + assert.strictEqual(doc.first, 'one') + doc.second = 'two' + assert.deepStrictEqual(doc, { + first: 'one', second: 'two' + }) + }) + assert.deepStrictEqual(s1, {}) + assert.deepStrictEqual(s2, {first: 'one', second: 'two'}) + }) + + it('should freeze objects if desired', () => { + s1 = 
Automerge.init({freeze: true}) + s2 = Automerge.change(s1, doc => doc.foo = 'bar') + try { + s2.foo = 'lemon' + } catch (e) { } + assert.strictEqual(s2.foo, 'bar') + + let deleted = false + try { + deleted = delete s2.foo + } catch (e) { } + assert.strictEqual(s2.foo, 'bar') + assert.strictEqual(deleted, false) + + Automerge.change(s2, () => { + try { + s2.foo = 'lemon' + } catch (e) { } + assert.strictEqual(s2.foo, 'bar') + }) + + assert.throws(() => { Object.assign(s2, {x: 4}) }) + assert.strictEqual(s2.x, undefined) + }) + + it('should allow repeated reading and writing of values', () => { + s2 = Automerge.change(s1, 'change message', doc => { + doc.value = 'a' + assert.strictEqual(doc.value, 'a') + doc.value = 'b' + doc.value = 'c' + assert.strictEqual(doc.value, 'c') + }) + assert.deepStrictEqual(s1, {}) + assert.deepStrictEqual(s2, {value: 'c'}) + }) + + it('should not record conflicts when writing the same field several times within one change', () => { + s1 = Automerge.change(s1, 'change message', doc => { + doc.value = 'a' + doc.value = 'b' + doc.value = 'c' + }) + assert.strictEqual(s1.value, 'c') + assert.strictEqual(Automerge.getConflicts(s1, 'value'), undefined) + }) + + it('should return the unchanged state object if nothing changed', () => { + s2 = Automerge.change(s1, () => {}) + assert.strictEqual(s2, s1) + }) + + it('should ignore field updates that write the existing value', () => { + s1 = Automerge.change(s1, doc => doc.field = 123) + s2 = Automerge.change(s1, doc => doc.field = 123) + assert.strictEqual(s2, s1) + }) + + it('should not ignore field updates that resolve a conflict', () => { + s2 = Automerge.merge(Automerge.init(), s1) + s1 = Automerge.change(s1, doc => doc.field = 123) + s2 = Automerge.change(s2, doc => doc.field = 321) + s1 = Automerge.merge(s1, s2) + assert.strictEqual(Object.keys(Automerge.getConflicts(s1, 'field')).length, 2) + const resolved = Automerge.change(s1, doc => doc.field = s1.field) + 
assert.notStrictEqual(resolved, s1) + assert.deepStrictEqual(resolved, {field: s1.field}) + assert.strictEqual(Automerge.getConflicts(resolved, 'field'), undefined) + }) + + it('should ignore list element updates that write the existing value', () => { + s1 = Automerge.change(s1, doc => doc.list = [123]) + s2 = Automerge.change(s1, doc => doc.list[0] = 123) + assert.strictEqual(s2, s1) + }) + + it('should not ignore list element updates that resolve a conflict', () => { + s1 = Automerge.change(s1, doc => doc.list = [1]) + s2 = Automerge.merge(Automerge.init(), s1) + s1 = Automerge.change(s1, doc => doc.list[0] = 123) + s2 = Automerge.change(s2, doc => doc.list[0] = 321) + s1 = Automerge.merge(s1, s2) + assert.deepStrictEqual(Automerge.getConflicts(s1.list, 0), { + [`3@${Automerge.getActorId(s1)}`]: 123, + [`3@${Automerge.getActorId(s2)}`]: 321 + }) + const resolved = Automerge.change(s1, doc => doc.list[0] = s1.list[0]) + assert.deepStrictEqual(resolved, s1) + assert.notStrictEqual(resolved, s1) + assert.strictEqual(Automerge.getConflicts(resolved.list, 0), undefined) + }) + + it('should sanity-check arguments', () => { + s1 = Automerge.change(s1, doc => doc.nested = {}) + assert.throws(() => { Automerge.change({}, doc => doc.foo = 'bar') }, /must be the document root/) + assert.throws(() => { Automerge.change(s1.nested, doc => doc.foo = 'bar') }, /must be the document root/) + }) + + it('should not allow nested change blocks', () => { + assert.throws(() => { + Automerge.change(s1, doc1 => { + Automerge.change(doc1, doc2 => { + doc2.foo = 'bar' + }) + }) + }, /Calls to Automerge.change cannot be nested/) + assert.throws(() => { + s1 = Automerge.change(s1, doc1 => { + s2 = Automerge.change(s1, doc2 => doc2.two = 2) + doc1.one = 1 + }) + }, /Attempting to use an outdated Automerge document/) + }) + + it('should not allow the same base document to be used for multiple changes', () => { + assert.throws(() => { + Automerge.change(s1, doc => doc.one = 1) + 
Automerge.change(s1, doc => doc.two = 2) + }, /Attempting to use an outdated Automerge document/) + }) + + it('should allow a document to be cloned', () => { + s1 = Automerge.change(s1, doc => doc.zero = 0) + s2 = Automerge.clone(s1) + s1 = Automerge.change(s1, doc => doc.one = 1) + s2 = Automerge.change(s2, doc => doc.two = 2) + assert.deepStrictEqual(s1, {zero: 0, one: 1}) + assert.deepStrictEqual(s2, {zero: 0, two: 2}) + Automerge.free(s1) + Automerge.free(s2) + }) + + it('should work with Object.assign merges', () => { + s1 = Automerge.change(s1, doc1 => { + doc1.stuff = {foo: 'bar', baz: 'blur'} + }) + s1 = Automerge.change(s1, doc1 => { + doc1.stuff = Object.assign({}, doc1.stuff, {baz: 'updated!'}) + }) + assert.deepStrictEqual(s1, {stuff: {foo: 'bar', baz: 'updated!'}}) + }) + + it('should support Date objects in maps', () => { + const now = new Date() + s1 = Automerge.change(s1, doc => doc.now = now) + let changes = Automerge.getAllChanges(s1) + ;[s2] = Automerge.applyChanges(Automerge.init(), changes) + assert.strictEqual(s2.now instanceof Date, true) + assert.strictEqual(s2.now.getTime(), now.getTime()) + }) + + it('should support Date objects in lists', () => { + const now = new Date() + s1 = Automerge.change(s1, doc => doc.list = [now]) + let changes = Automerge.getAllChanges(s1) + ;[s2] = Automerge.applyChanges(Automerge.init(), changes) + assert.strictEqual(s2.list[0] instanceof Date, true) + assert.strictEqual(s2.list[0].getTime(), now.getTime()) + }) + + /* + it.skip('should call patchCallback if supplied', () => { + const callbacks = [], actor = Automerge.getActorId(s1) + const s2 = Automerge.change(s1, { + patchCallback: (patch, before, after, local) => callbacks.push({patch, before, after, local}) + }, doc => { + doc.birds = ['Goldfinch'] + }) + assert.strictEqual(callbacks.length, 1) + assert.deepStrictEqual(callbacks[0].patch, { + actor, seq: 1, maxOp: 2, deps: [], clock: {[actor]: 1}, pendingChanges: 0, + diffs: {objectId: '_root', type: 
'map', props: {birds: {[`1@${actor}`]: { + objectId: `1@${actor}`, type: 'list', edits: [ + {action: 'insert', index: 0, elemId: `2@${actor}`, opId: `2@${actor}`, value: {'type': 'value', value: 'Goldfinch'}} + ] + }}}} + }) + assert.strictEqual(callbacks[0].before, s1) + assert.strictEqual(callbacks[0].after, s2) + assert.strictEqual(callbacks[0].local, true) + }) + */ + + /* + it.skip('should call a patchCallback set up on document initialisation', () => { + const callbacks = [] + s1 = Automerge.init({ + patchCallback: (patch, before, after, local) => callbacks.push({patch, before, after, local}) + }) + const s2 = Automerge.change(s1, doc => doc.bird = 'Goldfinch') + const actor = Automerge.getActorId(s1) + assert.strictEqual(callbacks.length, 1) + assert.deepStrictEqual(callbacks[0].patch, { + actor, seq: 1, maxOp: 1, deps: [], clock: {[actor]: 1}, pendingChanges: 0, + diffs: {objectId: '_root', type: 'map', props: {bird: {[`1@${actor}`]: {type: 'value', value: 'Goldfinch'}}}} + }) + assert.strictEqual(callbacks[0].before, s1) + assert.strictEqual(callbacks[0].after, s2) + assert.strictEqual(callbacks[0].local, true) + }) + */ + }) + + describe('emptyChange()', () => { + it('should append an empty change to the history', () => { + s1 = Automerge.change(s1, 'first change', doc => doc.field = 123) + s2 = Automerge.emptyChange(s1, 'empty change') + assert.notStrictEqual(s2, s1) + assert.deepStrictEqual(s2, s1) + assert.deepStrictEqual(Automerge.getHistory(s2).map(state => state.change.message), ['first change', 'empty change']) + }) + + it('should reference dependencies', () => { + s1 = Automerge.change(s1, doc => doc.field = 123) + s2 = Automerge.merge(Automerge.init(), s1) + s2 = Automerge.change(s2, doc => doc.other = 'hello') + s1 = Automerge.emptyChange(Automerge.merge(s1, s2)) + const history = Automerge.getHistory(s1) + const emptyChange = history[2].change + assert.deepStrictEqual(emptyChange.deps, [history[0].change.hash, history[1].change.hash].sort()) + 
assert.deepStrictEqual(emptyChange.ops, []) + }) + }) + + describe('root object', () => { + it('should handle single-property assignment', () => { + s1 = Automerge.change(s1, 'set bar', doc => doc.foo = 'bar') + s1 = Automerge.change(s1, 'set zap', doc => doc.zip = 'zap') + assert.strictEqual(s1.foo, 'bar') + assert.strictEqual(s1.zip, 'zap') + assert.deepStrictEqual(s1, {foo: 'bar', zip: 'zap'}) + }) + + it('should allow floating-point values', () => { + s1 = Automerge.change(s1, doc => doc.number = 1589032171.1) + assert.strictEqual(s1.number, 1589032171.1) + }) + + it('should handle multi-property assignment', () => { + s1 = Automerge.change(s1, 'multi-assign', doc => { + Object.assign(doc, {foo: 'bar', answer: 42}) + }) + assert.strictEqual(s1.foo, 'bar') + assert.strictEqual(s1.answer, 42) + assert.deepStrictEqual(s1, {foo: 'bar', answer: 42}) + }) + + it('should handle root property deletion', () => { + s1 = Automerge.change(s1, 'set foo', doc => { doc.foo = 'bar'; doc.something = null }) + s1 = Automerge.change(s1, 'del foo', doc => { delete doc.foo }) + assert.strictEqual(s1.foo, undefined) + assert.strictEqual(s1.something, null) + assert.deepStrictEqual(s1, {something: null}) + }) + + it('should follow JS delete behavior', () => { + s1 = Automerge.change(s1, 'set foo', doc => { doc.foo = 'bar' }) + let deleted + s1 = Automerge.change(s1, 'del foo', doc => { + deleted = delete doc.foo + }) + assert.strictEqual(deleted, true) + let deleted2 + assert.doesNotThrow(() => { + s1 = Automerge.change(s1, 'del baz', doc => { + deleted2 = delete doc.baz + }) + }) + assert.strictEqual(deleted2, true) + }) + + it('should allow the type of a property to be changed', () => { + s1 = Automerge.change(s1, 'set number', doc => doc.prop = 123) + assert.strictEqual(s1.prop, 123) + s1 = Automerge.change(s1, 'set string', doc => doc.prop = '123') + assert.strictEqual(s1.prop, '123') + s1 = Automerge.change(s1, 'set null', doc => doc.prop = null) + assert.strictEqual(s1.prop, 
null) + s1 = Automerge.change(s1, 'set bool', doc => doc.prop = true) + assert.strictEqual(s1.prop, true) + }) + + it('should require property names to be valid', () => { + assert.throws(() => { + Automerge.change(s1, 'foo', doc => doc[''] = 'x') + }, /must not be an empty string/) + }) + + it('should not allow assignment of unsupported datatypes', () => { + Automerge.change(s1, doc => { + assert.throws(() => { doc.foo = undefined }, /Unsupported type of value: undefined/) + assert.throws(() => { doc.foo = {prop: undefined} }, /Unsupported type of value: undefined/) + assert.throws(() => { doc.foo = () => {} }, /Unsupported type of value: function/) + assert.throws(() => { doc.foo = Symbol('foo') }, /Unsupported type of value: symbol/) + }) + }) + }) + + describe('nested maps', () => { + it('should assign an objectId to nested maps', () => { + s1 = Automerge.change(s1, doc => { doc.nested = {} }) + let id = Automerge.getObjectId(s1.nested) + assert.strictEqual(OPID_PATTERN.test(Automerge.getObjectId(s1.nested)), true) + assert.notEqual(Automerge.getObjectId(s1.nested), '_root') + }) + + it('should handle assignment of a nested property', () => { + s1 = Automerge.change(s1, 'first change', doc => { + doc.nested = {} + doc.nested.foo = 'bar' + }) + s1 = Automerge.change(s1, 'second change', doc => { + doc.nested.one = 1 + }) + assert.deepStrictEqual(s1, {nested: {foo: 'bar', one: 1}}) + assert.deepStrictEqual(s1.nested, {foo: 'bar', one: 1}) + assert.strictEqual(s1.nested.foo, 'bar') + assert.strictEqual(s1.nested.one, 1) + }) + + it('should handle assignment of an object literal', () => { + s1 = Automerge.change(s1, doc => { + doc.textStyle = {bold: false, fontSize: 12} + }) + assert.deepStrictEqual(s1, {textStyle: {bold: false, fontSize: 12}}) + assert.deepStrictEqual(s1.textStyle, {bold: false, fontSize: 12}) + assert.strictEqual(s1.textStyle.bold, false) + assert.strictEqual(s1.textStyle.fontSize, 12) + }) + + it('should handle assignment of multiple nested 
properties', () => { + s1 = Automerge.change(s1, doc => { + doc.textStyle = {bold: false, fontSize: 12} + Object.assign(doc.textStyle, {typeface: 'Optima', fontSize: 14}) + }) + assert.strictEqual(s1.textStyle.typeface, 'Optima') + assert.strictEqual(s1.textStyle.bold, false) + assert.strictEqual(s1.textStyle.fontSize, 14) + assert.deepStrictEqual(s1.textStyle, {typeface: 'Optima', bold: false, fontSize: 14}) + }) + + it('should handle arbitrary-depth nesting', () => { + s1 = Automerge.change(s1, doc => { + doc.a = {b: {c: {d: {e: {f: {g: 'h'}}}}}} + }) + s1 = Automerge.change(s1, doc => { + doc.a.b.c.d.e.f.i = 'j' + }) + assert.deepStrictEqual(s1, {a: { b: { c: { d: { e: { f: { g: 'h', i: 'j'}}}}}}}) + assert.strictEqual(s1.a.b.c.d.e.f.g, 'h') + assert.strictEqual(s1.a.b.c.d.e.f.i, 'j') + }) + + it('should allow an old object to be replaced with a new one', () => { + s1 = Automerge.change(s1, 'change 1', doc => { + doc.myPet = {species: 'dog', legs: 4, breed: 'dachshund'} + }) + s2 = Automerge.change(s1, 'change 2', doc => { + doc.myPet = {species: 'koi', variety: '紅白', colors: {red: true, white: true, black: false}} + }) + assert.deepStrictEqual(s1.myPet, { + species: 'dog', legs: 4, breed: 'dachshund' + }) + assert.strictEqual(s1.myPet.breed, 'dachshund') + assert.deepStrictEqual(s2.myPet, { + species: 'koi', variety: '紅白', + colors: {red: true, white: true, black: false} + }) + assert.strictEqual(s2.myPet.breed, undefined) + assert.strictEqual(s2.myPet.variety, '紅白') + }) + + it('should allow fields to be changed between primitive and nested map', () => { + s1 = Automerge.change(s1, doc => doc.color = '#ff7f00') + assert.strictEqual(s1.color, '#ff7f00') + s1 = Automerge.change(s1, doc => doc.color = {red: 255, green: 127, blue: 0}) + assert.deepStrictEqual(s1.color, {red: 255, green: 127, blue: 0}) + s1 = Automerge.change(s1, doc => doc.color = '#ff7f00') + assert.strictEqual(s1.color, '#ff7f00') + }) + + it('should not allow several references to the same map 
object', () => { + s1 = Automerge.change(s1, doc => doc.object = {}) + assert.throws(() => { + Automerge.change(s1, doc => { doc.x = doc.object }) + }, /Cannot create a reference to an existing document object/) + assert.throws(() => { + Automerge.change(s1, doc => { doc.x = s1.object }) + }, /Cannot create a reference to an existing document object/) + assert.throws(() => { + Automerge.change(s1, doc => { doc.x = {}; doc.y = doc.x }) + }, /Cannot create a reference to an existing document object/) + }) + + it('should not allow object-copying idioms', () => { + s1 = Automerge.change(s1, doc => { + doc.items = [{id: 'id1', name: 'one'}, {id: 'id2', name: 'two'}] + }) + // People who have previously worked with immutable state in JavaScript may be tempted + // to use idioms like this, which don't work well with Automerge -- see e.g. + // https://github.com/automerge/automerge/issues/260 + assert.throws(() => { + Automerge.change(s1, doc => { + doc.items = [...doc.items, {id: 'id3', name: 'three'}] + }) + }, /Cannot create a reference to an existing document object/) + }) + + it('should handle deletion of properties within a map', () => { + s1 = Automerge.change(s1, 'set style', doc => { + doc.textStyle = {typeface: 'Optima', bold: false, fontSize: 12} + }) + s1 = Automerge.change(s1, 'non-bold', doc => delete doc.textStyle.bold) + assert.strictEqual(s1.textStyle.bold, undefined) + assert.deepStrictEqual(s1.textStyle, {typeface: 'Optima', fontSize: 12}) + }) + + it('should handle deletion of references to a map', () => { + s1 = Automerge.change(s1, 'make rich text doc', doc => { + Object.assign(doc, {title: 'Hello', textStyle: {typeface: 'Optima', fontSize: 12}}) + }) + s1 = Automerge.change(s1, doc => delete doc.textStyle) + assert.strictEqual(s1.textStyle, undefined) + assert.deepStrictEqual(s1, {title: 'Hello'}) + }) + + it('should validate field names', () => { + s1 = Automerge.change(s1, doc => doc.nested = {}) + assert.throws(() => { Automerge.change(s1, doc => 
doc.nested[''] = 'x') }, /must not be an empty string/) + assert.throws(() => { Automerge.change(s1, doc => doc.nested = {'': 'x'}) }, /must not be an empty string/) + }) + }) + + describe('lists', () => { + it('should allow elements to be inserted', () => { + s1 = Automerge.change(s1, doc => doc.noodles = []) + s1 = Automerge.change(s1, doc => doc.noodles.insertAt(0, 'udon', 'soba')) + s1 = Automerge.change(s1, doc => doc.noodles.insertAt(1, 'ramen')) + assert.deepStrictEqual(s1, {noodles: ['udon', 'ramen', 'soba']}) + assert.deepStrictEqual(s1.noodles, ['udon', 'ramen', 'soba']) + assert.strictEqual(s1.noodles[0], 'udon') + assert.strictEqual(s1.noodles[1], 'ramen') + assert.strictEqual(s1.noodles[2], 'soba') + assert.strictEqual(s1.noodles.length, 3) + }) + + it('should handle assignment of a list literal', () => { + s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba']) + assert.deepStrictEqual(s1, {noodles: ['udon', 'ramen', 'soba']}) + assert.deepStrictEqual(s1.noodles, ['udon', 'ramen', 'soba']) + assert.strictEqual(s1.noodles[0], 'udon') + assert.strictEqual(s1.noodles[1], 'ramen') + assert.strictEqual(s1.noodles[2], 'soba') + assert.strictEqual(s1.noodles[3], undefined) + assert.strictEqual(s1.noodles.length, 3) + }) + + it('should only allow numeric indexes', () => { + s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba']) + s1 = Automerge.change(s1, doc => doc.noodles[1] = 'Ramen!') + assert.strictEqual(s1.noodles[1], 'Ramen!') + s1 = Automerge.change(s1, doc => doc.noodles['1'] = 'RAMEN!!!') + assert.strictEqual(s1.noodles[1], 'RAMEN!!!') + assert.throws(() => { Automerge.change(s1, doc => doc.noodles.favourite = 'udon') }, /list index must be a number/) + assert.throws(() => { Automerge.change(s1, doc => doc.noodles[''] = 'udon') }, /list index must be a number/) + assert.throws(() => { Automerge.change(s1, doc => doc.noodles['1e6'] = 'udon') }, /list index must be a number/) + }) + + it('should handle deletion of 
list elements', () => { + s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba']) + s1 = Automerge.change(s1, doc => delete doc.noodles[1]) + assert.deepStrictEqual(s1.noodles, ['udon', 'soba']) + s1 = Automerge.change(s1, doc => doc.noodles.deleteAt(1)) + assert.deepStrictEqual(s1.noodles, ['udon']) + assert.strictEqual(s1.noodles[0], 'udon') + assert.strictEqual(s1.noodles[1], undefined) + assert.strictEqual(s1.noodles[2], undefined) + assert.strictEqual(s1.noodles.length, 1) + }) + + it('should handle assignment of individual list indexes', () => { + s1 = Automerge.change(s1, doc => doc.japaneseFood = ['udon', 'ramen', 'soba']) + s1 = Automerge.change(s1, doc => doc.japaneseFood[1] = 'sushi') + assert.deepStrictEqual(s1.japaneseFood, ['udon', 'sushi', 'soba']) + assert.strictEqual(s1.japaneseFood[0], 'udon') + assert.strictEqual(s1.japaneseFood[1], 'sushi') + assert.strictEqual(s1.japaneseFood[2], 'soba') + assert.strictEqual(s1.japaneseFood[3], undefined) + assert.strictEqual(s1.japaneseFood.length, 3) + }) + + it('concurrent edits insert in reverse actorid order if counters equal', () => { + s1 = Automerge.init('aaaa') + s2 = Automerge.init('bbbb') + s1 = Automerge.change(s1, doc => doc.list = []) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.list.splice(0, 0, "2@aaaa")) + s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "2@bbbb")) + s2 = Automerge.merge(s2, s1) + assert.deepStrictEqual(Automerge.toJS(s2).list, ["2@bbbb", "2@aaaa"]) + }) + + it('concurrent edits insert in reverse counter order if different', () => { + s1 = Automerge.init('aaaa') + s2 = Automerge.init('bbbb') + s1 = Automerge.change(s1, doc => doc.list = []) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.list.splice(0, 0, "2@aaaa")) + s2 = Automerge.change(s2, doc => doc.foo = "2@bbbb") + s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "3@bbbb")) + s2 = Automerge.merge(s2, s1) + assert.deepStrictEqual(s2.list, 
["3@bbbb", "2@aaaa"]) + }) + + it('should treat out-by-one assignment as insertion', () => { + s1 = Automerge.change(s1, doc => doc.japaneseFood = ['udon']) + s1 = Automerge.change(s1, doc => doc.japaneseFood[1] = 'sushi') + assert.deepStrictEqual(s1.japaneseFood, ['udon', 'sushi']) + assert.strictEqual(s1.japaneseFood[0], 'udon') + assert.strictEqual(s1.japaneseFood[1], 'sushi') + assert.strictEqual(s1.japaneseFood[2], undefined) + assert.strictEqual(s1.japaneseFood.length, 2) + }) + + it('should not allow out-of-range assignment', () => { + s1 = Automerge.change(s1, doc => doc.japaneseFood = ['udon']) + assert.throws(() => { Automerge.change(s1, doc => doc.japaneseFood[4] = 'ramen') }, /is out of bounds/) + }) + + it('should allow bulk assignment of multiple list indexes', () => { + s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba']) + s1 = Automerge.change(s1, doc => Object.assign(doc.noodles, {0: 'うどん', 2: 'そば'})) + assert.deepStrictEqual(s1.noodles, ['うどん', 'ramen', 'そば']) + assert.strictEqual(s1.noodles[0], 'うどん') + assert.strictEqual(s1.noodles[1], 'ramen') + assert.strictEqual(s1.noodles[2], 'そば') + assert.strictEqual(s1.noodles.length, 3) + }) + + it('should handle nested objects', () => { + s1 = Automerge.change(s1, doc => doc.noodles = [{type: 'ramen', dishes: ['tonkotsu', 'shoyu']}]) + s1 = Automerge.change(s1, doc => doc.noodles.push({type: 'udon', dishes: ['tempura udon']})) + s1 = Automerge.change(s1, doc => doc.noodles[0].dishes.push('miso')) + assert.deepStrictEqual(s1, {noodles: [ + {type: 'ramen', dishes: ['tonkotsu', 'shoyu', 'miso']}, + {type: 'udon', dishes: ['tempura udon']} + ]}) + assert.deepStrictEqual(s1.noodles[0], { + type: 'ramen', dishes: ['tonkotsu', 'shoyu', 'miso'] + }) + assert.deepStrictEqual(s1.noodles[1], { + type: 'udon', dishes: ['tempura udon'] + }) + }) + + it('should handle nested lists', () => { + s1 = Automerge.change(s1, doc => doc.noodleMatrix = [['ramen', 'tonkotsu', 'shoyu']]) + s1 = 
Automerge.change(s1, doc => doc.noodleMatrix.push(['udon', 'tempura udon'])) + s1 = Automerge.change(s1, doc => doc.noodleMatrix[0].push('miso')) + assert.deepStrictEqual(s1.noodleMatrix, [['ramen', 'tonkotsu', 'shoyu', 'miso'], ['udon', 'tempura udon']]) + assert.deepStrictEqual(s1.noodleMatrix[0], ['ramen', 'tonkotsu', 'shoyu', 'miso']) + assert.deepStrictEqual(s1.noodleMatrix[1], ['udon', 'tempura udon']) + }) + + it('should handle deep nesting', () => { + s1 = Automerge.change(s1, doc => doc.nesting = { + maps: { m1: { m2: { foo: "bar", baz: {} }, m2a: { } } }, + lists: [ [ 1, 2, 3 ], [ [ 3, 4, 5, [6]], 7 ] ], + mapsinlists: [ { foo: "bar" }, [ { bar: "baz" } ] ], + listsinmaps: { foo: [1, 2, 3], bar: [ [ { baz: "123" } ] ] } + }) + s1 = Automerge.change(s1, doc => { + doc.nesting.maps.m1a = "123" + doc.nesting.maps.m1.m2.baz.xxx = "123" + delete doc.nesting.maps.m1.m2a + doc.nesting.lists.shift() + doc.nesting.lists[0][0].pop() + doc.nesting.lists[0][0].push(100) + doc.nesting.mapsinlists[0].foo = "baz" + doc.nesting.mapsinlists[1][0].foo = "bar" + delete doc.nesting.mapsinlists[1] + doc.nesting.listsinmaps.foo.push(4) + doc.nesting.listsinmaps.bar[0][0].baz = "456" + delete doc.nesting.listsinmaps.bar + }) + assert.deepStrictEqual(s1, { nesting: { + maps: { m1: { m2: { foo: "bar", baz: { xxx: "123" } } }, m1a: "123" }, + lists: [ [ [ 3, 4, 5, 100 ], 7 ] ], + mapsinlists: [ { foo: "baz" } ], + listsinmaps: { foo: [1, 2, 3, 4] } + }}) + }) + + it('should handle replacement of the entire list', () => { + s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'soba', 'ramen']) + s1 = Automerge.change(s1, doc => doc.japaneseNoodles = doc.noodles.slice()) + s1 = Automerge.change(s1, doc => doc.noodles = ['wonton', 'pho']) + assert.deepStrictEqual(s1, { + noodles: ['wonton', 'pho'], + japaneseNoodles: ['udon', 'soba', 'ramen'] + }) + assert.deepStrictEqual(s1.noodles, ['wonton', 'pho']) + assert.strictEqual(s1.noodles[0], 'wonton') + 
assert.strictEqual(s1.noodles[1], 'pho') + assert.strictEqual(s1.noodles[2], undefined) + assert.strictEqual(s1.noodles.length, 2) + }) + + it('should allow assignment to change the type of a list element', () => { + s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'soba', 'ramen']) + assert.deepStrictEqual(s1.noodles, ['udon', 'soba', 'ramen']) + s1 = Automerge.change(s1, doc => doc.noodles[1] = {type: 'soba', options: ['hot', 'cold']}) + assert.deepStrictEqual(s1.noodles, ['udon', {type: 'soba', options: ['hot', 'cold']}, 'ramen']) + s1 = Automerge.change(s1, doc => doc.noodles[1] = ['hot soba', 'cold soba']) + assert.deepStrictEqual(s1.noodles, ['udon', ['hot soba', 'cold soba'], 'ramen']) + s1 = Automerge.change(s1, doc => doc.noodles[1] = 'soba is the best') + assert.deepStrictEqual(s1.noodles, ['udon', 'soba is the best', 'ramen']) + }) + + it('should allow list creation and assignment in the same change callback', () => { + s1 = Automerge.change(Automerge.init(), doc => { + doc.letters = ['a', 'b', 'c'] + doc.letters[1] = 'd' + }) + assert.strictEqual(s1.letters[1], 'd') + }) + + it('should allow adding and removing list elements in the same change callback', () => { + s1 = Automerge.change(Automerge.init(), doc => doc.noodles = []) + s1 = Automerge.change(s1, doc => { + doc.noodles.push('udon') + doc.noodles.deleteAt(0) + }) + assert.deepStrictEqual(s1, {noodles: []}) + // do the add-remove cycle twice, test for #151 (https://github.com/automerge/automerge/issues/151) + s1 = Automerge.change(s1, doc => { + doc.noodles.push('soba') + doc.noodles.deleteAt(0) + }) + assert.deepStrictEqual(s1, {noodles: []}) + }) + + it('should handle arbitrary-depth nesting', () => { + s1 = Automerge.change(s1, doc => doc.maze = [[[[[[[['noodles', ['here']]]]]]]]]) + s1 = Automerge.change(s1, doc => doc.maze[0][0][0][0][0][0][0][1].unshift('found')) + assert.deepStrictEqual(s1.maze, [[[[[[[['noodles', ['found', 'here']]]]]]]]]) + 
assert.deepStrictEqual(s1.maze[0][0][0][0][0][0][0][1][1], 'here') + s2 = Automerge.load(Automerge.save(s1)) + assert.deepStrictEqual(s1,s2) + }) + + it('should not allow several references to the same list object', () => { + s1 = Automerge.change(s1, doc => doc.list = []) + assert.throws(() => { + Automerge.change(s1, doc => { doc.x = doc.list }) + }, /Cannot create a reference to an existing document object/) + assert.throws(() => { + Automerge.change(s1, doc => { doc.x = s1.list }) + }, /Cannot create a reference to an existing document object/) + assert.throws(() => { + Automerge.change(s1, doc => { doc.x = []; doc.y = doc.x }) + }, /Cannot create a reference to an existing document object/) + }) + }) + + describe('counters', () => { + // counter + it('should allow deleting counters from maps', () => { + const s1 = Automerge.change(Automerge.init(), doc => doc.birds = {wrens: new Automerge.Counter(1)}) + const s2 = Automerge.change(s1, doc => doc.birds.wrens.increment(2)) + const s3 = Automerge.change(s2, doc => delete doc.birds.wrens) + assert.deepStrictEqual(s2, {birds: {wrens: new Automerge.Counter(3)}}) + assert.deepStrictEqual(s3, {birds: {}}) + }) + + // counter + /* + it('should not allow deleting counters from lists', () => { + const s1 = Automerge.change(Automerge.init(), doc => doc.recordings = [new Automerge.Counter(1)]) + const s2 = Automerge.change(s1, doc => doc.recordings[0].increment(2)) + assert.deepStrictEqual(s2, {recordings: [new Automerge.Counter(3)]}) + assert.throws(() => { Automerge.change(s2, doc => doc.recordings.deleteAt(0)) }, /Unsupported operation/) + }) + */ + }) + }) + + describe('concurrent use', () => { + let s1, s2, s3 + beforeEach(() => { + s1 = Automerge.init() + s2 = Automerge.init() + s3 = Automerge.init() + }) + + it('should merge concurrent updates of different properties', () => { + s1 = Automerge.change(s1, doc => doc.foo = 'bar') + s2 = Automerge.change(s2, doc => doc.hello = 'world') + s3 = Automerge.merge(s1, s2) + 
assert.strictEqual(s3.foo, 'bar') + assert.strictEqual(s3.hello, 'world') + assert.deepStrictEqual(s3, {foo: 'bar', hello: 'world'}) + assert.strictEqual(Automerge.getConflicts(s3, 'foo'), undefined) + assert.strictEqual(Automerge.getConflicts(s3, 'hello'), undefined) + s4 = Automerge.load(Automerge.save(s3)) + assert.deepEqual(s3,s4) + }) + + it('should add concurrent increments of the same property', () => { + s1 = Automerge.change(s1, doc => doc.counter = new Automerge.Counter()) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.counter.increment()) + s2 = Automerge.change(s2, doc => doc.counter.increment(2)) + s3 = Automerge.merge(s1, s2) + assert.strictEqual(s1.counter.value, 1) + assert.strictEqual(s2.counter.value, 2) + assert.strictEqual(s3.counter.value, 3) + assert.strictEqual(Automerge.getConflicts(s3, 'counter'), undefined) + s4 = Automerge.load(Automerge.save(s3)) + assert.deepEqual(s3,s4) + }) + + it('should add increments only to the values they precede', () => { + s1 = Automerge.change(s1, doc => doc.counter = new Automerge.Counter(0)) + s1 = Automerge.change(s1, doc => doc.counter.increment()) + s2 = Automerge.change(s2, doc => doc.counter = new Automerge.Counter(100)) + s2 = Automerge.change(s2, doc => doc.counter.increment(3)) + s3 = Automerge.merge(s1, s2) + if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) { + assert.deepStrictEqual(s3, {counter: new Automerge.Counter(1)}) + } else { + assert.deepStrictEqual(s3, {counter: new Automerge.Counter(103)}) + } + assert.deepStrictEqual(Automerge.getConflicts(s3, 'counter'), { + [`1@${Automerge.getActorId(s1)}`]: new Automerge.Counter(1), + [`1@${Automerge.getActorId(s2)}`]: new Automerge.Counter(103) + }) + s4 = Automerge.load(Automerge.save(s3)) + assert.deepEqual(s3,s4) + }) + + it('should detect concurrent updates of the same field', () => { + s1 = Automerge.change(s1, doc => doc.field = 'one') + s2 = Automerge.change(s2, doc => doc.field = 'two') + s3 = 
Automerge.merge(s1, s2) + if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) { + assert.deepStrictEqual(s3, {field: 'one'}) + } else { + assert.deepStrictEqual(s3, {field: 'two'}) + } + assert.deepStrictEqual(Automerge.getConflicts(s3, 'field'), { + [`1@${Automerge.getActorId(s1)}`]: 'one', + [`1@${Automerge.getActorId(s2)}`]: 'two' + }) + }) + + it('should detect concurrent updates of the same list element', () => { + s1 = Automerge.change(s1, doc => doc.birds = ['finch']) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.birds[0] = 'greenfinch') + s2 = Automerge.change(s2, doc => doc.birds[0] = 'goldfinch') + s3 = Automerge.merge(s1, s2) + if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) { + assert.deepStrictEqual(s3.birds, ['greenfinch']) + } else { + assert.deepStrictEqual(s3.birds, ['goldfinch']) + } + assert.deepStrictEqual(Automerge.getConflicts(s3.birds, 0), { + [`3@${Automerge.getActorId(s1)}`]: 'greenfinch', + [`3@${Automerge.getActorId(s2)}`]: 'goldfinch' + }) + }) + + it('should handle assignment conflicts of different types', () => { + s1 = Automerge.change(s1, doc => doc.field = 'string') + s2 = Automerge.change(s2, doc => doc.field = ['list']) + s3 = Automerge.change(s3, doc => doc.field = {thing: 'map'}) + s1 = Automerge.merge(Automerge.merge(s1, s2), s3) + assertEqualsOneOf(s1.field, 'string', ['list'], {thing: 'map'}) + assert.deepStrictEqual(Automerge.getConflicts(s1, 'field'), { + [`1@${Automerge.getActorId(s1)}`]: 'string', + [`1@${Automerge.getActorId(s2)}`]: ['list'], + [`1@${Automerge.getActorId(s3)}`]: {thing: 'map'} + }) + }) + + it('should handle changes within a conflicting map field', () => { + s1 = Automerge.change(s1, doc => doc.field = 'string') + s2 = Automerge.change(s2, doc => doc.field = {}) + s2 = Automerge.change(s2, doc => doc.field.innerKey = 42) + s3 = Automerge.merge(s1, s2) + assertEqualsOneOf(s3.field, 'string', {innerKey: 42}) + assert.deepStrictEqual(Automerge.getConflicts(s3, 'field'), 
{ + [`1@${Automerge.getActorId(s1)}`]: 'string', + [`1@${Automerge.getActorId(s2)}`]: {innerKey: 42} + }) + }) + + it('should handle changes within a conflicting list element', () => { + s1 = Automerge.change(s1, doc => doc.list = ['hello']) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.list[0] = {map1: true}) + s1 = Automerge.change(s1, doc => doc.list[0].key = 1) + s2 = Automerge.change(s2, doc => doc.list[0] = {map2: true}) + s2 = Automerge.change(s2, doc => doc.list[0].key = 2) + s3 = Automerge.merge(s1, s2) + if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) { + assert.deepStrictEqual(s3.list, [{map1: true, key: 1}]) + } else { + assert.deepStrictEqual(s3.list, [{map2: true, key: 2}]) + } + assert.deepStrictEqual(Automerge.getConflicts(s3.list, 0), { + [`3@${Automerge.getActorId(s1)}`]: {map1: true, key: 1}, + [`3@${Automerge.getActorId(s2)}`]: {map2: true, key: 2} + }) + }) + + it('should not merge concurrently assigned nested maps', () => { + s1 = Automerge.change(s1, doc => doc.config = {background: 'blue'}) + s2 = Automerge.change(s2, doc => doc.config = {logo_url: 'logo.png'}) + s3 = Automerge.merge(s1, s2) + assertEqualsOneOf(s3.config, {background: 'blue'}, {logo_url: 'logo.png'}) + assert.deepStrictEqual(Automerge.getConflicts(s3, 'config'), { + [`1@${Automerge.getActorId(s1)}`]: {background: 'blue'}, + [`1@${Automerge.getActorId(s2)}`]: {logo_url: 'logo.png'} + }) + }) + + it('should clear conflicts after assigning a new value', () => { + s1 = Automerge.change(s1, doc => doc.field = 'one') + s2 = Automerge.change(s2, doc => doc.field = 'two') + s3 = Automerge.merge(s1, s2) + s3 = Automerge.change(s3, doc => doc.field = 'three') + assert.deepStrictEqual(s3, {field: 'three'}) + assert.strictEqual(Automerge.getConflicts(s3, 'field'), undefined) + s2 = Automerge.merge(s2, s3) + assert.deepStrictEqual(s2, {field: 'three'}) + assert.strictEqual(Automerge.getConflicts(s2, 'field'), undefined) + }) + + it('should handle 
concurrent insertions at different list positions', () => { + s1 = Automerge.change(s1, doc => doc.list = ['one', 'three']) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.list.splice(1, 0, 'two')) + s2 = Automerge.change(s2, doc => doc.list.push('four')) + s3 = Automerge.merge(s1, s2) + assert.deepStrictEqual(s3, {list: ['one', 'two', 'three', 'four']}) + assert.strictEqual(Automerge.getConflicts(s3, 'list'), undefined) + }) + + it('should handle concurrent insertions at the same list position', () => { + s1 = Automerge.change(s1, doc => doc.birds = ['parakeet']) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.birds.push('starling')) + s2 = Automerge.change(s2, doc => doc.birds.push('chaffinch')) + s3 = Automerge.merge(s1, s2) + assertEqualsOneOf(s3.birds, ['parakeet', 'starling', 'chaffinch'], ['parakeet', 'chaffinch', 'starling']) + s2 = Automerge.merge(s2, s3) + assert.deepStrictEqual(s2, s3) + }) + + it('should handle concurrent assignment and deletion of a map entry', () => { + // Add-wins semantics + s1 = Automerge.change(s1, doc => doc.bestBird = 'robin') + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => delete doc.bestBird) + s2 = Automerge.change(s2, doc => doc.bestBird = 'magpie') + s3 = Automerge.merge(s1, s2) + assert.deepStrictEqual(s1, {}) + assert.deepStrictEqual(s2, {bestBird: 'magpie'}) + assert.deepStrictEqual(s3, {bestBird: 'magpie'}) + assert.strictEqual(Automerge.getConflicts(s3, 'bestBird'), undefined) + }) + + it('should handle concurrent assignment and deletion of a list element', () => { + // Concurrent assignment ressurects a deleted list element. 
Perhaps a little + // surprising, but consistent with add-wins semantics of maps (see test above) + s1 = Automerge.change(s1, doc => doc.birds = ['blackbird', 'thrush', 'goldfinch']) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.birds[1] = 'starling') + s2 = Automerge.change(s2, doc => doc.birds.splice(1, 1)) + s3 = Automerge.merge(s1, s2) + assert.deepStrictEqual(s1.birds, ['blackbird', 'starling', 'goldfinch']) + assert.deepStrictEqual(s2.birds, ['blackbird', 'goldfinch']) + assert.deepStrictEqual(s3.birds, ['blackbird', 'starling', 'goldfinch']) + s4 = Automerge.load(Automerge.save(s3)) + assert.deepStrictEqual(s3, s4); + }) + + it('should handle insertion after a deleted list element', () => { + s1 = Automerge.change(s1, doc => doc.birds = ['blackbird', 'thrush', 'goldfinch']) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.birds.splice(1, 2)) + s2 = Automerge.change(s2, doc => doc.birds.splice(2, 0, 'starling')) + s3 = Automerge.merge(s1, s2) + assert.deepStrictEqual(s3, {birds: ['blackbird', 'starling']}) + assert.deepStrictEqual(Automerge.merge(s2, s3), {birds: ['blackbird', 'starling']}) + }) + + it('should handle concurrent deletion of the same element', () => { + s1 = Automerge.change(s1, doc => doc.birds = ['albatross', 'buzzard', 'cormorant']) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.birds.deleteAt(1)) // buzzard + s2 = Automerge.change(s2, doc => doc.birds.deleteAt(1)) // buzzard + s3 = Automerge.merge(s1, s2) + assert.deepStrictEqual(s3.birds, ['albatross', 'cormorant']) + }) + + it('should handle concurrent deletion of different elements', () => { + s1 = Automerge.change(s1, doc => doc.birds = ['albatross', 'buzzard', 'cormorant']) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.birds.deleteAt(0)) // albatross + s2 = Automerge.change(s2, doc => doc.birds.deleteAt(1)) // buzzard + s3 = Automerge.merge(s1, s2) + assert.deepStrictEqual(s3.birds, 
['cormorant']) + }) + + it('should handle concurrent updates at different levels of the tree', () => { + // A delete higher up in the tree overrides an update in a subtree + s1 = Automerge.change(s1, doc => doc.animals = {birds: {pink: 'flamingo', black: 'starling'}, mammals: ['badger']}) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.animals.birds.brown = 'sparrow') + s2 = Automerge.change(s2, doc => delete doc.animals.birds) + s3 = Automerge.merge(s1, s2) + assert.deepStrictEqual(s1.animals, { + birds: { + pink: 'flamingo', brown: 'sparrow', black: 'starling' + }, + mammals: ['badger'] + }) + assert.deepStrictEqual(s2.animals, {mammals: ['badger']}) + assert.deepStrictEqual(s3.animals, {mammals: ['badger']}) + }) + + it('should handle updates of concurrently deleted objects', () => { + s1 = Automerge.change(s1, doc => doc.birds = {blackbird: {feathers: 'black'}}) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => delete doc.birds.blackbird) + s2 = Automerge.change(s2, doc => doc.birds.blackbird.beak = 'orange') + s3 = Automerge.merge(s1, s2) + assert.deepStrictEqual(s1, {birds: {}}) + }) + + it('should not interleave sequence insertions at the same position', () => { + s1 = Automerge.change(s1, doc => doc.wisdom = []) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.wisdom.push('to', 'be', 'is', 'to', 'do')) + s2 = Automerge.change(s2, doc => doc.wisdom.push('to', 'do', 'is', 'to', 'be')) + s3 = Automerge.merge(s1, s2) + assertEqualsOneOf(s3.wisdom, + ['to', 'be', 'is', 'to', 'do', 'to', 'do', 'is', 'to', 'be'], + ['to', 'do', 'is', 'to', 'be', 'to', 'be', 'is', 'to', 'do']) + // In case you're wondering: http://quoteinvestigator.com/2013/09/16/do-be-do/ + }) + + describe('multiple insertions at the same list position', () => { + it('should handle insertion by greater actor ID', () => { + s1 = Automerge.init('aaaa') + s2 = Automerge.init('bbbb') + s1 = Automerge.change(s1, doc => doc.list = ['two']) + s2 
= Automerge.merge(s2, s1) + s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, 'one')) + assert.deepStrictEqual(s2.list, ['one', 'two']) + }) + + it('should handle insertion by lesser actor ID', () => { + s1 = Automerge.init('bbbb') + s2 = Automerge.init('aaaa') + s1 = Automerge.change(s1, doc => doc.list = ['two']) + s2 = Automerge.merge(s2, s1) + s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, 'one')) + assert.deepStrictEqual(s2.list, ['one', 'two']) + }) + + it('should handle insertion regardless of actor ID', () => { + s1 = Automerge.change(s1, doc => doc.list = ['two']) + s2 = Automerge.merge(s2, s1) + s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, 'one')) + assert.deepStrictEqual(s2.list, ['one', 'two']) + }) + + it('should make insertion order consistent with causality', () => { + s1 = Automerge.change(s1, doc => doc.list = ['four']) + s2 = Automerge.merge(s2, s1) + s2 = Automerge.change(s2, doc => doc.list.unshift('three')) + s1 = Automerge.merge(s1, s2) + s1 = Automerge.change(s1, doc => doc.list.unshift('two')) + s2 = Automerge.merge(s2, s1) + s2 = Automerge.change(s2, doc => doc.list.unshift('one')) + assert.deepStrictEqual(s2.list, ['one', 'two', 'three', 'four']) + }) + }) + }) + + describe('saving and loading', () => { + it('should save and restore an empty document', () => { + let s = Automerge.load(Automerge.save(Automerge.init())) + assert.deepStrictEqual(s, {}) + }) + + it('should generate a new random actor ID', () => { + let s1 = Automerge.init() + let s2 = Automerge.load(Automerge.save(s1)) + assert.strictEqual(UUID_PATTERN.test(Automerge.getActorId(s1).toString()), true) + assert.strictEqual(UUID_PATTERN.test(Automerge.getActorId(s2).toString()), true) + assert.notEqual(Automerge.getActorId(s1), Automerge.getActorId(s2)) + }) + + it('should allow a custom actor ID to be set', () => { + let s = Automerge.load(Automerge.save(Automerge.init()), '333333') + assert.strictEqual(Automerge.getActorId(s), '333333') + }) + + 
it('should reconstitute complex datatypes', () => { + let s1 = Automerge.change(Automerge.init(), doc => doc.todos = [{title: 'water plants', done: false}]) + let s2 = Automerge.load(Automerge.save(s1)) + assert.deepStrictEqual(s2, {todos: [{title: 'water plants', done: false}]}) + }) + + it('should save and load maps with @ symbols in the keys', () => { + let s1 = Automerge.change(Automerge.init(), doc => doc["123@4567"] = "hello") + let s2 = Automerge.load(Automerge.save(s1)) + assert.deepStrictEqual(s2, { "123@4567": "hello" }) + }) + + it('should reconstitute conflicts', () => { + let s1 = Automerge.change(Automerge.init('111111'), doc => doc.x = 3) + let s2 = Automerge.change(Automerge.init('222222'), doc => doc.x = 5) + s1 = Automerge.merge(s1, s2) + let s3 = Automerge.load(Automerge.save(s1)) + assert.strictEqual(s1.x, 5) + assert.strictEqual(s3.x, 5) + assert.deepStrictEqual(Automerge.getConflicts(s1, 'x'), {'1@111111': 3, '1@222222': 5}) + assert.deepStrictEqual(Automerge.getConflicts(s3, 'x'), {'1@111111': 3, '1@222222': 5}) + }) + + it('should reconstitute element ID counters', () => { + const s1 = Automerge.init('01234567') + const s2 = Automerge.change(s1, doc => doc.list = ['a']) + const listId = Automerge.getObjectId(s2.list) + const changes12 = Automerge.getAllChanges(s2).map(decodeChange) + assert.deepStrictEqual(changes12, [{ + hash: changes12[0].hash, actor: '01234567', seq: 1, startOp: 1, + time: changes12[0].time, message: '', deps: [], ops: [ + {obj: '_root', action: 'makeList', key: 'list', insert: false, pred: []}, + {obj: listId, action: 'set', elemId: '_head', insert: true, value: 'a', pred: []} + ] + }]) + const s3 = Automerge.change(s2, doc => doc.list.deleteAt(0)) + const s4 = Automerge.load(Automerge.save(s3), '01234567') + const s5 = Automerge.change(s4, doc => doc.list.push('b')) + const changes45 = Automerge.getAllChanges(s5).map(decodeChange) + assert.deepStrictEqual(s5, {list: ['b']}) + assert.deepStrictEqual(changes45[2], { + 
hash: changes45[2].hash, actor: '01234567', seq: 3, startOp: 4, + time: changes45[2].time, message: '', deps: [changes45[1].hash], ops: [ + {obj: listId, action: 'set', elemId: '_head', insert: true, value: 'b', pred: []} + ] + }) + }) + + it('should allow a reloaded list to be mutated', () => { + let doc = Automerge.change(Automerge.init(), doc => doc.foo = []) + doc = Automerge.load(Automerge.save(doc)) + doc = Automerge.change(doc, 'add', doc => doc.foo.push(1)) + doc = Automerge.load(Automerge.save(doc)) + assert.deepStrictEqual(doc.foo, [1]) + }) + + it('should reload a document containing deflated columns', () => { + // In this test, the keyCtr column is long enough for deflate compression to kick in, but the + // keyStr column is short. Thus, the deflate bit gets set for keyCtr but not for keyStr. + // When checking whether the columns appear in ascending order, we must ignore the deflate bit. + let doc = Automerge.change(Automerge.init(), doc => { + doc.list = [] + for (let i = 0; i < 200; i++) doc.list.insertAt(Math.floor(Math.random() * i), 'a') + }) + Automerge.load(Automerge.save(doc)) + let expected = [] + for (let i = 0; i < 200; i++) expected.push('a') + assert.deepStrictEqual(doc, {list: expected}) + }) + + /* + it.skip('should call patchCallback if supplied', () => { + const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch']) + const s2 = Automerge.change(s1, doc => doc.birds.push('Chaffinch')) + const callbacks = [], actor = Automerge.getActorId(s1) + const reloaded = Automerge.load(Automerge.save(s2), { + patchCallback(patch, before, after, local) { + callbacks.push({patch, before, after, local}) + } + }) + assert.strictEqual(callbacks.length, 1) + assert.deepStrictEqual(callbacks[0].patch, { + maxOp: 3, deps: [decodeChange(Automerge.getAllChanges(s2)[1]).hash], clock: {[actor]: 2}, pendingChanges: 0, + diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: { + objectId: `1@${actor}`, type: 'list', edits: [ 
+ {action: 'multi-insert', index: 0, elemId: `2@${actor}`, values: ['Goldfinch', 'Chaffinch']} + ] + }}}} + }) + assert.deepStrictEqual(callbacks[0].before, {}) + assert.strictEqual(callbacks[0].after, reloaded) + assert.strictEqual(callbacks[0].local, false) + }) + */ + }) + + describe('history API', () => { + it('should return an empty history for an empty document', () => { + assert.deepStrictEqual(Automerge.getHistory(Automerge.init()), []) + }) + + it('should make past document states accessible', () => { + let s = Automerge.init() + s = Automerge.change(s, doc => doc.config = {background: 'blue'}) + s = Automerge.change(s, doc => doc.birds = ['mallard']) + s = Automerge.change(s, doc => doc.birds.unshift('oystercatcher')) + assert.deepStrictEqual(Automerge.getHistory(s).map(state => state.snapshot), [ + {config: {background: 'blue'}}, + {config: {background: 'blue'}, birds: ['mallard']}, + {config: {background: 'blue'}, birds: ['oystercatcher', 'mallard']} + ]) + }) + + it('should make change messages accessible', () => { + let s = Automerge.init() + s = Automerge.change(s, 'Empty Bookshelf', doc => doc.books = []) + s = Automerge.change(s, 'Add Orwell', doc => doc.books.push('Nineteen Eighty-Four')) + s = Automerge.change(s, 'Add Huxley', doc => doc.books.push('Brave New World')) + assert.deepStrictEqual(s.books, ['Nineteen Eighty-Four', 'Brave New World']) + assert.deepStrictEqual(Automerge.getHistory(s).map(state => state.change.message), + ['Empty Bookshelf', 'Add Orwell', 'Add Huxley']) + }) + }) + + describe('changes API', () => { + it('should return an empty list on an empty document', () => { + let changes = Automerge.getAllChanges(Automerge.init()) + assert.deepStrictEqual(changes, []) + }) + + it('should return an empty list when nothing changed', () => { + let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch']) + assert.deepStrictEqual(Automerge.getChanges(s1, s1), []) + }) + + it('should do nothing when applying an empty list 
of changes', () => { + let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch']) + assert.deepStrictEqual(Automerge.applyChanges(s1, [])[0], s1) + }) + + it('should return all changes when compared to an empty document', () => { + let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch']) + let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch')) + let changes = Automerge.getChanges(Automerge.init(), s2) + assert.strictEqual(changes.length, 2) + }) + + it('should allow a document copy to be reconstructed from scratch', () => { + let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch']) + let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch')) + let changes = Automerge.getAllChanges(s2) + let [s3] = Automerge.applyChanges(Automerge.init(), changes) + assert.deepStrictEqual(s3.birds, ['Chaffinch', 'Bullfinch']) + }) + + it('should return changes since the last given version', () => { + let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch']) + let changes1 = Automerge.getAllChanges(s1) + let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch')) + let changes2 = Automerge.getChanges(s1, s2) + assert.strictEqual(changes1.length, 1) // Add Chaffinch + assert.strictEqual(changes2.length, 1) // Add Bullfinch + }) + + it('should incrementally apply changes since the last given version', () => { + let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch']) + let changes1 = Automerge.getAllChanges(s1) + let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch')) + let changes2 = Automerge.getChanges(s1, s2) + let [s3] = Automerge.applyChanges(Automerge.init(), changes1) + let [s4] = Automerge.applyChanges(s3, changes2) + assert.deepStrictEqual(s3.birds, ['Chaffinch']) + assert.deepStrictEqual(s4.birds, 
['Chaffinch', 'Bullfinch']) + }) + + it('should handle updates to a list element', () => { + let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch', 'Bullfinch']) + let s2 = Automerge.change(s1, doc => doc.birds[0] = 'Goldfinch') + let [s3] = Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2)) + assert.deepStrictEqual(s3.birds, ['Goldfinch', 'Bullfinch']) + assert.strictEqual(Automerge.getConflicts(s3.birds, 0), undefined) + }) + + // TEXT + it('should handle updates to a text object', () => { + let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text('ab')) + let s2 = Automerge.change(s1, doc => doc.text.set(0, 'A')) + let [s3] = Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2)) + assert.deepStrictEqual([...s3.text], ['A', 'b']) + }) + + /* + it.skip('should report missing dependencies', () => { + let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch']) + let s2 = Automerge.merge(Automerge.init(), s1) + s2 = Automerge.change(s2, doc => doc.birds.push('Bullfinch')) + let changes = Automerge.getAllChanges(s2) + let [s3, patch] = Automerge.applyChanges(Automerge.init(), [changes[1]]) + assert.deepStrictEqual(s3, {}) + assert.deepStrictEqual(Automerge.Backend.getMissingDeps(Automerge.Frontend.getBackendState(s3)), + decodeChange(changes[1]).deps) + assert.strictEqual(patch.pendingChanges, 1) + ;[s3, patch] = Automerge.applyChanges(s3, [changes[0]]) + assert.deepStrictEqual(s3.birds, ['Chaffinch', 'Bullfinch']) + assert.deepStrictEqual(Automerge.Backend.getMissingDeps(Automerge.Frontend.getBackendState(s3)), []) + assert.strictEqual(patch.pendingChanges, 0) + }) + */ + + it('should report missing dependencies with out-of-order applyChanges', () => { + let s0 = Automerge.init() + let s1 = Automerge.change(s0, doc => doc.test = ['a']) + let changes01 = Automerge.getAllChanges(s1) + let s2 = Automerge.change(s1, doc => doc.test = ['b']) + let changes12 = 
Automerge.getChanges(s1, s2) + let s3 = Automerge.change(s2, doc => doc.test = ['c']) + let changes23 = Automerge.getChanges(s2, s3) + let s4 = Automerge.init() + let [s5] = Automerge.applyChanges(s4, changes23) + let [s6] = Automerge.applyChanges(s5, changes12) +// assert.deepStrictEqual(Automerge.Backend.getMissingDeps(Automerge.Frontend.getBackendState(s6)), [decodeChange(changes01[0]).hash]) + assert.deepStrictEqual(Automerge.getMissingDeps(s6), [decodeChange(changes01[0]).hash]) + }) + + /* + it.skip('should call patchCallback if supplied when applying changes', () => { + const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch']) + const callbacks = [], actor = Automerge.getActorId(s1) + const before = Automerge.init() + const [after, patch] = Automerge.applyChanges(before, Automerge.getAllChanges(s1), { + patchCallback(patch, before, after, local) { + callbacks.push({patch, before, after, local}) + } + }) + assert.strictEqual(callbacks.length, 1) + assert.deepStrictEqual(callbacks[0].patch, { + maxOp: 2, deps: [decodeChange(Automerge.getAllChanges(s1)[0]).hash], clock: {[actor]: 1}, pendingChanges: 0, + diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: { + objectId: `1@${actor}`, type: 'list', edits: [ + {action: 'insert', index: 0, elemId: `2@${actor}`, opId: `2@${actor}`, value: {type: 'value', value: 'Goldfinch'}} + ] + }}}} + }) + assert.strictEqual(callbacks[0].patch, patch) + assert.strictEqual(callbacks[0].before, before) + assert.strictEqual(callbacks[0].after, after) + assert.strictEqual(callbacks[0].local, false) + }) + */ + + /* + it.skip('should merge multiple applied changes into one patch', () => { + const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch']) + const s2 = Automerge.change(s1, doc => doc.birds.push('Chaffinch')) + const patches = [], actor = Automerge.getActorId(s2) + Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2), + {patchCallback: p => 
patches.push(p)}) + assert.deepStrictEqual(patches, [{ + maxOp: 3, deps: [decodeChange(Automerge.getAllChanges(s2)[1]).hash], clock: {[actor]: 2}, pendingChanges: 0, + diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: { + objectId: `1@${actor}`, type: 'list', edits: [ + {action: 'multi-insert', index: 0, elemId: `2@${actor}`, values: ['Goldfinch', 'Chaffinch']} + ] + }}}} + }]) + }) + */ + + /* + it.skip('should call a patchCallback registered on doc initialisation', () => { + const s1 = Automerge.change(Automerge.init(), doc => doc.bird = 'Goldfinch') + const patches = [], actor = Automerge.getActorId(s1) + const before = Automerge.init({patchCallback: p => patches.push(p)}) + Automerge.applyChanges(before, Automerge.getAllChanges(s1)) + assert.deepStrictEqual(patches, [{ + maxOp: 1, deps: [decodeChange(Automerge.getAllChanges(s1)[0]).hash], clock: {[actor]: 1}, pendingChanges: 0, + diffs: {objectId: '_root', type: 'map', props: {bird: {[`1@${actor}`]: {type: 'value', value: 'Goldfinch'}}}} + }]) + }) + */ + }) +}) diff --git a/javascript/test/sync_test.ts b/automerge-js/test/sync_test.js similarity index 54% rename from javascript/test/sync_test.ts rename to automerge-js/test/sync_test.js index 5724985c..86c3b3fd 100644 --- a/javascript/test/sync_test.ts +++ b/automerge-js/test/sync_test.js @@ -1,57 +1,48 @@ -import * as assert from "assert" -import * as Automerge from "../src" -import { BloomFilter } from "./legacy/sync" -import { - decodeSyncMessage, - encodeSyncMessage, - decodeSyncState, - encodeSyncState, - initSyncState, -} from "../src" +const assert = require('assert') +const Automerge = require('..'); +const { BloomFilter } = require('../src/sync') +const { decodeChangeMeta } = require('../src/columnar') +const { decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState, initSyncState } = Automerge + +function inspect(a) { + const util = require("util"); + return util.inspect(a,false,null,true) +} function getHeads(doc) 
{ return Automerge.getHeads(doc) } function getMissingDeps(doc) { - return Automerge.getMissingDeps(doc, []) + return Automerge.getMissingDeps(doc) } -function sync( - a, - b, - aSyncState = initSyncState(), - bSyncState = initSyncState() -) { +function sync(a, b, aSyncState = initSyncState(), bSyncState = initSyncState()) { const MAX_ITER = 10 - let aToBmsg: Automerge.SyncMessage | null = null, - bToAmsg: Automerge.SyncMessage | null = null, - i = 0 + let aToBmsg = null, bToAmsg = null, i = 0 do { - ;[aSyncState, aToBmsg] = Automerge.generateSyncMessage(a, aSyncState) + [aSyncState, aToBmsg] = Automerge.generateSyncMessage(a, aSyncState) ;[bSyncState, bToAmsg] = Automerge.generateSyncMessage(b, bSyncState) if (aToBmsg) { - ;[b, bSyncState] = Automerge.receiveSyncMessage(b, bSyncState, aToBmsg) + [b, bSyncState] = Automerge.receiveSyncMessage(b, bSyncState, aToBmsg) } if (bToAmsg) { - ;[a, aSyncState] = Automerge.receiveSyncMessage(a, aSyncState, bToAmsg) + [a, aSyncState] = Automerge.receiveSyncMessage(a, aSyncState, bToAmsg) } if (i++ > MAX_ITER) { - throw new Error( - `Did not synchronize within ${MAX_ITER} iterations. Do you have a bug causing an infinite loop?` - ) + throw new Error(`Did not synchronize within ${MAX_ITER} iterations. 
Do you have a bug causing an infinite loop?`) } } while (aToBmsg || bToAmsg) return [a, b, aSyncState, bSyncState] } -describe("Data sync protocol", () => { - describe("with docs already in sync", () => { - describe("an empty local doc", () => { - it("should send a sync message implying no local data", () => { +describe('Data sync protocol', () => { + describe('with docs already in sync', () => { + describe('an empty local doc', () => { + it('should send a sync message implying no local data', () => { let n1 = Automerge.init() let s1 = initSyncState() let m1 @@ -65,35 +56,26 @@ describe("Data sync protocol", () => { assert.deepStrictEqual(message.changes, []) }) - it("should not reply if we have no data as well", () => { - let n1 = Automerge.init(), - n2 = Automerge.init() - let s1 = initSyncState(), - s2 = initSyncState() - let m1: Automerge.SyncMessage | null = null, - m2: Automerge.SyncMessage | null = null + it('should not reply if we have no data as well', () => { + let n1 = Automerge.init(), n2 = Automerge.init() + let s1 = initSyncState(), s2 = initSyncState() + let m1 = null, m2 = null ;[s1, m1] = Automerge.generateSyncMessage(n1, s1) - if (m1 != null) { - ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, m1) - } + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, m1) ;[s2, m2] = Automerge.generateSyncMessage(n2, s2) assert.deepStrictEqual(m2, null) }) }) - describe("documents with data", () => { - it("repos with equal heads do not need a reply message", () => { - let n1 = Automerge.init(), - n2 = Automerge.init() - let s1 = initSyncState(), - s2 = initSyncState() - let m1: Automerge.SyncMessage | null = null, - m2: Automerge.SyncMessage | null = null + describe('documents with data', () => { + it('repos with equal heads do not need a reply message', () => { + let n1 = Automerge.init(), n2 = Automerge.init() + let s1 = initSyncState(), s2 = initSyncState() + let m1 = null, m2 = null // make two nodes with the same changes - n1 = Automerge.change(n1, { time: 0 
}, doc => (doc.n = [])) - for (let i = 0; i < 10; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => doc.n.push(i)) + n1 = Automerge.change(n1, {time: 0}, doc => doc.n = []) + for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.n.push(i)) ;[n2] = Automerge.applyChanges(n2, Automerge.getAllChanges(n1)) assert.deepStrictEqual(n1, n2) @@ -102,96 +84,82 @@ describe("Data sync protocol", () => { assert.deepStrictEqual(s1.lastSentHeads, getHeads(n1)) // heads are equal so this message should be null - if (m1 != null) { - ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, m1) - } + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, m1) ;[s2, m2] = Automerge.generateSyncMessage(n2, s2) assert.strictEqual(m2, null) }) - it("n1 should offer all changes to n2 when starting from nothing", () => { - let n1 = Automerge.init(), - n2 = Automerge.init() + it('n1 should offer all changes to n2 when starting from nothing', () => { + let n1 = Automerge.init(), n2 = Automerge.init() // make changes for n1 that n2 should request - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.n = [])) - for (let i = 0; i < 10; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => doc.n.push(i)) + n1 = Automerge.change(n1, {time: 0}, doc => doc.n = []) + for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.n.push(i)) assert.notDeepStrictEqual(n1, n2) const [after1, after2] = sync(n1, n2) assert.deepStrictEqual(after1, after2) }) - it("should sync peers where one has commits the other does not", () => { - let n1 = Automerge.init(), - n2 = Automerge.init() + it('should sync peers where one has commits the other does not', () => { + let n1 = Automerge.init(), n2 = Automerge.init() // make changes for n1 that n2 should request - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.n = [])) - for (let i = 0; i < 10; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => doc.n.push(i)) + n1 = Automerge.change(n1, {time: 0}, doc => doc.n = []) + for (let i = 0; i < 
10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.n.push(i)) assert.notDeepStrictEqual(n1, n2) ;[n1, n2] = sync(n1, n2) assert.deepStrictEqual(n1, n2) }) - it("should work with prior sync state", () => { + it('should work with prior sync state', () => { // create & synchronize two nodes - let n1 = Automerge.init(), - n2 = Automerge.init() - let s1 = initSyncState(), - s2 = initSyncState() + let n1 = Automerge.init(), n2 = Automerge.init() + let s1 = initSyncState(), s2 = initSyncState() - for (let i = 0; i < 5; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) ;[n1, n2, s1, s2] = sync(n1, n2) // modify the first node further - for (let i = 5; i < 10; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 5; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) assert.notDeepStrictEqual(n1, n2) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) assert.deepStrictEqual(n1, n2) }) - it("should not generate messages once synced", () => { + it('should not generate messages once synced', () => { // create & synchronize two nodes - let n1 = Automerge.init("abc123"), - n2 = Automerge.init("def456") - let s1 = initSyncState(), - s2 = initSyncState() + let n1 = Automerge.init('abc123'), n2 = Automerge.init('def456') + let s1 = initSyncState(), s2 = initSyncState() - let message - for (let i = 0; i < 5; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) - for (let i = 0; i < 5; i++) - n2 = Automerge.change(n2, { time: 0 }, doc => (doc.y = i)) + let message, patch + for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 5; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.y = i) - // n1 reports what it has - ;[s1, message] = Automerge.generateSyncMessage(n1, s1) + // n1 reports what it has + ;[s1, message] = Automerge.generateSyncMessage(n1, s1, n1) // n2 receives that 
message and sends changes along with what it has - ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, message) + ;[n2, s2, patch] = Automerge.receiveSyncMessage(n2, s2, message) ;[s2, message] = Automerge.generateSyncMessage(n2, s2) assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) //assert.deepStrictEqual(patch, null) // no changes arrived // n1 receives the changes and replies with the changes it now knows n2 needs - ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, message) + ;[n1, s1, patch] = Automerge.receiveSyncMessage(n1, s1, message) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) //assert.deepStrictEqual(patch.diffs.props, {y: {'5@def456': {type: 'value', value: 4, datatype: 'int'}}}) // changes arrived // n2 applies the changes and sends confirmation ending the exchange - ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, message) + ;[n2, s2, patch] = Automerge.receiveSyncMessage(n2, s2, message) ;[s2, message] = Automerge.generateSyncMessage(n2, s2) //assert.deepStrictEqual(patch.diffs.props, {x: {'5@abc123': {type: 'value', value: 4, datatype: 'int'}}}) // changes arrived // n1 receives the message and has nothing more to say - ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, message) + ;[n1, s1, patch] = Automerge.receiveSyncMessage(n1, s1, message) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) assert.deepStrictEqual(message, null) //assert.deepStrictEqual(patch, null) // no changes arrived @@ -201,38 +169,28 @@ describe("Data sync protocol", () => { assert.deepStrictEqual(message, null) }) - it("should allow simultaneous messages during synchronization", () => { + it('should allow simultaneous messages during synchronization', () => { // create & synchronize two nodes - let n1 = Automerge.init("abc123"), - n2 = Automerge.init("def456") - let s1 = initSyncState(), - s2 = initSyncState() - for (let i = 0; i < 5; i++) - n1 = Automerge.change(n1, { time: 0 
}, doc => (doc.x = i)) - for (let i = 0; i < 5; i++) - n2 = Automerge.change(n2, { time: 0 }, doc => (doc.y = i)) - const head1 = getHeads(n1)[0], - head2 = getHeads(n2)[0] + let n1 = Automerge.init('abc123'), n2 = Automerge.init('def456') + let s1 = initSyncState(), s2 = initSyncState() + for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 5; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.y = i) + const head1 = getHeads(n1)[0], head2 = getHeads(n2)[0] // both sides report what they have but have no shared peer state let msg1to2, msg2to1 ;[s1, msg1to2] = Automerge.generateSyncMessage(n1, s1) ;[s2, msg2to1] = Automerge.generateSyncMessage(n2, s2) assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) - assert.deepStrictEqual( - decodeSyncMessage(msg1to2).have[0].lastSync.length, - 0 - ) + assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync.length, 0) assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) - assert.deepStrictEqual( - decodeSyncMessage(msg2to1).have[0].lastSync.length, - 0 - ) + assert.deepStrictEqual(decodeSyncMessage(msg2to1).have[0].lastSync.length, 0) // n1 and n2 receives that message and update sync state but make no patch - ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, msg2to1) + let patch1, patch2 + ;[n1, s1, patch1] = Automerge.receiveSyncMessage(n1, s1, msg2to1) //assert.deepStrictEqual(patch1, null) // no changes arrived, so no patch - ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, msg1to2) + ;[n2, s2, patch2] = Automerge.receiveSyncMessage(n2, s2, msg1to2) //assert.deepStrictEqual(patch2, null) // no changes arrived, so no patch // now both reply with their local changes the other lacks @@ -243,14 +201,15 @@ describe("Data sync protocol", () => { assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 5) // both should now apply the changes and update the frontend - ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, msg2to1) + 
;[n1, s1, patch1] = Automerge.receiveSyncMessage(n1, s1, msg2to1) assert.deepStrictEqual(getMissingDeps(n1), []) //assert.notDeepStrictEqual(patch1, null) - assert.deepStrictEqual(n1, { x: 4, y: 4 }) - ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, msg1to2) + assert.deepStrictEqual(n1, {x: 4, y: 4}) + + ;[n2, s2, patch2] = Automerge.receiveSyncMessage(n2, s2, msg1to2) assert.deepStrictEqual(getMissingDeps(n2), []) //assert.notDeepStrictEqual(patch2, null) - assert.deepStrictEqual(n2, { x: 4, y: 4 }) + assert.deepStrictEqual(n2, {x: 4, y: 4}) // The response acknowledges the changes received, and sends no further changes ;[s1, msg1to2] = Automerge.generateSyncMessage(n1, s1) @@ -259,8 +218,8 @@ describe("Data sync protocol", () => { assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) // After receiving acknowledgements, their shared heads should be equal - ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, msg2to1) - ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, msg1to2) + ;[n1, s1, patch1] = Automerge.receiveSyncMessage(n1, s1, msg2to1) + ;[n2, s2, patch2] = Automerge.receiveSyncMessage(n2, s2, msg1to2) assert.deepStrictEqual(s1.sharedHeads, [head1, head2].sort()) assert.deepStrictEqual(s2.sharedHeads, [head1, head2].sort()) //assert.deepStrictEqual(patch1, null) @@ -273,56 +232,41 @@ describe("Data sync protocol", () => { assert.deepStrictEqual(msg2to1, null) // If we make one more change, and start another sync, its lastSync should be updated - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 5)) + n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 5) ;[s1, msg1to2] = Automerge.generateSyncMessage(n1, s1) - assert.deepStrictEqual( - decodeSyncMessage(msg1to2).have[0].lastSync, - [head1, head2].sort() - ) + assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync, [head1, head2].sort()) }) - it("should assume sent changes were recieved until we hear otherwise", () => { - let n1 = Automerge.init("01234567"), - n2 = 
Automerge.init("89abcdef") - let s1 = initSyncState(), - message: Automerge.SyncMessage | null = null + it('should assume sent changes were recieved until we hear otherwise', () => { + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), message = null - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.items = [])) - ;[n1, n2, s1] = sync(n1, n2) + n1 = Automerge.change(n1, {time: 0}, doc => doc.items = []) + ;[n1, n2, s1, s2 ] = sync(n1, n2) - n1 = Automerge.change(n1, { time: 0 }, doc => doc.items.push("x")) + n1 = Automerge.change(n1, {time: 0}, doc => doc.items.push('x')) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) - if (message != null) { - assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) - } + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) - n1 = Automerge.change(n1, { time: 0 }, doc => doc.items.push("y")) + n1 = Automerge.change(n1, {time: 0}, doc => doc.items.push('y')) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) - if (message != null) { - assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) - } + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) - n1 = Automerge.change(n1, { time: 0 }, doc => doc.items.push("z")) + n1 = Automerge.change(n1, {time: 0}, doc => doc.items.push('z')) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) - if (message != null) { - assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) - } + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) }) - it("should work regardless of who initiates the exchange", () => { + it('should work regardless of who initiates the exchange', () => { // create & synchronize two nodes - let n1 = Automerge.init(), - n2 = Automerge.init() - let s1 = initSyncState(), - s2 = initSyncState() + let n1 = Automerge.init(), n2 = Automerge.init() + let s1 = initSyncState(), s2 = initSyncState() - for (let i = 0; i < 5; i++) - n1 = 
Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) // modify the first node further - for (let i = 5; i < 10; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 5; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) assert.notDeepStrictEqual(n1, n2) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) @@ -331,24 +275,21 @@ describe("Data sync protocol", () => { }) }) - describe("with diverged documents", () => { - it("should work without prior sync state", () => { + describe('with diverged documents', () => { + it('should work without prior sync state', () => { // Scenario: ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ // `-- c15 <-- c16 <-- c17 // lastSync is undefined. // create two peers both with divergent commits - let n1 = Automerge.init("01234567"), - n2 = Automerge.init("89abcdef") - for (let i = 0; i < 10; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + ;[n1, n2] = sync(n1, n2) - for (let i = 10; i < 15; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) - for (let i = 15; i < 18; i++) - n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = i)) + for (let i = 10; i < 15; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 15; i < 18; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.x = i) assert.notDeepStrictEqual(n1, n2) ;[n1, n2] = sync(n1, n2) @@ -356,26 +297,21 @@ describe("Data sync protocol", () => { assert.deepStrictEqual(n1, n2) }) - it("should work with prior sync state", () => { + it('should work with prior sync state', () => { // Scenario: ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 // c0 <-- c1 <-- c2 <-- 
c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ // `-- c15 <-- c16 <-- c17 // lastSync is c9. // create two peers both with divergent commits - let n1 = Automerge.init("01234567"), - n2 = Automerge.init("89abcdef") - let s1 = initSyncState(), - s2 = initSyncState() + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() - for (let i = 0; i < 10; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) - for (let i = 10; i < 15; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) - for (let i = 15; i < 18; i++) - n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = i)) + for (let i = 10; i < 15; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 15; i < 18; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.x = i) s1 = decodeSyncState(encodeSyncState(s1)) s2 = decodeSyncState(encodeSyncState(s2)) @@ -385,33 +321,27 @@ describe("Data sync protocol", () => { assert.deepStrictEqual(n1, n2) }) - it("should ensure non-empty state after sync", () => { - let n1 = Automerge.init("01234567"), - n2 = Automerge.init("89abcdef") - let s1 = initSyncState(), - s2 = initSyncState() + it('should ensure non-empty state after sync', () => { + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() - for (let i = 0; i < 3; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) assert.deepStrictEqual(s1.sharedHeads, getHeads(n1)) assert.deepStrictEqual(s2.sharedHeads, getHeads(n1)) }) - it("should re-sync after one node crashed with data loss", () => { + it('should re-sync after one node crashed with data loss', () => { // Scenario: (r) (n2) (n1) // c0 
<-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 // n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync is c2. // we want to successfully sync (n1) with (r), even though (n1) believes it's talking to (n2) - let n1 = Automerge.init("01234567"), - n2 = Automerge.init("89abcdef") - let s1 = initSyncState(), - s2 = initSyncState() + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() // n1 makes three changes, which we sync to n2 - for (let i = 0; i < 3; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) // save a copy of n2 as "r" to simulate recovering from crash @@ -419,43 +349,38 @@ describe("Data sync protocol", () => { ;[r, rSyncState] = [Automerge.clone(n2), s2] // sync another few commits - for (let i = 3; i < 6; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 3; i < 6; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) // everyone should be on the same page here assert.deepStrictEqual(getHeads(n1), getHeads(n2)) assert.deepStrictEqual(n1, n2) // now make a few more changes, then attempt to sync the fully-up-to-date n1 with the confused r - for (let i = 6; i < 9; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 6; i < 9; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) s1 = decodeSyncState(encodeSyncState(s1)) rSyncState = decodeSyncState(encodeSyncState(rSyncState)) assert.notDeepStrictEqual(getHeads(n1), getHeads(r)) assert.notDeepStrictEqual(n1, r) - assert.deepStrictEqual(n1, { x: 8 }) - assert.deepStrictEqual(r, { x: 2 }) + assert.deepStrictEqual(n1, {x: 8}) + assert.deepStrictEqual(r, {x: 2}) ;[n1, r, s1, rSyncState] = sync(n1, r, s1, rSyncState) assert.deepStrictEqual(getHeads(n1), getHeads(r)) 
assert.deepStrictEqual(n1, r) }) - it("should resync after one node experiences data loss without disconnecting", () => { - let n1 = Automerge.init("01234567"), - n2 = Automerge.init("89abcdef") - let s1 = initSyncState(), - s2 = initSyncState() + it('should resync after one node experiences data loss without disconnecting', () => { + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() // n1 makes three changes, which we sync to n2 - for (let i = 0; i < 3; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) assert.deepStrictEqual(getHeads(n1), getHeads(n2)) assert.deepStrictEqual(n1, n2) - let n2AfterDataLoss = Automerge.init("89abcdef") + let n2AfterDataLoss = Automerge.init('89abcdef') // "n2" now has no data, but n1 still thinks it does. Note we don't do // decodeSyncState(encodeSyncState(s1)) in order to simulate data loss without disconnecting @@ -464,35 +389,29 @@ describe("Data sync protocol", () => { assert.deepStrictEqual(n1, n2) }) - it("should handle changes concurrent to the last sync heads", () => { - let n1 = Automerge.init("01234567"), - n2 = Automerge.init("89abcdef"), - n3 = Automerge.init("fedcba98") - let s12 = initSyncState(), - s21 = initSyncState(), - s23 = initSyncState(), - s32 = initSyncState() + it('should handle changes concurrent to the last sync heads', () => { + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('fedcba98') + let s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState() // Change 1 is known to all three nodes - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 1)) + n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 1) ;[n1, n2, s12, s21] = sync(n1, n2, s12, s21) ;[n2, n3, s23, s32] = sync(n2, n3, s23, s32) // Change 2 is known to n1 and n2 
- n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 2)) + n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 2) ;[n1, n2, s12, s21] = sync(n1, n2, s12, s21) // Each of the three nodes makes one change (changes 3, 4, 5) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 3)) - n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = 4)) - n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = 5)) + n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 3) + n2 = Automerge.change(n2, {time: 0}, doc => doc.x = 4) + n3 = Automerge.change(n3, {time: 0}, doc => doc.x = 5) // Apply n3's latest change to n2. If running in Node, turn the Uint8Array into a Buffer, to // simulate transmission over a network (see https://github.com/automerge/automerge/pull/362) let change = Automerge.getLastLocalChange(n3) - if (typeof Buffer === "function" && change != null) - change = Buffer.from(change) - ;[n2] = (change && Automerge.applyChanges(n2, [change])) || [n2] + if (typeof Buffer === 'function') change = Buffer.from(change) + ;[n2] = Automerge.applyChanges(n2, [change]) // Now sync n1 and n2. 
n3's change is concurrent to n1 and n2's last sync heads ;[n1, n2, s12, s21] = sync(n1, n2, s12, s21) @@ -500,14 +419,12 @@ describe("Data sync protocol", () => { assert.deepStrictEqual(n1, n2) }) - it("should handle histories with lots of branching and merging", () => { - let n1 = Automerge.init("01234567"), - n2 = Automerge.init("89abcdef"), - n3 = Automerge.init("fedcba98") - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 0)) - ;[n2] = Automerge.applyChanges(n2, [Automerge.getLastLocalChange(n1)!]) - ;[n3] = Automerge.applyChanges(n3, [Automerge.getLastLocalChange(n1)!]) - n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = 1)) + it('should handle histories with lots of branching and merging', () => { + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('fedcba98') + n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 0) + ;[n2] = Automerge.applyChanges(n2, [Automerge.getLastLocalChange(n1)]) + ;[n3] = Automerge.applyChanges(n3, [Automerge.getLastLocalChange(n1)]) + n3 = Automerge.change(n3, {time: 0}, doc => doc.x = 1) // - n1c1 <------ n1c2 <------ n1c3 <-- etc. 
<-- n1c20 <------ n1c21 // / \/ \/ \/ @@ -516,29 +433,29 @@ describe("Data sync protocol", () => { // \ / // ---------------------------------------------- n3c1 <----- for (let i = 1; i < 20; i++) { - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.n1 = i)) - n2 = Automerge.change(n2, { time: 0 }, doc => (doc.n2 = i)) + n1 = Automerge.change(n1, {time: 0}, doc => doc.n1 = i) + n2 = Automerge.change(n2, {time: 0}, doc => doc.n2 = i) const change1 = Automerge.getLastLocalChange(n1) const change2 = Automerge.getLastLocalChange(n2) - ;[n1] = Automerge.applyChanges(n1, [change2!]) - ;[n2] = Automerge.applyChanges(n2, [change1!]) + ;[n1] = Automerge.applyChanges(n1, [change2]) + ;[n2] = Automerge.applyChanges(n2, [change1]) } - let s1 = initSyncState(), - s2 = initSyncState() + let s1 = initSyncState(), s2 = initSyncState() ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) // Having n3's last change concurrent to the last sync heads forces us into the slower code path - ;[n2] = Automerge.applyChanges(n2, [Automerge.getLastLocalChange(n3)!]) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.n1 = "final")) - n2 = Automerge.change(n2, { time: 0 }, doc => (doc.n2 = "final")) + ;[n2] = Automerge.applyChanges(n2, [Automerge.getLastLocalChange(n3)]) + n1 = Automerge.change(n1, {time: 0}, doc => doc.n1 = 'final') + n2 = Automerge.change(n2, {time: 0}, doc => doc.n2 = 'final') + ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) assert.deepStrictEqual(getHeads(n1), getHeads(n2)) assert.deepStrictEqual(n1, n2) }) }) - describe("with false positives", () => { + describe('with false positives', () => { // NOTE: the following tests use brute force to search for Bloom filter false positives. The // tests make change hashes deterministic by fixing the actorId and change timestamp to be // constants. 
The loop that searches for false positives is then initialised such that it finds @@ -547,36 +464,22 @@ describe("Data sync protocol", () => { // then the false positive will no longer be the first loop iteration. The tests should still // pass because the loop will run until a false positive is found, but they will be slower. - it("should handle a false-positive head", () => { + it('should handle a false-positive head', () => { // Scenario: ,-- n1 // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ // `-- n2 // where n2 is a false positive in the Bloom filter containing {n1}. // lastSync is c9. - let n1 = Automerge.init("01234567"), - n2 = Automerge.init("89abcdef") - let s1 = initSyncState(), - s2 = initSyncState() + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() - for (let i = 0; i < 10; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) ;[n1, n2, s1, s2] = sync(n1, n2) - for (let i = 1; ; i++) { - // search for false positive; see comment above - const n1up = Automerge.change( - Automerge.clone(n1, { actor: "01234567" }), - { time: 0 }, - doc => (doc.x = `${i} @ n1`) - ) - const n2up = Automerge.change( - Automerge.clone(n2, { actor: "89abcdef" }), - { time: 0 }, - doc => (doc.x = `${i} @ n2`) - ) + for (let i = 1; ; i++) { // search for false positive; see comment above + const n1up = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) + const n2up = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) if (new BloomFilter(getHeads(n1up)).containsHash(getHeads(n2up)[0])) { - n1 = n1up - n2 = n2up - break + n1 = n1up; n2 = n2up; break } } const allHeads = [...getHeads(n1), ...getHeads(n2)].sort() @@ -587,7 +490,7 @@ describe("Data sync protocol", () => { 
assert.deepStrictEqual(getHeads(n2), allHeads) }) - describe("with a false-positive dependency", () => { + describe('with a false-positive dependency', () => { let n1, n2, s1, s2, n1hash2, n2hash2 beforeEach(() => { @@ -596,57 +499,34 @@ describe("Data sync protocol", () => { // `-- n2c1 <-- n2c2 // where n2c1 is a false positive in the Bloom filter containing {n1c1, n1c2}. // lastSync is c9. - n1 = Automerge.init("01234567") - n2 = Automerge.init("89abcdef") + n1 = Automerge.init('01234567') + n2 = Automerge.init('89abcdef') s1 = initSyncState() s2 = initSyncState() - for (let i = 0; i < 10; i++) - n1 = Automerge.change(n1, { time: 0 }, (doc: any) => (doc.x = i)) + for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) ;[n1, n2, s1, s2] = sync(n1, n2) let n1hash1, n2hash1 - for (let i = 29; ; i++) { - // search for false positive; see comment above - const n1us1 = Automerge.change( - Automerge.clone(n1, { actor: "01234567" }), - { time: 0 }, - (doc: any) => (doc.x = `${i} @ n1`) - ) - const n2us1 = Automerge.change( - Automerge.clone(n2, { actor: "89abcdef" }), - { time: 0 }, - (doc: any) => (doc.x = `${i} @ n2`) - ) - n1hash1 = getHeads(n1us1)[0] - n2hash1 = getHeads(n2us1)[0] - const n1us2 = Automerge.change( - n1us1, - { time: 0 }, - (doc: any) => (doc.x = "final @ n1") - ) - const n2us2 = Automerge.change( - n2us1, - { time: 0 }, - (doc: any) => (doc.x = "final @ n2") - ) - n1hash2 = getHeads(n1us2)[0] - n2hash2 = getHeads(n2us2)[0] + for (let i = 29; ; i++) { // search for false positive; see comment above + const n1us1 = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) + const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) + n1hash1 = getHeads(n1us1)[0]; n2hash1 = getHeads(n2us1)[0] + const n1us2 = Automerge.change(n1us1, {time: 0}, doc => doc.x = 'final @ n1') + const n2us2 = Automerge.change(n2us1, {time: 0}, doc => 
doc.x = 'final @ n2') + n1hash2 = getHeads(n1us2)[0]; n2hash2 = getHeads(n2us2)[0] if (new BloomFilter([n1hash1, n1hash2]).containsHash(n2hash1)) { - n1 = n1us2 - n2 = n2us2 - break + n1 = n1us2; n2 = n2us2; break } } }) - it("should sync two nodes without connection reset", () => { - ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) + it('should sync two nodes without connection reset', () => { + [n1, n2, s1, s2] = sync(n1, n2, s1, s2) assert.deepStrictEqual(getHeads(n1), [n1hash2, n2hash2].sort()) assert.deepStrictEqual(getHeads(n2), [n1hash2, n2hash2].sort()) }) - // FIXME - this has a periodic failure - it("should sync two nodes with connection reset", () => { + it('should sync two nodes with connection reset', () => { s1 = decodeSyncState(encodeSyncState(s1)) s2 = decodeSyncState(encodeSyncState(s2)) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) @@ -654,7 +534,7 @@ describe("Data sync protocol", () => { assert.deepStrictEqual(getHeads(n2), [n1hash2, n2hash2].sort()) }) - it.skip("should sync three nodes", () => { + it('should sync three nodes', () => { s1 = decodeSyncState(encodeSyncState(s1)) s2 = decodeSyncState(encodeSyncState(s2)) @@ -674,73 +554,37 @@ describe("Data sync protocol", () => { assert.strictEqual(decodeSyncMessage(m2).changes.length, 1) // only n2c2; change n2c1 is not sent // n3 is a node that doesn't have the missing change. 
Nevertheless n1 is going to ask n3 for it - let n3 = Automerge.init("fedcba98"), - s13 = initSyncState(), - s31 = initSyncState() + let n3 = Automerge.init('fedcba98'), s13 = initSyncState(), s31 = initSyncState() ;[n1, n3, s13, s31] = sync(n1, n3, s13, s31) assert.deepStrictEqual(getHeads(n1), [n1hash2]) assert.deepStrictEqual(getHeads(n3), [n1hash2]) }) }) - it("should not require an additional request when a false-positive depends on a true-negative", () => { + it('should not require an additional request when a false-positive depends on a true-negative', () => { // Scenario: ,-- n1c1 <-- n1c2 <-- n1c3 // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+ // `-- n2c1 <-- n2c2 <-- n2c3 // where n2c2 is a false positive in the Bloom filter containing {n1c1, n1c2, n1c3}. // lastSync is c4. - let n1 = Automerge.init("01234567"), - n2 = Automerge.init("89abcdef") - let s1 = initSyncState(), - s2 = initSyncState() + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() let n1hash3, n2hash3 - for (let i = 0; i < 5; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) ;[n1, n2, s1, s2] = sync(n1, n2) - for (let i = 86; ; i++) { - // search for false positive; see comment above - const n1us1 = Automerge.change( - Automerge.clone(n1, { actor: "01234567" }), - { time: 0 }, - doc => (doc.x = `${i} @ n1`) - ) - const n2us1 = Automerge.change( - Automerge.clone(n2, { actor: "89abcdef" }), - { time: 0 }, - doc => (doc.x = `${i} @ n2`) - ) + for (let i = 86; ; i++) { // search for false positive; see comment above + const n1us1 = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) + const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) const n1hash1 = getHeads(n1us1)[0] - const n1us2 = Automerge.change( - n1us1, - { time: 0 }, - 
doc => (doc.x = `${i + 1} @ n1`) - ) - const n2us2 = Automerge.change( - n2us1, - { time: 0 }, - doc => (doc.x = `${i + 1} @ n2`) - ) - const n1hash2 = getHeads(n1us2)[0], - n2hash2 = getHeads(n2us2)[0] - const n1up3 = Automerge.change( - n1us2, - { time: 0 }, - doc => (doc.x = "final @ n1") - ) - const n2up3 = Automerge.change( - n2us2, - { time: 0 }, - doc => (doc.x = "final @ n2") - ) - n1hash3 = getHeads(n1up3)[0] - n2hash3 = getHeads(n2up3)[0] - if ( - new BloomFilter([n1hash1, n1hash2, n1hash3]).containsHash(n2hash2) - ) { - n1 = n1up3 - n2 = n2up3 - break + const n1us2 = Automerge.change(n1us1, {time: 0}, doc => doc.x = `${i + 1} @ n1`) + const n2us2 = Automerge.change(n2us1, {time: 0}, doc => doc.x = `${i + 1} @ n2`) + const n1hash2 = getHeads(n1us2)[0], n2hash2 = getHeads(n2us2)[0] + const n1up3 = Automerge.change(n1us2, {time: 0}, doc => doc.x = 'final @ n1') + const n2up3 = Automerge.change(n2us2, {time: 0}, doc => doc.x = 'final @ n2') + n1hash3 = getHeads(n1up3)[0]; n2hash3 = getHeads(n2up3)[0] + if (new BloomFilter([n1hash1, n1hash2, n1hash3]).containsHash(n2hash2)) { + n1 = n1up3; n2 = n2up3; break } } const bothHeads = [n1hash3, n2hash3].sort() @@ -751,46 +595,31 @@ describe("Data sync protocol", () => { assert.deepStrictEqual(getHeads(n2), bothHeads) }) - it("should handle chains of false-positives", () => { + it('should handle chains of false-positives', () => { // Scenario: ,-- c5 // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+ // `-- n2c1 <-- n2c2 <-- n2c3 // where n2c1 and n2c2 are both false positives in the Bloom filter containing {c5}. // lastSync is c4. 
- let n1 = Automerge.init("01234567"), - n2 = Automerge.init("89abcdef") - let s1 = initSyncState(), - s2 = initSyncState() + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() - for (let i = 0; i < 5; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 5)) - for (let i = 2; ; i++) { - // search for false positive; see comment above - const n2us1 = Automerge.change( - Automerge.clone(n2, { actor: "89abcdef" }), - { time: 0 }, - doc => (doc.x = `${i} @ n2`) - ) + n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 5) + for (let i = 2; ; i++) { // search for false positive; see comment above + const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) if (new BloomFilter(getHeads(n1)).containsHash(getHeads(n2us1)[0])) { - n2 = n2us1 - break + n2 = n2us1; break } } - for (let i = 141; ; i++) { - // search for false positive; see comment above - const n2us2 = Automerge.change( - Automerge.clone(n2, { actor: "89abcdef" }), - { time: 0 }, - doc => (doc.x = `${i} again`) - ) + for (let i = 141; ; i++) { // search for false positive; see comment above + const n2us2 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} again`) if (new BloomFilter(getHeads(n1)).containsHash(getHeads(n2us2)[0])) { - n2 = n2us2 - break + n2 = n2us2; break } } - n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = "final @ n2")) + n2 = Automerge.change(n2, {time: 0}, doc => doc.x = 'final @ n2') const allHeads = [...getHeads(n1), ...getHeads(n2)].sort() s1 = decodeSyncState(encodeSyncState(s1)) @@ -800,46 +629,32 @@ describe("Data sync protocol", () => { assert.deepStrictEqual(getHeads(n2), allHeads) }) - it("should allow the 
false-positive hash to be explicitly requested", () => { + it('should allow the false-positive hash to be explicitly requested', () => { // Scenario: ,-- n1 // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ // `-- n2 // where n2 causes a false positive in the Bloom filter containing {n1}. - let n1 = Automerge.init("01234567"), - n2 = Automerge.init("89abcdef") - let s1 = initSyncState(), - s2 = initSyncState() + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() let message - for (let i = 0; i < 10; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) ;[n1, n2, s1, s2] = sync(n1, n2) s1 = decodeSyncState(encodeSyncState(s1)) s2 = decodeSyncState(encodeSyncState(s2)) - for (let i = 1; ; i++) { - // brute-force search for false positive; see comment above - const n1up = Automerge.change( - Automerge.clone(n1, { actor: "01234567" }), - { time: 0 }, - doc => (doc.x = `${i} @ n1`) - ) - const n2up = Automerge.change( - Automerge.clone(n2, { actor: "89abcdef" }), - { time: 0 }, - doc => (doc.x = `${i} @ n2`) - ) + for (let i = 1; ; i++) { // brute-force search for false positive; see comment above + const n1up = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) + const n2up = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) // check if the bloom filter on n2 will believe n1 already has a particular hash // this will mean n2 won't offer that data to n2 by receiving a sync message from n1 if (new BloomFilter(getHeads(n1up)).containsHash(getHeads(n2up)[0])) { - n1 = n1up - n2 = n2up - break + n1 = n1up; n2 = n2up; break } } // n1 creates a sync message for n2 with an ill-fated bloom - ;[s1, message] = Automerge.generateSyncMessage(n1, s1) + [s1, message] = Automerge.generateSyncMessage(n1, 
s1) assert.strictEqual(decodeSyncMessage(message).changes.length, 0) // n2 receives it and DOESN'T send a change back @@ -863,42 +678,32 @@ describe("Data sync protocol", () => { }) }) - describe("protocol features", () => { - it("should allow multiple Bloom filters", () => { + describe('protocol features', () => { + it('should allow multiple Bloom filters', () => { // Scenario: ,-- n1c1 <-- n1c2 <-- n1c3 // c0 <-- c1 <-- c2 <-+--- n2c1 <-- n2c2 <-- n2c3 // `-- n3c1 <-- n3c2 <-- n3c3 // n1 has {c0, c1, c2, n1c1, n1c2, n1c3, n2c1, n2c2}; // n2 has {c0, c1, c2, n1c1, n1c2, n2c1, n2c2, n2c3}; // n3 has {c0, c1, c2, n3c1, n3c2, n3c3}. - let n1 = Automerge.init("01234567"), - n2 = Automerge.init("89abcdef"), - n3 = Automerge.init("76543210") - let s13 = initSyncState() - let s32 = initSyncState(), - s31 = initSyncState(), - s23 = initSyncState() + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('76543210') + let s13 = initSyncState(), s12 = initSyncState(), s21 = initSyncState() + let s32 = initSyncState(), s31 = initSyncState(), s23 = initSyncState() let message1, message2, message3 - for (let i = 0; i < 3; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) - // sync all 3 nodes - ;[n1, n2, ,] = sync(n1, n2) // eslint-disable-line no-unused-vars -- kept for consistency + for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + // sync all 3 nodes + ;[n1, n2, s12, s21] = sync(n1, n2) // eslint-disable-line no-unused-vars -- kept for consistency ;[n1, n3, s13, s31] = sync(n1, n3) ;[n3, n2, s32, s23] = sync(n3, n2) - for (let i = 0; i < 2; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = `${i} @ n1`)) - for (let i = 0; i < 2; i++) - n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = `${i} @ n2`)) + for (let i = 0; i < 2; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = `${i} @ n1`) + for (let i = 0; i < 2; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.x = 
`${i} @ n2`) ;[n1] = Automerge.applyChanges(n1, Automerge.getAllChanges(n2)) ;[n2] = Automerge.applyChanges(n2, Automerge.getAllChanges(n1)) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = `3 @ n1`)) - n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = `3 @ n2`)) - for (let i = 0; i < 3; i++) - n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = `${i} @ n3`)) - const n1c3 = getHeads(n1)[0], - n2c3 = getHeads(n2)[0], - n3c3 = getHeads(n3)[0] + n1 = Automerge.change(n1, {time: 0}, doc => doc.x = `3 @ n1`) + n2 = Automerge.change(n2, {time: 0}, doc => doc.x = `3 @ n2`) + for (let i = 0; i < 3; i++) n3 = Automerge.change(n3, {time: 0}, doc => doc.x = `${i} @ n3`) + const n1c3 = getHeads(n1)[0], n2c3 = getHeads(n2)[0], n3c3 = getHeads(n3)[0] s13 = decodeSyncState(encodeSyncState(s13)) s31 = decodeSyncState(encodeSyncState(s31)) s23 = decodeSyncState(encodeSyncState(s23)) @@ -920,11 +725,7 @@ describe("Data sync protocol", () => { const modifiedMessage = decodeSyncMessage(message3) modifiedMessage.have.push(decodeSyncMessage(message1).have[0]) assert.strictEqual(modifiedMessage.changes.length, 0) - ;[n2, s23] = Automerge.receiveSyncMessage( - n2, - s23, - encodeSyncMessage(modifiedMessage) - ) + ;[n2, s23] = Automerge.receiveSyncMessage(n2, s23, encodeSyncMessage(modifiedMessage)) // n2 replies to n3, sending only n2c3 (the one change that n2 has but n1 doesn't) ;[s23, message2] = Automerge.generateSyncMessage(n2, s23) @@ -938,76 +739,53 @@ describe("Data sync protocol", () => { assert.deepStrictEqual(getHeads(n3), [n1c3, n2c3, n3c3].sort()) }) - it("should allow any change to be requested", () => { - let n1 = Automerge.init("01234567"), - n2 = Automerge.init("89abcdef") - let s1 = initSyncState(), - s2 = initSyncState() - let message: Automerge.SyncMessage | null = null + it('should allow any change to be requested', () => { + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + let 
message = null - for (let i = 0; i < 3; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) const lastSync = getHeads(n1) - for (let i = 3; i < 6; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 3; i < 6; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + ;[n1, n2, s1, s2] = sync(n1, n2) s1.lastSentHeads = [] // force generateSyncMessage to return a message even though nothing changed ;[s1, message] = Automerge.generateSyncMessage(n1, s1) - const modMsg = decodeSyncMessage(message!) + const modMsg = decodeSyncMessage(message) modMsg.need = lastSync // re-request change 2 - ;[n2, s2] = Automerge.receiveSyncMessage( - n2, - s2, - encodeSyncMessage(modMsg) - ) + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, encodeSyncMessage(modMsg)) ;[s1, message] = Automerge.generateSyncMessage(n2, s2) - assert.strictEqual(decodeSyncMessage(message!).changes.length, 1) - assert.strictEqual( - Automerge.decodeChange(decodeSyncMessage(message!).changes[0]).hash, - lastSync[0] - ) + assert.strictEqual(decodeSyncMessage(message).changes.length, 1) + assert.strictEqual(Automerge.decodeChange(decodeSyncMessage(message).changes[0]).hash, lastSync[0]) }) - it("should ignore requests for a nonexistent change", () => { - let n1 = Automerge.init("01234567"), - n2 = Automerge.init("89abcdef") - let s1 = initSyncState(), - s2 = initSyncState() - let message: Automerge.SyncMessage | null = null + it('should ignore requests for a nonexistent change', () => { + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + let message = null - for (let i = 0; i < 3; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) ;[n2] = Automerge.applyChanges(n2, Automerge.getAllChanges(n1)) ;[s1, message] = 
Automerge.generateSyncMessage(n1, s1) - const decoded = Automerge.decodeSyncMessage(message!) - decoded.need = [ - "0000000000000000000000000000000000000000000000000000000000000000", - ] - message = Automerge.encodeSyncMessage(decoded) - ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, message!) + message.need = ['0000000000000000000000000000000000000000000000000000000000000000'] + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, message) ;[s2, message] = Automerge.generateSyncMessage(n2, s2) assert.strictEqual(message, null) }) - it("should allow a subset of changes to be sent", () => { + it('should allow a subset of changes to be sent', () => { // ,-- c1 <-- c2 // c0 <-+ // `-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 - let n1 = Automerge.init("01234567"), - n2 = Automerge.init("89abcdef"), - n3 = Automerge.init("76543210") - let s1 = initSyncState(), - s2 = initSyncState() + let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('76543210') + let s1 = initSyncState(), s2 = initSyncState() let msg, decodedMsg - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 0)) + n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 0) n3 = Automerge.merge(n3, n1) - for (let i = 1; i <= 2; i++) - n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) // n1 has {c0, c1, c2} - for (let i = 3; i <= 4; i++) - n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = i)) // n3 has {c0, c3, c4} - const c2 = getHeads(n1)[0], - c4 = getHeads(n3)[0] + for (let i = 1; i <= 2; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) // n1 has {c0, c1, c2} + for (let i = 3; i <= 4; i++) n3 = Automerge.change(n3, {time: 0}, doc => doc.x = i) // n3 has {c0, c3, c4} + const c2 = getHeads(n1)[0], c4 = getHeads(n3)[0] n2 = Automerge.merge(n2, n3) // n2 has {c0, c3, c4} // Sync n1 and n2, so their shared heads are {c2, c4} @@ -1018,13 +796,11 @@ describe("Data sync protocol", () => { assert.deepStrictEqual(s2.sharedHeads, [c2, c4].sort()) // n2 and n3 
apply {c5, c6, c7, c8} - n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = 5)) + n3 = Automerge.change(n3, {time: 0}, doc => doc.x = 5) const change5 = Automerge.getLastLocalChange(n3) - n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = 6)) - const change6 = Automerge.getLastLocalChange(n3), - c6 = getHeads(n3)[0] - for (let i = 7; i <= 8; i++) - n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = i)) + n3 = Automerge.change(n3, {time: 0}, doc => doc.x = 6) + const change6 = Automerge.getLastLocalChange(n3), c6 = getHeads(n3)[0] + for (let i = 7; i <= 8; i++) n3 = Automerge.change(n3, {time: 0}, doc => doc.x = i) const c8 = getHeads(n3)[0] n2 = Automerge.merge(n2, n3) @@ -1035,10 +811,9 @@ describe("Data sync protocol", () => { decodedMsg = decodeSyncMessage(msg) decodedMsg.changes = [change5, change6] msg = encodeSyncMessage(decodedMsg) - const sentHashes = [ - Automerge.decodeChange(change5!).hash, - Automerge.decodeChange(change6!).hash, - ] + const sentHashes = {} + sentHashes[decodeChangeMeta(change5, true).hash] = true + sentHashes[decodeChangeMeta(change6, true).hash] = true s2.sentHashes = sentHashes ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, msg) assert.deepStrictEqual(s1.sharedHeads, [c2, c6].sort()) @@ -1047,10 +822,7 @@ describe("Data sync protocol", () => { ;[s1, msg] = Automerge.generateSyncMessage(n1, s1) ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, msg) assert.deepStrictEqual(decodeSyncMessage(msg).need, [c8]) - assert.deepStrictEqual( - decodeSyncMessage(msg).have[0].lastSync, - [c2, c6].sort() - ) + assert.deepStrictEqual(decodeSyncMessage(msg).have[0].lastSync, [c2, c6].sort()) assert.deepStrictEqual(s1.sharedHeads, [c2, c6].sort()) assert.deepStrictEqual(s2.sharedHeads, [c2, c6].sort()) diff --git a/automerge-js/test/text_test.js b/automerge-js/test/text_test.js new file mode 100644 index 00000000..57e8884e --- /dev/null +++ b/automerge-js/test/text_test.js @@ -0,0 +1,697 @@ +const assert = require('assert') +const 
Automerge = require('..') +const { assertEqualsOneOf } = require('./helpers') + +function attributeStateToAttributes(accumulatedAttributes) { + const attributes = {} + Object.entries(accumulatedAttributes).forEach(([key, values]) => { + if (values.length && values[0] !== null) { + attributes[key] = values[0] + } + }) + return attributes +} + +function isEquivalent(a, b) { + const aProps = Object.getOwnPropertyNames(a) + const bProps = Object.getOwnPropertyNames(b) + + if (aProps.length != bProps.length) { + return false + } + + for (let i = 0; i < aProps.length; i++) { + const propName = aProps[i] + if (a[propName] !== b[propName]) { + return false + } + } + + return true +} + +function isControlMarker(pseudoCharacter) { + return typeof pseudoCharacter === 'object' && pseudoCharacter.attributes +} + +function opFrom(text, attributes) { + let op = { insert: text } + if (Object.keys(attributes).length > 0) { + op.attributes = attributes + } + return op +} + +function accumulateAttributes(span, accumulatedAttributes) { + Object.entries(span).forEach(([key, value]) => { + if (!accumulatedAttributes[key]) { + accumulatedAttributes[key] = [] + } + if (value === null) { + if (accumulatedAttributes[key].length === 0 || accumulatedAttributes[key] === null) { + accumulatedAttributes[key].unshift(null) + } else { + accumulatedAttributes[key].shift() + } + } else { + if (accumulatedAttributes[key][0] === null) { + accumulatedAttributes[key].shift() + } else { + accumulatedAttributes[key].unshift(value) + } + } + }) + return accumulatedAttributes +} + +function automergeTextToDeltaDoc(text) { + let ops = [] + let controlState = {} + let currentString = "" + let attributes = {} + text.toSpans().forEach((span) => { + if (isControlMarker(span)) { + controlState = accumulateAttributes(span.attributes, controlState) + } else { + let next = attributeStateToAttributes(controlState) + + // if the next span has the same calculated attributes as the current span + // don't bother 
outputting it as a separate span, just let it ride + if (typeof span === 'string' && isEquivalent(next, attributes)) { + currentString = currentString + span + return + } + + if (currentString) { + ops.push(opFrom(currentString, attributes)) + } + + // If we've got a string, we might be able to concatenate it to another + // same-attributed-string, so remember it and go to the next iteration. + if (typeof span === 'string') { + currentString = span + attributes = next + } else { + // otherwise we have an embed "character" and should output it immediately. + // embeds are always one-"character" in length. + ops.push(opFrom(span, next)) + currentString = '' + attributes = {} + } + } + }) + + // at the end, flush any accumulated string out + if (currentString) { + ops.push(opFrom(currentString, attributes)) + } + + return ops +} + +function inverseAttributes(attributes) { + let invertedAttributes = {} + Object.keys(attributes).forEach((key) => { + invertedAttributes[key] = null + }) + return invertedAttributes +} + +function applyDeleteOp(text, offset, op) { + let length = op.delete + while (length > 0) { + if (isControlMarker(text.get(offset))) { + offset += 1 + } else { + // we need to not delete control characters, but we do delete embed characters + text.deleteAt(offset, 1) + length -= 1 + } + } + return [text, offset] +} + +function applyRetainOp(text, offset, op) { + let length = op.retain + + if (op.attributes) { + text.insertAt(offset, { attributes: op.attributes }) + offset += 1 + } + + while (length > 0) { + const char = text.get(offset) + offset += 1 + if (!isControlMarker(char)) { + length -= 1 + } + } + + if (op.attributes) { + text.insertAt(offset, { attributes: inverseAttributes(op.attributes) }) + offset += 1 + } + + return [text, offset] +} + + +function applyInsertOp(text, offset, op) { + let originalOffset = offset + + if (typeof op.insert === 'string') { + text.insertAt(offset, ...op.insert.split('')) + offset += op.insert.length + } else { + // we 
have an embed or something similar + text.insertAt(offset, op.insert) + offset += 1 + } + + if (op.attributes) { + text.insertAt(originalOffset, { attributes: op.attributes }) + offset += 1 + } + if (op.attributes) { + text.insertAt(offset, { attributes: inverseAttributes(op.attributes) }) + offset += 1 + } + return [text, offset] +} + +// XXX: uhhhhh, why can't I pass in text? +function applyDeltaDocToAutomergeText(delta, doc) { + let offset = 0 + + delta.forEach(op => { + if (op.retain) { + [, offset] = applyRetainOp(doc.text, offset, op) + } else if (op.delete) { + [, offset] = applyDeleteOp(doc.text, offset, op) + } else if (op.insert) { + [, offset] = applyInsertOp(doc.text, offset, op) + } + }) +} + +describe('Automerge.Text', () => { + let s1, s2 + beforeEach(() => { + s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text()) + s2 = Automerge.merge(Automerge.init(), s1) + }) + + it('should support insertion', () => { + s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a')) + assert.strictEqual(s1.text.length, 1) + assert.strictEqual(s1.text.get(0), 'a') + assert.strictEqual(s1.text.toString(), 'a') + //assert.strictEqual(s1.text.getElemId(0), `2@${Automerge.getActorId(s1)}`) + }) + + it('should support deletion', () => { + s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', 'b', 'c')) + s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 1)) + assert.strictEqual(s1.text.length, 2) + assert.strictEqual(s1.text.get(0), 'a') + assert.strictEqual(s1.text.get(1), 'c') + assert.strictEqual(s1.text.toString(), 'ac') + }) + + it("should support implicit and explicit deletion", () => { + s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a", "b", "c")) + s1 = Automerge.change(s1, doc => doc.text.deleteAt(1)) + s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 0)) + assert.strictEqual(s1.text.length, 2) + assert.strictEqual(s1.text.get(0), "a") + assert.strictEqual(s1.text.get(1), "c") + 
assert.strictEqual(s1.text.toString(), "ac") + }) + + it('should handle concurrent insertion', () => { + s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', 'b', 'c')) + s2 = Automerge.change(s2, doc => doc.text.insertAt(0, 'x', 'y', 'z')) + s1 = Automerge.merge(s1, s2) + assert.strictEqual(s1.text.length, 6) + assertEqualsOneOf(s1.text.toString(), 'abcxyz', 'xyzabc') + assertEqualsOneOf(s1.text.join(''), 'abcxyz', 'xyzabc') + }) + + it('should handle text and other ops in the same change', () => { + s1 = Automerge.change(s1, doc => { + doc.foo = 'bar' + doc.text.insertAt(0, 'a') + }) + assert.strictEqual(s1.foo, 'bar') + assert.strictEqual(s1.text.toString(), 'a') + assert.strictEqual(s1.text.join(''), 'a') + }) + + it('should serialize to JSON as a simple string', () => { + s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', '"', 'b')) + assert.strictEqual(JSON.stringify(s1), '{"text":"a\\"b"}') + }) + + it('should allow modification before an object is assigned to a document', () => { + s1 = Automerge.change(Automerge.init(), doc => { + const text = new Automerge.Text() + text.insertAt(0, 'a', 'b', 'c', 'd') + text.deleteAt(2) + doc.text = text + assert.strictEqual(doc.text.toString(), 'abd') + assert.strictEqual(doc.text.join(''), 'abd') + }) + assert.strictEqual(s1.text.toString(), 'abd') + assert.strictEqual(s1.text.join(''), 'abd') + }) + + it('should allow modification after an object is assigned to a document', () => { + s1 = Automerge.change(Automerge.init(), doc => { + const text = new Automerge.Text() + doc.text = text + doc.text.insertAt(0, 'a', 'b', 'c', 'd') + doc.text.deleteAt(2) + assert.strictEqual(doc.text.toString(), 'abd') + assert.strictEqual(doc.text.join(''), 'abd') + }) + assert.strictEqual(s1.text.join(''), 'abd') + }) + + it('should not allow modification outside of a change callback', () => { + assert.throws(() => s1.text.insertAt(0, 'a'), /object cannot be modified outside of a change block/) + }) + + describe('with 
initial value', () => { + it('should accept a string as initial value', () => { + let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text('init')) + assert.strictEqual(s1.text.length, 4) + assert.strictEqual(s1.text.get(0), 'i') + assert.strictEqual(s1.text.get(1), 'n') + assert.strictEqual(s1.text.get(2), 'i') + assert.strictEqual(s1.text.get(3), 't') + assert.strictEqual(s1.text.toString(), 'init') + }) + + it('should accept an array as initial value', () => { + let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text(['i', 'n', 'i', 't'])) + assert.strictEqual(s1.text.length, 4) + assert.strictEqual(s1.text.get(0), 'i') + assert.strictEqual(s1.text.get(1), 'n') + assert.strictEqual(s1.text.get(2), 'i') + assert.strictEqual(s1.text.get(3), 't') + assert.strictEqual(s1.text.toString(), 'init') + }) + + it('should initialize text in Automerge.from()', () => { + let s1 = Automerge.from({text: new Automerge.Text('init')}) + assert.strictEqual(s1.text.length, 4) + assert.strictEqual(s1.text.get(0), 'i') + assert.strictEqual(s1.text.get(1), 'n') + assert.strictEqual(s1.text.get(2), 'i') + assert.strictEqual(s1.text.get(3), 't') + assert.strictEqual(s1.text.toString(), 'init') + }) + + it('should encode the initial value as a change', () => { + const s1 = Automerge.from({text: new Automerge.Text('init')}) + const changes = Automerge.getAllChanges(s1) + assert.strictEqual(changes.length, 1) + const [s2] = Automerge.applyChanges(Automerge.init(), changes) + assert.strictEqual(s2.text instanceof Automerge.Text, true) + assert.strictEqual(s2.text.toString(), 'init') + assert.strictEqual(s2.text.join(''), 'init') + }) + + it('should allow immediate access to the value', () => { + Automerge.change(Automerge.init(), doc => { + const text = new Automerge.Text('init') + assert.strictEqual(text.length, 4) + assert.strictEqual(text.get(0), 'i') + assert.strictEqual(text.toString(), 'init') + doc.text = text + 
assert.strictEqual(doc.text.length, 4) + assert.strictEqual(doc.text.get(0), 'i') + assert.strictEqual(doc.text.toString(), 'init') + }) + }) + + it('should allow pre-assignment modification of the initial value', () => { + let s1 = Automerge.change(Automerge.init(), doc => { + const text = new Automerge.Text('init') + text.deleteAt(3) + assert.strictEqual(text.join(''), 'ini') + doc.text = text + assert.strictEqual(doc.text.join(''), 'ini') + assert.strictEqual(doc.text.toString(), 'ini') + }) + assert.strictEqual(s1.text.toString(), 'ini') + assert.strictEqual(s1.text.join(''), 'ini') + }) + + it('should allow post-assignment modification of the initial value', () => { + let s1 = Automerge.change(Automerge.init(), doc => { + const text = new Automerge.Text('init') + doc.text = text + doc.text.deleteAt(0) + doc.text.insertAt(0, 'I') + assert.strictEqual(doc.text.join(''), 'Init') + assert.strictEqual(doc.text.toString(), 'Init') + }) + assert.strictEqual(s1.text.join(''), 'Init') + assert.strictEqual(s1.text.toString(), 'Init') + }) + }) + + describe('non-textual control characters', () => { + let s1 + beforeEach(() => { + s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text() + doc.text.insertAt(0, 'a') + doc.text.insertAt(1, { attribute: 'bold' }) + }) + }) + + it('should allow fetching non-textual characters', () => { + assert.deepEqual(s1.text.get(1), { attribute: 'bold' }) + //assert.strictEqual(s1.text.getElemId(1), `3@${Automerge.getActorId(s1)}`) + }) + + it('should include control characters in string length', () => { + assert.strictEqual(s1.text.length, 2) + assert.strictEqual(s1.text.get(0), 'a') + }) + + it('should exclude control characters from toString()', () => { + assert.strictEqual(s1.text.toString(), 'a') + }) + + it('should allow control characters to be updated', () => { + const s2 = Automerge.change(s1, doc => doc.text.get(1).attribute = 'italic') + const s3 = Automerge.load(Automerge.save(s2)) + 
assert.strictEqual(s1.text.get(1).attribute, 'bold') + assert.strictEqual(s2.text.get(1).attribute, 'italic') + assert.strictEqual(s3.text.get(1).attribute, 'italic') + }) + + describe('spans interface to Text', () => { + it('should return a simple string as a single span', () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text('hello world') + }) + assert.deepEqual(s1.text.toSpans(), ['hello world']) + }) + it('should return an empty string as an empty array', () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text() + }) + assert.deepEqual(s1.text.toSpans(), []) + }) + it('should split a span at a control character', () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text('hello world') + doc.text.insertAt(5, { attributes: { bold: true } }) + }) + assert.deepEqual(s1.text.toSpans(), + ['hello', { attributes: { bold: true } }, ' world']) + }) + it('should allow consecutive control characters', () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text('hello world') + doc.text.insertAt(5, { attributes: { bold: true } }) + doc.text.insertAt(6, { attributes: { italic: true } }) + }) + assert.deepEqual(s1.text.toSpans(), + ['hello', + { attributes: { bold: true } }, + { attributes: { italic: true } }, + ' world' + ]) + }) + it('should allow non-consecutive control characters', () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text('hello world') + doc.text.insertAt(5, { attributes: { bold: true } }) + doc.text.insertAt(12, { attributes: { italic: true } }) + }) + assert.deepEqual(s1.text.toSpans(), + ['hello', + { attributes: { bold: true } }, + ' world', + { attributes: { italic: true } } + ]) + }) + + it('should be convertable into a Quill delta', () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text('Gandalf the Grey') + 
doc.text.insertAt(0, { attributes: { bold: true } }) + doc.text.insertAt(7 + 1, { attributes: { bold: null } }) + doc.text.insertAt(12 + 2, { attributes: { color: '#cccccc' } }) + }) + + let deltaDoc = automergeTextToDeltaDoc(s1.text) + + // From https://quilljs.com/docs/delta/ + let expectedDoc = [ + { insert: 'Gandalf', attributes: { bold: true } }, + { insert: ' the ' }, + { insert: 'Grey', attributes: { color: '#cccccc' } } + ] + + assert.deepEqual(deltaDoc, expectedDoc) + }) + + it('should support embeds', () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text('') + doc.text.insertAt(0, { attributes: { link: 'https://quilljs.com' } }) + doc.text.insertAt(1, { + image: 'https://quilljs.com/assets/images/icon.png' + }) + doc.text.insertAt(2, { attributes: { link: null } }) + }) + + let deltaDoc = automergeTextToDeltaDoc(s1.text) + + // From https://quilljs.com/docs/delta/ + let expectedDoc = [{ + // An image link + insert: { + image: 'https://quilljs.com/assets/images/icon.png' + }, + attributes: { + link: 'https://quilljs.com' + } + }] + + assert.deepEqual(deltaDoc, expectedDoc) + }) + + it('should handle concurrent overlapping spans', () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text('Gandalf the Grey') + }) + + let s2 = Automerge.merge(Automerge.init(), s1) + + let s3 = Automerge.change(s1, doc => { + doc.text.insertAt(8, { attributes: { bold: true } }) + doc.text.insertAt(16 + 1, { attributes: { bold: null } }) + }) + + let s4 = Automerge.change(s2, doc => { + doc.text.insertAt(0, { attributes: { bold: true } }) + doc.text.insertAt(11 + 1, { attributes: { bold: null } }) + }) + + let merged = Automerge.merge(s3, s4) + + let deltaDoc = automergeTextToDeltaDoc(merged.text) + + // From https://quilljs.com/docs/delta/ + let expectedDoc = [ + { insert: 'Gandalf the Grey', attributes: { bold: true } }, + ] + + assert.deepEqual(deltaDoc, expectedDoc) + }) + + it('should handle 
debolding spans', () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text('Gandalf the Grey') + }) + + let s2 = Automerge.merge(Automerge.init(), s1) + + let s3 = Automerge.change(s1, doc => { + doc.text.insertAt(0, { attributes: { bold: true } }) + doc.text.insertAt(16 + 1, { attributes: { bold: null } }) + }) + + let s4 = Automerge.change(s2, doc => { + doc.text.insertAt(8, { attributes: { bold: null } }) + doc.text.insertAt(11 + 1, { attributes: { bold: true } }) + }) + + + let merged = Automerge.merge(s3, s4) + + let deltaDoc = automergeTextToDeltaDoc(merged.text) + + // From https://quilljs.com/docs/delta/ + let expectedDoc = [ + { insert: 'Gandalf ', attributes: { bold: true } }, + { insert: 'the' }, + { insert: ' Grey', attributes: { bold: true } }, + ] + + assert.deepEqual(deltaDoc, expectedDoc) + }) + + // xxx: how would this work for colors? + it('should handle destyling across destyled spans', () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text('Gandalf the Grey') + }) + + let s2 = Automerge.merge(Automerge.init(), s1) + + let s3 = Automerge.change(s1, doc => { + doc.text.insertAt(0, { attributes: { bold: true } }) + doc.text.insertAt(16 + 1, { attributes: { bold: null } }) + }) + + let s4 = Automerge.change(s2, doc => { + doc.text.insertAt(8, { attributes: { bold: null } }) + doc.text.insertAt(11 + 1, { attributes: { bold: true } }) + }) + + let merged = Automerge.merge(s3, s4) + + let final = Automerge.change(merged, doc => { + doc.text.insertAt(3 + 1, { attributes: { bold: null } }) + doc.text.insertAt(doc.text.length, { attributes: { bold: true } }) + }) + + let deltaDoc = automergeTextToDeltaDoc(final.text) + + // From https://quilljs.com/docs/delta/ + let expectedDoc = [ + { insert: 'Gan', attributes: { bold: true } }, + { insert: 'dalf the Grey' }, + ] + + assert.deepEqual(deltaDoc, expectedDoc) + }) + + it('should apply an insert', () => { + let s1 = 
Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text('Hello world') + }) + + const delta = [ + { retain: 6 }, + { insert: 'reader' }, + { delete: 5 } + ] + + let s2 = Automerge.change(s1, doc => { + applyDeltaDocToAutomergeText(delta, doc) + }) + + assert.strictEqual(s2.text.join(''), 'Hello reader') + }) + + it('should apply an insert with control characters', () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text('Hello world') + }) + + const delta = [ + { retain: 6 }, + { insert: 'reader', attributes: { bold: true } }, + { delete: 5 }, + { insert: '!' } + ] + + let s2 = Automerge.change(s1, doc => { + applyDeltaDocToAutomergeText(delta, doc) + }) + + assert.strictEqual(s2.text.toString(), 'Hello reader!') + assert.deepEqual(s2.text.toSpans(), [ + "Hello ", + { attributes: { bold: true } }, + "reader", + { attributes: { bold: null } }, + "!" + ]) + }) + + it('should account for control characters in retain/delete lengths', () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text('Hello world') + doc.text.insertAt(4, { attributes: { color: '#ccc' } }) + doc.text.insertAt(10, { attributes: { color: '#f00' } }) + }) + + const delta = [ + { retain: 6 }, + { insert: 'reader', attributes: { bold: true } }, + { delete: 5 }, + { insert: '!' } + ] + + let s2 = Automerge.change(s1, doc => { + applyDeltaDocToAutomergeText(delta, doc) + }) + + assert.strictEqual(s2.text.toString(), 'Hello reader!') + assert.deepEqual(s2.text.toSpans(), [ + "Hell", + { attributes: { color: '#ccc'} }, + "o ", + { attributes: { bold: true } }, + "reader", + { attributes: { bold: null } }, + { attributes: { color: '#f00'} }, + "!" 
+ ]) + }) + + it('should support embeds', () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text('') + }) + + let deltaDoc = [{ + // An image link + insert: { + image: 'https://quilljs.com/assets/images/icon.png' + }, + attributes: { + link: 'https://quilljs.com' + } + }] + + let s2 = Automerge.change(s1, doc => { + applyDeltaDocToAutomergeText(deltaDoc, doc) + }) + + assert.deepEqual(s2.text.toSpans(), [ + { attributes: { link: 'https://quilljs.com' } }, + { image: 'https://quilljs.com/assets/images/icon.png'}, + { attributes: { link: null } }, + ]) + }) + }) + }) + + it('should support unicode when creating text', () => { + s1 = Automerge.from({ + text: new Automerge.Text('🐦') + }) + assert.strictEqual(s1.text.get(0), '🐦') + }) +}) diff --git a/automerge-js/test/uuid_test.js b/automerge-js/test/uuid_test.js new file mode 100644 index 00000000..a0f83df1 --- /dev/null +++ b/automerge-js/test/uuid_test.js @@ -0,0 +1,32 @@ +const assert = require('assert') +const Automerge = require('..') + +const uuid = Automerge.uuid + +describe('uuid', () => { + afterEach(() => { + uuid.reset() + }) + + describe('default implementation', () => { + it('generates unique values', () => { + assert.notEqual(uuid(), uuid()) + }) + }) + + describe('custom implementation', () => { + let counter + + function customUuid() { + return `custom-uuid-${counter++}` + } + + before(() => uuid.setFactory(customUuid)) + beforeEach(() => counter = 0) + + it('invokes the custom factory', () => { + assert.equal(uuid(), 'custom-uuid-0') + assert.equal(uuid(), 'custom-uuid-1') + }) + }) +}) diff --git a/rust/automerge-wasm/.gitignore b/automerge-wasm/.gitignore similarity index 59% rename from rust/automerge-wasm/.gitignore rename to automerge-wasm/.gitignore index 77c11e08..90f5b649 100644 --- a/rust/automerge-wasm/.gitignore +++ b/automerge-wasm/.gitignore @@ -1,6 +1,7 @@ /node_modules -/bundler -/nodejs -/deno +/dev +/node +/web +/target Cargo.lock yarn.lock diff 
--git a/rust/automerge-wasm/Cargo.toml b/automerge-wasm/Cargo.toml similarity index 78% rename from rust/automerge-wasm/Cargo.toml rename to automerge-wasm/Cargo.toml index b6055a7d..2ee2b44e 100644 --- a/rust/automerge-wasm/Cargo.toml +++ b/automerge-wasm/Cargo.toml @@ -2,14 +2,13 @@ [package] name = "automerge-wasm" description = "An js/wasm wrapper for the rust implementation of automerge-backend" -repository = "https://github.com/automerge/automerge-rs" -version = "0.1.0" +# repository = "https://github.com/automerge/automerge-rs" +version = "0.0.4" authors = ["Alex Good ","Orion Henry ", "Martin Kleppmann"] categories = ["wasm"] readme = "README.md" edition = "2021" license = "MIT" -rust-version = "1.57.0" [lib] crate-type = ["cdylib","rlib"] @@ -28,24 +27,23 @@ serde = "^1.0" serde_json = "^1.0" rand = { version = "^0.8.4" } getrandom = { version = "^0.2.2", features=["js"] } -uuid = { version = "^1.2.1", features=["v4", "js", "serde"] } -serde-wasm-bindgen = "0.4.3" +uuid = { version = "^0.8.2", features=["v4", "wasm-bindgen", "serde"] } +serde-wasm-bindgen = "0.1.3" serde_bytes = "0.11.5" +unicode-segmentation = "1.7.1" hex = "^0.4.3" regex = "^1.5" -itertools = "^0.10.3" -thiserror = "^1.0.16" [dependencies.wasm-bindgen] -version = "^0.2.83" +version = "^0.2" #features = ["std"] features = ["serde-serialize", "std"] [package.metadata.wasm-pack.profile.release] -# wasm-opt = false +wasm-opt = true [package.metadata.wasm-pack.profile.profiling] -wasm-opt = false +wasm-opt = true # The `web-sys` crate allows you to interact with the various browser APIs, # like the DOM. 
@@ -57,6 +55,5 @@ features = ["console"] [dev-dependencies] futures = "^0.1" -proptest = { version = "^1.0.0", default-features = false, features = ["std"] } wasm-bindgen-futures = "^0.4" wasm-bindgen-test = "^0.3" diff --git a/automerge-wasm/README.md b/automerge-wasm/README.md new file mode 100644 index 00000000..80f8f1fa --- /dev/null +++ b/automerge-wasm/README.md @@ -0,0 +1,4 @@ +## Automerge WASM Low Level Interface + +This is a low level automerge library written in rust exporting a javascript API via WASM. This low level api is the underpinning to the `automerge-js` library that reimplements the Automerge API via these low level functions. + diff --git a/automerge-wasm/attr_bug.js b/automerge-wasm/attr_bug.js new file mode 100644 index 00000000..324fba33 --- /dev/null +++ b/automerge-wasm/attr_bug.js @@ -0,0 +1,15 @@ +let Automerge = require(".") +let util = require('util') + +let heads = ['d138235e8123c407852968a976bb3d05bb30b9f7639854e64cb4adee98a407a6'] +let newHeads = ['d2a0500dad1b4ef1ca0f66015ae24f5cd7bec8316aa8e1115640a665e188147e'] +let text = '10@e1761c3ec92a87d3620d1bc007bdf83a000015ca0b60684edfd007672a0f00113ba1' +let data = 
'133,111,74,131,126,182,225,217,0,130,22,22,34,0,174,8,20,12,38,118,140,95,76,123,139,6,212,187,22,0,0,45,11,84,68,75,148,168,76,245,27,147,189,91,99,157,102,34,0,174,8,20,12,38,118,140,95,76,123,139,6,212,187,22,0,0,60,72,31,34,255,16,190,226,176,124,232,19,117,181,152,202,34,0,174,8,20,12,38,118,140,95,76,123,139,6,212,187,22,0,0,173,17,57,82,13,196,120,217,253,4,117,222,120,203,127,31,34,0,174,8,20,12,38,118,140,95,76,123,139,6,212,187,22,0,0,195,238,208,1,215,183,150,181,230,202,10,131,10,53,212,98,16,118,64,44,216,205,38,70,50,172,104,141,96,213,70,225,153,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,7,90,18,166,242,242,169,181,172,173,95,218,197,230,53,171,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,20,123,52,22,113,155,106,167,61,96,211,220,13,176,202,18,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,21,202,11,96,104,78,223,208,7,103,42,15,0,17,59,161,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,49,157,99,144,176,89,107,142,238,50,16,33,198,172,12,98,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,49,160,189,244,223,205,155,34,245,110,74,38,170,63,47,165,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,101,43,36,88,127,139,248,176,98,81,75,151,178,155,65,235,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,104,72,125,26,22,39,88,236,174,2,180,0,186,44,23,100,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,106,192,146,37,220,38,124,176,133,96,99,183,52,146,51,32,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,137,185,129,79,171,192,93,254,162,191,198,11,166,169,184,231,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,183,221,99,120,31,214,103,85,152,145,225,205,226,10,71,148,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,204,247,249,8,135,23,98,57,29,144,111,93,62,1,176,68,34,225,118,28,62,201,42,135,211,98,13,27,192,7,189,248,58,0,0,243,90,241,176,57,235,58,247,98,38,71,96,245,193,178,119,34,229,150
,245,136,76,151,59,113,93,112,149,234,7,68,20,213,0,0,23,61,123,236,184,3,106,194,171,46,241,84,223,211,110,241,34,229,150,245,136,76,151,59,113,93,112,149,234,7,68,20,213,0,0,32,181,113,40,11,161,118,67,217,36,93,201,189,221,55,174,34,229,150,245,136,76,151,59,113,93,112,149,234,7,68,20,213,0,0,97,188,15,173,96,163,123,87,228,32,227,245,56,237,53,228,34,229,150,245,136,76,151,59,113,93,112,149,234,7,68,20,213,0,0,97,221,248,228,210,133,45,170,105,131,177,2,9,124,254,61,16,255,46,217,125,15,181,79,74,181,101,95,13,121,190,236,160,1,210,160,80,13,173,27,78,241,202,15,102,1,90,226,79,92,215,190,200,49,106,168,225,17,86,64,166,101,225,136,20,126,8,1,48,3,78,19,38,35,3,53,77,64,17,67,20,86,3,14,1,11,2,17,17,165,1,27,242,1,21,202,1,33,238,1,43,187,2,52,4,66,51,86,78,95,177,2,128,1,45,129,1,15,131,1,23,127,7,21,4,24,8,12,15,3,14,15,6,193,0,12,8,13,3,10,127,9,6,11,127,16,3,2,20,3,10,5,33,3,126,5,17,17,1,11,17,24,21,12,0,125,18,20,19,126,1,0,20,1,127,108,23,1,127,105,11,1,127,117,2,1,127,126,14,1,127,114,192,0,1,127,64,7,1,127,121,2,1,126,126,0,5,1,126,123,0,2,1,127,126,19,1,127,109,9,1,127,11,32,1,125,86,118,0,16,1,127,113,10,1,127,117,23,1,127,105,11,1,127,117,2,0,127,14,2,3,16,1,127,3,2,1,127,2,9,1,2,2,14,1,121,2,1,2,1,2,1,2,171,1,1,127,2,28,1,126,0,4,38,1,167,2,0,255,1,0,127,70,123,34,97,117,116,104,111,114,73,100,34,58,34,101,49,55,54,49,99,51,101,99,57,50,97,56,55,100,51,54,50,48,100,49,98,99,48,48,55,98,100,102,56,51,97,34,44,34,109,101,115,115,97,103,101,34,58,34,74,97,102,102,97,32,67,97,107,101,34,125,39,0,127,0,192,1,1,127,2,32,1,127,2,18,1,127,2,49,1,127,0,191,1,1,126,119,10,33,1,126,95,34,17,1,126,112,17,49,1,167,2,7,0,17,161,2,7,127,4,3,8,4,15,0,17,3,9,157,2,10,119,12,19,42,53,55,71,74,77,80,0,21,5,7,124,6,7,15,7,3,4,127,3,4,4,127,6,7,4,124,6,12,3,4,2,15,127,3,2,15,127,6,15,3,127,6,14,3,11,21,2,3,127,6,11,4,126,6,12,15,1,5,4,126,12,3,13,4,127,14,3,4,2,14,124,6,12,3,4,3,8,127,10,9,17,126,8,12,10,21,126,8,6,5,8,127,3,3,8,127,12,8,13,3,8,127,6,6,8,2,14,124,6,12,3
,8,4,15,127,6,2,8,123,15,12,3,15,6,2,8,127,12,2,2,123,8,6,12,3,8,4,15,127,6,6,8,125,12,10,3,5,8,127,6,2,8,11,0,16,8,127,6,2,12,127,9,6,11,126,16,3,2,4,0,10,69,142,189,75,66,97,28,133,207,121,223,50,149,140,140,43,213,32,45,125,64,208,93,226,6,81,75,83,67,4,145,91,91,67,91,229,216,7,250,187,16,33,53,93,250,35,154,171,185,177,32,130,162,156,36,18,84,156,28,85,80,55,125,95,21,228,89,14,156,7,206,129,35,152,28,195,113,30,79,254,230,110,8,250,133,63,2,226,23,207,62,97,121,67,162,128,188,184,184,103,91,48,27,112,138,82,149,46,98,244,189,40,211,59,108,202,235,80,181,124,192,185,179,74,32,84,56,225,63,62,81,148,65,165,161,76,114,15,21,214,48,190,250,142,178,85,36,199,10,194,204,62,72,17,143,140,48,211,202,122,123,74,243,58,96,221,28,249,195,66,136,87,184,49,11,235,251,70,191,32,22,161,189,173,154,36,53,206,166,83,42,254,5,55,231,39,15,88,198,10,59,178,180,189,81,147,121,83,57,41,104,5,150,48,23,239,244,247,151,143,194,13,70,121,122,43,151,163,183,150,196,55,24,155,96,102,166,32,233,115,68,122,127,8,97,114,99,104,105,118,101,100,2,6,97,117,116,104,111,114,126,8,99,111,109,109,101,110,116,115,12,99,111,110,116,114,105,98,117,116,111,114,115,3,7,109,101,115,115,97,103,101,2,9,112,97,114,101,110,116,95,105,100,125,6,112,105,110,110,101,100,6,115,104,97,114,101,100,4,116,101,120,116,3,4,116,105,109,101,124,5,116,105,116,108,101,32,48,48,97,101,48,56,49,52,48,99,50,54,55,54,56,99,53,102,52,99,55,98,56,98,48,54,100,52,98,98,49,54,32,101,49,55,54,49,99,51,101,99,57,50,97,56,55,100,51,54,50,48,100,49,98,99,48,48,55,98,100,102,56,51,97,32,101,53,57,54,102,53,56,56,52,99,57,55,51,98,55,49,53,100,55,48,57,53,101,97,48,55,52,52,49,52,100,53,0,157,2,9,4,116,121,112,101,2,7,127,21,3,7,124,4,21,4,21,4,7,119,4,21,7,1,7,17,7,5,3,2,12,121,6,12,15,12,4,12,4,2,3,111,12,4,12,6,12,4,12,4,12,4,12,6,12,3,5,15,5,2,3,126,12,6,30,3,12,21,2,3,115,12,4,12,4,12,4,12,4,12,4,12,6,12,16,1,123,4,10,12,4,12,2,3,102,4,12,4,12,4,12,4,12,4,12,4,12,14,12,4,12,14,12,6,12,3,5,8,5,3,10,10,17,127,12,11,21,126,6,12,4,
8,2,3,125,12,8,12,7,13,102,5,13,8,12,6,12,8,5,12,8,12,14,12,6,12,3,5,15,5,3,12,6,12,8,15,12,2,3,124,6,12,8,12,3,2,117,6,12,3,5,15,5,3,12,6,12,15,2,8,124,12,8,12,10,2,3,121,8,12,8,12,6,12,8,12,0,112,12,8,12,8,12,8,12,8,12,8,12,8,12,8,12,6,2,12,127,9,6,11,125,16,3,5,2,4,2,7,127,4,3,8,4,15,117,143,207,43,131,113,28,199,63,239,103,101,241,236,49,19,113,115,81,147,178,56,108,10,7,92,108,56,40,162,20,218,193,109,139,210,52,179,231,251,41,23,148,103,59,40,7,57,176,54,155,205,197,143,63,65,106,53,57,49,108,53,187,56,104,23,66,118,179,135,92,148,195,235,211,235,240,254,244,249,188,157,85,193,93,169,164,194,95,187,37,125,168,154,244,164,178,113,85,214,164,103,181,140,144,120,129,38,46,193,147,253,203,242,76,235,17,223,81,138,79,128,40,167,248,150,2,198,36,103,233,23,79,99,150,228,55,161,33,104,218,97,79,24,102,176,91,1,219,101,44,14,96,157,227,252,64,127,241,54,108,84,98,179,97,177,13,231,33,231,40,77,200,139,28,233,170,147,224,123,210,79,234,209,137,14,125,142,219,166,218,47,200,116,35,142,177,226,46,82,53,68,73,196,80,3,53,41,218,98,108,64,32,12,184,244,181,150,42,204,93,211,41,175,85,124,218,26,231,12,66,223,31,12,119,143,218,123,149,178,216,132,111,112,172,43,202,150,12,217,16,225,206,3,126,36,171,132,249,61,238,115,40,239,149,18,75,174,17,199,25,123,165,2,69,184,64,205,22,124,138,31,29,234,73,240,43,100,120,243,98,159,139,244,31,231,124,69,80,140,240,213,155,211,98,193,47,25,155,100,169,206,96,248,2,20,157,2,9,3,1,2,0,7,1,127,4,7,1,127,0,28,1,127,0,235,0,1,127,0,43,1,127,7,6,1,127,9,9,1,127,0,5,1,127,0,15,1,127,0,5,1,2,0,55,1,127,0,10,1,127,1,2,134,4,2,0,125,230,1,166,1,182,1,2,214,2,2,1,126,0,105,2,100,127,6,3,2,127,0,28,22,127,0,30,22,127,38,204,0,22,127,0,43,22,125,102,2,6,6,22,127,2,9,22,127,0,5,22,127,0,15,22,127,0,5,22,2,0,54,22,125,38,0,22,9,150,1,173,80,75,78,195,48,16,189,74,110,208,184,78,234,100,87,17,85,21,16,169,5,129,248,108,208,56,246,164,85,211,16,98,47,210,172,145,216,245,2,108,122,150,46,144,224,74,116,193,36,161,233,5,106,217,158,55,243,230,243,108,205
,196,136,37,92,39,225,16,2,161,248,104,232,42,38,19,215,21,82,97,192,65,251,225,8,253,32,240,146,80,112,41,152,175,132,27,250,26,92,225,121,204,83,254,253,252,97,18,199,47,183,179,217,221,120,60,190,2,68,112,34,88,233,233,50,79,117,41,75,13,10,163,236,102,184,201,46,39,69,52,227,79,38,136,179,231,183,245,133,143,108,90,101,139,98,190,102,122,163,217,99,140,215,209,251,215,246,251,119,240,209,222,135,159,207,237,97,32,107,107,43,203,44,88,131,96,149,229,22,27,76,177,90,202,26,201,46,150,198,161,13,14,161,60,117,122,127,191,107,2,70,33,28,15,145,170,45,111,110,110,13,41,51,10,254,121,84,176,34,138,16,145,202,58,132,129,186,55,147,186,89,85,223,135,90,244,80,85,173,131,77,54,118,203,216,242,53,79,201,240,118,208,177,158,140,75,237,121,151,216,199,154,199,164,8,157,164,147,86,210,211,82,39,193,21,125,4,45,206,121,85,203,253,206,41,160,132,180,132,98,113,46,240,7,126,0,1,3,0,2,1,126,0,1,4,0,2,1,51,0,127,1,10,0,127,1,219,0,0,127,1,15,0,3,1,36,0,127,1,21,0,2,1,54,0,127,1,11,0,126,21,4,2,21,123,4,21,18,19,20,6,15,127,4,118,156,2,243,125,140,2,3,242,125,141,2,37,2,127,145,126,2,127,3,125,127,92' + +let doc = Automerge.loadDoc(new Uint8Array(data.toString().split(",").map((n) => parseInt(n)))) + +console.log(doc.text(text,heads)) +console.log(doc.text(text,newHeads)) +console.log(doc.text(text)) +console.log(util.inspect(doc.attribute(text,heads,[newHeads]), false, null, false)) + diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts new file mode 100644 index 00000000..04373f11 --- /dev/null +++ b/automerge-wasm/index.d.ts @@ -0,0 +1,249 @@ + +export type Actor = string; +export type ObjID = string; +export type Change = Uint8Array; +export type SyncMessage = Uint8Array; +export type Prop = string | number; +export type Hash = string; +export type Heads = Hash[]; +export type Value = string | number | boolean | null | Date | Uint8Array +export type ObjType = string | Array | Object +export type FullValue = + ["str", string] | + ["int", number] | + ["uint", 
number] | + ["f64", number] | + ["boolean", boolean] | + ["timestamp", Date] | + ["counter", number] | + ["bytes", Uint8Array] | + ["null", Uint8Array] | + ["map", ObjID] | + ["list", ObjID] | + ["text", ObjID] | + ["table", ObjID] + +export enum ObjTypeName { + list = "list", + map = "map", + table = "table", + text = "text", +} + +export type Datatype = + "boolean" | + "str" | + "int" | + "uint" | + "f64" | + "null" | + "timestamp" | + "counter" | + "bytes" | + "map" | + "text" | + "list"; + +export type DecodedSyncMessage = { + heads: Heads, + need: Heads, + have: any[] + changes: Change[] +} + +export type DecodedChange = { + actor: Actor, + seq: number + startOp: number, + time: number, + message: string | null, + deps: Heads, + hash: Hash, + ops: Op[] +} + +export type ChangeSetAddition = { + actor: string, + start: number, + end: number, +} + +export type ChangeSetDeletion = { + actor: string, + pos: number, + val: string +} + +export type ChangeSet = { + add: ChangeSetAddition[], + del: ChangeSetDeletion[] +} + +export type Op = { + action: string, + obj: ObjID, + key: string, + value?: string | number | boolean, + datatype?: string, + pred: string[], +} + +export function create(actor?: Actor): Automerge; +export function loadDoc(data: Uint8Array, actor?: Actor): Automerge; +export function encodeChange(change: DecodedChange): Change; +export function decodeChange(change: Change): DecodedChange; +export function initSyncState(): SyncState; +export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; +export function decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage; +export function encodeSyncState(state: SyncState): Uint8Array; +export function decodeSyncState(data: Uint8Array): SyncState; + +export class Automerge { + // change state + set(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): undefined; + set_object(obj: ObjID, prop: Prop, value: ObjType): ObjID; + insert(obj: ObjID, index: number, value: Value, datatype?: 
Datatype): undefined; + insert_object(obj: ObjID, index: number, value: ObjType): ObjID; + push(obj: ObjID, value: Value, datatype?: Datatype): undefined; + push_object(obj: ObjID, value: ObjType): ObjID; + splice(obj: ObjID, start: number, delete_count: number, text?: string | Array): ObjID[] | undefined; + inc(obj: ObjID, prop: Prop, value: number): void; + del(obj: ObjID, prop: Prop): void; + + // returns a single value - if there is a conflict return the winner + value(obj: ObjID, prop: any, heads?: Heads): FullValue | null; + // return all values in case of a conflict + values(obj: ObjID, arg: any, heads?: Heads): FullValue[]; + keys(obj: ObjID, heads?: Heads): string[]; + text(obj: ObjID, heads?: Heads): string; + length(obj: ObjID, heads?: Heads): number; + materialize(obj?: ObjID): any; + + // experimental spans api - unstable! + mark(obj: ObjID, name: string, range: string, value: Value, datatype?: Datatype): void; + unmark(obj: ObjID, mark: ObjID): void; + spans(obj: ObjID): any; + raw_spans(obj: ObjID): any; + blame(obj: ObjID, baseline: Heads, changeset: Heads[]): ChangeSet[]; + attribute(obj: ObjID, baseline: Heads, changeset: Heads[]): ChangeSet[]; + attribute2(obj: ObjID, baseline: Heads, changeset: Heads[]): ChangeSet[]; + + // transactions + commit(message?: string, time?: number): Heads; + merge(other: Automerge): ObjID[]; + getActorId(): Actor; + pendingOps(): number; + rollback(): number; + + // save and load to local store + save(): Uint8Array; + saveIncremental(): Uint8Array; + loadIncremental(data: Uint8Array): ObjID[]; + + // sync over network + receiveSyncMessage(state: SyncState, message: SyncMessage): ObjID[]; + generateSyncMessage(state: SyncState): SyncMessage | null; + + // low level change functions + applyChanges(changes: Change[]): ObjID[]; + getChanges(have_deps: Heads): Change[]; + getChangeByHash(hash: Hash): Change | null; + getChangesAdded(other: Automerge): Change[]; + getHeads(): Heads; + getLastLocalChange(): Change; + 
getMissingDeps(heads?: Heads): Heads; + + // memory management + free(): void; + clone(actor?: string): Automerge; + fork(actor?: string): Automerge; + + // dump internal state to console.log + dump(): void; + + // dump internal state to a JS object + toJS(): any; +} + +export class SyncState { + free(): void; + clone(): SyncState; + lastSentHeads: any; + sentHashes: any; + readonly sharedHeads: any; +} + +export type InitInput = RequestInfo | URL | Response | BufferSource | WebAssembly.Module; + +export interface InitOutput { + readonly memory: WebAssembly.Memory; + readonly __wbg_automerge_free: (a: number) => void; + readonly automerge_new: (a: number, b: number, c: number) => void; + readonly automerge_clone: (a: number, b: number, c: number, d: number) => void; + readonly automerge_free: (a: number) => void; + readonly automerge_pendingOps: (a: number) => number; + readonly automerge_commit: (a: number, b: number, c: number, d: number, e: number) => number; + readonly automerge_rollback: (a: number) => number; + readonly automerge_keys: (a: number, b: number, c: number, d: number, e: number) => void; + readonly automerge_text: (a: number, b: number, c: number, d: number, e: number) => void; + readonly automerge_splice: (a: number, b: number, c: number, d: number, e: number, f: number, g: number) => void; + readonly automerge_push: (a: number, b: number, c: number, d: number, e: number, f: number, g: number) => void; + readonly automerge_insert: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number) => void; + readonly automerge_set: (a: number, b: number, c: number, d: number, e: number, f: number, g: number, h: number) => void; + readonly automerge_inc: (a: number, b: number, c: number, d: number, e: number, f: number) => void; + readonly automerge_value: (a: number, b: number, c: number, d: number, e: number, f: number) => void; + readonly automerge_values: (a: number, b: number, c: number, d: number, e: number, f: number) 
=> void; + readonly automerge_length: (a: number, b: number, c: number, d: number, e: number) => void; + readonly automerge_del: (a: number, b: number, c: number, d: number, e: number) => void; + readonly automerge_save: (a: number, b: number) => void; + readonly automerge_saveIncremental: (a: number) => number; + readonly automerge_loadIncremental: (a: number, b: number, c: number) => void; + readonly automerge_applyChanges: (a: number, b: number, c: number) => void; + readonly automerge_getChanges: (a: number, b: number, c: number) => void; + readonly automerge_getChangesAdded: (a: number, b: number, c: number) => void; + readonly automerge_getHeads: (a: number) => number; + readonly automerge_getActorId: (a: number, b: number) => void; + readonly automerge_getLastLocalChange: (a: number, b: number) => void; + readonly automerge_dump: (a: number) => void; + readonly automerge_getMissingDeps: (a: number, b: number, c: number) => void; + readonly automerge_receiveSyncMessage: (a: number, b: number, c: number, d: number) => void; + readonly automerge_generateSyncMessage: (a: number, b: number, c: number) => void; + readonly automerge_toJS: (a: number) => number; + readonly create: (a: number, b: number, c: number) => void; + readonly loadDoc: (a: number, b: number, c: number, d: number) => void; + readonly encodeChange: (a: number, b: number) => void; + readonly decodeChange: (a: number, b: number) => void; + readonly initSyncState: () => number; + readonly importSyncState: (a: number, b: number) => void; + readonly exportSyncState: (a: number) => number; + readonly encodeSyncMessage: (a: number, b: number) => void; + readonly decodeSyncMessage: (a: number, b: number) => void; + readonly encodeSyncState: (a: number, b: number) => void; + readonly decodeSyncState: (a: number, b: number) => void; + readonly __wbg_list_free: (a: number) => void; + readonly __wbg_map_free: (a: number) => void; + readonly __wbg_text_free: (a: number) => void; + readonly __wbg_table_free: 
(a: number) => void; + readonly __wbg_syncstate_free: (a: number) => void; + readonly syncstate_sharedHeads: (a: number) => number; + readonly syncstate_lastSentHeads: (a: number) => number; + readonly syncstate_set_lastSentHeads: (a: number, b: number, c: number) => void; + readonly syncstate_set_sentHashes: (a: number, b: number, c: number) => void; + readonly syncstate_clone: (a: number) => number; + readonly __wbindgen_malloc: (a: number) => number; + readonly __wbindgen_realloc: (a: number, b: number, c: number) => number; + readonly __wbindgen_add_to_stack_pointer: (a: number) => number; + readonly __wbindgen_free: (a: number, b: number) => void; + readonly __wbindgen_exn_store: (a: number) => void; +} + +/** +* If `module_or_path` is {RequestInfo} or {URL}, makes a request and +* for everything else, calls `WebAssembly.instantiate` directly. +* +* @param {InitInput | Promise} module_or_path +* +* @returns {Promise} +*/ + +export default function init (module_or_path?: InitInput | Promise): Promise; diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json new file mode 100644 index 00000000..336f78f6 --- /dev/null +++ b/automerge-wasm/package.json @@ -0,0 +1,43 @@ +{ + "collaborators": [ + "Orion Henry ", + "Alex Good ", + "Martin Kleppmann" + ], + "name": "automerge-wasm-pack", + "description": "wasm-bindgen bindings to the automerge rust implementation", + "version": "0.0.23", + "license": "MIT", + "files": [ + "README.md", + "package.json", + "index.d.ts", + "node/index.js", + "node/index_bg.wasm", + "web/index.js", + "web/index_bg.wasm" + ], + "types": "index.d.ts", + "module": "./web/index.js", + "main": "./node/index.js", + "scripts": { + "build": "rimraf ./node && wasm-pack build --target nodejs --dev --out-name index -d node && cp index.d.ts node", + "release-w": "rimraf ./web && wasm-pack build --target web --release --out-name index -d web && cp index.d.ts web", + "release-n": "rimraf ./node && wasm-pack build --target nodejs --release 
--out-name index -d node && cp index.d.ts node", + "release": "yarn release-w && yarn release-n", + "test": "yarn build && ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" + }, + "dependencies": {}, + "devDependencies": { + "@types/expect": "^24.3.0", + "@types/jest": "^27.4.0", + "@types/mocha": "^9.1.0", + "@types/node": "^17.0.13", + "fast-sha256": "^1.3.0", + "mocha": "^9.1.3", + "pako": "^2.0.4", + "rimraf": "^3.0.2", + "ts-mocha": "^9.0.2", + "typescript": "^4.5.5" + } +} diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs new file mode 100644 index 00000000..4fec2359 --- /dev/null +++ b/automerge-wasm/src/interop.rs @@ -0,0 +1,388 @@ +use automerge as am; +use automerge::transaction::Transactable; +use automerge::{Change, ChangeHash, Prop}; +use js_sys::{Array, Object, Reflect, Uint8Array}; +use std::collections::HashSet; +use std::fmt::Display; +use unicode_segmentation::UnicodeSegmentation; +use wasm_bindgen::prelude::*; +use wasm_bindgen::JsCast; + +use crate::{ObjId, ScalarValue, Value}; + +pub(crate) struct JS(pub JsValue); +pub(crate) struct AR(pub Array); + +impl From for JsValue { + fn from(ar: AR) -> Self { + ar.0.into() + } +} + +impl From for JsValue { + fn from(js: JS) -> Self { + js.0 + } +} + +impl From for JS { + fn from(state: am::sync::State) -> Self { + let shared_heads: JS = state.shared_heads.into(); + let last_sent_heads: JS = state.last_sent_heads.into(); + let their_heads: JS = state.their_heads.into(); + let their_need: JS = state.their_need.into(); + let sent_hashes: JS = state.sent_hashes.into(); + let their_have = if let Some(have) = &state.their_have { + JsValue::from(AR::from(have.as_slice()).0) + } else { + JsValue::null() + }; + let result: JsValue = Object::new().into(); + // we can unwrap here b/c we made the object and know its not frozen + Reflect::set(&result, &"sharedHeads".into(), &shared_heads.0).unwrap(); + Reflect::set(&result, &"lastSentHeads".into(), 
&last_sent_heads.0).unwrap(); + Reflect::set(&result, &"theirHeads".into(), &their_heads.0).unwrap(); + Reflect::set(&result, &"theirNeed".into(), &their_need.0).unwrap(); + Reflect::set(&result, &"theirHave".into(), &their_have).unwrap(); + Reflect::set(&result, &"sentHashes".into(), &sent_hashes.0).unwrap(); + JS(result) + } +} + +impl From> for JS { + fn from(heads: Vec) -> Self { + let heads: Array = heads + .iter() + .map(|h| JsValue::from_str(&h.to_string())) + .collect(); + JS(heads.into()) + } +} + +impl From> for JS { + fn from(heads: HashSet) -> Self { + let result: JsValue = Object::new().into(); + for key in &heads { + Reflect::set(&result, &key.to_string().into(), &true.into()).unwrap(); + } + JS(result) + } +} + +impl From>> for JS { + fn from(heads: Option>) -> Self { + if let Some(v) = heads { + let v: Array = v + .iter() + .map(|h| JsValue::from_str(&h.to_string())) + .collect(); + JS(v.into()) + } else { + JS(JsValue::null()) + } + } +} + +impl TryFrom for HashSet { + type Error = JsValue; + + fn try_from(value: JS) -> Result { + let mut result = HashSet::new(); + for key in Reflect::own_keys(&value.0)?.iter() { + if let Some(true) = Reflect::get(&value.0, &key)?.as_bool() { + result.insert(key.into_serde().map_err(to_js_err)?); + } + } + Ok(result) + } +} + +impl TryFrom for Vec { + type Error = JsValue; + + fn try_from(value: JS) -> Result { + let value = value.0.dyn_into::()?; + let value: Result, _> = value.iter().map(|j| j.into_serde()).collect(); + let value = value.map_err(to_js_err)?; + Ok(value) + } +} + +impl From for Option> { + fn from(value: JS) -> Self { + let value = value.0.dyn_into::().ok()?; + let value: Result, _> = value.iter().map(|j| j.into_serde()).collect(); + let value = value.ok()?; + Some(value) + } +} + +impl TryFrom for Vec { + type Error = JsValue; + + fn try_from(value: JS) -> Result { + let value = value.0.dyn_into::()?; + let changes: Result, _> = value.iter().map(|j| j.dyn_into()).collect(); + let changes = 
changes?; + let changes: Result, _> = changes + .iter() + .map(|a| Change::try_from(a.to_vec())) + .collect(); + let changes = changes.map_err(to_js_err)?; + Ok(changes) + } +} + +impl TryFrom for am::sync::State { + type Error = JsValue; + + fn try_from(value: JS) -> Result { + let value = value.0; + let shared_heads = js_get(&value, "sharedHeads")?.try_into()?; + let last_sent_heads = js_get(&value, "lastSentHeads")?.try_into()?; + let their_heads = js_get(&value, "theirHeads")?.into(); + let their_need = js_get(&value, "theirNeed")?.into(); + let their_have = js_get(&value, "theirHave")?.try_into()?; + let sent_hashes = js_get(&value, "sentHashes")?.try_into()?; + Ok(am::sync::State { + shared_heads, + last_sent_heads, + their_heads, + their_need, + their_have, + sent_hashes, + }) + } +} + +impl TryFrom for Option> { + type Error = JsValue; + + fn try_from(value: JS) -> Result { + if value.0.is_null() { + Ok(None) + } else { + Ok(Some(value.try_into()?)) + } + } +} + +impl TryFrom for Vec { + type Error = JsValue; + + fn try_from(value: JS) -> Result { + let value = value.0.dyn_into::()?; + let have: Result, JsValue> = value + .iter() + .map(|s| { + let last_sync = js_get(&s, "lastSync")?.try_into()?; + let bloom = js_get(&s, "bloom")?.try_into()?; + Ok(am::sync::Have { last_sync, bloom }) + }) + .collect(); + let have = have?; + Ok(have) + } +} + +impl TryFrom for am::sync::BloomFilter { + type Error = JsValue; + + fn try_from(value: JS) -> Result { + let value: Uint8Array = value.0.dyn_into()?; + let value = value.to_vec(); + let value = value.as_slice().try_into().map_err(to_js_err)?; + Ok(value) + } +} + +impl From<&[ChangeHash]> for AR { + fn from(value: &[ChangeHash]) -> Self { + AR(value + .iter() + .map(|h| JsValue::from_str(&hex::encode(&h.0))) + .collect()) + } +} + +impl From<&[Change]> for AR { + fn from(value: &[Change]) -> Self { + let changes: Array = value + .iter() + .map(|c| Uint8Array::from(c.raw_bytes())) + .collect(); + AR(changes) + } +} + 
+impl From<&[am::sync::Have]> for AR { + fn from(value: &[am::sync::Have]) -> Self { + AR(value + .iter() + .map(|have| { + let last_sync: Array = have + .last_sync + .iter() + .map(|h| JsValue::from_str(&hex::encode(&h.0))) + .collect(); + // FIXME - the clone and the unwrap here shouldnt be needed - look at into_bytes() + let bloom = Uint8Array::from(have.bloom.to_bytes().as_slice()); + let obj: JsValue = Object::new().into(); + // we can unwrap here b/c we created the object and know its not frozen + Reflect::set(&obj, &"lastSync".into(), &last_sync.into()).unwrap(); + Reflect::set(&obj, &"bloom".into(), &bloom.into()).unwrap(); + obj + }) + .collect()) + } +} + +pub(crate) fn to_js_err(err: T) -> JsValue { + js_sys::Error::new(&std::format!("{}", err)).into() +} + +pub(crate) fn js_get>(obj: J, prop: &str) -> Result { + Ok(JS(Reflect::get(&obj.into(), &prop.into())?)) +} + +pub(crate) fn js_set>(obj: &JsValue, prop: &str, val: V) -> Result { + Reflect::set(obj, &prop.into(), &val.into()) +} + +pub(crate) fn to_prop(p: JsValue) -> Result { + if let Some(s) = p.as_string() { + Ok(Prop::Map(s)) + } else if let Some(n) = p.as_f64() { + Ok(Prop::Seq(n as usize)) + } else { + Err(to_js_err("prop must me a string or number")) + } +} + +pub(crate) fn to_objtype( + value: &JsValue, + datatype: &Option, +) -> Option<(am::ObjType, Vec<(Prop, JsValue)>)> { + match datatype.as_deref() { + Some("map") => { + let map = value.clone().dyn_into::().ok()?; + // FIXME unwrap + let map = js_sys::Object::keys(&map) + .iter() + .zip(js_sys::Object::values(&map).iter()) + .map(|(key, val)| (key.as_string().unwrap().into(), val)) + .collect(); + Some((am::ObjType::Map, map)) + } + Some("list") => { + let list = value.clone().dyn_into::().ok()?; + let list = list + .iter() + .enumerate() + .map(|(i, e)| (i.into(), e)) + .collect(); + Some((am::ObjType::List, list)) + } + Some("text") => { + let text = value.as_string()?; + let text = text + .graphemes(true) + .enumerate() + .map(|(i, 
ch)| (i.into(), ch.into())) + .collect(); + Some((am::ObjType::Text, text)) + } + Some(_) => None, + None => { + if let Ok(list) = value.clone().dyn_into::() { + let list = list + .iter() + .enumerate() + .map(|(i, e)| (i.into(), e)) + .collect(); + Some((am::ObjType::List, list)) + } else if let Ok(map) = value.clone().dyn_into::() { + // FIXME unwrap + let map = js_sys::Object::keys(&map) + .iter() + .zip(js_sys::Object::values(&map).iter()) + .map(|(key, val)| (key.as_string().unwrap().into(), val)) + .collect(); + Some((am::ObjType::Map, map)) + } else if let Some(text) = value.as_string() { + let text = text + .graphemes(true) + .enumerate() + .map(|(i, ch)| (i.into(), ch.into())) + .collect(); + Some((am::ObjType::Text, text)) + } else { + None + } + } + } +} + +pub(crate) fn get_heads(heads: Option) -> Option> { + let heads = heads?; + let heads: Result, _> = heads.iter().map(|j| j.into_serde()).collect(); + heads.ok() +} + +pub(crate) fn get_js_heads(heads: JsValue) -> Result, JsValue> { + let heads = heads.dyn_into::()?; + heads + .iter() + .map(|j| j.into_serde()) + .collect::, _>>() + .map_err(to_js_err) +} + +pub(crate) fn map_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue { + let keys = doc.keys(obj); + let map = Object::new(); + for k in keys { + let val = doc.value(obj, &k); + match val { + Ok(Some((Value::Object(o), exid))) + if o == am::ObjType::Map || o == am::ObjType::Table => + { + Reflect::set(&map, &k.into(), &map_to_js(doc, &exid)).unwrap(); + } + Ok(Some((Value::Object(o), exid))) if o == am::ObjType::List => { + Reflect::set(&map, &k.into(), &list_to_js(doc, &exid)).unwrap(); + } + Ok(Some((Value::Object(o), exid))) if o == am::ObjType::Text => { + Reflect::set(&map, &k.into(), &doc.text(&exid).unwrap().into()).unwrap(); + } + Ok(Some((Value::Scalar(v), _))) => { + Reflect::set(&map, &k.into(), &ScalarValue(v).into()).unwrap(); + } + _ => (), + }; + } + map.into() +} + +pub(crate) fn list_to_js(doc: &am::AutoCommit, obj: &ObjId) -> 
JsValue { + let len = doc.length(obj); + let array = Array::new(); + for i in 0..len { + let val = doc.value(obj, i as usize); + match val { + Ok(Some((Value::Object(o), exid))) + if o == am::ObjType::Map || o == am::ObjType::Table => + { + array.push(&map_to_js(doc, &exid)); + } + Ok(Some((Value::Object(_), exid))) => { + array.push(&list_to_js(doc, &exid)); + } + Ok(Some((Value::Scalar(v), _))) => { + array.push(&ScalarValue(v).into()); + } + _ => (), + }; + } + array.into() +} diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs new file mode 100644 index 00000000..8e8dce53 --- /dev/null +++ b/automerge-wasm/src/lib.rs @@ -0,0 +1,912 @@ +#![allow(clippy::unused_unit)] +use am::transaction::CommitOptions; +use am::transaction::Transactable; +use automerge as am; +use automerge::{Change, ObjId, Prop, Value, ROOT}; +use js_sys::{Array, Object, Uint8Array}; +use regex::Regex; +use std::convert::TryInto; +use wasm_bindgen::prelude::*; +use wasm_bindgen::JsCast; + +mod interop; +mod sync; +mod value; + +use interop::{ + get_heads, get_js_heads, js_get, js_set, list_to_js, map_to_js, to_js_err, to_objtype, to_prop, + AR, JS, +}; +use sync::SyncState; +use value::{datatype, ScalarValue}; + +#[allow(unused_macros)] +macro_rules! 
log { + ( $( $t:tt )* ) => { + web_sys::console::log_1(&format!( $( $t )* ).into()); + }; +} + +#[cfg(feature = "wee_alloc")] +#[global_allocator] +static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; + +#[wasm_bindgen] +#[derive(Debug)] +pub struct Automerge(automerge::AutoCommit); + +#[wasm_bindgen] +impl Automerge { + pub fn new(actor: Option) -> Result { + let mut automerge = automerge::AutoCommit::new(); + if let Some(a) = actor { + let a = automerge::ActorId::from(hex::decode(a).map_err(to_js_err)?.to_vec()); + automerge.set_actor(a); + } + Ok(Automerge(automerge)) + } + + #[allow(clippy::should_implement_trait)] + pub fn clone(&mut self, actor: Option) -> Result { + if self.0.pending_ops() > 0 { + self.0.commit(); + } + let mut automerge = Automerge(self.0.clone()); + if let Some(s) = actor { + let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); + automerge.0.set_actor(actor); + } + Ok(automerge) + } + + #[allow(clippy::should_implement_trait)] + pub fn fork(&mut self, actor: Option) -> Result { + let mut automerge = Automerge(self.0.fork()); + if let Some(s) = actor { + let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); + automerge.0.set_actor(actor); + } + Ok(automerge) + } + + pub fn free(self) {} + + #[wasm_bindgen(js_name = pendingOps)] + pub fn pending_ops(&self) -> JsValue { + (self.0.pending_ops() as u32).into() + } + + pub fn commit(&mut self, message: Option, time: Option) -> JsValue { + let mut commit_opts = CommitOptions::default(); + if let Some(message) = message { + commit_opts.set_message(message); + } + if let Some(time) = time { + commit_opts.set_time(time as i64); + } + let hash = self.0.commit_with(commit_opts); + let result = Array::new(); + result.push(&JsValue::from_str(&hex::encode(&hash.0))); + result.into() + } + + pub fn merge(&mut self, other: &mut Automerge) -> Result { + let objs = self.0.merge(&mut other.0)?; + let objs: Array = objs.iter().map(|o| 
JsValue::from(o.to_string())).collect(); + Ok(objs) + } + + pub fn rollback(&mut self) -> f64 { + self.0.rollback() as f64 + } + + pub fn keys(&mut self, obj: JsValue, heads: Option) -> Result { + let obj = self.import(obj)?; + let result = if let Some(heads) = get_heads(heads) { + self.0 + .keys_at(&obj, &heads) + .map(|s| JsValue::from_str(&s)) + .collect() + } else { + self.0.keys(&obj).map(|s| JsValue::from_str(&s)).collect() + }; + Ok(result) + } + + pub fn text(&mut self, obj: JsValue, heads: Option) -> Result { + let obj = self.import(obj)?; + if let Some(heads) = get_heads(heads) { + Ok(self.0.text_at(&obj, &heads)?) + } else { + Ok(self.0.text(&obj)?) + } + } + + pub fn splice( + &mut self, + obj: JsValue, + start: f64, + delete_count: f64, + text: JsValue, + ) -> Result<(), JsValue> { + let obj = self.import(obj)?; + let start = start as usize; + let delete_count = delete_count as usize; + let mut vals = vec![]; + if let Some(t) = text.as_string() { + self.0.splice_text(&obj, start, delete_count, &t)?; + } else { + if let Ok(array) = text.dyn_into::() { + for i in array.iter() { + let value = self + .import_scalar(&i, &None) + .ok_or_else(|| to_js_err("expected scalar"))?; + vals.push(value); + } + } + self.0.splice(&obj, start, delete_count, vals.into_iter())?; + } + Ok(()) + } + + pub fn push(&mut self, obj: JsValue, value: JsValue, datatype: JsValue) -> Result<(), JsValue> { + let obj = self.import(obj)?; + let value = self + .import_scalar(&value, &datatype.as_string()) + .ok_or_else(|| to_js_err("invalid scalar value"))?; + let index = self.0.length(&obj); + self.0.insert(&obj, index, value)?; + Ok(()) + } + + pub fn push_object(&mut self, obj: JsValue, value: JsValue) -> Result, JsValue> { + let obj = self.import(obj)?; + let (value, subvals) = + to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?; + let index = self.0.length(&obj); + let opid = self.0.insert_object(&obj, index, value)?; + self.subset(&opid, subvals)?; + 
Ok(opid.to_string().into()) + } + + pub fn insert( + &mut self, + obj: JsValue, + index: f64, + value: JsValue, + datatype: JsValue, + ) -> Result<(), JsValue> { + let obj = self.import(obj)?; + let index = index as f64; + let value = self + .import_scalar(&value, &datatype.as_string()) + .ok_or_else(|| to_js_err("expected scalar value"))?; + self.0.insert(&obj, index as usize, value)?; + Ok(()) + } + + pub fn insert_object( + &mut self, + obj: JsValue, + index: f64, + value: JsValue, + ) -> Result, JsValue> { + let obj = self.import(obj)?; + let index = index as f64; + let (value, subvals) = + to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?; + let opid = self.0.insert_object(&obj, index as usize, value)?; + self.subset(&opid, subvals)?; + Ok(opid.to_string().into()) + } + + pub fn set( + &mut self, + obj: JsValue, + prop: JsValue, + value: JsValue, + datatype: JsValue, + ) -> Result<(), JsValue> { + let obj = self.import(obj)?; + let prop = self.import_prop(prop)?; + let value = self + .import_scalar(&value, &datatype.as_string()) + .ok_or_else(|| to_js_err("expected scalar value"))?; + self.0.set(&obj, prop, value)?; + Ok(()) + } + + pub fn make( + &mut self, + obj: JsValue, + prop: JsValue, + value: JsValue, + _datatype: JsValue, + ) -> Result { + // remove this + am::log!("doc.make() is depricated - please use doc.set_object() or doc.insert_object()"); + self.set_object(obj, prop, value) + } + + pub fn set_object( + &mut self, + obj: JsValue, + prop: JsValue, + value: JsValue, + ) -> Result { + let obj = self.import(obj)?; + let prop = self.import_prop(prop)?; + let (value, subvals) = + to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?; + let opid = self.0.set_object(&obj, prop, value)?; + self.subset(&opid, subvals)?; + Ok(opid.to_string().into()) + } + + fn subset(&mut self, obj: &am::ObjId, vals: Vec<(am::Prop, JsValue)>) -> Result<(), JsValue> { + for (p, v) in vals { + let (value, subvals) = self.import_value(&v, 
None)?; + //let opid = self.0.set(id, p, value)?; + let opid = match (p, value) { + (Prop::Map(s), Value::Object(objtype)) => Some(self.0.set_object(obj, s, objtype)?), + (Prop::Map(s), Value::Scalar(scalar)) => { + self.0.set(obj, s, scalar)?; + None + } + (Prop::Seq(i), Value::Object(objtype)) => { + Some(self.0.insert_object(obj, i, objtype)?) + } + (Prop::Seq(i), Value::Scalar(scalar)) => { + self.0.insert(obj, i, scalar)?; + None + } + }; + if let Some(opid) = opid { + self.subset(&opid, subvals)?; + } + } + Ok(()) + } + + pub fn inc(&mut self, obj: JsValue, prop: JsValue, value: JsValue) -> Result<(), JsValue> { + let obj = self.import(obj)?; + let prop = self.import_prop(prop)?; + let value: f64 = value + .as_f64() + .ok_or_else(|| to_js_err("inc needs a numberic value"))?; + self.0.inc(&obj, prop, value as i64)?; + Ok(()) + } + + pub fn value( + &mut self, + obj: JsValue, + prop: JsValue, + heads: Option, + ) -> Result, JsValue> { + let obj = self.import(obj)?; + let result = Array::new(); + let prop = to_prop(prop); + let heads = get_heads(heads); + if let Ok(prop) = prop { + let value = if let Some(h) = heads { + self.0.value_at(&obj, prop, &h)? + } else { + self.0.value(&obj, prop)? 
+ }; + match value { + Some((Value::Object(obj_type), obj_id)) => { + result.push(&obj_type.to_string().into()); + result.push(&obj_id.to_string().into()); + Ok(Some(result)) + } + Some((Value::Scalar(value), _)) => { + result.push(&datatype(&value).into()); + result.push(&ScalarValue(value).into()); + Ok(Some(result)) + } + None => Ok(None), + } + } else { + Ok(None) + } + } + + pub fn values( + &mut self, + obj: JsValue, + arg: JsValue, + heads: Option, + ) -> Result { + let obj = self.import(obj)?; + let result = Array::new(); + let prop = to_prop(arg); + if let Ok(prop) = prop { + let values = if let Some(heads) = get_heads(heads) { + self.0.values_at(&obj, prop, &heads) + } else { + self.0.values(&obj, prop) + } + .map_err(to_js_err)?; + for value in values { + match value { + (Value::Object(obj_type), obj_id) => { + let sub = Array::new(); + sub.push(&obj_type.to_string().into()); + sub.push(&obj_id.to_string().into()); + result.push(&sub.into()); + } + (Value::Scalar(value), id) => { + let sub = Array::new(); + sub.push(&datatype(&value).into()); + sub.push(&ScalarValue(value).into()); + sub.push(&id.to_string().into()); + result.push(&sub.into()); + } + } + } + } + Ok(result) + } + + pub fn length(&mut self, obj: JsValue, heads: Option) -> Result { + let obj = self.import(obj)?; + if let Some(heads) = get_heads(heads) { + Ok(self.0.length_at(&obj, &heads) as f64) + } else { + Ok(self.0.length(&obj) as f64) + } + } + + pub fn del(&mut self, obj: JsValue, prop: JsValue) -> Result<(), JsValue> { + let obj = self.import(obj)?; + let prop = to_prop(prop)?; + self.0.del(&obj, prop).map_err(to_js_err)?; + Ok(()) + } + + pub fn mark( + &mut self, + obj: JsValue, + range: JsValue, + name: JsValue, + value: JsValue, + datatype: JsValue, + ) -> Result<(), JsValue> { + let obj = self.import(obj)?; + let re = Regex::new(r"([\[\(])(\d+)\.\.(\d+)([\)\]])").unwrap(); + let range = range.as_string().ok_or("range must be a string")?; + let cap = 
re.captures_iter(&range).next().ok_or("range must be in the form of (start..end] or [start..end) etc... () for sticky, [] for normal")?; + let start: usize = cap[2].parse().map_err(|_| to_js_err("invalid start"))?; + let end: usize = cap[3].parse().map_err(|_| to_js_err("invalid end"))?; + let start_sticky = &cap[1] == "("; + let end_sticky = &cap[4] == ")"; + let name = name + .as_string() + .ok_or("invalid mark name") + .map_err(to_js_err)?; + let value = self + .import_scalar(&value, &datatype.as_string()) + .ok_or_else(|| to_js_err("invalid value"))?; + self.0 + .mark(&obj, start, start_sticky, end, end_sticky, &name, value) + .map_err(to_js_err)?; + Ok(()) + } + + pub fn unmark(&mut self, obj: JsValue, mark: JsValue) -> Result<(), JsValue> { + let obj = self.import(obj)?; + let mark = self.import(mark)?; + self.0.unmark(&obj, &mark).map_err(to_js_err)?; + Ok(()) + } + + pub fn spans(&mut self, obj: JsValue) -> Result { + let obj = self.import(obj)?; + let text = self.0.list(&obj).map_err(to_js_err)?; + let spans = self.0.spans(&obj).map_err(to_js_err)?; + let mut last_pos = 0; + let result = Array::new(); + for s in spans { + let marks = Array::new(); + for m in s.marks { + let mark = Array::new(); + mark.push(&m.0.into()); + mark.push(&datatype(&m.1).into()); + mark.push(&ScalarValue(m.1).into()); + marks.push(&mark.into()); + } + let text_span = &text[last_pos..s.pos]; //.slice(last_pos, s.pos); + if !text_span.is_empty() { + let t: String = text_span + .iter() + .filter_map(|(v, _)| v.as_string()) + .collect(); + result.push(&t.into()); + } + result.push(&marks); + last_pos = s.pos; + //let obj = Object::new().into(); + //js_set(&obj, "pos", s.pos as i32)?; + //js_set(&obj, "marks", marks)?; + //result.push(&obj.into()); + } + let text_span = &text[last_pos..]; + if !text_span.is_empty() { + let t: String = text_span + .iter() + .filter_map(|(v, _)| v.as_string()) + .collect(); + result.push(&t.into()); + } + Ok(result.into()) + } + + pub fn raw_spans(&mut 
self, obj: JsValue) -> Result { + let obj = self.import(obj)?; + let spans = self.0.raw_spans(&obj).map_err(to_js_err)?; + let result = Array::new(); + for s in spans { + result.push(&JsValue::from_serde(&s).map_err(to_js_err)?); + } + Ok(result) + } + + pub fn blame( + &mut self, + obj: JsValue, + baseline: JsValue, + change_sets: JsValue, + ) -> Result { + am::log!("doc.blame() is depricated - please use doc.attribute()"); + self.attribute(obj, baseline, change_sets) + } + + pub fn attribute( + &mut self, + obj: JsValue, + baseline: JsValue, + change_sets: JsValue, + ) -> Result { + let obj = self.import(obj)?; + let baseline = get_js_heads(baseline)?; + let change_sets = change_sets.dyn_into::()?; + let change_sets = change_sets + .iter() + .map(get_js_heads) + .collect::, _>>()?; + let result = self.0.attribute(&obj, &baseline, &change_sets)?; + let result = result + .into_iter() + .map(|cs| { + let add = cs + .add + .iter() + .map::, _>(|range| { + let r = Object::new(); + js_set(&r, "start", range.start as f64)?; + js_set(&r, "end", range.end as f64)?; + Ok(JsValue::from(&r)) + }) + .collect::, JsValue>>()? + .iter() + .collect::(); + let del = cs + .del + .iter() + .map::, _>(|d| { + let r = Object::new(); + js_set(&r, "pos", d.0 as f64)?; + js_set(&r, "val", &d.1)?; + Ok(JsValue::from(&r)) + }) + .collect::, JsValue>>()? + .iter() + .collect::(); + let obj = Object::new(); + js_set(&obj, "add", add)?; + js_set(&obj, "del", del)?; + Ok(obj.into()) + }) + .collect::, JsValue>>()? 
+ .iter() + .collect::(); + Ok(result) + } + + pub fn attribute2( + &mut self, + obj: JsValue, + baseline: JsValue, + change_sets: JsValue, + ) -> Result { + let obj = self.import(obj)?; + let baseline = get_js_heads(baseline)?; + let change_sets = change_sets.dyn_into::()?; + let change_sets = change_sets + .iter() + .map(get_js_heads) + .collect::, _>>()?; + let result = self.0.attribute2(&obj, &baseline, &change_sets)?; + let result = result + .into_iter() + .map(|cs| { + let add = cs + .add + .iter() + .map::, _>(|a| { + let r = Object::new(); + js_set(&r, "actor", &self.0.actor_to_str(a.actor))?; + js_set(&r, "start", a.range.start as f64)?; + js_set(&r, "end", a.range.end as f64)?; + Ok(JsValue::from(&r)) + }) + .collect::, JsValue>>()? + .iter() + .collect::(); + let del = cs + .del + .iter() + .map::, _>(|d| { + let r = Object::new(); + js_set(&r, "actor", &self.0.actor_to_str(d.actor))?; + js_set(&r, "pos", d.pos as f64)?; + js_set(&r, "val", &d.span)?; + Ok(JsValue::from(&r)) + }) + .collect::, JsValue>>()? + .iter() + .collect::(); + let obj = Object::new(); + js_set(&obj, "add", add)?; + js_set(&obj, "del", del)?; + Ok(obj.into()) + }) + .collect::, JsValue>>()? 
+ .iter() + .collect::(); + Ok(result) + } + + pub fn save(&mut self) -> Uint8Array { + Uint8Array::from(self.0.save().as_slice()) + } + + #[wasm_bindgen(js_name = saveIncremental)] + pub fn save_incremental(&mut self) -> Uint8Array { + let bytes = self.0.save_incremental(); + Uint8Array::from(bytes.as_slice()) + } + + #[wasm_bindgen(js_name = loadIncremental)] + pub fn load_incremental(&mut self, data: Uint8Array) -> Result { + let data = data.to_vec(); + let objs = self.0.load_incremental(&data).map_err(to_js_err)?; + let objs: Array = objs.iter().map(|o| JsValue::from(o.to_string())).collect(); + Ok(objs) + } + + #[wasm_bindgen(js_name = applyChanges)] + pub fn apply_changes(&mut self, changes: JsValue) -> Result { + let changes: Vec<_> = JS(changes).try_into()?; + let objs = self.0.apply_changes(changes).map_err(to_js_err)?; + let objs: Array = objs.iter().map(|o| JsValue::from(o.to_string())).collect(); + Ok(objs) + } + + #[wasm_bindgen(js_name = getChanges)] + pub fn get_changes(&mut self, have_deps: JsValue) -> Result { + let deps: Vec<_> = JS(have_deps).try_into()?; + let changes = self.0.get_changes(&deps); + let changes: Array = changes + .iter() + .map(|c| Uint8Array::from(c.raw_bytes())) + .collect(); + Ok(changes) + } + + #[wasm_bindgen(js_name = getChangeByHash)] + pub fn get_change_by_hash(&mut self, hash: JsValue) -> Result { + let hash = hash.into_serde().map_err(to_js_err)?; + let change = self.0.get_change_by_hash(&hash); + if let Some(c) = change { + Ok(Uint8Array::from(c.raw_bytes()).into()) + } else { + Ok(JsValue::null()) + } + } + + #[wasm_bindgen(js_name = getChangesAdded)] + pub fn get_changes_added(&mut self, other: &mut Automerge) -> Result { + let changes = self.0.get_changes_added(&mut other.0); + let changes: Array = changes + .iter() + .map(|c| Uint8Array::from(c.raw_bytes())) + .collect(); + Ok(changes) + } + + #[wasm_bindgen(js_name = getHeads)] + pub fn get_heads(&mut self) -> Array { + let heads = self.0.get_heads(); + let heads: 
Array = heads + .iter() + .map(|h| JsValue::from_str(&hex::encode(&h.0))) + .collect(); + heads + } + + #[wasm_bindgen(js_name = getActorId)] + pub fn get_actor_id(&mut self) -> String { + let actor = self.0.get_actor(); + actor.to_string() + } + + #[wasm_bindgen(js_name = getLastLocalChange)] + pub fn get_last_local_change(&mut self) -> Result { + if let Some(change) = self.0.get_last_local_change() { + Ok(Uint8Array::from(change.raw_bytes())) + } else { + Err(to_js_err("no local changes")) + } + } + + pub fn dump(&self) { + self.0.dump() + } + + #[wasm_bindgen(js_name = getMissingDeps)] + pub fn get_missing_deps(&mut self, heads: Option) -> Result { + let heads = get_heads(heads).unwrap_or_default(); + let deps = self.0.get_missing_deps(&heads); + let deps: Array = deps + .iter() + .map(|h| JsValue::from_str(&hex::encode(&h.0))) + .collect(); + Ok(deps) + } + + #[wasm_bindgen(js_name = receiveSyncMessage)] + pub fn receive_sync_message( + &mut self, + state: &mut SyncState, + message: Uint8Array, + ) -> Result { + let message = message.to_vec(); + let message = am::sync::Message::decode(message.as_slice()).map_err(to_js_err)?; + let objs = self + .0 + .receive_sync_message(&mut state.0, message) + .map_err(to_js_err)?; + let objs: Array = objs.iter().map(|o| JsValue::from(o.to_string())).collect(); + Ok(objs) + } + + #[wasm_bindgen(js_name = generateSyncMessage)] + pub fn generate_sync_message(&mut self, state: &mut SyncState) -> Result { + if let Some(message) = self.0.generate_sync_message(&mut state.0) { + Ok(Uint8Array::from(message.encode().as_slice()).into()) + } else { + Ok(JsValue::null()) + } + } + + #[wasm_bindgen(js_name = toJS)] + pub fn to_js(&self) -> JsValue { + map_to_js(&self.0, &ROOT) + } + + pub fn materialize(&self, obj: JsValue) -> Result { + let obj = self.import(obj).unwrap_or(ROOT); + match self.0.object_type(&obj) { + Some(am::ObjType::Map) => Ok(map_to_js(&self.0, &obj)), + Some(am::ObjType::List) => Ok(list_to_js(&self.0, &obj)), + 
Some(am::ObjType::Text) => Ok(self.0.text(&obj)?.into()), + Some(am::ObjType::Table) => Ok(map_to_js(&self.0, &obj)), + None => Err(to_js_err(format!("invalid obj {}", obj))), + } + } + + fn import(&self, id: JsValue) -> Result { + if let Some(s) = id.as_string() { + if let Some(post) = s.strip_prefix('/') { + let mut obj = ROOT; + let mut is_map = true; + let parts = post.split('/'); + for prop in parts { + if prop.is_empty() { + break; + } + let val = if is_map { + self.0.value(obj, prop)? + } else { + self.0.value(obj, am::Prop::Seq(prop.parse().unwrap()))? + }; + match val { + Some((am::Value::Object(am::ObjType::Map), id)) => { + is_map = true; + obj = id; + } + Some((am::Value::Object(am::ObjType::Table), id)) => { + is_map = true; + obj = id; + } + Some((am::Value::Object(_), id)) => { + is_map = false; + obj = id; + } + None => return Err(to_js_err(format!("invalid path '{}'", s))), + _ => return Err(to_js_err(format!("path '{}' is not an object", s))), + }; + } + Ok(obj) + } else { + Ok(self.0.import(&s)?) 
+ } + } else { + Err(to_js_err("invalid objid")) + } + } + + fn import_prop(&mut self, prop: JsValue) -> Result { + if let Some(s) = prop.as_string() { + Ok(s.into()) + } else if let Some(n) = prop.as_f64() { + Ok((n as usize).into()) + } else { + Err(to_js_err(format!("invalid prop {:?}", prop))) + } + } + + fn import_scalar( + &mut self, + value: &JsValue, + datatype: &Option, + ) -> Option { + match datatype.as_deref() { + Some("boolean") => value.as_bool().map(am::ScalarValue::Boolean), + Some("int") => value.as_f64().map(|v| am::ScalarValue::Int(v as i64)), + Some("uint") => value.as_f64().map(|v| am::ScalarValue::Uint(v as u64)), + Some("f64") => value.as_f64().map(am::ScalarValue::F64), + Some("bytes") => Some(am::ScalarValue::Bytes( + value.clone().dyn_into::().unwrap().to_vec(), + )), + Some("counter") => value.as_f64().map(|v| am::ScalarValue::counter(v as i64)), + Some("timestamp") => value.as_f64().map(|v| am::ScalarValue::Timestamp(v as i64)), + Some("null") => Some(am::ScalarValue::Null), + Some(_) => None, + None => { + if value.is_null() { + Some(am::ScalarValue::Null) + } else if let Some(b) = value.as_bool() { + Some(am::ScalarValue::Boolean(b)) + } else if let Some(s) = value.as_string() { + Some(am::ScalarValue::Str(s.into())) + } else if let Some(n) = value.as_f64() { + if (n.round() - n).abs() < f64::EPSILON { + Some(am::ScalarValue::Int(n as i64)) + } else { + Some(am::ScalarValue::F64(n)) + } + } else if let Ok(d) = value.clone().dyn_into::() { + Some(am::ScalarValue::Timestamp(d.get_time() as i64)) + } else if let Ok(o) = &value.clone().dyn_into::() { + Some(am::ScalarValue::Bytes(o.to_vec())) + } else { + None + } + } + } + } + + fn import_value( + &mut self, + value: &JsValue, + datatype: Option, + ) -> Result<(Value, Vec<(Prop, JsValue)>), JsValue> { + match self.import_scalar(value, &datatype) { + Some(val) => Ok((val.into(), vec![])), + None => { + if let Some((o, subvals)) = to_objtype(value, &datatype) { + Ok((o.into(), subvals)) + } 
else { + web_sys::console::log_2(&"Invalid value".into(), value); + Err(to_js_err("invalid value")) + } + } + } + } +} + +#[wasm_bindgen(js_name = create)] +pub fn init(actor: Option) -> Result { + console_error_panic_hook::set_once(); + Automerge::new(actor) +} + +#[wasm_bindgen(js_name = loadDoc)] +pub fn load(data: Uint8Array, actor: Option) -> Result { + let data = data.to_vec(); + let mut automerge = am::AutoCommit::load(&data).map_err(to_js_err)?; + if let Some(s) = actor { + let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); + automerge.set_actor(actor); + } + Ok(Automerge(automerge)) +} + +#[wasm_bindgen(js_name = encodeChange)] +pub fn encode_change(change: JsValue) -> Result { + let change: am::ExpandedChange = change.into_serde().map_err(to_js_err)?; + let change: Change = change.into(); + Ok(Uint8Array::from(change.raw_bytes())) +} + +#[wasm_bindgen(js_name = decodeChange)] +pub fn decode_change(change: Uint8Array) -> Result { + let change = Change::from_bytes(change.to_vec()).map_err(to_js_err)?; + let change: am::ExpandedChange = change.decode(); + JsValue::from_serde(&change).map_err(to_js_err) +} + +#[wasm_bindgen(js_name = initSyncState)] +pub fn init_sync_state() -> SyncState { + SyncState(am::sync::State::new()) +} + +// this is needed to be compatible with the automerge-js api +#[wasm_bindgen(js_name = importSyncState)] +pub fn import_sync_state(state: JsValue) -> Result { + Ok(SyncState(JS(state).try_into()?)) +} + +// this is needed to be compatible with the automerge-js api +#[wasm_bindgen(js_name = exportSyncState)] +pub fn export_sync_state(state: SyncState) -> JsValue { + JS::from(state.0).into() +} + +#[wasm_bindgen(js_name = encodeSyncMessage)] +pub fn encode_sync_message(message: JsValue) -> Result { + let heads = js_get(&message, "heads")?.try_into()?; + let need = js_get(&message, "need")?.try_into()?; + let changes = js_get(&message, "changes")?.try_into()?; + let have = js_get(&message, 
"have")?.try_into()?; + Ok(Uint8Array::from( + am::sync::Message { + heads, + need, + have, + changes, + } + .encode() + .as_slice(), + )) +} + +#[wasm_bindgen(js_name = decodeSyncMessage)] +pub fn decode_sync_message(msg: Uint8Array) -> Result { + let data = msg.to_vec(); + let msg = am::sync::Message::decode(&data).map_err(to_js_err)?; + let heads = AR::from(msg.heads.as_slice()); + let need = AR::from(msg.need.as_slice()); + let changes = AR::from(msg.changes.as_slice()); + let have = AR::from(msg.have.as_slice()); + let obj = Object::new().into(); + js_set(&obj, "heads", heads)?; + js_set(&obj, "need", need)?; + js_set(&obj, "have", have)?; + js_set(&obj, "changes", changes)?; + Ok(obj) +} + +#[wasm_bindgen(js_name = encodeSyncState)] +pub fn encode_sync_state(state: SyncState) -> Result { + let state = state.0; + Ok(Uint8Array::from(state.encode().as_slice())) +} + +#[wasm_bindgen(js_name = decodeSyncState)] +pub fn decode_sync_state(data: Uint8Array) -> Result { + SyncState::decode(data) +} diff --git a/rust/automerge-wasm/src/sync.rs b/automerge-wasm/src/sync.rs similarity index 60% rename from rust/automerge-wasm/src/sync.rs rename to automerge-wasm/src/sync.rs index c4fd4a86..5a24a28c 100644 --- a/rust/automerge-wasm/src/sync.rs +++ b/automerge-wasm/src/sync.rs @@ -1,11 +1,11 @@ use automerge as am; use automerge::ChangeHash; use js_sys::Uint8Array; -use std::collections::{BTreeSet, HashMap}; +use std::collections::{HashMap, HashSet}; use std::convert::TryInto; use wasm_bindgen::prelude::*; -use crate::interop::{self, to_js_err, AR, JS}; +use crate::interop::{to_js_err, AR, JS}; #[wasm_bindgen] #[derive(Debug)] @@ -24,10 +24,7 @@ impl SyncState { } #[wasm_bindgen(setter, js_name = lastSentHeads)] - pub fn set_last_sent_heads( - &mut self, - heads: JsValue, - ) -> Result<(), interop::error::BadChangeHashes> { + pub fn set_last_sent_heads(&mut self, heads: JsValue) -> Result<(), JsValue> { let heads: Vec = JS(heads).try_into()?; self.0.last_sent_heads = 
heads; Ok(()) @@ -35,9 +32,8 @@ impl SyncState { #[wasm_bindgen(setter, js_name = sentHashes)] pub fn set_sent_hashes(&mut self, hashes: JsValue) -> Result<(), JsValue> { - let hashes_map: HashMap = - serde_wasm_bindgen::from_value(hashes).map_err(to_js_err)?; - let hashes_set: BTreeSet = hashes_map.keys().cloned().collect(); + let hashes_map: HashMap = hashes.into_serde().map_err(to_js_err)?; + let hashes_set: HashSet = hashes_map.keys().cloned().collect(); self.0.sent_hashes = hashes_set; Ok(()) } @@ -47,19 +43,10 @@ impl SyncState { SyncState(self.0.clone()) } - pub(crate) fn decode(data: Uint8Array) -> Result { + pub(crate) fn decode(data: Uint8Array) -> Result { let data = data.to_vec(); - let s = am::sync::State::decode(&data)?; + let s = am::sync::State::decode(&data); + let s = s.map_err(to_js_err)?; Ok(SyncState(s)) } } - -#[derive(Debug, thiserror::Error)] -#[error(transparent)] -pub struct DecodeSyncStateErr(#[from] automerge::sync::DecodeStateError); - -impl From for JsValue { - fn from(e: DecodeSyncStateErr) -> Self { - JsValue::from(e.to_string()) - } -} diff --git a/automerge-wasm/src/value.rs b/automerge-wasm/src/value.rs new file mode 100644 index 00000000..a2388436 --- /dev/null +++ b/automerge-wasm/src/value.rs @@ -0,0 +1,36 @@ +use automerge as am; +use js_sys::Uint8Array; +use wasm_bindgen::prelude::*; + +#[derive(Debug)] +pub struct ScalarValue(pub(crate) am::ScalarValue); + +impl From for JsValue { + fn from(val: ScalarValue) -> Self { + match &val.0 { + am::ScalarValue::Bytes(v) => Uint8Array::from(v.as_slice()).into(), + am::ScalarValue::Str(v) => v.to_string().into(), + am::ScalarValue::Int(v) => (*v as f64).into(), + am::ScalarValue::Uint(v) => (*v as f64).into(), + am::ScalarValue::F64(v) => (*v).into(), + am::ScalarValue::Counter(v) => (f64::from(v)).into(), + am::ScalarValue::Timestamp(v) => js_sys::Date::new(&(*v as f64).into()).into(), + am::ScalarValue::Boolean(v) => (*v).into(), + am::ScalarValue::Null => JsValue::null(), + } + } 
+} + +pub(crate) fn datatype(s: &am::ScalarValue) -> String { + match s { + am::ScalarValue::Bytes(_) => "bytes".into(), + am::ScalarValue::Str(_) => "str".into(), + am::ScalarValue::Int(_) => "int".into(), + am::ScalarValue::Uint(_) => "uint".into(), + am::ScalarValue::F64(_) => "f64".into(), + am::ScalarValue::Counter(_) => "counter".into(), + am::ScalarValue::Timestamp(_) => "timestamp".into(), + am::ScalarValue::Boolean(_) => "boolean".into(), + am::ScalarValue::Null => "null".into(), + } +} diff --git a/automerge-wasm/test/attribute.ts b/automerge-wasm/test/attribute.ts new file mode 100644 index 00000000..a83dc5d4 --- /dev/null +++ b/automerge-wasm/test/attribute.ts @@ -0,0 +1,189 @@ +import { describe, it } from 'mocha'; +//@ts-ignore +import assert from 'assert' +//@ts-ignore +import { BloomFilter } from './helpers/sync' +import { create, loadDoc, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' +import { DecodedSyncMessage, Hash } from '..' 
+ +describe('Automerge', () => { + describe('attribute', () => { + it('should be able to attribute text segments on change sets', () => { + let doc1 = create() + let text = doc1.set_object("_root", "notes","hello little world") + let h1 = doc1.getHeads(); + + let doc2 = doc1.fork(); + doc2.splice(text, 5, 7, " big"); + doc2.text(text) + let h2 = doc2.getHeads(); + assert.deepEqual(doc2.text(text), "hello big world") + + let doc3 = doc1.fork(); + doc3.splice(text, 0, 0, "Well, "); + let h3 = doc3.getHeads(); + assert.deepEqual(doc3.text(text), "Well, hello little world") + + doc1.merge(doc2) + doc1.merge(doc3) + assert.deepEqual(doc1.text(text), "Well, hello big world") + let attribute = doc1.attribute(text, h1, [h2, h3]) + + assert.deepEqual(attribute, [ + { add: [ { start: 11, end: 15 } ], del: [ { pos: 15, val: ' little' } ] }, + { add: [ { start: 0, end: 6 } ], del: [] } + ]) + }) + + it('should be able to hand complex attribute change sets', () => { + let doc1 = create("aaaa") + let text = doc1.set_object("_root", "notes","AAAAAA") + let h1 = doc1.getHeads(); + + let doc2 = doc1.fork("bbbb"); + doc2.splice(text, 0, 2, "BB"); + doc2.commit() + doc2.splice(text, 2, 2, "BB"); + doc2.commit() + doc2.splice(text, 6, 0, "BB"); + doc2.commit() + let h2 = doc2.getHeads(); + assert.deepEqual(doc2.text(text), "BBBBAABB") + + let doc3 = doc1.fork("cccc"); + doc3.splice(text, 1, 1, "C"); + doc3.commit() + doc3.splice(text, 3, 1, "C"); + doc3.commit() + doc3.splice(text, 5, 1, "C"); + doc3.commit() + let h3 = doc3.getHeads(); + // with tombstones its + // AC.AC.AC. 
+ assert.deepEqual(doc3.text(text), "ACACAC") + + doc1.merge(doc2) + + assert.deepEqual(doc1.attribute(text, h1, [h2]), [ + { add: [ {start:0, end: 4}, { start: 6, end: 8 } ], del: [ { pos: 4, val: 'AAAA' } ] }, + ]) + + doc1.merge(doc3) + + assert.deepEqual(doc1.text(text), "BBBBCCACBB") + + // with tombstones its + // BBBB.C..C.AC.BB + assert.deepEqual(doc1.attribute(text, h1, [h2,h3]), [ + { add: [ {start:0, end: 4}, { start: 8, end: 10 } ], del: [ { pos: 4, val: 'A' }, { pos: 5, val: 'AA' }, { pos: 6, val: 'A' } ] }, + { add: [ {start:4, end: 6}, { start: 7, end: 8 } ], del: [ { pos: 5, val: 'A' }, { pos: 6, val: 'A' }, { pos: 8, val: 'A' } ] } + ]) + }) + + it('should not include attribution of text that is inserted and deleted only within change sets', () => { + let doc1 = create() + let text = doc1.set_object("_root", "notes","hello little world") + let h1 = doc1.getHeads(); + + let doc2 = doc1.fork(); + doc2.splice(text, 5, 7, " big"); + doc2.splice(text, 9, 0, " bad"); + doc2.splice(text, 9, 4) + doc2.text(text) + let h2 = doc2.getHeads(); + assert.deepEqual(doc2.text(text), "hello big world") + + let doc3 = doc1.fork(); + doc3.splice(text, 0, 0, "Well, HI THERE"); + doc3.splice(text, 6, 8, "") + let h3 = doc3.getHeads(); + assert.deepEqual(doc3.text(text), "Well, hello little world") + + doc1.merge(doc2) + doc1.merge(doc3) + assert.deepEqual(doc1.text(text), "Well, hello big world") + let attribute = doc1.attribute(text, h1, [h2, h3]) + + assert.deepEqual(attribute, [ + { add: [ { start: 11, end: 15 } ], del: [ { pos: 15, val: ' little' } ] }, + { add: [ { start: 0, end: 6 } ], del: [] } + ]) + }) + + }) + describe('attribute2', () => { + it('should be able to attribute text segments on change sets', () => { + let doc1 = create("aaaa") + let text = doc1.set_object("_root", "notes","hello little world") + let h1 = doc1.getHeads(); + + let doc2 = doc1.fork("bbbb"); + doc2.splice(text, 5, 7, " big"); + doc2.text(text) + let h2 = doc2.getHeads(); + 
assert.deepEqual(doc2.text(text), "hello big world") + + let doc3 = doc1.fork("cccc"); + doc3.splice(text, 0, 0, "Well, "); + let doc4 = doc3.fork("dddd") + doc4.splice(text, 0, 0, "Gee, "); + let h3 = doc4.getHeads(); + assert.deepEqual(doc4.text(text), "Gee, Well, hello little world") + + doc1.merge(doc2) + doc1.merge(doc4) + assert.deepEqual(doc1.text(text), "Gee, Well, hello big world") + let attribute = doc1.attribute2(text, h1, [h2, h3]) + + assert.deepEqual(attribute, [ + { add: [ { actor: "bbbb", start: 16, end: 20 } ], del: [ { actor: "bbbb", pos: 20, val: ' little' } ] }, + { add: [ { actor: "dddd", start:0, end: 5 }, { actor: "cccc", start: 5, end: 11 } ], del: [] } + ]) + }) + + it('should not include attribution of text that is inserted and deleted only within change sets', () => { + let doc1 = create("aaaa") + let text = doc1.set_object("_root", "notes","hello little world") + let h1 = doc1.getHeads(); + + let doc2 = doc1.fork("bbbb"); + doc2.splice(text, 5, 7, " big"); + doc2.splice(text, 9, 0, " bad"); + doc2.splice(text, 9, 4) + doc2.text(text) + let h2 = doc2.getHeads(); + assert.deepEqual(doc2.text(text), "hello big world") + + let doc3 = doc1.fork("cccc"); + doc3.splice(text, 0, 0, "Well, HI THERE"); + doc3.splice(text, 6, 8, "") + let h3 = doc3.getHeads(); + assert.deepEqual(doc3.text(text), "Well, hello little world") + + doc1.merge(doc2) + doc1.merge(doc3) + assert.deepEqual(doc1.text(text), "Well, hello big world") + let attribute = doc1.attribute2(text, h1, [h2, h3]) + + assert.deepEqual(attribute, [ + { add: [ { start: 11, end: 15, actor: "bbbb" } ], del: [ { pos: 15, val: ' little', actor: "bbbb" } ] }, + { add: [ { start: 0, end: 6, actor: "cccc" } ], del: [] } + ]) + + let h4 = doc1.getHeads() + + doc3.splice(text, 24, 0, "!!!") + doc1.merge(doc3) + + let h5 = doc1.getHeads() + + assert.deepEqual(doc1.text(text), "Well, hello big world!!!") + attribute = doc1.attribute2(text, h4, [h5]) + + assert.deepEqual(attribute, [ + { add: [ { 
start: 21, end: 24, actor: "cccc" } ], del: [] }, + { add: [], del: [] } + ]) + }) + }) +}) diff --git a/automerge-wasm/test/helpers/columnar.js b/automerge-wasm/test/helpers/columnar.js new file mode 100644 index 00000000..8d266f5b --- /dev/null +++ b/automerge-wasm/test/helpers/columnar.js @@ -0,0 +1,1415 @@ +const pako = require('pako') +const { copyObject, parseOpId, equalBytes } = require('./common') +const { + utf8ToString, hexStringToBytes, bytesToHexString, + Encoder, Decoder, RLEEncoder, RLEDecoder, DeltaEncoder, DeltaDecoder, BooleanEncoder, BooleanDecoder +} = require('./encoding') + +// Maybe we should be using the platform's built-in hash implementation? +// Node has the crypto module: https://nodejs.org/api/crypto.html and browsers have +// https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/digest +// However, the WebCrypto API is asynchronous (returns promises), which would +// force all our APIs to become asynchronous as well, which would be annoying. +// +// I think on balance, it's safe enough to use a random library off npm: +// - We only need one hash function (not a full suite of crypto algorithms); +// - SHA256 is quite simple and has fairly few opportunities for subtle bugs +// (compared to asymmetric cryptography anyway); +// - It does not need a secure source of random bits and does not need to be +// constant-time; +// - I have reviewed the source code and it seems pretty reasonable. 
+const { Hash } = require('fast-sha256') + +// These bytes don't mean anything, they were generated randomly +const MAGIC_BYTES = new Uint8Array([0x85, 0x6f, 0x4a, 0x83]) + +const CHUNK_TYPE_DOCUMENT = 0 +const CHUNK_TYPE_CHANGE = 1 +const CHUNK_TYPE_DEFLATE = 2 // like CHUNK_TYPE_CHANGE but with DEFLATE compression + +// Minimum number of bytes in a value before we enable DEFLATE compression (there is no point +// compressing very short values since compression may actually make them bigger) +const DEFLATE_MIN_SIZE = 256 + +// The least-significant 3 bits of a columnId indicate its datatype +const COLUMN_TYPE = { + GROUP_CARD: 0, ACTOR_ID: 1, INT_RLE: 2, INT_DELTA: 3, BOOLEAN: 4, + STRING_RLE: 5, VALUE_LEN: 6, VALUE_RAW: 7 +} + +// The 4th-least-significant bit of a columnId is set if the column is DEFLATE-compressed +const COLUMN_TYPE_DEFLATE = 8 + +// In the values in a column of type VALUE_LEN, the bottom four bits indicate the type of the value, +// one of the following types in VALUE_TYPE. The higher bits indicate the length of the value in the +// associated VALUE_RAW column (in bytes). 
+const VALUE_TYPE = { + NULL: 0, FALSE: 1, TRUE: 2, LEB128_UINT: 3, LEB128_INT: 4, IEEE754: 5, + UTF8: 6, BYTES: 7, COUNTER: 8, TIMESTAMP: 9, MIN_UNKNOWN: 10, MAX_UNKNOWN: 15 +} + +// make* actions must be at even-numbered indexes in this list +const ACTIONS = ['makeMap', 'set', 'makeList', 'del', 'makeText', 'inc', 'makeTable', 'link'] + +const OBJECT_TYPE = {makeMap: 'map', makeList: 'list', makeText: 'text', makeTable: 'table'} + +const COMMON_COLUMNS = [ + {columnName: 'objActor', columnId: 0 << 4 | COLUMN_TYPE.ACTOR_ID}, + {columnName: 'objCtr', columnId: 0 << 4 | COLUMN_TYPE.INT_RLE}, + {columnName: 'keyActor', columnId: 1 << 4 | COLUMN_TYPE.ACTOR_ID}, + {columnName: 'keyCtr', columnId: 1 << 4 | COLUMN_TYPE.INT_DELTA}, + {columnName: 'keyStr', columnId: 1 << 4 | COLUMN_TYPE.STRING_RLE}, + {columnName: 'idActor', columnId: 2 << 4 | COLUMN_TYPE.ACTOR_ID}, + {columnName: 'idCtr', columnId: 2 << 4 | COLUMN_TYPE.INT_DELTA}, + {columnName: 'insert', columnId: 3 << 4 | COLUMN_TYPE.BOOLEAN}, + {columnName: 'action', columnId: 4 << 4 | COLUMN_TYPE.INT_RLE}, + {columnName: 'valLen', columnId: 5 << 4 | COLUMN_TYPE.VALUE_LEN}, + {columnName: 'valRaw', columnId: 5 << 4 | COLUMN_TYPE.VALUE_RAW}, + {columnName: 'chldActor', columnId: 6 << 4 | COLUMN_TYPE.ACTOR_ID}, + {columnName: 'chldCtr', columnId: 6 << 4 | COLUMN_TYPE.INT_DELTA} +] + +const CHANGE_COLUMNS = COMMON_COLUMNS.concat([ + {columnName: 'predNum', columnId: 7 << 4 | COLUMN_TYPE.GROUP_CARD}, + {columnName: 'predActor', columnId: 7 << 4 | COLUMN_TYPE.ACTOR_ID}, + {columnName: 'predCtr', columnId: 7 << 4 | COLUMN_TYPE.INT_DELTA} +]) + +const DOC_OPS_COLUMNS = COMMON_COLUMNS.concat([ + {columnName: 'succNum', columnId: 8 << 4 | COLUMN_TYPE.GROUP_CARD}, + {columnName: 'succActor', columnId: 8 << 4 | COLUMN_TYPE.ACTOR_ID}, + {columnName: 'succCtr', columnId: 8 << 4 | COLUMN_TYPE.INT_DELTA} +]) + +const DOCUMENT_COLUMNS = [ + {columnName: 'actor', columnId: 0 << 4 | COLUMN_TYPE.ACTOR_ID}, + {columnName: 'seq', 
columnId: 0 << 4 | COLUMN_TYPE.INT_DELTA}, + {columnName: 'maxOp', columnId: 1 << 4 | COLUMN_TYPE.INT_DELTA}, + {columnName: 'time', columnId: 2 << 4 | COLUMN_TYPE.INT_DELTA}, + {columnName: 'message', columnId: 3 << 4 | COLUMN_TYPE.STRING_RLE}, + {columnName: 'depsNum', columnId: 4 << 4 | COLUMN_TYPE.GROUP_CARD}, + {columnName: 'depsIndex', columnId: 4 << 4 | COLUMN_TYPE.INT_DELTA}, + {columnName: 'extraLen', columnId: 5 << 4 | COLUMN_TYPE.VALUE_LEN}, + {columnName: 'extraRaw', columnId: 5 << 4 | COLUMN_TYPE.VALUE_RAW} +] + +/** + * Maps an opId of the form {counter: 12345, actorId: 'someActorId'} to the form + * {counter: 12345, actorNum: 123, actorId: 'someActorId'}, where the actorNum + * is the index into the `actorIds` array. + */ +function actorIdToActorNum(opId, actorIds) { + if (!opId || !opId.actorId) return opId + const counter = opId.counter + const actorNum = actorIds.indexOf(opId.actorId) + if (actorNum < 0) throw new RangeError('missing actorId') // should not happen + return {counter, actorNum, actorId: opId.actorId} +} + +/** + * Comparison function to pass to Array.sort(), which compares two opIds in the + * form produced by `actorIdToActorNum` so that they are sorted in increasing + * Lamport timestamp order (sorted first by counter, then by actorId). + */ +function compareParsedOpIds(id1, id2) { + if (id1.counter < id2.counter) return -1 + if (id1.counter > id2.counter) return +1 + if (id1.actorId < id2.actorId) return -1 + if (id1.actorId > id2.actorId) return +1 + return 0 +} + +/** + * Takes `changes`, an array of changes (represented as JS objects). Returns an + * object `{changes, actorIds}`, where `changes` is a copy of the argument in + * which all string opIds have been replaced with `{counter, actorNum}` objects, + * and where `actorIds` is a lexicographically sorted array of actor IDs occurring + * in any of the operations. `actorNum` is an index into that array of actorIds. 
+ * If `single` is true, the actorId of the author of the change is moved to the + * beginning of the array of actorIds, so that `actorNum` is zero when referencing + * the author of the change itself. This special-casing is omitted if `single` is + * false. + */ +function parseAllOpIds(changes, single) { + const actors = {}, newChanges = [] + for (let change of changes) { + change = copyObject(change) + actors[change.actor] = true + change.ops = expandMultiOps(change.ops, change.startOp, change.actor) + change.ops = change.ops.map(op => { + op = copyObject(op) + if (op.obj !== '_root') op.obj = parseOpId(op.obj) + if (op.elemId && op.elemId !== '_head') op.elemId = parseOpId(op.elemId) + if (op.child) op.child = parseOpId(op.child) + if (op.pred) op.pred = op.pred.map(parseOpId) + if (op.obj.actorId) actors[op.obj.actorId] = true + if (op.elemId && op.elemId.actorId) actors[op.elemId.actorId] = true + if (op.child && op.child.actorId) actors[op.child.actorId] = true + for (let pred of op.pred) actors[pred.actorId] = true + return op + }) + newChanges.push(change) + } + + let actorIds = Object.keys(actors).sort() + if (single) { + actorIds = [changes[0].actor].concat(actorIds.filter(actor => actor !== changes[0].actor)) + } + for (let change of newChanges) { + change.actorNum = actorIds.indexOf(change.actor) + for (let i = 0; i < change.ops.length; i++) { + let op = change.ops[i] + op.id = {counter: change.startOp + i, actorNum: change.actorNum, actorId: change.actor} + op.obj = actorIdToActorNum(op.obj, actorIds) + op.elemId = actorIdToActorNum(op.elemId, actorIds) + op.child = actorIdToActorNum(op.child, actorIds) + op.pred = op.pred.map(pred => actorIdToActorNum(pred, actorIds)) + } + } + return {changes: newChanges, actorIds} +} + +/** + * Encodes the `obj` property of operation `op` into the two columns + * `objActor` and `objCtr`. 
+ */ +function encodeObjectId(op, columns) { + if (op.obj === '_root') { + columns.objActor.appendValue(null) + columns.objCtr.appendValue(null) + } else if (op.obj.actorNum >= 0 && op.obj.counter > 0) { + columns.objActor.appendValue(op.obj.actorNum) + columns.objCtr.appendValue(op.obj.counter) + } else { + throw new RangeError(`Unexpected objectId reference: ${JSON.stringify(op.obj)}`) + } +} + +/** + * Encodes the `key` and `elemId` properties of operation `op` into the three + * columns `keyActor`, `keyCtr`, and `keyStr`. + */ +function encodeOperationKey(op, columns) { + if (op.key) { + columns.keyActor.appendValue(null) + columns.keyCtr.appendValue(null) + columns.keyStr.appendValue(op.key) + } else if (op.elemId === '_head' && op.insert) { + columns.keyActor.appendValue(null) + columns.keyCtr.appendValue(0) + columns.keyStr.appendValue(null) + } else if (op.elemId && op.elemId.actorNum >= 0 && op.elemId.counter > 0) { + columns.keyActor.appendValue(op.elemId.actorNum) + columns.keyCtr.appendValue(op.elemId.counter) + columns.keyStr.appendValue(null) + } else { + throw new RangeError(`Unexpected operation key: ${JSON.stringify(op)}`) + } +} + +/** + * Encodes the `action` property of operation `op` into the `action` column. + */ +function encodeOperationAction(op, columns) { + const actionCode = ACTIONS.indexOf(op.action) + if (actionCode >= 0) { + columns.action.appendValue(actionCode) + } else if (typeof op.action === 'number') { + columns.action.appendValue(op.action) + } else { + throw new RangeError(`Unexpected operation action: ${op.action}`) + } +} + +/** + * Encodes the integer `value` into the two columns `valLen` and `valRaw`, + * with the datatype tag set to `typeTag`. If `typeTag` is zero, it is set + * automatically to signed or unsigned depending on the sign of the value. + * Values with non-zero type tags are always encoded as signed integers. 
+ */ +function encodeInteger(value, typeTag, columns) { + let numBytes + if (value < 0 || typeTag > 0) { + numBytes = columns.valRaw.appendInt53(value) + if (!typeTag) typeTag = VALUE_TYPE.LEB128_INT + } else { + numBytes = columns.valRaw.appendUint53(value) + typeTag = VALUE_TYPE.LEB128_UINT + } + columns.valLen.appendValue(numBytes << 4 | typeTag) +} + +/** + * Encodes the `value` property of operation `op` into the two columns + * `valLen` and `valRaw`. + */ +function encodeValue(op, columns) { + if ((op.action !== 'set' && op.action !== 'inc') || op.value === null) { + columns.valLen.appendValue(VALUE_TYPE.NULL) + } else if (op.value === false) { + columns.valLen.appendValue(VALUE_TYPE.FALSE) + } else if (op.value === true) { + columns.valLen.appendValue(VALUE_TYPE.TRUE) + } else if (typeof op.value === 'string') { + const numBytes = columns.valRaw.appendRawString(op.value) + columns.valLen.appendValue(numBytes << 4 | VALUE_TYPE.UTF8) + } else if (ArrayBuffer.isView(op.value)) { + const numBytes = columns.valRaw.appendRawBytes(new Uint8Array(op.value.buffer)) + columns.valLen.appendValue(numBytes << 4 | VALUE_TYPE.BYTES) + } else if (op.datatype === 'counter' && typeof op.value === 'number') { + encodeInteger(op.value, VALUE_TYPE.COUNTER, columns) + } else if (op.datatype === 'timestamp' && typeof op.value === 'number') { + encodeInteger(op.value, VALUE_TYPE.TIMESTAMP, columns) + } else if (typeof op.datatype === 'number' && op.datatype >= VALUE_TYPE.MIN_UNKNOWN && + op.datatype <= VALUE_TYPE.MAX_UNKNOWN && op.value instanceof Uint8Array) { + const numBytes = columns.valRaw.appendRawBytes(op.value) + columns.valLen.appendValue(numBytes << 4 | op.datatype) + } else if (op.datatype) { + throw new RangeError(`Unknown datatype ${op.datatype} for value ${op.value}`) + } else if (typeof op.value === 'number') { + if (Number.isInteger(op.value) && op.value <= Number.MAX_SAFE_INTEGER && op.value >= Number.MIN_SAFE_INTEGER) { + encodeInteger(op.value, 0, columns) + } 
else { + // Encode number in 32-bit float if this can be done without loss of precision + const buf32 = new ArrayBuffer(4), view32 = new DataView(buf32) + view32.setFloat32(0, op.value, true) // true means little-endian + if (view32.getFloat32(0, true) === op.value) { + columns.valRaw.appendRawBytes(new Uint8Array(buf32)) + columns.valLen.appendValue(4 << 4 | VALUE_TYPE.IEEE754) + } else { + const buf64 = new ArrayBuffer(8), view64 = new DataView(buf64) + view64.setFloat64(0, op.value, true) // true means little-endian + columns.valRaw.appendRawBytes(new Uint8Array(buf64)) + columns.valLen.appendValue(8 << 4 | VALUE_TYPE.IEEE754) + } + } + } else { + throw new RangeError(`Unsupported value in operation: ${op.value}`) + } +} + +/** + * Given `sizeTag` (an unsigned integer read from a VALUE_LEN column) and `bytes` (a Uint8Array + * read from a VALUE_RAW column, with length `sizeTag >> 4`), this function returns an object of the + * form `{value: value, datatype: datatypeTag}` where `value` is a JavaScript primitive datatype + * corresponding to the value, and `datatypeTag` is a datatype annotation such as 'counter'. 
+ */ +function decodeValue(sizeTag, bytes) { + if (sizeTag === VALUE_TYPE.NULL) { + return {value: null} + } else if (sizeTag === VALUE_TYPE.FALSE) { + return {value: false} + } else if (sizeTag === VALUE_TYPE.TRUE) { + return {value: true} + } else if (sizeTag % 16 === VALUE_TYPE.UTF8) { + return {value: utf8ToString(bytes)} + } else { + if (sizeTag % 16 === VALUE_TYPE.LEB128_UINT) { + return {value: new Decoder(bytes).readUint53()} + } else if (sizeTag % 16 === VALUE_TYPE.LEB128_INT) { + return {value: new Decoder(bytes).readInt53()} + } else if (sizeTag % 16 === VALUE_TYPE.IEEE754) { + const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength) + if (bytes.byteLength === 4) { + return {value: view.getFloat32(0, true)} // true means little-endian + } else if (bytes.byteLength === 8) { + return {value: view.getFloat64(0, true)} + } else { + throw new RangeError(`Invalid length for floating point number: ${bytes.byteLength}`) + } + } else if (sizeTag % 16 === VALUE_TYPE.COUNTER) { + return {value: new Decoder(bytes).readInt53(), datatype: 'counter'} + } else if (sizeTag % 16 === VALUE_TYPE.TIMESTAMP) { + return {value: new Decoder(bytes).readInt53(), datatype: 'timestamp'} + } else { + return {value: bytes, datatype: sizeTag % 16} + } + } +} + +/** + * Reads one value from the column `columns[colIndex]` and interprets it based + * on the column type. `actorIds` is a list of actors that appear in the change; + * `actorIds[0]` is the actorId of the change's author. Mutates the `result` + * object with the value, and returns the number of columns processed (this is 2 + * in the case of a pair of VALUE_LEN and VALUE_RAW columns, which are processed + * in one go). 
+ */ +function decodeValueColumns(columns, colIndex, actorIds, result) { + const { columnId, columnName, decoder } = columns[colIndex] + if (columnId % 8 === COLUMN_TYPE.VALUE_LEN && colIndex + 1 < columns.length && + columns[colIndex + 1].columnId === columnId + 1) { + const sizeTag = decoder.readValue() + const rawValue = columns[colIndex + 1].decoder.readRawBytes(sizeTag >> 4) + const { value, datatype } = decodeValue(sizeTag, rawValue) + result[columnName] = value + if (datatype) result[columnName + '_datatype'] = datatype + return 2 + } else if (columnId % 8 === COLUMN_TYPE.ACTOR_ID) { + const actorNum = decoder.readValue() + if (actorNum === null) { + result[columnName] = null + } else { + if (!actorIds[actorNum]) throw new RangeError(`No actor index ${actorNum}`) + result[columnName] = actorIds[actorNum] + } + } else { + result[columnName] = decoder.readValue() + } + return 1 +} + +/** + * Encodes an array of operations in a set of columns. The operations need to + * be parsed with `parseAllOpIds()` beforehand. If `forDocument` is true, we use + * the column structure of a whole document, otherwise we use the column + * structure for an individual change. Returns an array of `{id, name, encoder}` + * objects. 
+ */ +function encodeOps(ops, forDocument) { + const columns = { + objActor : new RLEEncoder('uint'), + objCtr : new RLEEncoder('uint'), + keyActor : new RLEEncoder('uint'), + keyCtr : new DeltaEncoder(), + keyStr : new RLEEncoder('utf8'), + insert : new BooleanEncoder(), + action : new RLEEncoder('uint'), + valLen : new RLEEncoder('uint'), + valRaw : new Encoder(), + chldActor : new RLEEncoder('uint'), + chldCtr : new DeltaEncoder() + } + + if (forDocument) { + columns.idActor = new RLEEncoder('uint') + columns.idCtr = new DeltaEncoder() + columns.succNum = new RLEEncoder('uint') + columns.succActor = new RLEEncoder('uint') + columns.succCtr = new DeltaEncoder() + } else { + columns.predNum = new RLEEncoder('uint') + columns.predCtr = new DeltaEncoder() + columns.predActor = new RLEEncoder('uint') + } + + for (let op of ops) { + encodeObjectId(op, columns) + encodeOperationKey(op, columns) + columns.insert.appendValue(!!op.insert) + encodeOperationAction(op, columns) + encodeValue(op, columns) + + if (op.child && op.child.counter) { + columns.chldActor.appendValue(op.child.actorNum) + columns.chldCtr.appendValue(op.child.counter) + } else { + columns.chldActor.appendValue(null) + columns.chldCtr.appendValue(null) + } + + if (forDocument) { + columns.idActor.appendValue(op.id.actorNum) + columns.idCtr.appendValue(op.id.counter) + columns.succNum.appendValue(op.succ.length) + op.succ.sort(compareParsedOpIds) + for (let i = 0; i < op.succ.length; i++) { + columns.succActor.appendValue(op.succ[i].actorNum) + columns.succCtr.appendValue(op.succ[i].counter) + } + } else { + columns.predNum.appendValue(op.pred.length) + op.pred.sort(compareParsedOpIds) + for (let i = 0; i < op.pred.length; i++) { + columns.predActor.appendValue(op.pred[i].actorNum) + columns.predCtr.appendValue(op.pred[i].counter) + } + } + } + + let columnList = [] + for (let {columnName, columnId} of forDocument ? 
DOC_OPS_COLUMNS : CHANGE_COLUMNS) { + if (columns[columnName]) columnList.push({id: columnId, name: columnName, encoder: columns[columnName]}) + } + return columnList.sort((a, b) => a.id - b.id) +} + +function expandMultiOps(ops, startOp, actor) { + let opNum = startOp + let expandedOps = [] + for (const op of ops) { + if (op.action === 'set' && op.values && op.insert) { + if (op.pred.length !== 0) throw new RangeError('multi-insert pred must be empty') + let lastElemId = op.elemId + for (const value of op.values) { + expandedOps.push({action: 'set', obj: op.obj, elemId: lastElemId, value, pred: [], insert: true}) + lastElemId = `${opNum}@${actor}` + opNum += 1 + } + } else if (op.action === 'del' && op.multiOp > 1) { + if (op.pred.length !== 1) throw new RangeError('multiOp deletion must have exactly one pred') + const startElemId = parseOpId(op.elemId), startPred = parseOpId(op.pred[0]) + for (let i = 0; i < op.multiOp; i++) { + const elemId = `${startElemId.counter + i}@${startElemId.actorId}` + const pred = [`${startPred.counter + i}@${startPred.actorId}`] + expandedOps.push({action: 'del', obj: op.obj, elemId, pred}) + opNum += 1 + } + } else { + expandedOps.push(op) + opNum += 1 + } + } + return expandedOps +} + +/** + * Takes a change as decoded by `decodeColumns`, and changes it into the form + * expected by the rest of the backend. If `forDocument` is true, we use the op + * structure of a whole document, otherwise we use the op structure for an + * individual change. + */ +function decodeOps(ops, forDocument) { + const newOps = [] + for (let op of ops) { + const obj = (op.objCtr === null) ? '_root' : `${op.objCtr}@${op.objActor}` + const elemId = op.keyStr ? undefined : (op.keyCtr === 0 ? '_head' : `${op.keyCtr}@${op.keyActor}`) + const action = ACTIONS[op.action] || op.action + const newOp = elemId ? 
{obj, elemId, action} : {obj, key: op.keyStr, action} + newOp.insert = !!op.insert + if (ACTIONS[op.action] === 'set' || ACTIONS[op.action] === 'inc') { + newOp.value = op.valLen + if (op.valLen_datatype) newOp.datatype = op.valLen_datatype + } + if (!!op.chldCtr !== !!op.chldActor) { + throw new RangeError(`Mismatched child columns: ${op.chldCtr} and ${op.chldActor}`) + } + if (op.chldCtr !== null) newOp.child = `${op.chldCtr}@${op.chldActor}` + if (forDocument) { + newOp.id = `${op.idCtr}@${op.idActor}` + newOp.succ = op.succNum.map(succ => `${succ.succCtr}@${succ.succActor}`) + checkSortedOpIds(op.succNum.map(succ => ({counter: succ.succCtr, actorId: succ.succActor}))) + } else { + newOp.pred = op.predNum.map(pred => `${pred.predCtr}@${pred.predActor}`) + checkSortedOpIds(op.predNum.map(pred => ({counter: pred.predCtr, actorId: pred.predActor}))) + } + newOps.push(newOp) + } + return newOps +} + +/** + * Throws an exception if the opIds in the given array are not in sorted order. + */ +function checkSortedOpIds(opIds) { + let last = null + for (let opId of opIds) { + if (last && compareParsedOpIds(last, opId) !== -1) { + throw new RangeError('operation IDs are not in ascending order') + } + last = opId + } +} + +function encoderByColumnId(columnId) { + if ((columnId & 7) === COLUMN_TYPE.INT_DELTA) { + return new DeltaEncoder() + } else if ((columnId & 7) === COLUMN_TYPE.BOOLEAN) { + return new BooleanEncoder() + } else if ((columnId & 7) === COLUMN_TYPE.STRING_RLE) { + return new RLEEncoder('utf8') + } else if ((columnId & 7) === COLUMN_TYPE.VALUE_RAW) { + return new Encoder() + } else { + return new RLEEncoder('uint') + } +} + +function decoderByColumnId(columnId, buffer) { + if ((columnId & 7) === COLUMN_TYPE.INT_DELTA) { + return new DeltaDecoder(buffer) + } else if ((columnId & 7) === COLUMN_TYPE.BOOLEAN) { + return new BooleanDecoder(buffer) + } else if ((columnId & 7) === COLUMN_TYPE.STRING_RLE) { + return new RLEDecoder('utf8', buffer) + } else if 
((columnId & 7) === COLUMN_TYPE.VALUE_RAW) { + return new Decoder(buffer) + } else { + return new RLEDecoder('uint', buffer) + } +} + +function makeDecoders(columns, columnSpec) { + const emptyBuf = new Uint8Array(0) + let decoders = [], columnIndex = 0, specIndex = 0 + + while (columnIndex < columns.length || specIndex < columnSpec.length) { + if (columnIndex === columns.length || + (specIndex < columnSpec.length && columnSpec[specIndex].columnId < columns[columnIndex].columnId)) { + const {columnId, columnName} = columnSpec[specIndex] + decoders.push({columnId, columnName, decoder: decoderByColumnId(columnId, emptyBuf)}) + specIndex++ + } else if (specIndex === columnSpec.length || columns[columnIndex].columnId < columnSpec[specIndex].columnId) { + const {columnId, buffer} = columns[columnIndex] + decoders.push({columnId, decoder: decoderByColumnId(columnId, buffer)}) + columnIndex++ + } else { // columns[columnIndex].columnId === columnSpec[specIndex].columnId + const {columnId, buffer} = columns[columnIndex], {columnName} = columnSpec[specIndex] + decoders.push({columnId, columnName, decoder: decoderByColumnId(columnId, buffer)}) + columnIndex++ + specIndex++ + } + } + return decoders +} + +function decodeColumns(columns, actorIds, columnSpec) { + columns = makeDecoders(columns, columnSpec) + let parsedRows = [] + while (columns.some(col => !col.decoder.done)) { + let row = {}, col = 0 + while (col < columns.length) { + const columnId = columns[col].columnId + let groupId = columnId >> 4, groupCols = 1 + while (col + groupCols < columns.length && columns[col + groupCols].columnId >> 4 === groupId) { + groupCols++ + } + + if (columnId % 8 === COLUMN_TYPE.GROUP_CARD) { + const values = [], count = columns[col].decoder.readValue() + for (let i = 0; i < count; i++) { + let value = {} + for (let colOffset = 1; colOffset < groupCols; colOffset++) { + decodeValueColumns(columns, col + colOffset, actorIds, value) + } + values.push(value) + } + 
row[columns[col].columnName] = values + col += groupCols + } else { + col += decodeValueColumns(columns, col, actorIds, row) + } + } + parsedRows.push(row) + } + return parsedRows +} + +function decodeColumnInfo(decoder) { + // A number that is all 1 bits except for the bit that indicates whether a column is + // deflate-compressed. We ignore this bit when checking whether columns are sorted by ID. + const COLUMN_ID_MASK = (-1 ^ COLUMN_TYPE_DEFLATE) >>> 0 + + let lastColumnId = -1, columns = [], numColumns = decoder.readUint53() + for (let i = 0; i < numColumns; i++) { + const columnId = decoder.readUint53(), bufferLen = decoder.readUint53() + if ((columnId & COLUMN_ID_MASK) <= (lastColumnId & COLUMN_ID_MASK)) { + throw new RangeError('Columns must be in ascending order') + } + lastColumnId = columnId + columns.push({columnId, bufferLen}) + } + return columns +} + +function encodeColumnInfo(encoder, columns) { + const nonEmptyColumns = columns.filter(column => column.encoder.buffer.byteLength > 0) + encoder.appendUint53(nonEmptyColumns.length) + for (let column of nonEmptyColumns) { + encoder.appendUint53(column.id) + encoder.appendUint53(column.encoder.buffer.byteLength) + } +} + +function decodeChangeHeader(decoder) { + const numDeps = decoder.readUint53(), deps = [] + for (let i = 0; i < numDeps; i++) { + deps.push(bytesToHexString(decoder.readRawBytes(32))) + } + let change = { + actor: decoder.readHexString(), + seq: decoder.readUint53(), + startOp: decoder.readUint53(), + time: decoder.readInt53(), + message: decoder.readPrefixedString(), + deps + } + const actorIds = [change.actor], numActorIds = decoder.readUint53() + for (let i = 0; i < numActorIds; i++) actorIds.push(decoder.readHexString()) + change.actorIds = actorIds + return change +} + +/** + * Assembles a chunk of encoded data containing a checksum, headers, and a + * series of encoded columns. Calls `encodeHeaderCallback` with an encoder that + * should be used to add the headers. 
The columns should be given as `columns`. + */ +function encodeContainer(chunkType, encodeContentsCallback) { + const CHECKSUM_SIZE = 4 // checksum is first 4 bytes of SHA-256 hash of the rest of the data + const HEADER_SPACE = MAGIC_BYTES.byteLength + CHECKSUM_SIZE + 1 + 5 // 1 byte type + 5 bytes length + const body = new Encoder() + // Make space for the header at the beginning of the body buffer. We will + // copy the header in here later. This is cheaper than copying the body since + // the body is likely to be much larger than the header. + body.appendRawBytes(new Uint8Array(HEADER_SPACE)) + encodeContentsCallback(body) + + const bodyBuf = body.buffer + const header = new Encoder() + header.appendByte(chunkType) + header.appendUint53(bodyBuf.byteLength - HEADER_SPACE) + + // Compute the hash over chunkType, length, and body + const headerBuf = header.buffer + const sha256 = new Hash() + sha256.update(headerBuf) + sha256.update(bodyBuf.subarray(HEADER_SPACE)) + const hash = sha256.digest(), checksum = hash.subarray(0, CHECKSUM_SIZE) + + // Copy header into the body buffer so that they are contiguous + bodyBuf.set(MAGIC_BYTES, HEADER_SPACE - headerBuf.byteLength - CHECKSUM_SIZE - MAGIC_BYTES.byteLength) + bodyBuf.set(checksum, HEADER_SPACE - headerBuf.byteLength - CHECKSUM_SIZE) + bodyBuf.set(headerBuf, HEADER_SPACE - headerBuf.byteLength) + return {hash, bytes: bodyBuf.subarray(HEADER_SPACE - headerBuf.byteLength - CHECKSUM_SIZE - MAGIC_BYTES.byteLength)} +} + +function decodeContainerHeader(decoder, computeHash) { + if (!equalBytes(decoder.readRawBytes(MAGIC_BYTES.byteLength), MAGIC_BYTES)) { + throw new RangeError('Data does not begin with magic bytes 85 6f 4a 83') + } + const expectedHash = decoder.readRawBytes(4) + const hashStartOffset = decoder.offset + const chunkType = decoder.readByte() + const chunkLength = decoder.readUint53() + const header = {chunkType, chunkLength, chunkData: decoder.readRawBytes(chunkLength)} + + if (computeHash) { + const 
sha256 = new Hash() + sha256.update(decoder.buf.subarray(hashStartOffset, decoder.offset)) + const binaryHash = sha256.digest() + if (!equalBytes(binaryHash.subarray(0, 4), expectedHash)) { + throw new RangeError('checksum does not match data') + } + header.hash = bytesToHexString(binaryHash) + } + return header +} + +/** + * Returns the checksum of a change (bytes 4 to 7) as a 32-bit unsigned integer. + */ +function getChangeChecksum(change) { + if (change[0] !== MAGIC_BYTES[0] || change[1] !== MAGIC_BYTES[1] || + change[2] !== MAGIC_BYTES[2] || change[3] !== MAGIC_BYTES[3]) { + throw new RangeError('Data does not begin with magic bytes 85 6f 4a 83') + } + return ((change[4] << 24) | (change[5] << 16) | (change[6] << 8) | change[7]) >>> 0 +} + +function encodeChange(changeObj) { + const { changes, actorIds } = parseAllOpIds([changeObj], true) + const change = changes[0] + + const { hash, bytes } = encodeContainer(CHUNK_TYPE_CHANGE, encoder => { + if (!Array.isArray(change.deps)) throw new TypeError('deps is not an array') + encoder.appendUint53(change.deps.length) + for (let hash of change.deps.slice().sort()) { + encoder.appendRawBytes(hexStringToBytes(hash)) + } + encoder.appendHexString(change.actor) + encoder.appendUint53(change.seq) + encoder.appendUint53(change.startOp) + encoder.appendInt53(change.time) + encoder.appendPrefixedString(change.message || '') + encoder.appendUint53(actorIds.length - 1) + for (let actor of actorIds.slice(1)) encoder.appendHexString(actor) + + const columns = encodeOps(change.ops, false) + encodeColumnInfo(encoder, columns) + for (let column of columns) encoder.appendRawBytes(column.encoder.buffer) + if (change.extraBytes) encoder.appendRawBytes(change.extraBytes) + }) + + const hexHash = bytesToHexString(hash) + if (changeObj.hash && changeObj.hash !== hexHash) { + throw new RangeError(`Change hash does not match encoding: ${changeObj.hash} != ${hexHash}`) + } + return (bytes.byteLength >= DEFLATE_MIN_SIZE) ? 
deflateChange(bytes) : bytes +} + +function decodeChangeColumns(buffer) { + if (buffer[8] === CHUNK_TYPE_DEFLATE) buffer = inflateChange(buffer) + const decoder = new Decoder(buffer) + const header = decodeContainerHeader(decoder, true) + const chunkDecoder = new Decoder(header.chunkData) + if (!decoder.done) throw new RangeError('Encoded change has trailing data') + if (header.chunkType !== CHUNK_TYPE_CHANGE) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) + + const change = decodeChangeHeader(chunkDecoder) + const columns = decodeColumnInfo(chunkDecoder) + for (let i = 0; i < columns.length; i++) { + if ((columns[i].columnId & COLUMN_TYPE_DEFLATE) !== 0) { + throw new RangeError('change must not contain deflated columns') + } + columns[i].buffer = chunkDecoder.readRawBytes(columns[i].bufferLen) + } + if (!chunkDecoder.done) { + const restLen = chunkDecoder.buf.byteLength - chunkDecoder.offset + change.extraBytes = chunkDecoder.readRawBytes(restLen) + } + + change.columns = columns + change.hash = header.hash + return change +} + +/** + * Decodes one change in binary format into its JS object representation. + */ +function decodeChange(buffer) { + const change = decodeChangeColumns(buffer) + change.ops = decodeOps(decodeColumns(change.columns, change.actorIds, CHANGE_COLUMNS), false) + delete change.actorIds + delete change.columns + return change +} + +/** + * Decodes the header fields of a change in binary format, but does not decode + * the operations. Saves work when we only need to inspect the headers. Only + * computes the hash of the change if `computeHash` is true. 
+ */ +function decodeChangeMeta(buffer, computeHash) { + if (buffer[8] === CHUNK_TYPE_DEFLATE) buffer = inflateChange(buffer) + const header = decodeContainerHeader(new Decoder(buffer), computeHash) + if (header.chunkType !== CHUNK_TYPE_CHANGE) { + throw new RangeError('Buffer chunk type is not a change') + } + const meta = decodeChangeHeader(new Decoder(header.chunkData)) + meta.change = buffer + if (computeHash) meta.hash = header.hash + return meta +} + +/** + * Compresses a binary change using DEFLATE. + */ +function deflateChange(buffer) { + const header = decodeContainerHeader(new Decoder(buffer), false) + if (header.chunkType !== CHUNK_TYPE_CHANGE) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) + const compressed = pako.deflateRaw(header.chunkData) + const encoder = new Encoder() + encoder.appendRawBytes(buffer.subarray(0, 8)) // copy MAGIC_BYTES and checksum + encoder.appendByte(CHUNK_TYPE_DEFLATE) + encoder.appendUint53(compressed.byteLength) + encoder.appendRawBytes(compressed) + return encoder.buffer +} + +/** + * Decompresses a binary change that has been compressed with DEFLATE. + */ +function inflateChange(buffer) { + const header = decodeContainerHeader(new Decoder(buffer), false) + if (header.chunkType !== CHUNK_TYPE_DEFLATE) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) + const decompressed = pako.inflateRaw(header.chunkData) + const encoder = new Encoder() + encoder.appendRawBytes(buffer.subarray(0, 8)) // copy MAGIC_BYTES and checksum + encoder.appendByte(CHUNK_TYPE_CHANGE) + encoder.appendUint53(decompressed.byteLength) + encoder.appendRawBytes(decompressed) + return encoder.buffer +} + +/** + * Takes an Uint8Array that may contain multiple concatenated changes, and + * returns an array of subarrays, each subarray containing one change. 
+ */ +function splitContainers(buffer) { + let decoder = new Decoder(buffer), chunks = [], startOffset = 0 + while (!decoder.done) { + decodeContainerHeader(decoder, false) + chunks.push(buffer.subarray(startOffset, decoder.offset)) + startOffset = decoder.offset + } + return chunks +} + +/** + * Decodes a list of changes from the binary format into JS objects. + * `binaryChanges` is an array of `Uint8Array` objects. + */ +function decodeChanges(binaryChanges) { + let decoded = [] + for (let binaryChange of binaryChanges) { + for (let chunk of splitContainers(binaryChange)) { + if (chunk[8] === CHUNK_TYPE_DOCUMENT) { + decoded = decoded.concat(decodeDocument(chunk)) + } else if (chunk[8] === CHUNK_TYPE_CHANGE || chunk[8] === CHUNK_TYPE_DEFLATE) { + decoded.push(decodeChange(chunk)) + } else { + // ignoring chunk of unknown type + } + } + } + return decoded +} + +function sortOpIds(a, b) { + if (a === b) return 0 + if (a === '_root') return -1 + if (b === '_root') return +1 + const a_ = parseOpId(a), b_ = parseOpId(b) + if (a_.counter < b_.counter) return -1 + if (a_.counter > b_.counter) return +1 + if (a_.actorId < b_.actorId) return -1 + if (a_.actorId > b_.actorId) return +1 + return 0 +} + +function groupDocumentOps(changes) { + let byObjectId = {}, byReference = {}, objectType = {} + for (let change of changes) { + for (let i = 0; i < change.ops.length; i++) { + const op = change.ops[i], opId = `${op.id.counter}@${op.id.actorId}` + const objectId = (op.obj === '_root') ? 
'_root' : `${op.obj.counter}@${op.obj.actorId}` + if (op.action.startsWith('make')) { + objectType[opId] = op.action + if (op.action === 'makeList' || op.action === 'makeText') { + byReference[opId] = {'_head': []} + } + } + + let key + if (objectId === '_root' || objectType[objectId] === 'makeMap' || objectType[objectId] === 'makeTable') { + key = op.key + } else if (objectType[objectId] === 'makeList' || objectType[objectId] === 'makeText') { + if (op.insert) { + key = opId + const ref = (op.elemId === '_head') ? '_head' : `${op.elemId.counter}@${op.elemId.actorId}` + byReference[objectId][ref].push(opId) + byReference[objectId][opId] = [] + } else { + key = `${op.elemId.counter}@${op.elemId.actorId}` + } + } else { + throw new RangeError(`Unknown object type for object ${objectId}`) + } + + if (!byObjectId[objectId]) byObjectId[objectId] = {} + if (!byObjectId[objectId][key]) byObjectId[objectId][key] = {} + byObjectId[objectId][key][opId] = op + op.succ = [] + + for (let pred of op.pred) { + const predId = `${pred.counter}@${pred.actorId}` + if (!byObjectId[objectId][key][predId]) { + throw new RangeError(`No predecessor operation ${predId}`) + } + byObjectId[objectId][key][predId].succ.push(op.id) + } + } + } + + let ops = [] + for (let objectId of Object.keys(byObjectId).sort(sortOpIds)) { + let keys = [] + if (objectType[objectId] === 'makeList' || objectType[objectId] === 'makeText') { + let stack = ['_head'] + while (stack.length > 0) { + const key = stack.pop() + if (key !== '_head') keys.push(key) + for (let opId of byReference[objectId][key].sort(sortOpIds)) stack.push(opId) + } + } else { + // FIXME JavaScript sorts based on UTF-16 encoding. 
We should change this to use the UTF-8 + // encoding instead (the sort order will be different beyond the basic multilingual plane) + keys = Object.keys(byObjectId[objectId]).sort() + } + + for (let key of keys) { + for (let opId of Object.keys(byObjectId[objectId][key]).sort(sortOpIds)) { + const op = byObjectId[objectId][key][opId] + if (op.action !== 'del') ops.push(op) + } + } + } + return ops +} + +/** + * Takes a set of operations `ops` loaded from an encoded document, and + * reconstructs the changes that they originally came from. + * Does not return anything, only mutates `changes`. + */ +function groupChangeOps(changes, ops) { + let changesByActor = {} // map from actorId to array of changes by that actor + for (let change of changes) { + change.ops = [] + if (!changesByActor[change.actor]) changesByActor[change.actor] = [] + if (change.seq !== changesByActor[change.actor].length + 1) { + throw new RangeError(`Expected seq = ${changesByActor[change.actor].length + 1}, got ${change.seq}`) + } + if (change.seq > 1 && changesByActor[change.actor][change.seq - 2].maxOp > change.maxOp) { + throw new RangeError('maxOp must increase monotonically per actor') + } + changesByActor[change.actor].push(change) + } + + let opsById = {} + for (let op of ops) { + if (op.action === 'del') throw new RangeError('document should not contain del operations') + op.pred = opsById[op.id] ? opsById[op.id].pred : [] + opsById[op.id] = op + for (let succ of op.succ) { + if (!opsById[succ]) { + if (op.elemId) { + const elemId = op.insert ? 
op.id : op.elemId + opsById[succ] = {id: succ, action: 'del', obj: op.obj, elemId, pred: []} + } else { + opsById[succ] = {id: succ, action: 'del', obj: op.obj, key: op.key, pred: []} + } + } + opsById[succ].pred.push(op.id) + } + delete op.succ + } + for (let op of Object.values(opsById)) { + if (op.action === 'del') ops.push(op) + } + + for (let op of ops) { + const { counter, actorId } = parseOpId(op.id) + const actorChanges = changesByActor[actorId] + // Binary search to find the change that should contain this operation + let left = 0, right = actorChanges.length + while (left < right) { + const index = Math.floor((left + right) / 2) + if (actorChanges[index].maxOp < counter) { + left = index + 1 + } else { + right = index + } + } + if (left >= actorChanges.length) { + throw new RangeError(`Operation ID ${op.id} outside of allowed range`) + } + actorChanges[left].ops.push(op) + } + + for (let change of changes) { + change.ops.sort((op1, op2) => sortOpIds(op1.id, op2.id)) + change.startOp = change.maxOp - change.ops.length + 1 + delete change.maxOp + for (let i = 0; i < change.ops.length; i++) { + const op = change.ops[i], expectedId = `${change.startOp + i}@${change.actor}` + if (op.id !== expectedId) { + throw new RangeError(`Expected opId ${expectedId}, got ${op.id}`) + } + delete op.id + } + } +} + +function encodeDocumentChanges(changes) { + const columns = { // see DOCUMENT_COLUMNS + actor : new RLEEncoder('uint'), + seq : new DeltaEncoder(), + maxOp : new DeltaEncoder(), + time : new DeltaEncoder(), + message : new RLEEncoder('utf8'), + depsNum : new RLEEncoder('uint'), + depsIndex : new DeltaEncoder(), + extraLen : new RLEEncoder('uint'), + extraRaw : new Encoder() + } + let indexByHash = {} // map from change hash to its index in the changes array + let heads = {} // change hashes that are not a dependency of any other change + + for (let i = 0; i < changes.length; i++) { + const change = changes[i] + indexByHash[change.hash] = i + heads[change.hash] = 
true + + columns.actor.appendValue(change.actorNum) + columns.seq.appendValue(change.seq) + columns.maxOp.appendValue(change.startOp + change.ops.length - 1) + columns.time.appendValue(change.time) + columns.message.appendValue(change.message) + columns.depsNum.appendValue(change.deps.length) + + for (let dep of change.deps) { + if (typeof indexByHash[dep] !== 'number') { + throw new RangeError(`Unknown dependency hash: ${dep}`) + } + columns.depsIndex.appendValue(indexByHash[dep]) + if (heads[dep]) delete heads[dep] + } + + if (change.extraBytes) { + columns.extraLen.appendValue(change.extraBytes.byteLength << 4 | VALUE_TYPE.BYTES) + columns.extraRaw.appendRawBytes(change.extraBytes) + } else { + columns.extraLen.appendValue(VALUE_TYPE.BYTES) // zero-length byte array + } + } + + let changesColumns = [] + for (let {columnName, columnId} of DOCUMENT_COLUMNS) { + changesColumns.push({id: columnId, name: columnName, encoder: columns[columnName]}) + } + changesColumns.sort((a, b) => a.id - b.id) + return { changesColumns, heads: Object.keys(heads).sort() } +} + +function decodeDocumentChanges(changes, expectedHeads) { + let heads = {} // change hashes that are not a dependency of any other change + for (let i = 0; i < changes.length; i++) { + let change = changes[i] + change.deps = [] + for (let index of change.depsNum.map(d => d.depsIndex)) { + if (!changes[index] || !changes[index].hash) { + throw new RangeError(`No hash for index ${index} while processing index ${i}`) + } + const hash = changes[index].hash + change.deps.push(hash) + if (heads[hash]) delete heads[hash] + } + change.deps.sort() + delete change.depsNum + + if (change.extraLen_datatype !== VALUE_TYPE.BYTES) { + throw new RangeError(`Bad datatype for extra bytes: ${VALUE_TYPE.BYTES}`) + } + change.extraBytes = change.extraLen + delete change.extraLen_datatype + + // Encoding and decoding again to compute the hash of the change + changes[i] = decodeChange(encodeChange(change)) + heads[changes[i].hash] = 
true + } + + const actualHeads = Object.keys(heads).sort() + let headsEqual = (actualHeads.length === expectedHeads.length), i = 0 + while (headsEqual && i < actualHeads.length) { + headsEqual = (actualHeads[i] === expectedHeads[i]) + i++ + } + if (!headsEqual) { + throw new RangeError(`Mismatched heads hashes: expected ${expectedHeads.join(', ')}, got ${actualHeads.join(', ')}`) + } +} + +/** + * Transforms a list of changes into a binary representation of the document state. + */ +function encodeDocument(binaryChanges) { + const { changes, actorIds } = parseAllOpIds(decodeChanges(binaryChanges), false) + const { changesColumns, heads } = encodeDocumentChanges(changes) + const opsColumns = encodeOps(groupDocumentOps(changes), true) + for (let column of changesColumns) deflateColumn(column) + for (let column of opsColumns) deflateColumn(column) + + return encodeContainer(CHUNK_TYPE_DOCUMENT, encoder => { + encoder.appendUint53(actorIds.length) + for (let actor of actorIds) { + encoder.appendHexString(actor) + } + encoder.appendUint53(heads.length) + for (let head of heads.sort()) { + encoder.appendRawBytes(hexStringToBytes(head)) + } + encodeColumnInfo(encoder, changesColumns) + encodeColumnInfo(encoder, opsColumns) + for (let column of changesColumns) encoder.appendRawBytes(column.encoder.buffer) + for (let column of opsColumns) encoder.appendRawBytes(column.encoder.buffer) + }).bytes +} + +function decodeDocumentHeader(buffer) { + const documentDecoder = new Decoder(buffer) + const header = decodeContainerHeader(documentDecoder, true) + const decoder = new Decoder(header.chunkData) + if (!documentDecoder.done) throw new RangeError('Encoded document has trailing data') + if (header.chunkType !== CHUNK_TYPE_DOCUMENT) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) + + const actorIds = [], numActors = decoder.readUint53() + for (let i = 0; i < numActors; i++) { + actorIds.push(decoder.readHexString()) + } + const heads = [], numHeads = 
decoder.readUint53() + for (let i = 0; i < numHeads; i++) { + heads.push(bytesToHexString(decoder.readRawBytes(32))) + } + + const changesColumns = decodeColumnInfo(decoder) + const opsColumns = decodeColumnInfo(decoder) + for (let i = 0; i < changesColumns.length; i++) { + changesColumns[i].buffer = decoder.readRawBytes(changesColumns[i].bufferLen) + inflateColumn(changesColumns[i]) + } + for (let i = 0; i < opsColumns.length; i++) { + opsColumns[i].buffer = decoder.readRawBytes(opsColumns[i].bufferLen) + inflateColumn(opsColumns[i]) + } + + const extraBytes = decoder.readRawBytes(decoder.buf.byteLength - decoder.offset) + return { changesColumns, opsColumns, actorIds, heads, extraBytes } +} + +function decodeDocument(buffer) { + const { changesColumns, opsColumns, actorIds, heads } = decodeDocumentHeader(buffer) + const changes = decodeColumns(changesColumns, actorIds, DOCUMENT_COLUMNS) + const ops = decodeOps(decodeColumns(opsColumns, actorIds, DOC_OPS_COLUMNS), true) + groupChangeOps(changes, ops) + decodeDocumentChanges(changes, heads) + return changes +} + +/** + * DEFLATE-compresses the given column if it is large enough to make the compression worthwhile. + */ +function deflateColumn(column) { + if (column.encoder.buffer.byteLength >= DEFLATE_MIN_SIZE) { + column.encoder = {buffer: pako.deflateRaw(column.encoder.buffer)} + column.id |= COLUMN_TYPE_DEFLATE + } +} + +/** + * Decompresses the given column if it is DEFLATE-compressed. + */ +function inflateColumn(column) { + if ((column.columnId & COLUMN_TYPE_DEFLATE) !== 0) { + column.buffer = pako.inflateRaw(column.buffer) + column.columnId ^= COLUMN_TYPE_DEFLATE + } +} + +/** + * Takes all the operations for the same property (i.e. the same key in a map, or the same list + * element) and mutates the object patch to reflect the current value(s) of that property. There + * might be multiple values in the case of a conflict. `objects` is a map from objectId to the + * patch for that object. 
`property` contains `objId`, `key`, a list of `ops`, and `index` (the + * current list index if the object is a list). Returns true if one or more values are present, + * or false if the property has been deleted. + */ +function addPatchProperty(objects, property) { + let values = {}, counter = null + for (let op of property.ops) { + // Apply counters and their increments regardless of the number of successor operations + if (op.actionName === 'set' && op.value.datatype === 'counter') { + if (!counter) counter = {opId: op.opId, value: 0, succ: {}} + counter.value += op.value.value + for (let succId of op.succ) counter.succ[succId] = true + } else if (op.actionName === 'inc') { + if (!counter) throw new RangeError(`inc operation ${op.opId} without a counter`) + counter.value += op.value.value + delete counter.succ[op.opId] + for (let succId of op.succ) counter.succ[succId] = true + + } else if (op.succ.length === 0) { // Ignore any ops that have been overwritten + if (op.actionName.startsWith('make')) { + values[op.opId] = objects[op.opId] + } else if (op.actionName === 'set') { + values[op.opId] = {value: op.value.value, type: 'value'} + if (op.value.datatype) { + values[op.opId].datatype = op.value.datatype + } + } else if (op.actionName === 'link') { + // NB. This assumes that the ID of the child object is greater than the ID of the current + // object. This is true as long as link operations are only used to redo undone make* + // operations, but it will cease to be true once subtree moves are allowed. + if (!op.childId) throw new RangeError(`link operation ${op.opId} without a childId`) + values[op.opId] = objects[op.childId] + } else { + throw new RangeError(`Unexpected action type: ${op.actionName}`) + } + } + } + + // If the counter had any successor operation that was not an increment, that means the counter + // must have been deleted, so we omit it from the patch. 
+ if (counter && Object.keys(counter.succ).length === 0) { + values[counter.opId] = {type: 'value', value: counter.value, datatype: 'counter'} + } + + if (Object.keys(values).length > 0) { + let obj = objects[property.objId] + if (obj.type === 'map' || obj.type === 'table') { + obj.props[property.key] = values + } else if (obj.type === 'list' || obj.type === 'text') { + makeListEdits(obj, values, property.key, property.index) + } + return true + } else { + return false + } +} + +/** + * When constructing a patch to instantiate a loaded document, this function adds the edits to + * insert one list element. Usually there is one value, but in the case of a conflict there may be + * several values. `elemId` is the ID of the list element, and `index` is the list index at which + * the value(s) should be placed. + */ +function makeListEdits(list, values, elemId, index) { + let firstValue = true + const opIds = Object.keys(values).sort((id1, id2) => compareParsedOpIds(parseOpId(id1), parseOpId(id2))) + for (const opId of opIds) { + if (firstValue) { + list.edits.push({action: 'insert', value: values[opId], elemId, opId, index}) + } else { + list.edits.push({action: 'update', value: values[opId], opId, index}) + } + firstValue = false + } +} + +/** + * Recursively walks the patch tree, calling appendEdit on every list edit in order to consense + * consecutive sequences of insertions into multi-inserts. + */ +function condenseEdits(diff) { + if (diff.type === 'list' || diff.type === 'text') { + diff.edits.forEach(e => condenseEdits(e.value)) + let newEdits = diff.edits + diff.edits = [] + for (const edit of newEdits) appendEdit(diff.edits, edit) + } else if (diff.type === 'map' || diff.type === 'table') { + for (const prop of Object.keys(diff.props)) { + for (const opId of Object.keys(diff.props[prop])) { + condenseEdits(diff.props[prop][opId]) + } + } + } +} + +/** + * Appends a list edit operation (insert, update, remove) to an array of existing operations. 
If the + * last existing operation can be extended (as a multi-op), we do that. + */ +function appendEdit(existingEdits, nextEdit) { + if (existingEdits.length === 0) { + existingEdits.push(nextEdit) + return + } + + let lastEdit = existingEdits[existingEdits.length - 1] + if (lastEdit.action === 'insert' && nextEdit.action === 'insert' && + lastEdit.index === nextEdit.index - 1 && + lastEdit.value.type === 'value' && nextEdit.value.type === 'value' && + lastEdit.elemId === lastEdit.opId && nextEdit.elemId === nextEdit.opId && + opIdDelta(lastEdit.elemId, nextEdit.elemId, 1)) { + lastEdit.action = 'multi-insert' + lastEdit.values = [lastEdit.value.value, nextEdit.value.value] + delete lastEdit.value + delete lastEdit.opId + + } else if (lastEdit.action === 'multi-insert' && nextEdit.action === 'insert' && + lastEdit.index + lastEdit.values.length === nextEdit.index && + nextEdit.value.type === 'value' && nextEdit.elemId === nextEdit.opId && + opIdDelta(lastEdit.elemId, nextEdit.elemId, lastEdit.values.length)) { + lastEdit.values.push(nextEdit.value.value) + + } else if (lastEdit.action === 'remove' && nextEdit.action === 'remove' && + lastEdit.index === nextEdit.index) { + lastEdit.count += nextEdit.count + + } else { + existingEdits.push(nextEdit) + } +} + +/** + * Returns true if the two given operation IDs have the same actor ID, and the counter of `id2` is + * exactly `delta` greater than the counter of `id1`. + */ +function opIdDelta(id1, id2, delta = 1) { + const parsed1 = parseOpId(id1), parsed2 = parseOpId(id2) + return parsed1.actorId === parsed2.actorId && parsed1.counter + delta === parsed2.counter +} + +/** + * Parses the document (in compressed binary format) given as `documentBuffer` + * and returns a patch that can be sent to the frontend to instantiate the + * current state of that document. 
+ */ +function constructPatch(documentBuffer) { + const { opsColumns, actorIds } = decodeDocumentHeader(documentBuffer) + const col = makeDecoders(opsColumns, DOC_OPS_COLUMNS).reduce( + (acc, col) => Object.assign(acc, {[col.columnName]: col.decoder}), {}) + + let objects = {_root: {objectId: '_root', type: 'map', props: {}}} + let property = null + + while (!col.idActor.done) { + const opId = `${col.idCtr.readValue()}@${actorIds[col.idActor.readValue()]}` + const action = col.action.readValue(), actionName = ACTIONS[action] + if (action % 2 === 0) { // even-numbered actions are object creation + const type = OBJECT_TYPE[actionName] || 'unknown' + if (type === 'list' || type === 'text') { + objects[opId] = {objectId: opId, type, edits: []} + } else { + objects[opId] = {objectId: opId, type, props: {}} + } + } + + const objActor = col.objActor.readValue(), objCtr = col.objCtr.readValue() + const objId = objActor === null ? '_root' : `${objCtr}@${actorIds[objActor]}` + let obj = objects[objId] + if (!obj) throw new RangeError(`Operation for nonexistent object: ${objId}`) + + const keyActor = col.keyActor.readValue(), keyCtr = col.keyCtr.readValue() + const keyStr = col.keyStr.readValue(), insert = !!col.insert.readValue() + const chldActor = col.chldActor.readValue(), chldCtr = col.chldCtr.readValue() + const childId = chldActor === null ? 
null : `${chldCtr}@${actorIds[chldActor]}` + const sizeTag = col.valLen.readValue() + const rawValue = col.valRaw.readRawBytes(sizeTag >> 4) + const value = decodeValue(sizeTag, rawValue) + const succNum = col.succNum.readValue() + let succ = [] + for (let i = 0; i < succNum; i++) { + succ.push(`${col.succCtr.readValue()}@${actorIds[col.succActor.readValue()]}`) + } + + if (!actionName || obj.type === 'unknown') continue + + let key + if (obj.type === 'list' || obj.type === 'text') { + if (keyCtr === null || (keyCtr === 0 && !insert)) { + throw new RangeError(`Operation ${opId} on ${obj.type} object has no key`) + } + key = insert ? opId : `${keyCtr}@${actorIds[keyActor]}` + } else { + if (keyStr === null) { + throw new RangeError(`Operation ${opId} on ${obj.type} object has no key`) + } + key = keyStr + } + + if (!property || property.objId !== objId || property.key !== key) { + let index = 0 + if (property) { + index = property.index + if (addPatchProperty(objects, property)) index += 1 + if (property.objId !== objId) index = 0 + } + property = {objId, key, index, ops: []} + } + property.ops.push({opId, actionName, value, childId, succ}) + } + + if (property) addPatchProperty(objects, property) + condenseEdits(objects._root) + return objects._root +} + +module.exports = { + COLUMN_TYPE, VALUE_TYPE, ACTIONS, OBJECT_TYPE, DOC_OPS_COLUMNS, CHANGE_COLUMNS, + encoderByColumnId, decoderByColumnId, makeDecoders, decodeValue, + splitContainers, encodeChange, decodeChangeColumns, decodeChange, decodeChangeMeta, decodeChanges, + decodeDocumentHeader, encodeDocument, decodeDocument, + getChangeChecksum, appendEdit, constructPatch +} diff --git a/javascript/test/legacy/common.js b/automerge-wasm/test/helpers/common.js similarity index 65% rename from javascript/test/legacy/common.js rename to automerge-wasm/test/helpers/common.js index 7668e982..b41cadc8 100644 --- a/javascript/test/legacy/common.js +++ b/automerge-wasm/test/helpers/common.js @@ -1,5 +1,5 @@ function 
isObject(obj) { - return typeof obj === "object" && obj !== null + return typeof obj === 'object' && obj !== null } /** @@ -20,11 +20,11 @@ function copyObject(obj) { * with an actor ID, separated by an `@` sign) and returns an object `{counter, actorId}`. */ function parseOpId(opId) { - const match = /^(\d+)@(.*)$/.exec(opId || "") + const match = /^(\d+)@(.*)$/.exec(opId || '') if (!match) { throw new RangeError(`Not a valid opId: ${opId}`) } - return { counter: parseInt(match[1], 10), actorId: match[2] } + return {counter: parseInt(match[1], 10), actorId: match[2]} } /** @@ -32,7 +32,7 @@ function parseOpId(opId) { */ function equalBytes(array1, array2) { if (!(array1 instanceof Uint8Array) || !(array2 instanceof Uint8Array)) { - throw new TypeError("equalBytes can only compare Uint8Arrays") + throw new TypeError('equalBytes can only compare Uint8Arrays') } if (array1.byteLength !== array2.byteLength) return false for (let i = 0; i < array1.byteLength; i++) { @@ -41,19 +41,6 @@ function equalBytes(array1, array2) { return true } -/** - * Creates an array containing the value `null` repeated `length` times. 
- */ -function createArrayOfNulls(length) { - const array = new Array(length) - for (let i = 0; i < length; i++) array[i] = null - return array -} - module.exports = { - isObject, - copyObject, - parseOpId, - equalBytes, - createArrayOfNulls, + isObject, copyObject, parseOpId, equalBytes } diff --git a/javascript/test/legacy/encoding.js b/automerge-wasm/test/helpers/encoding.js similarity index 74% rename from javascript/test/legacy/encoding.js rename to automerge-wasm/test/helpers/encoding.js index f7650faf..92b62df6 100644 --- a/javascript/test/legacy/encoding.js +++ b/automerge-wasm/test/helpers/encoding.js @@ -6,7 +6,7 @@ * https://github.com/anonyco/FastestSmallestTextEncoderDecoder */ const utf8encoder = new TextEncoder() -const utf8decoder = new TextDecoder("utf-8") +const utf8decoder = new TextDecoder('utf-8') function stringToUtf8(string) { return utf8encoder.encode(string) @@ -20,48 +20,30 @@ function utf8ToString(buffer) { * Converts a string consisting of hexadecimal digits into an Uint8Array. 
*/ function hexStringToBytes(value) { - if (typeof value !== "string") { - throw new TypeError("value is not a string") + if (typeof value !== 'string') { + throw new TypeError('value is not a string') } if (!/^([0-9a-f][0-9a-f])*$/.test(value)) { - throw new RangeError("value is not hexadecimal") + throw new RangeError('value is not hexadecimal') } - if (value === "") { + if (value === '') { return new Uint8Array(0) } else { return new Uint8Array(value.match(/../g).map(b => parseInt(b, 16))) } } -const NIBBLE_TO_HEX = [ - "0", - "1", - "2", - "3", - "4", - "5", - "6", - "7", - "8", - "9", - "a", - "b", - "c", - "d", - "e", - "f", -] +const NIBBLE_TO_HEX = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'] const BYTE_TO_HEX = new Array(256) for (let i = 0; i < 256; i++) { - BYTE_TO_HEX[i] = `${NIBBLE_TO_HEX[(i >>> 4) & 0xf]}${NIBBLE_TO_HEX[i & 0xf]}` + BYTE_TO_HEX[i] = `${NIBBLE_TO_HEX[(i >>> 4) & 0xf]}${NIBBLE_TO_HEX[i & 0xf]}`; } /** * Converts a Uint8Array into the equivalent hexadecimal string. */ function bytesToHexString(bytes) { - let hex = "", - len = bytes.byteLength + let hex = '', len = bytes.byteLength for (let i = 0; i < len; i++) { hex += BYTE_TO_HEX[bytes[i]] } @@ -113,17 +95,14 @@ class Encoder { * appends it to the buffer. Returns the number of bytes written. */ appendUint32(value) { - if (!Number.isInteger(value)) - throw new RangeError("value is not an integer") - if (value < 0 || value > 0xffffffff) - throw new RangeError("number out of range") + if (!Number.isInteger(value)) throw new RangeError('value is not an integer') + if (value < 0 || value > 0xffffffff) throw new RangeError('number out of range') const numBytes = Math.max(1, Math.ceil((32 - Math.clz32(value)) / 7)) if (this.offset + numBytes > this.buf.byteLength) this.grow() for (let i = 0; i < numBytes; i++) { - this.buf[this.offset + i] = - (value & 0x7f) | (i === numBytes - 1 ? 
0x00 : 0x80) + this.buf[this.offset + i] = (value & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) value >>>= 7 // zero-filling right shift } this.offset += numBytes @@ -136,19 +115,14 @@ class Encoder { * it to the buffer. Returns the number of bytes written. */ appendInt32(value) { - if (!Number.isInteger(value)) - throw new RangeError("value is not an integer") - if (value < -0x80000000 || value > 0x7fffffff) - throw new RangeError("number out of range") + if (!Number.isInteger(value)) throw new RangeError('value is not an integer') + if (value < -0x80000000 || value > 0x7fffffff) throw new RangeError('number out of range') - const numBytes = Math.ceil( - (33 - Math.clz32(value >= 0 ? value : -value - 1)) / 7 - ) + const numBytes = Math.ceil((33 - Math.clz32(value >= 0 ? value : -value - 1)) / 7) if (this.offset + numBytes > this.buf.byteLength) this.grow() for (let i = 0; i < numBytes; i++) { - this.buf[this.offset + i] = - (value & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) + this.buf[this.offset + i] = (value & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) value >>= 7 // sign-propagating right shift } this.offset += numBytes @@ -161,10 +135,9 @@ class Encoder { * (53 bits). */ appendUint53(value) { - if (!Number.isInteger(value)) - throw new RangeError("value is not an integer") + if (!Number.isInteger(value)) throw new RangeError('value is not an integer') if (value < 0 || value > Number.MAX_SAFE_INTEGER) { - throw new RangeError("number out of range") + throw new RangeError('number out of range') } const high32 = Math.floor(value / 0x100000000) const low32 = (value & 0xffffffff) >>> 0 // right shift to interpret as unsigned @@ -177,10 +150,9 @@ class Encoder { * (53 bits). 
*/ appendInt53(value) { - if (!Number.isInteger(value)) - throw new RangeError("value is not an integer") + if (!Number.isInteger(value)) throw new RangeError('value is not an integer') if (value < Number.MIN_SAFE_INTEGER || value > Number.MAX_SAFE_INTEGER) { - throw new RangeError("number out of range") + throw new RangeError('number out of range') } const high32 = Math.floor(value / 0x100000000) const low32 = (value & 0xffffffff) >>> 0 // right shift to interpret as unsigned @@ -195,10 +167,10 @@ class Encoder { */ appendUint64(high32, low32) { if (!Number.isInteger(high32) || !Number.isInteger(low32)) { - throw new RangeError("value is not an integer") + throw new RangeError('value is not an integer') } if (high32 < 0 || high32 > 0xffffffff || low32 < 0 || low32 > 0xffffffff) { - throw new RangeError("number out of range") + throw new RangeError('number out of range') } if (high32 === 0) return this.appendUint32(low32) @@ -208,12 +180,10 @@ class Encoder { this.buf[this.offset + i] = (low32 & 0x7f) | 0x80 low32 >>>= 7 // zero-filling right shift } - this.buf[this.offset + 4] = - (low32 & 0x0f) | ((high32 & 0x07) << 4) | (numBytes === 5 ? 0x00 : 0x80) + this.buf[this.offset + 4] = (low32 & 0x0f) | ((high32 & 0x07) << 4) | (numBytes === 5 ? 0x00 : 0x80) high32 >>>= 3 for (let i = 5; i < numBytes; i++) { - this.buf[this.offset + i] = - (high32 & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) + this.buf[this.offset + i] = (high32 & 0x7f) | (i === numBytes - 1 ? 
0x00 : 0x80) high32 >>>= 7 } this.offset += numBytes @@ -230,35 +200,25 @@ class Encoder { */ appendInt64(high32, low32) { if (!Number.isInteger(high32) || !Number.isInteger(low32)) { - throw new RangeError("value is not an integer") + throw new RangeError('value is not an integer') } - if ( - high32 < -0x80000000 || - high32 > 0x7fffffff || - low32 < -0x80000000 || - low32 > 0xffffffff - ) { - throw new RangeError("number out of range") + if (high32 < -0x80000000 || high32 > 0x7fffffff || low32 < -0x80000000 || low32 > 0xffffffff) { + throw new RangeError('number out of range') } low32 >>>= 0 // interpret as unsigned if (high32 === 0 && low32 <= 0x7fffffff) return this.appendInt32(low32) - if (high32 === -1 && low32 >= 0x80000000) - return this.appendInt32(low32 - 0x100000000) + if (high32 === -1 && low32 >= 0x80000000) return this.appendInt32(low32 - 0x100000000) - const numBytes = Math.ceil( - (65 - Math.clz32(high32 >= 0 ? high32 : -high32 - 1)) / 7 - ) + const numBytes = Math.ceil((65 - Math.clz32(high32 >= 0 ? high32 : -high32 - 1)) / 7) if (this.offset + numBytes > this.buf.byteLength) this.grow() for (let i = 0; i < 4; i++) { this.buf[this.offset + i] = (low32 & 0x7f) | 0x80 low32 >>>= 7 // zero-filling right shift } - this.buf[this.offset + 4] = - (low32 & 0x0f) | ((high32 & 0x07) << 4) | (numBytes === 5 ? 0x00 : 0x80) + this.buf[this.offset + 4] = (low32 & 0x0f) | ((high32 & 0x07) << 4) | (numBytes === 5 ? 0x00 : 0x80) high32 >>= 3 // sign-propagating right shift for (let i = 5; i < numBytes; i++) { - this.buf[this.offset + i] = - (high32 & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) + this.buf[this.offset + i] = (high32 & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) high32 >>= 7 } this.offset += numBytes @@ -283,7 +243,7 @@ class Encoder { * number of bytes appended. 
*/ appendRawString(value) { - if (typeof value !== "string") throw new TypeError("value is not a string") + if (typeof value !== 'string') throw new TypeError('value is not a string') return this.appendRawBytes(stringToUtf8(value)) } @@ -302,7 +262,7 @@ class Encoder { * (where the length is encoded as an unsigned LEB128 integer). */ appendPrefixedString(value) { - if (typeof value !== "string") throw new TypeError("value is not a string") + if (typeof value !== 'string') throw new TypeError('value is not a string') this.appendPrefixedBytes(stringToUtf8(value)) return this } @@ -321,7 +281,8 @@ class Encoder { * Flushes any unwritten data to the buffer. Call this before reading from * the buffer constructed by this Encoder. */ - finish() {} + finish() { + } } /** @@ -360,7 +321,7 @@ class Decoder { */ skip(bytes) { if (this.offset + bytes > this.buf.byteLength) { - throw new RangeError("cannot skip beyond end of buffer") + throw new RangeError('cannot skip beyond end of buffer') } this.offset += bytes } @@ -378,20 +339,18 @@ class Decoder { * Throws an exception if the value doesn't fit in a 32-bit unsigned int. 
*/ readUint32() { - let result = 0, - shift = 0 + let result = 0, shift = 0 while (this.offset < this.buf.byteLength) { const nextByte = this.buf[this.offset] - if (shift === 28 && (nextByte & 0xf0) !== 0) { - // more than 5 bytes, or value > 0xffffffff - throw new RangeError("number out of range") + if (shift === 28 && (nextByte & 0xf0) !== 0) { // more than 5 bytes, or value > 0xffffffff + throw new RangeError('number out of range') } - result = (result | ((nextByte & 0x7f) << shift)) >>> 0 // right shift to interpret value as unsigned + result = (result | (nextByte & 0x7f) << shift) >>> 0 // right shift to interpret value as unsigned shift += 7 this.offset++ if ((nextByte & 0x80) === 0) return result } - throw new RangeError("buffer ended with incomplete number") + throw new RangeError('buffer ended with incomplete number') } /** @@ -399,17 +358,13 @@ class Decoder { * Throws an exception if the value doesn't fit in a 32-bit signed int. */ readInt32() { - let result = 0, - shift = 0 + let result = 0, shift = 0 while (this.offset < this.buf.byteLength) { const nextByte = this.buf[this.offset] - if ( - (shift === 28 && (nextByte & 0x80) !== 0) || // more than 5 bytes - (shift === 28 && (nextByte & 0x40) === 0 && (nextByte & 0x38) !== 0) || // positive int > 0x7fffffff - (shift === 28 && (nextByte & 0x40) !== 0 && (nextByte & 0x38) !== 0x38) - ) { - // negative int < -0x80000000 - throw new RangeError("number out of range") + if ((shift === 28 && (nextByte & 0x80) !== 0) || // more than 5 bytes + (shift === 28 && (nextByte & 0x40) === 0 && (nextByte & 0x38) !== 0) || // positive int > 0x7fffffff + (shift === 28 && (nextByte & 0x40) !== 0 && (nextByte & 0x38) !== 0x38)) { // negative int < -0x80000000 + throw new RangeError('number out of range') } result |= (nextByte & 0x7f) << shift shift += 7 @@ -423,7 +378,7 @@ class Decoder { } } } - throw new RangeError("buffer ended with incomplete number") + throw new RangeError('buffer ended with incomplete number') } /** 
@@ -434,7 +389,7 @@ class Decoder { readUint53() { const { low32, high32 } = this.readUint64() if (high32 < 0 || high32 > 0x1fffff) { - throw new RangeError("number out of range") + throw new RangeError('number out of range') } return high32 * 0x100000000 + low32 } @@ -446,12 +401,8 @@ class Decoder { */ readInt53() { const { low32, high32 } = this.readInt64() - if ( - high32 < -0x200000 || - (high32 === -0x200000 && low32 === 0) || - high32 > 0x1fffff - ) { - throw new RangeError("number out of range") + if (high32 < -0x200000 || (high32 === -0x200000 && low32 === 0) || high32 > 0x1fffff) { + throw new RangeError('number out of range') } return high32 * 0x100000000 + low32 } @@ -463,12 +414,10 @@ class Decoder { * `{high32, low32}`. */ readUint64() { - let low32 = 0, - high32 = 0, - shift = 0 + let low32 = 0, high32 = 0, shift = 0 while (this.offset < this.buf.byteLength && shift <= 28) { const nextByte = this.buf[this.offset] - low32 = (low32 | ((nextByte & 0x7f) << shift)) >>> 0 // right shift to interpret value as unsigned + low32 = (low32 | (nextByte & 0x7f) << shift) >>> 0 // right shift to interpret value as unsigned if (shift === 28) { high32 = (nextByte & 0x70) >>> 4 } @@ -480,16 +429,15 @@ class Decoder { shift = 3 while (this.offset < this.buf.byteLength) { const nextByte = this.buf[this.offset] - if (shift === 31 && (nextByte & 0xfe) !== 0) { - // more than 10 bytes, or value > 2^64 - 1 - throw new RangeError("number out of range") + if (shift === 31 && (nextByte & 0xfe) !== 0) { // more than 10 bytes, or value > 2^64 - 1 + throw new RangeError('number out of range') } - high32 = (high32 | ((nextByte & 0x7f) << shift)) >>> 0 + high32 = (high32 | (nextByte & 0x7f) << shift) >>> 0 shift += 7 this.offset++ if ((nextByte & 0x80) === 0) return { high32, low32 } } - throw new RangeError("buffer ended with incomplete number") + throw new RangeError('buffer ended with incomplete number') } /** @@ -500,20 +448,17 @@ class Decoder { * sign of the `high32` half 
indicates the sign of the 64-bit number. */ readInt64() { - let low32 = 0, - high32 = 0, - shift = 0 + let low32 = 0, high32 = 0, shift = 0 while (this.offset < this.buf.byteLength && shift <= 28) { const nextByte = this.buf[this.offset] - low32 = (low32 | ((nextByte & 0x7f) << shift)) >>> 0 // right shift to interpret value as unsigned + low32 = (low32 | (nextByte & 0x7f) << shift) >>> 0 // right shift to interpret value as unsigned if (shift === 28) { high32 = (nextByte & 0x70) >>> 4 } shift += 7 this.offset++ if ((nextByte & 0x80) === 0) { - if ((nextByte & 0x40) !== 0) { - // sign-extend negative integer + if ((nextByte & 0x40) !== 0) { // sign-extend negative integer if (shift < 32) low32 = (low32 | (-1 << shift)) >>> 0 high32 |= -1 << Math.max(shift - 32, 0) } @@ -527,20 +472,19 @@ class Decoder { // On the 10th byte there are only two valid values: all 7 value bits zero // (if the value is positive) or all 7 bits one (if the value is negative) if (shift === 31 && nextByte !== 0 && nextByte !== 0x7f) { - throw new RangeError("number out of range") + throw new RangeError('number out of range') } high32 |= (nextByte & 0x7f) << shift shift += 7 this.offset++ if ((nextByte & 0x80) === 0) { - if ((nextByte & 0x40) !== 0 && shift < 32) { - // sign-extend negative integer + if ((nextByte & 0x40) !== 0 && shift < 32) { // sign-extend negative integer high32 |= -1 << shift } return { high32, low32 } } } - throw new RangeError("buffer ended with incomplete number") + throw new RangeError('buffer ended with incomplete number') } /** @@ -550,7 +494,7 @@ class Decoder { readRawBytes(length) { const start = this.offset if (start + length > this.buf.byteLength) { - throw new RangeError("subarray exceeds buffer size") + throw new RangeError('subarray exceeds buffer size') } this.offset += length return this.buf.subarray(start, this.offset) @@ -615,7 +559,7 @@ class RLEEncoder extends Encoder { constructor(type) { super() this.type = type - this.state = "empty" + this.state = 
'empty' this.lastValue = undefined this.count = 0 this.literal = [] @@ -634,81 +578,76 @@ class RLEEncoder extends Encoder { */ _appendValue(value, repetitions = 1) { if (repetitions <= 0) return - if (this.state === "empty") { - this.state = - value === null - ? "nulls" - : repetitions === 1 - ? "loneValue" - : "repetition" + if (this.state === 'empty') { + this.state = (value === null ? 'nulls' : (repetitions === 1 ? 'loneValue' : 'repetition')) this.lastValue = value this.count = repetitions - } else if (this.state === "loneValue") { + } else if (this.state === 'loneValue') { if (value === null) { this.flush() - this.state = "nulls" + this.state = 'nulls' this.count = repetitions } else if (value === this.lastValue) { - this.state = "repetition" + this.state = 'repetition' this.count = 1 + repetitions } else if (repetitions > 1) { this.flush() - this.state = "repetition" + this.state = 'repetition' this.count = repetitions this.lastValue = value } else { - this.state = "literal" + this.state = 'literal' this.literal = [this.lastValue] this.lastValue = value } - } else if (this.state === "repetition") { + } else if (this.state === 'repetition') { if (value === null) { this.flush() - this.state = "nulls" + this.state = 'nulls' this.count = repetitions } else if (value === this.lastValue) { this.count += repetitions } else if (repetitions > 1) { this.flush() - this.state = "repetition" + this.state = 'repetition' this.count = repetitions this.lastValue = value } else { this.flush() - this.state = "loneValue" + this.state = 'loneValue' this.lastValue = value } - } else if (this.state === "literal") { + } else if (this.state === 'literal') { if (value === null) { this.literal.push(this.lastValue) this.flush() - this.state = "nulls" + this.state = 'nulls' this.count = repetitions } else if (value === this.lastValue) { this.flush() - this.state = "repetition" + this.state = 'repetition' this.count = 1 + repetitions } else if (repetitions > 1) { 
this.literal.push(this.lastValue) this.flush() - this.state = "repetition" + this.state = 'repetition' this.count = repetitions this.lastValue = value } else { this.literal.push(this.lastValue) this.lastValue = value } - } else if (this.state === "nulls") { + } else if (this.state === 'nulls') { if (value === null) { this.count += repetitions } else if (repetitions > 1) { this.flush() - this.state = "repetition" + this.state = 'repetition' this.count = repetitions this.lastValue = value } else { this.flush() - this.state = "loneValue" + this.state = 'loneValue' this.lastValue = value } } @@ -727,16 +666,13 @@ class RLEEncoder extends Encoder { */ copyFrom(decoder, options = {}) { const { count, sumValues, sumShift } = options - if (!(decoder instanceof RLEDecoder) || decoder.type !== this.type) { - throw new TypeError("incompatible type of decoder") + if (!(decoder instanceof RLEDecoder) || (decoder.type !== this.type)) { + throw new TypeError('incompatible type of decoder') } - let remaining = typeof count === "number" ? count : Number.MAX_SAFE_INTEGER - let nonNullValues = 0, - sum = 0 - if (count && remaining > 0 && decoder.done) - throw new RangeError(`cannot copy ${count} values`) - if (remaining === 0 || decoder.done) - return sumValues ? { nonNullValues, sum } : { nonNullValues } + let remaining = (typeof count === 'number' ? count : Number.MAX_SAFE_INTEGER) + let nonNullValues = 0, sum = 0 + if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`) + if (remaining === 0 || decoder.done) return sumValues ? {nonNullValues, sum} : {nonNullValues} // Copy a value so that we have a well-defined starting state. 
NB: when super.copyFrom() is // called by the DeltaEncoder subclass, the following calls to readValue() and appendValue() @@ -748,101 +684,87 @@ class RLEEncoder extends Encoder { remaining -= numNulls decoder.count -= numNulls - 1 this.appendValue(null, numNulls) - if (count && remaining > 0 && decoder.done) - throw new RangeError(`cannot copy ${count} values`) - if (remaining === 0 || decoder.done) - return sumValues ? { nonNullValues, sum } : { nonNullValues } + if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`) + if (remaining === 0 || decoder.done) return sumValues ? {nonNullValues, sum} : {nonNullValues} firstValue = decoder.readValue() - if (firstValue === null) - throw new RangeError("null run must be followed by non-null value") + if (firstValue === null) throw new RangeError('null run must be followed by non-null value') } this.appendValue(firstValue) remaining-- nonNullValues++ - if (sumValues) sum += sumShift ? firstValue >>> sumShift : firstValue - if (count && remaining > 0 && decoder.done) - throw new RangeError(`cannot copy ${count} values`) - if (remaining === 0 || decoder.done) - return sumValues ? { nonNullValues, sum } : { nonNullValues } + if (sumValues) sum += (sumShift ? (firstValue >>> sumShift) : firstValue) + if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`) + if (remaining === 0 || decoder.done) return sumValues ? 
{nonNullValues, sum} : {nonNullValues} // Copy data at the record level without expanding repetitions - let firstRun = decoder.count > 0 + let firstRun = (decoder.count > 0) while (remaining > 0 && !decoder.done) { if (!firstRun) decoder.readRecord() const numValues = Math.min(decoder.count, remaining) decoder.count -= numValues - if (decoder.state === "literal") { + if (decoder.state === 'literal') { nonNullValues += numValues for (let i = 0; i < numValues; i++) { - if (decoder.done) throw new RangeError("incomplete literal") + if (decoder.done) throw new RangeError('incomplete literal') const value = decoder.readRawValue() - if (value === decoder.lastValue) - throw new RangeError( - "Repetition of values is not allowed in literal" - ) + if (value === decoder.lastValue) throw new RangeError('Repetition of values is not allowed in literal') decoder.lastValue = value this._appendValue(value) - if (sumValues) sum += sumShift ? value >>> sumShift : value + if (sumValues) sum += (sumShift ? (value >>> sumShift) : value) } - } else if (decoder.state === "repetition") { + } else if (decoder.state === 'repetition') { nonNullValues += numValues - if (sumValues) - sum += - numValues * - (sumShift ? decoder.lastValue >>> sumShift : decoder.lastValue) + if (sumValues) sum += numValues * (sumShift ? 
(decoder.lastValue >>> sumShift) : decoder.lastValue) const value = decoder.lastValue this._appendValue(value) if (numValues > 1) { this._appendValue(value) - if (this.state !== "repetition") - throw new RangeError(`Unexpected state ${this.state}`) + if (this.state !== 'repetition') throw new RangeError(`Unexpected state ${this.state}`) this.count += numValues - 2 } - } else if (decoder.state === "nulls") { + } else if (decoder.state === 'nulls') { this._appendValue(null) - if (this.state !== "nulls") - throw new RangeError(`Unexpected state ${this.state}`) + if (this.state !== 'nulls') throw new RangeError(`Unexpected state ${this.state}`) this.count += numValues - 1 } firstRun = false remaining -= numValues } - if (count && remaining > 0 && decoder.done) - throw new RangeError(`cannot copy ${count} values`) - return sumValues ? { nonNullValues, sum } : { nonNullValues } + if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`) + return sumValues ? {nonNullValues, sum} : {nonNullValues} } /** * Private method, do not call from outside the class. */ flush() { - if (this.state === "loneValue") { + if (this.state === 'loneValue') { this.appendInt32(-1) this.appendRawValue(this.lastValue) - } else if (this.state === "repetition") { + } else if (this.state === 'repetition') { this.appendInt53(this.count) this.appendRawValue(this.lastValue) - } else if (this.state === "literal") { + } else if (this.state === 'literal') { this.appendInt53(-this.literal.length) for (let v of this.literal) this.appendRawValue(v) - } else if (this.state === "nulls") { + } else if (this.state === 'nulls') { this.appendInt32(0) this.appendUint53(this.count) } - this.state = "empty" + this.state = 'empty' } /** * Private method, do not call from outside the class. 
*/ appendRawValue(value) { - if (this.type === "int") { + if (this.type === 'int') { this.appendInt53(value) - } else if (this.type === "uint") { + } else if (this.type === 'uint') { this.appendUint53(value) - } else if (this.type === "utf8") { + } else if (this.type === 'utf8') { this.appendPrefixedString(value) } else { throw new RangeError(`Unknown RLEEncoder datatype: ${this.type}`) @@ -854,9 +776,9 @@ class RLEEncoder extends Encoder { * the buffer constructed by this Encoder. */ finish() { - if (this.state === "literal") this.literal.push(this.lastValue) + if (this.state === 'literal') this.literal.push(this.lastValue) // Don't write anything if the only values we have seen are nulls - if (this.state !== "nulls" || this.offset > 0) this.flush() + if (this.state !== 'nulls' || this.offset > 0) this.flush() } } @@ -878,7 +800,7 @@ class RLEDecoder extends Decoder { * position, and true if we are at the end of the buffer. */ get done() { - return this.count === 0 && this.offset === this.buf.byteLength + return (this.count === 0) && (this.offset === this.buf.byteLength) } /** @@ -899,10 +821,9 @@ class RLEDecoder extends Decoder { if (this.done) return null if (this.count === 0) this.readRecord() this.count -= 1 - if (this.state === "literal") { + if (this.state === 'literal') { const value = this.readRawValue() - if (value === this.lastValue) - throw new RangeError("Repetition of values is not allowed in literal") + if (value === this.lastValue) throw new RangeError('Repetition of values is not allowed in literal') this.lastValue = value return value } else { @@ -918,22 +839,20 @@ class RLEDecoder extends Decoder { if (this.count === 0) { this.count = this.readInt53() if (this.count > 0) { - this.lastValue = - this.count <= numSkip ? this.skipRawValues(1) : this.readRawValue() - this.state = "repetition" + this.lastValue = (this.count <= numSkip) ? 
this.skipRawValues(1) : this.readRawValue() + this.state = 'repetition' } else if (this.count < 0) { this.count = -this.count - this.state = "literal" - } else { - // this.count == 0 + this.state = 'literal' + } else { // this.count == 0 this.count = this.readUint53() this.lastValue = null - this.state = "nulls" + this.state = 'nulls' } } const consume = Math.min(numSkip, this.count) - if (this.state === "literal") this.skipRawValues(consume) + if (this.state === 'literal') this.skipRawValues(consume) numSkip -= consume this.count -= consume } @@ -947,34 +866,23 @@ class RLEDecoder extends Decoder { this.count = this.readInt53() if (this.count > 1) { const value = this.readRawValue() - if ( - (this.state === "repetition" || this.state === "literal") && - this.lastValue === value - ) { - throw new RangeError( - "Successive repetitions with the same value are not allowed" - ) + if ((this.state === 'repetition' || this.state === 'literal') && this.lastValue === value) { + throw new RangeError('Successive repetitions with the same value are not allowed') } - this.state = "repetition" + this.state = 'repetition' this.lastValue = value } else if (this.count === 1) { - throw new RangeError( - "Repetition count of 1 is not allowed, use a literal instead" - ) + throw new RangeError('Repetition count of 1 is not allowed, use a literal instead') } else if (this.count < 0) { this.count = -this.count - if (this.state === "literal") - throw new RangeError("Successive literals are not allowed") - this.state = "literal" - } else { - // this.count == 0 - if (this.state === "nulls") - throw new RangeError("Successive null runs are not allowed") + if (this.state === 'literal') throw new RangeError('Successive literals are not allowed') + this.state = 'literal' + } else { // this.count == 0 + if (this.state === 'nulls') throw new RangeError('Successive null runs are not allowed') this.count = this.readUint53() - if (this.count === 0) - throw new RangeError("Zero-length null runs are 
not allowed") + if (this.count === 0) throw new RangeError('Zero-length null runs are not allowed') this.lastValue = null - this.state = "nulls" + this.state = 'nulls' } } @@ -983,11 +891,11 @@ class RLEDecoder extends Decoder { * Reads one value of the datatype configured on construction. */ readRawValue() { - if (this.type === "int") { + if (this.type === 'int') { return this.readInt53() - } else if (this.type === "uint") { + } else if (this.type === 'uint') { return this.readUint53() - } else if (this.type === "utf8") { + } else if (this.type === 'utf8') { return this.readPrefixedString() } else { throw new RangeError(`Unknown RLEDecoder datatype: ${this.type}`) @@ -999,14 +907,14 @@ class RLEDecoder extends Decoder { * Skips over `num` values of the datatype configured on construction. */ skipRawValues(num) { - if (this.type === "utf8") { + if (this.type === 'utf8') { for (let i = 0; i < num; i++) this.skip(this.readUint53()) } else { while (num > 0 && this.offset < this.buf.byteLength) { if ((this.buf[this.offset] & 0x80) === 0) num-- this.offset++ } - if (num > 0) throw new RangeError("cannot skip beyond end of buffer") + if (num > 0) throw new RangeError('cannot skip beyond end of buffer') } } } @@ -1023,7 +931,7 @@ class RLEDecoder extends Decoder { */ class DeltaEncoder extends RLEEncoder { constructor() { - super("int") + super('int') this.absoluteValue = 0 } @@ -1033,7 +941,7 @@ class DeltaEncoder extends RLEEncoder { */ appendValue(value, repetitions = 1) { if (repetitions <= 0) return - if (typeof value === "number") { + if (typeof value === 'number') { super.appendValue(value - this.absoluteValue, 1) this.absoluteValue = value if (repetitions > 1) super.appendValue(0, repetitions - 1) @@ -1049,29 +957,26 @@ class DeltaEncoder extends RLEEncoder { */ copyFrom(decoder, options = {}) { if (options.sumValues) { - throw new RangeError("unsupported options for DeltaEncoder.copyFrom()") + throw new RangeError('unsupported options for 
DeltaEncoder.copyFrom()') } if (!(decoder instanceof DeltaDecoder)) { - throw new TypeError("incompatible type of decoder") + throw new TypeError('incompatible type of decoder') } let remaining = options.count - if (remaining > 0 && decoder.done) - throw new RangeError(`cannot copy ${remaining} values`) + if (remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${remaining} values`) if (remaining === 0 || decoder.done) return // Copy any null values, and the first non-null value, so that appendValue() computes the // difference between the encoder's last value and the decoder's first (absolute) value. - let value = decoder.readValue(), - nulls = 0 + let value = decoder.readValue(), nulls = 0 this.appendValue(value) if (value === null) { nulls = decoder.count + 1 if (remaining !== undefined && remaining < nulls) nulls = remaining decoder.count -= nulls - 1 this.count += nulls - 1 - if (remaining > nulls && decoder.done) - throw new RangeError(`cannot copy ${remaining} values`) + if (remaining > nulls && decoder.done) throw new RangeError(`cannot copy ${remaining} values`) if (remaining === nulls || decoder.done) return // The next value read is certain to be non-null because we're not at the end of the decoder, @@ -1084,10 +989,7 @@ class DeltaEncoder extends RLEEncoder { // value, while subsequent values are relative. Thus, the sum of all of the (non-null) copied // values must equal the absolute value of the final element copied. 
if (remaining !== undefined) remaining -= nulls + 1 - const { nonNullValues, sum } = super.copyFrom(decoder, { - count: remaining, - sumValues: true, - }) + const { nonNullValues, sum } = super.copyFrom(decoder, {count: remaining, sumValues: true}) if (nonNullValues > 0) { this.absoluteValue = sum decoder.absoluteValue = sum @@ -1101,7 +1003,7 @@ class DeltaEncoder extends RLEEncoder { */ class DeltaDecoder extends RLEDecoder { constructor(buffer) { - super("int", buffer) + super('int', buffer) this.absoluteValue = 0 } @@ -1134,12 +1036,12 @@ class DeltaDecoder extends RLEDecoder { while (numSkip > 0 && !this.done) { if (this.count === 0) this.readRecord() const consume = Math.min(numSkip, this.count) - if (this.state === "literal") { + if (this.state === 'literal') { for (let i = 0; i < consume; i++) { this.lastValue = this.readRawValue() this.absoluteValue += this.lastValue } - } else if (this.state === "repetition") { + } else if (this.state === 'repetition') { this.absoluteValue += consume * this.lastValue } numSkip -= consume @@ -1188,13 +1090,12 @@ class BooleanEncoder extends Encoder { */ copyFrom(decoder, options = {}) { if (!(decoder instanceof BooleanDecoder)) { - throw new TypeError("incompatible type of decoder") + throw new TypeError('incompatible type of decoder') } const { count } = options - let remaining = typeof count === "number" ? count : Number.MAX_SAFE_INTEGER - if (count && remaining > 0 && decoder.done) - throw new RangeError(`cannot copy ${count} values`) + let remaining = (typeof count === 'number' ? 
count : Number.MAX_SAFE_INTEGER) + if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`) if (remaining === 0 || decoder.done) return // Copy one value to bring decoder and encoder state into sync, then finish that value's repetitions @@ -1207,8 +1108,7 @@ class BooleanEncoder extends Encoder { while (remaining > 0 && !decoder.done) { decoder.count = decoder.readUint53() - if (decoder.count === 0) - throw new RangeError("Zero-length runs are not allowed") + if (decoder.count === 0) throw new RangeError('Zero-length runs are not allowed') decoder.lastValue = !decoder.lastValue this.appendUint53(this.count) @@ -1219,8 +1119,7 @@ class BooleanEncoder extends Encoder { remaining -= numCopied } - if (count && remaining > 0 && decoder.done) - throw new RangeError(`cannot copy ${count} values`) + if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`) } /** @@ -1252,7 +1151,7 @@ class BooleanDecoder extends Decoder { * position, and true if we are at the end of the buffer. 
*/ get done() { - return this.count === 0 && this.offset === this.buf.byteLength + return (this.count === 0) && (this.offset === this.buf.byteLength) } /** @@ -1275,7 +1174,7 @@ class BooleanDecoder extends Decoder { this.count = this.readUint53() this.lastValue = !this.lastValue if (this.count === 0 && !this.firstRun) { - throw new RangeError("Zero-length runs are not allowed") + throw new RangeError('Zero-length runs are not allowed') } this.firstRun = false } @@ -1291,8 +1190,7 @@ class BooleanDecoder extends Decoder { if (this.count === 0) { this.count = this.readUint53() this.lastValue = !this.lastValue - if (this.count === 0) - throw new RangeError("Zero-length runs are not allowed") + if (this.count === 0) throw new RangeError('Zero-length runs are not allowed') } if (this.count < numSkip) { numSkip -= this.count @@ -1306,16 +1204,6 @@ class BooleanDecoder extends Decoder { } module.exports = { - stringToUtf8, - utf8ToString, - hexStringToBytes, - bytesToHexString, - Encoder, - Decoder, - RLEEncoder, - RLEDecoder, - DeltaEncoder, - DeltaDecoder, - BooleanEncoder, - BooleanDecoder, + stringToUtf8, utf8ToString, hexStringToBytes, bytesToHexString, + Encoder, Decoder, RLEEncoder, RLEDecoder, DeltaEncoder, DeltaDecoder, BooleanEncoder, BooleanDecoder } diff --git a/rust/automerge-wasm/test/helpers/sync.js b/automerge-wasm/test/helpers/sync.js similarity index 100% rename from rust/automerge-wasm/test/helpers/sync.js rename to automerge-wasm/test/helpers/sync.js diff --git a/automerge-wasm/test/marks.ts b/automerge-wasm/test/marks.ts new file mode 100644 index 00000000..73146979 --- /dev/null +++ b/automerge-wasm/test/marks.ts @@ -0,0 +1,203 @@ +import { describe, it } from 'mocha'; +//@ts-ignore +import assert from 'assert' +//@ts-ignore +import { create, loadDoc, Automerge, encodeChange, decodeChange } from '..' 
+ +describe('Automerge', () => { + describe('marks', () => { + it('should handle marks [..]', () => { + let doc = create() + let list = doc.set_object("_root", "list", "") + doc.splice(list, 0, 0, "aaabbbccc") + doc.mark(list, "[3..6]", "bold" , true) + let spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); + doc.insert(list, 6, "A") + doc.insert(list, 3, "A") + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaaA', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'Accc' ]); + }) + + it('should handle marks [..] at the beginning of a string', () => { + let doc = create() + let list = doc.set_object("_root", "list", "") + doc.splice(list, 0, 0, "aaabbbccc") + doc.mark(list, "[0..3]", "bold", true) + let spans = doc.spans(list); + assert.deepStrictEqual(spans, [ [ [ 'bold', 'boolean', true ] ], 'aaa', [], 'bbbccc' ]); + + let doc2 = doc.fork() + doc2.insert(list, 0, "A") + doc2.insert(list, 4, "B") + doc.merge(doc2) + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'A', [ [ 'bold', 'boolean', true ] ], 'aaa', [], 'Bbbbccc' ]); + }) + + it('should handle marks [..] 
with splice', () => { + let doc = create() + let list = doc.set_object("_root", "list", "") + doc.splice(list, 0, 0, "aaabbbccc") + doc.mark(list, "[0..3]", "bold", true) + let spans = doc.spans(list); + assert.deepStrictEqual(spans, [ [ [ 'bold', 'boolean', true ] ], 'aaa', [], 'bbbccc' ]); + + let doc2 = doc.fork() + doc2.splice(list, 0, 2, "AAA") + doc2.splice(list, 4, 0, "BBB") + doc.merge(doc2) + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'AAA', [ [ 'bold', 'boolean', true ] ], 'a', [], 'BBBbbbccc' ]); + }) + + it('should handle marks across multiple forks', () => { + let doc = create() + let list = doc.set_object("_root", "list", "") + doc.splice(list, 0, 0, "aaabbbccc") + doc.mark(list, "[0..3]", "bold", true) + let spans = doc.spans(list); + assert.deepStrictEqual(spans, [ [ [ 'bold', 'boolean', true ] ], 'aaa', [], 'bbbccc' ]); + + let doc2 = doc.fork() + doc2.splice(list, 1, 1, "Z") // replace 'aaa' with 'aZa' inside mark. + + let doc3 = doc.fork() + doc3.insert(list, 0, "AAA") // should not be included in mark. 
+ + doc.merge(doc2) + doc.merge(doc3) + + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'AAA', [ [ 'bold', 'boolean', true ] ], 'aZa', [], 'bbbccc' ]); + }) + + + it('should handle marks with deleted ends [..]', () => { + let doc = create() + let list = doc.set_object("_root", "list", "") + + doc.splice(list, 0, 0, "aaabbbccc") + doc.mark(list, "[3..6]", "bold" , true) + let spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); + doc.del(list,5); + doc.del(list,5); + doc.del(list,2); + doc.del(list,2); + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'b', [], 'cc' ]) + doc.insert(list, 3, "A") + doc.insert(list, 2, "A") + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaA', [ [ 'bold', 'boolean', true ] ], 'b', [], 'Acc' ]) + }) + + it('should handle sticky marks (..)', () => { + let doc = create() + let list = doc.set_object("_root", "list", "") + doc.splice(list, 0, 0, "aaabbbccc") + doc.mark(list, "(3..6)", "bold" , true) + let spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); + doc.insert(list, 6, "A") + doc.insert(list, 3, "A") + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'AbbbA', [], 'ccc' ]); + }) + + it('should handle sticky marks with deleted ends (..)', () => { + let doc = create() + let list = doc.set_object("_root", "list", "") + doc.splice(list, 0, 0, "aaabbbccc") + doc.mark(list, "(3..6)", "bold" , true) + let spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aaa', [ [ 'bold', 'boolean', true ] ], 'bbb', [], 'ccc' ]); + doc.del(list,5); + doc.del(list,5); + doc.del(list,2); + doc.del(list,2); + spans = doc.spans(list); + assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'b', [], 'cc' ]) + doc.insert(list, 3, "A") + doc.insert(list, 2, "A") + spans = 
doc.spans(list); + assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ]) + + // make sure save/load can handle marks + + let doc2 = loadDoc(doc.save()) + spans = doc2.spans(list); + assert.deepStrictEqual(spans, [ 'aa', [ [ 'bold', 'boolean', true ] ], 'AbA', [], 'cc' ]) + + assert.deepStrictEqual(doc.getHeads(), doc2.getHeads()) + assert.deepStrictEqual(doc.save(), doc2.save()) + }) + + it('should handle overlapping marks', () => { + let doc : Automerge = create("aabbcc") + let list = doc.set_object("_root", "list", "") + doc.splice(list, 0, 0, "the quick fox jumps over the lazy dog") + doc.mark(list, "[0..37]", "bold" , true) + doc.mark(list, "[4..19]", "itallic" , true) + doc.mark(list, "[10..13]", "comment" , "foxes are my favorite animal!") + doc.commit("marks"); + let spans = doc.spans(list); + assert.deepStrictEqual(spans, + [ + [ [ 'bold', 'boolean', true ] ], + 'the ', + [ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ], + 'quick ', + [ + [ 'bold', 'boolean', true ], + [ 'comment', 'str', 'foxes are my favorite animal!' ], + [ 'itallic', 'boolean', true ] + ], + 'fox', + [ [ 'bold', 'boolean', true ], [ 'itallic', 'boolean', true ] ], + ' jumps', + [ [ 'bold', 'boolean', true ] ], + ' over the lazy dog', + [], + ] + ) + let text = doc.text(list); + assert.deepStrictEqual(text, "the quick fox jumps over the lazy dog"); + let raw_spans = doc.raw_spans(list); + assert.deepStrictEqual(raw_spans, + [ + { id: "39@aabbcc", start: 0, end: 37, type: 'bold', value: true }, + { id: "41@aabbcc", start: 4, end: 19, type: 'itallic', value: true }, + { id: "43@aabbcc", start: 10, end: 13, type: 'comment', value: 'foxes are my favorite animal!' } + ]); + + doc.unmark(list, "41@aabbcc") + raw_spans = doc.raw_spans(list); + assert.deepStrictEqual(raw_spans, + [ + { id: "39@aabbcc", start: 0, end: 37, type: 'bold', value: true }, + { id: "43@aabbcc", start: 10, end: 13, type: 'comment', value: 'foxes are my favorite animal!' 
} + ]); + // mark sure encode decode can handle marks + + doc.unmark(list, "39@aabbcc") + raw_spans = doc.raw_spans(list); + assert.deepStrictEqual(raw_spans, + [ + { id: "43@aabbcc", start: 10, end: 13, type: 'comment', value: 'foxes are my favorite animal!' } + ]); + + let all = doc.getChanges([]) + let decoded = all.map((c) => decodeChange(c)) + let encoded = decoded.map((c) => encodeChange(c)) + let doc2 = create(); + doc2.applyChanges(encoded) + + doc.dump() + doc2.dump() + assert.deepStrictEqual(doc.spans(list) , doc2.spans(list)) + assert.deepStrictEqual(doc.save(), doc2.save()) + }) + }) +}) diff --git a/automerge-wasm/test/test.ts b/automerge-wasm/test/test.ts new file mode 100644 index 00000000..97c667b6 --- /dev/null +++ b/automerge-wasm/test/test.ts @@ -0,0 +1,1480 @@ +import { describe, it } from 'mocha'; +//@ts-ignore +import assert from 'assert' +//@ts-ignore +import { BloomFilter } from './helpers/sync' +import { create, loadDoc, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' +import { DecodedSyncMessage, Hash } from '..' 
+ +function sync(a: Automerge, b: Automerge, aSyncState = initSyncState(), bSyncState = initSyncState()) { + const MAX_ITER = 10 + let aToBmsg = null, bToAmsg = null, i = 0 + do { + aToBmsg = a.generateSyncMessage(aSyncState) + bToAmsg = b.generateSyncMessage(bSyncState) + + if (aToBmsg) { + b.receiveSyncMessage(bSyncState, aToBmsg) + } + if (bToAmsg) { + a.receiveSyncMessage(aSyncState, bToAmsg) + } + + if (i++ > MAX_ITER) { + throw new Error(`Did not synchronize within ${MAX_ITER} iterations`) + } + } while (aToBmsg || bToAmsg) +} + +describe('Automerge', () => { + describe('basics', () => { + it('should init clone and free', () => { + let doc1 = create() + let doc2 = doc1.clone() + doc1.free() + doc2.free() + }) + + it('should be able to start and commit', () => { + let doc = create() + doc.commit() + doc.free() + }) + + it('getting a nonexistant prop does not throw an error', () => { + let doc = create() + let root = "_root" + let result = doc.value(root,"hello") + assert.deepEqual(result,undefined) + doc.free() + }) + + it('should be able to set and get a simple value', () => { + let doc : Automerge = create("aabbcc") + let root = "_root" + let result + + doc.set(root, "hello", "world") + doc.set(root, "number1", 5, "uint") + doc.set(root, "number2", 5) + doc.set(root, "number3", 5.5) + doc.set(root, "number4", 5.5, "f64") + doc.set(root, "number5", 5.5, "int") + doc.set(root, "bool", true) + doc.set(root, "time1", 1000, "timestamp") + doc.set(root, "time2", new Date(1001)) + doc.set_object(root, "list", []); + doc.set(root, "null", null) + + result = doc.value(root,"hello") + assert.deepEqual(result,["str","world"]) + + result = doc.value(root,"number1") + assert.deepEqual(result,["uint",5]) + + result = doc.value(root,"number2") + assert.deepEqual(result,["int",5]) + + result = doc.value(root,"number3") + assert.deepEqual(result,["f64",5.5]) + + result = doc.value(root,"number4") + assert.deepEqual(result,["f64",5.5]) + + result = doc.value(root,"number5") + 
assert.deepEqual(result,["int",5]) + + result = doc.value(root,"bool") + assert.deepEqual(result,["boolean",true]) + + doc.set(root, "bool", false, "boolean") + + result = doc.value(root,"bool") + assert.deepEqual(result,["boolean",false]) + + result = doc.value(root,"time1") + assert.deepEqual(result,["timestamp",new Date(1000)]) + + result = doc.value(root,"time2") + assert.deepEqual(result,["timestamp",new Date(1001)]) + + result = doc.value(root,"list") + assert.deepEqual(result,["list","10@aabbcc"]); + + result = doc.value(root,"null") + assert.deepEqual(result,["null",null]); + + doc.free() + }) + + it('should be able to use bytes', () => { + let doc = create() + doc.set("_root","data1", new Uint8Array([10,11,12])); + doc.set("_root","data2", new Uint8Array([13,14,15]), "bytes"); + let value1 = doc.value("_root", "data1") + assert.deepEqual(value1, ["bytes", new Uint8Array([10,11,12])]); + let value2 = doc.value("_root", "data2") + assert.deepEqual(value2, ["bytes", new Uint8Array([13,14,15])]); + doc.free() + }) + + it('should be able to make sub objects', () => { + let doc = create() + let root = "_root" + let result + + let submap = doc.set_object(root, "submap", {}) + doc.set(submap, "number", 6, "uint") + assert.strictEqual(doc.pendingOps(),2) + + result = doc.value(root,"submap") + assert.deepEqual(result,["map",submap]) + + result = doc.value(submap,"number") + assert.deepEqual(result,["uint",6]) + doc.free() + }) + + it('should be able to make lists', () => { + let doc = create() + let root = "_root" + + let submap = doc.set_object(root, "numbers", []) + doc.insert(submap, 0, "a"); + doc.insert(submap, 1, "b"); + doc.insert(submap, 2, "c"); + doc.insert(submap, 0, "z"); + + assert.deepEqual(doc.value(submap, 0),["str","z"]) + assert.deepEqual(doc.value(submap, 1),["str","a"]) + assert.deepEqual(doc.value(submap, 2),["str","b"]) + assert.deepEqual(doc.value(submap, 3),["str","c"]) + assert.deepEqual(doc.length(submap),4) + + doc.set(submap, 2, "b v2"); 
+ + assert.deepEqual(doc.value(submap, 2),["str","b v2"]) + assert.deepEqual(doc.length(submap),4) + doc.free() + }) + + it('lists have insert, set, splice, and push ops', () => { + let doc = create() + let root = "_root" + + let submap = doc.set_object(root, "letters", []) + doc.insert(submap, 0, "a"); + doc.insert(submap, 0, "b"); + assert.deepEqual(doc.materialize(), { letters: ["b", "a" ] }) + doc.push(submap, "c"); + assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c" ] }) + doc.push(submap, 3, "timestamp"); + assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c", new Date(3) ] }) + doc.splice(submap, 1, 1, ["d","e","f"]); + assert.deepEqual(doc.materialize(), { letters: ["b", "d", "e", "f", "c", new Date(3) ] }) + doc.set(submap, 0, "z"); + assert.deepEqual(doc.materialize(), { letters: ["z", "d", "e", "f", "c", new Date(3) ] }) + assert.deepEqual(doc.materialize(submap), ["z", "d", "e", "f", "c", new Date(3) ]) + assert.deepEqual(doc.length(submap),6) + + doc.free() + }) + + it('should be able delete non-existant props', () => { + let doc = create() + + doc.set("_root", "foo","bar") + doc.set("_root", "bip","bap") + let heads1 = doc.commit() + + assert.deepEqual(doc.keys("_root"),["bip","foo"]) + + doc.del("_root", "foo") + doc.del("_root", "baz") + let heads2 = doc.commit() + + assert.deepEqual(doc.keys("_root"),["bip"]) + assert.deepEqual(doc.keys("_root", heads1),["bip", "foo"]) + assert.deepEqual(doc.keys("_root", heads2),["bip"]) + doc.free() + }) + + it('should be able to del', () => { + let doc = create() + let root = "_root" + + doc.set(root, "xxx", "xxx"); + assert.deepEqual(doc.value(root, "xxx"),["str","xxx"]) + doc.del(root, "xxx"); + assert.deepEqual(doc.value(root, "xxx"),undefined) + doc.free() + }) + + it('should be able to use counters', () => { + let doc = create() + let root = "_root" + + doc.set(root, "counter", 10, "counter"); + assert.deepEqual(doc.value(root, "counter"),["counter",10]) + doc.inc(root, "counter", 10); 
+ assert.deepEqual(doc.value(root, "counter"),["counter",20]) + doc.inc(root, "counter", -5); + assert.deepEqual(doc.value(root, "counter"),["counter",15]) + doc.free() + }) + + it('should be able to splice text', () => { + let doc = create() + let root = "_root"; + + let text = doc.set_object(root, "text", ""); + doc.splice(text, 0, 0, "hello ") + doc.splice(text, 6, 0, ["w","o","r","l","d"]) + doc.splice(text, 11, 0, ["!","?"]) + assert.deepEqual(doc.value(text, 0),["str","h"]) + assert.deepEqual(doc.value(text, 1),["str","e"]) + assert.deepEqual(doc.value(text, 9),["str","l"]) + assert.deepEqual(doc.value(text, 10),["str","d"]) + assert.deepEqual(doc.value(text, 11),["str","!"]) + assert.deepEqual(doc.value(text, 12),["str","?"]) + doc.free() + }) + + it('should be able to insert objects into text', () => { + let doc = create() + let text = doc.set_object("/", "text", "Hello world"); + let obj = doc.insert_object(text, 6, { hello: "world" }); + assert.deepEqual(doc.text(text), "Hello \ufffcworld"); + assert.deepEqual(doc.value(text, 6), ["map", obj]); + assert.deepEqual(doc.value(obj, "hello"), ["str", "world"]); + }) + + it('should be able save all or incrementally', () => { + let doc = create() + + doc.set("_root", "foo", 1) + + let save1 = doc.save() + + doc.set("_root", "bar", 2) + + let saveMidway = doc.clone().save(); + + let save2 = doc.saveIncremental(); + + doc.set("_root", "baz", 3); + + let save3 = doc.saveIncremental(); + + let saveA = doc.save(); + let saveB = new Uint8Array([... 
save1, ...save2, ...save3]); + + assert.notDeepEqual(saveA, saveB); + + let docA = loadDoc(saveA); + let docB = loadDoc(saveB); + let docC = loadDoc(saveMidway) + let touched = docC.loadIncremental(save3) + + assert.deepEqual(touched, ["_root"]); + assert.deepEqual(docA.keys("_root"), docB.keys("_root")); + assert.deepEqual(docA.save(), docB.save()); + assert.deepEqual(docA.save(), docC.save()); + doc.free() + docA.free() + docB.free() + docC.free() + }) + + it('should be able to splice text', () => { + let doc = create() + let text = doc.set_object("_root", "text", ""); + doc.splice(text, 0, 0, "hello world"); + let heads1 = doc.commit(); + doc.splice(text, 6, 0, "big bad "); + let heads2 = doc.commit(); + assert.strictEqual(doc.text(text), "hello big bad world") + assert.strictEqual(doc.length(text), 19) + assert.strictEqual(doc.text(text, heads1), "hello world") + assert.strictEqual(doc.length(text, heads1), 11) + assert.strictEqual(doc.text(text, heads2), "hello big bad world") + assert.strictEqual(doc.length(text, heads2), 19) + doc.free() + }) + + it('local inc increments all visible counters in a map', () => { + let doc1 = create("aaaa") + doc1.set("_root", "hello", "world") + let doc2 = loadDoc(doc1.save(), "bbbb"); + let doc3 = loadDoc(doc1.save(), "cccc"); + doc1.set("_root", "cnt", 20) + doc2.set("_root", "cnt", 0, "counter") + doc3.set("_root", "cnt", 10, "counter") + doc1.applyChanges(doc2.getChanges(doc1.getHeads())) + doc1.applyChanges(doc3.getChanges(doc1.getHeads())) + let result = doc1.values("_root", "cnt") + assert.deepEqual(result,[ + ['int',20,'2@aaaa'], + ['counter',0,'2@bbbb'], + ['counter',10,'2@cccc'], + ]) + doc1.inc("_root", "cnt", 5) + result = doc1.values("_root", "cnt") + assert.deepEqual(result, [ + [ 'counter', 5, '2@bbbb' ], + [ 'counter', 15, '2@cccc' ], + ]) + + let save1 = doc1.save() + let doc4 = loadDoc(save1) + assert.deepEqual(doc4.save(), save1); + doc1.free() + doc2.free() + doc3.free() + doc4.free() + }) + + it('local inc 
increments all visible counters in a sequence', () => { + let doc1 = create("aaaa") + let seq = doc1.set_object("_root", "seq", []) + doc1.insert(seq, 0, "hello") + let doc2 = loadDoc(doc1.save(), "bbbb"); + let doc3 = loadDoc(doc1.save(), "cccc"); + doc1.set(seq, 0, 20) + doc2.set(seq, 0, 0, "counter") + doc3.set(seq, 0, 10, "counter") + let touched1 = doc1.applyChanges(doc2.getChanges(doc1.getHeads())) + let touched2 = doc1.applyChanges(doc3.getChanges(doc1.getHeads())) + let result = doc1.values(seq, 0) + assert.deepEqual(touched1,["1@aaaa"]) + assert.deepEqual(touched2,["1@aaaa"]) + assert.deepEqual(result,[ + ['int',20,'3@aaaa'], + ['counter',0,'3@bbbb'], + ['counter',10,'3@cccc'], + ]) + doc1.inc(seq, 0, 5) + result = doc1.values(seq, 0) + assert.deepEqual(result, [ + [ 'counter', 5, '3@bbbb' ], + [ 'counter', 15, '3@cccc' ], + ]) + + let save = doc1.save() + let doc4 = loadDoc(save) + assert.deepEqual(doc4.save(), save); + doc1.free() + doc2.free() + doc3.free() + doc4.free() + }) + + it('paths can be used instead of objids', () => { + let doc = create("aaaa") + doc.set_object("_root","list",[{ foo: "bar"}, [1,2,3]]) + assert.deepEqual(doc.materialize("/"), { list: [{ foo: "bar"}, [1,2,3]] }) + assert.deepEqual(doc.materialize("/list"), [{ foo: "bar"}, [1,2,3]]) + assert.deepEqual(doc.materialize("/list/0"), { foo: "bar"}) + }) + + it('should be able to fetch changes by hash', () => { + let doc1 = create("aaaa") + let doc2 = create("bbbb") + doc1.set("/","a","b") + doc2.set("/","b","c") + let head1 = doc1.getHeads() + let head2 = doc2.getHeads() + let change1 = doc1.getChangeByHash(head1[0]) + let change2 = doc1.getChangeByHash(head2[0]) + assert.deepEqual(change2, null) + if (change1 === null) { throw new RangeError("change1 should not be null") } + assert.deepEqual(decodeChange(change1).hash, head1[0]) + assert.deepEqual(head1.some((hash) => doc1.getChangeByHash(hash) === null), false) + assert.deepEqual(head2.some((hash) => doc1.getChangeByHash(hash) === 
null), true) + }) + + it('recursive sets are possible', () => { + let doc = create("aaaa") + let l1 = doc.set_object("_root","list",[{ foo: "bar"}, [1,2,3]]) + let l2 = doc.insert_object(l1, 0, { zip: ["a", "b"] }) + let l3 = doc.set_object("_root","info1","hello world") // 'text' object + doc.set("_root","info2","hello world") // 'str' + let l4 = doc.set_object("_root","info3","hello world") + assert.deepEqual(doc.materialize(), { + "list": [ { zip: ["a", "b"] }, { foo: "bar"}, [ 1,2,3]], + "info1": "hello world", + "info2": "hello world", + "info3": "hello world", + }) + assert.deepEqual(doc.materialize(l2), { zip: ["a","b"] }) + assert.deepEqual(doc.materialize(l1), [ { zip: ["a","b"] }, { foo: "bar" }, [ 1,2,3] ]) + assert.deepEqual(doc.materialize(l4), "hello world") + doc.free() + }) + + it('only returns an object id when objects are created', () => { + let doc = create("aaaa") + let r1 = doc.set("_root","foo","bar") + let r2 = doc.set_object("_root","list",[]) + let r3 = doc.set("_root","counter",10, "counter") + let r4 = doc.inc("_root","counter",1) + let r5 = doc.del("_root","counter") + let r6 = doc.insert(r2,0,10); + let r7 = doc.insert_object(r2,0,{}); + let r8 = doc.splice(r2,1,0,["a","b","c"]); + //let r9 = doc.splice(r2,1,0,["a",[],{},"d"]); + assert.deepEqual(r1,null); + assert.deepEqual(r2,"2@aaaa"); + assert.deepEqual(r3,null); + assert.deepEqual(r4,null); + assert.deepEqual(r5,null); + assert.deepEqual(r6,null); + assert.deepEqual(r7,"7@aaaa"); + assert.deepEqual(r8,null); + //assert.deepEqual(r9,["12@aaaa","13@aaaa"]); + doc.free() + }) + + it('objects without properties are preserved', () => { + let doc1 = create("aaaa") + let a = doc1.set_object("_root","a",{}); + let b = doc1.set_object("_root","b",{}); + let c = doc1.set_object("_root","c",{}); + let d = doc1.set(c,"d","dd"); + let saved = doc1.save(); + let doc2 = loadDoc(saved); + assert.deepEqual(doc2.value("_root","a"),["map",a]) + assert.deepEqual(doc2.keys(a),[]) + 
assert.deepEqual(doc2.value("_root","b"),["map",b]) + assert.deepEqual(doc2.keys(b),[]) + assert.deepEqual(doc2.value("_root","c"),["map",c]) + assert.deepEqual(doc2.keys(c),["d"]) + assert.deepEqual(doc2.value(c,"d"),["str","dd"]) + doc1.free() + doc2.free() + }) + + it('should handle merging text conflicts then saving & loading', () => { + let A = create("aabbcc") + let At = A.set_object('_root', 'text', "") + A.splice(At, 0, 0, 'hello') + + let B = A.fork() + + assert.deepEqual(B.value("_root","text"), [ "text", At]) + + B.splice(At, 4, 1) + B.splice(At, 4, 0, '!') + B.splice(At, 5, 0, ' ') + B.splice(At, 6, 0, 'world') + + A.merge(B) + + let binary = A.save() + + let C = loadDoc(binary) + + assert.deepEqual(C.value('_root', 'text'), ['text', '1@aabbcc']) + assert.deepEqual(C.text(At), 'hell! world') + }) + + it('should return opIds that were changed', () => { + let A = create("aabbcc") + let At = A.set_object('_root', 'list', []) + A.insert('/list', 0, 'a') + A.insert('/list', 1, 'b') + + let B = A.fork() + + A.insert('/list', 2, 'c') + + let opIds = A.merge(B) + assert.equal(opIds.length, 0) + }) + }) + describe('sync', () => { + it('should send a sync message implying no local data', () => { + let doc = create() + let s1 = initSyncState() + let m1 = doc.generateSyncMessage(s1) + if (m1 === null) { throw new RangeError("message should not be null") } + const message: DecodedSyncMessage = decodeSyncMessage(m1) + assert.deepStrictEqual(message.heads, []) + assert.deepStrictEqual(message.need, []) + assert.deepStrictEqual(message.have.length, 1) + assert.deepStrictEqual(message.have[0].lastSync, []) + assert.deepStrictEqual(message.have[0].bloom.byteLength, 0) + assert.deepStrictEqual(message.changes, []) + }) + + it('should not reply if we have no data as well', () => { + let n1 = create(), n2 = create() + let s1 = initSyncState(), s2 = initSyncState() + let m1 = n1.generateSyncMessage(s1) + if (m1 === null) { throw new RangeError("message should not be null") } 
+ n2.receiveSyncMessage(s2, m1) + let m2 = n2.generateSyncMessage(s2) + assert.deepStrictEqual(m2, null) + }) + + it('repos with equal heads do not need a reply message', () => { + let n1 = create(), n2 = create() + let s1 = initSyncState(), s2 = initSyncState() + + // make two nodes with the same changes + let list = n1.set_object("_root","n", []) + n1.commit("",0) + for (let i = 0; i < 10; i++) { + n1.insert(list,i,i) + n1.commit("",0) + } + n2.applyChanges(n1.getChanges([])) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) + + // generate a naive sync message + let m1 = n1.generateSyncMessage(s1) + if (m1 === null) { throw new RangeError("message should not be null") } + assert.deepStrictEqual(s1.lastSentHeads, n1.getHeads()) + + // heads are equal so this message should be null + n2.receiveSyncMessage(s2, m1) + let m2 = n2.generateSyncMessage(s2) + assert.strictEqual(m2, null) + }) + + it('n1 should offer all changes to n2 when starting from nothing', () => { + let n1 = create(), n2 = create() + + // make changes for n1 that n2 should request + let list = n1.set_object("_root","n",[]) + n1.commit("",0) + for (let i = 0; i < 10; i++) { + n1.insert(list, i, i) + n1.commit("",0) + } + + assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) + sync(n1, n2) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) + }) + + it('should sync peers where one has commits the other does not', () => { + let n1 = create(), n2 = create() + + // make changes for n1 that n2 should request + let list = n1.set_object("_root","n",[]) + n1.commit("",0) + for (let i = 0; i < 10; i++) { + n1.insert(list,i,i) + n1.commit("",0) + } + + assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) + sync(n1, n2) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) + }) + + it('should work with prior sync state', () => { + // create & synchronize two nodes + let n1 = create(), n2 = create() + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 
0; i < 5; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + // modify the first node further + for (let i = 5; i < 10; i++) { + n1.set("_root", "x", i) + n1.commit("",0) + } + + assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) + }) + + it('should not generate messages once synced', () => { + // create & synchronize two nodes + let n1 = create('abc123'), n2 = create('def456') + let s1 = initSyncState(), s2 = initSyncState() + + let message, patch + for (let i = 0; i < 5; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + for (let i = 0; i < 5; i++) { + n2.set("_root","y",i) + n2.commit("",0) + } + + // n1 reports what it has + message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") } + + // n2 receives that message and sends changes along with what it has + n2.receiveSyncMessage(s2, message) + message = n2.generateSyncMessage(s2) + if (message === null) { throw new RangeError("message should not be null") } + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) + //assert.deepStrictEqual(patch, null) // no changes arrived + + // n1 receives the changes and replies with the changes it now knows n2 needs + n1.receiveSyncMessage(s1, message) + message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") } + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) + + // n2 applies the changes and sends confirmation ending the exchange + n2.receiveSyncMessage(s2, message) + message = n2.generateSyncMessage(s2) + if (message === null) { throw new RangeError("message should not be null") } + + // n1 receives the message and has nothing more to say + n1.receiveSyncMessage(s1, message) + message = n1.generateSyncMessage(s1) + assert.deepStrictEqual(message, null) + //assert.deepStrictEqual(patch, null) // 
no changes arrived + + // n2 also has nothing left to say + message = n2.generateSyncMessage(s2) + assert.deepStrictEqual(message, null) + }) + + it('should allow simultaneous messages during synchronization', () => { + // create & synchronize two nodes + let n1 = create('abc123'), n2 = create('def456') + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 5; i++) { + n1.set("_root", "x", i) + n1.commit("",0) + } + for (let i = 0; i < 5; i++) { + n2.set("_root","y", i) + n2.commit("",0) + } + + const head1 = n1.getHeads()[0], head2 = n2.getHeads()[0] + + // both sides report what they have but have no shared peer state + let msg1to2, msg2to1 + msg1to2 = n1.generateSyncMessage(s1) + msg2to1 = n2.generateSyncMessage(s2) + if (msg1to2 === null) { throw new RangeError("message should not be null") } + if (msg2to1 === null) { throw new RangeError("message should not be null") } + assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) + assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync.length, 0) + assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) + assert.deepStrictEqual(decodeSyncMessage(msg2to1).have[0].lastSync.length, 0) + + // n1 and n2 receives that message and update sync state but make no patch + n1.receiveSyncMessage(s1, msg2to1) + n2.receiveSyncMessage(s2, msg1to2) + + // now both reply with their local changes the other lacks + // (standard warning that 1% of the time this will result in a "need" message) + msg1to2 = n1.generateSyncMessage(s1) + if (msg1to2 === null) { throw new RangeError("message should not be null") } + assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 5) + msg2to1 = n2.generateSyncMessage(s2) + if (msg2to1 === null) { throw new RangeError("message should not be null") } + assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 5) + + // both should now apply the changes and update the frontend + n1.receiveSyncMessage(s1, msg2to1) + 
assert.deepStrictEqual(n1.getMissingDeps(), []) + //assert.notDeepStrictEqual(patch1, null) + assert.deepStrictEqual(n1.materialize(), {x: 4, y: 4}) + + n2.receiveSyncMessage(s2, msg1to2) + assert.deepStrictEqual(n2.getMissingDeps(), []) + //assert.notDeepStrictEqual(patch2, null) + assert.deepStrictEqual(n2.materialize(), {x: 4, y: 4}) + + // The response acknowledges the changes received, and sends no further changes + msg1to2 = n1.generateSyncMessage(s1) + if (msg1to2 === null) { throw new RangeError("message should not be null") } + assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) + msg2to1 = n2.generateSyncMessage(s2) + if (msg2to1 === null) { throw new RangeError("message should not be null") } + assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) + + // After receiving acknowledgements, their shared heads should be equal + n1.receiveSyncMessage(s1, msg2to1) + n2.receiveSyncMessage(s2, msg1to2) + assert.deepStrictEqual(s1.sharedHeads, [head1, head2].sort()) + assert.deepStrictEqual(s2.sharedHeads, [head1, head2].sort()) + //assert.deepStrictEqual(patch1, null) + //assert.deepStrictEqual(patch2, null) + + // We're in sync, no more messages required + msg1to2 = n1.generateSyncMessage(s1) + msg2to1 = n2.generateSyncMessage(s2) + assert.deepStrictEqual(msg1to2, null) + assert.deepStrictEqual(msg2to1, null) + + // If we make one more change, and start another sync, its lastSync should be updated + n1.set("_root","x",5) + msg1to2 = n1.generateSyncMessage(s1) + if (msg1to2 === null) { throw new RangeError("message should not be null") } + assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync, [head1, head2].sort()) + }) + + it('should assume sent changes were recieved until we hear otherwise', () => { + let n1 = create('01234567'), n2 = create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState(), message = null + + let items = n1.set_object("_root", "items", []) + n1.commit("",0) + + sync(n1, n2, s1, s2) + + 
n1.push(items, "x") + n1.commit("",0) + message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") } + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + + n1.push(items, "y") + n1.commit("",0) + message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") } + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + + n1.push(items, "z") + n1.commit("",0) + + message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") } + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + }) + + it('should work regardless of who initiates the exchange', () => { + // create & synchronize two nodes + let n1 = create(), n2 = create() + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 5; i++) { + n1.set("_root", "x", i) + n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + // modify the first node further + for (let i = 5; i < 10; i++) { + n1.set("_root", "x", i) + n1.commit("",0) + } + + assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) + }) + + it('should work without prior sync state', () => { + // Scenario: ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ + // `-- c15 <-- c16 <-- c17 + // lastSync is undefined. 
+ + // create two peers both with divergent commits + let n1 = create('01234567'), n2 = create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 10; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + sync(n1, n2) + + for (let i = 10; i < 15; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + for (let i = 15; i < 18; i++) { + n2.set("_root","x",i) + n2.commit("",0) + } + + assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) + sync(n1, n2) + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) + }) + + it('should work with prior sync state', () => { + // Scenario: ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ + // `-- c15 <-- c16 <-- c17 + // lastSync is c9. + + // create two peers both with divergent commits + let n1 = create('01234567'), n2 = create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 10; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + for (let i = 10; i < 15; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + for (let i = 15; i < 18; i++) { + n2.set("_root","x",i) + n2.commit("",0) + } + + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + + assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) + }) + + it('should ensure non-empty state after sync', () => { + let n1 = create('01234567'), n2 = create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 3; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + assert.deepStrictEqual(s1.sharedHeads, n1.getHeads()) + assert.deepStrictEqual(s2.sharedHeads, n1.getHeads()) + }) + + it('should re-sync after one 
node crashed with data loss', () => { + // Scenario: (r) (n2) (n1) + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 + // n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync is c2. + // we want to successfully sync (n1) with (r), even though (n1) believes it's talking to (n2) + let n1 = create('01234567'), n2 = create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + + // n1 makes three changes, which we sync to n2 + for (let i = 0; i < 3; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + // save a copy of n2 as "r" to simulate recovering from crash + let r, rSyncState + ;[r, rSyncState] = [n2.clone(), s2.clone()] + + // sync another few commits + for (let i = 3; i < 6; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + // everyone should be on the same page here + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) + + // now make a few more changes, then attempt to sync the fully-up-to-date n1 with the confused r + for (let i = 6; i < 9; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + s1 = decodeSyncState(encodeSyncState(s1)) + rSyncState = decodeSyncState(encodeSyncState(rSyncState)) + + assert.notDeepStrictEqual(n1.getHeads(), r.getHeads()) + assert.notDeepStrictEqual(n1.materialize(), r.materialize()) + assert.deepStrictEqual(n1.materialize(), {x: 8}) + assert.deepStrictEqual(r.materialize(), {x: 2}) + sync(n1, r, s1, rSyncState) + assert.deepStrictEqual(n1.getHeads(), r.getHeads()) + assert.deepStrictEqual(n1.materialize(), r.materialize()) + }) + + it('should resync after one node experiences data loss without disconnecting', () => { + let n1 = create('01234567'), n2 = create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + + // n1 makes three changes, which we sync to n2 + for (let i = 0; i < 3; i++) { + n1.set("_root","x",i) + n1.commit("",0) + } + + sync(n1, n2, s1, 
s2) + + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) + + let n2AfterDataLoss = create('89abcdef') + + // "n2" now has no data, but n1 still thinks it does. Note we don't do + // decodeSyncState(encodeSyncState(s1)) in order to simulate data loss without disconnecting + sync(n1, n2AfterDataLoss, s1, initSyncState()) + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) + }) + + it('should handle changes concurrent to the last sync heads', () => { + let n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') + let s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState() + + // Change 1 is known to all three nodes + //n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 1) + n1.set("_root","x",1); n1.commit("",0) + + sync(n1, n2, s12, s21) + sync(n2, n3, s23, s32) + + // Change 2 is known to n1 and n2 + n1.set("_root","x",2); n1.commit("",0) + + sync(n1, n2, s12, s21) + + // Each of the three nodes makes one change (changes 3, 4, 5) + n1.set("_root","x",3); n1.commit("",0) + n2.set("_root","x",4); n2.commit("",0) + n3.set("_root","x",5); n3.commit("",0) + + // Apply n3's latest change to n2. If running in Node, turn the Uint8Array into a Buffer, to + // simulate transmission over a network (see https://github.com/automerge/automerge/pull/362) + let change = n3.getLastLocalChange() + //@ts-ignore + if (typeof Buffer === 'function') change = Buffer.from(change) + if (change === undefined) { throw new RangeError("last local change failed") } + n2.applyChanges([change]) + + // Now sync n1 and n2. 
n3's change is concurrent to n1 and n2's last sync heads + sync(n1, n2, s12, s21) + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) + }) + + it('should handle histories with lots of branching and merging', () => { + let n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') + n1.set("_root","x",0); n1.commit("",0) + n2.applyChanges([n1.getLastLocalChange()]) + n3.applyChanges([n1.getLastLocalChange()]) + n3.set("_root","x",1); n3.commit("",0) + + // - n1c1 <------ n1c2 <------ n1c3 <-- etc. <-- n1c20 <------ n1c21 + // / \/ \/ \/ + // / /\ /\ /\ + // c0 <---- n2c1 <------ n2c2 <------ n2c3 <-- etc. <-- n2c20 <------ n2c21 + // \ / + // ---------------------------------------------- n3c1 <----- + for (let i = 1; i < 20; i++) { + n1.set("_root","n1",i); n1.commit("",0) + n2.set("_root","n2",i); n2.commit("",0) + const change1 = n1.getLastLocalChange() + const change2 = n2.getLastLocalChange() + n1.applyChanges([change2]) + n2.applyChanges([change1]) + } + + let s1 = initSyncState(), s2 = initSyncState() + sync(n1, n2, s1, s2) + + // Having n3's last change concurrent to the last sync heads forces us into the slower code path + n2.applyChanges([n3.getLastLocalChange()]) + n1.set("_root","n1","final"); n1.commit("",0) + n2.set("_root","n2","final"); n2.commit("",0) + + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + assert.deepStrictEqual(n1.materialize(), n2.materialize()) + }) + + it('should handle a false-positive head', () => { + // Scenario: ,-- n1 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ + // `-- n2 + // where n2 is a false positive in the Bloom filter containing {n1}. + // lastSync is c9. 
+ let n1 = create('01234567'), n2 = create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 10; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + + sync(n1, n2, s1, s2) + for (let i = 1; ; i++) { // search for false positive; see comment above + const n1up = n1.clone('01234567'); + n1up.set("_root","x",`${i} @ n1`); n1up.commit("",0) + const n2up = n2.clone('89abcdef'); + n2up.set("_root","x",`${i} @ n2`); n2up.commit("",0) + if (new BloomFilter(n1up.getHeads()).containsHash(n2up.getHeads()[0])) { + n1.free(); n2.free() + n1 = n1up; n2 = n2up; break + } + } + const allHeads = [...n1.getHeads(), ...n2.getHeads()].sort() + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.getHeads(), allHeads) + assert.deepStrictEqual(n2.getHeads(), allHeads) + }) + + + describe('with a false-positive dependency', () => { + let n1: Automerge, n2: Automerge, s1: SyncState, s2: SyncState, n1hash2: Hash, n2hash2: Hash + + beforeEach(() => { + // Scenario: ,-- n1c1 <-- n1c2 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ + // `-- n2c1 <-- n2c2 + // where n2c1 is a false positive in the Bloom filter containing {n1c1, n1c2}. + // lastSync is c9. 
+ n1 = create('01234567') + n2 = create('89abcdef') + s1 = initSyncState() + s2 = initSyncState() + for (let i = 0; i < 10; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + sync(n1, n2, s1, s2) + + let n1hash1, n2hash1 + for (let i = 29; ; i++) { // search for false positive; see comment above + const n1us1 = n1.clone('01234567') + n1us1.set("_root","x",`${i} @ n1`); n1us1.commit("",0) + + const n2us1 = n2.clone('89abcdef') + n2us1.set("_root","x",`${i} @ n1`); n2us1.commit("",0) + + n1hash1 = n1us1.getHeads()[0]; n2hash1 = n2us1.getHeads()[0] + + const n1us2 = n1us1.clone(); + n1us2.set("_root","x",`final @ n1`); n1us2.commit("",0) + + const n2us2 = n2us1.clone(); + n2us2.set("_root","x",`final @ n2`); n2us2.commit("",0) + + n1hash2 = n1us2.getHeads()[0]; n2hash2 = n2us2.getHeads()[0] + if (new BloomFilter([n1hash1, n1hash2]).containsHash(n2hash1)) { + n1.free(); n2.free() + n1 = n1us2; n2 = n2us2; break + } + } + }) + + it('should sync two nodes without connection reset', () => { + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.getHeads(), [n1hash2, n2hash2].sort()) + assert.deepStrictEqual(n2.getHeads(), [n1hash2, n2hash2].sort()) + }) + + it('should sync two nodes with connection reset', () => { + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.getHeads(), [n1hash2, n2hash2].sort()) + assert.deepStrictEqual(n2.getHeads(), [n1hash2, n2hash2].sort()) + }) + + it('should sync three nodes', () => { + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + + // First n1 and n2 exchange Bloom filters + let m1, m2 + m1 = n1.generateSyncMessage(s1) + m2 = n2.generateSyncMessage(s2) + if (m1 === null) { throw new RangeError("message should not be null") } + if (m2 === null) { throw new RangeError("message should not be null") } + let touched1 = n1.receiveSyncMessage(s1, m2) + let touched2 = n2.receiveSyncMessage(s2, m1) + 
assert.deepEqual(touched1, []); + assert.deepEqual(touched2, []); + + // Then n1 and n2 send each other their changes, except for the false positive + m1 = n1.generateSyncMessage(s1) + m2 = n2.generateSyncMessage(s2) + if (m1 === null) { throw new RangeError("message should not be null") } + if (m2 === null) { throw new RangeError("message should not be null") } + let touched3 = n1.receiveSyncMessage(s1, m2) + let touched4 = n2.receiveSyncMessage(s2, m1) + assert.deepEqual(touched3, []); + assert.deepEqual(touched4, ["_root"]); + assert.strictEqual(decodeSyncMessage(m1).changes.length, 2) // n1c1 and n1c2 + assert.strictEqual(decodeSyncMessage(m2).changes.length, 1) // only n2c2; change n2c1 is not sent + + // n3 is a node that doesn't have the missing change. Nevertheless n1 is going to ask n3 for it + let n3 = create('fedcba98'), s13 = initSyncState(), s31 = initSyncState() + sync(n1, n3, s13, s31) + assert.deepStrictEqual(n1.getHeads(), [n1hash2]) + assert.deepStrictEqual(n3.getHeads(), [n1hash2]) + }) + }) + + it('should not require an additional request when a false-positive depends on a true-negative', () => { + // Scenario: ,-- n1c1 <-- n1c2 <-- n1c3 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+ + // `-- n2c1 <-- n2c2 <-- n2c3 + // where n2c2 is a false positive in the Bloom filter containing {n1c1, n1c2, n1c3}. + // lastSync is c4. 
+ let n1 = create('01234567'), n2 = create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + let n1hash3, n2hash3 + + for (let i = 0; i < 5; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + sync(n1, n2, s1, s2) + for (let i = 86; ; i++) { // search for false positive; see comment above + const n1us1 = n1.clone('01234567') + n1us1.set("_root","x",`${i} @ n1`); n1us1.commit("",0) + + const n2us1 = n2.clone('89abcdef') + n2us1.set("_root","x",`${i} @ n2`); n2us1.commit("",0) + + //const n1us1 = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) + //const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) + const n1hash1 = n1us1.getHeads()[0] + + const n1us2 = n1us1.clone() + n1us2.set("_root","x",`${i + 1} @ n1`); n1us2.commit("",0) + + const n2us2 = n2us1.clone() + n2us2.set("_root","x",`${i + 1} @ n2`); n2us2.commit("",0) + + const n1hash2 = n1us2.getHeads()[0], n2hash2 = n2us2.getHeads()[0] + + const n1us3 = n1us2.clone() + n1us3.set("_root","x",`final @ n1`); n1us3.commit("",0) + + const n2us3 = n2us2.clone() + n2us3.set("_root","x",`final @ n2`); n2us3.commit("",0) + + n1hash3 = n1us3.getHeads()[0]; n2hash3 = n2us3.getHeads()[0] + + if (new BloomFilter([n1hash1, n1hash2, n1hash3]).containsHash(n2hash2)) { + n1.free(); n2.free(); + n1 = n1us3; n2 = n2us3; break + } + } + const bothHeads = [n1hash3, n2hash3].sort() + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.getHeads(), bothHeads) + assert.deepStrictEqual(n2.getHeads(), bothHeads) + }) + + it('should handle chains of false-positives', () => { + // Scenario: ,-- c5 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+ + // `-- n2c1 <-- n2c2 <-- n2c3 + // where n2c1 and n2c2 are both false positives in the Bloom filter containing {c5}. + // lastSync is c4. 
+ let n1 = create('01234567'), n2 = create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + + for (let i = 0; i < 5; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + n1.set("_root","x",5); n1.commit("",0) + + for (let i = 2; ; i++) { // search for false positive; see comment above + const n2us1 = n2.clone('89abcdef') + n2us1.set("_root","x",`${i} @ n2`); n2us1.commit("",0) + if (new BloomFilter(n1.getHeads()).containsHash(n2us1.getHeads()[0])) { + n2 = n2us1; break + } + } + for (let i = 141; ; i++) { // search for false positive; see comment above + const n2us2 = n2.clone('89abcdef') + n2us2.set("_root","x",`${i} again`); n2us2.commit("",0) + if (new BloomFilter(n1.getHeads()).containsHash(n2us2.getHeads()[0])) { + n2 = n2us2; break + } + } + n2.set("_root","x",`final @ n2`); n2.commit("",0) + + const allHeads = [...n1.getHeads(), ...n2.getHeads()].sort() + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + sync(n1, n2, s1, s2) + assert.deepStrictEqual(n1.getHeads(), allHeads) + assert.deepStrictEqual(n2.getHeads(), allHeads) + }) + + it('should allow the false-positive hash to be explicitly requested', () => { + // Scenario: ,-- n1 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ + // `-- n2 + // where n2 causes a false positive in the Bloom filter containing {n1}. 
+ let n1 = create('01234567'), n2 = create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + let message + + for (let i = 0; i < 10; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + + sync(n1, n2, s1, s2) + + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + + for (let i = 1; ; i++) { // brute-force search for false positive; see comment above + const n1up = n1.clone('01234567'); n1up.set("_root","x",`${i} @ n1`); n1up.commit("",0) + const n2up = n1.clone('89abcdef'); n2up.set("_root","x",`${i} @ n2`); n2up.commit("",0) + + // check if the bloom filter on n2 will believe n1 already has a particular hash + // this will mean n2 won't offer that data to n1 by receiving a sync message from n1 + if (new BloomFilter(n1up.getHeads()).containsHash(n2up.getHeads()[0])) { + n1 = n1up; n2 = n2up; break + } + } + + // n1 creates a sync message for n2 with an ill-fated bloom + message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") } + assert.strictEqual(decodeSyncMessage(message).changes.length, 0) + + // n2 receives it and DOESN'T send a change back + n2.receiveSyncMessage(s2, message) + message = n2.generateSyncMessage(s2) + if (message === null) { throw new RangeError("message should not be null") } + assert.strictEqual(decodeSyncMessage(message).changes.length, 0) + + // n1 should now realize it's missing that change and request it explicitly + n1.receiveSyncMessage(s1, message) + message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") } + assert.deepStrictEqual(decodeSyncMessage(message).need, n2.getHeads()) + + // n2 should fulfill that request + n2.receiveSyncMessage(s2, message) + message = n2.generateSyncMessage(s2) + if (message === null) { throw new RangeError("message should not be null") } + assert.strictEqual(decodeSyncMessage(message).changes.length, 1) + + // n1 should apply the change and
the two should now be in sync + n1.receiveSyncMessage(s1, message) + assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) + }) + + describe('protocol features', () => { + it('should allow multiple Bloom filters', () => { + // Scenario: ,-- n1c1 <-- n1c2 <-- n1c3 + // c0 <-- c1 <-- c2 <-+--- n2c1 <-- n2c2 <-- n2c3 + // `-- n3c1 <-- n3c2 <-- n3c3 + // n1 has {c0, c1, c2, n1c1, n1c2, n1c3, n2c1, n2c2}; + // n2 has {c0, c1, c2, n1c1, n1c2, n2c1, n2c2, n2c3}; + // n3 has {c0, c1, c2, n3c1, n3c2, n3c3}. + let n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210') + let s13 = initSyncState(), s12 = initSyncState(), s21 = initSyncState() + let s32 = initSyncState(), s31 = initSyncState(), s23 = initSyncState() + let message1, message2, message3 + + for (let i = 0; i < 3; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + + // sync all 3 nodes + sync(n1, n2, s12, s21) // eslint-disable-line no-unused-vars -- kept for consistency + sync(n1, n3, s13, s31) + sync(n3, n2, s32, s23) + for (let i = 0; i < 2; i++) { + n1.set("_root","x",`${i} @ n1`); n1.commit("",0) + } + for (let i = 0; i < 2; i++) { + n2.set("_root","x",`${i} @ n2`); n2.commit("",0) + } + n1.applyChanges(n2.getChanges([])) + n2.applyChanges(n1.getChanges([])) + n1.set("_root","x",`3 @ n1`); n1.commit("",0) + n2.set("_root","x",`3 @ n2`); n2.commit("",0) + + for (let i = 0; i < 3; i++) { + n3.set("_root","x",`${i} @ n3`); n3.commit("",0) + } + const n1c3 = n1.getHeads()[0], n2c3 = n2.getHeads()[0], n3c3 = n3.getHeads()[0] + s13 = decodeSyncState(encodeSyncState(s13)) + s31 = decodeSyncState(encodeSyncState(s31)) + s23 = decodeSyncState(encodeSyncState(s23)) + s32 = decodeSyncState(encodeSyncState(s32)) + + + // Now n3 concurrently syncs with n1 and n2. Doing this naively would result in n3 receiving + // changes {n1c1, n1c2, n2c1, n2c2} twice (those are the changes that both n1 and n2 have, but + // that n3 does not have). We want to prevent this duplication. 
+ message1 = n1.generateSyncMessage(s13) // message from n1 to n3 + if (message1 === null) { throw new RangeError("message should not be null") } + assert.strictEqual(decodeSyncMessage(message1).changes.length, 0) + n3.receiveSyncMessage(s31, message1) + message3 = n3.generateSyncMessage(s31) // message from n3 to n1 + if (message3 === null) { throw new RangeError("message should not be null") } + assert.strictEqual(decodeSyncMessage(message3).changes.length, 3) // {n3c1, n3c2, n3c3} + n1.receiveSyncMessage(s13, message3) + + // Copy the Bloom filter received from n1 into the message sent from n3 to n2. This Bloom + // filter indicates what changes n3 is going to receive from n1. + message3 = n3.generateSyncMessage(s32) // message from n3 to n2 + if (message3 === null) { throw new RangeError("message should not be null") } + const modifiedMessage = decodeSyncMessage(message3) + modifiedMessage.have.push(decodeSyncMessage(message1).have[0]) + assert.strictEqual(modifiedMessage.changes.length, 0) + n2.receiveSyncMessage(s23, encodeSyncMessage(modifiedMessage)) + + // n2 replies to n3, sending only n2c3 (the one change that n2 has but n1 doesn't) + message2 = n2.generateSyncMessage(s23) + if (message2 === null) { throw new RangeError("message should not be null") } + assert.strictEqual(decodeSyncMessage(message2).changes.length, 1) // {n2c3} + n3.receiveSyncMessage(s32, message2) + + // n1 replies to n3 + message1 = n1.generateSyncMessage(s13) + if (message1 === null) { throw new RangeError("message should not be null") } + assert.strictEqual(decodeSyncMessage(message1).changes.length, 5) // {n1c1, n1c2, n1c3, n2c1, n2c2} + n3.receiveSyncMessage(s31, message1) + assert.deepStrictEqual(n3.getHeads(), [n1c3, n2c3, n3c3].sort()) + }) + + it('should allow any change to be requested', () => { + let n1 = create('01234567'), n2 = create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + let message = null + + for (let i = 0; i < 3; i++) { + n1.set("_root","x",i); 
n1.commit("",0) + } + + const lastSync = n1.getHeads() + + for (let i = 3; i < 6; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + + sync(n1, n2, s1, s2) + s1.lastSentHeads = [] // force generateSyncMessage to return a message even though nothing changed + message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") } + const modMsg = decodeSyncMessage(message) + modMsg.need = lastSync // re-request change 2 + n2.receiveSyncMessage(s2, encodeSyncMessage(modMsg)) + message = n2.generateSyncMessage(s2) + if (message === null) { throw new RangeError("message should not be null") } + assert.strictEqual(decodeSyncMessage(message).changes.length, 1) + assert.strictEqual(decodeChange(decodeSyncMessage(message).changes[0]).hash, lastSync[0]) + }) + + it('should ignore requests for a nonexistent change', () => { + let n1 = create('01234567'), n2 = create('89abcdef') + let s1 = initSyncState(), s2 = initSyncState() + let message = null + + for (let i = 0; i < 3; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + + n2.applyChanges(n1.getChanges([])) + message = n1.generateSyncMessage(s1) + if (message === null) { throw new RangeError("message should not be null") } + message = decodeSyncMessage(message) + message.need = ['0000000000000000000000000000000000000000000000000000000000000000'] + message = encodeSyncMessage(message) + n2.receiveSyncMessage(s2, message) + message = n2.generateSyncMessage(s2) + assert.strictEqual(message, null) + }) + + it('should allow a subset of changes to be sent', () => { + // ,-- c1 <-- c2 + // c0 <-+ + // `-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 + let n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210') + let s1 = initSyncState(), s2 = initSyncState() + let msg, decodedMsg + + n1.set("_root","x",0); n1.commit("",0) + n3.applyChanges(n3.getChangesAdded(n1)) // merge() + for (let i = 1; i <= 2; i++) { + n1.set("_root","x",i); n1.commit("",0) + } + for (let i = 3; i 
<= 4; i++) { + n3.set("_root","x",i); n3.commit("",0) + } + const c2 = n1.getHeads()[0], c4 = n3.getHeads()[0] + n2.applyChanges(n2.getChangesAdded(n3)) // merge() + + // Sync n1 and n2, so their shared heads are {c2, c4} + sync(n1, n2, s1, s2) + s1 = decodeSyncState(encodeSyncState(s1)) + s2 = decodeSyncState(encodeSyncState(s2)) + assert.deepStrictEqual(s1.sharedHeads, [c2, c4].sort()) + assert.deepStrictEqual(s2.sharedHeads, [c2, c4].sort()) + + // n2 and n3 apply {c5, c6, c7, c8} + n3.set("_root","x",5); n3.commit("",0) + const change5 = n3.getLastLocalChange() + n3.set("_root","x",6); n3.commit("",0) + const change6 = n3.getLastLocalChange(), c6 = n3.getHeads()[0] + for (let i = 7; i <= 8; i++) { + n3.set("_root","x",i); n3.commit("",0) + } + const c8 = n3.getHeads()[0] + n2.applyChanges(n2.getChangesAdded(n3)) // merge() + + // Now n1 initiates a sync with n2, and n2 replies with {c5, c6}. n2 does not send {c7, c8} + msg = n1.generateSyncMessage(s1) + if (msg === null) { throw new RangeError("message should not be null") } + n2.receiveSyncMessage(s2, msg) + msg = n2.generateSyncMessage(s2) + if (msg === null) { throw new RangeError("message should not be null") } + decodedMsg = decodeSyncMessage(msg) + decodedMsg.changes = [change5, change6] + msg = encodeSyncMessage(decodedMsg) + const sentHashes: any = {} + + sentHashes[decodeChange(change5).hash] = true + sentHashes[decodeChange(change6).hash] = true + s2.sentHashes = sentHashes + n1.receiveSyncMessage(s1, msg) + assert.deepStrictEqual(s1.sharedHeads, [c2, c6].sort()) + + // n1 replies, confirming the receipt of {c5, c6} and requesting the remaining changes + msg = n1.generateSyncMessage(s1) + if (msg === null) { throw new RangeError("message should not be null") } + n2.receiveSyncMessage(s2, msg) + assert.deepStrictEqual(decodeSyncMessage(msg).need, [c8]) + assert.deepStrictEqual(decodeSyncMessage(msg).have[0].lastSync, [c2, c6].sort()) + assert.deepStrictEqual(s1.sharedHeads, [c2, c6].sort()) + 
assert.deepStrictEqual(s2.sharedHeads, [c2, c6].sort()) + + // n2 sends the remaining changes {c7, c8} + msg = n2.generateSyncMessage(s2) + if (msg === null) { throw new RangeError("message should not be null") } + n1.receiveSyncMessage(s1, msg) + assert.strictEqual(decodeSyncMessage(msg).changes.length, 2) + assert.deepStrictEqual(s1.sharedHeads, [c2, c8].sort()) + }) + }) + }) +}) diff --git a/rust/automerge-wasm/tsconfig.json b/automerge-wasm/tsconfig.json similarity index 71% rename from rust/automerge-wasm/tsconfig.json rename to automerge-wasm/tsconfig.json index 339eab93..1dc480a4 100644 --- a/rust/automerge-wasm/tsconfig.json +++ b/automerge-wasm/tsconfig.json @@ -11,9 +11,7 @@ "paths": { "dev": ["*"]}, "rootDir": "", "target": "es2016", - "types": ["mocha", "node"], - "typeRoots": ["./index.d.ts"] + "typeRoots": ["./dev/index.d.ts"] }, - "include": ["test/**/*.ts"], - "exclude": ["dist/**/*", "examples/**/*"] + "exclude": ["dist/**/*"] } diff --git a/rust/automerge/.gitignore b/automerge/.gitignore similarity index 100% rename from rust/automerge/.gitignore rename to automerge/.gitignore diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml new file mode 100644 index 00000000..03f7d9c6 --- /dev/null +++ b/automerge/Cargo.toml @@ -0,0 +1,42 @@ +[package] +name = "automerge" +version = "0.1.0" +edition = "2021" +license = "MIT" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[features] +optree-visualisation = ["dot"] +wasm = ["js-sys", "wasm-bindgen"] + +[dependencies] +hex = "^0.4.3" +leb128 = "^0.2.5" +sha2 = "^0.10.0" +rand = { version = "^0.8.4" } +thiserror = "^1.0.16" +itertools = "^0.10.3" +flate2 = "^1.0.22" +nonzero_ext = "^0.2.0" +uuid = { version = "^0.8.2", features=["v4", "wasm-bindgen", "serde"] } +smol_str = "^0.1.21" +tracing = { version = "^0.1.29", features = ["log"] } +fxhash = "^0.2.1" +tinyvec = { version = "^1.5.1", features = ["alloc"] } +unicode-segmentation = "1.7.1" +serde = 
{ version = "^1.0", features=["derive"] } +dot = { version = "0.1.4", optional = true } +js-sys = { version = "^0.3", optional = true } +wasm-bindgen = { version = "^0.2", optional = true } + +[dependencies.web-sys] +version = "^0.3.55" +features = ["console"] + +[dev-dependencies] +pretty_assertions = "1.0.0" +proptest = { version = "^1.0.0", default-features = false, features = ["std"] } +serde_json = { version = "^1.0.73", features=["float_roundtrip"], default-features=true } +maplit = { version = "^1.0" } +decorum = "0.3.1" diff --git a/rust/automerge/examples/README.md b/automerge/examples/README.md similarity index 100% rename from rust/automerge/examples/README.md rename to automerge/examples/README.md diff --git a/rust/automerge/examples/quickstart.rs b/automerge/examples/quickstart.rs similarity index 72% rename from rust/automerge/examples/quickstart.rs rename to automerge/examples/quickstart.rs index fcb23d5e..db0024c6 100644 --- a/rust/automerge/examples/quickstart.rs +++ b/automerge/examples/quickstart.rs @@ -2,7 +2,7 @@ use automerge::transaction::CommitOptions; use automerge::transaction::Transactable; use automerge::AutomergeError; use automerge::ObjType; -use automerge::{Automerge, ReadDoc, ROOT}; +use automerge::{Automerge, ROOT}; // Based on https://automerge.github.io/docs/quickstart fn main() { @@ -11,13 +11,13 @@ fn main() { .transact_with::<_, _, AutomergeError, _>( |_| CommitOptions::default().with_message("Add card".to_owned()), |tx| { - let cards = tx.put_object(ROOT, "cards", ObjType::List).unwrap(); + let cards = tx.set_object(ROOT, "cards", ObjType::List).unwrap(); let card1 = tx.insert_object(&cards, 0, ObjType::Map)?; - tx.put(&card1, "title", "Rewrite everything in Clojure")?; - tx.put(&card1, "done", false)?; + tx.set(&card1, "title", "Rewrite everything in Clojure")?; + tx.set(&card1, "done", false)?; let card2 = tx.insert_object(&cards, 0, ObjType::Map)?; - tx.put(&card2, "title", "Rewrite everything in Haskell")?; - 
tx.put(&card2, "done", false)?; + tx.set(&card2, "title", "Rewrite everything in Haskell")?; + tx.set(&card2, "done", false)?; Ok((cards, card1)) }, ) @@ -33,7 +33,7 @@ fn main() { doc1.transact_with::<_, _, AutomergeError, _>( |_| CommitOptions::default().with_message("Mark card as done".to_owned()), |tx| { - tx.put(&card1, "done", true)?; + tx.set(&card1, "done", true)?; Ok(()) }, ) @@ -42,7 +42,7 @@ fn main() { doc2.transact_with::<_, _, AutomergeError, _>( |_| CommitOptions::default().with_message("Delete card".to_owned()), |tx| { - tx.delete(&cards, 0)?; + tx.del(&cards, 0)?; Ok(()) }, ) @@ -50,8 +50,8 @@ fn main() { doc1.merge(&mut doc2).unwrap(); - for change in doc1.get_changes(&[]).unwrap() { - let length = doc1.length_at(&cards, &[change.hash()]); + for change in doc1.get_changes(&[]) { + let length = doc1.length_at(&cards, &[change.hash]); println!("{} {}", change.message().unwrap(), length); } } diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs new file mode 100644 index 00000000..dec1236c --- /dev/null +++ b/automerge/src/autocommit.rs @@ -0,0 +1,455 @@ +use crate::exid::ExId; +use crate::transaction::{CommitOptions, Transactable}; +use crate::{ + query, sync, transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, + ChangeHash, Keys, KeysAt, ObjType, Prop, ScalarValue, Value, +}; + +/// An automerge document that automatically manages transactions. +#[derive(Debug, Clone)] +pub struct AutoCommit { + doc: Automerge, + transaction: Option, +} + +impl Default for AutoCommit { + fn default() -> Self { + Self::new() + } +} + +impl AutoCommit { + pub fn new() -> Self { + Self { + doc: Automerge::new(), + transaction: None, + } + } + + // FIXME : temp + pub fn actor_to_str(&self, actor: usize) -> String { + self.doc.ops.m.actors.cache[actor].to_hex_string() + } + + /// Get the inner document. 
+ #[doc(hidden)] + pub fn document(&mut self) -> &Automerge { + self.ensure_transaction_closed(); + &self.doc + } + + pub fn with_actor(mut self, actor: ActorId) -> Self { + self.ensure_transaction_closed(); + self.doc.set_actor(actor); + self + } + + pub fn set_actor(&mut self, actor: ActorId) -> &mut Self { + self.ensure_transaction_closed(); + self.doc.set_actor(actor); + self + } + + pub fn get_actor(&self) -> &ActorId { + self.doc.get_actor() + } + + fn ensure_transaction_open(&mut self) { + if self.transaction.is_none() { + self.transaction = Some(self.doc.transaction_inner()); + } + } + + pub fn fork(&mut self) -> Self { + self.ensure_transaction_closed(); + Self { + doc: self.doc.fork(), + transaction: self.transaction.clone(), + } + } + + fn ensure_transaction_closed(&mut self) { + if let Some(tx) = self.transaction.take() { + tx.commit(&mut self.doc, None, None); + } + } + + pub fn load(data: &[u8]) -> Result { + let doc = Automerge::load(data)?; + Ok(Self { + doc, + transaction: None, + }) + } + + pub fn load_incremental(&mut self, data: &[u8]) -> Result, AutomergeError> { + self.ensure_transaction_closed(); + self.doc.load_incremental(data) + } + + pub fn apply_changes(&mut self, changes: Vec) -> Result, AutomergeError> { + self.ensure_transaction_closed(); + self.doc.apply_changes(changes) + } + + /// Takes all the changes in `other` which are not in `self` and applies them + pub fn merge(&mut self, other: &mut Self) -> Result, AutomergeError> { + self.ensure_transaction_closed(); + other.ensure_transaction_closed(); + self.doc.merge(&mut other.doc) + } + + pub fn save(&mut self) -> Vec { + self.ensure_transaction_closed(); + self.doc.save() + } + + // should this return an empty vec instead of None? 
+ pub fn save_incremental(&mut self) -> Vec { + self.ensure_transaction_closed(); + self.doc.save_incremental() + } + + pub fn get_missing_deps(&mut self, heads: &[ChangeHash]) -> Vec { + self.ensure_transaction_closed(); + self.doc.get_missing_deps(heads) + } + + pub fn get_last_local_change(&mut self) -> Option<&Change> { + self.ensure_transaction_closed(); + self.doc.get_last_local_change() + } + + pub fn get_changes(&mut self, have_deps: &[ChangeHash]) -> Vec<&Change> { + self.ensure_transaction_closed(); + self.doc.get_changes(have_deps) + } + + pub fn get_change_by_hash(&mut self, hash: &ChangeHash) -> Option<&Change> { + self.ensure_transaction_closed(); + self.doc.get_change_by_hash(hash) + } + + pub fn get_changes_added<'a>(&mut self, other: &'a mut Self) -> Vec<&'a Change> { + self.ensure_transaction_closed(); + other.ensure_transaction_closed(); + self.doc.get_changes_added(&other.doc) + } + + pub fn import(&self, s: &str) -> Result { + self.doc.import(s) + } + + pub fn dump(&self) { + self.doc.dump() + } + + pub fn generate_sync_message(&mut self, sync_state: &mut sync::State) -> Option { + self.ensure_transaction_closed(); + self.doc.generate_sync_message(sync_state) + } + + pub fn receive_sync_message( + &mut self, + sync_state: &mut sync::State, + message: sync::Message, + ) -> Result, AutomergeError> { + self.ensure_transaction_closed(); + self.doc.receive_sync_message(sync_state, message) + } + + #[cfg(feature = "optree-visualisation")] + pub fn visualise_optree(&self) -> String { + self.doc.visualise_optree() + } + + /// Get the current heads of the document. + /// + /// This closes the transaction first, if one is in progress. + pub fn get_heads(&mut self) -> Vec { + self.ensure_transaction_closed(); + self.doc.get_heads() + } + + pub fn commit(&mut self) -> ChangeHash { + self.commit_with(CommitOptions::default()) + } + + /// Commit the current operations with some options. 
+ /// + /// ``` + /// # use automerge::transaction::CommitOptions; + /// # use automerge::transaction::Transactable; + /// # use automerge::ROOT; + /// # use automerge::AutoCommit; + /// # use automerge::ObjType; + /// # use std::time::SystemTime; + /// let mut doc = AutoCommit::new(); + /// doc.set_object(&ROOT, "todos", ObjType::List).unwrap(); + /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as + /// i64; + /// doc.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); + /// ``` + pub fn commit_with(&mut self, options: CommitOptions) -> ChangeHash { + // ensure that even no changes triggers a change + self.ensure_transaction_open(); + let tx = self.transaction.take().unwrap(); + tx.commit(&mut self.doc, options.message, options.time) + } + + pub fn rollback(&mut self) -> usize { + self.transaction + .take() + .map(|tx| tx.rollback(&mut self.doc)) + .unwrap_or(0) + } +} + +impl Transactable for AutoCommit { + fn pending_ops(&self) -> usize { + self.transaction + .as_ref() + .map(|t| t.pending_ops()) + .unwrap_or(0) + } + + // KeysAt::() + // LenAt::() + // PropAt::() + // NthAt::() + + fn keys>(&self, obj: O) -> Keys { + self.doc.keys(obj) + } + + fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt { + self.doc.keys_at(obj, heads) + } + + fn length>(&self, obj: O) -> usize { + self.doc.length(obj) + } + + fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { + self.doc.length_at(obj, heads) + } + + fn object_type>(&self, obj: O) -> Option { + self.doc.object_type(obj) + } + + // set(obj, prop, value) - value can be scalar or objtype + // del(obj, prop) + // inc(obj, prop, value) + // insert(obj, index, value) + + /// Set the value of property `P` to value `V` in object `obj`. + /// + /// # Returns + /// + /// The opid of the operation which was created, or None if this operation doesn't change the + /// document or create a new object. 
+ /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + fn set, P: Into, V: Into>( + &mut self, + obj: O, + prop: P, + value: V, + ) -> Result<(), AutomergeError> { + self.ensure_transaction_open(); + let tx = self.transaction.as_mut().unwrap(); + tx.set(&mut self.doc, obj.as_ref(), prop, value) + } + + fn set_object, P: Into>( + &mut self, + obj: O, + prop: P, + value: ObjType, + ) -> Result { + self.ensure_transaction_open(); + let tx = self.transaction.as_mut().unwrap(); + tx.set_object(&mut self.doc, obj.as_ref(), prop, value) + } + + fn insert, V: Into>( + &mut self, + obj: O, + index: usize, + value: V, + ) -> Result<(), AutomergeError> { + self.ensure_transaction_open(); + let tx = self.transaction.as_mut().unwrap(); + tx.insert(&mut self.doc, obj.as_ref(), index, value) + } + + #[allow(clippy::too_many_arguments)] + fn mark>( + &mut self, + obj: O, + start: usize, + expand_start: bool, + end: usize, + expand_end: bool, + mark: &str, + value: ScalarValue, + ) -> Result<(), AutomergeError> { + self.ensure_transaction_open(); + let tx = self.transaction.as_mut().unwrap(); + tx.mark( + &mut self.doc, + obj, + start, + expand_start, + end, + expand_end, + mark, + value, + ) + } + + fn unmark>(&mut self, obj: O, mark: O) -> Result<(), AutomergeError> { + self.ensure_transaction_open(); + let tx = self.transaction.as_mut().unwrap(); + tx.unmark(&mut self.doc, obj, mark) + } + + fn insert_object( + &mut self, + obj: &ExId, + index: usize, + value: ObjType, + ) -> Result { + self.ensure_transaction_open(); + let tx = self.transaction.as_mut().unwrap(); + tx.insert_object(&mut self.doc, obj, index, value) + } + + fn inc, P: Into>( + &mut self, + obj: O, + prop: P, + value: i64, + ) -> Result<(), AutomergeError> { + self.ensure_transaction_open(); + let tx = self.transaction.as_mut().unwrap(); + tx.inc(&mut self.doc, obj.as_ref(), 
prop, value) + } + + fn del, P: Into>( + &mut self, + obj: O, + prop: P, + ) -> Result<(), AutomergeError> { + self.ensure_transaction_open(); + let tx = self.transaction.as_mut().unwrap(); + tx.del(&mut self.doc, obj.as_ref(), prop) + } + + /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert + /// the new elements + fn splice, V: IntoIterator>( + &mut self, + obj: O, + pos: usize, + del: usize, + vals: V, + ) -> Result<(), AutomergeError> { + self.ensure_transaction_open(); + let tx = self.transaction.as_mut().unwrap(); + tx.splice(&mut self.doc, obj.as_ref(), pos, del, vals) + } + + fn text>(&self, obj: O) -> Result { + self.doc.text(obj) + } + + fn text_at>( + &self, + obj: O, + heads: &[ChangeHash], + ) -> Result { + self.doc.text_at(obj, heads) + } + + fn list>(&self, obj: O) -> Result, AutomergeError> { + self.doc.list(obj) + } + + fn list_at>( + &self, + obj: O, + heads: &[ChangeHash], + ) -> Result, AutomergeError> { + self.doc.list_at(obj, heads) + } + + fn spans>(&self, obj: O) -> Result, AutomergeError> { + self.doc.spans(obj) + } + + fn raw_spans>(&self, obj: O) -> Result, AutomergeError> { + self.doc.raw_spans(obj) + } + + fn attribute>( + &self, + obj: O, + baseline: &[ChangeHash], + change_sets: &[Vec], + ) -> Result, AutomergeError> { + self.doc.attribute(obj, baseline, change_sets) + } + + fn attribute2>( + &self, + obj: O, + baseline: &[ChangeHash], + change_sets: &[Vec], + ) -> Result, AutomergeError> { + self.doc.attribute2(obj, baseline, change_sets) + } + + // TODO - I need to return these OpId's here **only** to get + // the legacy conflicts format of { [opid]: value } + // Something better? 
+ fn value, P: Into>( + &self, + obj: O, + prop: P, + ) -> Result, AutomergeError> { + self.doc.value(obj, prop) + } + + fn value_at, P: Into>( + &self, + obj: O, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError> { + self.doc.value_at(obj, prop, heads) + } + + fn values, P: Into>( + &self, + obj: O, + prop: P, + ) -> Result, AutomergeError> { + self.doc.values(obj, prop) + } + + fn values_at, P: Into>( + &self, + obj: O, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError> { + self.doc.values_at(obj, prop, heads) + } +} diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs new file mode 100644 index 00000000..a1163b1d --- /dev/null +++ b/automerge/src/automerge.rs @@ -0,0 +1,1616 @@ +use std::collections::{HashMap, HashSet, VecDeque}; +use std::num::NonZeroU64; + +use crate::change::encode_document; +use crate::exid::ExId; +use crate::keys::Keys; +use crate::op_set::OpSet; +use crate::transaction::{self, CommitOptions, Failure, Success, Transaction, TransactionInner}; +use crate::types::{ + ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, + ScalarValue, Value, +}; +use crate::KeysAt; +use crate::{legacy, query, types, ObjType}; +use crate::{AutomergeError, Change, Prop}; + +#[derive(Debug, Clone, PartialEq)] +pub(crate) enum Actor { + Unused(ActorId), + Cached(usize), +} + +/// An automerge document. +#[derive(Debug, Clone)] +pub struct Automerge { + pub(crate) queue: Vec, + pub(crate) history: Vec, + pub(crate) history_index: HashMap, + pub(crate) states: HashMap>, + pub(crate) deps: HashSet, + pub(crate) saved: Vec, + pub(crate) ops: OpSet, + pub(crate) actor: Actor, + pub(crate) max_op: u64, +} + +impl Automerge { + /// Create a new document with a random actor id. 
+ pub fn new() -> Self { + Automerge { + queue: vec![], + history: vec![], + history_index: HashMap::new(), + states: HashMap::new(), + ops: Default::default(), + deps: Default::default(), + saved: Default::default(), + actor: Actor::Unused(ActorId::random()), + max_op: 0, + } + } + + /// Set the actor id for this document. + pub fn with_actor(mut self, actor: ActorId) -> Self { + self.actor = Actor::Unused(actor); + self + } + + /// Set the actor id for this document. + pub fn set_actor(&mut self, actor: ActorId) -> &mut Self { + self.actor = Actor::Unused(actor); + self + } + + /// Get the current actor id of this document. + pub fn get_actor(&self) -> &ActorId { + match &self.actor { + Actor::Unused(actor) => actor, + Actor::Cached(index) => self.ops.m.actors.get(*index), + } + } + + pub(crate) fn get_actor_index(&mut self) -> usize { + match &mut self.actor { + Actor::Unused(actor) => { + let index = self + .ops + .m + .actors + .cache(std::mem::replace(actor, ActorId::from(&[][..]))); + self.actor = Actor::Cached(index); + index + } + Actor::Cached(index) => *index, + } + } + + /// Start a transaction. + pub fn transaction(&mut self) -> Transaction { + Transaction { + inner: Some(self.transaction_inner()), + doc: self, + } + } + + pub(crate) fn transaction_inner(&mut self) -> TransactionInner { + let actor = self.get_actor_index(); + let seq = self.states.get(&actor).map_or(0, |v| v.len()) as u64 + 1; + let mut deps = self.get_heads(); + if seq > 1 { + let last_hash = self.get_hash(actor, seq - 1).unwrap(); + if !deps.contains(&last_hash) { + deps.push(last_hash); + } + } + + TransactionInner { + actor, + seq, + // SAFETY: this unwrap is safe as we always add 1 + start_op: NonZeroU64::new(self.max_op + 1).unwrap(), + time: 0, + message: None, + extra_bytes: Default::default(), + hash: None, + operations: vec![], + deps, + } + } + + /// Run a transaction on this document in a closure, automatically handling commit or rollback + /// afterwards. 
+ pub fn transact(&mut self, f: F) -> transaction::Result + where + F: FnOnce(&mut Transaction) -> Result, + { + let mut tx = self.transaction(); + let result = f(&mut tx); + match result { + Ok(result) => Ok(Success { + result, + hash: tx.commit(), + }), + Err(error) => Err(Failure { + error, + cancelled: tx.rollback(), + }), + } + } + + /// Like [`Self::transact`] but with a function for generating the commit options. + pub fn transact_with(&mut self, c: C, f: F) -> transaction::Result + where + F: FnOnce(&mut Transaction) -> Result, + C: FnOnce(&O) -> CommitOptions, + { + let mut tx = self.transaction(); + let result = f(&mut tx); + match result { + Ok(result) => { + let commit_options = c(&result); + Ok(Success { + result, + hash: tx.commit_with(commit_options), + }) + } + Err(error) => Err(Failure { + error, + cancelled: tx.rollback(), + }), + } + } + + /// Fork this document at the current point for use by a different actor. + pub fn fork(&self) -> Self { + let mut f = self.clone(); + f.set_actor(ActorId::random()); + f + } + + fn insert_op(&mut self, obj: &ObjId, op: Op) -> Op { + let q = self.ops.search(obj, query::SeekOp::new(&op)); + + for i in q.succ { + self.ops.replace(obj, i, |old_op| old_op.add_succ(&op)); + } + + if !op.is_del() { + self.ops.insert(q.pos, obj, op.clone()); + } + op + } + + // KeysAt::() + // LenAt::() + // PropAt::() + // NthAt::() + + /// Get the keys of the object `obj`. + /// + /// For a map this returns the keys of the map. + /// For a list this returns the element ids (opids) encoded as strings. + pub fn keys>(&self, obj: O) -> Keys { + if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { + let iter_keys = self.ops.keys(obj); + Keys::new(self, iter_keys) + } else { + Keys::new(self, None) + } + } + + /// Historical version of [`keys`](Self::keys). 
+ pub fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt { + if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { + let clock = self.clock_at(heads); + KeysAt::new(self, self.ops.keys_at(obj, clock)) + } else { + KeysAt::new(self, None) + } + } + + /// Get the length of the given object. + pub fn length>(&self, obj: O) -> usize { + if let Ok(inner_obj) = self.exid_to_obj(obj.as_ref()) { + match self.ops.object_type(&inner_obj) { + Some(ObjType::Map) | Some(ObjType::Table) => self.keys(obj).count(), + Some(ObjType::List) | Some(ObjType::Text) => { + self.ops.search(&inner_obj, query::Len::new()).len + } + None => 0, + } + } else { + 0 + } + } + + /// Historical version of [`length`](Self::length). + pub fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { + if let Ok(inner_obj) = self.exid_to_obj(obj.as_ref()) { + let clock = self.clock_at(heads); + match self.ops.object_type(&inner_obj) { + Some(ObjType::Map) | Some(ObjType::Table) => self.keys_at(obj, heads).count(), + Some(ObjType::List) | Some(ObjType::Text) => { + self.ops.search(&inner_obj, query::LenAt::new(clock)).len + } + None => 0, + } + } else { + 0 + } + } + + pub fn object_type>(&self, obj: O) -> Option { + let obj = self.exid_to_obj(obj.as_ref()).ok()?; + self.ops.object_type(&obj) + } + + pub(crate) fn exid_to_obj(&self, id: &ExId) -> Result { + match id { + ExId::Root => Ok(ObjId::root()), + ExId::Id(ctr, actor, idx) => { + // do a direct get here b/c this could be foriegn and not be within the array + // bounds + if self.ops.m.actors.cache.get(*idx) == Some(actor) { + Ok(ObjId(OpId(*ctr, *idx))) + } else { + // FIXME - make a real error + let idx = self + .ops + .m + .actors + .lookup(actor) + .ok_or(AutomergeError::Fail)?; + Ok(ObjId(OpId(*ctr, idx))) + } + } + } + } + + pub(crate) fn id_to_exid(&self, id: OpId) -> ExId { + if id == types::ROOT { + ExId::Root + } else { + ExId::Id(id.0, self.ops.m.actors.cache[id.1].clone(), id.1) + } + } + + /// Get the string represented by the 
given text object. + pub fn text>(&self, obj: O) -> Result { + let obj = self.exid_to_obj(obj.as_ref())?; + let query = self.ops.search(&obj, query::ListVals::new()); + let mut buffer = String::new(); + for q in &query.ops { + if let OpType::Set(ScalarValue::Str(s)) = &q.action { + buffer.push_str(s); + } else { + buffer.push('\u{fffc}'); + } + } + Ok(buffer) + } + + /// Historical version of [`text`](Self::text). + pub fn text_at>( + &self, + obj: O, + heads: &[ChangeHash], + ) -> Result { + let obj = self.exid_to_obj(obj.as_ref())?; + let clock = self.clock_at(heads); + let query = self.ops.search(&obj, query::ListValsAt::new(clock)); + let mut buffer = String::new(); + for q in &query.ops { + if let OpType::Set(ScalarValue::Str(s)) = &q.action { + buffer.push_str(s); + } + } + Ok(buffer) + } + + pub fn list>(&self, obj: O) -> Result, AutomergeError> { + let obj = self.exid_to_obj(obj.as_ref())?; + let query = self.ops.search(&obj, query::ListVals::new()); + Ok(query + .ops + .iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect()) + } + + pub fn list_at>( + &self, + obj: O, + heads: &[ChangeHash], + ) -> Result, AutomergeError> { + let obj = self.exid_to_obj(obj.as_ref())?; + let clock = self.clock_at(heads); + let query = self.ops.search(&obj, query::ListValsAt::new(clock)); + Ok(query + .ops + .iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect()) + } + + pub fn spans>(&self, obj: O) -> Result, AutomergeError> { + let obj = self.exid_to_obj(obj.as_ref())?; + let mut query = self.ops.search(&obj, query::Spans::new()); + query.check_marks(); + Ok(query.spans) + } + + pub fn attribute>( + &self, + obj: O, + baseline: &[ChangeHash], + change_sets: &[Vec], + ) -> Result, AutomergeError> { + let obj = self.exid_to_obj(obj.as_ref())?; + let baseline = self.clock_at(baseline); + let change_sets: Vec = change_sets.iter().map(|p| self.clock_at(p)).collect(); + let mut query = self + .ops + .search(&obj, query::Attribute::new(baseline, 
change_sets)); + query.finish(); + Ok(query.change_sets) + } + + pub fn attribute2>( + &self, + obj: O, + baseline: &[ChangeHash], + change_sets: &[Vec], + ) -> Result, AutomergeError> { + let obj = self.exid_to_obj(obj.as_ref())?; + let baseline = self.clock_at(baseline); + let change_sets: Vec = change_sets.iter().map(|p| self.clock_at(p)).collect(); + let mut query = self + .ops + .search(&obj, query::Attribute2::new(baseline, change_sets)); + query.finish(); + Ok(query.change_sets) + } + + pub fn raw_spans>( + &self, + obj: O, + ) -> Result, AutomergeError> { + let obj = self.exid_to_obj(obj.as_ref())?; + let query = self.ops.search(&obj, query::RawSpans::new()); + let result = query + .spans + .into_iter() + .map(|s| query::SpanInfo { + id: self.id_to_exid(s.id), + start: s.start, + end: s.end, + span_type: s.name, + value: s.value, + }) + .collect(); + Ok(result) + } + + // TODO - I need to return these OpId's here **only** to get + // the legacy conflicts format of { [opid]: value } + // Something better? + /// Get a value out of the document. + /// + /// Returns both the value and the id of the operation that created it, useful for handling + /// conflicts and serves as the object id if the value is an object. + pub fn value, P: Into>( + &self, + obj: O, + prop: P, + ) -> Result, AutomergeError> { + Ok(self.values(obj, prop.into())?.last().cloned()) + } + + /// Historical version of [`value`](Self::value). + pub fn value_at, P: Into>( + &self, + obj: O, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError> { + Ok(self.values_at(obj, prop, heads)?.last().cloned()) + } + + /// Get all values out of the document at this prop that conflict. + /// + /// Returns both the value and the id of the operation that created it, useful for handling + /// conflicts and serves as the object id if the value is an object. 
+ pub fn values, P: Into>( + &self, + obj: O, + prop: P, + ) -> Result, AutomergeError> { + let obj = self.exid_to_obj(obj.as_ref())?; + let result = match prop.into() { + Prop::Map(p) => { + let prop = self.ops.m.props.lookup(&p); + if let Some(p) = prop { + self.ops + .search(&obj, query::Prop::new(p)) + .ops + .into_iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect() + } else { + vec![] + } + } + Prop::Seq(n) => self + .ops + .search(&obj, query::Nth::new(n)) + .ops + .into_iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect(), + }; + Ok(result) + } + + /// Historical version of [`values`](Self::values). + pub fn values_at, P: Into>( + &self, + obj: O, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError> { + let prop = prop.into(); + let obj = self.exid_to_obj(obj.as_ref())?; + let clock = self.clock_at(heads); + let result = match prop { + Prop::Map(p) => { + let prop = self.ops.m.props.lookup(&p); + if let Some(p) = prop { + self.ops + .search(&obj, query::PropAt::new(p, clock)) + .ops + .into_iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect() + } else { + vec![] + } + } + Prop::Seq(n) => self + .ops + .search(&obj, query::NthAt::new(n, clock)) + .ops + .into_iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect(), + }; + Ok(result) + } + + /// Load a document. + pub fn load(data: &[u8]) -> Result { + let changes = Change::load_document(data)?; + let mut doc = Self::new(); + doc.apply_changes(changes)?; + Ok(doc) + } + + /// Load an incremental save of a document. 
+ pub fn load_incremental(&mut self, data: &[u8]) -> Result, AutomergeError> { + let changes = Change::load_document(data)?; + self.apply_changes(changes) + } + + fn duplicate_seq(&self, change: &Change) -> bool { + let mut dup = false; + if let Some(actor_index) = self.ops.m.actors.lookup(change.actor_id()) { + if let Some(s) = self.states.get(&actor_index) { + dup = s.len() >= change.seq as usize; + } + } + dup + } + + /// Apply changes to this document. + pub fn apply_changes(&mut self, changes: Vec) -> Result, AutomergeError> { + let mut objs = HashSet::new(); + for c in changes { + if !self.history_index.contains_key(&c.hash) { + if self.duplicate_seq(&c) { + return Err(AutomergeError::DuplicateSeqNumber( + c.seq, + c.actor_id().clone(), + )); + } + if self.is_causally_ready(&c) { + self.apply_change(c, &mut objs); + } else { + self.queue.push(c); + } + } + } + while let Some(c) = self.pop_next_causally_ready_change() { + self.apply_change(c, &mut objs); + } + Ok(objs.into_iter().map(|obj| self.id_to_exid(obj.0)).collect()) + } + + /// Apply a single change to this document. 
+ fn apply_change(&mut self, change: Change, objs: &mut HashSet) { + let ops = self.import_ops(&change); + self.update_history(change, ops.len()); + for (obj, op) in ops { + objs.insert(obj); + self.insert_op(&obj, op); + } + } + + fn is_causally_ready(&self, change: &Change) -> bool { + change + .deps + .iter() + .all(|d| self.history_index.contains_key(d)) + } + + fn pop_next_causally_ready_change(&mut self) -> Option { + let mut index = 0; + while index < self.queue.len() { + if self.is_causally_ready(&self.queue[index]) { + return Some(self.queue.swap_remove(index)); + } + index += 1; + } + None + } + + fn import_ops(&mut self, change: &Change) -> Vec<(ObjId, Op)> { + change + .iter_ops() + .enumerate() + .map(|(i, c)| { + let actor = self.ops.m.actors.cache(change.actor_id().clone()); + let id = OpId(change.start_op.get() + i as u64, actor); + let obj = match c.obj { + legacy::ObjectId::Root => ObjId::root(), + legacy::ObjectId::Id(id) => ObjId(OpId(id.0, self.ops.m.actors.cache(id.1))), + }; + let pred = c + .pred + .iter() + .map(|i| OpId(i.0, self.ops.m.actors.cache(i.1.clone()))) + .collect(); + let key = match &c.key { + legacy::Key::Map(n) => Key::Map(self.ops.m.props.cache(n.to_string())), + legacy::Key::Seq(legacy::ElementId::Head) => Key::Seq(types::HEAD), + legacy::Key::Seq(legacy::ElementId::Id(i)) => { + Key::Seq(ElemId(OpId(i.0, self.ops.m.actors.cache(i.1.clone())))) + } + }; + ( + obj, + Op { + id, + action: c.action, + key, + succ: Default::default(), + pred, + insert: c.insert, + }, + ) + }) + .collect() + } + + /// Takes all the changes in `other` which are not in `self` and applies them + pub fn merge(&mut self, other: &mut Self) -> Result, AutomergeError> { + // TODO: Make this fallible and figure out how to do this transactionally + let changes = self + .get_changes_added(other) + .into_iter() + .cloned() + .collect::>(); + self.apply_changes(changes) + } + + /// Save the entirety of this document in a compact form. 
+ pub fn save(&mut self) -> Vec { + let heads = self.get_heads(); + let c = self.history.iter(); + let ops = self.ops.iter(); + let bytes = encode_document(heads, c, ops, &self.ops.m.actors, &self.ops.m.props.cache); + self.saved = self.get_heads(); + bytes + } + + /// Save the changes since last save in a compact form. + pub fn save_incremental(&mut self) -> Vec { + let changes = self.get_changes(self.saved.as_slice()); + let mut bytes = vec![]; + for c in changes { + bytes.extend(c.raw_bytes()); + } + if !bytes.is_empty() { + self.saved = self.get_heads() + } + bytes + } + + /// Filter the changes down to those that are not transitive dependencies of the heads. + /// + /// Thus a graph with these heads has not seen the remaining changes. + pub(crate) fn filter_changes(&self, heads: &[ChangeHash], changes: &mut HashSet) { + // Reduce the working set to find to those which we may be able to find. + // This filters out those hashes that are successors of or concurrent with all of the + // heads. + // This can help in avoiding traversing the entire graph back to the roots when we try to + // search for a hash we can know won't be found there. 
+ let max_head_index = heads + .iter() + .map(|h| self.history_index.get(h).unwrap_or(&0)) + .max() + .unwrap_or(&0); + let mut may_find: HashSet = changes + .iter() + .filter(|hash| { + let change_index = self.history_index.get(hash).unwrap_or(&0); + change_index <= max_head_index + }) + .copied() + .collect(); + + if may_find.is_empty() { + return; + } + + let mut queue: VecDeque<_> = heads.iter().collect(); + let mut seen = HashSet::new(); + while let Some(hash) = queue.pop_front() { + if seen.contains(hash) { + continue; + } + seen.insert(hash); + + let removed = may_find.remove(hash); + changes.remove(hash); + if may_find.is_empty() { + break; + } + + for dep in self + .history_index + .get(hash) + .and_then(|i| self.history.get(*i)) + .map(|c| c.deps.as_slice()) + .unwrap_or_default() + { + // if we just removed something from our hashes then it is likely there is more + // down here so do a quick inspection on the children. + // When we don't remove anything it is less likely that there is something down + // that chain so delay it. + if removed { + queue.push_front(dep); + } else { + queue.push_back(dep); + } + } + } + } + + /// Get the hashes of the changes in this document that aren't transitive dependencies of the + /// given `heads`. 
+ pub fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec { + let in_queue: HashSet<_> = self.queue.iter().map(|change| change.hash).collect(); + let mut missing = HashSet::new(); + + for head in self.queue.iter().flat_map(|change| &change.deps) { + if !self.history_index.contains_key(head) { + missing.insert(head); + } + } + + for head in heads { + if !self.history_index.contains_key(head) { + missing.insert(head); + } + } + + let mut missing = missing + .into_iter() + .filter(|hash| !in_queue.contains(hash)) + .copied() + .collect::>(); + missing.sort(); + missing + } + + fn get_changes_fast(&self, have_deps: &[ChangeHash]) -> Option> { + if have_deps.is_empty() { + return Some(self.history.iter().collect()); + } + + let lowest_idx = have_deps + .iter() + .filter_map(|h| self.history_index.get(h)) + .min()? + + 1; + + let mut missing_changes = vec![]; + let mut has_seen: HashSet<_> = have_deps.iter().collect(); + for change in &self.history[lowest_idx..] { + let deps_seen = change.deps.iter().filter(|h| has_seen.contains(h)).count(); + if deps_seen > 0 { + if deps_seen != change.deps.len() { + // future change depends on something we haven't seen - fast path cant work + return None; + } + missing_changes.push(change); + has_seen.insert(&change.hash); + } + } + + // if we get to the end and there is a head we haven't seen then fast path cant work + if self.get_heads().iter().all(|h| has_seen.contains(h)) { + Some(missing_changes) + } else { + None + } + } + + fn get_changes_slow(&self, have_deps: &[ChangeHash]) -> Vec<&Change> { + let mut stack: Vec<_> = have_deps.iter().collect(); + let mut has_seen = HashSet::new(); + while let Some(hash) = stack.pop() { + if has_seen.contains(&hash) { + continue; + } + if let Some(change) = self + .history_index + .get(hash) + .and_then(|i| self.history.get(*i)) + { + stack.extend(change.deps.iter()); + } + has_seen.insert(hash); + } + self.history + .iter() + .filter(|change| !has_seen.contains(&change.hash)) + .collect() 
+ } + + /// Get the last change this actor made to the document. + pub fn get_last_local_change(&self) -> Option<&Change> { + return self + .history + .iter() + .rev() + .find(|c| c.actor_id() == self.get_actor()); + } + + pub fn get_changes(&self, have_deps: &[ChangeHash]) -> Vec<&Change> { + if let Some(changes) = self.get_changes_fast(have_deps) { + changes + } else { + self.get_changes_slow(have_deps) + } + } + + fn clock_at(&self, heads: &[ChangeHash]) -> Clock { + let mut clock = Clock::new(); + let mut seen = HashSet::new(); + let mut to_see = heads.to_vec(); + // FIXME - faster + while let Some(hash) = to_see.pop() { + if let Some(c) = self.get_change_by_hash(&hash) { + for h in &c.deps { + if !seen.contains(h) { + to_see.push(*h); + } + } + let actor = self.ops.m.actors.lookup(c.actor_id()).unwrap(); + clock.include(actor, c.max_op()); + seen.insert(hash); + } + } + clock + } + + /// Get a change by its hash. + pub fn get_change_by_hash(&self, hash: &ChangeHash) -> Option<&Change> { + self.history_index + .get(hash) + .and_then(|index| self.history.get(*index)) + } + + /// Get the changes that the other document added compared to this document. + pub fn get_changes_added<'a>(&self, other: &'a Self) -> Vec<&'a Change> { + // Depth-first traversal from the heads through the dependency graph, + // until we reach a change that is already present in other + let mut stack: Vec<_> = other.get_heads(); + let mut seen_hashes = HashSet::new(); + let mut added_change_hashes = Vec::new(); + while let Some(hash) = stack.pop() { + if !seen_hashes.contains(&hash) && self.get_change_by_hash(&hash).is_none() { + seen_hashes.insert(hash); + added_change_hashes.push(hash); + if let Some(change) = other.get_change_by_hash(&hash) { + stack.extend(&change.deps); + } + } + } + // Return those changes in the reverse of the order in which the depth-first search + // found them. This is not necessarily a topological sort, but should usually be close. 
+ added_change_hashes.reverse(); + added_change_hashes + .into_iter() + .filter_map(|h| other.get_change_by_hash(&h)) + .collect() + } + + /// Get the heads of this document. + pub fn get_heads(&self) -> Vec { + let mut deps: Vec<_> = self.deps.iter().copied().collect(); + deps.sort_unstable(); + deps + } + + fn get_hash(&self, actor: usize, seq: u64) -> Result { + self.states + .get(&actor) + .and_then(|v| v.get(seq as usize - 1)) + .and_then(|&i| self.history.get(i)) + .map(|c| c.hash) + .ok_or(AutomergeError::InvalidSeq(seq)) + } + + pub(crate) fn update_history(&mut self, change: Change, num_ops: usize) -> usize { + self.max_op = std::cmp::max(self.max_op, change.start_op.get() + num_ops as u64 - 1); + + self.update_deps(&change); + + let history_index = self.history.len(); + + self.states + .entry(self.ops.m.actors.cache(change.actor_id().clone())) + .or_default() + .push(history_index); + + self.history_index.insert(change.hash, history_index); + self.history.push(change); + + history_index + } + + fn update_deps(&mut self, change: &Change) { + for d in &change.deps { + self.deps.remove(d); + } + self.deps.insert(change.hash); + } + + pub fn import(&self, s: &str) -> Result { + if s == "_root" { + Ok(ExId::Root) + } else { + let n = s + .find('@') + .ok_or_else(|| AutomergeError::InvalidOpId(s.to_owned()))?; + let counter = s[0..n] + .parse() + .map_err(|_| AutomergeError::InvalidOpId(s.to_owned()))?; + let actor = ActorId::from(hex::decode(&s[(n + 1)..]).unwrap()); + let actor = self + .ops + .m + .actors + .lookup(&actor) + .ok_or_else(|| AutomergeError::ForeignObjId(s.to_owned()))?; + Ok(ExId::Id( + counter, + self.ops.m.actors.cache[actor].clone(), + actor, + )) + } + } + + pub(crate) fn to_string(&self, id: E) -> String { + match id.export() { + Export::Id(id) => format!("{}@{}", id.counter(), self.ops.m.actors[id.actor()]), + Export::Prop(index) => self.ops.m.props[index].clone(), + Export::Special(s) => s, + } + } + + pub fn dump(&self) { + log!( + " 
{:12} {:12} {:12} {} {} {}", + "id", + "obj", + "key", + "value", + "pred", + "succ" + ); + for (obj, op) in self.ops.iter() { + let id = self.to_string(op.id); + let obj = self.to_string(obj); + let key = match op.key { + Key::Map(n) => self.ops.m.props[n].clone(), + Key::Seq(n) => self.to_string(n), + }; + let value: String = match &op.action { + OpType::Set(value) => format!("{}", value), + OpType::Make(obj) => format!("make({})", obj), + OpType::Inc(obj) => format!("inc({})", obj), + OpType::MarkBegin(m) => format!("mark({}={})", m.name, m.value), + OpType::MarkEnd(_) => "/mark".into(), + OpType::Del => format!("del{}", 0), + }; + let pred: Vec<_> = op.pred.iter().map(|id| self.to_string(*id)).collect(); + let succ: Vec<_> = op.succ.iter().map(|id| self.to_string(*id)).collect(); + log!( + " {:12} {:12} {:12} {} {:?} {:?}", + id, + obj, + key, + value, + pred, + succ + ); + } + } + + #[cfg(feature = "optree-visualisation")] + pub fn visualise_optree(&self) -> String { + self.ops.visualise() + } +} + +impl Default for Automerge { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use itertools::Itertools; + use pretty_assertions::assert_eq; + + use super::*; + use crate::op_set::B; + use crate::transaction::Transactable; + use crate::*; + use std::convert::TryInto; + + #[test] + fn insert_op() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set_actor(ActorId::random()); + let mut tx = doc.transaction(); + tx.set(ROOT, "hello", "world")?; + tx.value(ROOT, "hello")?; + tx.commit(); + Ok(()) + } + + #[test] + fn test_set() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + // setting a scalar value shouldn't return an opid as no object was created. + tx.set(ROOT, "a", 1)?; + + // setting the same value shouldn't return an opid as there is no change. 
+ tx.set(ROOT, "a", 1)?; + + assert_eq!(tx.pending_ops(), 1); + + let map = tx.set_object(ROOT, "b", ObjType::Map)?; + // object already exists at b but setting a map again overwrites it so we get an opid. + tx.set(map, "a", 2)?; + + tx.set_object(ROOT, "b", ObjType::Map)?; + + assert_eq!(tx.pending_ops(), 4); + let map = tx.value(ROOT, "b").unwrap().unwrap().1; + assert_eq!(tx.value(&map, "a")?, None); + + tx.commit(); + Ok(()) + } + + #[test] + fn test_list() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set_actor(ActorId::random()); + let mut tx = doc.transaction(); + let list_id = tx.set_object(ROOT, "items", ObjType::List)?; + tx.set(ROOT, "zzz", "zzzval")?; + assert!(tx.value(ROOT, "items")?.unwrap().1 == list_id); + tx.insert(&list_id, 0, "a")?; + tx.insert(&list_id, 0, "b")?; + tx.insert(&list_id, 2, "c")?; + tx.insert(&list_id, 1, "d")?; + assert!(tx.value(&list_id, 0)?.unwrap().0 == "b".into()); + assert!(tx.value(&list_id, 1)?.unwrap().0 == "d".into()); + assert!(tx.value(&list_id, 2)?.unwrap().0 == "a".into()); + assert!(tx.value(&list_id, 3)?.unwrap().0 == "c".into()); + assert!(tx.length(&list_id) == 4); + tx.commit(); + doc.save(); + Ok(()) + } + + #[test] + fn test_del() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set_actor(ActorId::random()); + let mut tx = doc.transaction(); + tx.set(ROOT, "xxx", "xxx")?; + assert!(!tx.values(ROOT, "xxx")?.is_empty()); + tx.del(ROOT, "xxx")?; + assert!(tx.values(ROOT, "xxx")?.is_empty()); + tx.commit(); + Ok(()) + } + + #[test] + fn test_inc() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + tx.set(ROOT, "counter", ScalarValue::counter(10))?; + assert!(tx.value(ROOT, "counter")?.unwrap().0 == Value::counter(10)); + tx.inc(ROOT, "counter", 10)?; + assert!(tx.value(ROOT, "counter")?.unwrap().0 == Value::counter(20)); + tx.inc(ROOT, "counter", -5)?; + assert!(tx.value(ROOT, "counter")?.unwrap().0 == 
Value::counter(15)); + tx.commit(); + Ok(()) + } + + #[test] + fn test_save_incremental() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + + let mut tx = doc.transaction(); + tx.set(ROOT, "foo", 1)?; + tx.commit(); + + let save1 = doc.save(); + + let mut tx = doc.transaction(); + tx.set(ROOT, "bar", 2)?; + tx.commit(); + + let save2 = doc.save_incremental(); + + let mut tx = doc.transaction(); + tx.set(ROOT, "baz", 3)?; + tx.commit(); + + let save3 = doc.save_incremental(); + + let mut save_a: Vec = vec![]; + save_a.extend(&save1); + save_a.extend(&save2); + save_a.extend(&save3); + + assert!(doc.save_incremental().is_empty()); + + let save_b = doc.save(); + + assert!(save_b.len() < save_a.len()); + + let mut doc_a = Automerge::load(&save_a)?; + let mut doc_b = Automerge::load(&save_b)?; + + assert!(doc_a.values(ROOT, "baz")? == doc_b.values(ROOT, "baz")?); + + assert!(doc_a.save() == doc_b.save()); + + Ok(()) + } + + #[test] + fn test_save_text() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + let text = tx.set_object(ROOT, "text", ObjType::Text)?; + tx.commit(); + let heads1 = doc.get_heads(); + let mut tx = doc.transaction(); + tx.splice_text(&text, 0, 0, "hello world")?; + tx.commit(); + let heads2 = doc.get_heads(); + let mut tx = doc.transaction(); + tx.splice_text(&text, 6, 0, "big bad ")?; + tx.commit(); + let heads3 = doc.get_heads(); + + assert!(&doc.text(&text)? == "hello big bad world"); + assert!(&doc.text_at(&text, &heads1)?.is_empty()); + assert!(&doc.text_at(&text, &heads2)? == "hello world"); + assert!(&doc.text_at(&text, &heads3)? 
== "hello big bad world"); + + Ok(()) + } + + #[test] + fn test_props_vals_at() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set_actor("aaaa".try_into().unwrap()); + let mut tx = doc.transaction(); + tx.set(ROOT, "prop1", "val1")?; + tx.commit(); + doc.get_heads(); + let heads1 = doc.get_heads(); + let mut tx = doc.transaction(); + tx.set(ROOT, "prop1", "val2")?; + tx.commit(); + doc.get_heads(); + let heads2 = doc.get_heads(); + let mut tx = doc.transaction(); + tx.set(ROOT, "prop2", "val3")?; + tx.commit(); + doc.get_heads(); + let heads3 = doc.get_heads(); + let mut tx = doc.transaction(); + tx.del(ROOT, "prop1")?; + tx.commit(); + doc.get_heads(); + let heads4 = doc.get_heads(); + let mut tx = doc.transaction(); + tx.set(ROOT, "prop3", "val4")?; + tx.commit(); + doc.get_heads(); + let heads5 = doc.get_heads(); + assert!(doc.keys_at(ROOT, &heads1).collect_vec() == vec!["prop1".to_owned()]); + assert_eq!(doc.length_at(ROOT, &heads1), 1); + assert!(doc.value_at(ROOT, "prop1", &heads1)?.unwrap().0 == Value::str("val1")); + assert!(doc.value_at(ROOT, "prop2", &heads1)? == None); + assert!(doc.value_at(ROOT, "prop3", &heads1)? == None); + + assert!(doc.keys_at(ROOT, &heads2).collect_vec() == vec!["prop1".to_owned()]); + assert_eq!(doc.length_at(ROOT, &heads2), 1); + assert!(doc.value_at(ROOT, "prop1", &heads2)?.unwrap().0 == Value::str("val2")); + assert!(doc.value_at(ROOT, "prop2", &heads2)? == None); + assert!(doc.value_at(ROOT, "prop3", &heads2)? == None); + + assert!( + doc.keys_at(ROOT, &heads3).collect_vec() + == vec!["prop1".to_owned(), "prop2".to_owned()] + ); + assert_eq!(doc.length_at(ROOT, &heads3), 2); + assert!(doc.value_at(ROOT, "prop1", &heads3)?.unwrap().0 == Value::str("val2")); + assert!(doc.value_at(ROOT, "prop2", &heads3)?.unwrap().0 == Value::str("val3")); + assert!(doc.value_at(ROOT, "prop3", &heads3)? 
== None); + + assert!(doc.keys_at(ROOT, &heads4).collect_vec() == vec!["prop2".to_owned()]); + assert_eq!(doc.length_at(ROOT, &heads4), 1); + assert!(doc.value_at(ROOT, "prop1", &heads4)? == None); + assert!(doc.value_at(ROOT, "prop2", &heads4)?.unwrap().0 == Value::str("val3")); + assert!(doc.value_at(ROOT, "prop3", &heads4)? == None); + + assert!( + doc.keys_at(ROOT, &heads5).collect_vec() + == vec!["prop2".to_owned(), "prop3".to_owned()] + ); + assert_eq!(doc.length_at(ROOT, &heads5), 2); + assert_eq!(doc.length(ROOT), 2); + assert!(doc.value_at(ROOT, "prop1", &heads5)? == None); + assert!(doc.value_at(ROOT, "prop2", &heads5)?.unwrap().0 == Value::str("val3")); + assert!(doc.value_at(ROOT, "prop3", &heads5)?.unwrap().0 == Value::str("val4")); + + assert_eq!(doc.keys_at(ROOT, &[]).count(), 0); + assert_eq!(doc.length_at(ROOT, &[]), 0); + assert!(doc.value_at(ROOT, "prop1", &[])? == None); + assert!(doc.value_at(ROOT, "prop2", &[])? == None); + assert!(doc.value_at(ROOT, "prop3", &[])? 
== None); + Ok(()) + } + + #[test] + fn test_len_at() -> Result<(), AutomergeError> { + let mut doc = Automerge::new(); + doc.set_actor("aaaa".try_into().unwrap()); + + let mut tx = doc.transaction(); + let list = tx.set_object(ROOT, "list", ObjType::List)?; + tx.commit(); + let heads1 = doc.get_heads(); + + let mut tx = doc.transaction(); + tx.insert(&list, 0, 10)?; + tx.commit(); + let heads2 = doc.get_heads(); + + let mut tx = doc.transaction(); + tx.set(&list, 0, 20)?; + tx.insert(&list, 0, 30)?; + tx.commit(); + let heads3 = doc.get_heads(); + + let mut tx = doc.transaction(); + tx.set(&list, 1, 40)?; + tx.insert(&list, 1, 50)?; + tx.commit(); + let heads4 = doc.get_heads(); + + let mut tx = doc.transaction(); + tx.del(&list, 2)?; + tx.commit(); + let heads5 = doc.get_heads(); + + let mut tx = doc.transaction(); + tx.del(&list, 0)?; + tx.commit(); + let heads6 = doc.get_heads(); + + assert!(doc.length_at(&list, &heads1) == 0); + assert!(doc.value_at(&list, 0, &heads1)?.is_none()); + + assert!(doc.length_at(&list, &heads2) == 1); + assert!(doc.value_at(&list, 0, &heads2)?.unwrap().0 == Value::int(10)); + + assert!(doc.length_at(&list, &heads3) == 2); + + assert!(doc.value_at(&list, 0, &heads3)?.unwrap().0 == Value::int(30)); + assert!(doc.value_at(&list, 1, &heads3)?.unwrap().0 == Value::int(20)); + + assert!(doc.length_at(&list, &heads4) == 3); + assert!(doc.value_at(&list, 0, &heads4)?.unwrap().0 == Value::int(30)); + assert!(doc.value_at(&list, 1, &heads4)?.unwrap().0 == Value::int(50)); + assert!(doc.value_at(&list, 2, &heads4)?.unwrap().0 == Value::int(40)); + + assert!(doc.length_at(&list, &heads5) == 2); + assert!(doc.value_at(&list, 0, &heads5)?.unwrap().0 == Value::int(30)); + assert!(doc.value_at(&list, 1, &heads5)?.unwrap().0 == Value::int(50)); + + assert!(doc.length_at(&list, &heads6) == 1); + assert!(doc.length(&list) == 1); + assert!(doc.value_at(&list, 0, &heads6)?.unwrap().0 == Value::int(50)); + + Ok(()) + } + + #[test] + fn keys_iter() { + 
let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + tx.set(ROOT, "a", 3).unwrap(); + tx.set(ROOT, "b", 4).unwrap(); + tx.set(ROOT, "c", 5).unwrap(); + tx.set(ROOT, "d", 6).unwrap(); + tx.commit(); + let mut tx = doc.transaction(); + tx.set(ROOT, "a", 7).unwrap(); + tx.commit(); + let mut tx = doc.transaction(); + tx.set(ROOT, "a", 8).unwrap(); + tx.set(ROOT, "d", 9).unwrap(); + tx.commit(); + assert_eq!(doc.keys(ROOT).count(), 4); + + let mut keys = doc.keys(ROOT); + assert_eq!(keys.next(), Some("a".into())); + assert_eq!(keys.next(), Some("b".into())); + assert_eq!(keys.next(), Some("c".into())); + assert_eq!(keys.next(), Some("d".into())); + assert_eq!(keys.next(), None); + + let mut keys = doc.keys(ROOT); + assert_eq!(keys.next_back(), Some("d".into())); + assert_eq!(keys.next_back(), Some("c".into())); + assert_eq!(keys.next_back(), Some("b".into())); + assert_eq!(keys.next_back(), Some("a".into())); + assert_eq!(keys.next_back(), None); + + let mut keys = doc.keys(ROOT); + assert_eq!(keys.next(), Some("a".into())); + assert_eq!(keys.next_back(), Some("d".into())); + assert_eq!(keys.next_back(), Some("c".into())); + assert_eq!(keys.next_back(), Some("b".into())); + assert_eq!(keys.next_back(), None); + + let mut keys = doc.keys(ROOT); + assert_eq!(keys.next_back(), Some("d".into())); + assert_eq!(keys.next(), Some("a".into())); + assert_eq!(keys.next(), Some("b".into())); + assert_eq!(keys.next(), Some("c".into())); + assert_eq!(keys.next(), None); + let keys = doc.keys(ROOT); + assert_eq!(keys.collect::>(), vec!["a", "b", "c", "d"]); + } + + #[test] + fn rolling_back_transaction_has_no_effect() { + let mut doc = Automerge::new(); + let old_states = doc.states.clone(); + let bytes = doc.save(); + let tx = doc.transaction(); + tx.rollback(); + let new_states = doc.states.clone(); + assert_eq!(old_states, new_states); + let new_bytes = doc.save(); + assert_eq!(bytes, new_bytes); + } + + #[test] + fn mutate_old_objects() { + let mut doc = 
Automerge::new(); + let mut tx = doc.transaction(); + // create a map + let map1 = tx.set_object(ROOT, "a", ObjType::Map).unwrap(); + tx.set(&map1, "b", 1).unwrap(); + // overwrite the first map with a new one + let map2 = tx.set_object(ROOT, "a", ObjType::Map).unwrap(); + tx.set(&map2, "c", 2).unwrap(); + tx.commit(); + + // we can get the new map by traversing the tree + let map = doc.value(&ROOT, "a").unwrap().unwrap().1; + assert_eq!(doc.value(&map, "b").unwrap(), None); + // and get values from it + assert_eq!( + doc.value(&map, "c").unwrap().map(|s| s.0), + Some(ScalarValue::Int(2).into()) + ); + + // but we can still access the old one if we know the ID! + assert_eq!(doc.value(&map1, "b").unwrap().unwrap().0, Value::int(1)); + // and even set new things in it! + let mut tx = doc.transaction(); + tx.set(&map1, "c", 3).unwrap(); + tx.commit(); + + assert_eq!(doc.value(&map1, "c").unwrap().unwrap().0, Value::int(3)); + } + + #[test] + fn delete_nothing_in_map_is_noop() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + // deleting a missing key in a map should just be a noop + assert!(tx.del(ROOT, "a").is_ok()); + tx.commit(); + let last_change = doc.get_last_local_change().unwrap(); + assert_eq!(last_change.len(), 0); + + let bytes = doc.save(); + assert!(Automerge::load(&bytes).is_ok()); + + let mut tx = doc.transaction(); + tx.set(ROOT, "a", 1).unwrap(); + tx.commit(); + let last_change = doc.get_last_local_change().unwrap(); + assert_eq!(last_change.len(), 1); + + let mut tx = doc.transaction(); + // a real op + tx.del(ROOT, "a").unwrap(); + // a no-op + tx.del(ROOT, "a").unwrap(); + tx.commit(); + let last_change = doc.get_last_local_change().unwrap(); + assert_eq!(last_change.len(), 1); + } + + #[test] + fn delete_nothing_in_list_returns_error() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + // deleting an element in a list that does not exist is an error + assert!(tx.del(ROOT, 0).is_err()); + } + + #[test] + 
fn loaded_doc_changes_have_hash() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + tx.set(ROOT, "a", 1).unwrap(); + tx.commit(); + let hash = doc.get_last_local_change().unwrap().hash; + let bytes = doc.save(); + let doc = Automerge::load(&bytes).unwrap(); + assert_eq!(doc.get_change_by_hash(&hash).unwrap().hash, hash); + } + + #[test] + fn load_change_with_zero_start_op() { + let bytes = &[ + 133, 111, 74, 131, 202, 50, 52, 158, 2, 96, 163, 163, 83, 255, 255, 255, 50, 50, 50, + 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 255, 255, 245, 53, 1, 0, 0, 0, 0, 0, 0, 4, + 233, 245, 239, 255, 1, 0, 0, 0, 133, 111, 74, 131, 163, 96, 0, 0, 2, 10, 202, 144, 125, + 19, 48, 89, 133, 49, 10, 10, 67, 91, 111, 10, 74, 131, 96, 0, 163, 131, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 1, 153, 0, 0, 246, 255, 255, 255, 157, 157, 157, 157, + 157, 157, 157, 157, 157, 157, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 255, 48, 254, 208, + ]; + let _ = Automerge::load(bytes); + } + + #[test] + fn load_broken_list() { + enum Action { + InsertText(usize, char), + DelText(usize), + } + use Action::*; + let actions = [ + InsertText(0, 'a'), + InsertText(0, 'b'), + DelText(1), + InsertText(0, 'c'), + DelText(1), + DelText(0), + InsertText(0, 'd'), + InsertText(0, 'e'), + InsertText(1, 'f'), + DelText(2), + DelText(1), + InsertText(0, 'g'), + DelText(1), + DelText(0), + InsertText(0, 'h'), + InsertText(1, 'i'), + DelText(1), + DelText(0), + InsertText(0, 'j'), + InsertText(0, 'k'), + DelText(1), + DelText(0), + InsertText(0, 'l'), + DelText(0), + InsertText(0, 'm'), + InsertText(0, 'n'), + DelText(1), + DelText(0), + InsertText(0, 'o'), + DelText(0), + InsertText(0, 'p'), + InsertText(1, 'q'), + InsertText(1, 'r'), + InsertText(1, 's'), + InsertText(3, 't'), + InsertText(5, 'u'), + InsertText(0, 'v'), + InsertText(3, 'w'), + InsertText(4, 'x'), + InsertText(0, 'y'), + InsertText(6, 'z'), + InsertText(11, '1'), + InsertText(0, 
'2'), + InsertText(0, '3'), + InsertText(0, '4'), + InsertText(13, '5'), + InsertText(11, '6'), + InsertText(17, '7'), + ]; + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + let list = tx.set_object(ROOT, "list", ObjType::List).unwrap(); + for action in actions { + match action { + Action::InsertText(index, c) => { + println!("inserting {} at {}", c, index); + tx.insert(&list, index, c).unwrap(); + } + Action::DelText(index) => { + println!("deleting at {} ", index); + tx.del(&list, index).unwrap(); + } + } + } + tx.commit(); + let bytes = doc.save(); + println!("doc2 time"); + let mut doc2 = Automerge::load(&bytes).unwrap(); + let bytes2 = doc2.save(); + assert_eq!(doc.text(&list).unwrap(), doc2.text(&list).unwrap()); + + assert_eq!(doc.queue, doc2.queue); + assert_eq!(doc.history, doc2.history); + assert_eq!(doc.history_index, doc2.history_index); + assert_eq!(doc.states, doc2.states); + assert_eq!(doc.deps, doc2.deps); + assert_eq!(doc.saved, doc2.saved); + assert_eq!(doc.ops, doc2.ops); + assert_eq!(doc.max_op, doc2.max_op); + + assert_eq!(bytes, bytes2); + } + + #[test] + fn load_broken_list_short() { + // breaks when the B constant in OpSet is 3 + enum Action { + InsertText(usize, char), + DelText(usize), + } + use Action::*; + let actions = [ + InsertText(0, 'a'), + InsertText(1, 'b'), + DelText(1), + InsertText(1, 'c'), + InsertText(2, 'd'), + InsertText(2, 'e'), + InsertText(0, 'f'), + DelText(4), + InsertText(4, 'g'), + ]; + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + let list = tx.set_object(ROOT, "list", ObjType::List).unwrap(); + for action in actions { + match action { + Action::InsertText(index, c) => { + println!("inserting {} at {}", c, index); + tx.insert(&list, index, c).unwrap(); + } + Action::DelText(index) => { + println!("deleting at {} ", index); + tx.del(&list, index).unwrap(); + } + } + } + tx.commit(); + let bytes = doc.save(); + println!("doc2 time"); + let mut doc2 = 
Automerge::load(&bytes).unwrap(); + let bytes2 = doc2.save(); + assert_eq!(doc.text(&list).unwrap(), doc2.text(&list).unwrap()); + + assert_eq!(doc.queue, doc2.queue); + assert_eq!(doc.history, doc2.history); + assert_eq!(doc.history_index, doc2.history_index); + assert_eq!(doc.states, doc2.states); + assert_eq!(doc.deps, doc2.deps); + assert_eq!(doc.saved, doc2.saved); + assert_eq!(doc.ops, doc2.ops); + assert_eq!(doc.max_op, doc2.max_op); + + assert_eq!(bytes, bytes2); + } + + #[test] + fn compute_list_indexes_correctly_when_list_element_is_split_across_tree_nodes() { + let max = B as u64 * 2; + let actor1 = ActorId::from(b"aaaa"); + let mut doc1 = AutoCommit::new().with_actor(actor1.clone()); + let actor2 = ActorId::from(b"bbbb"); + let mut doc2 = AutoCommit::new().with_actor(actor2.clone()); + let list = doc1.set_object(ROOT, "list", ObjType::List).unwrap(); + doc1.insert(&list, 0, 0).unwrap(); + doc2.load_incremental(&doc1.save_incremental()).unwrap(); + for i in 1..=max { + doc1.set(&list, 0, i).unwrap() + } + for i in 1..=max { + doc2.set(&list, 0, i).unwrap() + } + let change1 = doc1.save_incremental(); + let change2 = doc2.save_incremental(); + doc2.load_incremental(&change1).unwrap(); + doc1.load_incremental(&change2).unwrap(); + assert_eq!(doc1.length(&list), 1); + assert_eq!(doc2.length(&list), 1); + assert_eq!( + doc1.values(&list, 0).unwrap(), + vec![ + (max.into(), ExId::Id(max + 2, actor1.clone(), 0)), + (max.into(), ExId::Id(max + 2, actor2.clone(), 1)) + ] + ); + assert_eq!( + doc2.values(&list, 0).unwrap(), + vec![ + (max.into(), ExId::Id(max + 2, actor1, 0)), + (max.into(), ExId::Id(max + 2, actor2, 1)) + ] + ); + assert!(doc1.value(&list, 1).unwrap().is_none()); + assert!(doc2.value(&list, 1).unwrap().is_none()); + } +} diff --git a/automerge/src/change.rs b/automerge/src/change.rs new file mode 100644 index 00000000..24baf8ef --- /dev/null +++ b/automerge/src/change.rs @@ -0,0 +1,997 @@ +use crate::columnar::{ + ChangeEncoder, ChangeIterator, 
ColumnEncoder, DepsIterator, DocChange, DocOp, DocOpEncoder, + DocOpIterator, OperationIterator, COLUMN_TYPE_DEFLATE, +}; +use crate::decoding; +use crate::decoding::{Decodable, InvalidChangeError}; +use crate::encoding::{Encodable, DEFLATE_MIN_SIZE}; +use crate::error::AutomergeError; +use crate::indexed_cache::IndexedCache; +use crate::legacy as amp; +use crate::transaction::TransactionInner; +use crate::types; +use crate::types::{ActorId, ElemId, Key, ObjId, Op, OpId, OpType}; +use core::ops::Range; +use flate2::{ + bufread::{DeflateDecoder, DeflateEncoder}, + Compression, +}; +use itertools::Itertools; +use sha2::Digest; +use sha2::Sha256; +use std::collections::{HashMap, HashSet}; +use std::convert::TryInto; +use std::fmt::Debug; +use std::io::{Read, Write}; +use std::num::NonZeroU64; +use tracing::instrument; + +const MAGIC_BYTES: [u8; 4] = [0x85, 0x6f, 0x4a, 0x83]; +const PREAMBLE_BYTES: usize = 8; +const HEADER_BYTES: usize = PREAMBLE_BYTES + 1; + +const HASH_BYTES: usize = 32; +const BLOCK_TYPE_DOC: u8 = 0; +const BLOCK_TYPE_CHANGE: u8 = 1; +const BLOCK_TYPE_DEFLATE: u8 = 2; +const CHUNK_START: usize = 8; +const HASH_RANGE: Range = 4..8; + +pub(crate) fn encode_document<'a, 'b>( + heads: Vec, + changes: impl Iterator, + doc_ops: impl Iterator, + actors_index: &IndexedCache, + props: &'a [String], +) -> Vec { + let mut bytes: Vec = Vec::new(); + + let actors_map = actors_index.encode_index(); + let actors = actors_index.sorted(); + + /* + // this assumes that all actor_ids referenced are seen in changes.actor_id which is true + // so long as we have a full history + let mut actors: Vec<_> = changes + .iter() + .map(|c| &c.actor) + .unique() + .sorted() + .cloned() + .collect(); + */ + + let (change_bytes, change_info) = ChangeEncoder::encode_changes(changes, &actors); + + //let doc_ops = group_doc_ops(changes, &actors); + + let (ops_bytes, ops_info) = DocOpEncoder::encode_doc_ops(doc_ops, &actors_map, props); + + bytes.extend(MAGIC_BYTES); + 
bytes.extend([0, 0, 0, 0]); // we dont know the hash yet so fill in a fake + bytes.push(BLOCK_TYPE_DOC); + + let mut chunk = Vec::new(); + + actors.len().encode_vec(&mut chunk); + + for a in actors.into_iter() { + a.to_bytes().encode_vec(&mut chunk); + } + + heads.len().encode_vec(&mut chunk); + for head in heads.iter() { + chunk.write_all(&head.0).unwrap(); + } + + chunk.extend(change_info); + chunk.extend(ops_info); + + chunk.extend(change_bytes); + chunk.extend(ops_bytes); + + leb128::write::unsigned(&mut bytes, chunk.len() as u64).unwrap(); + + bytes.extend(&chunk); + + let hash_result = Sha256::digest(&bytes[CHUNK_START..bytes.len()]); + + bytes.splice(HASH_RANGE, hash_result[0..4].iter().copied()); + + bytes +} + +/// When encoding a change we take all the actor IDs referenced by a change and place them in an +/// array. The array has the actor who authored the change as the first element and all remaining +/// actors (i.e. those referenced in object IDs in the target of an operation or in the `pred` of +/// an operation) lexicographically ordered following the change author. 
+fn actor_ids_in_change(change: &::Change) -> Vec { + let mut other_ids: Vec<&::ActorId> = change + .operations + .iter() + .flat_map(opids_in_operation) + .filter(|a| *a != &change.actor_id) + .unique() + .collect(); + other_ids.sort(); + // Now prepend the change actor + std::iter::once(&change.actor_id) + .chain(other_ids.into_iter()) + .cloned() + .collect() +} + +fn opids_in_operation(op: &::Op) -> impl Iterator { + let obj_actor_id = match &op.obj { + amp::ObjectId::Root => None, + amp::ObjectId::Id(opid) => Some(opid.actor()), + }; + let pred_ids = op.pred.iter().map(amp::OpId::actor); + let key_actor = match &op.key { + amp::Key::Seq(amp::ElementId::Id(i)) => Some(i.actor()), + _ => None, + }; + obj_actor_id + .into_iter() + .chain(key_actor.into_iter()) + .chain(pred_ids) +} + +impl From for Change { + fn from(value: amp::Change) -> Self { + encode(&value) + } +} + +impl From<&::Change> for Change { + fn from(value: &::Change) -> Self { + encode(value) + } +} + +fn encode(change: &::Change) -> Change { + let mut deps = change.deps.clone(); + deps.sort_unstable(); + + let mut chunk = encode_chunk(change, &deps); + + let mut bytes = Vec::with_capacity(MAGIC_BYTES.len() + 4 + chunk.bytes.len()); + + bytes.extend(&MAGIC_BYTES); + + bytes.extend(vec![0, 0, 0, 0]); // we dont know the hash yet so fill in a fake + + bytes.push(BLOCK_TYPE_CHANGE); + + leb128::write::unsigned(&mut bytes, chunk.bytes.len() as u64).unwrap(); + + let body_start = bytes.len(); + + increment_range(&mut chunk.body, bytes.len()); + increment_range(&mut chunk.message, bytes.len()); + increment_range(&mut chunk.extra_bytes, bytes.len()); + increment_range_map(&mut chunk.ops, bytes.len()); + + bytes.extend(&chunk.bytes); + + let hash_result = Sha256::digest(&bytes[CHUNK_START..bytes.len()]); + let hash: amp::ChangeHash = hash_result[..].try_into().unwrap(); + + bytes.splice(HASH_RANGE, hash_result[0..4].iter().copied()); + + // any time I make changes to the encoder decoder its a good idea + 
// to run it through a round trip to detect errors the tests might not + // catch + // let c0 = Change::from_bytes(bytes.clone()).unwrap(); + // std::assert_eq!(c1, c0); + // perhaps we should add something like this to the test suite + + let bytes = ChangeBytes::Uncompressed(bytes); + + Change { + bytes, + body_start, + hash, + seq: change.seq, + start_op: change.start_op, + time: change.time, + actors: chunk.actors, + message: chunk.message, + deps, + ops: chunk.ops, + extra_bytes: chunk.extra_bytes, + } +} + +struct ChunkIntermediate { + bytes: Vec, + body: Range, + actors: Vec, + message: Range, + ops: HashMap>, + extra_bytes: Range, +} + +fn encode_chunk(change: &::Change, deps: &[amp::ChangeHash]) -> ChunkIntermediate { + let mut bytes = Vec::new(); + + // All these unwraps are okay because we're writing to an in memory buffer so io erros should + // not happen + + // encode deps + deps.len().encode(&mut bytes).unwrap(); + for hash in deps.iter() { + bytes.write_all(&hash.0).unwrap(); + } + + let actors = actor_ids_in_change(change); + change.actor_id.to_bytes().encode(&mut bytes).unwrap(); + + // encode seq, start_op, time, message + change.seq.encode(&mut bytes).unwrap(); + change.start_op.encode(&mut bytes).unwrap(); + change.time.encode(&mut bytes).unwrap(); + let message = bytes.len() + 1; + change.message.encode(&mut bytes).unwrap(); + let message = message..bytes.len(); + + // encode ops into a side buffer - collect all other actors + let (ops_buf, mut ops) = ColumnEncoder::encode_ops(&change.operations, &actors); + + // encode all other actors + actors[1..].encode(&mut bytes).unwrap(); + + // now we know how many bytes ops are offset by so we can adjust the ranges + increment_range_map(&mut ops, bytes.len()); + + // write out the ops + + bytes.write_all(&ops_buf).unwrap(); + + // write out the extra bytes + let extra_bytes = bytes.len()..(bytes.len() + change.extra_bytes.len()); + bytes.write_all(&change.extra_bytes).unwrap(); + let body = 
0..bytes.len(); + + ChunkIntermediate { + bytes, + body, + actors, + message, + ops, + extra_bytes, + } +} + +#[derive(PartialEq, Debug, Clone)] +enum ChangeBytes { + Compressed { + compressed: Vec, + uncompressed: Vec, + }, + Uncompressed(Vec), +} + +impl ChangeBytes { + fn uncompressed(&self) -> &[u8] { + match self { + ChangeBytes::Compressed { uncompressed, .. } => &uncompressed[..], + ChangeBytes::Uncompressed(b) => &b[..], + } + } + + fn compress(&mut self, body_start: usize) { + match self { + ChangeBytes::Compressed { .. } => {} + ChangeBytes::Uncompressed(uncompressed) => { + if uncompressed.len() > DEFLATE_MIN_SIZE { + let mut result = Vec::with_capacity(uncompressed.len()); + result.extend(&uncompressed[0..8]); + result.push(BLOCK_TYPE_DEFLATE); + let mut deflater = + DeflateEncoder::new(&uncompressed[body_start..], Compression::default()); + let mut deflated = Vec::new(); + let deflated_len = deflater.read_to_end(&mut deflated).unwrap(); + leb128::write::unsigned(&mut result, deflated_len as u64).unwrap(); + result.extend(&deflated[..]); + *self = ChangeBytes::Compressed { + compressed: result, + uncompressed: std::mem::take(uncompressed), + } + } + } + } + } + + fn raw(&self) -> &[u8] { + match self { + ChangeBytes::Compressed { compressed, .. } => &compressed[..], + ChangeBytes::Uncompressed(b) => &b[..], + } + } +} + +/// A change represents a group of operations performed by an actor. +#[derive(PartialEq, Debug, Clone)] +pub struct Change { + bytes: ChangeBytes, + body_start: usize, + /// Hash of this change. + pub hash: amp::ChangeHash, + /// The index of this change in the changes from this actor. + pub seq: u64, + /// The start operation index. Starts at 1. + pub start_op: NonZeroU64, + /// The time that this change was committed. + pub time: i64, + /// The message of this change. + message: Range, + /// The actors referenced in this change. + actors: Vec, + /// The dependencies of this change. 
+ pub deps: Vec, + ops: HashMap>, + extra_bytes: Range, +} + +impl Change { + pub fn actor_id(&self) -> &ActorId { + &self.actors[0] + } + + #[instrument(level = "debug", skip(bytes))] + pub fn load_document(bytes: &[u8]) -> Result, AutomergeError> { + load_blocks(bytes) + } + + pub fn from_bytes(bytes: Vec) -> Result { + Change::try_from(bytes) + } + + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + pub fn len(&self) -> usize { + // TODO - this could be a lot more efficient + self.iter_ops().count() + } + + pub fn max_op(&self) -> u64 { + self.start_op.get() + (self.len() as u64) - 1 + } + + pub fn message(&self) -> Option { + let m = &self.bytes.uncompressed()[self.message.clone()]; + if m.is_empty() { + None + } else { + std::str::from_utf8(m).map(ToString::to_string).ok() + } + } + + pub fn decode(&self) -> amp::Change { + amp::Change { + start_op: self.start_op, + seq: self.seq, + time: self.time, + hash: Some(self.hash), + message: self.message(), + actor_id: self.actors[0].clone(), + deps: self.deps.clone(), + operations: self + .iter_ops() + .map(|op| amp::Op { + action: op.action.clone(), + obj: op.obj.clone(), + key: op.key.clone(), + pred: op.pred.clone(), + insert: op.insert, + }) + .collect(), + extra_bytes: self.extra_bytes().into(), + } + } + + pub(crate) fn iter_ops(&self) -> OperationIterator { + OperationIterator::new(self.bytes.uncompressed(), self.actors.as_slice(), &self.ops) + } + + pub fn extra_bytes(&self) -> &[u8] { + &self.bytes.uncompressed()[self.extra_bytes.clone()] + } + + pub fn compress(&mut self) { + self.bytes.compress(self.body_start); + } + + pub fn raw_bytes(&self) -> &[u8] { + self.bytes.raw() + } +} + +fn read_leb128(bytes: &mut &[u8]) -> Result<(usize, usize), decoding::Error> { + let mut buf = &bytes[..]; + let val = leb128::read::unsigned(&mut buf)? 
as usize; + let leb128_bytes = bytes.len() - buf.len(); + Ok((val, leb128_bytes)) +} + +fn read_slice( + bytes: &[u8], + cursor: &mut Range, +) -> Result { + let mut view = &bytes[cursor.clone()]; + let init_len = view.len(); + let val = T::decode::<&[u8]>(&mut view).ok_or(decoding::Error::NoDecodedValue); + let bytes_read = init_len - view.len(); + *cursor = (cursor.start + bytes_read)..cursor.end; + val +} + +fn slice_bytes(bytes: &[u8], cursor: &mut Range) -> Result, decoding::Error> { + let (val, len) = read_leb128(&mut &bytes[cursor.clone()])?; + let start = cursor.start + len; + let end = start + val; + *cursor = end..cursor.end; + Ok(start..end) +} + +fn increment_range(range: &mut Range, len: usize) { + range.end += len; + range.start += len; +} + +fn increment_range_map(ranges: &mut HashMap>, len: usize) { + for range in ranges.values_mut() { + increment_range(range, len); + } +} + +fn export_objid(id: &ObjId, actors: &IndexedCache) -> amp::ObjectId { + if id == &ObjId::root() { + amp::ObjectId::Root + } else { + export_opid(&id.0, actors).into() + } +} + +fn export_elemid(id: &ElemId, actors: &IndexedCache) -> amp::ElementId { + if id == &types::HEAD { + amp::ElementId::Head + } else { + export_opid(&id.0, actors).into() + } +} + +fn export_opid(id: &OpId, actors: &IndexedCache) -> amp::OpId { + amp::OpId(id.0, actors.get(id.1).clone()) +} + +fn export_op( + op: &Op, + obj: &ObjId, + actors: &IndexedCache, + props: &IndexedCache, +) -> amp::Op { + let action = op.action.clone(); + let key = match &op.key { + Key::Map(n) => amp::Key::Map(props.get(*n).clone().into()), + Key::Seq(id) => amp::Key::Seq(export_elemid(id, actors)), + }; + let obj = export_objid(obj, actors); + let pred = op.pred.iter().map(|id| export_opid(id, actors)).collect(); + amp::Op { + action, + obj, + insert: op.insert, + pred, + key, + } +} + +pub(crate) fn export_change( + change: TransactionInner, + actors: &IndexedCache, + props: &IndexedCache, +) -> Change { + amp::Change { + 
actor_id: actors.get(change.actor).clone(), + seq: change.seq, + start_op: change.start_op, + time: change.time, + deps: change.deps, + message: change.message, + hash: change.hash, + operations: change + .operations + .iter() + .map(|(obj, op)| export_op(op, obj, actors, props)) + .collect(), + extra_bytes: change.extra_bytes, + } + .into() +} + +impl TryFrom> for Change { + type Error = decoding::Error; + + fn try_from(bytes: Vec) -> Result { + let (chunktype, body) = decode_header_without_hash(&bytes)?; + let bytes = if chunktype == BLOCK_TYPE_DEFLATE { + decompress_chunk(0..PREAMBLE_BYTES, body, bytes)? + } else { + ChangeBytes::Uncompressed(bytes) + }; + + let (chunktype, hash, body) = decode_header(bytes.uncompressed())?; + + if chunktype != BLOCK_TYPE_CHANGE { + return Err(decoding::Error::WrongType { + expected_one_of: vec![BLOCK_TYPE_CHANGE], + found: chunktype, + }); + } + + let body_start = body.start; + let mut cursor = body; + + let deps = decode_hashes(bytes.uncompressed(), &mut cursor)?; + + let actor = + ActorId::from(&bytes.uncompressed()[slice_bytes(bytes.uncompressed(), &mut cursor)?]); + let seq = read_slice(bytes.uncompressed(), &mut cursor)?; + let start_op = read_slice(bytes.uncompressed(), &mut cursor)?; + let time = read_slice(bytes.uncompressed(), &mut cursor)?; + let message = slice_bytes(bytes.uncompressed(), &mut cursor)?; + + let actors = decode_actors(bytes.uncompressed(), &mut cursor, Some(actor))?; + + let ops_info = decode_column_info(bytes.uncompressed(), &mut cursor, false)?; + let ops = decode_columns(&mut cursor, &ops_info); + + Ok(Change { + bytes, + body_start, + hash, + seq, + start_op, + time, + actors, + message, + deps, + ops, + extra_bytes: cursor, + }) + } +} + +fn decompress_chunk( + preamble: Range, + body: Range, + compressed: Vec, +) -> Result { + let mut decoder = DeflateDecoder::new(&compressed[body]); + let mut decompressed = Vec::new(); + decoder.read_to_end(&mut decompressed)?; + let mut result = 
Vec::with_capacity(decompressed.len() + preamble.len()); + result.extend(&compressed[preamble]); + result.push(BLOCK_TYPE_CHANGE); + leb128::write::unsigned::>(&mut result, decompressed.len() as u64).unwrap(); + result.extend(decompressed); + Ok(ChangeBytes::Compressed { + uncompressed: result, + compressed, + }) +} + +fn decode_hashes( + bytes: &[u8], + cursor: &mut Range, +) -> Result, decoding::Error> { + let num_hashes = read_slice(bytes, cursor)?; + let mut hashes = Vec::with_capacity(num_hashes); + for _ in 0..num_hashes { + let hash = cursor.start..(cursor.start + HASH_BYTES); + *cursor = hash.end..cursor.end; + hashes.push( + bytes + .get(hash) + .ok_or(decoding::Error::NotEnoughBytes)? + .try_into() + .map_err(InvalidChangeError::from)?, + ); + } + Ok(hashes) +} + +fn decode_actors( + bytes: &[u8], + cursor: &mut Range, + first: Option, +) -> Result, decoding::Error> { + let num_actors: usize = read_slice(bytes, cursor)?; + let mut actors = Vec::with_capacity(num_actors + 1); + if let Some(actor) = first { + actors.push(actor); + } + for _ in 0..num_actors { + actors.push(ActorId::from( + bytes + .get(slice_bytes(bytes, cursor)?) 
+ .ok_or(decoding::Error::NotEnoughBytes)?, + )); + } + Ok(actors) +} + +fn decode_column_info( + bytes: &[u8], + cursor: &mut Range, + allow_compressed_column: bool, +) -> Result, decoding::Error> { + let num_columns = read_slice(bytes, cursor)?; + let mut columns = Vec::with_capacity(num_columns); + let mut last_id = 0; + for _ in 0..num_columns { + let id: u32 = read_slice(bytes, cursor)?; + if (id & !COLUMN_TYPE_DEFLATE) <= (last_id & !COLUMN_TYPE_DEFLATE) { + return Err(decoding::Error::ColumnsNotInAscendingOrder { + last: last_id, + found: id, + }); + } + if id & COLUMN_TYPE_DEFLATE != 0 && !allow_compressed_column { + return Err(decoding::Error::ChangeContainedCompressedColumns); + } + last_id = id; + let length = read_slice(bytes, cursor)?; + columns.push((id, length)); + } + Ok(columns) +} + +fn decode_columns( + cursor: &mut Range, + columns: &[(u32, usize)], +) -> HashMap> { + let mut ops = HashMap::new(); + for (id, length) in columns { + let start = cursor.start; + let end = start + length; + *cursor = end..cursor.end; + ops.insert(*id, start..end); + } + ops +} + +fn decode_header(bytes: &[u8]) -> Result<(u8, amp::ChangeHash, Range), decoding::Error> { + let (chunktype, body) = decode_header_without_hash(bytes)?; + + let calculated_hash = Sha256::digest(&bytes[PREAMBLE_BYTES..]); + + let checksum = &bytes[4..8]; + if checksum != &calculated_hash[0..4] { + return Err(decoding::Error::InvalidChecksum { + found: checksum.try_into().unwrap(), + calculated: calculated_hash[0..4].try_into().unwrap(), + }); + } + + let hash = calculated_hash[..] 
+ .try_into() + .map_err(InvalidChangeError::from)?; + + Ok((chunktype, hash, body)) +} + +fn decode_header_without_hash(bytes: &[u8]) -> Result<(u8, Range), decoding::Error> { + if bytes.len() <= HEADER_BYTES { + return Err(decoding::Error::NotEnoughBytes); + } + + if bytes[0..4] != MAGIC_BYTES { + return Err(decoding::Error::WrongMagicBytes); + } + + let (val, len) = read_leb128(&mut &bytes[HEADER_BYTES..])?; + let body = (HEADER_BYTES + len)..(HEADER_BYTES + len + val); + if bytes.len() != body.end { + return Err(decoding::Error::WrongByteLength { + expected: body.end, + found: bytes.len(), + }); + } + + let chunktype = bytes[PREAMBLE_BYTES]; + + Ok((chunktype, body)) +} + +fn load_blocks(bytes: &[u8]) -> Result, AutomergeError> { + let mut changes = Vec::new(); + for slice in split_blocks(bytes)? { + decode_block(slice, &mut changes)?; + } + Ok(changes) +} + +fn split_blocks(bytes: &[u8]) -> Result, decoding::Error> { + // split off all valid blocks - ignore the rest if its corrupted or truncated + let mut blocks = Vec::new(); + let mut cursor = bytes; + while let Some(block) = pop_block(cursor)? { + blocks.push(&cursor[block.clone()]); + if cursor.len() <= block.end { + break; + } + cursor = &cursor[block.end..]; + } + Ok(blocks) +} + +fn pop_block(bytes: &[u8]) -> Result>, decoding::Error> { + if bytes.len() < 4 || bytes[0..4] != MAGIC_BYTES { + // not reporting error here - file got corrupted? + return Ok(None); + } + let (val, len) = read_leb128( + &mut bytes + .get(HEADER_BYTES..) + .ok_or(decoding::Error::NotEnoughBytes)?, + )?; + // val is arbitrary so it could overflow + let end = (HEADER_BYTES + len) + .checked_add(val) + .ok_or(decoding::Error::Overflow)?; + if end > bytes.len() { + // not reporting error here - file got truncated? 
+ return Ok(None); + } + Ok(Some(0..end)) +} + +fn decode_block(bytes: &[u8], changes: &mut Vec) -> Result<(), decoding::Error> { + match bytes[PREAMBLE_BYTES] { + BLOCK_TYPE_DOC => { + changes.extend(decode_document(bytes)?); + Ok(()) + } + BLOCK_TYPE_CHANGE | BLOCK_TYPE_DEFLATE => { + changes.push(Change::try_from(bytes.to_vec())?); + Ok(()) + } + found => Err(decoding::Error::WrongType { + expected_one_of: vec![BLOCK_TYPE_DOC, BLOCK_TYPE_CHANGE, BLOCK_TYPE_DEFLATE], + found, + }), + } +} + +fn decode_document(bytes: &[u8]) -> Result, decoding::Error> { + let (chunktype, _hash, mut cursor) = decode_header(bytes)?; + + // chunktype == 0 is a document, chunktype = 1 is a change + if chunktype > 0 { + return Err(decoding::Error::WrongType { + expected_one_of: vec![0], + found: chunktype, + }); + } + + let actors = decode_actors(bytes, &mut cursor, None)?; + + let heads = decode_hashes(bytes, &mut cursor)?; + + let changes_info = decode_column_info(bytes, &mut cursor, true)?; + let ops_info = decode_column_info(bytes, &mut cursor, true)?; + + let changes_data = decode_columns(&mut cursor, &changes_info); + let mut doc_changes = ChangeIterator::new(bytes, &changes_data).collect::>(); + let doc_changes_deps = DepsIterator::new(bytes, &changes_data); + + let doc_changes_len = doc_changes.len(); + + let ops_data = decode_columns(&mut cursor, &ops_info); + let doc_ops: Vec<_> = DocOpIterator::new(bytes, &actors, &ops_data).collect(); + + group_doc_change_and_doc_ops(&mut doc_changes, doc_ops, &actors)?; + + let uncompressed_changes = + doc_changes_to_uncompressed_changes(doc_changes.into_iter(), &actors); + + let changes = compress_doc_changes(uncompressed_changes, doc_changes_deps, doc_changes_len) + .ok_or(decoding::Error::NoDocChanges)?; + + let mut calculated_heads = HashSet::new(); + for change in &changes { + for dep in &change.deps { + calculated_heads.remove(dep); + } + calculated_heads.insert(change.hash); + } + + if calculated_heads != 
heads.into_iter().collect::>() { + return Err(decoding::Error::MismatchedHeads); + } + + Ok(changes) +} + +fn compress_doc_changes( + uncompressed_changes: impl Iterator, + doc_changes_deps: impl Iterator>, + num_changes: usize, +) -> Option> { + let mut changes: Vec = Vec::with_capacity(num_changes); + + // fill out the hashes as we go + for (deps, mut uncompressed_change) in doc_changes_deps.zip_eq(uncompressed_changes) { + for idx in deps { + uncompressed_change.deps.push(changes.get(idx)?.hash); + } + changes.push(uncompressed_change.into()); + } + + Some(changes) +} + +fn group_doc_change_and_doc_ops( + changes: &mut [DocChange], + mut ops: Vec, + actors: &[ActorId], +) -> Result<(), decoding::Error> { + let mut changes_by_actor: HashMap> = HashMap::new(); + + for (i, change) in changes.iter().enumerate() { + let actor_change_index = changes_by_actor.entry(change.actor).or_default(); + if change.seq != (actor_change_index.len() + 1) as u64 { + return Err(decoding::Error::ChangeDecompressFailed( + "Doc Seq Invalid".into(), + )); + } + if change.actor >= actors.len() { + return Err(decoding::Error::ChangeDecompressFailed( + "Doc Actor Invalid".into(), + )); + } + actor_change_index.push(i); + } + + let mut op_by_id = HashMap::new(); + ops.iter().enumerate().for_each(|(i, op)| { + op_by_id.insert((op.ctr, op.actor), i); + }); + + for i in 0..ops.len() { + let op = ops[i].clone(); // this is safe - avoid borrow checker issues + //let id = (op.ctr, op.actor); + //op_by_id.insert(id, i); + for succ in &op.succ { + if let Some(index) = op_by_id.get(succ) { + ops[*index].pred.push((op.ctr, op.actor)); + } else { + let key = if op.insert { + amp::OpId(op.ctr, actors[op.actor].clone()).into() + } else { + op.key.clone() + }; + let del = DocOp { + actor: succ.1, + ctr: succ.0, + action: OpType::Del, + obj: op.obj.clone(), + key, + succ: Vec::new(), + pred: vec![(op.ctr, op.actor)], + insert: false, + }; + op_by_id.insert(*succ, ops.len()); + ops.push(del); + } + } + } + 
+ for op in ops { + // binary search for our change + let actor_change_index = changes_by_actor.entry(op.actor).or_default(); + let mut left = 0; + let mut right = actor_change_index.len(); + while left < right { + let seq = (left + right) / 2; + if changes[actor_change_index[seq]].max_op < op.ctr { + left = seq + 1; + } else { + right = seq; + } + } + if left >= actor_change_index.len() { + return Err(decoding::Error::ChangeDecompressFailed( + "Doc MaxOp Invalid".into(), + )); + } + changes[actor_change_index[left]].ops.push(op); + } + + changes + .iter_mut() + .for_each(|change| change.ops.sort_unstable()); + + Ok(()) +} + +fn doc_changes_to_uncompressed_changes<'a>( + changes: impl Iterator + 'a, + actors: &'a [ActorId], +) -> impl Iterator + 'a { + changes.map(move |change| amp::Change { + // we've already confirmed that all change.actor's are valid + actor_id: actors[change.actor].clone(), + seq: change.seq, + time: change.time, + // SAFETY: this unwrap is safe as we always add 1 + start_op: NonZeroU64::new(change.max_op - change.ops.len() as u64 + 1).unwrap(), + hash: None, + message: change.message, + operations: change + .ops + .into_iter() + .map(|op| amp::Op { + action: op.action.clone(), + insert: op.insert, + key: op.key, + obj: op.obj, + // we've already confirmed that all op.actor's are valid + pred: pred_into(op.pred.into_iter(), actors), + }) + .collect(), + deps: Vec::new(), + extra_bytes: change.extra_bytes, + }) +} + +fn pred_into( + pred: impl Iterator, + actors: &[ActorId], +) -> amp::SortedVec { + pred.map(|(ctr, actor)| amp::OpId(ctr, actors[actor].clone())) + .collect() +} + +#[cfg(test)] +mod tests { + use crate::legacy as amp; + #[test] + fn mismatched_head_repro_one() { + let op_json = serde_json::json!({ + "ops": [ + { + "action": "del", + "obj": "1@1485eebc689d47efbf8b892e81653eb3", + "elemId": "3164@0dcdf83d9594477199f80ccd25e87053", + "pred": [ + "3164@0dcdf83d9594477199f80ccd25e87053" + ], + "insert": false + }, + ], + "actor": 
"e63cf5ed1f0a4fb28b2c5bc6793b9272", + "hash": "e7fd5c02c8fdd2cdc3071ce898a5839bf36229678af3b940f347da541d147ae2", + "seq": 1, + "startOp": 3179, + "time": 1634146652, + "message": null, + "deps": [ + "2603cded00f91e525507fc9e030e77f9253b239d90264ee343753efa99e3fec1" + ] + }); + + let change: amp::Change = serde_json::from_value(op_json).unwrap(); + let expected_hash: super::amp::ChangeHash = + "4dff4665d658a28bb6dcace8764eb35fa8e48e0a255e70b6b8cbf8e8456e5c50" + .parse() + .unwrap(); + let encoded: super::Change = change.into(); + assert_eq!(encoded.hash, expected_hash); + } +} diff --git a/automerge/src/clock.rs b/automerge/src/clock.rs new file mode 100644 index 00000000..d01c7748 --- /dev/null +++ b/automerge/src/clock.rs @@ -0,0 +1,52 @@ +use crate::types::OpId; +use fxhash::FxBuildHasher; +use std::cmp; +use std::collections::HashMap; + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct Clock(HashMap); + +impl Clock { + pub fn new() -> Self { + Clock(Default::default()) + } + + pub fn include(&mut self, key: usize, n: u64) { + self.0 + .entry(key) + .and_modify(|m| *m = cmp::max(n, *m)) + .or_insert(n); + } + + pub fn covers(&self, id: &OpId) -> bool { + if let Some(val) = self.0.get(&id.1) { + val >= &id.0 + } else { + false + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn covers() { + let mut clock = Clock::new(); + + clock.include(1, 20); + clock.include(2, 10); + + assert!(clock.covers(&OpId(10, 1))); + assert!(clock.covers(&OpId(20, 1))); + assert!(!clock.covers(&OpId(30, 1))); + + assert!(clock.covers(&OpId(5, 2))); + assert!(clock.covers(&OpId(10, 2))); + assert!(!clock.covers(&OpId(15, 2))); + + assert!(!clock.covers(&OpId(1, 3))); + assert!(!clock.covers(&OpId(100, 3))); + } +} diff --git a/automerge/src/columnar.rs b/automerge/src/columnar.rs new file mode 100644 index 00000000..786dde02 --- /dev/null +++ b/automerge/src/columnar.rs @@ -0,0 +1,1384 @@ +#![allow(dead_code)] +#![allow(unused_variables)] +use core::fmt::Debug; 
+use std::{ + borrow::Cow, + cmp::Ordering, + collections::HashMap, + io, + io::{Read, Write}, + ops::Range, + str, +}; + +use crate::{ + types::{ActorId, ElemId, Key, ObjId, ObjType, Op, OpId, OpType, ScalarValue}, + Change, +}; + +use crate::legacy as amp; +use amp::SortedVec; +use flate2::bufread::DeflateDecoder; +use smol_str::SmolStr; +use tracing::instrument; + +use crate::indexed_cache::IndexedCache; +use crate::{ + decoding::{BooleanDecoder, Decodable, Decoder, DeltaDecoder, RleDecoder}, + encoding::{BooleanEncoder, ColData, DeltaEncoder, Encodable, RleEncoder}, +}; + +impl Encodable for Action { + fn encode(&self, buf: &mut R) -> io::Result { + (*self as u32).encode(buf) + } +} + +impl Encodable for [ActorId] { + fn encode(&self, buf: &mut R) -> io::Result { + let mut len = self.len().encode(buf)?; + for i in self { + len += i.to_bytes().encode(buf)?; + } + Ok(len) + } +} + +fn actor_index(actor: &ActorId, actors: &[ActorId]) -> usize { + actors.iter().position(|a| a == actor).unwrap() +} + +impl Encodable for ActorId { + fn encode_with_actors(&self, buf: &mut R, actors: &[ActorId]) -> io::Result { + actor_index(self, actors).encode(buf) + } + + fn encode(&self, _buf: &mut R) -> io::Result { + // we instead encode actors as their position on a sequence + Ok(0) + } +} + +impl Encodable for Vec { + fn encode(&self, buf: &mut R) -> io::Result { + self.as_slice().encode(buf) + } +} + +impl Encodable for &[u8] { + fn encode(&self, buf: &mut R) -> io::Result { + let head = self.len().encode(buf)?; + buf.write_all(self)?; + Ok(head + self.len()) + } +} + +pub(crate) struct OperationIterator<'a> { + pub(crate) action: RleDecoder<'a, Action>, + pub(crate) objs: ObjIterator<'a>, + pub(crate) keys: KeyIterator<'a>, + pub(crate) insert: BooleanDecoder<'a>, + pub(crate) value: ValueIterator<'a>, + pub(crate) pred: PredIterator<'a>, +} + +impl<'a> OperationIterator<'a> { + pub(crate) fn new( + bytes: &'a [u8], + actors: &'a [ActorId], + ops: &'a HashMap>, + ) -> 
OperationIterator<'a> { + OperationIterator { + objs: ObjIterator { + actors, + actor: col_iter(bytes, ops, COL_OBJ_ACTOR), + ctr: col_iter(bytes, ops, COL_OBJ_CTR), + }, + keys: KeyIterator { + actors, + actor: col_iter(bytes, ops, COL_KEY_ACTOR), + ctr: col_iter(bytes, ops, COL_KEY_CTR), + str: col_iter(bytes, ops, COL_KEY_STR), + }, + value: ValueIterator { + val_len: col_iter(bytes, ops, COL_VAL_LEN), + val_raw: col_iter(bytes, ops, COL_VAL_RAW), + actors, + actor: col_iter(bytes, ops, COL_REF_ACTOR), + ctr: col_iter(bytes, ops, COL_REF_CTR), + }, + pred: PredIterator { + actors, + pred_num: col_iter(bytes, ops, COL_PRED_NUM), + pred_actor: col_iter(bytes, ops, COL_PRED_ACTOR), + pred_ctr: col_iter(bytes, ops, COL_PRED_CTR), + }, + insert: col_iter(bytes, ops, COL_INSERT), + action: col_iter(bytes, ops, COL_ACTION), + } + } +} + +impl<'a> Iterator for OperationIterator<'a> { + type Item = amp::Op; + + fn next(&mut self) -> Option { + let action = self.action.next()??; + let insert = self.insert.next()?; + let obj = self.objs.next()?; + let key = self.keys.next()?; + let pred = self.pred.next()?; + let value = self.value.next()?; + let action = match action { + Action::Set => OpType::Set(value), + Action::MakeList => OpType::Make(ObjType::List), + Action::MakeText => OpType::Make(ObjType::Text), + Action::MakeMap => OpType::Make(ObjType::Map), + Action::MakeTable => OpType::Make(ObjType::Table), + Action::Del => OpType::Del, + Action::Inc => OpType::Inc(value.to_i64()?), + Action::MarkBegin => { + // mark has 3 things in the val column + let name = value.as_string()?; + let expand = self.value.next()?.to_bool()?; + let value = self.value.next()?; + OpType::mark(name, expand, value) + } + Action::MarkEnd => OpType::MarkEnd(value.to_bool()?), + Action::Unused => panic!("invalid action"), + }; + Some(amp::Op { + action, + obj, + key, + pred, + insert, + }) + } +} + +pub(crate) struct DocOpIterator<'a> { + pub(crate) actor: RleDecoder<'a, usize>, + pub(crate) ctr: 
DeltaDecoder<'a>, + pub(crate) action: RleDecoder<'a, Action>, + pub(crate) objs: ObjIterator<'a>, + pub(crate) keys: KeyIterator<'a>, + pub(crate) insert: BooleanDecoder<'a>, + pub(crate) value: ValueIterator<'a>, + pub(crate) succ: SuccIterator<'a>, +} + +impl<'a> Iterator for DocOpIterator<'a> { + type Item = DocOp; + fn next(&mut self) -> Option { + let action = self.action.next()??; + let actor = self.actor.next()??; + let ctr = self.ctr.next()??; + let insert = self.insert.next()?; + let obj = self.objs.next()?; + let key = self.keys.next()?; + let succ = self.succ.next()?; + let value = self.value.next()?; + let action = match action { + Action::Set => OpType::Set(value), + Action::MakeList => OpType::Make(ObjType::List), + Action::MakeText => OpType::Make(ObjType::Text), + Action::MakeMap => OpType::Make(ObjType::Map), + Action::MakeTable => OpType::Make(ObjType::Table), + Action::Del => OpType::Del, + Action::Inc => OpType::Inc(value.to_i64()?), + Action::MarkBegin => { + // mark has 3 things in the val column + let name = value.as_string()?; + let expand = self.value.next()?.to_bool()?; + let value = self.value.next()?; + OpType::mark(name, expand, value) + } + Action::MarkEnd => OpType::MarkEnd(value.to_bool()?), + Action::Unused => panic!("invalid action"), + }; + Some(DocOp { + actor, + ctr, + action, + obj, + key, + succ, + pred: Vec::new(), + insert, + }) + } +} + +impl<'a> DocOpIterator<'a> { + pub(crate) fn new( + bytes: &'a [u8], + actors: &'a [ActorId], + ops: &'a HashMap>, + ) -> DocOpIterator<'a> { + DocOpIterator { + actor: col_iter(bytes, ops, COL_ID_ACTOR), + ctr: col_iter(bytes, ops, COL_ID_CTR), + objs: ObjIterator { + actors, + actor: col_iter(bytes, ops, COL_OBJ_ACTOR), + ctr: col_iter(bytes, ops, COL_OBJ_CTR), + }, + keys: KeyIterator { + actors, + actor: col_iter(bytes, ops, COL_KEY_ACTOR), + ctr: col_iter(bytes, ops, COL_KEY_CTR), + str: col_iter(bytes, ops, COL_KEY_STR), + }, + value: ValueIterator { + val_len: col_iter(bytes, ops, 
COL_VAL_LEN), + val_raw: col_iter(bytes, ops, COL_VAL_RAW), + actors, + actor: col_iter(bytes, ops, COL_REF_ACTOR), + ctr: col_iter(bytes, ops, COL_REF_CTR), + }, + succ: SuccIterator { + succ_num: col_iter(bytes, ops, COL_SUCC_NUM), + succ_actor: col_iter(bytes, ops, COL_SUCC_ACTOR), + succ_ctr: col_iter(bytes, ops, COL_SUCC_CTR), + }, + insert: col_iter(bytes, ops, COL_INSERT), + action: col_iter(bytes, ops, COL_ACTION), + } + } +} + +pub(crate) struct ChangeIterator<'a> { + pub(crate) actor: RleDecoder<'a, usize>, + pub(crate) seq: DeltaDecoder<'a>, + pub(crate) max_op: DeltaDecoder<'a>, + pub(crate) time: DeltaDecoder<'a>, + pub(crate) message: RleDecoder<'a, String>, + pub(crate) extra: ExtraIterator<'a>, +} + +impl<'a> ChangeIterator<'a> { + pub(crate) fn new(bytes: &'a [u8], ops: &'a HashMap>) -> ChangeIterator<'a> { + ChangeIterator { + actor: col_iter(bytes, ops, DOC_ACTOR), + seq: col_iter(bytes, ops, DOC_SEQ), + max_op: col_iter(bytes, ops, DOC_MAX_OP), + time: col_iter(bytes, ops, DOC_TIME), + message: col_iter(bytes, ops, DOC_MESSAGE), + extra: ExtraIterator { + len: col_iter(bytes, ops, DOC_EXTRA_LEN), + extra: col_iter(bytes, ops, DOC_EXTRA_RAW), + }, + } + } +} + +impl<'a> Iterator for ChangeIterator<'a> { + type Item = DocChange; + fn next(&mut self) -> Option { + let actor = self.actor.next()??; + let seq = self.seq.next()??; + let max_op = self.max_op.next()??; + let time = self.time.next()?? 
as i64; + let message = self.message.next()?; + let extra_bytes = self.extra.next().unwrap_or_default(); + Some(DocChange { + actor, + seq, + max_op, + time, + message, + extra_bytes, + ops: Vec::new(), + }) + } +} + +pub(crate) struct ObjIterator<'a> { + //actors: &'a Vec<&'a [u8]>, + pub(crate) actors: &'a [ActorId], + pub(crate) actor: RleDecoder<'a, usize>, + pub(crate) ctr: RleDecoder<'a, u64>, +} + +pub(crate) struct DepsIterator<'a> { + pub(crate) num: RleDecoder<'a, usize>, + pub(crate) dep: DeltaDecoder<'a>, +} + +impl<'a> DepsIterator<'a> { + pub fn new(bytes: &'a [u8], ops: &'a HashMap>) -> Self { + Self { + num: col_iter(bytes, ops, DOC_DEPS_NUM), + dep: col_iter(bytes, ops, DOC_DEPS_INDEX), + } + } +} + +pub(crate) struct ExtraIterator<'a> { + pub(crate) len: RleDecoder<'a, usize>, + pub(crate) extra: Decoder<'a>, +} + +pub(crate) struct PredIterator<'a> { + pub(crate) actors: &'a [ActorId], + pub(crate) pred_num: RleDecoder<'a, usize>, + pub(crate) pred_actor: RleDecoder<'a, usize>, + pub(crate) pred_ctr: DeltaDecoder<'a>, +} + +pub(crate) struct SuccIterator<'a> { + pub(crate) succ_num: RleDecoder<'a, usize>, + pub(crate) succ_actor: RleDecoder<'a, usize>, + pub(crate) succ_ctr: DeltaDecoder<'a>, +} + +pub(crate) struct KeyIterator<'a> { + pub(crate) actors: &'a [ActorId], + pub(crate) actor: RleDecoder<'a, usize>, + pub(crate) ctr: DeltaDecoder<'a>, + pub(crate) str: RleDecoder<'a, SmolStr>, +} + +pub(crate) struct ValueIterator<'a> { + pub(crate) actors: &'a [ActorId], + pub(crate) val_len: RleDecoder<'a, usize>, + pub(crate) val_raw: Decoder<'a>, + pub(crate) actor: RleDecoder<'a, usize>, + pub(crate) ctr: RleDecoder<'a, u64>, +} + +impl<'a> Iterator for DepsIterator<'a> { + type Item = Vec; + fn next(&mut self) -> Option> { + let num = self.num.next()??; + // I bet there's something simple like `self.dep.take(num).collect()` + let mut p = Vec::with_capacity(num); + for _ in 0..num { + let dep = self.dep.next()??; + p.push(dep as usize); + } + 
Some(p) + } +} + +impl<'a> Iterator for ExtraIterator<'a> { + type Item = Vec; + fn next(&mut self) -> Option> { + let v = self.len.next()??; + // if v % 16 == VALUE_TYPE_BYTES => { // this should be bytes + let len = v >> 4; + self.extra.read_bytes(len).ok().map(|s| s.to_vec()) + } +} + +impl<'a> Iterator for PredIterator<'a> { + type Item = SortedVec; + fn next(&mut self) -> Option> { + let num = self.pred_num.next()??; + let mut p = Vec::with_capacity(num); + for _ in 0..num { + let actor = self.pred_actor.next()??; + let ctr = self.pred_ctr.next()??; + let actor_id = self.actors.get(actor)?.clone(); + let op_id = amp::OpId::new(ctr, &actor_id); + p.push(op_id); + } + Some(SortedVec::from(p)) + } +} + +impl<'a> Iterator for SuccIterator<'a> { + type Item = Vec<(u64, usize)>; + fn next(&mut self) -> Option> { + let num = self.succ_num.next()??; + let mut p = Vec::with_capacity(num); + for _ in 0..num { + let actor = self.succ_actor.next()??; + let ctr = self.succ_ctr.next()??; + p.push((ctr, actor)); + } + Some(p) + } +} + +impl<'a> Iterator for ValueIterator<'a> { + type Item = ScalarValue; + fn next(&mut self) -> Option { + let val_type = self.val_len.next()??; + let actor = self.actor.next()?; + let ctr = self.ctr.next()?; + match val_type { + VALUE_TYPE_NULL => Some(ScalarValue::Null), + VALUE_TYPE_FALSE => Some(ScalarValue::Boolean(false)), + VALUE_TYPE_TRUE => Some(ScalarValue::Boolean(true)), + v if v % 16 == VALUE_TYPE_COUNTER => { + let len = v >> 4; + let val = self.val_raw.read().ok()?; + if len != self.val_raw.last_read { + return None; + } + Some(ScalarValue::counter(val)) + } + v if v % 16 == VALUE_TYPE_TIMESTAMP => { + let len = v >> 4; + let val = self.val_raw.read().ok()?; + if len != self.val_raw.last_read { + return None; + } + Some(ScalarValue::Timestamp(val)) + } + v if v % 16 == VALUE_TYPE_LEB128_UINT => { + let len = v >> 4; + let val = self.val_raw.read().ok()?; + if len != self.val_raw.last_read { + return None; + } + 
Some(ScalarValue::Uint(val)) + } + v if v % 16 == VALUE_TYPE_LEB128_INT => { + let len = v >> 4; + let val = self.val_raw.read().ok()?; + if len != self.val_raw.last_read { + return None; + } + Some(ScalarValue::Int(val)) + } + v if v % 16 == VALUE_TYPE_UTF8 => { + let len = v >> 4; + let data = self.val_raw.read_bytes(len).ok()?; + let s = str::from_utf8(data).ok()?; + Some(ScalarValue::Str(SmolStr::new(s))) + } + v if v % 16 == VALUE_TYPE_BYTES => { + let len = v >> 4; + let data = self.val_raw.read_bytes(len).ok()?; + Some(ScalarValue::Bytes(data.to_vec())) + } + v if v % 16 >= VALUE_TYPE_MIN_UNKNOWN && v % 16 <= VALUE_TYPE_MAX_UNKNOWN => { + let len = v >> 4; + let _data = self.val_raw.read_bytes(len).ok()?; + unimplemented!() + //Some((amp::Value::Bytes(data)) + } + v if v % 16 == VALUE_TYPE_IEEE754 => { + let len = v >> 4; + if len == 8 { + // confirm only 8 bytes read + let num = self.val_raw.read().ok()?; + Some(ScalarValue::F64(num)) + } else { + // bad size of float + None + } + } + _ => { + // unknown command + None + } + } + } +} + +impl<'a> Iterator for KeyIterator<'a> { + type Item = amp::Key; + fn next(&mut self) -> Option { + match (self.actor.next()?, self.ctr.next()?, self.str.next()?) { + (None, None, Some(string)) => Some(amp::Key::Map(string)), + (None, Some(0), None) => Some(amp::Key::head()), + (Some(actor), Some(ctr), None) => { + let actor_id = self.actors.get(actor)?; + Some(amp::OpId::new(ctr, actor_id).into()) + } + _ => None, + } + } +} + +impl<'a> Iterator for ObjIterator<'a> { + type Item = amp::ObjectId; + fn next(&mut self) -> Option { + if let (Some(actor), Some(ctr)) = (self.actor.next()?, self.ctr.next()?) 
{ + let actor_id = self.actors.get(actor)?; + Some(amp::ObjectId::Id(amp::OpId::new(ctr, actor_id))) + } else { + Some(amp::ObjectId::Root) + } + } +} + +#[derive(PartialEq, Debug, Clone)] +pub(crate) struct DocChange { + pub actor: usize, + pub seq: u64, + pub max_op: u64, + pub time: i64, + pub message: Option, + pub extra_bytes: Vec, + pub ops: Vec, +} + +#[derive(Debug, Clone)] +pub(crate) struct DocOp { + pub actor: usize, + pub ctr: u64, + pub action: OpType, + pub obj: amp::ObjectId, + pub key: amp::Key, + pub succ: Vec<(u64, usize)>, + pub pred: Vec<(u64, usize)>, + pub insert: bool, +} + +impl Ord for DocOp { + fn cmp(&self, other: &Self) -> Ordering { + self.ctr.cmp(&other.ctr) + } +} + +impl PartialOrd for DocOp { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl PartialEq for DocOp { + fn eq(&self, other: &Self) -> bool { + self.ctr == other.ctr + } +} + +impl Eq for DocOp {} + +struct ValEncoder { + len: RleEncoder, + ref_actor: RleEncoder, + ref_counter: RleEncoder, + raw: Vec, +} + +impl ValEncoder { + const COLUMNS: usize = 4; + + fn new() -> ValEncoder { + ValEncoder { + len: RleEncoder::new(), + raw: Vec::new(), + ref_actor: RleEncoder::new(), + ref_counter: RleEncoder::new(), + } + } + + fn append_value(&mut self, val: &ScalarValue, actors: &[usize]) { + // It may seem weird to have two consecutive matches on the same value. 
The reason is so + // that we don't have to repeat the `append_null` calls on ref_actor and ref_counter in + // every arm of the next match + self.ref_actor.append_null(); + self.ref_counter.append_null(); + match val { + ScalarValue::Null => self.len.append_value(VALUE_TYPE_NULL), + ScalarValue::Boolean(true) => self.len.append_value(VALUE_TYPE_TRUE), + ScalarValue::Boolean(false) => self.len.append_value(VALUE_TYPE_FALSE), + ScalarValue::Bytes(bytes) => { + let len = bytes.len(); + self.raw.extend(bytes); + self.len.append_value(len << 4 | VALUE_TYPE_BYTES); + } + ScalarValue::Str(s) => { + let bytes = s.as_bytes(); + let len = bytes.len(); + self.raw.extend(bytes); + self.len.append_value(len << 4 | VALUE_TYPE_UTF8); + } + ScalarValue::Counter(count) => { + let len = count.start.encode(&mut self.raw).unwrap(); + self.len.append_value(len << 4 | VALUE_TYPE_COUNTER); + } + ScalarValue::Timestamp(time) => { + let len = time.encode(&mut self.raw).unwrap(); + self.len.append_value(len << 4 | VALUE_TYPE_TIMESTAMP); + } + ScalarValue::Int(n) => { + let len = n.encode(&mut self.raw).unwrap(); + self.len.append_value(len << 4 | VALUE_TYPE_LEB128_INT); + } + ScalarValue::Uint(n) => { + let len = n.encode(&mut self.raw).unwrap(); + self.len.append_value(len << 4 | VALUE_TYPE_LEB128_UINT); + } + ScalarValue::F64(n) => { + let len = (*n).encode(&mut self.raw).unwrap(); + self.len.append_value(len << 4 | VALUE_TYPE_IEEE754); + } + } + } + + fn append_value2(&mut self, val: &ScalarValue, actors: &[ActorId]) { + // It may seem weird to have two consecutive matches on the same value. 
The reason is so + // that we don't have to repeat the `append_null` calls on ref_actor and ref_counter in + // every arm of the next match + self.ref_actor.append_null(); + self.ref_counter.append_null(); + match val { + ScalarValue::Null => self.len.append_value(VALUE_TYPE_NULL), + ScalarValue::Boolean(true) => self.len.append_value(VALUE_TYPE_TRUE), + ScalarValue::Boolean(false) => self.len.append_value(VALUE_TYPE_FALSE), + ScalarValue::Bytes(bytes) => { + let len = bytes.len(); + self.raw.extend(bytes); + self.len.append_value(len << 4 | VALUE_TYPE_BYTES); + } + ScalarValue::Str(s) => { + let bytes = s.as_bytes(); + let len = bytes.len(); + self.raw.extend(bytes); + self.len.append_value(len << 4 | VALUE_TYPE_UTF8); + } + ScalarValue::Counter(c) => { + let len = c.start.encode(&mut self.raw).unwrap(); + self.len.append_value(len << 4 | VALUE_TYPE_COUNTER); + } + ScalarValue::Timestamp(time) => { + let len = time.encode(&mut self.raw).unwrap(); + self.len.append_value(len << 4 | VALUE_TYPE_TIMESTAMP); + } + ScalarValue::Int(n) => { + let len = n.encode(&mut self.raw).unwrap(); + self.len.append_value(len << 4 | VALUE_TYPE_LEB128_INT); + } + ScalarValue::Uint(n) => { + let len = n.encode(&mut self.raw).unwrap(); + self.len.append_value(len << 4 | VALUE_TYPE_LEB128_UINT); + } + ScalarValue::F64(n) => { + let len = (*n).encode(&mut self.raw).unwrap(); + self.len.append_value(len << 4 | VALUE_TYPE_IEEE754); + } + } + } + + fn append_null(&mut self) { + self.ref_counter.append_null(); + self.ref_actor.append_null(); + self.len.append_value(VALUE_TYPE_NULL); + } + + fn finish(self) -> Vec { + vec![ + self.ref_counter.finish(COL_REF_CTR), + self.ref_actor.finish(COL_REF_ACTOR), + self.len.finish(COL_VAL_LEN), + ColData::new(COL_VAL_RAW, self.raw), + ] + } +} + +struct KeyEncoder { + actor: RleEncoder, + ctr: DeltaEncoder, + str: RleEncoder, +} + +impl KeyEncoder { + const COLUMNS: usize = 3; + + fn new() -> KeyEncoder { + KeyEncoder { + actor: RleEncoder::new(), + ctr: 
DeltaEncoder::new(), + str: RleEncoder::new(), + } + } + + fn append(&mut self, key: Key, actors: &[usize], props: &[String]) { + match key { + Key::Map(i) => { + self.actor.append_null(); + self.ctr.append_null(); + self.str.append_value(props[i].clone()); + } + Key::Seq(ElemId(OpId(0, 0))) => { + // HEAD + self.actor.append_null(); + self.ctr.append_value(0); + self.str.append_null(); + } + Key::Seq(ElemId(OpId(ctr, actor))) => { + self.actor.append_value(actors[actor]); + self.ctr.append_value(ctr); + self.str.append_null(); + } + } + } + + fn finish(self) -> Vec { + vec![ + self.actor.finish(COL_KEY_ACTOR), + self.ctr.finish(COL_KEY_CTR), + self.str.finish(COL_KEY_STR), + ] + } +} + +struct KeyEncoderOld { + actor: RleEncoder, + ctr: DeltaEncoder, + str: RleEncoder, +} + +impl KeyEncoderOld { + const COLUMNS: usize = 3; + + fn new() -> KeyEncoderOld { + KeyEncoderOld { + actor: RleEncoder::new(), + ctr: DeltaEncoder::new(), + str: RleEncoder::new(), + } + } + + fn append(&mut self, key: amp::Key, actors: &[ActorId]) { + match key { + amp::Key::Map(s) => { + self.actor.append_null(); + self.ctr.append_null(); + self.str.append_value(s); + } + amp::Key::Seq(amp::ElementId::Head) => { + self.actor.append_null(); + self.ctr.append_value(0); + self.str.append_null(); + } + amp::Key::Seq(amp::ElementId::Id(amp::OpId(ctr, actor))) => { + self.actor.append_value(actor_index(&actor, actors)); + self.ctr.append_value(ctr); + self.str.append_null(); + } + } + } + + fn finish(self) -> Vec { + vec![ + self.actor.finish(COL_KEY_ACTOR), + self.ctr.finish(COL_KEY_CTR), + self.str.finish(COL_KEY_STR), + ] + } +} + +struct SuccEncoder { + num: RleEncoder, + actor: RleEncoder, + ctr: DeltaEncoder, +} + +impl SuccEncoder { + fn new() -> SuccEncoder { + SuccEncoder { + num: RleEncoder::new(), + actor: RleEncoder::new(), + ctr: DeltaEncoder::new(), + } + } + + fn append(&mut self, succ: &[OpId], actors: &[usize]) { + self.num.append_value(succ.len()); + for s in succ.iter() { + 
self.ctr.append_value(s.0); + self.actor.append_value(actors[s.1]); + } + } + + fn append_old(&mut self, succ: &[(u64, usize)]) { + self.num.append_value(succ.len()); + for s in succ.iter() { + self.ctr.append_value(s.0); + self.actor.append_value(s.1); + } + } + + fn finish(self) -> Vec { + vec![ + self.num.finish(COL_SUCC_NUM), + self.actor.finish(COL_SUCC_ACTOR), + self.ctr.finish(COL_SUCC_CTR), + ] + } +} + +struct PredEncoder { + num: RleEncoder, + actor: RleEncoder, + ctr: DeltaEncoder, +} + +impl PredEncoder { + const COLUMNS: usize = 3; + + fn new() -> PredEncoder { + PredEncoder { + num: RleEncoder::new(), + actor: RleEncoder::new(), + ctr: DeltaEncoder::new(), + } + } + + fn append(&mut self, pred: &SortedVec, actors: &[ActorId]) { + self.num.append_value(pred.len()); + for p in pred.iter() { + self.ctr.append_value(p.0); + self.actor.append_value(actor_index(&p.1, actors)); + } + } + + fn finish(self) -> Vec { + vec![ + self.num.finish(COL_PRED_NUM), + self.actor.finish(COL_PRED_ACTOR), + self.ctr.finish(COL_PRED_CTR), + ] + } +} + +struct ObjEncoder { + actor: RleEncoder, + ctr: RleEncoder, +} + +impl ObjEncoder { + const COLUMNS: usize = 2; + + fn new() -> ObjEncoder { + ObjEncoder { + actor: RleEncoder::new(), + ctr: RleEncoder::new(), + } + } + + fn append(&mut self, obj: &ObjId, actors: &[usize]) { + match obj.0 { + OpId(ctr, _) if ctr == 0 => { + self.actor.append_null(); + self.ctr.append_null(); + } + OpId(ctr, actor) => { + self.actor.append_value(actors[actor]); + self.ctr.append_value(ctr); + } + } + } + + fn finish(self) -> Vec { + vec![ + self.actor.finish(COL_OBJ_ACTOR), + self.ctr.finish(COL_OBJ_CTR), + ] + } +} + +struct ObjEncoderOld { + actor: RleEncoder, + ctr: RleEncoder, +} + +impl ObjEncoderOld { + const COLUMNS: usize = 2; + + fn new() -> ObjEncoderOld { + ObjEncoderOld { + actor: RleEncoder::new(), + ctr: RleEncoder::new(), + } + } + + fn append(&mut self, obj: &::ObjectId, actors: &[ActorId]) { + match obj { + amp::ObjectId::Root 
=> { + self.actor.append_null(); + self.ctr.append_null(); + } + amp::ObjectId::Id(amp::OpId(ctr, actor)) => { + self.actor.append_value(actor_index(actor, actors)); + self.ctr.append_value(*ctr); + } + } + } + + fn finish(self) -> Vec { + vec![ + self.actor.finish(COL_OBJ_ACTOR), + self.ctr.finish(COL_OBJ_CTR), + ] + } +} + +pub(crate) struct ChangeEncoder { + actor: RleEncoder, + seq: DeltaEncoder, + max_op: DeltaEncoder, + time: DeltaEncoder, + message: RleEncoder>, + deps_num: RleEncoder, + deps_index: DeltaEncoder, + extra_len: RleEncoder, + extra_raw: Vec, +} + +impl ChangeEncoder { + #[instrument(level = "debug", skip(changes, actors))] + pub fn encode_changes<'a, 'b, I>( + changes: I, + actors: &'a IndexedCache, + ) -> (Vec, Vec) + where + I: IntoIterator, + { + let mut e = Self::new(); + e.encode(changes, actors); + e.finish() + } + + fn new() -> ChangeEncoder { + ChangeEncoder { + actor: RleEncoder::new(), + seq: DeltaEncoder::new(), + max_op: DeltaEncoder::new(), + time: DeltaEncoder::new(), + message: RleEncoder::new(), + deps_num: RleEncoder::new(), + deps_index: DeltaEncoder::new(), + extra_len: RleEncoder::new(), + extra_raw: Vec::new(), + } + } + + fn encode<'a, I>(&mut self, changes: I, actors: &IndexedCache) + where + I: IntoIterator, + { + let mut index_by_hash: HashMap = HashMap::new(); + for (index, change) in changes.into_iter().enumerate() { + index_by_hash.insert(change.hash, index); + self.actor + .append_value(actors.lookup(change.actor_id()).unwrap()); //actors.iter().position(|a| a == &change.actor_id).unwrap()); + self.seq.append_value(change.seq); + // FIXME iterops.count is crazy slow + self.max_op + .append_value(change.start_op.get() + change.iter_ops().count() as u64 - 1); + self.time.append_value(change.time as u64); + self.message.append_value(change.message()); + self.deps_num.append_value(change.deps.len()); + for dep in &change.deps { + if let Some(dep_index) = index_by_hash.get(dep) { + self.deps_index.append_value(*dep_index 
as u64); + } else { + // FIXME This relies on the changes being in causal order, which they may not + // be, we could probably do something cleverer like accumulate the values to + // write and the dependency tree in an intermediate value, then write it to the + // encoder in a second pass over the intermediates + panic!("Missing dependency for hash: {:?}", dep); + } + } + self.extra_len + .append_value(change.extra_bytes().len() << 4 | VALUE_TYPE_BYTES); + self.extra_raw.extend(change.extra_bytes()); + } + } + + fn finish(self) -> (Vec, Vec) { + let mut coldata = vec![ + self.actor.finish(DOC_ACTOR), + self.seq.finish(DOC_SEQ), + self.max_op.finish(DOC_MAX_OP), + self.time.finish(DOC_TIME), + self.message.finish(DOC_MESSAGE), + self.deps_num.finish(DOC_DEPS_NUM), + self.deps_index.finish(DOC_DEPS_INDEX), + self.extra_len.finish(DOC_EXTRA_LEN), + ColData::new(DOC_EXTRA_RAW, self.extra_raw), + ]; + coldata.sort_unstable_by(|a, b| a.col.cmp(&b.col)); + + let mut data = Vec::new(); + let mut info = Vec::new(); + coldata + .iter() + .filter(|&d| !d.data.is_empty()) + .count() + .encode(&mut info) + .ok(); + for d in &mut coldata { + d.deflate(); + d.encode_col_len(&mut info).ok(); + } + for d in &coldata { + data.write_all(d.data.as_slice()).ok(); + } + (data, info) + } +} + +pub(crate) struct DocOpEncoder { + actor: RleEncoder, + ctr: DeltaEncoder, + obj: ObjEncoder, + key: KeyEncoder, + insert: BooleanEncoder, + action: RleEncoder, + val: ValEncoder, + succ: SuccEncoder, +} + +impl DocOpEncoder { + #[instrument(level = "debug", skip(ops, actors))] + pub(crate) fn encode_doc_ops<'a, 'b, 'c, I>( + ops: I, + actors: &'a [usize], + props: &'b [String], + ) -> (Vec, Vec) + where + I: IntoIterator, + { + let mut e = Self::new(); + e.encode(ops, actors, props); + e.finish() + } + + fn new() -> DocOpEncoder { + DocOpEncoder { + actor: RleEncoder::new(), + ctr: DeltaEncoder::new(), + obj: ObjEncoder::new(), + key: KeyEncoder::new(), + insert: BooleanEncoder::new(), + action: 
RleEncoder::new(), + val: ValEncoder::new(), + succ: SuccEncoder::new(), + } + } + + fn encode<'a, I>(&mut self, ops: I, actors: &[usize], props: &[String]) + where + I: IntoIterator, + { + for (obj, op) in ops { + self.actor.append_value(actors[op.id.actor()]); + self.ctr.append_value(op.id.counter()); + self.obj.append(obj, actors); + self.key.append(op.key, actors, props); + self.insert.append(op.insert); + self.succ.append(&op.succ, actors); + let action = match &op.action { + amp::OpType::Set(value) => { + self.val.append_value(value, actors); + Action::Set + } + amp::OpType::Inc(val) => { + self.val.append_value(&ScalarValue::Int(*val), actors); + Action::Inc + } + amp::OpType::Del => { + self.val.append_null(); + Action::Del + } + amp::OpType::MarkBegin(m) => { + self.val.append_value(&m.name.clone().into(), actors); + self.val.append_value(&m.expand.into(), actors); + self.val.append_value(&m.value.clone(), actors); + Action::MarkBegin + } + amp::OpType::MarkEnd(s) => { + self.val.append_value(&(*s).into(), actors); + Action::MarkEnd + } + amp::OpType::Make(kind) => { + self.val.append_null(); + match kind { + ObjType::Map => Action::MakeMap, + ObjType::Table => Action::MakeTable, + ObjType::List => Action::MakeList, + ObjType::Text => Action::MakeText, + } + } + }; + self.action.append_value(action); + } + } + + fn finish(self) -> (Vec, Vec) { + let mut coldata = vec![ + self.actor.finish(COL_ID_ACTOR), + self.ctr.finish(COL_ID_CTR), + self.insert.finish(COL_INSERT), + self.action.finish(COL_ACTION), + ]; + coldata.extend(self.obj.finish()); + coldata.extend(self.key.finish()); + coldata.extend(self.val.finish()); + coldata.extend(self.succ.finish()); + coldata.sort_unstable_by(|a, b| a.col.cmp(&b.col)); + + let mut info = Vec::new(); + let mut data = Vec::new(); + coldata + .iter() + .filter(|&d| !d.data.is_empty()) + .count() + .encode(&mut info) + .ok(); + for d in &mut coldata { + d.deflate(); + d.encode_col_len(&mut info).ok(); + } + for d in &coldata 
{ + data.write_all(d.data.as_slice()).ok(); + } + (data, info) + } +} + +//pub(crate) encode_cols(a) -> (Vec, HashMap>) { } + +pub(crate) struct ColumnEncoder { + obj: ObjEncoderOld, + key: KeyEncoderOld, + insert: BooleanEncoder, + action: RleEncoder, + val: ValEncoder, + pred: PredEncoder, +} + +impl ColumnEncoder { + pub fn encode_ops<'a, I>(ops: I, actors: &[ActorId]) -> (Vec, HashMap>) + where + I: IntoIterator, + { + let mut e = Self::new(); + e.encode(ops, actors); + e.finish() + } + + fn new() -> ColumnEncoder { + ColumnEncoder { + obj: ObjEncoderOld::new(), + key: KeyEncoderOld::new(), + insert: BooleanEncoder::new(), + action: RleEncoder::new(), + val: ValEncoder::new(), + pred: PredEncoder::new(), + } + } + + fn encode<'a, 'b, I>(&'a mut self, ops: I, actors: &[ActorId]) + where + I: IntoIterator, + { + for op in ops { + self.append(op, actors); + } + } + + fn append(&mut self, op: &::Op, actors: &[ActorId]) { + self.obj.append(&op.obj, actors); + self.key.append(op.key.clone(), actors); + self.insert.append(op.insert); + + self.pred.append(&op.pred, actors); + let action = match &op.action { + OpType::Set(value) => { + self.val.append_value2(value, actors); + Action::Set + } + OpType::Inc(val) => { + self.val.append_value2(&ScalarValue::Int(*val), actors); + Action::Inc + } + OpType::Del => { + self.val.append_null(); + Action::Del + } + OpType::MarkBegin(m) => { + self.val.append_value2(&m.name.clone().into(), actors); + self.val.append_value2(&m.expand.into(), actors); + self.val.append_value2(&m.value.clone(), actors); + Action::MarkBegin + } + OpType::MarkEnd(s) => { + self.val.append_value2(&(*s).into(), actors); + Action::MarkEnd + } + OpType::Make(kind) => { + self.val.append_null(); + match kind { + ObjType::Map => Action::MakeMap, + ObjType::Table => Action::MakeTable, + ObjType::List => Action::MakeList, + ObjType::Text => Action::MakeText, + } + } + }; + self.action.append_value(action); + } + + fn finish(self) -> (Vec, HashMap>) { + // 
allocate for the exact number of columns + let mut coldata = Vec::with_capacity( + 2 + ObjEncoderOld::COLUMNS + + KeyEncoderOld::COLUMNS + + ValEncoder::COLUMNS + + PredEncoder::COLUMNS, + ); + coldata.push(self.insert.finish(COL_INSERT)); + coldata.push(self.action.finish(COL_ACTION)); + coldata.extend(self.obj.finish()); + coldata.extend(self.key.finish()); + coldata.extend(self.val.finish()); + coldata.extend(self.pred.finish()); + coldata.sort_unstable_by(|a, b| a.col.cmp(&b.col)); + + let non_empty_column_count = coldata.iter().filter(|&d| !d.data.is_empty()).count(); + let data_len: usize = coldata.iter().map(|d| d.data.len()).sum(); + // 1 for the non_empty_column_count, 2 for each non_empty column (encode_col_len), data_len + // for all the actual data + let mut data = Vec::with_capacity(1 + (non_empty_column_count * 2) + data_len); + + non_empty_column_count.encode(&mut data).ok(); + for d in &mut coldata { + d.encode_col_len(&mut data).ok(); + } + + let mut rangemap = HashMap::with_capacity(non_empty_column_count); + for d in &coldata { + let begin = data.len(); + data.write_all(d.data.as_slice()).ok(); + if !d.data.is_empty() { + rangemap.insert(d.col, begin..data.len()); + } + } + (data, rangemap) + } +} + +fn col_iter<'a, T>(bytes: &'a [u8], ops: &'a HashMap>, col_id: u32) -> T +where + T: From>, +{ + let bytes = if let Some(r) = ops.get(&col_id) { + Cow::Borrowed(&bytes[r.clone()]) + } else if let Some(r) = ops.get(&(col_id | COLUMN_TYPE_DEFLATE)) { + let mut decoder = DeflateDecoder::new(&bytes[r.clone()]); + let mut inflated = Vec::new(); + //TODO this could throw if the compression is corrupt, we should propagate the error rather + //than unwrapping + decoder.read_to_end(&mut inflated).unwrap(); + Cow::Owned(inflated) + } else { + Cow::from(&[] as &[u8]) + }; + T::from(bytes) +} + +const VALUE_TYPE_NULL: usize = 0; +const VALUE_TYPE_FALSE: usize = 1; +const VALUE_TYPE_TRUE: usize = 2; +const VALUE_TYPE_LEB128_UINT: usize = 3; +const 
VALUE_TYPE_LEB128_INT: usize = 4; +const VALUE_TYPE_IEEE754: usize = 5; +const VALUE_TYPE_UTF8: usize = 6; +const VALUE_TYPE_BYTES: usize = 7; +const VALUE_TYPE_COUNTER: usize = 8; +const VALUE_TYPE_TIMESTAMP: usize = 9; +const VALUE_TYPE_CURSOR: usize = 10; +const VALUE_TYPE_MIN_UNKNOWN: usize = 11; +const VALUE_TYPE_MAX_UNKNOWN: usize = 15; + +pub(crate) const COLUMN_TYPE_GROUP_CARD: u32 = 0; +pub(crate) const COLUMN_TYPE_ACTOR_ID: u32 = 1; +pub(crate) const COLUMN_TYPE_INT_RLE: u32 = 2; +pub(crate) const COLUMN_TYPE_INT_DELTA: u32 = 3; +pub(crate) const COLUMN_TYPE_BOOLEAN: u32 = 4; +pub(crate) const COLUMN_TYPE_STRING_RLE: u32 = 5; +pub(crate) const COLUMN_TYPE_VALUE_LEN: u32 = 6; +pub(crate) const COLUMN_TYPE_VALUE_RAW: u32 = 7; +pub(crate) const COLUMN_TYPE_DEFLATE: u32 = 8; + +#[derive(PartialEq, Debug, Clone, Copy)] +#[repr(u32)] +pub(crate) enum Action { + MakeMap, + Set, + MakeList, + Del, + MakeText, + Inc, + MakeTable, + MarkBegin, + Unused, // final bit is used to mask `Make` actions + MarkEnd, +} +const ACTIONS: [Action; 10] = [ + Action::MakeMap, + Action::Set, + Action::MakeList, + Action::Del, + Action::MakeText, + Action::Inc, + Action::MakeTable, + Action::MarkBegin, + Action::Unused, + Action::MarkEnd, +]; + +impl Decodable for Action { + fn decode(bytes: &mut R) -> Option + where + R: Read, + { + let num = usize::decode::(bytes)?; + ACTIONS.get(num).copied() + } +} + +const COL_OBJ_ACTOR: u32 = COLUMN_TYPE_ACTOR_ID; +const COL_OBJ_CTR: u32 = COLUMN_TYPE_INT_RLE; +const COL_KEY_ACTOR: u32 = 1 << 4 | COLUMN_TYPE_ACTOR_ID; +const COL_KEY_CTR: u32 = 1 << 4 | COLUMN_TYPE_INT_DELTA; +const COL_KEY_STR: u32 = 1 << 4 | COLUMN_TYPE_STRING_RLE; +const COL_ID_ACTOR: u32 = 2 << 4 | COLUMN_TYPE_ACTOR_ID; +const COL_ID_CTR: u32 = 2 << 4 | COLUMN_TYPE_INT_DELTA; +const COL_INSERT: u32 = 3 << 4 | COLUMN_TYPE_BOOLEAN; +const COL_ACTION: u32 = 4 << 4 | COLUMN_TYPE_INT_RLE; +const COL_VAL_LEN: u32 = 5 << 4 | COLUMN_TYPE_VALUE_LEN; +const COL_VAL_RAW: u32 = 5 << 4 
| COLUMN_TYPE_VALUE_RAW; +const COL_PRED_NUM: u32 = 7 << 4 | COLUMN_TYPE_GROUP_CARD; +const COL_PRED_ACTOR: u32 = 7 << 4 | COLUMN_TYPE_ACTOR_ID; +const COL_PRED_CTR: u32 = 7 << 4 | COLUMN_TYPE_INT_DELTA; +const COL_SUCC_NUM: u32 = 8 << 4 | COLUMN_TYPE_GROUP_CARD; +const COL_SUCC_ACTOR: u32 = 8 << 4 | COLUMN_TYPE_ACTOR_ID; +const COL_SUCC_CTR: u32 = 8 << 4 | COLUMN_TYPE_INT_DELTA; +const COL_REF_CTR: u32 = 6 << 4 | COLUMN_TYPE_INT_RLE; +const COL_REF_ACTOR: u32 = 6 << 4 | COLUMN_TYPE_ACTOR_ID; + +const DOC_ACTOR: u32 = /* 0 << 4 */ COLUMN_TYPE_ACTOR_ID; +const DOC_SEQ: u32 = /* 0 << 4 */ COLUMN_TYPE_INT_DELTA; +const DOC_MAX_OP: u32 = 1 << 4 | COLUMN_TYPE_INT_DELTA; +const DOC_TIME: u32 = 2 << 4 | COLUMN_TYPE_INT_DELTA; +const DOC_MESSAGE: u32 = 3 << 4 | COLUMN_TYPE_STRING_RLE; +const DOC_DEPS_NUM: u32 = 4 << 4 | COLUMN_TYPE_GROUP_CARD; +const DOC_DEPS_INDEX: u32 = 4 << 4 | COLUMN_TYPE_INT_DELTA; +const DOC_EXTRA_LEN: u32 = 5 << 4 | COLUMN_TYPE_VALUE_LEN; +const DOC_EXTRA_RAW: u32 = 5 << 4 | COLUMN_TYPE_VALUE_RAW; + +/* +const DOCUMENT_COLUMNS = { + actor: 0 << 3 | COLUMN_TYPE.ACTOR_ID, + seq: 0 << 3 | COLUMN_TYPE.INT_DELTA, + maxOp: 1 << 3 | COLUMN_TYPE.INT_DELTA, + time: 2 << 3 | COLUMN_TYPE.INT_DELTA, + message: 3 << 3 | COLUMN_TYPE.STRING_RLE, + depsNum: 4 << 3 | COLUMN_TYPE.GROUP_CARD, + depsIndex: 4 << 3 | COLUMN_TYPE.INT_DELTA, + extraLen: 5 << 3 | COLUMN_TYPE.VALUE_LEN, + extraRaw: 5 << 3 | COLUMN_TYPE.VALUE_RAW +} +*/ diff --git a/rust/automerge/src/decoding.rs b/automerge/src/decoding.rs similarity index 81% rename from rust/automerge/src/decoding.rs rename to automerge/src/decoding.rs index cd938a3c..739e856d 100644 --- a/rust/automerge/src/decoding.rs +++ b/automerge/src/decoding.rs @@ -52,60 +52,7 @@ pub enum Error { Io(#[from] io::Error), } -impl PartialEq for Error { - fn eq(&self, other: &Error) -> bool { - match (self, other) { - ( - Self::WrongType { - expected_one_of: l_expected_one_of, - found: l_found, - }, - Self::WrongType { - expected_one_of: 
r_expected_one_of, - found: r_found, - }, - ) => l_expected_one_of == r_expected_one_of && l_found == r_found, - (Self::BadChangeFormat(l0), Self::BadChangeFormat(r0)) => l0 == r0, - ( - Self::WrongByteLength { - expected: l_expected, - found: l_found, - }, - Self::WrongByteLength { - expected: r_expected, - found: r_found, - }, - ) => l_expected == r_expected && l_found == r_found, - ( - Self::ColumnsNotInAscendingOrder { - last: l_last, - found: l_found, - }, - Self::ColumnsNotInAscendingOrder { - last: r_last, - found: r_found, - }, - ) => l_last == r_last && l_found == r_found, - ( - Self::InvalidChecksum { - found: l_found, - calculated: l_calculated, - }, - Self::InvalidChecksum { - found: r_found, - calculated: r_calculated, - }, - ) => l_found == r_found && l_calculated == r_calculated, - (Self::InvalidChange(l0), Self::InvalidChange(r0)) => l0 == r0, - (Self::ChangeDecompressFailed(l0), Self::ChangeDecompressFailed(r0)) => l0 == r0, - (Self::Leb128(_l0), Self::Leb128(_r0)) => true, - (Self::Io(l0), Self::Io(r0)) => l0.kind() == r0.kind(), - _ => core::mem::discriminant(self) == core::mem::discriminant(other), - } - } -} - -#[derive(thiserror::Error, PartialEq, Debug)] +#[derive(thiserror::Error, Debug)] pub enum InvalidChangeError { #[error("Change contained an operation with action 'set' which did not have a 'value'")] SetOpWithoutValue, @@ -125,13 +72,13 @@ pub enum InvalidChangeError { #[derive(Clone, Debug)] pub(crate) struct Decoder<'a> { - pub(crate) offset: usize, - pub(crate) last_read: usize, + pub offset: usize, + pub last_read: usize, data: Cow<'a, [u8]>, } impl<'a> Decoder<'a> { - pub(crate) fn new(data: Cow<'a, [u8]>) -> Self { + pub fn new(data: Cow<'a, [u8]>) -> Self { Decoder { offset: 0, last_read: 0, @@ -139,7 +86,7 @@ impl<'a> Decoder<'a> { } } - pub(crate) fn read(&mut self) -> Result { + pub fn read(&mut self) -> Result { let mut buf = &self.data[self.offset..]; let init_len = buf.len(); let val = T::decode::<&[u8]>(&mut 
buf).ok_or(Error::NoDecodedValue)?; @@ -153,7 +100,7 @@ impl<'a> Decoder<'a> { } } - pub(crate) fn read_bytes(&mut self, index: usize) -> Result<&[u8], Error> { + pub fn read_bytes(&mut self, index: usize) -> Result<&[u8], Error> { if self.offset + index > self.data.len() { Err(Error::TryingToReadPastEnd) } else { @@ -164,7 +111,7 @@ impl<'a> Decoder<'a> { } } - pub(crate) fn done(&self) -> bool { + pub fn done(&self) -> bool { self.offset >= self.data.len() } } @@ -212,7 +159,7 @@ impl<'a> Iterator for BooleanDecoder<'a> { /// See discussion on [`crate::encoding::RleEncoder`] for the format data is stored in. #[derive(Debug)] pub(crate) struct RleDecoder<'a, T> { - pub(crate) decoder: Decoder<'a>, + pub decoder: Decoder<'a>, last_value: Option, count: isize, literal: bool, diff --git a/automerge/src/encoding.rs b/automerge/src/encoding.rs new file mode 100644 index 00000000..c5aa6fa2 --- /dev/null +++ b/automerge/src/encoding.rs @@ -0,0 +1,383 @@ +use core::fmt::Debug; +use std::{ + io, + io::{Read, Write}, + mem, + num::NonZeroU64, +}; + +use flate2::{bufread::DeflateEncoder, Compression}; +use smol_str::SmolStr; + +use crate::columnar::COLUMN_TYPE_DEFLATE; +use crate::ActorId; + +pub(crate) const DEFLATE_MIN_SIZE: usize = 256; + +/// The error type for encoding operations. +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error(transparent)] + Io(#[from] io::Error), +} + +/// Encodes booleans by storing the count of the same value. +/// +/// The sequence of numbers describes the count of false values on even indices (0-indexed) and the +/// count of true values on odd indices (0-indexed). +/// +/// Counts are encoded as usize. 
+pub(crate) struct BooleanEncoder { + buf: Vec, + last: bool, + count: usize, +} + +impl BooleanEncoder { + pub fn new() -> BooleanEncoder { + BooleanEncoder { + buf: Vec::new(), + last: false, + count: 0, + } + } + + pub fn append(&mut self, value: bool) { + if value == self.last { + self.count += 1; + } else { + self.count.encode(&mut self.buf).ok(); + self.last = value; + self.count = 1; + } + } + + pub fn finish(mut self, col: u32) -> ColData { + if self.count > 0 { + self.count.encode(&mut self.buf).ok(); + } + ColData::new(col, self.buf) + } +} + +/// Encodes integers as the change since the previous value. +/// +/// The initial value is 0 encoded as u64. Deltas are encoded as i64. +/// +/// Run length encoding is then applied to the resulting sequence. +pub(crate) struct DeltaEncoder { + rle: RleEncoder, + absolute_value: u64, +} + +impl DeltaEncoder { + pub fn new() -> DeltaEncoder { + DeltaEncoder { + rle: RleEncoder::new(), + absolute_value: 0, + } + } + + pub fn append_value(&mut self, value: u64) { + self.rle + .append_value(value as i64 - self.absolute_value as i64); + self.absolute_value = value; + } + + pub fn append_null(&mut self) { + self.rle.append_null(); + } + + pub fn finish(self, col: u32) -> ColData { + self.rle.finish(col) + } +} + +enum RleState { + Empty, + NullRun(usize), + LiteralRun(T, Vec), + LoneVal(T), + Run(T, usize), +} + +/// Encodes data in run lengh encoding format. This is very efficient for long repeats of data +/// +/// There are 3 types of 'run' in this encoder: +/// - a normal run (compresses repeated values) +/// - a null run (compresses repeated nulls) +/// - a literal run (no compression) +/// +/// A normal run consists of the length of the run (encoded as an i64) followed by the encoded value that this run contains. +/// +/// A null run consists of a zero value (encoded as an i64) followed by the length of the null run (encoded as a usize). 
+/// +/// A literal run consists of the **negative** length of the run (encoded as an i64) followed by the values in the run. +/// +/// Therefore all the types start with an encoded i64, the value of which determines the type of the following data. +pub(crate) struct RleEncoder +where + T: Encodable + PartialEq + Clone, +{ + buf: Vec, + state: RleState, +} + +impl RleEncoder +where + T: Encodable + PartialEq + Clone, +{ + pub fn new() -> RleEncoder { + RleEncoder { + buf: Vec::new(), + state: RleState::Empty, + } + } + + pub fn finish(mut self, col: u32) -> ColData { + match self.take_state() { + // this covers `only_nulls` + RleState::NullRun(size) => { + if !self.buf.is_empty() { + self.flush_null_run(size); + } + } + RleState::LoneVal(value) => self.flush_lit_run(vec![value]), + RleState::Run(value, len) => self.flush_run(&value, len), + RleState::LiteralRun(last, mut run) => { + run.push(last); + self.flush_lit_run(run); + } + RleState::Empty => {} + } + ColData::new(col, self.buf) + } + + fn flush_run(&mut self, val: &T, len: usize) { + self.encode(&(len as i64)); + self.encode(val); + } + + fn flush_null_run(&mut self, len: usize) { + self.encode::(&0); + self.encode(&len); + } + + fn flush_lit_run(&mut self, run: Vec) { + self.encode(&-(run.len() as i64)); + for val in run { + self.encode(&val); + } + } + + fn take_state(&mut self) -> RleState { + let mut state = RleState::Empty; + mem::swap(&mut self.state, &mut state); + state + } + + pub fn append_null(&mut self) { + self.state = match self.take_state() { + RleState::Empty => RleState::NullRun(1), + RleState::NullRun(size) => RleState::NullRun(size + 1), + RleState::LoneVal(other) => { + self.flush_lit_run(vec![other]); + RleState::NullRun(1) + } + RleState::Run(other, len) => { + self.flush_run(&other, len); + RleState::NullRun(1) + } + RleState::LiteralRun(last, mut run) => { + run.push(last); + self.flush_lit_run(run); + RleState::NullRun(1) + } + } + } + + pub fn append_value(&mut self, value: T) { + 
self.state = match self.take_state() { + RleState::Empty => RleState::LoneVal(value), + RleState::LoneVal(other) => { + if other == value { + RleState::Run(value, 2) + } else { + let mut v = Vec::with_capacity(2); + v.push(other); + RleState::LiteralRun(value, v) + } + } + RleState::Run(other, len) => { + if other == value { + RleState::Run(other, len + 1) + } else { + self.flush_run(&other, len); + RleState::LoneVal(value) + } + } + RleState::LiteralRun(last, mut run) => { + if last == value { + self.flush_lit_run(run); + RleState::Run(value, 2) + } else { + run.push(last); + RleState::LiteralRun(value, run) + } + } + RleState::NullRun(size) => { + self.flush_null_run(size); + RleState::LoneVal(value) + } + } + } + + fn encode(&mut self, val: &V) + where + V: Encodable, + { + val.encode(&mut self.buf).ok(); + } +} + +pub(crate) trait Encodable { + fn encode_with_actors_to_vec(&self, actors: &mut [ActorId]) -> io::Result> { + let mut buf = Vec::new(); + self.encode_with_actors(&mut buf, actors)?; + Ok(buf) + } + + fn encode_with_actors(&self, buf: &mut R, _actors: &[ActorId]) -> io::Result { + self.encode(buf) + } + + fn encode(&self, buf: &mut R) -> io::Result; + + fn encode_vec(&self, buf: &mut Vec) -> usize { + self.encode(buf).unwrap() + } +} + +impl Encodable for SmolStr { + fn encode(&self, buf: &mut R) -> io::Result { + let bytes = self.as_bytes(); + let head = bytes.len().encode(buf)?; + buf.write_all(bytes)?; + Ok(head + bytes.len()) + } +} + +impl Encodable for String { + fn encode(&self, buf: &mut R) -> io::Result { + let bytes = self.as_bytes(); + let head = bytes.len().encode(buf)?; + buf.write_all(bytes)?; + Ok(head + bytes.len()) + } +} + +impl Encodable for Option { + fn encode(&self, buf: &mut R) -> io::Result { + if let Some(s) = self { + s.encode(buf) + } else { + 0.encode(buf) + } + } +} + +impl Encodable for u64 { + fn encode(&self, buf: &mut R) -> io::Result { + leb128::write::unsigned(buf, *self) + } +} + +impl Encodable for NonZeroU64 { + fn 
encode(&self, buf: &mut R) -> io::Result { + leb128::write::unsigned(buf, self.get()) + } +} + +impl Encodable for f64 { + fn encode(&self, buf: &mut R) -> io::Result { + let bytes = self.to_le_bytes(); + buf.write_all(&bytes)?; + Ok(bytes.len()) + } +} + +impl Encodable for f32 { + fn encode(&self, buf: &mut R) -> io::Result { + let bytes = self.to_le_bytes(); + buf.write_all(&bytes)?; + Ok(bytes.len()) + } +} + +impl Encodable for i64 { + fn encode(&self, buf: &mut R) -> io::Result { + leb128::write::signed(buf, *self) + } +} + +impl Encodable for usize { + fn encode(&self, buf: &mut R) -> io::Result { + (*self as u64).encode(buf) + } +} + +impl Encodable for u32 { + fn encode(&self, buf: &mut R) -> io::Result { + u64::from(*self).encode(buf) + } +} + +impl Encodable for i32 { + fn encode(&self, buf: &mut R) -> io::Result { + i64::from(*self).encode(buf) + } +} + +#[derive(Debug)] +pub(crate) struct ColData { + pub col: u32, + pub data: Vec, + #[cfg(debug_assertions)] + has_been_deflated: bool, +} + +impl ColData { + pub fn new(col_id: u32, data: Vec) -> ColData { + ColData { + col: col_id, + data, + #[cfg(debug_assertions)] + has_been_deflated: false, + } + } + + pub fn encode_col_len(&self, buf: &mut R) -> io::Result { + let mut len = 0; + if !self.data.is_empty() { + len += self.col.encode(buf)?; + len += self.data.len().encode(buf)?; + } + Ok(len) + } + + pub fn deflate(&mut self) { + #[cfg(debug_assertions)] + { + debug_assert!(!self.has_been_deflated); + self.has_been_deflated = true; + } + if self.data.len() > DEFLATE_MIN_SIZE { + let mut deflated = Vec::new(); + let mut deflater = DeflateEncoder::new(&self.data[..], Compression::default()); + //This unwrap should be okay as we're reading and writing to in memory buffers + deflater.read_to_end(&mut deflated).unwrap(); + self.col |= COLUMN_TYPE_DEFLATE; + self.data = deflated; + } + } +} diff --git a/automerge/src/error.rs b/automerge/src/error.rs new file mode 100644 index 00000000..aaf9b61d --- /dev/null 
+++ b/automerge/src/error.rs @@ -0,0 +1,62 @@ +use crate::types::{ActorId, ScalarValue}; +use crate::value::DataType; +use crate::{decoding, encoding}; +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum AutomergeError { + #[error("invalid opid format `{0}`")] + InvalidOpId(String), + #[error("obj id not from this document `{0}`")] + ForeignObjId(String), + #[error("there was an encoding problem: {0}")] + Encoding(#[from] encoding::Error), + #[error("there was a decoding problem: {0}")] + Decoding(#[from] decoding::Error), + #[error("key must not be an empty string")] + EmptyStringKey, + #[error("invalid seq {0}")] + InvalidSeq(u64), + #[error("index {0} is out of bounds")] + InvalidIndex(usize), + #[error("duplicate seq {0} found for actor {1}")] + DuplicateSeqNumber(u64, ActorId), + #[error("generic automerge error")] + Fail, +} + +#[cfg(feature = "wasm")] +impl From for wasm_bindgen::JsValue { + fn from(err: AutomergeError) -> Self { + js_sys::Error::new(&std::format!("{}", err)).into() + } +} + +#[derive(Error, Debug)] +#[error("Invalid actor ID: {0}")] +pub struct InvalidActorId(pub String); + +#[derive(Error, Debug, PartialEq)] +#[error("Invalid scalar value, expected {expected} but received {unexpected}")] +pub(crate) struct InvalidScalarValue { + pub raw_value: ScalarValue, + pub datatype: DataType, + pub unexpected: String, + pub expected: String, +} + +#[derive(Error, Debug, PartialEq)] +#[error("Invalid change hash slice: {0:?}")] +pub struct InvalidChangeHashSlice(pub Vec); + +#[derive(Error, Debug, PartialEq)] +#[error("Invalid object ID: {0}")] +pub struct InvalidObjectId(pub String); + +#[derive(Error, Debug)] +#[error("Invalid element ID: {0}")] +pub struct InvalidElementId(pub String); + +#[derive(Error, Debug)] +#[error("Invalid OpID: {0}")] +pub struct InvalidOpId(pub String); diff --git a/automerge/src/exid.rs b/automerge/src/exid.rs new file mode 100644 index 00000000..2c174e28 --- /dev/null +++ b/automerge/src/exid.rs @@ -0,0 +1,82 @@ 
+use crate::ActorId; +use serde::Serialize; +use serde::Serializer; +use std::cmp::{Ord, Ordering}; +use std::fmt; +use std::hash::{Hash, Hasher}; + +#[derive(Debug, Clone)] +pub enum ExId { + Root, + Id(u64, ActorId, usize), +} + +impl PartialEq for ExId { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (ExId::Root, ExId::Root) => true, + (ExId::Id(ctr1, actor1, _), ExId::Id(ctr2, actor2, _)) + if ctr1 == ctr2 && actor1 == actor2 => + { + true + } + _ => false, + } + } +} + +impl Eq for ExId {} + +impl fmt::Display for ExId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ExId::Root => write!(f, "_root"), + ExId::Id(ctr, actor, _) => write!(f, "{}@{}", ctr, actor), + } + } +} + +impl Hash for ExId { + fn hash(&self, state: &mut H) { + match self { + ExId::Root => 0.hash(state), + ExId::Id(ctr, actor, _) => { + ctr.hash(state); + actor.hash(state); + } + } + } +} + +impl Ord for ExId { + fn cmp(&self, other: &Self) -> Ordering { + match (self, other) { + (ExId::Root, ExId::Root) => Ordering::Equal, + (ExId::Root, _) => Ordering::Less, + (_, ExId::Root) => Ordering::Greater, + (ExId::Id(c1, a1, _), ExId::Id(c2, a2, _)) if c1 == c2 => a2.cmp(a1), + (ExId::Id(c1, _, _), ExId::Id(c2, _, _)) => c1.cmp(c2), + } + } +} + +impl PartialOrd for ExId { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Serialize for ExId { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_str(self.to_string().as_str()) + } +} + +impl AsRef for ExId { + fn as_ref(&self) -> &ExId { + self + } +} diff --git a/rust/automerge/src/indexed_cache.rs b/automerge/src/indexed_cache.rs similarity index 50% rename from rust/automerge/src/indexed_cache.rs rename to automerge/src/indexed_cache.rs index b907a6f1..6d760637 100644 --- a/rust/automerge/src/indexed_cache.rs +++ b/automerge/src/indexed_cache.rs @@ -5,7 +5,7 @@ use std::ops::Index; #[derive(Debug, Clone)] 
pub(crate) struct IndexedCache { - pub(crate) cache: Vec, + pub cache: Vec, lookup: HashMap, } @@ -22,14 +22,14 @@ impl IndexedCache where T: Clone + Eq + Hash + Ord, { - pub(crate) fn new() -> Self { + pub fn new() -> Self { IndexedCache { cache: Default::default(), lookup: Default::default(), } } - pub(crate) fn cache(&mut self, item: T) -> usize { + pub fn cache(&mut self, item: T) -> usize { if let Some(n) = self.lookup.get(&item) { *n } else { @@ -40,37 +40,32 @@ where } } - pub(crate) fn lookup(&self, item: &T) -> Option { + pub fn lookup(&self, item: &T) -> Option { self.lookup.get(item).cloned() } - #[allow(dead_code)] - pub(crate) fn len(&self) -> usize { + pub fn len(&self) -> usize { self.cache.len() } - pub(crate) fn get(&self, index: usize) -> &T { + pub fn get(&self, index: usize) -> &T { &self.cache[index] } - pub(crate) fn safe_get(&self, index: usize) -> Option<&T> { - self.cache.get(index) - } - /// Remove the last inserted entry into this cache. /// This is safe to do as it does not require reshuffling other entries. /// /// # Panics /// /// Panics on an empty cache. - pub(crate) fn remove_last(&mut self) -> T { + pub fn remove_last(&mut self) -> T { let last = self.cache.len() - 1; let t = self.cache.remove(last); self.lookup.remove(&t); t } - pub(crate) fn sorted(&self) -> IndexedCache { + pub fn sorted(&self) -> IndexedCache { let mut sorted = Self::new(); self.cache.iter().sorted().cloned().for_each(|item| { let n = sorted.cache.len(); @@ -80,26 +75,7 @@ where sorted } - /// Create a vector from positions in this index to positions in an equivalent sorted index - /// - /// This is useful primarily when encoding an `IndexedCache` in the document format. - /// In this case we encode the actors in sorted order in the document and all ops reference the - /// offset into this sorted actor array. 
But the `IndexedCache` we have in the - /// application does not contain actors in sorted order because we add them as we encounter - /// them, so we must map from the actor IDs in the application to the actor IDs in the document - /// format - /// - /// # Examples - /// - /// ```rust,ignore - /// let idx: IndexedCache = IndexedCache::new(); - /// let first_idx = idx.cache("b"); // first_idx is `0` - /// let second_idx = idx.cache("a"); // second_idx i `1` - /// let encoded = idx.encode_index(); - /// // first_idx (0) maps to `1` whilst second_idx (1) maps to `0` because "a" < "b" - /// assert_eq!(encoded, vec![1,0]) - /// ``` - pub(crate) fn encode_index(&self) -> Vec { + pub fn encode_index(&self) -> Vec { let sorted: Vec<_> = self.cache.iter().sorted().cloned().collect(); self.cache .iter() @@ -123,15 +99,3 @@ impl Index for IndexedCache { &self.cache[i] } } - -impl FromIterator for IndexedCache { - fn from_iter>(iter: T) -> Self { - let mut cache = Vec::new(); - let mut lookup = HashMap::new(); - for (index, elem) in iter.into_iter().enumerate() { - cache.push(elem.clone()); - lookup.insert(elem, index); - } - Self { cache, lookup } - } -} diff --git a/rust/automerge/src/keys.rs b/automerge/src/keys.rs similarity index 71% rename from rust/automerge/src/keys.rs rename to automerge/src/keys.rs index 838015ef..89b20f90 100644 --- a/rust/automerge/src/keys.rs +++ b/automerge/src/keys.rs @@ -1,17 +1,13 @@ +use crate::op_set::B; use crate::{query, Automerge}; -/// An iterator over the keys of an object -/// -/// This is returned by [`crate::ReadDoc::keys`] and method. The returned item is either -/// the keys of a map, or the encoded element IDs of a sequence. 
-#[derive(Debug)] pub struct Keys<'a, 'k> { - keys: Option>, + keys: Option>, doc: &'a Automerge, } impl<'a, 'k> Keys<'a, 'k> { - pub(crate) fn new(doc: &'a Automerge, keys: Option>) -> Self { + pub(crate) fn new(doc: &'a Automerge, keys: Option>) -> Self { Self { keys, doc } } } diff --git a/rust/automerge/src/keys_at.rs b/automerge/src/keys_at.rs similarity index 68% rename from rust/automerge/src/keys_at.rs rename to automerge/src/keys_at.rs index fd747bbc..42a2ec9c 100644 --- a/rust/automerge/src/keys_at.rs +++ b/automerge/src/keys_at.rs @@ -1,17 +1,13 @@ +use crate::op_set::B; use crate::{query, Automerge}; -/// An iterator over the keys of an object at a particular point in history -/// -/// This is returned by [`crate::ReadDoc::keys_at`] method. The returned item is either the keys of a map, -/// or the encoded element IDs of a sequence. -#[derive(Debug)] pub struct KeysAt<'a, 'k> { - keys: Option>, + keys: Option>, doc: &'a Automerge, } impl<'a, 'k> KeysAt<'a, 'k> { - pub(crate) fn new(doc: &'a Automerge, keys: Option>) -> Self { + pub(crate) fn new(doc: &'a Automerge, keys: Option>) -> Self { Self { keys, doc } } } diff --git a/rust/automerge/src/legacy/mod.rs b/automerge/src/legacy/mod.rs similarity index 96% rename from rust/automerge/src/legacy/mod.rs rename to automerge/src/legacy/mod.rs index 6e6acec5..835c6597 100644 --- a/rust/automerge/src/legacy/mod.rs +++ b/automerge/src/legacy/mod.rs @@ -132,7 +132,7 @@ impl Key { } } -#[derive(Debug, Default, Clone, Eq, PartialEq, Serialize)] +#[derive(Debug, Default, Clone, PartialEq, Serialize)] #[serde(transparent)] pub struct SortedVec(Vec); @@ -157,7 +157,7 @@ impl SortedVec { self.0.get_mut(index) } - pub fn iter(&self) -> std::slice::Iter<'_, T> { + pub fn iter(&self) -> impl Iterator { self.0.iter() } } @@ -216,8 +216,8 @@ pub struct Op { impl Op { pub fn primitive_value(&self) -> Option { match &self.action { - OpType::Put(v) => Some(v.clone()), - OpType::Increment(i) => Some(ScalarValue::Int(*i)), + 
OpType::Set(v) => Some(v.clone()), + OpType::Inc(i) => Some(ScalarValue::Int(*i)), _ => None, } } diff --git a/rust/automerge/src/legacy/serde_impls/actor_id.rs b/automerge/src/legacy/serde_impls/actor_id.rs similarity index 100% rename from rust/automerge/src/legacy/serde_impls/actor_id.rs rename to automerge/src/legacy/serde_impls/actor_id.rs diff --git a/rust/automerge/src/legacy/serde_impls/change_hash.rs b/automerge/src/legacy/serde_impls/change_hash.rs similarity index 93% rename from rust/automerge/src/legacy/serde_impls/change_hash.rs rename to automerge/src/legacy/serde_impls/change_hash.rs index 04b876af..4d637909 100644 --- a/rust/automerge/src/legacy/serde_impls/change_hash.rs +++ b/automerge/src/legacy/serde_impls/change_hash.rs @@ -9,7 +9,7 @@ impl Serialize for ChangeHash { where S: Serializer, { - hex::encode(self.0).serialize(serializer) + hex::encode(&self.0).serialize(serializer) } } diff --git a/rust/automerge/src/legacy/serde_impls/element_id.rs b/automerge/src/legacy/serde_impls/element_id.rs similarity index 100% rename from rust/automerge/src/legacy/serde_impls/element_id.rs rename to automerge/src/legacy/serde_impls/element_id.rs diff --git a/rust/automerge/src/legacy/serde_impls/mod.rs b/automerge/src/legacy/serde_impls/mod.rs similarity index 100% rename from rust/automerge/src/legacy/serde_impls/mod.rs rename to automerge/src/legacy/serde_impls/mod.rs diff --git a/rust/automerge/src/legacy/serde_impls/object_id.rs b/automerge/src/legacy/serde_impls/object_id.rs similarity index 100% rename from rust/automerge/src/legacy/serde_impls/object_id.rs rename to automerge/src/legacy/serde_impls/object_id.rs diff --git a/rust/automerge/src/legacy/serde_impls/op.rs b/automerge/src/legacy/serde_impls/op.rs similarity index 87% rename from rust/automerge/src/legacy/serde_impls/op.rs rename to automerge/src/legacy/serde_impls/op.rs index a3719fd6..b91ae7e8 100644 --- a/rust/automerge/src/legacy/serde_impls/op.rs +++ 
b/automerge/src/legacy/serde_impls/op.rs @@ -19,7 +19,7 @@ impl Serialize for Op { } let numerical_datatype = match &self.action { - OpType::Put(value) => value.as_numerical_datatype(), + OpType::Set(value) => value.as_numerical_datatype(), _ => None, }; @@ -47,9 +47,14 @@ impl Serialize for Op { op.serialize_field("datatype", &datatype)?; } match &self.action { - OpType::Increment(n) => op.serialize_field("value", &n)?, - OpType::Put(ScalarValue::Counter(c)) => op.serialize_field("value", &c.start)?, - OpType::Put(value) => op.serialize_field("value", &value)?, + OpType::Inc(n) => op.serialize_field("value", &n)?, + OpType::Set(value) => op.serialize_field("value", &value)?, + OpType::MarkBegin(m) => { + op.serialize_field("name", &m.name)?; + op.serialize_field("expand", &m.expand)?; + op.serialize_field("value", &m.value)?; + } + OpType::MarkEnd(s) => op.serialize_field("expand", &s)?, _ => {} } op.serialize_field("pred", &self.pred)?; @@ -71,6 +76,8 @@ pub(crate) enum RawOpType { Del, Inc, Set, + MarkBegin, + MarkEnd, } impl Serialize for RawOpType { @@ -86,6 +93,8 @@ impl Serialize for RawOpType { RawOpType::Del => "del", RawOpType::Inc => "inc", RawOpType::Set => "set", + RawOpType::MarkBegin => "mark_begin", + RawOpType::MarkEnd => "mark_end", }; serializer.serialize_str(s) } @@ -117,6 +126,8 @@ impl<'de> Deserialize<'de> for RawOpType { "del" => Ok(RawOpType::Del), "inc" => Ok(RawOpType::Inc), "set" => Ok(RawOpType::Set), + "mark_begin" => Ok(RawOpType::MarkBegin), + "mark_end" => Ok(RawOpType::MarkEnd), other => Err(Error::unknown_variant(other, VARIANTS)), } } @@ -132,7 +143,7 @@ impl<'de> Deserialize<'de> for Op { impl<'de> Visitor<'de> for OperationVisitor { type Value = Op; - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { formatter.write_str("An operation object") } @@ -188,7 +199,31 @@ impl<'de> Deserialize<'de> for Op { 
RawOpType::MakeTable => OpType::Make(ObjType::Table), RawOpType::MakeList => OpType::Make(ObjType::List), RawOpType::MakeText => OpType::Make(ObjType::Text), - RawOpType::Del => OpType::Delete, + RawOpType::Del => OpType::Del, + RawOpType::MarkBegin => { + let name = name.ok_or_else(|| Error::missing_field("mark(name)"))?; + let expand = expand.unwrap_or(false); + let value = if let Some(datatype) = datatype { + let raw_value = value + .ok_or_else(|| Error::missing_field("value"))? + .unwrap_or(ScalarValue::Null); + raw_value.as_datatype(datatype).map_err(|e| { + Error::invalid_value( + Unexpected::Other(e.unexpected.as_str()), + &e.expected.as_str(), + ) + })? + } else { + value + .ok_or_else(|| Error::missing_field("value"))? + .unwrap_or(ScalarValue::Null) + }; + OpType::mark(name, expand, value) + } + RawOpType::MarkEnd => { + let expand = expand.unwrap_or(true); + OpType::MarkEnd(expand) + } RawOpType::Set => { let value = if let Some(datatype) = datatype { let raw_value = value @@ -205,20 +240,17 @@ impl<'de> Deserialize<'de> for Op { .ok_or_else(|| Error::missing_field("value"))? .unwrap_or(ScalarValue::Null) }; - OpType::Put(value) + OpType::Set(value) } RawOpType::Inc => match value.flatten() { - Some(ScalarValue::Int(n)) => Ok(OpType::Increment(n)), - Some(ScalarValue::Uint(n)) => Ok(OpType::Increment(n as i64)), - Some(ScalarValue::F64(n)) => Ok(OpType::Increment(n as i64)), - Some(ScalarValue::Counter(n)) => Ok(OpType::Increment(n.into())), - Some(ScalarValue::Timestamp(n)) => Ok(OpType::Increment(n)), + Some(ScalarValue::Int(n)) => Ok(OpType::Inc(n)), + Some(ScalarValue::Uint(n)) => Ok(OpType::Inc(n as i64)), + Some(ScalarValue::F64(n)) => Ok(OpType::Inc(n as i64)), + Some(ScalarValue::Counter(n)) => Ok(OpType::Inc(n.into())), + Some(ScalarValue::Timestamp(n)) => Ok(OpType::Inc(n)), Some(ScalarValue::Bytes(s)) => { Err(Error::invalid_value(Unexpected::Bytes(&s), &"a number")) } - Some(ScalarValue::Unknown { bytes, .. 
}) => { - Err(Error::invalid_value(Unexpected::Bytes(&bytes), &"a number")) - } Some(ScalarValue::Str(s)) => { Err(Error::invalid_value(Unexpected::Str(&s), &"a number")) } @@ -270,7 +302,7 @@ mod tests { "pred": [] }), expected: Ok(Op { - action: OpType::Put(ScalarValue::Uint(123)), + action: OpType::Set(ScalarValue::Uint(123)), obj: ObjectId::Root, key: "somekey".into(), insert: false, @@ -288,7 +320,7 @@ mod tests { "pred": [] }), expected: Ok(Op { - action: OpType::Put(ScalarValue::Int(-123)), + action: OpType::Set(ScalarValue::Int(-123)), obj: ObjectId::Root, key: "somekey".into(), insert: false, @@ -306,7 +338,7 @@ mod tests { "pred": [] }), expected: Ok(Op { - action: OpType::Put(ScalarValue::F64(-123.0)), + action: OpType::Set(ScalarValue::F64(-123.0)), obj: ObjectId::Root, key: "somekey".into(), insert: false, @@ -323,7 +355,7 @@ mod tests { "pred": [] }), expected: Ok(Op { - action: OpType::Put(ScalarValue::Str("somestring".into())), + action: OpType::Set(ScalarValue::Str("somestring".into())), obj: ObjectId::Root, key: "somekey".into(), insert: false, @@ -340,7 +372,7 @@ mod tests { "pred": [] }), expected: Ok(Op { - action: OpType::Put(ScalarValue::F64(1.23)), + action: OpType::Set(ScalarValue::F64(1.23)), obj: ObjectId::Root, key: "somekey".into(), insert: false, @@ -357,7 +389,7 @@ mod tests { "pred": [] }), expected: Ok(Op { - action: OpType::Put(ScalarValue::Boolean(true)), + action: OpType::Set(ScalarValue::Boolean(true)), obj: ObjectId::Root, key: "somekey".into(), insert: false, @@ -386,7 +418,7 @@ mod tests { "pred": [] }), expected: Ok(Op { - action: OpType::Put(ScalarValue::Counter(123.into())), + action: OpType::Set(ScalarValue::Counter(123.into())), obj: ObjectId::Root, key: "somekey".into(), insert: false, @@ -434,7 +466,7 @@ mod tests { "pred": [] }), expected: Ok(Op { - action: OpType::Increment(12), + action: OpType::Inc(12), obj: ObjectId::Root, key: "somekey".into(), insert: false, @@ -451,7 +483,7 @@ mod tests { "pred": [] }), 
expected: Ok(Op { - action: OpType::Increment(12), + action: OpType::Inc(12), obj: ObjectId::Root, key: "somekey".into(), insert: false, @@ -478,7 +510,7 @@ mod tests { "pred": [] }), expected: Ok(Op { - action: OpType::Put(ScalarValue::Null), + action: OpType::Set(ScalarValue::Null), obj: ObjectId::Root, key: "somekey".into(), insert: false, @@ -556,7 +588,7 @@ mod tests { #[test] fn test_serialize_key() { let map_key = Op { - action: OpType::Increment(12), + action: OpType::Inc(12), obj: ObjectId::Root, key: "somekey".into(), insert: false, @@ -567,7 +599,7 @@ mod tests { assert_eq!(json.as_object().unwrap().get("key"), Some(&expected)); let elemid_key = Op { - action: OpType::Increment(12), + action: OpType::Inc(12), obj: ObjectId::Root, key: OpId::from_str("1@7ef48769b04d47e9a88e98a134d62716") .unwrap() @@ -584,35 +616,35 @@ mod tests { fn test_round_trips() { let testcases = vec![ Op { - action: OpType::Put(ScalarValue::Uint(12)), + action: OpType::Set(ScalarValue::Uint(12)), obj: ObjectId::Root, key: "somekey".into(), insert: false, pred: SortedVec::new(), }, Op { - action: OpType::Increment(12), + action: OpType::Inc(12), obj: ObjectId::from_str("1@7ef48769b04d47e9a88e98a134d62716").unwrap(), key: "somekey".into(), insert: false, pred: SortedVec::new(), }, Op { - action: OpType::Put(ScalarValue::Uint(12)), + action: OpType::Set(ScalarValue::Uint(12)), obj: ObjectId::from_str("1@7ef48769b04d47e9a88e98a134d62716").unwrap(), key: "somekey".into(), insert: false, pred: vec![OpId::from_str("1@7ef48769b04d47e9a88e98a134d62716").unwrap()].into(), }, Op { - action: OpType::Increment(12), + action: OpType::Inc(12), obj: ObjectId::Root, key: "somekey".into(), insert: false, pred: SortedVec::new(), }, Op { - action: OpType::Put("seomthing".into()), + action: OpType::Set("seomthing".into()), obj: ObjectId::from_str("1@7ef48769b04d47e9a88e98a134d62716").unwrap(), key: OpId::from_str("1@7ef48769b04d47e9a88e98a134d62716") .unwrap() diff --git 
a/rust/automerge/src/legacy/serde_impls/op_type.rs b/automerge/src/legacy/serde_impls/op_type.rs similarity index 75% rename from rust/automerge/src/legacy/serde_impls/op_type.rs rename to automerge/src/legacy/serde_impls/op_type.rs index b054bad7..0959b11d 100644 --- a/rust/automerge/src/legacy/serde_impls/op_type.rs +++ b/automerge/src/legacy/serde_impls/op_type.rs @@ -15,9 +15,11 @@ impl Serialize for OpType { OpType::Make(ObjType::Table) => RawOpType::MakeTable, OpType::Make(ObjType::List) => RawOpType::MakeList, OpType::Make(ObjType::Text) => RawOpType::MakeText, - OpType::Delete => RawOpType::Del, - OpType::Increment(_) => RawOpType::Inc, - OpType::Put(_) => RawOpType::Set, + OpType::MarkBegin(_) => RawOpType::MarkBegin, + OpType::MarkEnd(_) => RawOpType::MarkEnd, + OpType::Del => RawOpType::Del, + OpType::Inc(_) => RawOpType::Inc, + OpType::Set(_) => RawOpType::Set, }; raw_type.serialize(serializer) } diff --git a/rust/automerge/src/legacy/serde_impls/opid.rs b/automerge/src/legacy/serde_impls/opid.rs similarity index 100% rename from rust/automerge/src/legacy/serde_impls/opid.rs rename to automerge/src/legacy/serde_impls/opid.rs diff --git a/rust/automerge/src/legacy/serde_impls/scalar_value.rs b/automerge/src/legacy/serde_impls/scalar_value.rs similarity index 98% rename from rust/automerge/src/legacy/serde_impls/scalar_value.rs rename to automerge/src/legacy/serde_impls/scalar_value.rs index b2a559ea..7a08f697 100644 --- a/rust/automerge/src/legacy/serde_impls/scalar_value.rs +++ b/automerge/src/legacy/serde_impls/scalar_value.rs @@ -12,7 +12,7 @@ impl<'de> Deserialize<'de> for ScalarValue { impl<'de> de::Visitor<'de> for ValueVisitor { type Value = ScalarValue; - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { formatter.write_str("a number, string, bool, or null") } diff --git a/rust/automerge/src/legacy/utility_impls/element_id.rs 
b/automerge/src/legacy/utility_impls/element_id.rs similarity index 100% rename from rust/automerge/src/legacy/utility_impls/element_id.rs rename to automerge/src/legacy/utility_impls/element_id.rs diff --git a/rust/automerge/src/legacy/utility_impls/key.rs b/automerge/src/legacy/utility_impls/key.rs similarity index 100% rename from rust/automerge/src/legacy/utility_impls/key.rs rename to automerge/src/legacy/utility_impls/key.rs diff --git a/rust/automerge/src/legacy/utility_impls/mod.rs b/automerge/src/legacy/utility_impls/mod.rs similarity index 100% rename from rust/automerge/src/legacy/utility_impls/mod.rs rename to automerge/src/legacy/utility_impls/mod.rs diff --git a/rust/automerge/src/legacy/utility_impls/object_id.rs b/automerge/src/legacy/utility_impls/object_id.rs similarity index 100% rename from rust/automerge/src/legacy/utility_impls/object_id.rs rename to automerge/src/legacy/utility_impls/object_id.rs diff --git a/rust/automerge/src/legacy/utility_impls/opid.rs b/automerge/src/legacy/utility_impls/opid.rs similarity index 100% rename from rust/automerge/src/legacy/utility_impls/opid.rs rename to automerge/src/legacy/utility_impls/opid.rs diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs new file mode 100644 index 00000000..ed91adbc --- /dev/null +++ b/automerge/src/lib.rs @@ -0,0 +1,64 @@ +#[macro_export] +macro_rules! log { + ( $( $t:tt )* ) => { + { + use $crate::__log; + __log!( $( $t )* ); + } + } + } + +#[cfg(all(feature = "wasm", target_family = "wasm"))] +#[macro_export] +macro_rules! __log { + ( $( $t:tt )* ) => { + web_sys::console::log_1(&format!( $( $t )* ).into()); + } + } + +#[cfg(not(all(feature = "wasm", target_family = "wasm")))] +#[macro_export] +macro_rules! 
__log { + ( $( $t:tt )* ) => { + println!( $( $t )* ); + } + } + +mod autocommit; +mod automerge; +mod change; +mod clock; +mod columnar; +mod decoding; +mod encoding; +mod error; +mod exid; +mod indexed_cache; +mod keys; +mod keys_at; +mod legacy; +mod op_set; +mod op_tree; +mod query; +pub mod sync; +pub mod transaction; +mod types; +mod value; +#[cfg(feature = "optree-visualisation")] +mod visualisation; + +pub use crate::automerge::Automerge; +pub use autocommit::AutoCommit; +pub use change::Change; +pub use decoding::Error as DecodingError; +pub use decoding::InvalidChangeError; +pub use encoding::Error as EncodingError; +pub use error::AutomergeError; +pub use exid::ExId as ObjId; +pub use keys::Keys; +pub use keys_at::KeysAt; +pub use legacy::Change as ExpandedChange; +pub use types::{ActorId, ChangeHash, ObjType, OpType, Prop}; +pub use value::{ScalarValue, Value}; + +pub const ROOT: ObjId = ObjId::Root; diff --git a/automerge/src/op_set.rs b/automerge/src/op_set.rs new file mode 100644 index 00000000..6859df04 --- /dev/null +++ b/automerge/src/op_set.rs @@ -0,0 +1,192 @@ +use crate::clock::Clock; +use crate::indexed_cache::IndexedCache; +use crate::op_tree::OpTreeInternal; +use crate::query::{self, TreeQuery}; +use crate::types::{ActorId, Key, ObjId, Op, OpId, OpType}; +use crate::ObjType; +use fxhash::FxBuildHasher; +use std::cmp::Ordering; +use std::collections::HashMap; + +pub(crate) const B: usize = 16; +pub(crate) type OpSet = OpSetInternal; + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct OpSetInternal { + trees: HashMap), FxBuildHasher>, + length: usize, + pub m: OpSetMetadata, +} + +impl OpSetInternal { + pub fn new() -> Self { + let mut trees: HashMap<_, _, _> = Default::default(); + trees.insert(ObjId::root(), (ObjType::Map, Default::default())); + OpSetInternal { + trees, + length: 0, + m: OpSetMetadata { + actors: IndexedCache::new(), + props: IndexedCache::new(), + }, + } + } + + pub fn iter(&self) -> Iter<'_, B> { + let mut objs: 
Vec<_> = self.trees.keys().collect(); + objs.sort_by(|a, b| self.m.lamport_cmp(a.0, b.0)); + Iter { + inner: self, + index: 0, + sub_index: 0, + objs, + } + } + + pub fn keys(&self, obj: ObjId) -> Option> { + if let Some((_typ, tree)) = self.trees.get(&obj) { + tree.keys() + } else { + None + } + } + + pub fn keys_at(&self, obj: ObjId, clock: Clock) -> Option> { + if let Some((_typ, tree)) = self.trees.get(&obj) { + tree.keys_at(clock) + } else { + None + } + } + + pub fn search(&self, obj: &ObjId, query: Q) -> Q + where + Q: TreeQuery, + { + if let Some((_typ, tree)) = self.trees.get(obj) { + tree.search(query, &self.m) + } else { + query + } + } + + pub fn replace(&mut self, obj: &ObjId, index: usize, f: F) + where + F: FnMut(&mut Op), + { + if let Some((_typ, tree)) = self.trees.get_mut(obj) { + tree.replace(index, f) + } + } + + pub fn remove(&mut self, obj: &ObjId, index: usize) -> Op { + // this happens on rollback - be sure to go back to the old state + let (_typ, tree) = self.trees.get_mut(obj).unwrap(); + self.length -= 1; + let op = tree.remove(index); + if let OpType::Make(_) = &op.action { + self.trees.remove(&op.id.into()); + } + op + } + + pub fn insert(&mut self, index: usize, obj: &ObjId, element: Op) { + if let OpType::Make(typ) = element.action { + self.trees + .insert(element.id.into(), (typ, Default::default())); + } + + if let Some((_typ, tree)) = self.trees.get_mut(obj) { + //let tree = self.trees.get_mut(&element.obj).unwrap(); + tree.insert(index, element); + self.length += 1; + } + } + + pub fn object_type(&self, id: &ObjId) -> Option { + self.trees.get(id).map(|(typ, _)| *typ) + } + + #[cfg(feature = "optree-visualisation")] + pub fn visualise(&self) -> String { + let mut out = Vec::new(); + let graph = super::visualisation::GraphVisualisation::construct(&self.trees, &self.m); + dot::render(&graph, &mut out).unwrap(); + String::from_utf8_lossy(&out[..]).to_string() + } +} + +impl Default for OpSetInternal { + fn default() -> Self { + 
Self::new() + } +} + +impl<'a, const B: usize> IntoIterator for &'a OpSetInternal { + type Item = (&'a ObjId, &'a Op); + + type IntoIter = Iter<'a, B>; + + fn into_iter(self) -> Self::IntoIter { + let mut objs: Vec<_> = self.trees.keys().collect(); + objs.sort_by(|a, b| self.m.lamport_cmp(a.0, b.0)); + Iter { + inner: self, + index: 0, + objs, + sub_index: 0, + } + } +} + +pub(crate) struct Iter<'a, const B: usize> { + inner: &'a OpSetInternal, + index: usize, + objs: Vec<&'a ObjId>, + sub_index: usize, +} + +impl<'a, const B: usize> Iterator for Iter<'a, B> { + type Item = (&'a ObjId, &'a Op); + + fn next(&mut self) -> Option { + let mut result = None; + for obj in self.objs.iter().skip(self.index) { + let (_typ, tree) = self.inner.trees.get(obj)?; + result = tree.get(self.sub_index).map(|op| (*obj, op)); + if result.is_some() { + self.sub_index += 1; + break; + } else { + self.index += 1; + self.sub_index = 0; + } + } + result + } +} + +#[derive(Clone, Debug, PartialEq)] +pub(crate) struct OpSetMetadata { + pub actors: IndexedCache, + pub props: IndexedCache, +} + +impl OpSetMetadata { + pub fn key_cmp(&self, left: &Key, right: &Key) -> Ordering { + match (left, right) { + (Key::Map(a), Key::Map(b)) => self.props[*a].cmp(&self.props[*b]), + _ => panic!("can only compare map keys"), + } + } + + pub fn lamport_cmp(&self, left: OpId, right: OpId) -> Ordering { + match (left, right) { + (OpId(0, _), OpId(0, _)) => Ordering::Equal, + (OpId(0, _), OpId(_, _)) => Ordering::Less, + (OpId(_, _), OpId(0, _)) => Ordering::Greater, + (OpId(a, x), OpId(b, y)) if a == b => self.actors[x].cmp(&self.actors[y]), + (OpId(a, _), OpId(b, _)) => a.cmp(&b), + } + } +} diff --git a/rust/automerge/src/op_tree/node.rs b/automerge/src/op_tree.rs similarity index 53% rename from rust/automerge/src/op_tree/node.rs rename to automerge/src/op_tree.rs index ed1b7646..55503b2a 100644 --- a/rust/automerge/src/op_tree/node.rs +++ b/automerge/src/op_tree.rs @@ -5,20 +5,175 @@ use std::{ }; 
pub(crate) use crate::op_set::OpSetMetadata; -use crate::query::{ChangeVisibility, Index, QueryResult, TreeQuery}; -use crate::types::Op; -pub(crate) const B: usize = 16; +use crate::types::{Op, OpId}; +use crate::{ + clock::Clock, + query::{self, Index, QueryResult, TreeQuery}, +}; +use std::collections::HashSet; + +#[allow(dead_code)] +pub(crate) type OpTree = OpTreeInternal<16>; #[derive(Clone, Debug)] -pub(crate) struct OpTreeNode { - pub(crate) children: Vec, - pub(crate) elements: Vec, - pub(crate) index: Index, - pub(crate) length: usize, +pub(crate) struct OpTreeInternal { + pub(crate) root_node: Option>, } -impl OpTreeNode { - pub(crate) fn new() -> Self { +#[derive(Clone, Debug)] +pub(crate) struct OpTreeNode { + pub(crate) elements: Vec, + pub(crate) children: Vec>, + pub index: Index, + length: usize, +} + +impl OpTreeInternal { + /// Construct a new, empty, sequence. + pub fn new() -> Self { + Self { root_node: None } + } + + /// Get the length of the sequence. + pub fn len(&self) -> usize { + self.root_node.as_ref().map_or(0, |n| n.len()) + } + + pub fn keys(&self) -> Option> { + self.root_node.as_ref().map(query::Keys::new) + } + + pub fn keys_at(&self, clock: Clock) -> Option> { + self.root_node + .as_ref() + .map(|root| query::KeysAt::new(root, clock)) + } + + pub fn search(&self, mut query: Q, m: &OpSetMetadata) -> Q + where + Q: TreeQuery, + { + self.root_node + .as_ref() + .map(|root| match query.query_node_with_metadata(root, m) { + QueryResult::Descend => root.search(&mut query, m), + _ => true, + }); + query + } + + /// Create an iterator through the sequence. + pub fn iter(&self) -> Iter<'_, B> { + Iter { + inner: self, + index: 0, + } + } + + /// Insert the `element` into the sequence at `index`. + /// + /// # Panics + /// + /// Panics if `index > len`. 
+ pub fn insert(&mut self, index: usize, element: Op) { + let old_len = self.len(); + if let Some(root) = self.root_node.as_mut() { + #[cfg(debug_assertions)] + root.check(); + + if root.is_full() { + let original_len = root.len(); + let new_root = OpTreeNode::new(); + + // move new_root to root position + let old_root = mem::replace(root, new_root); + + root.length += old_root.len(); + root.index = old_root.index.clone(); + root.children.push(old_root); + root.split_child(0); + + assert_eq!(original_len, root.len()); + + // after splitting the root has one element and two children, find which child the + // index is in + let first_child_len = root.children[0].len(); + let (child, insertion_index) = if first_child_len < index { + (&mut root.children[1], index - (first_child_len + 1)) + } else { + (&mut root.children[0], index) + }; + root.length += 1; + root.index.insert(&element); + child.insert_into_non_full_node(insertion_index, element) + } else { + root.insert_into_non_full_node(index, element) + } + } else { + let mut root = OpTreeNode::new(); + root.insert_into_non_full_node(index, element); + self.root_node = Some(root) + } + assert_eq!(self.len(), old_len + 1, "{:#?}", self); + } + + /// Get the `element` at `index` in the sequence. + pub fn get(&self, index: usize) -> Option<&Op> { + self.root_node.as_ref().and_then(|n| n.get(index)) + } + + // this replaces get_mut() because it allows the indexes to update correctly + pub fn replace(&mut self, index: usize, mut f: F) + where + F: FnMut(&mut Op), + { + if self.len() > index { + let op = self.get(index).unwrap(); + let mut new_op = op.clone(); + f(&mut new_op); + self.set(index, new_op); + } + } + + /// Removes the element at `index` from the sequence. + /// + /// # Panics + /// + /// Panics if `index` is out of bounds. 
+ pub fn remove(&mut self, index: usize) -> Op { + if let Some(root) = self.root_node.as_mut() { + #[cfg(debug_assertions)] + let len = root.check(); + let old = root.remove(index); + + if root.elements.is_empty() { + if root.is_leaf() { + self.root_node = None; + } else { + self.root_node = Some(root.children.remove(0)); + } + } + + #[cfg(debug_assertions)] + debug_assert_eq!(len, self.root_node.as_ref().map_or(0, |r| r.check()) + 1); + old + } else { + panic!("remove from empty tree") + } + } + + /// Update the `element` at `index` in the sequence, returning the old value. + /// + /// # Panics + /// + /// Panics if `index > len` + pub fn set(&mut self, index: usize, element: Op) -> Op { + self.root_node.as_mut().unwrap().set(index, element) + } +} + +impl OpTreeNode { + fn new() -> Self { Self { elements: Vec::new(), children: Vec::new(), @@ -27,77 +182,31 @@ impl OpTreeNode { } } - fn search_element<'a, 'b: 'a, Q>( - &'b self, - query: &mut Q, - m: &OpSetMetadata, - ops: &'a [Op], - index: usize, - ) -> bool + pub fn search(&self, query: &mut Q, m: &OpSetMetadata) -> bool where - Q: TreeQuery<'a>, - { - if let Some(e) = self.elements.get(index) { - if query.query_element_with_metadata(&ops[*e], m) == QueryResult::Finish { - return true; - } - } - false - } - - pub(crate) fn search<'a, 'b: 'a, Q>( - &'b self, - query: &mut Q, - m: &OpSetMetadata, - ops: &'a [Op], - mut skip: Option, - ) -> bool - where - Q: TreeQuery<'a>, + Q: TreeQuery, { if self.is_leaf() { - for e in self.elements.iter().skip(skip.unwrap_or(0)) { - if query.query_element_with_metadata(&ops[*e], m) == QueryResult::Finish { + for e in &self.elements { + if query.query_element_with_metadata(e, m) == QueryResult::Finish { return true; } } false } else { for (child_index, child) in self.children.iter().enumerate() { - match skip { - Some(n) if n > child.len() => { - skip = Some(n - child.len() - 1); - } - Some(n) if n == child.len() => { - skip = Some(0); // important to not be None so we never 
call query_node again - if self.search_element(query, m, ops, child_index) { + match query.query_node_with_metadata(child, m) { + QueryResult::Descend => { + if child.search(query, m) { return true; } } - Some(n) => { - if child.search(query, m, ops, Some(n)) { - return true; - } - skip = Some(0); // important to not be None so we never call query_node again - if self.search_element(query, m, ops, child_index) { - return true; - } - } - None => { - // descend and try find it - match query.query_node_with_metadata(child, m, ops) { - QueryResult::Descend => { - if child.search(query, m, ops, None) { - return true; - } - } - QueryResult::Finish => return true, - QueryResult::Next => (), - QueryResult::Skip(_) => panic!("had skip from non-root node"), - } - if self.search_element(query, m, ops, child_index) { - return true; - } + QueryResult::Finish => return true, + QueryResult::Next => (), + } + if let Some(e) = self.elements.get(child_index) { + if query.query_element_with_metadata(e, m) == QueryResult::Finish { + return true; } } } @@ -105,26 +214,26 @@ impl OpTreeNode { } } - pub(crate) fn len(&self) -> usize { + pub fn len(&self) -> usize { self.length } - fn reindex(&mut self, ops: &[Op]) { + fn reindex(&mut self) { let mut index = Index::new(); for c in &self.children { index.merge(&c.index); } - for i in &self.elements { - index.insert(&ops[*i]); + for e in &self.elements { + index.insert(e); } self.index = index } - pub(crate) fn is_leaf(&self) -> bool { + fn is_leaf(&self) -> bool { self.children.is_empty() } - pub(crate) fn is_full(&self) -> bool { + fn is_full(&self) -> bool { self.elements.len() >= 2 * B - 1 } @@ -139,13 +248,13 @@ impl OpTreeNode { cumulative_len += child.len() + 1; } } - panic!("index {} not found in node with len {}", index, self.len()) + panic!("index not found in node") } - pub(crate) fn insert_into_non_full_node(&mut self, index: usize, element: usize, ops: &[Op]) { + fn insert_into_non_full_node(&mut self, index: usize, element: 
Op) { assert!(!self.is_full()); - self.index.insert(&ops[element]); + self.index.insert(&element); if self.is_leaf() { self.length += 1; @@ -155,14 +264,14 @@ impl OpTreeNode { let child = &mut self.children[child_index]; if child.is_full() { - self.split_child(child_index, ops); + self.split_child(child_index); // child structure has changed so we need to find the index again let (child_index, sub_index) = self.find_child_index(index); let child = &mut self.children[child_index]; - child.insert_into_non_full_node(sub_index, element, ops); + child.insert_into_non_full_node(sub_index, element); } else { - child.insert_into_non_full_node(sub_index, element, ops); + child.insert_into_non_full_node(sub_index, element); } self.length += 1; } @@ -170,7 +279,7 @@ impl OpTreeNode { // A utility function to split the child `full_child_index` of this node // Note that `full_child_index` must be full when this function is called. - pub(crate) fn split_child(&mut self, full_child_index: usize, ops: &[Op]) { + fn split_child(&mut self, full_child_index: usize) { let original_len_self = self.len(); let full_child = &mut self.children[full_child_index]; @@ -204,8 +313,8 @@ impl OpTreeNode { let full_child_len = full_child.len(); - full_child.reindex(ops); - successor_sibling.reindex(ops); + full_child.reindex(); + successor_sibling.reindex(); self.children .insert(full_child_index + 1, successor_sibling); @@ -217,37 +326,32 @@ impl OpTreeNode { assert_eq!(original_len_self, self.len()); } - fn remove_from_leaf(&mut self, index: usize) -> usize { + fn remove_from_leaf(&mut self, index: usize) -> Op { self.length -= 1; self.elements.remove(index) } - fn remove_element_from_non_leaf( - &mut self, - index: usize, - element_index: usize, - ops: &[Op], - ) -> usize { + fn remove_element_from_non_leaf(&mut self, index: usize, element_index: usize) -> Op { self.length -= 1; if self.children[element_index].elements.len() >= B { let total_index = self.cumulative_index(element_index); // 
recursively delete index - 1 in predecessor_node - let predecessor = self.children[element_index].remove(index - 1 - total_index, ops); + let predecessor = self.children[element_index].remove(index - 1 - total_index); // replace element with that one mem::replace(&mut self.elements[element_index], predecessor) } else if self.children[element_index + 1].elements.len() >= B { // recursively delete index + 1 in successor_node let total_index = self.cumulative_index(element_index + 1); - let successor = self.children[element_index + 1].remove(index + 1 - total_index, ops); + let successor = self.children[element_index + 1].remove(index + 1 - total_index); // replace element with that one mem::replace(&mut self.elements[element_index], successor) } else { let middle_element = self.elements.remove(element_index); let successor_child = self.children.remove(element_index + 1); - self.children[element_index].merge(middle_element, successor_child, ops); + self.children[element_index].merge(middle_element, successor_child); let total_index = self.cumulative_index(element_index); - self.children[element_index].remove(index - total_index, ops) + self.children[element_index].remove(index - total_index) } } @@ -258,12 +362,7 @@ impl OpTreeNode { .sum() } - fn remove_from_internal_child( - &mut self, - index: usize, - mut child_index: usize, - ops: &[Op], - ) -> usize { + fn remove_from_internal_child(&mut self, index: usize, mut child_index: usize) -> Op { if self.children[child_index].elements.len() < B && if child_index > 0 { self.children[child_index - 1].elements.len() < B @@ -287,14 +386,14 @@ impl OpTreeNode { let successor = self.children.remove(child_index); child_index -= 1; - self.children[child_index].merge(middle, successor, ops); + self.children[child_index].merge(middle, successor); } else { let middle = self.elements.remove(child_index); // use the sucessor sibling let successor = self.children.remove(child_index + 1); - self.children[child_index].merge(middle, 
successor, ops); + self.children[child_index].merge(middle, successor); } } else if self.children[child_index].elements.len() < B { if child_index > 0 @@ -306,16 +405,12 @@ impl OpTreeNode { let last_element = self.children[child_index - 1].elements.pop().unwrap(); assert!(!self.children[child_index - 1].elements.is_empty()); self.children[child_index - 1].length -= 1; - self.children[child_index - 1] - .index - .remove(&ops[last_element]); + self.children[child_index - 1].index.remove(&last_element); let parent_element = mem::replace(&mut self.elements[child_index - 1], last_element); - self.children[child_index] - .index - .insert(&ops[parent_element]); + self.children[child_index].index.insert(&parent_element); self.children[child_index] .elements .insert(0, parent_element); @@ -323,10 +418,10 @@ impl OpTreeNode { if let Some(last_child) = self.children[child_index - 1].children.pop() { self.children[child_index - 1].length -= last_child.len(); - self.children[child_index - 1].reindex(ops); + self.children[child_index - 1].reindex(); self.children[child_index].length += last_child.len(); self.children[child_index].children.insert(0, last_child); - self.children[child_index].reindex(ops); + self.children[child_index].reindex(); } } else if self .children @@ -334,9 +429,7 @@ impl OpTreeNode { .map_or(false, |c| c.elements.len() >= B) { let first_element = self.children[child_index + 1].elements.remove(0); - self.children[child_index + 1] - .index - .remove(&ops[first_element]); + self.children[child_index + 1].index.remove(&first_element); self.children[child_index + 1].length -= 1; assert!(!self.children[child_index + 1].elements.is_empty()); @@ -344,39 +437,37 @@ impl OpTreeNode { let parent_element = mem::replace(&mut self.elements[child_index], first_element); self.children[child_index].length += 1; - self.children[child_index] - .index - .insert(&ops[parent_element]); + self.children[child_index].index.insert(&parent_element); 
self.children[child_index].elements.push(parent_element); if !self.children[child_index + 1].is_leaf() { let first_child = self.children[child_index + 1].children.remove(0); self.children[child_index + 1].length -= first_child.len(); - self.children[child_index + 1].reindex(ops); + self.children[child_index + 1].reindex(); self.children[child_index].length += first_child.len(); self.children[child_index].children.push(first_child); - self.children[child_index].reindex(ops); + self.children[child_index].reindex(); } } } self.length -= 1; let total_index = self.cumulative_index(child_index); - self.children[child_index].remove(index - total_index, ops) + self.children[child_index].remove(index - total_index) } - pub(crate) fn check(&self) -> usize { + fn check(&self) -> usize { let l = self.elements.len() + self.children.iter().map(|c| c.check()).sum::(); assert_eq!(self.len(), l, "{:#?}", self); l } - pub(crate) fn remove(&mut self, index: usize, ops: &[Op]) -> usize { + pub fn remove(&mut self, index: usize) -> Op { let original_len = self.len(); if self.is_leaf() { let v = self.remove_from_leaf(index); - self.index.remove(&ops[v]); + self.index.remove(&v); assert_eq!(original_len, self.len() + 1); debug_assert_eq!(self.check(), self.len()); v @@ -393,16 +484,15 @@ impl OpTreeNode { let v = self.remove_element_from_non_leaf( index, min(child_index, self.elements.len() - 1), - ops, ); - self.index.remove(&ops[v]); + self.index.remove(&v); assert_eq!(original_len, self.len() + 1); debug_assert_eq!(self.check(), self.len()); return v; } Ordering::Greater => { - let v = self.remove_from_internal_child(index, child_index, ops); - self.index.remove(&ops[v]); + let v = self.remove_from_internal_child(index, child_index); + self.index.remove(&v); assert_eq!(original_len, self.len() + 1); debug_assert_eq!(self.check(), self.len()); return v; @@ -419,8 +509,8 @@ impl OpTreeNode { } } - fn merge(&mut self, middle: usize, successor_sibling: OpTreeNode, ops: &[Op]) { - 
self.index.insert(&ops[middle]); + fn merge(&mut self, middle: Op, successor_sibling: OpTreeNode) { + self.index.insert(&middle); self.index.merge(&successor_sibling.index); self.elements.push(middle); self.elements.extend(successor_sibling.elements); @@ -429,50 +519,47 @@ impl OpTreeNode { assert!(self.is_full()); } - /// Update the operation at the given index using the provided function. - /// - /// This handles updating the indices after the update. - pub(crate) fn update<'a>( - &mut self, - index: usize, - vis: ChangeVisibility<'a>, - ) -> ChangeVisibility<'a> { + pub fn set(&mut self, index: usize, element: Op) -> Op { if self.is_leaf() { - self.index.change_vis(vis) + let old_element = self.elements.get_mut(index).unwrap(); + self.index.replace(old_element, &element); + mem::replace(old_element, element) } else { let mut cumulative_len = 0; - let len = self.len(); - for (_child_index, child) in self.children.iter_mut().enumerate() { + for (child_index, child) in self.children.iter_mut().enumerate() { match (cumulative_len + child.len()).cmp(&index) { Ordering::Less => { cumulative_len += child.len() + 1; } Ordering::Equal => { - return self.index.change_vis(vis); + let old_element = self.elements.get_mut(child_index).unwrap(); + self.index.replace(old_element, &element); + return mem::replace(old_element, element); } Ordering::Greater => { - let vis = child.update(index - cumulative_len, vis); - return self.index.change_vis(vis); + let old_element = child.set(index - cumulative_len, element.clone()); + self.index.replace(&old_element, &element); + return old_element; } } } - panic!("Invalid index to set: {} but len was {}", index, len) + panic!("Invalid index to set: {} but len was {}", index, self.len()) } } - pub(crate) fn last(&self) -> usize { + pub fn last(&self) -> &Op { if self.is_leaf() { // node is never empty so this is safe - *self.elements.last().unwrap() + self.elements.last().unwrap() } else { // if not a leaf then there is always at least one 
child self.children.last().unwrap().last() } } - pub(crate) fn get(&self, index: usize) -> Option { + pub fn get(&self, index: usize) -> Option<&Op> { if self.is_leaf() { - return self.elements.get(index).copied(); + return self.elements.get(index); } else { let mut cumulative_len = 0; for (child_index, child) in self.children.iter().enumerate() { @@ -480,7 +567,7 @@ impl OpTreeNode { Ordering::Less => { cumulative_len += child.len() + 1; } - Ordering::Equal => return self.elements.get(child_index).copied(), + Ordering::Equal => return self.elements.get(child_index), Ordering::Greater => { return child.get(index - cumulative_len); } @@ -490,3 +577,110 @@ impl OpTreeNode { None } } + +impl Default for OpTreeInternal { + fn default() -> Self { + Self::new() + } +} + +impl PartialEq for OpTreeInternal { + fn eq(&self, other: &Self) -> bool { + self.len() == other.len() && self.iter().zip(other.iter()).all(|(a, b)| a == b) + } +} + +impl<'a, const B: usize> IntoIterator for &'a OpTreeInternal { + type Item = &'a Op; + + type IntoIter = Iter<'a, B>; + + fn into_iter(self) -> Self::IntoIter { + Iter { + inner: self, + index: 0, + } + } +} + +pub(crate) struct Iter<'a, const B: usize> { + inner: &'a OpTreeInternal, + index: usize, +} + +impl<'a, const B: usize> Iterator for Iter<'a, B> { + type Item = &'a Op; + + fn next(&mut self) -> Option { + self.index += 1; + self.inner.get(self.index - 1) + } + + fn nth(&mut self, n: usize) -> Option { + self.index += n + 1; + self.inner.get(self.index - 1) + } +} + +#[derive(Debug, Clone, PartialEq)] +struct CounterData { + pos: usize, + val: i64, + succ: HashSet, + op: Op, +} + +#[cfg(test)] +mod tests { + use crate::legacy as amp; + use crate::types::{Op, OpId}; + + use super::*; + + fn op() -> Op { + let zero = OpId(0, 0); + Op { + id: zero, + action: amp::OpType::Set(0.into()), + key: zero.into(), + succ: vec![], + pred: vec![], + insert: false, + } + } + + #[test] + fn insert() { + let mut t = OpTree::new(); + + t.insert(0, 
op()); + t.insert(1, op()); + t.insert(0, op()); + t.insert(0, op()); + t.insert(0, op()); + t.insert(3, op()); + t.insert(4, op()); + } + + #[test] + fn insert_book() { + let mut t = OpTree::new(); + + for i in 0..100 { + t.insert(i % 2, op()); + } + } + + #[test] + fn insert_book_vec() { + let mut t = OpTree::new(); + let mut v = Vec::new(); + + for i in 0..100 { + t.insert(i % 3, op()); + v.insert(i % 3, op()); + + assert_eq!(v, t.iter().cloned().collect::>()) + } + } +} diff --git a/automerge/src/query.rs b/automerge/src/query.rs new file mode 100644 index 00000000..f413d590 --- /dev/null +++ b/automerge/src/query.rs @@ -0,0 +1,288 @@ +use crate::exid::ExId; +use crate::op_tree::{OpSetMetadata, OpTreeNode}; +use crate::types::{Clock, Counter, ElemId, Op, OpId, OpType, ScalarValue}; +use fxhash::FxBuildHasher; +use serde::Serialize; +use std::cmp::Ordering; +use std::collections::{HashMap, HashSet}; +use std::fmt::Debug; + +mod attribute; +mod attribute2; +mod insert; +mod keys; +mod keys_at; +mod len; +mod len_at; +mod list_vals; +mod list_vals_at; +mod nth; +mod nth_at; +mod opid; +mod prop; +mod prop_at; +mod raw_spans; +mod seek_op; +mod spans; + +pub(crate) use attribute::{Attribute, ChangeSet}; +pub(crate) use attribute2::{Attribute2, ChangeSet2}; +pub(crate) use insert::InsertNth; +pub(crate) use keys::Keys; +pub(crate) use keys_at::KeysAt; +pub(crate) use len::Len; +pub(crate) use len_at::LenAt; +pub(crate) use list_vals::ListVals; +pub(crate) use list_vals_at::ListValsAt; +pub(crate) use nth::Nth; +pub(crate) use nth_at::NthAt; +pub(crate) use opid::OpIdSearch; +pub(crate) use prop::Prop; +pub(crate) use prop_at::PropAt; +pub(crate) use raw_spans::RawSpans; +pub(crate) use seek_op::SeekOp; +pub(crate) use spans::{Span, Spans}; + +#[derive(Serialize, Debug, Clone, PartialEq)] +pub struct SpanInfo { + pub id: ExId, + pub start: usize, + pub end: usize, + #[serde(rename = "type")] + pub span_type: String, + pub value: ScalarValue, +} + +#[derive(Debug, 
Clone, PartialEq)] +pub(crate) struct CounterData { + pos: usize, + val: i64, + succ: HashSet, + op: Op, +} + +pub(crate) trait TreeQuery { + #[inline(always)] + fn query_node_with_metadata( + &mut self, + child: &OpTreeNode, + _m: &OpSetMetadata, + ) -> QueryResult { + self.query_node(child) + } + + fn query_node(&mut self, _child: &OpTreeNode) -> QueryResult { + QueryResult::Descend + } + + #[inline(always)] + fn query_element_with_metadata(&mut self, element: &Op, _m: &OpSetMetadata) -> QueryResult { + self.query_element(element) + } + + fn query_element(&mut self, _element: &Op) -> QueryResult { + panic!("invalid element query") + } +} + +#[derive(Debug, Clone, PartialEq)] +pub(crate) enum QueryResult { + Next, + Descend, + Finish, +} + +#[derive(Clone, Debug, PartialEq)] +pub(crate) struct Index { + pub visible: HashMap, + /// Set of opids found in this node and below. + pub ops: HashSet, +} + +impl Index { + pub fn new() -> Self { + Index { + visible: Default::default(), + ops: Default::default(), + } + } + + /// Get the number of visible elements in this index. 
+ pub fn visible_len(&self) -> usize { + self.visible.len() + } + + pub fn has_visible(&self, e: &Option) -> bool { + if let Some(seen) = e { + self.visible.contains_key(seen) + } else { + false + } + } + + pub fn replace(&mut self, old: &Op, new: &Op) { + if old.id != new.id { + self.ops.remove(&old.id); + self.ops.insert(new.id); + } + + assert!(new.key == old.key); + + match (new.visible(), old.visible(), new.elemid()) { + (false, true, Some(elem)) => match self.visible.get(&elem).copied() { + Some(n) if n == 1 => { + self.visible.remove(&elem); + } + Some(n) => { + self.visible.insert(elem, n - 1); + } + None => panic!("remove overun in index"), + }, + (true, false, Some(elem)) => match self.visible.get(&elem).copied() { + Some(n) => { + self.visible.insert(elem, n + 1); + } + None => { + self.visible.insert(elem, 1); + } + }, + _ => {} + } + } + + pub fn insert(&mut self, op: &Op) { + self.ops.insert(op.id); + if op.visible() { + if let Some(elem) = op.elemid() { + match self.visible.get(&elem).copied() { + Some(n) => { + self.visible.insert(elem, n + 1); + } + None => { + self.visible.insert(elem, 1); + } + } + } + } + } + + pub fn remove(&mut self, op: &Op) { + self.ops.remove(&op.id); + if op.visible() { + if let Some(elem) = op.elemid() { + match self.visible.get(&elem).copied() { + Some(n) if n == 1 => { + self.visible.remove(&elem); + } + Some(n) => { + self.visible.insert(elem, n - 1); + } + None => panic!("remove overun in index"), + } + } + } + } + + pub fn merge(&mut self, other: &Index) { + for id in &other.ops { + self.ops.insert(*id); + } + for (elem, n) in other.visible.iter() { + match self.visible.get(elem).cloned() { + None => { + self.visible.insert(*elem, 1); + } + Some(m) => { + self.visible.insert(*elem, m + n); + } + } + } + } +} + +impl Default for Index { + fn default() -> Self { + Self::new() + } +} + +#[derive(Debug, Clone, PartialEq, Default)] +pub(crate) struct VisWindow { + counters: HashMap, +} + +impl VisWindow { + fn 
visible_at(&mut self, op: &Op, pos: usize, clock: &Clock) -> bool { + if !clock.covers(&op.id) { + return false; + } + + let mut visible = false; + match op.action { + OpType::Set(ScalarValue::Counter(Counter { start, .. })) => { + self.counters.insert( + op.id, + CounterData { + pos, + val: start, + succ: op.succ.iter().cloned().collect(), + op: op.clone(), + }, + ); + if !op.succ.iter().any(|i| clock.covers(i)) { + visible = true; + } + } + OpType::Inc(inc_val) => { + for id in &op.pred { + // pred is always before op.id so we can see them + if let Some(mut entry) = self.counters.get_mut(id) { + entry.succ.remove(&op.id); + entry.val += inc_val; + entry.op.action = OpType::Set(ScalarValue::counter(entry.val)); + if !entry.succ.iter().any(|i| clock.covers(i)) { + visible = true; + } + } + } + } + _ => { + if !op.succ.iter().any(|i| clock.covers(i)) { + visible = true; + } + } + }; + visible + } + + pub fn seen_op(&self, op: &Op, pos: usize) -> Vec<(usize, Op)> { + let mut result = vec![]; + for pred in &op.pred { + if let Some(entry) = self.counters.get(pred) { + result.push((entry.pos, entry.op.clone())); + } + } + if result.is_empty() { + vec![(pos, op.clone())] + } else { + result + } + } +} + +pub(crate) fn binary_search_by(node: &OpTreeNode, f: F) -> usize +where + F: Fn(&Op) -> Ordering, +{ + let mut right = node.len(); + let mut left = 0; + while left < right { + let seq = (left + right) / 2; + if f(node.get(seq).unwrap()) == Ordering::Less { + left = seq + 1; + } else { + right = seq; + } + } + left +} diff --git a/automerge/src/query/attribute.rs b/automerge/src/query/attribute.rs new file mode 100644 index 00000000..72415483 --- /dev/null +++ b/automerge/src/query/attribute.rs @@ -0,0 +1,128 @@ +use crate::clock::Clock; +use crate::query::{OpSetMetadata, QueryResult, TreeQuery}; +use crate::types::{ElemId, Op}; +use std::fmt::Debug; +use std::ops::Range; + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct Attribute { + pos: usize, + seen: usize, + 
last_seen: Option, + baseline: Clock, + pub change_sets: Vec, +} + +#[derive(Debug, Clone, PartialEq)] +pub struct ChangeSet { + clock: Clock, + next_add: Option>, + next_del: Option<(usize, String)>, + pub add: Vec>, + pub del: Vec<(usize, String)>, +} + +impl From for ChangeSet { + fn from(clock: Clock) -> Self { + ChangeSet { + clock, + next_add: None, + next_del: None, + add: Vec::new(), + del: Vec::new(), + } + } +} + +impl ChangeSet { + fn cut_add(&mut self) { + if let Some(add) = self.next_add.take() { + self.add.push(add) + } + } + + fn cut_del(&mut self) { + if let Some(del) = self.next_del.take() { + self.del.push(del) + } + } +} + +impl Attribute { + pub fn new(baseline: Clock, change_sets: Vec) -> Self { + Attribute { + pos: 0, + seen: 0, + last_seen: None, + baseline, + change_sets: change_sets.into_iter().map(|c| c.into()).collect(), + } + } + + fn update_add(&mut self, element: &Op) { + let baseline = self.baseline.covers(&element.id); + for cs in &mut self.change_sets { + if !baseline && cs.clock.covers(&element.id) { + // is part of the change_set + if let Some(range) = &mut cs.next_add { + range.end += 1; + } else { + cs.next_add = Some(Range { + start: self.seen, + end: self.seen + 1, + }); + } + } else { + cs.cut_add(); + } + cs.cut_del(); + } + } + + // id is in baseline + // succ is not in baseline but is in cs + + fn update_del(&mut self, element: &Op) { + let baseline = self.baseline.covers(&element.id); + for cs in &mut self.change_sets { + if baseline && element.succ.iter().any(|id| cs.clock.covers(id)) { + // was deleted by change set + if let Some(s) = element.as_string() { + if let Some((_, span)) = &mut cs.next_del { + span.push_str(&s); + } else { + cs.next_del = Some((self.seen, s)) + } + } + } else { + //cs.cut_del(); + } + //cs.cut_add(); + } + } + + pub fn finish(&mut self) { + for cs in &mut self.change_sets { + cs.cut_add(); + cs.cut_del(); + } + } +} + +impl TreeQuery for Attribute { + fn query_element_with_metadata(&mut self, 
element: &Op, _m: &OpSetMetadata) -> QueryResult { + if element.insert { + self.last_seen = None; + } + if self.last_seen.is_none() && element.visible() { + self.update_add(element); + self.seen += 1; + self.last_seen = element.elemid(); + } + if !element.succ.is_empty() { + self.update_del(element); + } + self.pos += 1; + QueryResult::Next + } +} diff --git a/automerge/src/query/attribute2.rs b/automerge/src/query/attribute2.rs new file mode 100644 index 00000000..02f158c6 --- /dev/null +++ b/automerge/src/query/attribute2.rs @@ -0,0 +1,172 @@ +use crate::clock::Clock; +use crate::query::{OpSetMetadata, QueryResult, TreeQuery}; +use crate::types::{ElemId, Op}; +use std::fmt::Debug; +use std::ops::Range; + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct Attribute2 { + pos: usize, + seen: usize, + last_seen: Option, + baseline: Clock, + pub change_sets: Vec, +} + +#[derive(Debug, Clone, PartialEq)] +pub struct ChangeSet2 { + clock: Clock, + next_add: Option, + next_del: Option, + pub add: Vec, + pub del: Vec, +} + +#[derive(Debug, Clone, PartialEq)] +pub struct CS2Add { + pub actor: usize, + pub range: Range, +} + +#[derive(Debug, Clone, PartialEq)] +pub struct CS2Del { + pub pos: usize, + pub actor: usize, + pub span: String, +} + +impl From for ChangeSet2 { + fn from(clock: Clock) -> Self { + ChangeSet2 { + clock, + next_add: None, + next_del: None, + add: Vec::new(), + del: Vec::new(), + } + } +} + +impl ChangeSet2 { + fn cut_add(&mut self) { + if let Some(add) = self.next_add.take() { + self.add.push(add) + } + } + + fn cut_del(&mut self) { + if let Some(del) = self.next_del.take() { + self.del.push(del) + } + } +} + +impl Attribute2 { + pub fn new(baseline: Clock, change_sets: Vec) -> Self { + Attribute2 { + pos: 0, + seen: 0, + last_seen: None, + baseline, + change_sets: change_sets.into_iter().map(|c| c.into()).collect(), + } + } + + fn update_add(&mut self, element: &Op) { + let baseline = self.baseline.covers(&element.id); + for cs in &mut 
self.change_sets { + if !baseline && cs.clock.covers(&element.id) { + // is part of the change_set + if let Some(CS2Add { range, actor }) = &mut cs.next_add { + if *actor == element.id.actor() { + range.end += 1; + } else { + cs.cut_add(); + cs.next_add = Some(CS2Add { + actor: element.id.actor(), + range: Range { + start: self.seen, + end: self.seen + 1, + }, + }); + } + } else { + cs.next_add = Some(CS2Add { + actor: element.id.actor(), + range: Range { + start: self.seen, + end: self.seen + 1, + }, + }); + } + } else { + cs.cut_add(); + } + cs.cut_del(); + } + } + + // id is in baseline + // succ is not in baseline but is in cs + + fn update_del(&mut self, element: &Op) { + if !self.baseline.covers(&element.id) { + return; + } + for cs in &mut self.change_sets { + let succ: Vec<_> = element + .succ + .iter() + .filter(|id| cs.clock.covers(id)) + .collect(); + // was deleted by change set + if let Some(suc) = succ.get(0) { + if let Some(s) = element.as_string() { + if let Some(CS2Del { actor, span, .. 
}) = &mut cs.next_del { + if suc.actor() == *actor { + span.push_str(&s); + } else { + cs.cut_del(); + cs.next_del = Some(CS2Del { + pos: self.seen, + actor: suc.actor(), + span: s, + }) + } + } else { + cs.next_del = Some(CS2Del { + pos: self.seen, + actor: suc.actor(), + span: s, + }) + } + } + } + } + } + + pub fn finish(&mut self) { + for cs in &mut self.change_sets { + cs.cut_add(); + cs.cut_del(); + } + } +} + +impl TreeQuery for Attribute2 { + fn query_element_with_metadata(&mut self, element: &Op, _m: &OpSetMetadata) -> QueryResult { + if element.insert { + self.last_seen = None; + } + if self.last_seen.is_none() && element.visible() { + self.update_add(element); + self.seen += 1; + self.last_seen = element.elemid(); + } + if !element.succ.is_empty() { + self.update_del(element); + } + self.pos += 1; + QueryResult::Next + } +} diff --git a/rust/automerge/src/query/insert.rs b/automerge/src/query/insert.rs similarity index 60% rename from rust/automerge/src/query/insert.rs rename to automerge/src/query/insert.rs index 0dc0e98d..34ee4059 100644 --- a/rust/automerge/src/query/insert.rs +++ b/automerge/src/query/insert.rs @@ -1,7 +1,7 @@ use crate::error::AutomergeError; use crate::op_tree::OpTreeNode; -use crate::query::{OpTree, QueryResult, TreeQuery}; -use crate::types::{ElemId, Key, ListEncoding, Op, HEAD}; +use crate::query::{QueryResult, TreeQuery}; +use crate::types::{ElemId, Key, Op, HEAD}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] @@ -10,31 +10,27 @@ pub(crate) struct InsertNth { target: usize, /// the number of visible operations seen seen: usize, - last_width: usize, - encoding: ListEncoding, //pub pos: usize, /// the number of operations (including non-visible) that we have seen n: usize, valid: Option, /// last_seen is the target elemid of the last `seen` operation. /// It is used to avoid double counting visible elements (which arise through conflicts) that are split across nodes. 
- last_seen: Option, + last_seen: Option, last_insert: Option, - last_valid_insert: Option, + last_valid_insert: Option, } impl InsertNth { - pub(crate) fn new(target: usize, encoding: ListEncoding) -> Self { + pub fn new(target: usize) -> Self { let (valid, last_valid_insert) = if target == 0 { - (Some(0), Some(Key::Seq(HEAD))) + (Some(0), Some(HEAD)) } else { (None, None) }; InsertNth { target, seen: 0, - last_width: 0, - encoding, n: 0, valid, last_seen: None, @@ -43,41 +39,34 @@ impl InsertNth { } } - pub(crate) fn pos(&self) -> usize { + pub fn pos(&self) -> usize { self.valid.unwrap_or(self.n) } - pub(crate) fn key(&self) -> Result { - self.last_valid_insert - .ok_or(AutomergeError::InvalidIndex(self.target)) + pub fn key(&self) -> Result { + Ok(self + .last_valid_insert + .ok_or(AutomergeError::InvalidIndex(self.target))? + .into()) + //if self.target == 0 { + /* + if self.last_insert.is_none() { + Ok(HEAD.into()) + } else if self.seen == self.target && self.last_insert.is_some() { + Ok(Key::Seq(self.last_insert.unwrap())) + } else { + Err(AutomergeError::InvalidIndex(self.target)) + } + */ } } -impl<'a> TreeQuery<'a> for InsertNth { - fn equiv(&mut self, other: &Self) -> bool { - self.pos() == other.pos() && self.key() == other.key() - } - - fn can_shortcut_search(&mut self, tree: &'a OpTree) -> bool { - if let Some((index, pos)) = &tree.last_insert { - if let Some(op) = tree.internal.get(*pos) { - if *index + op.width(self.encoding) == self.target { - self.valid = Some(*pos + 1); - self.last_valid_insert = Some(op.elemid_or_key()); - return true; - } - } - } - false - } - - fn query_node(&mut self, child: &OpTreeNode, ops: &[Op]) -> QueryResult { +impl TreeQuery for InsertNth { + fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { // if this node has some visible elements then we may find our target within - let mut num_vis = child.index.visible_len(self.encoding); - if let Some(last_seen) = self.last_seen { - if 
child.index.has_visible(&last_seen) { - num_vis -= 1; - } + let mut num_vis = child.index.visible_len(); + if child.index.has_visible(&self.last_seen) { + num_vis -= 1; } if self.seen + num_vis >= self.target { @@ -94,9 +83,9 @@ impl<'a> TreeQuery<'a> for InsertNth { // - the insert was at a previous node and this is a long run of overwrites so last_seen should already be set correctly // - the visible op is in this node and the elemid references it so it can be set here // - the visible op is in a future node and so it will be counted as seen there - let last_elemid = ops[child.last()].elemid_or_key(); + let last_elemid = child.last().elemid(); if child.index.has_visible(&last_elemid) { - self.last_seen = Some(last_elemid); + self.last_seen = last_elemid; } QueryResult::Next } @@ -110,13 +99,16 @@ impl<'a> TreeQuery<'a> for InsertNth { self.last_seen = None; self.last_insert = element.elemid(); } + if self.valid.is_some() && element.valid_mark_anchor() { + self.last_valid_insert = element.elemid(); + self.valid = None; + } if self.last_seen.is_none() && element.visible() { if self.seen >= self.target { return QueryResult::Finish; } - self.last_width = element.width(self.encoding); - self.seen += self.last_width; - self.last_seen = Some(element.elemid_or_key()); + self.seen += 1; + self.last_seen = element.elemid(); self.last_valid_insert = self.last_seen } self.n += 1; diff --git a/automerge/src/query/keys.rs b/automerge/src/query/keys.rs new file mode 100644 index 00000000..f780effa --- /dev/null +++ b/automerge/src/query/keys.rs @@ -0,0 +1,54 @@ +use crate::op_tree::OpTreeNode; +use crate::types::Key; +use std::fmt::Debug; + +#[derive(Debug)] +pub(crate) struct Keys<'a, const B: usize> { + index: usize, + last_key: Option, + index_back: usize, + last_key_back: Option, + root_child: &'a OpTreeNode, +} + +impl<'a, const B: usize> Keys<'a, B> { + pub(crate) fn new(root_child: &'a OpTreeNode) -> Self { + Self { + index: 0, + last_key: None, + index_back: 
root_child.len(), + last_key_back: None, + root_child, + } + } +} + +impl<'a, const B: usize> Iterator for Keys<'a, B> { + type Item = Key; + + fn next(&mut self) -> Option { + for i in self.index..self.index_back { + let op = self.root_child.get(i)?; + self.index += 1; + if Some(op.key) != self.last_key && op.visible() { + self.last_key = Some(op.key); + return Some(op.key); + } + } + None + } +} + +impl<'a, const B: usize> DoubleEndedIterator for Keys<'a, B> { + fn next_back(&mut self) -> Option { + for i in (self.index..self.index_back).rev() { + let op = self.root_child.get(i)?; + self.index_back -= 1; + if Some(op.key) != self.last_key_back && op.visible() { + self.last_key_back = Some(op.key); + return Some(op.key); + } + } + None + } +} diff --git a/rust/automerge/src/query/keys_at.rs b/automerge/src/query/keys_at.rs similarity index 50% rename from rust/automerge/src/query/keys_at.rs rename to automerge/src/query/keys_at.rs index bf5b5e0e..b5262ed6 100644 --- a/rust/automerge/src/query/keys_at.rs +++ b/automerge/src/query/keys_at.rs @@ -1,59 +1,59 @@ -use crate::op_tree::OpTreeInternal; +use crate::op_tree::OpTreeNode; use crate::query::VisWindow; use crate::types::{Clock, Key}; use std::fmt::Debug; #[derive(Debug)] -pub(crate) struct KeysAt<'a> { +pub(crate) struct KeysAt<'a, const B: usize> { clock: Clock, window: VisWindow, index: usize, last_key: Option, index_back: usize, last_key_back: Option, - op_tree: &'a OpTreeInternal, + root_child: &'a OpTreeNode, } -impl<'a> KeysAt<'a> { - pub(crate) fn new(op_tree: &'a OpTreeInternal, clock: Clock) -> Self { +impl<'a, const B: usize> KeysAt<'a, B> { + pub(crate) fn new(root_child: &'a OpTreeNode, clock: Clock) -> Self { Self { clock, window: VisWindow::default(), index: 0, last_key: None, - index_back: op_tree.len(), + index_back: root_child.len(), last_key_back: None, - op_tree, + root_child, } } } -impl<'a> Iterator for KeysAt<'a> { +impl<'a, const B: usize> Iterator for KeysAt<'a, B> { type Item = Key; fn 
next(&mut self) -> Option { - for i in self.index..self.index_back { - let op = self.op_tree.get(i)?; + for i in self.index..self.root_child.len() { + let op = self.root_child.get(i)?; let visible = self.window.visible_at(op, i, &self.clock); self.index += 1; - if Some(op.elemid_or_key()) != self.last_key && visible { - self.last_key = Some(op.elemid_or_key()); - return Some(op.elemid_or_key()); + if Some(op.key) != self.last_key && visible { + self.last_key = Some(op.key); + return Some(op.key); } } None } } -impl<'a> DoubleEndedIterator for KeysAt<'a> { +impl<'a, const B: usize> DoubleEndedIterator for KeysAt<'a, B> { fn next_back(&mut self) -> Option { for i in self.index..self.index_back { - let op = self.op_tree.get(i)?; + let op = self.root_child.get(i)?; let visible = self.window.visible_at(op, i, &self.clock); self.index_back -= 1; - if Some(op.elemid_or_key()) != self.last_key_back && visible { - self.last_key_back = Some(op.elemid_or_key()); - return Some(op.elemid_or_key()); + if Some(op.key) != self.last_key_back && visible { + self.last_key_back = Some(op.key); + return Some(op.key); } } None diff --git a/automerge/src/query/len.rs b/automerge/src/query/len.rs new file mode 100644 index 00000000..ab745f75 --- /dev/null +++ b/automerge/src/query/len.rs @@ -0,0 +1,21 @@ +use crate::op_tree::OpTreeNode; +use crate::query::{QueryResult, TreeQuery}; +use std::fmt::Debug; + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct Len { + pub len: usize, +} + +impl Len { + pub fn new() -> Self { + Len { len: 0 } + } +} + +impl TreeQuery for Len { + fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { + self.len = child.index.visible_len(); + QueryResult::Finish + } +} diff --git a/rust/automerge/src/query/len_at.rs b/automerge/src/query/len_at.rs similarity index 67% rename from rust/automerge/src/query/len_at.rs rename to automerge/src/query/len_at.rs index 9380501e..2f277f3e 100644 --- a/rust/automerge/src/query/len_at.rs +++ 
b/automerge/src/query/len_at.rs @@ -1,39 +1,37 @@ use crate::query::{QueryResult, TreeQuery, VisWindow}; -use crate::types::{Clock, ElemId, ListEncoding, Op}; +use crate::types::{Clock, ElemId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] pub(crate) struct LenAt { - pub(crate) len: usize, + pub len: usize, clock: Clock, pos: usize, - encoding: ListEncoding, last: Option, window: VisWindow, } impl LenAt { - pub(crate) fn new(clock: Clock, encoding: ListEncoding) -> Self { + pub fn new(clock: Clock) -> Self { LenAt { clock, pos: 0, len: 0, - encoding, last: None, window: Default::default(), } } } -impl<'a> TreeQuery<'a> for LenAt { - fn query_element(&mut self, op: &'a Op) -> QueryResult { +impl TreeQuery for LenAt { + fn query_element(&mut self, op: &Op) -> QueryResult { if op.insert { self.last = None; } let elem = op.elemid(); let visible = self.window.visible_at(op, self.pos, &self.clock); if elem != self.last && visible { - self.len += op.width(self.encoding); + self.len += 1; self.last = elem; } self.pos += 1; diff --git a/rust/automerge/src/query/list_vals.rs b/automerge/src/query/list_vals.rs similarity index 74% rename from rust/automerge/src/query/list_vals.rs rename to automerge/src/query/list_vals.rs index 6c056621..6e433a77 100644 --- a/rust/automerge/src/query/list_vals.rs +++ b/automerge/src/query/list_vals.rs @@ -6,11 +6,11 @@ use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] pub(crate) struct ListVals { last_elem: Option, - pub(crate) ops: Vec, + pub ops: Vec, } impl ListVals { - pub(crate) fn new() -> Self { + pub fn new() -> Self { ListVals { last_elem: None, ops: vec![], @@ -18,11 +18,11 @@ impl ListVals { } } -impl<'a> TreeQuery<'a> for ListVals { - fn query_node(&mut self, child: &OpTreeNode, ops: &[Op]) -> QueryResult { +impl TreeQuery for ListVals { + fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { let start = 0; for pos in start..child.len() { - let op = &ops[child.get(pos).unwrap()]; + let op = 
child.get(pos).unwrap(); if op.insert { self.last_elem = None; } diff --git a/rust/automerge/src/query/list_vals_at.rs b/automerge/src/query/list_vals_at.rs similarity index 85% rename from rust/automerge/src/query/list_vals_at.rs rename to automerge/src/query/list_vals_at.rs index 57c7596b..c447f314 100644 --- a/rust/automerge/src/query/list_vals_at.rs +++ b/automerge/src/query/list_vals_at.rs @@ -6,13 +6,13 @@ use std::fmt::Debug; pub(crate) struct ListValsAt { clock: Clock, last_elem: Option, - pub(crate) ops: Vec, + pub ops: Vec, window: VisWindow, pos: usize, } impl ListValsAt { - pub(crate) fn new(clock: Clock) -> Self { + pub fn new(clock: Clock) -> Self { ListValsAt { clock, last_elem: None, @@ -23,8 +23,8 @@ impl ListValsAt { } } -impl<'a> TreeQuery<'a> for ListValsAt { - fn query_element_with_metadata(&mut self, op: &'a Op, m: &OpSetMetadata) -> QueryResult { +impl TreeQuery for ListValsAt { + fn query_element_with_metadata(&mut self, op: &Op, m: &OpSetMetadata) -> QueryResult { if op.insert { self.last_elem = None; } diff --git a/rust/automerge/src/query/nth.rs b/automerge/src/query/nth.rs similarity index 51% rename from rust/automerge/src/query/nth.rs rename to automerge/src/query/nth.rs index ed374b9b..f8c136f6 100644 --- a/rust/automerge/src/query/nth.rs +++ b/automerge/src/query/nth.rs @@ -1,31 +1,26 @@ use crate::error::AutomergeError; -use crate::op_set::OpSet; -use crate::op_tree::{OpTree, OpTreeNode}; +use crate::op_tree::OpTreeNode; use crate::query::{QueryResult, TreeQuery}; -use crate::types::{Key, ListEncoding, Op, OpIds}; +use crate::types::{ElemId, Key, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] -pub(crate) struct Nth<'a> { +pub(crate) struct Nth { target: usize, seen: usize, - encoding: ListEncoding, - last_width: usize, /// last_seen is the target elemid of the last `seen` operation. /// It is used to avoid double counting visible elements (which arise through conflicts) that are split across nodes. 
- last_seen: Option, - pub(crate) ops: Vec<&'a Op>, - pub(crate) ops_pos: Vec, - pub(crate) pos: usize, + last_seen: Option, + pub ops: Vec, + pub ops_pos: Vec, + pub pos: usize, } -impl<'a> Nth<'a> { - pub(crate) fn new(target: usize, encoding: ListEncoding) -> Self { +impl Nth { + pub fn new(target: usize) -> Self { Nth { target, seen: 0, - last_width: 1, - encoding, last_seen: None, ops: vec![], ops_pos: vec![], @@ -33,12 +28,8 @@ impl<'a> Nth<'a> { } } - pub(crate) fn pred(&self, ops: &OpSet) -> OpIds { - ops.m.sorted_opids(self.ops.iter().map(|o| o.id)) - } - /// Get the key - pub(crate) fn key(&self) -> Result { + pub fn key(&self) -> Result { // the query collects the ops so we can use that to get the key they all use if let Some(e) = self.ops.first().and_then(|op| op.elemid()) { Ok(Key::Seq(e)) @@ -46,39 +37,13 @@ impl<'a> Nth<'a> { Err(AutomergeError::InvalidIndex(self.target)) } } - - pub(crate) fn index(&self) -> usize { - self.seen - self.last_width - } } -impl<'a> TreeQuery<'a> for Nth<'a> { - fn equiv(&mut self, other: &Self) -> bool { - self.index() == other.index() && self.key() == other.key() - } - - fn can_shortcut_search(&mut self, tree: &'a OpTree) -> bool { - if let Some((index, pos)) = &tree.last_insert { - if *index == self.target { - if let Some(op) = tree.internal.get(*pos) { - self.last_width = op.width(self.encoding); - self.seen = *index + self.last_width; - self.ops.push(op); - self.ops_pos.push(*pos); - self.pos = *pos + 1; - return true; - } - } - } - false - } - - fn query_node(&mut self, child: &OpTreeNode, ops: &[Op]) -> QueryResult { - let mut num_vis = child.index.visible_len(self.encoding); - if let Some(last_seen) = self.last_seen { - if child.index.has_visible(&last_seen) { - num_vis -= 1; - } +impl TreeQuery for Nth { + fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { + let mut num_vis = child.index.visible_len(); + if child.index.has_visible(&self.last_seen) { + num_vis -= 1; } if self.seen + num_vis > 
self.target { @@ -94,15 +59,15 @@ impl<'a> TreeQuery<'a> for Nth<'a> { // - the insert was at a previous node and this is a long run of overwrites so last_seen should already be set correctly // - the visible op is in this node and the elemid references it so it can be set here // - the visible op is in a future node and so it will be counted as seen there - let last_elemid = ops[child.last()].elemid_or_key(); + let last_elemid = child.last().elemid(); if child.index.has_visible(&last_elemid) { - self.last_seen = Some(last_elemid); + self.last_seen = last_elemid; } QueryResult::Next } } - fn query_element(&mut self, element: &'a Op) -> QueryResult { + fn query_element(&mut self, element: &Op) -> QueryResult { if element.insert { if self.seen > self.target { return QueryResult::Finish; @@ -112,13 +77,12 @@ impl<'a> TreeQuery<'a> for Nth<'a> { } let visible = element.visible(); if visible && self.last_seen.is_none() { - self.last_width = element.width(self.encoding); - self.seen += self.last_width; + self.seen += 1; // we have a new visible element - self.last_seen = Some(element.elemid_or_key()) + self.last_seen = element.elemid() } - if self.seen > self.target && visible { - self.ops.push(element); + if self.seen == self.target + 1 && visible { + self.ops.push(element.clone()); self.ops_pos.push(self.pos); } self.pos += 1; diff --git a/rust/automerge/src/query/nth_at.rs b/automerge/src/query/nth_at.rs similarity index 76% rename from rust/automerge/src/query/nth_at.rs rename to automerge/src/query/nth_at.rs index e193ca03..39f29a47 100644 --- a/rust/automerge/src/query/nth_at.rs +++ b/automerge/src/query/nth_at.rs @@ -1,5 +1,5 @@ use crate::query::{QueryResult, TreeQuery, VisWindow}; -use crate::types::{Clock, ElemId, ListEncoding, Op}; +use crate::types::{Clock, ElemId, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] @@ -7,21 +7,19 @@ pub(crate) struct NthAt { clock: Clock, target: usize, seen: usize, - encoding: ListEncoding, last_seen: Option, 
window: VisWindow, - pub(crate) ops: Vec, - pub(crate) ops_pos: Vec, - pub(crate) pos: usize, + pub ops: Vec, + pub ops_pos: Vec, + pub pos: usize, } impl NthAt { - pub(crate) fn new(target: usize, clock: Clock, encoding: ListEncoding) -> Self { + pub fn new(target: usize, clock: Clock) -> Self { NthAt { clock, target, seen: 0, - encoding, last_seen: None, ops: vec![], ops_pos: vec![], @@ -31,8 +29,8 @@ impl NthAt { } } -impl<'a> TreeQuery<'a> for NthAt { - fn query_element(&mut self, element: &'a Op) -> QueryResult { +impl TreeQuery for NthAt { + fn query_element(&mut self, element: &Op) -> QueryResult { if element.insert { if self.seen > self.target { return QueryResult::Finish; @@ -41,10 +39,10 @@ impl<'a> TreeQuery<'a> for NthAt { } let visible = self.window.visible_at(element, self.pos, &self.clock); if visible && self.last_seen.is_none() { - self.seen += element.width(self.encoding); + self.seen += 1; self.last_seen = element.elemid() } - if self.seen > self.target && visible { + if self.seen == self.target + 1 && visible { for (vpos, vop) in self.window.seen_op(element, self.pos) { if vop.is_counter() { // this could be out of order because of inc's - we can find the right place diff --git a/rust/automerge/src/query/opid.rs b/automerge/src/query/opid.rs similarity index 77% rename from rust/automerge/src/query/opid.rs rename to automerge/src/query/opid.rs index 3d4c8b24..2a68ad1c 100644 --- a/rust/automerge/src/query/opid.rs +++ b/automerge/src/query/opid.rs @@ -1,6 +1,6 @@ use crate::op_tree::OpTreeNode; use crate::query::{QueryResult, TreeQuery}; -use crate::types::{Key, Op, OpId}; +use crate::types::{Op, OpId}; /// Search for an OpId in a tree. /// Returns the index of the operation in the tree. 
@@ -9,21 +9,19 @@ pub(crate) struct OpIdSearch { target: OpId, pos: usize, found: bool, - key: Option, } impl OpIdSearch { - pub(crate) fn new(target: OpId) -> Self { + pub fn new(target: OpId) -> Self { OpIdSearch { target, pos: 0, found: false, - key: None, } } /// Get the index of the operation, if found. - pub(crate) fn index(&self) -> Option { + pub fn index(&self) -> Option { if self.found { Some(self.pos) } else { @@ -32,8 +30,8 @@ impl OpIdSearch { } } -impl<'a> TreeQuery<'a> for OpIdSearch { - fn query_node(&mut self, child: &OpTreeNode, _ops: &[Op]) -> QueryResult { +impl TreeQuery for OpIdSearch { + fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { if child.index.ops.contains(&self.target) { QueryResult::Descend } else { diff --git a/automerge/src/query/prop.rs b/automerge/src/query/prop.rs new file mode 100644 index 00000000..6c97cfcd --- /dev/null +++ b/automerge/src/query/prop.rs @@ -0,0 +1,46 @@ +use crate::op_tree::{OpSetMetadata, OpTreeNode}; +use crate::query::{binary_search_by, QueryResult, TreeQuery}; +use crate::types::{Key, Op}; +use std::fmt::Debug; + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct Prop { + key: Key, + pub ops: Vec, + pub ops_pos: Vec, + pub pos: usize, +} + +impl Prop { + pub fn new(prop: usize) -> Self { + Prop { + key: Key::Map(prop), + ops: vec![], + ops_pos: vec![], + pos: 0, + } + } +} + +impl TreeQuery for Prop { + fn query_node_with_metadata( + &mut self, + child: &OpTreeNode, + m: &OpSetMetadata, + ) -> QueryResult { + let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.key)); + self.pos = start; + for pos in start..child.len() { + let op = child.get(pos).unwrap(); + if op.key != self.key { + break; + } + if op.visible() { + self.ops.push(op.clone()); + self.ops_pos.push(pos); + } + self.pos += 1; + } + QueryResult::Finish + } +} diff --git a/rust/automerge/src/query/prop_at.rs b/automerge/src/query/prop_at.rs similarity index 81% rename from rust/automerge/src/query/prop_at.rs rename 
to automerge/src/query/prop_at.rs index f0c2eedc..11cbf752 100644 --- a/rust/automerge/src/query/prop_at.rs +++ b/automerge/src/query/prop_at.rs @@ -7,13 +7,13 @@ use std::fmt::Debug; pub(crate) struct PropAt { clock: Clock, key: Key, - pub(crate) ops: Vec, - pub(crate) ops_pos: Vec, - pub(crate) pos: usize, + pub ops: Vec, + pub ops_pos: Vec, + pub pos: usize, } impl PropAt { - pub(crate) fn new(prop: usize, clock: Clock) -> Self { + pub fn new(prop: usize, clock: Clock) -> Self { PropAt { clock, key: Key::Map(prop), @@ -24,18 +24,17 @@ impl PropAt { } } -impl<'a> TreeQuery<'a> for PropAt { +impl TreeQuery for PropAt { fn query_node_with_metadata( &mut self, - child: &'a OpTreeNode, + child: &OpTreeNode, m: &OpSetMetadata, - ops: &[Op], ) -> QueryResult { - let start = binary_search_by(child, ops, |op| m.key_cmp(&op.key, &self.key)); + let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.key)); let mut window: VisWindow = Default::default(); self.pos = start; for pos in start..child.len() { - let op = &ops[child.get(pos).unwrap()]; + let op = child.get(pos).unwrap(); if op.key != self.key { break; } diff --git a/automerge/src/query/raw_spans.rs b/automerge/src/query/raw_spans.rs new file mode 100644 index 00000000..e375e683 --- /dev/null +++ b/automerge/src/query/raw_spans.rs @@ -0,0 +1,78 @@ +use crate::query::{OpSetMetadata, QueryResult, TreeQuery}; +use crate::types::{ElemId, Op, OpId, OpType, ScalarValue}; +use std::fmt::Debug; + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct RawSpans { + pos: usize, + seen: usize, + last_seen: Option, + last_insert: Option, + changed: bool, + pub spans: Vec, +} + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct RawSpan { + pub id: OpId, + pub start: usize, + pub end: usize, + pub name: String, + pub value: ScalarValue, +} + +impl RawSpans { + pub fn new() -> Self { + RawSpans { + pos: 0, + seen: 0, + last_seen: None, + last_insert: None, + changed: false, + spans: Vec::new(), + } + } +} + +impl 
TreeQuery for RawSpans { + fn query_element_with_metadata(&mut self, element: &Op, m: &OpSetMetadata) -> QueryResult { + // find location to insert + // mark or set + if element.succ.is_empty() { + if let OpType::MarkBegin(md) = &element.action { + let pos = self + .spans + .binary_search_by(|probe| m.lamport_cmp(probe.id, element.id)) + .unwrap_err(); + self.spans.insert( + pos, + RawSpan { + id: element.id, + start: self.seen, + end: 0, + name: md.name.clone(), + value: md.value.clone(), + }, + ); + } + if let OpType::MarkEnd(_) = &element.action { + for s in self.spans.iter_mut() { + if s.id == element.id.prev() { + s.end = self.seen; + break; + } + } + } + } + if element.insert { + self.last_seen = None; + self.last_insert = element.elemid(); + } + if self.last_seen.is_none() && element.visible() { + self.seen += 1; + self.last_seen = element.elemid(); + } + self.pos += 1; + QueryResult::Next + } +} diff --git a/automerge/src/query/seek_op.rs b/automerge/src/query/seek_op.rs new file mode 100644 index 00000000..e584bea6 --- /dev/null +++ b/automerge/src/query/seek_op.rs @@ -0,0 +1,130 @@ +use crate::op_tree::{OpSetMetadata, OpTreeNode}; +use crate::query::{binary_search_by, QueryResult, TreeQuery}; +use crate::types::{Key, Op, HEAD}; +use std::cmp::Ordering; +use std::fmt::Debug; + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct SeekOp { + /// the op we are looking for + op: Op, + /// The position to insert at + pub pos: usize, + /// The indices of ops that this op overwrites + pub succ: Vec, + /// whether a position has been found + found: bool, +} + +impl SeekOp { + pub fn new(op: &Op) -> Self { + SeekOp { + op: op.clone(), + succ: vec![], + pos: 0, + found: false, + } + } + + fn lesser_insert(&self, op: &Op, m: &OpSetMetadata) -> bool { + op.insert && m.lamport_cmp(op.id, self.op.id) == Ordering::Less + } + + fn greater_opid(&self, op: &Op, m: &OpSetMetadata) -> bool { + m.lamport_cmp(op.id, self.op.id) == Ordering::Greater + } + + fn 
is_target_insert(&self, op: &Op) -> bool { + if !op.insert { + return false; + } + if self.op.insert { + op.elemid() == self.op.key.elemid() + } else { + op.elemid() == self.op.elemid() + } + } +} + +impl TreeQuery for SeekOp { + fn query_node_with_metadata( + &mut self, + child: &OpTreeNode, + m: &OpSetMetadata, + ) -> QueryResult { + if self.found { + return QueryResult::Descend; + } + match self.op.key { + Key::Seq(HEAD) => { + while self.pos < child.len() { + let op = child.get(self.pos).unwrap(); + if self.op.overwrites(op) { + self.succ.push(self.pos); + } + if op.insert && m.lamport_cmp(op.id, self.op.id) == Ordering::Less { + break; + } + self.pos += 1; + } + QueryResult::Finish + } + Key::Seq(e) => { + if child.index.ops.contains(&e.0) { + QueryResult::Descend + } else { + self.pos += child.len(); + QueryResult::Next + } + } + Key::Map(_) => { + self.pos = binary_search_by(child, |op| m.key_cmp(&op.key, &self.op.key)); + while self.pos < child.len() { + let op = child.get(self.pos).unwrap(); + if op.key != self.op.key { + break; + } + if self.op.overwrites(op) { + self.succ.push(self.pos); + } + if m.lamport_cmp(op.id, self.op.id) == Ordering::Greater { + break; + } + self.pos += 1; + } + QueryResult::Finish + } + } + } + + fn query_element_with_metadata(&mut self, e: &Op, m: &OpSetMetadata) -> QueryResult { + if !self.found { + if self.is_target_insert(e) { + self.found = true; + if self.op.overwrites(e) { + self.succ.push(self.pos); + } + } + self.pos += 1; + QueryResult::Next + } else { + // we have already found the target + if self.op.overwrites(e) { + self.succ.push(self.pos); + } + if self.op.insert { + if self.lesser_insert(e, m) { + QueryResult::Finish + } else { + self.pos += 1; + QueryResult::Next + } + } else if e.insert || self.greater_opid(e, m) { + QueryResult::Finish + } else { + self.pos += 1; + QueryResult::Next + } + } + } +} diff --git a/automerge/src/query/spans.rs b/automerge/src/query/spans.rs new file mode 100644 index 
00000000..589dba03 --- /dev/null +++ b/automerge/src/query/spans.rs @@ -0,0 +1,108 @@ +use crate::query::{OpSetMetadata, QueryResult, TreeQuery}; +use crate::types::{ElemId, Op, OpType, ScalarValue}; +use std::collections::HashMap; +use std::fmt::Debug; + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct Spans { + pos: usize, + seen: usize, + last_seen: Option, + last_insert: Option, + seen_at_this_mark: Option, + seen_at_last_mark: Option, + ops: Vec, + marks: HashMap, + changed: bool, + pub spans: Vec, +} + +#[derive(Debug, Clone, PartialEq)] +pub struct Span { + pub pos: usize, + pub marks: Vec<(String, ScalarValue)>, +} + +impl Spans { + pub fn new() -> Self { + Spans { + pos: 0, + seen: 0, + last_seen: None, + last_insert: None, + seen_at_last_mark: None, + seen_at_this_mark: None, + changed: false, + ops: Vec::new(), + marks: HashMap::new(), + spans: Vec::new(), + } + } + + pub fn check_marks(&mut self) { + let mut new_marks = HashMap::new(); + for op in &self.ops { + if let OpType::MarkBegin(m) = &op.action { + new_marks.insert(m.name.clone(), m.value.clone()); + } + } + if new_marks != self.marks { + self.changed = true; + self.marks = new_marks; + } + if self.changed + && (self.seen_at_last_mark != self.seen_at_this_mark + || self.seen_at_last_mark.is_none() && self.seen_at_this_mark.is_none()) + { + self.changed = false; + self.seen_at_last_mark = self.seen_at_this_mark; + let mut marks: Vec<_> = self + .marks + .iter() + .map(|(key, val)| (key.clone(), val.clone())) + .collect(); + marks.sort_by(|(k1, _), (k2, _)| k1.cmp(k2)); + self.spans.push(Span { + pos: self.seen, + marks, + }); + } + } +} + +impl TreeQuery for Spans { + /* + fn query_node(&mut self, _child: &OpTreeNode) -> QueryResult { + unimplemented!() + } + */ + + fn query_element_with_metadata(&mut self, element: &Op, m: &OpSetMetadata) -> QueryResult { + // find location to insert + // mark or set + if element.succ.is_empty() { + if let OpType::MarkBegin(_) = &element.action { + let pos 
= self + .ops + .binary_search_by(|probe| m.lamport_cmp(probe.id, element.id)) + .unwrap_err(); + self.ops.insert(pos, element.clone()); + } + if let OpType::MarkEnd(_) = &element.action { + self.ops.retain(|op| op.id != element.id.prev()); + } + } + if element.insert { + self.last_seen = None; + self.last_insert = element.elemid(); + } + if self.last_seen.is_none() && element.visible() { + self.check_marks(); + self.seen += 1; + self.last_seen = element.elemid(); + self.seen_at_this_mark = element.elemid(); + } + self.pos += 1; + QueryResult::Next + } +} diff --git a/rust/automerge-wasm/src/sequence_tree.rs b/automerge/src/sequence_tree.rs similarity index 71% rename from rust/automerge-wasm/src/sequence_tree.rs rename to automerge/src/sequence_tree.rs index 91b183a2..3031e391 100644 --- a/rust/automerge-wasm/src/sequence_tree.rs +++ b/automerge/src/sequence_tree.rs @@ -4,37 +4,41 @@ use std::{ mem, }; -pub(crate) const B: usize = 16; -pub(crate) type SequenceTree = SequenceTreeInternal; +pub type SequenceTree = SequenceTreeInternal; #[derive(Clone, Debug)] -pub(crate) struct SequenceTreeInternal { - root_node: Option>, +pub struct SequenceTreeInternal { + root_node: Option>, } #[derive(Clone, Debug, PartialEq)] -struct SequenceTreeNode { +struct SequenceTreeNode { elements: Vec, - children: Vec>, + children: Vec>, length: usize, } -impl SequenceTreeInternal +impl SequenceTreeInternal where T: Clone + Debug, { /// Construct a new, empty, sequence. - pub(crate) fn new() -> Self { + pub fn new() -> Self { Self { root_node: None } } /// Get the length of the sequence. - pub(crate) fn len(&self) -> usize { + pub fn len(&self) -> usize { self.root_node.as_ref().map_or(0, |n| n.len()) } + /// Check if the sequence is empty. + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + /// Create an iterator through the sequence. 
- pub(crate) fn iter(&self) -> Iter<'_, T> { + pub fn iter(&self) -> Iter<'_, T, B> { Iter { inner: self, index: 0, @@ -46,7 +50,7 @@ where /// # Panics /// /// Panics if `index > len`. - pub(crate) fn insert(&mut self, index: usize, element: T) { + pub fn insert(&mut self, index: usize, element: T) { let old_len = self.len(); if let Some(root) = self.root_node.as_mut() { #[cfg(debug_assertions)] @@ -89,22 +93,27 @@ where } /// Push the `element` onto the back of the sequence. - pub(crate) fn push(&mut self, element: T) { + pub fn push(&mut self, element: T) { let l = self.len(); self.insert(l, element) } /// Get the `element` at `index` in the sequence. - pub(crate) fn get(&self, index: usize) -> Option<&T> { + pub fn get(&self, index: usize) -> Option<&T> { self.root_node.as_ref().and_then(|n| n.get(index)) } + /// Get the `element` at `index` in the sequence. + pub fn get_mut(&mut self, index: usize) -> Option<&mut T> { + self.root_node.as_mut().and_then(|n| n.get_mut(index)) + } + /// Removes the element at `index` from the sequence. /// /// # Panics /// /// Panics if `index` is out of bounds. - pub(crate) fn remove(&mut self, index: usize) -> T { + pub fn remove(&mut self, index: usize) -> T { if let Some(root) = self.root_node.as_mut() { #[cfg(debug_assertions)] let len = root.check(); @@ -125,9 +134,18 @@ where panic!("remove from empty tree") } } + + /// Update the `element` at `index` in the sequence, returning the old value. 
+ /// + /// # Panics + /// + /// Panics if `index > len` + pub fn set(&mut self, index: usize, element: T) -> T { + self.root_node.as_mut().unwrap().set(index, element) + } } -impl SequenceTreeNode +impl SequenceTreeNode where T: Clone + Debug, { @@ -139,7 +157,7 @@ where } } - pub(crate) fn len(&self) -> usize { + pub fn len(&self) -> usize { self.length } @@ -362,7 +380,7 @@ where l } - pub(crate) fn remove(&mut self, index: usize) -> T { + pub fn remove(&mut self, index: usize) -> T { let original_len = self.len(); if self.is_leaf() { let v = self.remove_from_leaf(index); @@ -405,7 +423,7 @@ where } } - fn merge(&mut self, middle: T, successor_sibling: SequenceTreeNode) { + fn merge(&mut self, middle: T, successor_sibling: SequenceTreeNode) { self.elements.push(middle); self.elements.extend(successor_sibling.elements); self.children.extend(successor_sibling.children); @@ -413,7 +431,31 @@ where assert!(self.is_full()); } - pub(crate) fn get(&self, index: usize) -> Option<&T> { + pub fn set(&mut self, index: usize, element: T) -> T { + if self.is_leaf() { + let old_element = self.elements.get_mut(index).unwrap(); + mem::replace(old_element, element) + } else { + let mut cumulative_len = 0; + for (child_index, child) in self.children.iter_mut().enumerate() { + match (cumulative_len + child.len()).cmp(&index) { + Ordering::Less => { + cumulative_len += child.len() + 1; + } + Ordering::Equal => { + let old_element = self.elements.get_mut(child_index).unwrap(); + return mem::replace(old_element, element); + } + Ordering::Greater => { + return child.set(index - cumulative_len, element); + } + } + } + panic!("Invalid index to set: {} but len was {}", index, self.len()) + } + } + + pub fn get(&self, index: usize) -> Option<&T> { if self.is_leaf() { return self.elements.get(index); } else { @@ -432,9 +474,29 @@ where } None } + + pub fn get_mut(&mut self, index: usize) -> Option<&mut T> { + if self.is_leaf() { + return self.elements.get_mut(index); + } else { + let mut 
cumulative_len = 0; + for (child_index, child) in self.children.iter_mut().enumerate() { + match (cumulative_len + child.len()).cmp(&index) { + Ordering::Less => { + cumulative_len += child.len() + 1; + } + Ordering::Equal => return self.elements.get_mut(child_index), + Ordering::Greater => { + return child.get_mut(index - cumulative_len); + } + } + } + } + None + } } -impl Default for SequenceTreeInternal +impl Default for SequenceTreeInternal where T: Clone + Debug, { @@ -443,7 +505,7 @@ where } } -impl PartialEq for SequenceTreeInternal +impl PartialEq for SequenceTreeInternal where T: Clone + Debug + PartialEq, { @@ -452,13 +514,13 @@ where } } -impl<'a, T> IntoIterator for &'a SequenceTreeInternal +impl<'a, T, const B: usize> IntoIterator for &'a SequenceTreeInternal where T: Clone + Debug, { type Item = &'a T; - type IntoIter = Iter<'a, T>; + type IntoIter = Iter<'a, T, B>; fn into_iter(self) -> Self::IntoIter { Iter { @@ -468,13 +530,12 @@ where } } -#[derive(Debug)] -pub struct Iter<'a, T> { - inner: &'a SequenceTreeInternal, +pub struct Iter<'a, T, const B: usize> { + inner: &'a SequenceTreeInternal, index: usize, } -impl<'a, T> Iterator for Iter<'a, T> +impl<'a, T, const B: usize> Iterator for Iter<'a, T, B> where T: Clone + Debug, { @@ -493,35 +554,37 @@ where #[cfg(test)] mod tests { - use proptest::prelude::*; + use crate::ActorId; use super::*; #[test] fn push_back() { let mut t = SequenceTree::new(); + let actor = ActorId::random(); - t.push(1); - t.push(2); - t.push(3); - t.push(4); - t.push(5); - t.push(6); - t.push(8); - t.push(100); + t.push(actor.op_id_at(1)); + t.push(actor.op_id_at(2)); + t.push(actor.op_id_at(3)); + t.push(actor.op_id_at(4)); + t.push(actor.op_id_at(5)); + t.push(actor.op_id_at(6)); + t.push(actor.op_id_at(8)); + t.push(actor.op_id_at(100)); } #[test] fn insert() { let mut t = SequenceTree::new(); + let actor = ActorId::random(); - t.insert(0, 1); - t.insert(1, 1); - t.insert(0, 1); - t.insert(0, 1); - t.insert(0, 1); - 
t.insert(3, 1); - t.insert(4, 1); + t.insert(0, actor.op_id_at(1)); + t.insert(1, actor.op_id_at(1)); + t.insert(0, actor.op_id_at(1)); + t.insert(0, actor.op_id_at(1)); + t.insert(0, actor.op_id_at(1)); + t.insert(3, actor.op_id_at(1)); + t.insert(4, actor.op_id_at(1)); } #[test] @@ -546,72 +609,79 @@ mod tests { } } - fn arb_indices() -> impl Strategy> { - proptest::collection::vec(any::(), 0..1000).prop_map(|v| { - let mut len = 0; - v.into_iter() - .map(|i| { - len += 1; - i % len - }) - .collect::>() - }) - } - - proptest! { - - #[test] - fn proptest_insert(indices in arb_indices()) { - let mut t = SequenceTreeInternal::::new(); - let mut v = Vec::new(); - - for i in indices{ - if i <= v.len() { - t.insert(i % 3, i); - v.insert(i % 3, i); - } else { - return Err(proptest::test_runner::TestCaseError::reject("index out of bounds")) - } - - assert_eq!(v, t.iter().copied().collect::>()) - } + /* + fn arb_indices() -> impl Strategy> { + proptest::collection::vec(any::(), 0..1000).prop_map(|v| { + let mut len = 0; + v.into_iter() + .map(|i| { + len += 1; + i % len + }) + .collect::>() + }) } + */ - } + // use proptest::prelude::*; - proptest! { + /* + proptest! { - // This is a really slow test due to all the copying of the Vecs (i.e. 
not due to the - // sequencetree) so we only do a few runs - #![proptest_config(ProptestConfig::with_cases(20))] - #[test] - fn proptest_remove(inserts in arb_indices(), removes in arb_indices()) { - let mut t = SequenceTreeInternal::::new(); - let mut v = Vec::new(); + #[test] + fn proptest_insert(indices in arb_indices()) { + let mut t = SequenceTreeInternal::::new(); + let actor = ActorId::random(); + let mut v = Vec::new(); - for i in inserts { - if i <= v.len() { - t.insert(i , i); - v.insert(i , i); - } else { - return Err(proptest::test_runner::TestCaseError::reject("index out of bounds")) + for i in indices{ + if i <= v.len() { + t.insert(i % 3, i); + v.insert(i % 3, i); + } else { + return Err(proptest::test_runner::TestCaseError::reject("index out of bounds")) + } + + assert_eq!(v, t.iter().copied().collect::>()) } - - assert_eq!(v, t.iter().copied().collect::>()) } - for i in removes { - if i < v.len() { - let tr = t.remove(i); - let vr = v.remove(i); - assert_eq!(tr, vr); - } else { - return Err(proptest::test_runner::TestCaseError::reject("index out of bounds")) - } - - assert_eq!(v, t.iter().copied().collect::>()) - } } + */ - } + /* + proptest! 
{ + + #[test] + fn proptest_remove(inserts in arb_indices(), removes in arb_indices()) { + let mut t = SequenceTreeInternal::::new(); + let actor = ActorId::random(); + let mut v = Vec::new(); + + for i in inserts { + if i <= v.len() { + t.insert(i , i); + v.insert(i , i); + } else { + return Err(proptest::test_runner::TestCaseError::reject("index out of bounds")) + } + + assert_eq!(v, t.iter().copied().collect::>()) + } + + for i in removes { + if i < v.len() { + let tr = t.remove(i); + let vr = v.remove(i); + assert_eq!(tr, vr); + } else { + return Err(proptest::test_runner::TestCaseError::reject("index out of bounds")) + } + + assert_eq!(v, t.iter().copied().collect::>()) + } + } + + } + */ } diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs new file mode 100644 index 00000000..da9bcc73 --- /dev/null +++ b/automerge/src/sync.rs @@ -0,0 +1,369 @@ +use crate::exid::ExId; +use crate::{ + decoding, decoding::Decoder, encoding::Encodable, Automerge, AutomergeError, Change, ChangeHash, +}; +use itertools::Itertools; +use std::{ + borrow::Cow, + collections::{HashMap, HashSet}, + io, + io::Write, +}; + +mod bloom; +mod state; + +pub use bloom::BloomFilter; +pub use state::{Have, State}; + +const HASH_SIZE: usize = 32; // 256 bits = 32 bytes +const MESSAGE_TYPE_SYNC: u8 = 0x42; // first byte of a sync message, for identification + +impl Automerge { + pub fn generate_sync_message(&self, sync_state: &mut State) -> Option { + let our_heads = self.get_heads(); + + let our_need = self.get_missing_deps(sync_state.their_heads.as_ref().unwrap_or(&vec![])); + + let their_heads_set = if let Some(ref heads) = sync_state.their_heads { + heads.iter().collect::>() + } else { + HashSet::new() + }; + let our_have = if our_need.iter().all(|hash| their_heads_set.contains(hash)) { + vec![self.make_bloom_filter(sync_state.shared_heads.clone())] + } else { + Vec::new() + }; + + if let Some(ref their_have) = sync_state.their_have { + if let Some(first_have) = 
their_have.first().as_ref() { + if !first_have + .last_sync + .iter() + .all(|hash| self.get_change_by_hash(hash).is_some()) + { + let reset_msg = Message { + heads: our_heads, + need: Vec::new(), + have: vec![Have::default()], + changes: Vec::new(), + }; + return Some(reset_msg); + } + } + } + + let mut changes_to_send = if let (Some(their_have), Some(their_need)) = ( + sync_state.their_have.as_ref(), + sync_state.their_need.as_ref(), + ) { + self.get_changes_to_send(their_have.clone(), their_need) + } else { + Vec::new() + }; + + let heads_unchanged = sync_state.last_sent_heads == our_heads; + + let heads_equal = if let Some(their_heads) = sync_state.their_heads.as_ref() { + their_heads == &our_heads + } else { + false + }; + + if heads_unchanged && heads_equal && changes_to_send.is_empty() { + return None; + } + + // deduplicate the changes to send with those we have already sent + changes_to_send.retain(|change| !sync_state.sent_hashes.contains(&change.hash)); + + sync_state.last_sent_heads = our_heads.clone(); + sync_state + .sent_hashes + .extend(changes_to_send.iter().map(|c| c.hash)); + + let sync_message = Message { + heads: our_heads, + have: our_have, + need: our_need, + changes: changes_to_send.into_iter().cloned().collect(), + }; + + Some(sync_message) + } + + pub fn receive_sync_message( + &mut self, + sync_state: &mut State, + message: Message, + ) -> Result, AutomergeError> { + let mut result = vec![]; + let before_heads = self.get_heads(); + + let Message { + heads: message_heads, + changes: message_changes, + need: message_need, + have: message_have, + } = message; + + let changes_is_empty = message_changes.is_empty(); + if !changes_is_empty { + result = self.apply_changes(message_changes)?; + sync_state.shared_heads = advance_heads( + &before_heads.iter().collect(), + &self.get_heads().into_iter().collect(), + &sync_state.shared_heads, + ); + } + + // trim down the sent hashes to those that we know they haven't seen + 
self.filter_changes(&message_heads, &mut sync_state.sent_hashes); + + if changes_is_empty && message_heads == before_heads { + sync_state.last_sent_heads = message_heads.clone(); + } + + let known_heads = message_heads + .iter() + .filter(|head| self.get_change_by_hash(head).is_some()) + .collect::>(); + if known_heads.len() == message_heads.len() { + sync_state.shared_heads = message_heads.clone(); + // If the remote peer has lost all its data, reset our state to perform a full resync + if message_heads.is_empty() { + sync_state.last_sent_heads = Default::default(); + sync_state.sent_hashes = Default::default(); + } + } else { + sync_state.shared_heads = sync_state + .shared_heads + .iter() + .chain(known_heads) + .copied() + .unique() + .sorted() + .collect::>(); + } + + sync_state.their_have = Some(message_have); + sync_state.their_heads = Some(message_heads); + sync_state.their_need = Some(message_need); + + Ok(result) + } + + fn make_bloom_filter(&self, last_sync: Vec) -> Have { + let new_changes = self.get_changes(&last_sync); + let hashes = new_changes + .into_iter() + .map(|change| change.hash) + .collect::>(); + Have { + last_sync, + bloom: BloomFilter::from(&hashes[..]), + } + } + + fn get_changes_to_send(&self, have: Vec, need: &[ChangeHash]) -> Vec<&Change> { + if have.is_empty() { + need.iter() + .filter_map(|hash| self.get_change_by_hash(hash)) + .collect() + } else { + let mut last_sync_hashes = HashSet::new(); + let mut bloom_filters = Vec::with_capacity(have.len()); + + for h in have { + let Have { last_sync, bloom } = h; + for hash in last_sync { + last_sync_hashes.insert(hash); + } + bloom_filters.push(bloom); + } + let last_sync_hashes = last_sync_hashes.into_iter().collect::>(); + + let changes = self.get_changes(&last_sync_hashes); + + let mut change_hashes = HashSet::with_capacity(changes.len()); + let mut dependents: HashMap> = HashMap::new(); + let mut hashes_to_send = HashSet::new(); + + for change in &changes { + 
change_hashes.insert(change.hash); + + for dep in &change.deps { + dependents.entry(*dep).or_default().push(change.hash); + } + + if bloom_filters + .iter() + .all(|bloom| !bloom.contains_hash(&change.hash)) + { + hashes_to_send.insert(change.hash); + } + } + + let mut stack = hashes_to_send.iter().copied().collect::>(); + while let Some(hash) = stack.pop() { + if let Some(deps) = dependents.get(&hash) { + for dep in deps { + if hashes_to_send.insert(*dep) { + stack.push(*dep); + } + } + } + } + + let mut changes_to_send = Vec::new(); + for hash in need { + hashes_to_send.insert(*hash); + if !change_hashes.contains(hash) { + let change = self.get_change_by_hash(hash); + if let Some(change) = change { + changes_to_send.push(change); + } + } + } + + for change in changes { + if hashes_to_send.contains(&change.hash) { + changes_to_send.push(change); + } + } + changes_to_send + } + } +} + +/// The sync message to be sent. +#[derive(Debug, Clone)] +pub struct Message { + /// The heads of the sender. + pub heads: Vec, + /// The hashes of any changes that are being explicitly requested from the recipient. + pub need: Vec, + /// A summary of the changes that the sender already has. + pub have: Vec, + /// The changes for the recipient to apply. 
+ pub changes: Vec, +} + +impl Message { + pub fn encode(self) -> Vec { + let mut buf = vec![MESSAGE_TYPE_SYNC]; + + encode_hashes(&mut buf, &self.heads); + encode_hashes(&mut buf, &self.need); + (self.have.len() as u32).encode_vec(&mut buf); + for have in self.have { + encode_hashes(&mut buf, &have.last_sync); + have.bloom.to_bytes().encode_vec(&mut buf); + } + + (self.changes.len() as u32).encode_vec(&mut buf); + for mut change in self.changes { + change.compress(); + change.raw_bytes().encode_vec(&mut buf); + } + + buf + } + + pub fn decode(bytes: &[u8]) -> Result { + let mut decoder = Decoder::new(Cow::Borrowed(bytes)); + + let message_type = decoder.read::()?; + if message_type != MESSAGE_TYPE_SYNC { + return Err(decoding::Error::WrongType { + expected_one_of: vec![MESSAGE_TYPE_SYNC], + found: message_type, + }); + } + + let heads = decode_hashes(&mut decoder)?; + let need = decode_hashes(&mut decoder)?; + let have_count = decoder.read::()?; + let mut have = Vec::with_capacity(have_count as usize); + for _ in 0..have_count { + let last_sync = decode_hashes(&mut decoder)?; + let bloom_bytes: Vec = decoder.read()?; + let bloom = BloomFilter::try_from(bloom_bytes.as_slice())?; + have.push(Have { last_sync, bloom }); + } + + let change_count = decoder.read::()?; + let mut changes = Vec::with_capacity(change_count as usize); + for _ in 0..change_count { + let change = decoder.read()?; + changes.push(Change::from_bytes(change)?); + } + + Ok(Message { + heads, + need, + have, + changes, + }) + } +} + +fn encode_hashes(buf: &mut Vec, hashes: &[ChangeHash]) { + debug_assert!( + hashes.windows(2).all(|h| h[0] <= h[1]), + "hashes were not sorted" + ); + hashes.encode_vec(buf); +} + +impl Encodable for &[ChangeHash] { + fn encode(&self, buf: &mut W) -> io::Result { + let head = self.len().encode(buf)?; + let mut body = 0; + for hash in self.iter() { + buf.write_all(&hash.0)?; + body += hash.0.len(); + } + Ok(head + body) + } +} + +fn decode_hashes(decoder: &mut Decoder) 
-> Result, decoding::Error> { + let length = decoder.read::()?; + let mut hashes = Vec::with_capacity(length as usize); + + for _ in 0..length { + let hash_bytes = decoder.read_bytes(HASH_SIZE)?; + let hash = ChangeHash::try_from(hash_bytes).map_err(decoding::Error::BadChangeFormat)?; + hashes.push(hash); + } + + Ok(hashes) +} + +fn advance_heads( + my_old_heads: &HashSet<&ChangeHash>, + my_new_heads: &HashSet, + our_old_shared_heads: &[ChangeHash], +) -> Vec { + let new_heads = my_new_heads + .iter() + .filter(|head| !my_old_heads.contains(head)) + .copied() + .collect::>(); + + let common_heads = our_old_shared_heads + .iter() + .filter(|head| my_new_heads.contains(head)) + .copied() + .collect::>(); + + let mut advanced_heads = HashSet::with_capacity(new_heads.len() + common_heads.len()); + for head in new_heads.into_iter().chain(common_heads) { + advanced_heads.insert(head); + } + let mut advanced_heads = advanced_heads.into_iter().collect::>(); + advanced_heads.sort(); + advanced_heads +} diff --git a/rust/automerge/src/sync/bloom.rs b/automerge/src/sync/bloom.rs similarity index 59% rename from rust/automerge/src/sync/bloom.rs rename to automerge/src/sync/bloom.rs index 8523061e..d20df5fd 100644 --- a/rust/automerge/src/sync/bloom.rs +++ b/automerge/src/sync/bloom.rs @@ -1,7 +1,6 @@ -use std::borrow::Borrow; +use std::borrow::Cow; -use crate::storage::parse; -use crate::ChangeHash; +use crate::{decoding, decoding::Decoder, encoding::Encodable, ChangeHash}; // These constants correspond to a 1% false positive rate. 
The values can be changed without // breaking compatibility of the network protocol, since the parameters used for a particular @@ -9,7 +8,7 @@ use crate::ChangeHash; const BITS_PER_ENTRY: u32 = 10; const NUM_PROBES: u32 = 7; -#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize)] +#[derive(Default, Debug, Clone)] pub struct BloomFilter { num_entries: u32, num_bits_per_entry: u32, @@ -17,55 +16,18 @@ pub struct BloomFilter { bits: Vec, } -impl Default for BloomFilter { - fn default() -> Self { - BloomFilter { - num_entries: 0, - num_bits_per_entry: BITS_PER_ENTRY, - num_probes: NUM_PROBES, - bits: Vec::new(), - } - } -} - -#[derive(Debug, thiserror::Error)] -pub(crate) enum ParseError { - #[error(transparent)] - Leb128(#[from] parse::leb128::Error), -} - impl BloomFilter { pub fn to_bytes(&self) -> Vec { let mut buf = Vec::new(); if self.num_entries != 0 { - leb128::write::unsigned(&mut buf, self.num_entries as u64).unwrap(); - leb128::write::unsigned(&mut buf, self.num_bits_per_entry as u64).unwrap(); - leb128::write::unsigned(&mut buf, self.num_probes as u64).unwrap(); + self.num_entries.encode_vec(&mut buf); + self.num_bits_per_entry.encode_vec(&mut buf); + self.num_probes.encode_vec(&mut buf); buf.extend(&self.bits); } buf } - pub(crate) fn parse(input: parse::Input<'_>) -> parse::ParseResult<'_, Self, ParseError> { - if input.is_empty() { - Ok((input, Self::default())) - } else { - let (i, num_entries) = parse::leb128_u32(input)?; - let (i, num_bits_per_entry) = parse::leb128_u32(i)?; - let (i, num_probes) = parse::leb128_u32(i)?; - let (i, bits) = parse::take_n(bits_capacity(num_entries, num_bits_per_entry), i)?; - Ok(( - i, - Self { - num_entries, - num_bits_per_entry, - num_probes, - bits: bits.to_vec(), - }, - )) - } - } - fn get_probes(&self, hash: &ChangeHash) -> Vec { let hash_bytes = hash.0; let modulo = 8 * self.bits.len() as u32; @@ -79,8 +41,7 @@ impl BloomFilter { let z = u32::from_le_bytes([hash_bytes[8], hash_bytes[9], hash_bytes[10], 
hash_bytes[11]]) % modulo; - let mut probes = Vec::with_capacity(self.num_probes as usize); - probes.push(x); + let mut probes = vec![x]; for _ in 1..self.num_probes { x = (x + y) % modulo; y = (y + z) % modulo; @@ -121,23 +82,6 @@ impl BloomFilter { true } } - - pub fn from_hashes>(hashes: impl ExactSizeIterator) -> Self { - let num_entries = hashes.len() as u32; - let num_bits_per_entry = BITS_PER_ENTRY; - let num_probes = NUM_PROBES; - let bits = vec![0; bits_capacity(num_entries, num_bits_per_entry)]; - let mut filter = Self { - num_entries, - num_bits_per_entry, - num_probes, - bits, - }; - for hash in hashes { - filter.add_hash(hash.borrow()); - } - filter - } } fn bits_capacity(num_entries: u32, num_bits_per_entry: u32) -> usize { @@ -145,16 +89,44 @@ fn bits_capacity(num_entries: u32, num_bits_per_entry: u32) -> usize { f as usize } -#[derive(thiserror::Error, Debug)] -#[error("{0}")] -pub struct DecodeError(String); - -impl TryFrom<&[u8]> for BloomFilter { - type Error = DecodeError; - - fn try_from(bytes: &[u8]) -> Result { - Self::parse(parse::Input::new(bytes)) - .map(|(_, b)| b) - .map_err(|e| DecodeError(e.to_string())) +impl From<&[ChangeHash]> for BloomFilter { + fn from(hashes: &[ChangeHash]) -> Self { + let num_entries = hashes.len() as u32; + let num_bits_per_entry = BITS_PER_ENTRY; + let num_probes = NUM_PROBES; + let bits = vec![0; bits_capacity(num_entries, num_bits_per_entry) as usize]; + let mut filter = Self { + num_entries, + num_bits_per_entry, + num_probes, + bits, + }; + for hash in hashes { + filter.add_hash(hash); + } + filter + } +} + +impl TryFrom<&[u8]> for BloomFilter { + type Error = decoding::Error; + + fn try_from(bytes: &[u8]) -> Result { + if bytes.is_empty() { + Ok(Self::default()) + } else { + let mut decoder = Decoder::new(Cow::Borrowed(bytes)); + let num_entries = decoder.read()?; + let num_bits_per_entry = decoder.read()?; + let num_probes = decoder.read()?; + let bits = + decoder.read_bytes(bits_capacity(num_entries, 
num_bits_per_entry) as usize)?; + Ok(Self { + num_entries, + num_bits_per_entry, + num_probes, + bits: bits.to_vec(), + }) + } } } diff --git a/automerge/src/sync/state.rs b/automerge/src/sync/state.rs new file mode 100644 index 00000000..9828060c --- /dev/null +++ b/automerge/src/sync/state.rs @@ -0,0 +1,63 @@ +use std::{borrow::Cow, collections::HashSet}; + +use super::{decode_hashes, encode_hashes, BloomFilter}; +use crate::{decoding, decoding::Decoder, ChangeHash}; + +const SYNC_STATE_TYPE: u8 = 0x43; // first byte of an encoded sync state, for identification + +/// The state of synchronisation with a peer. +#[derive(Debug, Clone, Default)] +pub struct State { + pub shared_heads: Vec, + pub last_sent_heads: Vec, + pub their_heads: Option>, + pub their_need: Option>, + pub their_have: Option>, + pub sent_hashes: HashSet, +} + +/// A summary of the changes that the sender of the message already has. +/// This is implicitly a request to the recipient to send all changes that the +/// sender does not already have. +#[derive(Debug, Clone, Default)] +pub struct Have { + /// The heads at the time of the last successful sync with this recipient. + pub last_sync: Vec, + /// A bloom filter summarising all of the changes that the sender of the message has added + /// since the last sync. 
+ pub bloom: BloomFilter, +} + +impl State { + pub fn new() -> Self { + Default::default() + } + + pub fn encode(&self) -> Vec { + let mut buf = vec![SYNC_STATE_TYPE]; + encode_hashes(&mut buf, &self.shared_heads); + buf + } + + pub fn decode(bytes: &[u8]) -> Result { + let mut decoder = Decoder::new(Cow::Borrowed(bytes)); + + let record_type = decoder.read::()?; + if record_type != SYNC_STATE_TYPE { + return Err(decoding::Error::WrongType { + expected_one_of: vec![SYNC_STATE_TYPE], + found: record_type, + }); + } + + let shared_heads = decode_hashes(&mut decoder)?; + Ok(Self { + shared_heads, + last_sent_heads: Vec::new(), + their_heads: None, + their_need: None, + their_have: Some(Vec::new()), + sent_hashes: HashSet::new(), + }) + } +} diff --git a/rust/automerge/src/transaction.rs b/automerge/src/transaction.rs similarity index 52% rename from rust/automerge/src/transaction.rs rename to automerge/src/transaction.rs index b513bc63..667503ae 100644 --- a/rust/automerge/src/transaction.rs +++ b/automerge/src/transaction.rs @@ -1,16 +1,14 @@ mod commit; mod inner; mod manual_transaction; -pub(crate) mod observation; mod result; mod transactable; pub use self::commit::CommitOptions; pub use self::transactable::Transactable; -pub(crate) use inner::{TransactionArgs, TransactionInner}; +pub(crate) use inner::TransactionInner; pub use manual_transaction::Transaction; -pub use observation::{Observation, Observed, UnObserved}; pub use result::Failure; pub use result::Success; -pub type Result = std::result::Result, Failure>; +pub type Result = std::result::Result, Failure>; diff --git a/rust/automerge/src/transaction/commit.rs b/automerge/src/transaction/commit.rs similarity index 96% rename from rust/automerge/src/transaction/commit.rs rename to automerge/src/transaction/commit.rs index d2873af3..d4b12a97 100644 --- a/rust/automerge/src/transaction/commit.rs +++ b/automerge/src/transaction/commit.rs @@ -1,5 +1,5 @@ /// Optional metadata for a commit. 
-#[derive(Debug, Default)] +#[derive(Debug, Default, Clone)] pub struct CommitOptions { pub message: Option, pub time: Option, diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs new file mode 100644 index 00000000..04c62f18 --- /dev/null +++ b/automerge/src/transaction/inner.rs @@ -0,0 +1,437 @@ +use std::num::NonZeroU64; + +use crate::automerge::Actor; +use crate::exid::ExId; +use crate::query::{self, OpIdSearch}; +use crate::types::{Key, ObjId, OpId}; +use crate::{change::export_change, types::Op, Automerge, ChangeHash, Prop}; +use crate::{AutomergeError, ObjType, OpType, ScalarValue}; + +#[derive(Debug, Clone)] +pub struct TransactionInner { + pub(crate) actor: usize, + pub(crate) seq: u64, + pub(crate) start_op: NonZeroU64, + pub(crate) time: i64, + pub(crate) message: Option, + pub(crate) extra_bytes: Vec, + pub(crate) hash: Option, + pub(crate) deps: Vec, + pub(crate) operations: Vec<(ObjId, Op)>, +} + +impl TransactionInner { + pub fn pending_ops(&self) -> usize { + self.operations.len() + } + + /// Commit the operations performed in this transaction, returning the hashes corresponding to + /// the new heads. + pub fn commit( + mut self, + doc: &mut Automerge, + message: Option, + time: Option, + ) -> ChangeHash { + if message.is_some() { + self.message = message; + } + + if let Some(t) = time { + self.time = t; + } + + let num_ops = self.operations.len(); + let change = export_change(self, &doc.ops.m.actors, &doc.ops.m.props); + let hash = change.hash; + doc.update_history(change, num_ops); + debug_assert_eq!(doc.get_heads(), vec![hash]); + hash + } + + /// Undo the operations added in this transaction, returning the number of cancelled + /// operations. 
+ pub fn rollback(self, doc: &mut Automerge) -> usize { + // remove the actor from the cache so that it doesn't end up in the saved document + if doc.states.get(&self.actor).is_none() { + let actor = doc.ops.m.actors.remove_last(); + doc.actor = Actor::Unused(actor); + } + + let num = self.operations.len(); + // remove in reverse order so sets are removed before makes etc... + for (obj, op) in self.operations.iter().rev() { + for pred_id in &op.pred { + if let Some(p) = doc.ops.search(obj, OpIdSearch::new(*pred_id)).index() { + doc.ops.replace(obj, p, |o| o.remove_succ(op)); + } + } + if let Some(pos) = doc.ops.search(obj, OpIdSearch::new(op.id)).index() { + doc.ops.remove(obj, pos); + } + } + num + } + + /// Set the value of property `P` to value `V` in object `obj`. + /// + /// # Returns + /// + /// The opid of the operation which was created, or None if this operation doesn't change the + /// document + /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + pub fn set, V: Into>( + &mut self, + doc: &mut Automerge, + obj: &ExId, + prop: P, + value: V, + ) -> Result<(), AutomergeError> { + let obj = doc.exid_to_obj(obj)?; + let value = value.into(); + self.local_op(doc, obj, prop.into(), value.into())?; + Ok(()) + } + + /// Set the value of property `P` to value `V` in object `obj`. 
+ /// + /// # Returns + /// + /// The opid of the operation which was created, or None if this operation doesn't change the + /// document + /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + pub fn set_object>( + &mut self, + doc: &mut Automerge, + obj: &ExId, + prop: P, + value: ObjType, + ) -> Result { + let obj = doc.exid_to_obj(obj)?; + let id = self.local_op(doc, obj, prop.into(), value.into())?.unwrap(); + Ok(doc.id_to_exid(id)) + } + + fn next_id(&mut self) -> OpId { + OpId( + self.start_op.get() + self.operations.len() as u64, + self.actor, + ) + } + + fn insert_local_op( + &mut self, + doc: &mut Automerge, + op: Op, + pos: usize, + obj: ObjId, + succ_pos: &[usize], + ) { + for succ in succ_pos { + doc.ops.replace(&obj, *succ, |old_op| { + old_op.add_succ(&op); + }); + } + + if !op.is_del() { + doc.ops.insert(pos, &obj, op.clone()); + } + + self.operations.push((obj, op)); + } + + #[allow(clippy::too_many_arguments)] + pub fn mark>( + &mut self, + doc: &mut Automerge, + obj: O, + start: usize, + expand_start: bool, + end: usize, + expand_end: bool, + mark: &str, + value: ScalarValue, + ) -> Result<(), AutomergeError> { + let obj = doc.exid_to_obj(obj.as_ref())?; + + self.do_insert( + doc, + obj, + start, + OpType::mark(mark.into(), expand_start, value), + )?; + self.do_insert(doc, obj, end, OpType::MarkEnd(expand_end))?; + + Ok(()) + } + + pub fn unmark>( + &mut self, + doc: &mut Automerge, + obj: O, + mark: O, + ) -> Result<(), AutomergeError> { + let obj = doc.exid_to_obj(obj.as_ref())?; + let markid = doc.exid_to_obj(mark.as_ref())?.0; + let op1 = Op { + id: self.next_id(), + action: OpType::Del, + key: markid.into(), + succ: Default::default(), + pred: vec![markid], + insert: false, + }; + let q1 = doc.ops.search(&obj, query::SeekOp::new(&op1)); + for i in q1.succ { + doc.ops.replace(&obj, i, |old_op| 
old_op.add_succ(&op1)); + } + self.operations.push((obj, op1)); + + let markid = markid.next(); + let op2 = Op { + id: self.next_id(), + action: OpType::Del, + key: markid.into(), + succ: Default::default(), + pred: vec![markid], + insert: false, + }; + let q2 = doc.ops.search(&obj, query::SeekOp::new(&op2)); + + for i in q2.succ { + doc.ops.replace(&obj, i, |old_op| old_op.add_succ(&op2)); + } + self.operations.push((obj, op2)); + Ok(()) + } + + pub fn insert>( + &mut self, + doc: &mut Automerge, + obj: &ExId, + index: usize, + value: V, + ) -> Result<(), AutomergeError> { + let obj = doc.exid_to_obj(obj)?; + let value = value.into(); + self.do_insert(doc, obj, index, value.into())?; + Ok(()) + } + + pub fn insert_object( + &mut self, + doc: &mut Automerge, + obj: &ExId, + index: usize, + value: ObjType, + ) -> Result { + let obj = doc.exid_to_obj(obj)?; + let id = self.do_insert(doc, obj, index, value.into())?.unwrap(); + Ok(doc.id_to_exid(id)) + } + + fn do_insert( + &mut self, + doc: &mut Automerge, + obj: ObjId, + index: usize, + action: OpType, + ) -> Result, AutomergeError> { + let id = self.next_id(); + + let query = doc.ops.search(&obj, query::InsertNth::new(index)); + + let key = query.key()?; + + let is_make = matches!(&action, OpType::Make(_)); + + let op = Op { + id, + action, + key, + succ: Default::default(), + pred: Default::default(), + insert: true, + }; + + doc.ops.insert(query.pos(), &obj, op.clone()); + self.operations.push((obj, op)); + + if is_make { + Ok(Some(id)) + } else { + Ok(None) + } + } + + pub(crate) fn local_op( + &mut self, + doc: &mut Automerge, + obj: ObjId, + prop: Prop, + action: OpType, + ) -> Result, AutomergeError> { + match prop { + Prop::Map(s) => self.local_map_op(doc, obj, s, action), + Prop::Seq(n) => self.local_list_op(doc, obj, n, action), + } + } + + fn local_map_op( + &mut self, + doc: &mut Automerge, + obj: ObjId, + prop: String, + action: OpType, + ) -> Result, AutomergeError> { + if prop.is_empty() { + return 
Err(AutomergeError::EmptyStringKey); + } + + let id = self.next_id(); + let prop = doc.ops.m.props.cache(prop); + let query = doc.ops.search(&obj, query::Prop::new(prop)); + + // no key present to delete + if query.ops.is_empty() && action == OpType::Del { + return Ok(None); + } + + if query.ops.len() == 1 && query.ops[0].is_noop(&action) { + return Ok(None); + } + + let is_make = matches!(&action, OpType::Make(_)); + + let pred = query.ops.iter().map(|op| op.id).collect(); + + let op = Op { + id, + action, + key: Key::Map(prop), + succ: Default::default(), + pred, + insert: false, + }; + + self.insert_local_op(doc, op, query.pos, obj, &query.ops_pos); + + if is_make { + Ok(Some(id)) + } else { + Ok(None) + } + } + + fn local_list_op( + &mut self, + doc: &mut Automerge, + obj: ObjId, + index: usize, + action: OpType, + ) -> Result, AutomergeError> { + let query = doc.ops.search(&obj, query::Nth::new(index)); + + let id = self.next_id(); + let pred = query.ops.iter().map(|op| op.id).collect(); + let key = query.key()?; + + if query.ops.len() == 1 && query.ops[0].is_noop(&action) { + return Ok(None); + } + + let is_make = matches!(&action, OpType::Make(_)); + + let op = Op { + id, + action, + key, + succ: Default::default(), + pred, + insert: false, + }; + + self.insert_local_op(doc, op, query.pos, obj, &query.ops_pos); + + if is_make { + Ok(Some(id)) + } else { + Ok(None) + } + } + + pub fn inc>( + &mut self, + doc: &mut Automerge, + obj: &ExId, + prop: P, + value: i64, + ) -> Result<(), AutomergeError> { + let obj = doc.exid_to_obj(obj)?; + self.local_op(doc, obj, prop.into(), OpType::Inc(value))?; + Ok(()) + } + + pub fn del>( + &mut self, + doc: &mut Automerge, + obj: &ExId, + prop: P, + ) -> Result<(), AutomergeError> { + let obj = doc.exid_to_obj(obj)?; + self.local_op(doc, obj, prop.into(), OpType::Del)?; + Ok(()) + } + + /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert + /// the new elements + pub fn splice( + &mut self, + doc: &mut Automerge, + obj: &ExId, + mut pos: usize, + del: usize, + vals: impl IntoIterator, + ) -> Result<(), AutomergeError> { + let obj = doc.exid_to_obj(obj)?; + for _ in 0..del { + // del() + self.local_op(doc, obj, pos.into(), OpType::Del)?; + } + for v in vals { + // insert() + self.do_insert(doc, obj, pos, v.into())?; + pos += 1; + } + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use crate::{transaction::Transactable, ROOT}; + + use super::*; + + #[test] + fn map_rollback_doesnt_panic() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + + let a = tx.set_object(ROOT, "a", ObjType::Map).unwrap(); + tx.set(&a, "b", 1).unwrap(); + assert!(tx.value(&a, "b").unwrap().is_some()); + } +} diff --git a/automerge/src/transaction/manual_transaction.rs b/automerge/src/transaction/manual_transaction.rs new file mode 100644 index 00000000..cdfd7530 --- /dev/null +++ b/automerge/src/transaction/manual_transaction.rs @@ -0,0 +1,316 @@ +use crate::exid::ExId; +use crate::AutomergeError; +use crate::{query, Automerge, ChangeHash, Keys, KeysAt, ObjType, Prop, ScalarValue, Value}; + +use super::{CommitOptions, Transactable, TransactionInner}; + +/// A transaction on a document. +/// Transactions group operations into a single change so that no other operations can happen +/// in-between. +/// +/// Created from [`Automerge::transaction`]. +/// +/// ## Drop +/// +/// This transaction should be manually committed or rolled back. If not done manually then it will +/// be rolled back when it is dropped. This is to prevent the document being in an unsafe +/// intermediate state. +/// This is consistent with `?` error handling. +#[derive(Debug)] +pub struct Transaction<'a> { + // this is an option so that we can take it during commit and rollback to prevent it being + // rolled back during drop. 
+ pub(crate) inner: Option, + pub(crate) doc: &'a mut Automerge, +} + +impl<'a> Transaction<'a> { + /// Get the heads of the document before this transaction was started. + pub fn get_heads(&self) -> Vec { + self.doc.get_heads() + } + + /// Commit the operations performed in this transaction, returning the hashes corresponding to + /// the new heads. + pub fn commit(mut self) -> ChangeHash { + self.inner.take().unwrap().commit(self.doc, None, None) + } + + /// Commit the operations in this transaction with some options. + /// + /// ``` + /// # use automerge::transaction::CommitOptions; + /// # use automerge::transaction::Transactable; + /// # use automerge::ROOT; + /// # use automerge::Automerge; + /// # use automerge::ObjType; + /// # use std::time::SystemTime; + /// let mut doc = Automerge::new(); + /// let mut tx = doc.transaction(); + /// tx.set_object(ROOT, "todos", ObjType::List).unwrap(); + /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as + /// i64; + /// tx.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); + /// ``` + pub fn commit_with(mut self, options: CommitOptions) -> ChangeHash { + self.inner + .take() + .unwrap() + .commit(self.doc, options.message, options.time) + } + + /// Undo the operations added in this transaction, returning the number of cancelled + /// operations. + pub fn rollback(mut self) -> usize { + self.inner.take().unwrap().rollback(self.doc) + } +} + +impl<'a> Transactable for Transaction<'a> { + /// Get the number of pending operations in this transaction. + fn pending_ops(&self) -> usize { + self.inner.as_ref().unwrap().pending_ops() + } + + /// Set the value of property `P` to value `V` in object `obj`. 
+ /// + /// # Returns + /// + /// The opid of the operation which was created, or None if this operation doesn't change the + /// document + /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + fn set, P: Into, V: Into>( + &mut self, + obj: O, + prop: P, + value: V, + ) -> Result<(), AutomergeError> { + self.inner + .as_mut() + .unwrap() + .set(self.doc, obj.as_ref(), prop, value) + } + + fn set_object, P: Into>( + &mut self, + obj: O, + prop: P, + value: ObjType, + ) -> Result { + self.inner + .as_mut() + .unwrap() + .set_object(self.doc, obj.as_ref(), prop, value) + } + + fn insert, V: Into>( + &mut self, + obj: O, + index: usize, + value: V, + ) -> Result<(), AutomergeError> { + self.inner + .as_mut() + .unwrap() + .insert(self.doc, obj.as_ref(), index, value) + } + + #[allow(clippy::too_many_arguments)] + fn mark>( + &mut self, + obj: O, + start: usize, + expand_start: bool, + end: usize, + expand_end: bool, + mark: &str, + value: ScalarValue, + ) -> Result<(), AutomergeError> { + self.inner.as_mut().unwrap().mark( + self.doc, + obj, + start, + expand_start, + end, + expand_end, + mark, + value, + ) + } + + fn unmark>(&mut self, obj: O, mark: O) -> Result<(), AutomergeError> { + self.inner.as_mut().unwrap().unmark(self.doc, obj, mark) + } + + fn insert_object( + &mut self, + obj: &ExId, + index: usize, + value: ObjType, + ) -> Result { + self.inner + .as_mut() + .unwrap() + .insert_object(self.doc, obj, index, value) + } + + fn inc, P: Into>( + &mut self, + obj: O, + prop: P, + value: i64, + ) -> Result<(), AutomergeError> { + self.inner + .as_mut() + .unwrap() + .inc(self.doc, obj.as_ref(), prop, value) + } + + fn del, P: Into>( + &mut self, + obj: O, + prop: P, + ) -> Result<(), AutomergeError> { + self.inner + .as_mut() + .unwrap() + .del(self.doc, obj.as_ref(), prop) + } + + /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert + /// the new elements + fn splice, V: IntoIterator>( + &mut self, + obj: O, + pos: usize, + del: usize, + vals: V, + ) -> Result<(), AutomergeError> { + self.inner + .as_mut() + .unwrap() + .splice(self.doc, obj.as_ref(), pos, del, vals) + } + + fn keys>(&self, obj: O) -> Keys { + self.doc.keys(obj) + } + + fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt { + self.doc.keys_at(obj, heads) + } + + fn length>(&self, obj: O) -> usize { + self.doc.length(obj) + } + + fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { + self.doc.length_at(obj, heads) + } + + fn object_type>(&self, obj: O) -> Option { + self.doc.object_type(obj) + } + + fn text>(&self, obj: O) -> Result { + self.doc.text(obj) + } + + fn text_at>( + &self, + obj: O, + heads: &[ChangeHash], + ) -> Result { + self.doc.text_at(obj, heads) + } + + fn list>(&self, obj: O) -> Result, AutomergeError> { + self.doc.list(obj) + } + + fn list_at>( + &self, + obj: O, + heads: &[ChangeHash], + ) -> Result, AutomergeError> { + self.doc.list_at(obj, heads) + } + + fn spans>(&self, obj: O) -> Result, AutomergeError> { + self.doc.spans(obj) + } + + fn raw_spans>(&self, obj: O) -> Result, AutomergeError> { + self.doc.raw_spans(obj) + } + + fn attribute>( + &self, + obj: O, + baseline: &[ChangeHash], + change_sets: &[Vec], + ) -> Result, AutomergeError> { + self.doc.attribute(obj, baseline, change_sets) + } + + fn attribute2>( + &self, + obj: O, + baseline: &[ChangeHash], + change_sets: &[Vec], + ) -> Result, AutomergeError> { + self.doc.attribute2(obj, baseline, change_sets) + } + + fn value, P: Into>( + &self, + obj: O, + prop: P, + ) -> Result, AutomergeError> { + self.doc.value(obj, prop) + } + + fn value_at, P: Into>( + &self, + obj: O, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError> { + self.doc.value_at(obj, prop, heads) + } + + fn values, P: Into>( + &self, + obj: O, + prop: P, + ) -> Result, AutomergeError> { + self.doc.values(obj, 
prop) + } + + fn values_at, P: Into>( + &self, + obj: O, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError> { + self.doc.values_at(obj, prop, heads) + } +} + +// If a transaction is not commited or rolled back manually then it can leave the document in an +// intermediate state. +// This defaults to rolling back the transaction to be compatible with `?` error returning before +// reaching a call to `commit`. +impl<'a> Drop for Transaction<'a> { + fn drop(&mut self) { + if let Some(txn) = self.inner.take() { + txn.rollback(self.doc); + } + } +} diff --git a/rust/automerge/src/transaction/result.rs b/automerge/src/transaction/result.rs similarity index 68% rename from rust/automerge/src/transaction/result.rs rename to automerge/src/transaction/result.rs index 5327ff44..345c9f2c 100644 --- a/rust/automerge/src/transaction/result.rs +++ b/automerge/src/transaction/result.rs @@ -2,12 +2,11 @@ use crate::ChangeHash; /// The result of a successful, and committed, transaction. #[derive(Debug)] -pub struct Success { +pub struct Success { /// The result of the transaction. pub result: O, - /// The hash of the change, will be `None` if the transaction did not create any operations - pub hash: Option, - pub op_observer: Obs, + /// The hash of the change, also the head of the document. + pub hash: ChangeHash, } /// The result of a failed, and rolled back, transaction. diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs new file mode 100644 index 00000000..fa8e7ac9 --- /dev/null +++ b/automerge/src/transaction/transactable.rs @@ -0,0 +1,193 @@ +use crate::exid::ExId; +use crate::query; +use crate::{AutomergeError, ChangeHash, Keys, KeysAt, ObjType, Prop, ScalarValue, Value}; +use unicode_segmentation::UnicodeSegmentation; + +/// A way of mutating a document within a single change. +pub trait Transactable { + /// Get the number of pending operations in this transaction. 
+ fn pending_ops(&self) -> usize; + + /// Set the value of property `P` to value `V` in object `obj`. + /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + fn set, P: Into, V: Into>( + &mut self, + obj: O, + prop: P, + value: V, + ) -> Result<(), AutomergeError>; + + /// Set the value of property `P` to the new object `V` in object `obj`. + /// + /// # Returns + /// + /// The id of the object which was created. + /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + fn set_object, P: Into>( + &mut self, + obj: O, + prop: P, + object: ObjType, + ) -> Result; + + /// Insert a value into a list at the given index. + fn insert, V: Into>( + &mut self, + obj: O, + index: usize, + value: V, + ) -> Result<(), AutomergeError>; + + /// Insert an object into a list at the given index. + fn insert_object( + &mut self, + obj: &ExId, + index: usize, + object: ObjType, + ) -> Result; + + /// Set a mark within a range on a list + #[allow(clippy::too_many_arguments)] + fn mark>( + &mut self, + obj: O, + start: usize, + expand_start: bool, + end: usize, + expand_end: bool, + mark: &str, + value: ScalarValue, + ) -> Result<(), AutomergeError>; + + fn unmark>(&mut self, obj: O, mark: O) -> Result<(), AutomergeError>; + + /// Increment the counter at the prop in the object by `value`. + fn inc, P: Into>( + &mut self, + obj: O, + prop: P, + value: i64, + ) -> Result<(), AutomergeError>; + + /// Delete the value at prop in the object. + fn del, P: Into>(&mut self, obj: O, prop: P) + -> Result<(), AutomergeError>; + + fn splice, V: IntoIterator>( + &mut self, + obj: O, + pos: usize, + del: usize, + vals: V, + ) -> Result<(), AutomergeError>; + + /// Like [`Self::splice`] but for text. 
+ fn splice_text>( + &mut self, + obj: O, + pos: usize, + del: usize, + text: &str, + ) -> Result<(), AutomergeError> { + let text = text.to_owned(); + let vals = text.graphemes(true).map(|c| c.into()); + self.splice(obj, pos, del, vals) + } + + /// Get the keys of the given object, it should be a map. + fn keys>(&self, obj: O) -> Keys; + + /// Get the keys of the given object at a point in history. + fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt; + + /// Get the length of the given object. + fn length>(&self, obj: O) -> usize; + + /// Get the length of the given object at a point in history. + fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize; + + /// Get type for object + fn object_type>(&self, obj: O) -> Option; + + /// Get the string that this text object represents. + fn text>(&self, obj: O) -> Result; + + /// Get the string that this text object represents at a point in history. + fn text_at>( + &self, + obj: O, + heads: &[ChangeHash], + ) -> Result; + + /// Get the string that this text object represents. + fn list>(&self, obj: O) -> Result, AutomergeError>; + + /// Get the string that this text object represents at a point in history. + fn list_at>( + &self, + obj: O, + heads: &[ChangeHash], + ) -> Result, AutomergeError>; + + /// test spans api for mark/span experiment + fn spans>(&self, obj: O) -> Result, AutomergeError>; + + /// test raw_spans api for mark/span experiment + fn raw_spans>(&self, obj: O) -> Result, AutomergeError>; + + /// test attribute api for mark/span experiment + fn attribute>( + &self, + obj: O, + baseline: &[ChangeHash], + change_sets: &[Vec], + ) -> Result, AutomergeError>; + + /// test attribute api for mark/span experiment + fn attribute2>( + &self, + obj: O, + baseline: &[ChangeHash], + change_sets: &[Vec], + ) -> Result, AutomergeError>; + + /// Get the value at this prop in the object. 
+ fn value, P: Into>( + &self, + obj: O, + prop: P, + ) -> Result, AutomergeError>; + + /// Get the value at this prop in the object at a point in history. + fn value_at, P: Into>( + &self, + obj: O, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError>; + + fn values, P: Into>( + &self, + obj: O, + prop: P, + ) -> Result, AutomergeError>; + + fn values_at, P: Into>( + &self, + obj: O, + prop: P, + heads: &[ChangeHash], + ) -> Result, AutomergeError>; +} diff --git a/automerge/src/types.rs b/automerge/src/types.rs new file mode 100644 index 00000000..ae4ed432 --- /dev/null +++ b/automerge/src/types.rs @@ -0,0 +1,568 @@ +use crate::error; +use crate::legacy as amp; +use serde::{Deserialize, Serialize}; +use std::cmp::Eq; +use std::fmt; +use std::str::FromStr; +use tinyvec::{ArrayVec, TinyVec}; + +pub(crate) use crate::clock::Clock; +pub(crate) use crate::value::{Counter, ScalarValue, Value}; + +pub(crate) const HEAD: ElemId = ElemId(OpId(0, 0)); +pub(crate) const ROOT: OpId = OpId(0, 0); + +const ROOT_STR: &str = "_root"; +const HEAD_STR: &str = "_head"; + +/// An actor id is a sequence of bytes. By default we use a uuid which can be nicely stack +/// allocated. +/// +/// In the event that users want to use their own type of identifier that is longer than a uuid +/// then they will likely end up pushing it onto the heap which is still fine. +/// +// Note that change encoding relies on the Ord implementation for the ActorId being implemented in +// terms of the lexicographic ordering of the underlying bytes. 
Be aware of this if you are +// changing the ActorId implementation in ways which might affect the Ord implementation +#[derive(Eq, PartialEq, Hash, Clone, PartialOrd, Ord)] +#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))] +pub struct ActorId(TinyVec<[u8; 16]>); + +impl fmt::Debug for ActorId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("ActorID") + .field(&hex::encode(&self.0)) + .finish() + } +} + +impl ActorId { + pub fn random() -> ActorId { + ActorId(TinyVec::from(*uuid::Uuid::new_v4().as_bytes())) + } + + pub fn to_bytes(&self) -> &[u8] { + &self.0 + } + + pub fn to_hex_string(&self) -> String { + hex::encode(&self.0) + } +} + +impl TryFrom<&str> for ActorId { + type Error = error::InvalidActorId; + + fn try_from(s: &str) -> Result { + hex::decode(s) + .map(ActorId::from) + .map_err(|_| error::InvalidActorId(s.into())) + } +} + +impl From for ActorId { + fn from(u: uuid::Uuid) -> Self { + ActorId(TinyVec::from(*u.as_bytes())) + } +} + +impl From<&[u8]> for ActorId { + fn from(b: &[u8]) -> Self { + ActorId(TinyVec::from(b)) + } +} + +impl From<&Vec> for ActorId { + fn from(b: &Vec) -> Self { + ActorId::from(b.as_slice()) + } +} + +impl From> for ActorId { + fn from(b: Vec) -> Self { + let inner = if let Ok(arr) = ArrayVec::try_from(b.as_slice()) { + TinyVec::Inline(arr) + } else { + TinyVec::Heap(b) + }; + ActorId(inner) + } +} + +impl From<[u8; N]> for ActorId { + fn from(array: [u8; N]) -> Self { + ActorId::from(&array) + } +} + +impl From<&[u8; N]> for ActorId { + fn from(slice: &[u8; N]) -> Self { + let inner = if let Ok(arr) = ArrayVec::try_from(slice.as_slice()) { + TinyVec::Inline(arr) + } else { + TinyVec::Heap(slice.to_vec()) + }; + ActorId(inner) + } +} + +impl FromStr for ActorId { + type Err = error::InvalidActorId; + + fn from_str(s: &str) -> Result { + ActorId::try_from(s) + } +} + +impl fmt::Display for ActorId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, 
"{}", self.to_hex_string()) + } +} + +#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Copy, Hash)] +#[serde(rename_all = "camelCase", untagged)] +pub enum ObjType { + Map, + Table, + List, + Text, +} + +impl ObjType { + pub fn is_sequence(&self) -> bool { + matches!(self, Self::List | Self::Text) + } +} + +impl From for ObjType { + fn from(other: amp::MapType) -> Self { + match other { + amp::MapType::Map => Self::Map, + amp::MapType::Table => Self::Table, + } + } +} + +impl From for ObjType { + fn from(other: amp::SequenceType) -> Self { + match other { + amp::SequenceType::List => Self::List, + amp::SequenceType::Text => Self::Text, + } + } +} + +impl fmt::Display for ObjType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ObjType::Map => write!(f, "map"), + ObjType::Table => write!(f, "table"), + ObjType::List => write!(f, "list"), + ObjType::Text => write!(f, "text"), + } + } +} + +#[derive(PartialEq, Debug, Clone)] +pub enum OpType { + Make(ObjType), + /// Perform a deletion, expanding the operation to cover `n` deletions (multiOp). 
+ Del, + Inc(i64), + Set(ScalarValue), + MarkBegin(MarkData), + MarkEnd(bool), +} + +impl OpType { + pub(crate) fn mark(name: String, expand: bool, value: ScalarValue) -> Self { + OpType::MarkBegin(MarkData { + name, + expand, + value, + }) + } +} + +#[derive(PartialEq, Debug, Clone)] +pub struct MarkData { + pub name: String, + pub value: ScalarValue, + pub expand: bool, +} + +impl From for OpType { + fn from(v: ObjType) -> Self { + OpType::Make(v) + } +} + +impl From for OpType { + fn from(v: ScalarValue) -> Self { + OpType::Set(v) + } +} + +#[derive(Debug)] +pub(crate) enum Export { + Id(OpId), + Special(String), + Prop(usize), +} + +pub(crate) trait Exportable { + fn export(&self) -> Export; +} + +impl OpId { + #[inline] + pub fn counter(&self) -> u64 { + self.0 + } + #[inline] + pub fn actor(&self) -> usize { + self.1 + } + #[inline] + pub fn prev(&self) -> OpId { + OpId(self.0 - 1, self.1) + } + #[inline] + pub fn next(&self) -> OpId { + OpId(self.0 + 1, self.1) + } +} + +impl Exportable for ObjId { + fn export(&self) -> Export { + if self.0 == ROOT { + Export::Special(ROOT_STR.to_owned()) + } else { + Export::Id(self.0) + } + } +} + +impl Exportable for &ObjId { + fn export(&self) -> Export { + if self.0 == ROOT { + Export::Special(ROOT_STR.to_owned()) + } else { + Export::Id(self.0) + } + } +} + +impl Exportable for ElemId { + fn export(&self) -> Export { + if self == &HEAD { + Export::Special(HEAD_STR.to_owned()) + } else { + Export::Id(self.0) + } + } +} + +impl Exportable for OpId { + fn export(&self) -> Export { + Export::Id(*self) + } +} + +impl Exportable for Key { + fn export(&self) -> Export { + match self { + Key::Map(p) => Export::Prop(*p), + Key::Seq(e) => e.export(), + } + } +} + +impl From for ObjId { + fn from(o: OpId) -> Self { + ObjId(o) + } +} + +impl From for ElemId { + fn from(o: OpId) -> Self { + ElemId(o) + } +} + +impl From for Prop { + fn from(p: String) -> Self { + Prop::Map(p) + } +} + +impl From<&String> for Prop { + fn from(p: 
&String) -> Self { + Prop::Map(p.clone()) + } +} + +impl From<&str> for Prop { + fn from(p: &str) -> Self { + Prop::Map(p.to_owned()) + } +} + +impl From for Prop { + fn from(index: usize) -> Self { + Prop::Seq(index) + } +} + +impl From for Prop { + fn from(index: f64) -> Self { + Prop::Seq(index as usize) + } +} + +impl From for Key { + fn from(id: OpId) -> Self { + Key::Seq(ElemId(id)) + } +} + +impl From for Key { + fn from(e: ElemId) -> Self { + Key::Seq(e) + } +} + +impl From> for ElemId { + fn from(e: Option) -> Self { + e.unwrap_or(HEAD) + } +} + +impl From> for Key { + fn from(e: Option) -> Self { + Key::Seq(e.into()) + } +} + +#[derive(Debug, PartialEq, PartialOrd, Eq, Ord, Clone, Copy, Hash)] +pub(crate) enum Key { + Map(usize), + Seq(ElemId), +} + +#[derive(Debug, PartialEq, PartialOrd, Eq, Ord, Clone)] +pub enum Prop { + Map(String), + Seq(usize), +} + +impl Key { + pub fn elemid(&self) -> Option { + match self { + Key::Map(_) => None, + Key::Seq(id) => Some(*id), + } + } +} + +#[derive(Debug, Clone, PartialOrd, Ord, Eq, PartialEq, Copy, Hash, Default)] +pub(crate) struct OpId(pub u64, pub usize); + +#[derive(Debug, Clone, Copy, PartialOrd, Eq, PartialEq, Ord, Hash, Default)] +pub(crate) struct ObjId(pub OpId); + +impl ObjId { + pub fn root() -> Self { + ObjId(OpId(0, 0)) + } +} + +#[derive(Debug, Clone, Copy, PartialOrd, Eq, PartialEq, Ord, Hash, Default)] +pub(crate) struct ElemId(pub OpId); + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct Op { + pub id: OpId, + pub action: OpType, + pub key: Key, + pub succ: Vec, + pub pred: Vec, + pub insert: bool, +} + +impl Op { + pub(crate) fn add_succ(&mut self, op: &Op) { + self.succ.push(op.id); + if let OpType::Set(ScalarValue::Counter(Counter { + current, + increments, + .. 
+ })) = &mut self.action + { + if let OpType::Inc(n) = &op.action { + *current += *n; + *increments += 1; + } + } + } + + pub(crate) fn remove_succ(&mut self, op: &Op) { + self.succ.retain(|id| id != &op.id); + if let OpType::Set(ScalarValue::Counter(Counter { + current, + increments, + .. + })) = &mut self.action + { + if let OpType::Inc(n) = &op.action { + *current -= *n; + *increments -= 1; + } + } + } + + pub fn visible(&self) -> bool { + if self.is_inc() || self.is_mark() { + false + } else if self.is_counter() { + self.succ.len() <= self.incs() + } else { + self.succ.is_empty() + } + } + + pub fn incs(&self) -> usize { + if let OpType::Set(ScalarValue::Counter(Counter { increments, .. })) = &self.action { + *increments + } else { + 0 + } + } + + pub fn is_del(&self) -> bool { + matches!(&self.action, OpType::Del) + } + + pub fn is_inc(&self) -> bool { + matches!(&self.action, OpType::Inc(_)) + } + + pub fn valid_mark_anchor(&self) -> bool { + self.succ.is_empty() + && matches!( + &self.action, + OpType::MarkBegin(MarkData { expand: true, .. 
}) | OpType::MarkEnd(false) + ) + } + + pub fn is_mark(&self) -> bool { + matches!(&self.action, OpType::MarkBegin(_) | OpType::MarkEnd(_)) + } + + pub fn is_counter(&self) -> bool { + matches!(&self.action, OpType::Set(ScalarValue::Counter(_))) + } + + pub fn is_noop(&self, action: &OpType) -> bool { + matches!((&self.action, action), (OpType::Set(n), OpType::Set(m)) if n == m) + } + + pub fn overwrites(&self, other: &Op) -> bool { + self.pred.iter().any(|i| i == &other.id) + } + + pub fn elemid(&self) -> Option { + if self.insert { + Some(ElemId(self.id)) + } else { + self.key.elemid() + } + } + + pub fn as_string(&self) -> Option { + match &self.action { + OpType::Set(scalar) => scalar.as_string(), + _ => None, + } + } + + pub fn value(&self) -> Value { + match &self.action { + OpType::Make(obj_type) => Value::Object(*obj_type), + OpType::Set(scalar) => Value::Scalar(scalar.clone()), + _ => panic!("cant convert op into a value - {:?}", self), + } + } + + #[allow(dead_code)] + pub fn dump(&self) -> String { + match &self.action { + OpType::Set(value) if self.insert => format!("i:{}", value), + OpType::Set(value) => format!("s:{}", value), + OpType::Make(obj) => format!("make{}", obj), + OpType::MarkBegin(m) => format!("mark{}={}", m.name, m.value), + OpType::MarkEnd(_) => "unmark".into(), + OpType::Inc(val) => format!("inc:{}", val), + OpType::Del => "del".to_string(), + } + } +} + +#[derive(Debug, Clone)] +pub struct Peer {} + +/// The sha256 hash of a change. 
+#[derive(Eq, PartialEq, Hash, Clone, PartialOrd, Ord, Copy)] +pub struct ChangeHash(pub [u8; 32]); + +impl fmt::Debug for ChangeHash { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("ChangeHash") + .field(&hex::encode(&self.0)) + .finish() + } +} + +impl fmt::Display for ChangeHash { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", hex::encode(&self.0)) + } +} + +#[derive(thiserror::Error, Debug)] +pub enum ParseChangeHashError { + #[error(transparent)] + HexDecode(#[from] hex::FromHexError), + #[error("incorrect length, change hash should be 32 bytes, got {actual}")] + IncorrectLength { actual: usize }, +} + +impl FromStr for ChangeHash { + type Err = ParseChangeHashError; + + fn from_str(s: &str) -> Result { + let bytes = hex::decode(s)?; + if bytes.len() == 32 { + Ok(ChangeHash(bytes.try_into().unwrap())) + } else { + Err(ParseChangeHashError::IncorrectLength { + actual: bytes.len(), + }) + } + } +} + +impl TryFrom<&[u8]> for ChangeHash { + type Error = error::InvalidChangeHashSlice; + + fn try_from(bytes: &[u8]) -> Result { + if bytes.len() != 32 { + Err(error::InvalidChangeHashSlice(Vec::from(bytes))) + } else { + let mut array = [0; 32]; + array.copy_from_slice(bytes); + Ok(ChangeHash(array)) + } + } +} diff --git a/rust/automerge/src/value.rs b/automerge/src/value.rs similarity index 72% rename from rust/automerge/src/value.rs rename to automerge/src/value.rs index be128787..c28d9259 100644 --- a/rust/automerge/src/value.rs +++ b/automerge/src/value.rs @@ -1,64 +1,65 @@ use crate::error; -use crate::types::ObjType; +use crate::types::{ObjType, Op, OpId, OpType}; use serde::{Deserialize, Serialize, Serializer}; use smol_str::SmolStr; -use std::borrow::Cow; use std::fmt; -/// The type of values in an automerge document #[derive(Debug, Clone, PartialEq)] -pub enum Value<'a> { - /// An composite object of type `ObjType` +pub enum Value { Object(ObjType), - /// A non composite value - // TODO: if we 
don't have to store this in patches any more then it might be able to be just a - // &'a ScalarValue rather than a Cow - Scalar(Cow<'a, ScalarValue>), + Scalar(ScalarValue), } -impl<'a> Value<'a> { - pub fn map() -> Value<'a> { +impl Value { + pub fn as_string(&self) -> Option { + match self { + Value::Scalar(val) => val.as_string(), + _ => None, + } + } + + pub fn map() -> Value { Value::Object(ObjType::Map) } - pub fn list() -> Value<'a> { + pub fn list() -> Value { Value::Object(ObjType::List) } - pub fn text() -> Value<'a> { + pub fn text() -> Value { Value::Object(ObjType::Text) } - pub fn table() -> Value<'a> { + pub fn table() -> Value { Value::Object(ObjType::Table) } - pub fn str(s: &str) -> Value<'a> { - Value::Scalar(Cow::Owned(ScalarValue::Str(s.into()))) + pub fn str(s: &str) -> Value { + Value::Scalar(ScalarValue::Str(s.into())) } - pub fn int(n: i64) -> Value<'a> { - Value::Scalar(Cow::Owned(ScalarValue::Int(n))) + pub fn int(n: i64) -> Value { + Value::Scalar(ScalarValue::Int(n)) } - pub fn uint(n: u64) -> Value<'a> { - Value::Scalar(Cow::Owned(ScalarValue::Uint(n))) + pub fn uint(n: u64) -> Value { + Value::Scalar(ScalarValue::Uint(n)) } - pub fn counter(n: i64) -> Value<'a> { - Value::Scalar(Cow::Owned(ScalarValue::counter(n))) + pub fn counter(n: i64) -> Value { + Value::Scalar(ScalarValue::counter(n)) } - pub fn timestamp(n: i64) -> Value<'a> { - Value::Scalar(Cow::Owned(ScalarValue::Timestamp(n))) + pub fn timestamp(n: i64) -> Value { + Value::Scalar(ScalarValue::Timestamp(n)) } - pub fn f64(n: f64) -> Value<'a> { - Value::Scalar(Cow::Owned(ScalarValue::F64(n))) + pub fn f64(n: f64) -> Value { + Value::Scalar(ScalarValue::F64(n)) } - pub fn bytes(b: Vec) -> Value<'a> { - Value::Scalar(Cow::Owned(ScalarValue::Bytes(b))) + pub fn bytes(b: Vec) -> Value { + Value::Scalar(ScalarValue::Bytes(b)) } pub fn is_object(&self) -> bool { @@ -70,80 +71,44 @@ impl<'a> Value<'a> { } pub fn is_bytes(&self) -> bool { - if let Self::Scalar(s) = self { - 
s.is_bytes() - } else { - false - } + matches!(self, Self::Scalar(ScalarValue::Bytes(_))) } pub fn is_str(&self) -> bool { - if let Self::Scalar(s) = self { - s.is_str() - } else { - false - } + matches!(self, Self::Scalar(ScalarValue::Str(_))) } pub fn is_int(&self) -> bool { - if let Self::Scalar(s) = self { - s.is_int() - } else { - false - } + matches!(self, Self::Scalar(ScalarValue::Int(_))) } pub fn is_uint(&self) -> bool { - if let Self::Scalar(s) = self { - s.is_uint() - } else { - false - } + matches!(self, Self::Scalar(ScalarValue::Uint(_))) } pub fn is_f64(&self) -> bool { - if let Self::Scalar(s) = self { - s.is_f64() - } else { - false - } + matches!(self, Self::Scalar(ScalarValue::F64(_))) } pub fn is_counter(&self) -> bool { - if let Self::Scalar(s) = self { - s.is_counter() - } else { - false - } + matches!(self, Self::Scalar(ScalarValue::Counter(_))) } pub fn is_timestamp(&self) -> bool { - if let Self::Scalar(s) = self { - s.is_timestamp() - } else { - false - } + matches!(self, Self::Scalar(ScalarValue::Timestamp(_))) } pub fn is_boolean(&self) -> bool { - if let Self::Scalar(s) = self { - s.is_boolean() - } else { - false - } + matches!(self, Self::Scalar(ScalarValue::Boolean(_))) } pub fn is_null(&self) -> bool { - if let Self::Scalar(s) = self { - s.is_null() - } else { - false - } + matches!(self, Self::Scalar(ScalarValue::Null)) } pub fn into_scalar(self) -> Result { match self { - Self::Scalar(s) => Ok(s.into_owned()), + Self::Scalar(s) => Ok(s), _ => Err(self), } } @@ -162,28 +127,9 @@ impl<'a> Value<'a> { } } - pub fn into_owned(self) -> Value<'static> { - match self { - Value::Object(o) => Value::Object(o), - Value::Scalar(Cow::Owned(s)) => Value::Scalar(Cow::Owned(s)), - Value::Scalar(Cow::Borrowed(s)) => Value::Scalar(Cow::Owned((*s).clone())), - } - } - - pub fn to_owned(&self) -> Value<'static> { - match self { - Value::Object(o) => Value::Object(*o), - Value::Scalar(Cow::Owned(s)) => Value::Scalar(Cow::Owned(s.clone())), - 
Value::Scalar(Cow::Borrowed(s)) => Value::Scalar(Cow::Owned((*s).clone())), - } - } - pub fn into_bytes(self) -> Result, Self> { match self { - Value::Scalar(s) => s - .into_owned() - .into_bytes() - .map_err(|v| Value::Scalar(Cow::Owned(v))), + Value::Scalar(s) => s.into_bytes().map_err(Value::Scalar), _ => Err(self), } } @@ -197,10 +143,7 @@ impl<'a> Value<'a> { pub fn into_string(self) -> Result { match self { - Value::Scalar(s) => s - .into_owned() - .into_string() - .map_err(|v| Value::Scalar(Cow::Owned(v))), + Value::Scalar(s) => s.into_string().map_err(Value::Scalar), _ => Err(self), } } @@ -242,7 +185,7 @@ impl<'a> Value<'a> { } } -impl<'a> fmt::Display for Value<'a> { +impl fmt::Display for Value { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Value::Object(o) => write!(f, "Object: {}", o), @@ -251,93 +194,110 @@ impl<'a> fmt::Display for Value<'a> { } } -impl<'a> From<&str> for Value<'a> { +impl From<&str> for Value { fn from(s: &str) -> Self { - Value::Scalar(Cow::Owned(ScalarValue::Str(s.into()))) + Value::Scalar(ScalarValue::Str(s.into())) } } -impl<'a> From<&String> for Value<'a> { - fn from(s: &String) -> Self { - Value::Scalar(Cow::Owned(ScalarValue::Str(s.into()))) - } -} - -impl<'a> From for Value<'a> { +impl From for Value { fn from(s: String) -> Self { - Value::Scalar(Cow::Owned(ScalarValue::Str(s.into()))) + Value::Scalar(ScalarValue::Str(s.into())) } } -impl<'a> From for Value<'a> { - fn from(s: SmolStr) -> Self { - Value::Scalar(Cow::Owned(ScalarValue::Str(s))) - } -} - -impl<'a> From for Value<'a> { +impl From for Value { fn from(c: char) -> Self { - Value::Scalar(Cow::Owned(ScalarValue::Str(SmolStr::new(c.to_string())))) + Value::Scalar(ScalarValue::Str(SmolStr::new(c.to_string()))) } } -impl<'a> From> for Value<'a> { +impl From> for Value { fn from(v: Vec) -> Self { - Value::Scalar(Cow::Owned(ScalarValue::Bytes(v))) + Value::Scalar(ScalarValue::Bytes(v)) } } -impl<'a> From for Value<'a> { +impl From for Value { fn 
from(n: f64) -> Self { - Value::Scalar(Cow::Owned(ScalarValue::F64(n))) + Value::Scalar(ScalarValue::F64(n)) } } -impl<'a> From for Value<'a> { +impl From for Value { fn from(n: i64) -> Self { - Value::Scalar(Cow::Owned(ScalarValue::Int(n))) + Value::Scalar(ScalarValue::Int(n)) } } -impl<'a> From for Value<'a> { +impl From for Value { fn from(n: i32) -> Self { - Value::Scalar(Cow::Owned(ScalarValue::Int(n.into()))) + Value::Scalar(ScalarValue::Int(n.into())) } } -impl<'a> From for Value<'a> { +impl From for Value { fn from(n: u32) -> Self { - Value::Scalar(Cow::Owned(ScalarValue::Uint(n.into()))) + Value::Scalar(ScalarValue::Uint(n.into())) } } -impl<'a> From for Value<'a> { +impl From for Value { fn from(n: u64) -> Self { - Value::Scalar(Cow::Owned(ScalarValue::Uint(n))) + Value::Scalar(ScalarValue::Uint(n)) } } -impl<'a> From for Value<'a> { +impl From for Value { fn from(v: bool) -> Self { - Value::Scalar(Cow::Owned(ScalarValue::Boolean(v))) + Value::Scalar(ScalarValue::Boolean(v)) } } -impl<'a> From<()> for Value<'a> { +impl From<()> for Value { fn from(_: ()) -> Self { - Value::Scalar(Cow::Owned(ScalarValue::Null)) + Value::Scalar(ScalarValue::Null) } } -impl<'a> From for Value<'a> { +impl From for Value { fn from(o: ObjType) -> Self { Value::Object(o) } } -impl<'a> From for Value<'a> { +impl From for Value { fn from(v: ScalarValue) -> Self { - Value::Scalar(Cow::Owned(v)) + Value::Scalar(v) + } +} + +impl From<&Op> for (Value, OpId) { + fn from(op: &Op) -> Self { + match &op.action { + OpType::Make(obj_type) => (Value::Object(*obj_type), op.id), + OpType::Set(scalar) => (Value::Scalar(scalar.clone()), op.id), + _ => panic!("cant convert op into a value - {:?}", op), + } + } +} + +impl From for (Value, OpId) { + fn from(op: Op) -> Self { + match &op.action { + OpType::Make(obj_type) => (Value::Object(*obj_type), op.id), + OpType::Set(scalar) => (Value::Scalar(scalar.clone()), op.id), + _ => panic!("cant convert op into a value - {:?}", op), + } + } +} + +impl 
From for OpType { + fn from(v: Value) -> Self { + match v { + Value::Object(o) => OpType::Make(o), + Value::Scalar(s) => OpType::Set(s), + } } } @@ -366,21 +326,12 @@ pub struct Counter { pub(crate) increments: usize, } -impl Counter { - pub(crate) fn increment>(&mut self, increments: I) { - for inc in increments { - self.current += inc; - self.increments += 1; - } - } -} - impl Serialize for Counter { fn serialize(&self, serializer: S) -> Result where S: Serializer, { - serializer.serialize_i64(self.current) + serializer.serialize_i64(self.start) } } @@ -434,7 +385,6 @@ impl From<&Counter> for f64 { } } -/// A value which is not a composite value #[derive(Serialize, PartialEq, Debug, Clone)] #[serde(untagged)] pub enum ScalarValue { @@ -446,11 +396,6 @@ pub enum ScalarValue { Counter(Counter), Timestamp(i64), Boolean(bool), - /// A value from a future version of automerge - Unknown { - type_code: u8, - bytes: Vec, - }, Null, } @@ -653,6 +598,13 @@ impl ScalarValue { } } + pub fn as_string(&self) -> Option { + match self { + ScalarValue::Str(s) => Some(s.to_string()), + _ => None, + } + } + pub fn counter(n: i64) -> ScalarValue { ScalarValue::Counter(n.into()) } @@ -664,12 +616,6 @@ impl From<&str> for ScalarValue { } } -impl From<&String> for ScalarValue { - fn from(s: &String) -> Self { - ScalarValue::Str(s.into()) - } -} - impl From for ScalarValue { fn from(s: String) -> Self { ScalarValue::Str(s.into()) @@ -742,7 +688,6 @@ impl fmt::Display for ScalarValue { ScalarValue::Timestamp(i) => write!(f, "Timestamp: {}", i), ScalarValue::Boolean(b) => write!(f, "{}", b), ScalarValue::Null => write!(f, "null"), - ScalarValue::Unknown { type_code, .. 
} => write!(f, "unknown type {}", type_code), } } } diff --git a/rust/automerge/src/visualisation.rs b/automerge/src/visualisation.rs similarity index 70% rename from rust/automerge/src/visualisation.rs rename to automerge/src/visualisation.rs index 31e9bbdb..a25bf22a 100644 --- a/rust/automerge/src/visualisation.rs +++ b/automerge/src/visualisation.rs @@ -1,4 +1,4 @@ -use crate::types::{ObjId, Op}; +use crate::types::ObjId; use fxhash::FxHasher; use std::{borrow::Cow, collections::HashMap, hash::BuildHasherDefault}; @@ -16,17 +16,17 @@ impl Default for NodeId { } #[derive(Clone)] -pub(crate) struct Node<'a> { +pub(crate) struct Node<'a, const B: usize> { id: NodeId, children: Vec, - node_type: NodeType<'a>, + node_type: NodeType<'a, B>, metadata: &'a crate::op_set::OpSetMetadata, } #[derive(Clone)] -pub(crate) enum NodeType<'a> { +pub(crate) enum NodeType<'a, const B: usize> { ObjRoot(crate::types::ObjId), - ObjTreeNode(ObjId, &'a crate::op_tree::OpTreeNode, &'a [Op]), + ObjTreeNode(ObjId, &'a crate::op_tree::OpTreeNode), } #[derive(Clone)] @@ -35,30 +35,24 @@ pub(crate) struct Edge { child_id: NodeId, } -pub(crate) struct GraphVisualisation<'a> { - nodes: HashMap>, +pub(crate) struct GraphVisualisation<'a, const B: usize> { + nodes: HashMap>, actor_shorthands: HashMap, } -impl<'a> GraphVisualisation<'a> { +impl<'a, const B: usize> GraphVisualisation<'a, B> { pub(super) fn construct( trees: &'a HashMap< crate::types::ObjId, - crate::op_tree::OpTree, + (crate::types::ObjType, crate::op_tree::OpTreeInternal), BuildHasherDefault, >, metadata: &'a crate::op_set::OpSetMetadata, - ) -> GraphVisualisation<'a> { + ) -> GraphVisualisation<'a, B> { let mut nodes = HashMap::new(); - for (obj_id, tree) in trees { - if let Some(root_node) = &tree.internal.root_node { - let tree_id = Self::construct_nodes( - root_node, - &tree.internal.ops, - obj_id, - &mut nodes, - metadata, - ); + for (obj_id, (_, tree)) in trees { + if let Some(root_node) = &tree.root_node { + let tree_id = 
Self::construct_nodes(root_node, obj_id, &mut nodes, metadata); let obj_tree_id = NodeId::default(); nodes.insert( obj_tree_id, @@ -82,16 +76,15 @@ impl<'a> GraphVisualisation<'a> { } fn construct_nodes( - node: &'a crate::op_tree::OpTreeNode, - ops: &'a [Op], + node: &'a crate::op_tree::OpTreeNode, objid: &ObjId, - nodes: &mut HashMap>, + nodes: &mut HashMap>, m: &'a crate::op_set::OpSetMetadata, ) -> NodeId { let node_id = NodeId::default(); let mut child_ids = Vec::new(); for child in &node.children { - let child_id = Self::construct_nodes(child, ops, objid, nodes, m); + let child_id = Self::construct_nodes(child, objid, nodes, m); child_ids.push(child_id); } nodes.insert( @@ -99,7 +92,7 @@ impl<'a> GraphVisualisation<'a> { Node { id: node_id, children: child_ids, - node_type: NodeType::ObjTreeNode(*objid, node, ops), + node_type: NodeType::ObjTreeNode(*objid, node), metadata: m, }, ); @@ -107,8 +100,8 @@ impl<'a> GraphVisualisation<'a> { } } -impl<'a> dot::GraphWalk<'a, &'a Node<'a>, Edge> for GraphVisualisation<'a> { - fn nodes(&'a self) -> dot::Nodes<'a, &'a Node<'a>> { +impl<'a, const B: usize> dot::GraphWalk<'a, &'a Node<'a, B>, Edge> for GraphVisualisation<'a, B> { + fn nodes(&'a self) -> dot::Nodes<'a, &'a Node<'a, B>> { Cow::Owned(self.nodes.values().collect::>()) } @@ -125,36 +118,36 @@ impl<'a> dot::GraphWalk<'a, &'a Node<'a>, Edge> for GraphVisualisation<'a> { Cow::Owned(edges) } - fn source(&'a self, edge: &Edge) -> &'a Node<'a> { + fn source(&'a self, edge: &Edge) -> &'a Node<'a, B> { self.nodes.get(&edge.parent_id).unwrap() } - fn target(&'a self, edge: &Edge) -> &'a Node<'a> { + fn target(&'a self, edge: &Edge) -> &'a Node<'a, B> { self.nodes.get(&edge.child_id).unwrap() } } -impl<'a> dot::Labeller<'a, &'a Node<'a>, Edge> for GraphVisualisation<'a> { +impl<'a, const B: usize> dot::Labeller<'a, &'a Node<'a, B>, Edge> for GraphVisualisation<'a, B> { fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new("OpSet").unwrap() } - fn node_id(&'a self, n: 
&&Node<'a>) -> dot::Id<'a> { + fn node_id(&'a self, n: &&Node<'a, B>) -> dot::Id<'a> { dot::Id::new(format!("node_{}", n.id.0)).unwrap() } - fn node_shape(&'a self, node: &&'a Node<'a>) -> Option> { + fn node_shape(&'a self, node: &&'a Node<'a, B>) -> Option> { let shape = match node.node_type { - NodeType::ObjTreeNode(_, _, _) => dot::LabelText::label("none"), + NodeType::ObjTreeNode(_, _) => dot::LabelText::label("none"), NodeType::ObjRoot(_) => dot::LabelText::label("ellipse"), }; Some(shape) } - fn node_label(&'a self, n: &&Node<'a>) -> dot::LabelText<'a> { + fn node_label(&'a self, n: &&Node<'a, B>) -> dot::LabelText<'a> { match n.node_type { - NodeType::ObjTreeNode(objid, tree_node, ops) => dot::LabelText::HtmlStr( - OpTable::create(tree_node, ops, &objid, n.metadata, &self.actor_shorthands) + NodeType::ObjTreeNode(objid, tree_node) => dot::LabelText::HtmlStr( + OpTable::create(tree_node, &objid, n.metadata, &self.actor_shorthands) .to_html() .into(), ), @@ -170,9 +163,8 @@ struct OpTable { } impl OpTable { - fn create<'a>( - node: &'a crate::op_tree::OpTreeNode, - ops: &'a [Op], + fn create<'a, const B: usize>( + node: &'a crate::op_tree::OpTreeNode, obj: &ObjId, metadata: &crate::op_set::OpSetMetadata, actor_shorthands: &HashMap, @@ -180,7 +172,7 @@ impl OpTable { let rows = node .elements .iter() - .map(|e| OpTableRow::create(&ops[*e], obj, metadata, actor_shorthands)) + .map(|e| OpTableRow::create(e, obj, metadata, actor_shorthands)) .collect(); OpTable { rows } } @@ -200,7 +192,6 @@ impl OpTable { prop\ action\ succ\ - pred\ \
\ {}\ @@ -216,7 +207,6 @@ struct OpTableRow { prop: String, op_description: String, succ: String, - pred: String, } impl OpTableRow { @@ -227,7 +217,6 @@ impl OpTableRow { &self.prop, &self.op_description, &self.succ, - &self.pred, ]; let row = rows .iter() @@ -245,10 +234,12 @@ impl OpTableRow { actor_shorthands: &HashMap, ) -> Self { let op_description = match &op.action { - crate::OpType::Delete => "del".to_string(), - crate::OpType::Put(v) => format!("set {}", v), + crate::OpType::Del => "del".to_string(), + crate::OpType::Set(v) => format!("set {}", v), crate::OpType::Make(obj) => format!("make {}", obj), - crate::OpType::Increment(v) => format!("inc {}", v), + crate::OpType::Inc(v) => format!("inc {}", v), + crate::OpType::MarkBegin(v) => format!("mark {}={}", v.name, v.value), + crate::OpType::MarkEnd(v) => format!("/mark {}", v), }; let prop = match op.key { crate::types::Key::Map(k) => metadata.props[k].clone(), @@ -259,18 +250,12 @@ impl OpTableRow { .iter() .map(|s| format!(",{}", print_opid(s, actor_shorthands))) .collect(); - let pred = op - .pred - .iter() - .map(|s| format!(",{}", print_opid(s, actor_shorthands))) - .collect(); OpTableRow { op_description, obj_id: print_opid(&obj.0, actor_shorthands), op_id: print_opid(&op.id, actor_shorthands), prop, succ, - pred, } } } diff --git a/automerge/tests/attribute.rs b/automerge/tests/attribute.rs new file mode 100644 index 00000000..c2996656 --- /dev/null +++ b/automerge/tests/attribute.rs @@ -0,0 +1,39 @@ +use automerge::transaction::Transactable; +use automerge::{AutoCommit, AutomergeError, ROOT}; + +/* +mod helpers; +use helpers::{ + pretty_print, realize, realize_obj, + RealizedObject, +}; +*/ + +#[test] +fn simple_attribute_text() -> Result<(), AutomergeError> { + let mut doc = AutoCommit::new(); + let note = doc.set_object(&ROOT, "note", automerge::ObjType::Text)?; + doc.splice_text(¬e, 0, 0, "hello little world")?; + let baseline = doc.get_heads(); + assert!(doc.text(¬e).unwrap() == "hello little 
world"); + let mut doc2 = doc.fork(); + doc2.splice_text(¬e, 5, 7, " big")?; + let h2 = doc2.get_heads(); + assert!(doc2.text(¬e)? == "hello big world"); + let mut doc3 = doc.fork(); + doc3.splice_text(¬e, 0, 0, "Well, ")?; + let h3 = doc3.get_heads(); + assert!(doc3.text(¬e)? == "Well, hello little world"); + doc.merge(&mut doc2)?; + doc.merge(&mut doc3)?; + let text = doc.text(¬e)?; + assert!(text == "Well, hello big world"); + let cs = vec![h2, h3]; + let attribute = doc.attribute(¬e, &baseline, &cs)?; + assert!(&text[attribute[0].add[0].clone()] == " big"); + assert!(attribute[0].del[0] == (15, " little".to_owned())); + //println!("{:?} == {:?}", attribute[0].del[0] , (15, " little".to_owned())); + assert!(&text[attribute[1].add[0].clone()] == "Well, "); + //println!("- ------- attribute = {:?}", attribute); + Ok(()) +} diff --git a/rust/automerge-test/src/lib.rs b/automerge/tests/helpers/mod.rs similarity index 72% rename from rust/automerge-test/src/lib.rs rename to automerge/tests/helpers/mod.rs index a1d4ea89..5384c218 100644 --- a/rust/automerge-test/src/lib.rs +++ b/automerge/tests/helpers/mod.rs @@ -4,8 +4,6 @@ use std::{ hash::Hash, }; -use automerge::ReadDoc; - use serde::ser::{SerializeMap, SerializeSeq}; pub fn new_doc() -> automerge::AutoCommit { @@ -42,19 +40,17 @@ pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { /// ## Constructing documents /// /// ```rust -/// # use automerge::transaction::Transactable; -/// # use automerge_test::{assert_doc, map, list}; -/// let mut doc = automerge::AutoCommit::new(); -/// let todos = doc.put_object(automerge::ROOT, "todos", automerge::ObjType::List).unwrap(); -/// let todo = doc.insert_object(todos, 0, automerge::ObjType::Map).unwrap(); -/// let title = doc.put(todo, "title", "water plants").unwrap(); +/// let mut doc = automerge::Automerge::new(); +/// let todos = doc.set(automerge::ROOT, "todos", automerge::Value::map()).unwrap().unwrap(); +/// let todo = doc.insert(todos, 0, 
automerge::Value::map()).unwrap(); +/// let title = doc.set(todo, "title", "water plants").unwrap().unwrap(); /// /// assert_doc!( /// &doc, /// map!{ /// "todos" => { -/// list![ -/// { map!{ "title" => { "water plants" } } } +/// todos => list![ +/// { map!{ title = "water plants" } } /// ] /// } /// } @@ -67,21 +63,17 @@ pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { /// conflicting values we must capture all of these. /// /// ```rust -/// # use automerge_test::{assert_doc, map}; -/// # use automerge::transaction::Transactable; -/// # use automerge::ReadDoc; -/// -/// let mut doc1 = automerge::AutoCommit::new(); -/// let mut doc2 = automerge::AutoCommit::new(); -/// doc1.put(automerge::ROOT, "field", "one").unwrap(); -/// doc2.put(automerge::ROOT, "field", "two").unwrap(); +/// let mut doc1 = automerge::Automerge::new(); +/// let mut doc2 = automerge::Automerge::new(); +/// let op1 = doc1.set(automerge::ROOT, "field", "one").unwrap().unwrap(); +/// let op2 = doc2.set(automerge::ROOT, "field", "two").unwrap().unwrap(); /// doc1.merge(&mut doc2); /// assert_doc!( -/// doc1.document(), +/// &doc1, /// map!{ /// "field" => { -/// "one", -/// "two" +/// op1 => "one", +/// op2.translate(&doc2) => "two" /// } /// } /// ); @@ -89,11 +81,16 @@ pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { #[macro_export] macro_rules! assert_doc { ($doc: expr, $expected: expr) => {{ - use $crate::realize; + use $crate::helpers::realize; let realized = realize($doc); let expected_obj = $expected.into(); if realized != expected_obj { - $crate::pretty_panic(expected_obj, realized) + let serde_right = serde_json::to_string_pretty(&realized).unwrap(); + let serde_left = serde_json::to_string_pretty(&expected_obj).unwrap(); + panic!( + "documents didn't match\n expected\n{}\n got\n{}", + &serde_left, &serde_right + ); } }}; } @@ -103,11 +100,16 @@ macro_rules! assert_doc { #[macro_export] macro_rules! 
assert_obj { ($doc: expr, $obj_id: expr, $prop: expr, $expected: expr) => {{ - use $crate::realize_prop; + use $crate::helpers::realize_prop; let realized = realize_prop($doc, $obj_id, $prop); let expected_obj = $expected.into(); if realized != expected_obj { - $crate::pretty_panic(expected_obj, realized) + let serde_right = serde_json::to_string_pretty(&realized).unwrap(); + let serde_left = serde_json::to_string_pretty(&expected_obj).unwrap(); + panic!( + "documents didn't match\n expected\n{}\n got\n{}", + &serde_left, &serde_right + ); } }}; } @@ -116,13 +118,12 @@ macro_rules! assert_obj { /// the keys of the map, the inner set is the set of values for that key: /// /// ``` -/// # use automerge_test::map; /// map!{ /// "key" => { /// "value1", /// "value2", /// } -/// }; +/// } /// ``` /// /// The map above would represent a map with a conflict on the "key" property. The values can be @@ -133,7 +134,6 @@ macro_rules! map { (@inner { $($value:expr),* }) => { { use std::collections::BTreeSet; - use $crate::RealizedObject; let mut inner: BTreeSet = BTreeSet::new(); $( let _ = inner.insert($value.into()); @@ -145,7 +145,6 @@ macro_rules! map { ($($key:expr => $inner:tt),*) => { { use std::collections::{BTreeMap, BTreeSet}; - use $crate::RealizedObject; let mut _map: BTreeMap> = ::std::collections::BTreeMap::new(); $( let inner = map!(@inner $inner); @@ -159,13 +158,12 @@ macro_rules! map { /// Construct `RealizedObject::Sequence`. This macro represents a sequence of values /// /// ``` -/// # use automerge_test::{list, RealizedObject}; /// list![ /// { /// "value1", /// "value2", /// } -/// ]; +/// ] /// ``` /// /// The list above would represent a list with a conflict on the 0 index. The values can be @@ -180,7 +178,6 @@ macro_rules! list { (@inner { $($value:expr),* }) => { { use std::collections::BTreeSet; - use $crate::RealizedObject; let mut inner: BTreeSet = BTreeSet::new(); $( let _ = inner.insert($value.into()); @@ -191,10 +188,10 @@ macro_rules! 
list { ($($inner:tt,)+) => { list!($($inner),+) }; ($($inner:tt),*) => { { - use std::collections::BTreeSet; let _cap = list!(@count $($inner),*); let mut _list: Vec> = Vec::new(); $( + //println!("{}", stringify!($inner)); let inner = list!(@inner $inner); let _ = _list.push(inner); )* @@ -239,7 +236,6 @@ pub enum OrdScalarValue { Timestamp(i64), Boolean(bool), Null, - Unknown { type_code: u8, bytes: Vec }, } impl From for OrdScalarValue { @@ -254,9 +250,6 @@ impl From for OrdScalarValue { automerge::ScalarValue::Timestamp(v) => OrdScalarValue::Timestamp(v), automerge::ScalarValue::Boolean(v) => OrdScalarValue::Boolean(v), automerge::ScalarValue::Null => OrdScalarValue::Null, - automerge::ScalarValue::Unknown { type_code, bytes } => { - OrdScalarValue::Unknown { type_code, bytes } - } } } } @@ -273,10 +266,6 @@ impl From<&OrdScalarValue> for automerge::ScalarValue { OrdScalarValue::Timestamp(v) => automerge::ScalarValue::Timestamp(*v), OrdScalarValue::Boolean(v) => automerge::ScalarValue::Boolean(*v), OrdScalarValue::Null => automerge::ScalarValue::Null, - OrdScalarValue::Unknown { type_code, bytes } => automerge::ScalarValue::Unknown { - type_code: *type_code, - bytes: bytes.to_vec(), - }, } } } @@ -286,23 +275,8 @@ impl serde::Serialize for OrdScalarValue { where S: serde::Serializer, { - match self { - OrdScalarValue::Bytes(v) => serializer.serialize_bytes(v), - OrdScalarValue::Str(v) => serializer.serialize_str(v.as_str()), - OrdScalarValue::Int(v) => serializer.serialize_i64(*v), - OrdScalarValue::Uint(v) => serializer.serialize_u64(*v), - OrdScalarValue::F64(v) => serializer.serialize_f64(v.into_inner()), - OrdScalarValue::Counter(v) => { - serializer.serialize_str(format!("Counter({})", v).as_str()) - } - OrdScalarValue::Timestamp(v) => { - serializer.serialize_str(format!("Timestamp({})", v).as_str()) - } - OrdScalarValue::Boolean(v) => serializer.serialize_bool(*v), - OrdScalarValue::Null => serializer.serialize_none(), - OrdScalarValue::Unknown { 
type_code, .. } => serializer - .serialize_str(format!("An unknown type with code {}", type_code).as_str()), - } + let s = automerge::ScalarValue::from(self); + s.serialize(serializer) } } @@ -333,24 +307,24 @@ impl serde::Serialize for RealizedObject { } } -pub fn realize(doc: &R) -> RealizedObject { +pub fn realize(doc: &automerge::Automerge) -> RealizedObject { realize_obj(doc, &automerge::ROOT, automerge::ObjType::Map) } -pub fn realize_prop>( - doc: &R, +pub fn realize_prop>( + doc: &automerge::Automerge, obj_id: &automerge::ObjId, prop: P, ) -> RealizedObject { - let (val, obj_id) = doc.get(obj_id, prop).unwrap().unwrap(); + let (val, obj_id) = doc.value(obj_id, prop).unwrap().unwrap(); match val { automerge::Value::Object(obj_type) => realize_obj(doc, &obj_id, obj_type), - automerge::Value::Scalar(v) => RealizedObject::Value(OrdScalarValue::from(v.into_owned())), + automerge::Value::Scalar(v) => RealizedObject::Value(OrdScalarValue::from(v)), } } -pub fn realize_obj( - doc: &R, +pub fn realize_obj( + doc: &automerge::Automerge, obj_id: &automerge::ObjId, objtype: automerge::ObjType, ) -> RealizedObject { @@ -373,18 +347,16 @@ pub fn realize_obj( } } -fn realize_values>( - doc: &R, +fn realize_values>( + doc: &automerge::Automerge, obj_id: &automerge::ObjId, key: K, ) -> BTreeSet { let mut values = BTreeSet::new(); - for (value, objid) in doc.get_all(obj_id, key).unwrap() { + for (value, objid) in doc.values(obj_id, key).unwrap() { let realized = match value { automerge::Value::Object(objtype) => realize_obj(doc, &objid, objtype), - automerge::Value::Scalar(v) => { - RealizedObject::Value(OrdScalarValue::from(v.into_owned())) - } + automerge::Value::Scalar(v) => RealizedObject::Value(OrdScalarValue::from(v)), }; values.insert(realized); } @@ -425,30 +397,6 @@ impl From for RealizedObject { } } -impl From for RealizedObject { - fn from(v: u64) -> Self { - RealizedObject::Value(OrdScalarValue::Uint(v)) - } -} - -impl From for RealizedObject { - fn from(v: u32) 
-> Self { - RealizedObject::Value(OrdScalarValue::Uint(v.into())) - } -} - -impl From for RealizedObject { - fn from(v: i64) -> Self { - RealizedObject::Value(OrdScalarValue::Int(v)) - } -} - -impl From for RealizedObject { - fn from(v: i32) -> Self { - RealizedObject::Value(OrdScalarValue::Int(v.into())) - } -} - impl From for RealizedObject { fn from(s: automerge::ScalarValue) -> Self { RealizedObject::Value(OrdScalarValue::from(s)) @@ -461,36 +409,8 @@ impl From<&str> for RealizedObject { } } -impl From for RealizedObject { - fn from(f: f64) -> Self { - RealizedObject::Value(OrdScalarValue::F64(f.into())) - } -} - -impl From> for RealizedObject { - fn from(vals: Vec) -> Self { - RealizedObject::Sequence( - vals.into_iter() - .map(|i| { - let mut set = BTreeSet::new(); - set.insert(i.into()); - set - }) - .collect(), - ) - } -} - /// Pretty print the contents of a document +#[allow(dead_code)] pub fn pretty_print(doc: &automerge::Automerge) { println!("{}", serde_json::to_string_pretty(&realize(doc)).unwrap()) } - -pub fn pretty_panic(expected_obj: RealizedObject, realized: RealizedObject) { - let serde_right = serde_json::to_string_pretty(&realized).unwrap(); - let serde_left = serde_json::to_string_pretty(&expected_obj).unwrap(); - panic!( - "documents didn't match\n expected\n{}\n got\n{}", - &serde_left, &serde_right - ); -} diff --git a/automerge/tests/test.rs b/automerge/tests/test.rs new file mode 100644 index 00000000..a912c01c --- /dev/null +++ b/automerge/tests/test.rs @@ -0,0 +1,903 @@ +use automerge::transaction::Transactable; +use automerge::{ActorId, AutoCommit, Automerge, ObjType, ScalarValue, Value, ROOT}; + +mod helpers; +#[allow(unused_imports)] +use helpers::{ + mk_counter, new_doc, new_doc_with_actor, pretty_print, realize, realize_obj, sorted_actors, + RealizedObject, +}; +#[test] +fn no_conflict_on_repeated_assignment() { + let mut doc = AutoCommit::new(); + doc.set(&automerge::ROOT, "foo", 1).unwrap(); + doc.set(&automerge::ROOT, "foo", 
2).unwrap(); + assert_doc!( + doc.document(), + map! { + "foo" => { 2 }, + } + ); +} + +#[test] +fn repeated_map_assignment_which_resolves_conflict_not_ignored() { + let mut doc1 = new_doc(); + let mut doc2 = new_doc(); + doc1.set(&automerge::ROOT, "field", 123).unwrap(); + doc2.merge(&mut doc1).unwrap(); + doc2.set(&automerge::ROOT, "field", 456).unwrap(); + doc1.set(&automerge::ROOT, "field", 789).unwrap(); + doc1.merge(&mut doc2).unwrap(); + assert_eq!(doc1.values(&automerge::ROOT, "field").unwrap().len(), 2); + + doc1.set(&automerge::ROOT, "field", 123).unwrap(); + assert_doc!( + doc1.document(), + map! { + "field" => { 123 } + } + ); +} + +#[test] +fn repeated_list_assignment_which_resolves_conflict_not_ignored() { + let mut doc1 = new_doc(); + let mut doc2 = new_doc(); + let list_id = doc1 + .set_object(&automerge::ROOT, "list", ObjType::List) + .unwrap(); + doc1.insert(&list_id, 0, 123).unwrap(); + doc2.merge(&mut doc1).unwrap(); + doc2.set(&list_id, 0, 456).unwrap(); + doc1.merge(&mut doc2).unwrap(); + doc1.set(&list_id, 0, 789).unwrap(); + + assert_doc!( + doc1.document(), + map! { + "list" => { + list![ + { 789 }, + ] + } + } + ); +} + +#[test] +fn list_deletion() { + let mut doc = new_doc(); + let list_id = doc + .set_object(&automerge::ROOT, "list", ObjType::List) + .unwrap(); + doc.insert(&list_id, 0, 123).unwrap(); + doc.insert(&list_id, 1, 456).unwrap(); + doc.insert(&list_id, 2, 789).unwrap(); + doc.del(&list_id, 1).unwrap(); + assert_doc!( + doc.document(), + map! { + "list" => { list![ + { 123 }, + { 789 }, + ]} + } + ) +} + +#[test] +fn merge_concurrent_map_prop_updates() { + let mut doc1 = new_doc(); + let mut doc2 = new_doc(); + doc1.set(&automerge::ROOT, "foo", "bar").unwrap(); + doc2.set(&automerge::ROOT, "hello", "world").unwrap(); + doc1.merge(&mut doc2).unwrap(); + assert_eq!( + doc1.value(&automerge::ROOT, "foo").unwrap().unwrap().0, + "bar".into() + ); + assert_doc!( + doc1.document(), + map! 
{ + "foo" => { "bar" }, + "hello" => { "world" }, + } + ); + doc2.merge(&mut doc1).unwrap(); + assert_doc!( + doc2.document(), + map! { + "foo" => { "bar" }, + "hello" => { "world" }, + } + ); + assert_eq!(realize(doc1.document()), realize(doc2.document())); +} + +#[test] +fn add_concurrent_increments_of_same_property() { + let mut doc1 = new_doc(); + let mut doc2 = new_doc(); + doc1.set(&automerge::ROOT, "counter", mk_counter(0)) + .unwrap(); + doc2.merge(&mut doc1).unwrap(); + doc1.inc(&automerge::ROOT, "counter", 1).unwrap(); + doc2.inc(&automerge::ROOT, "counter", 2).unwrap(); + doc1.merge(&mut doc2).unwrap(); + assert_doc!( + doc1.document(), + map! { + "counter" => { + mk_counter(3) + } + } + ); +} + +#[test] +fn add_increments_only_to_preceeded_values() { + let mut doc1 = new_doc(); + let mut doc2 = new_doc(); + + doc1.set(&automerge::ROOT, "counter", mk_counter(0)) + .unwrap(); + doc1.inc(&automerge::ROOT, "counter", 1).unwrap(); + + // create a counter in doc2 + doc2.set(&automerge::ROOT, "counter", mk_counter(0)) + .unwrap(); + doc2.inc(&automerge::ROOT, "counter", 3).unwrap(); + + // The two values should be conflicting rather than added + doc1.merge(&mut doc2).unwrap(); + + assert_doc!( + doc1.document(), + map! { + "counter" => { + mk_counter(1), + mk_counter(3), + } + } + ); +} + +#[test] +fn concurrent_updates_of_same_field() { + let mut doc1 = new_doc(); + let mut doc2 = new_doc(); + doc1.set(&automerge::ROOT, "field", "one").unwrap(); + doc2.set(&automerge::ROOT, "field", "two").unwrap(); + + doc1.merge(&mut doc2).unwrap(); + + assert_doc!( + doc1.document(), + map! 
{ + "field" => { + "one", + "two", + } + } + ); +} + +#[test] +fn concurrent_updates_of_same_list_element() { + let mut doc1 = new_doc(); + let mut doc2 = new_doc(); + let list_id = doc1 + .set_object(&automerge::ROOT, "birds", ObjType::List) + .unwrap(); + doc1.insert(&list_id, 0, "finch").unwrap(); + doc2.merge(&mut doc1).unwrap(); + doc1.set(&list_id, 0, "greenfinch").unwrap(); + doc2.set(&list_id, 0, "goldfinch").unwrap(); + + doc1.merge(&mut doc2).unwrap(); + + assert_doc!( + doc1.document(), + map! { + "birds" => { + list![{ + "greenfinch", + "goldfinch", + }] + } + } + ); +} + +#[test] +fn assignment_conflicts_of_different_types() { + let mut doc1 = new_doc(); + let mut doc2 = new_doc(); + let mut doc3 = new_doc(); + doc1.set(&automerge::ROOT, "field", "string").unwrap(); + doc2.set_object(&automerge::ROOT, "field", ObjType::List) + .unwrap(); + doc3.set_object(&automerge::ROOT, "field", ObjType::Map) + .unwrap(); + doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc3).unwrap(); + + assert_doc!( + doc1.document(), + map! { + "field" => { + "string", + list!{}, + map!{}, + } + } + ); +} + +#[test] +fn changes_within_conflicting_map_field() { + let mut doc1 = new_doc(); + let mut doc2 = new_doc(); + doc1.set(&automerge::ROOT, "field", "string").unwrap(); + let map_id = doc2 + .set_object(&automerge::ROOT, "field", ObjType::Map) + .unwrap(); + doc2.set(&map_id, "innerKey", 42).unwrap(); + doc1.merge(&mut doc2).unwrap(); + + assert_doc!( + doc1.document(), + map! 
{ + "field" => { + "string", + map!{ + "innerKey" => { + 42, + } + } + } + } + ); +} + +#[test] +fn changes_within_conflicting_list_element() { + let (actor1, actor2) = sorted_actors(); + let mut doc1 = new_doc_with_actor(actor1); + let mut doc2 = new_doc_with_actor(actor2); + let list_id = doc1 + .set_object(&automerge::ROOT, "list", ObjType::List) + .unwrap(); + doc1.insert(&list_id, 0, "hello").unwrap(); + doc2.merge(&mut doc1).unwrap(); + + let map_in_doc1 = doc1.set_object(&list_id, 0, ObjType::Map).unwrap(); + doc1.set(&map_in_doc1, "map1", true).unwrap(); + doc1.set(&map_in_doc1, "key", 1).unwrap(); + + let map_in_doc2 = doc2.set_object(&list_id, 0, ObjType::Map).unwrap(); + doc1.merge(&mut doc2).unwrap(); + doc2.set(&map_in_doc2, "map2", true).unwrap(); + doc2.set(&map_in_doc2, "key", 2).unwrap(); + + doc1.merge(&mut doc2).unwrap(); + + assert_doc!( + doc1.document(), + map! { + "list" => { + list![ + { + map!{ + "map2" => { true }, + "key" => { 2 }, + }, + map!{ + "key" => { 1 }, + "map1" => { true }, + } + } + ] + } + } + ); +} + +#[test] +fn concurrently_assigned_nested_maps_should_not_merge() { + let mut doc1 = new_doc(); + let mut doc2 = new_doc(); + + let doc1_map_id = doc1 + .set_object(&automerge::ROOT, "config", ObjType::Map) + .unwrap(); + doc1.set(&doc1_map_id, "background", "blue").unwrap(); + + let doc2_map_id = doc2 + .set_object(&automerge::ROOT, "config", ObjType::Map) + .unwrap(); + doc2.set(&doc2_map_id, "logo_url", "logo.png").unwrap(); + + doc1.merge(&mut doc2).unwrap(); + + assert_doc!( + doc1.document(), + map! 
{ + "config" => { + map!{ + "background" => {"blue"} + }, + map!{ + "logo_url" => {"logo.png"} + } + } + } + ); +} + +#[test] +fn concurrent_insertions_at_different_list_positions() { + let (actor1, actor2) = sorted_actors(); + let mut doc1 = new_doc_with_actor(actor1); + let mut doc2 = new_doc_with_actor(actor2); + assert!(doc1.get_actor() < doc2.get_actor()); + + let list_id = doc1 + .set_object(&automerge::ROOT, "list", ObjType::List) + .unwrap(); + + doc1.insert(&list_id, 0, "one").unwrap(); + doc1.insert(&list_id, 1, "three").unwrap(); + doc2.merge(&mut doc1).unwrap(); + doc1.splice(&list_id, 1, 0, vec!["two".into()]).unwrap(); + doc2.insert(&list_id, 2, "four").unwrap(); + + doc1.merge(&mut doc2).unwrap(); + + assert_doc!( + doc1.document(), + map! { + "list" => { + list![ + {"one"}, + {"two"}, + {"three"}, + {"four"}, + ] + } + } + ); +} + +#[test] +fn concurrent_insertions_at_same_list_position() { + let (actor1, actor2) = sorted_actors(); + let mut doc1 = new_doc_with_actor(actor1); + let mut doc2 = new_doc_with_actor(actor2); + assert!(doc1.get_actor() < doc2.get_actor()); + + let list_id = doc1 + .set_object(&automerge::ROOT, "birds", ObjType::List) + .unwrap(); + doc1.insert(&list_id, 0, "parakeet").unwrap(); + + doc2.merge(&mut doc1).unwrap(); + doc1.insert(&list_id, 1, "starling").unwrap(); + doc2.insert(&list_id, 1, "chaffinch").unwrap(); + doc1.merge(&mut doc2).unwrap(); + + assert_doc!( + doc1.document(), + map! { + "birds" => { + list![ + { + "parakeet", + }, + { + "chaffinch", + }, + { + "starling", + }, + ] + }, + } + ); +} + +#[test] +fn concurrent_assignment_and_deletion_of_a_map_entry() { + let mut doc1 = new_doc(); + let mut doc2 = new_doc(); + doc1.set(&automerge::ROOT, "bestBird", "robin").unwrap(); + doc2.merge(&mut doc1).unwrap(); + doc1.del(&automerge::ROOT, "bestBird").unwrap(); + doc2.set(&automerge::ROOT, "bestBird", "magpie").unwrap(); + + doc1.merge(&mut doc2).unwrap(); + + assert_doc!( + doc1.document(), + map! 
{ + "bestBird" => { + "magpie", + } + } + ); +} + +#[test] +fn concurrent_assignment_and_deletion_of_list_entry() { + let mut doc1 = new_doc(); + let mut doc2 = new_doc(); + let list_id = doc1 + .set_object(&automerge::ROOT, "birds", ObjType::List) + .unwrap(); + doc1.insert(&list_id, 0, "blackbird").unwrap(); + doc1.insert(&list_id, 1, "thrush").unwrap(); + doc1.insert(&list_id, 2, "goldfinch").unwrap(); + doc2.merge(&mut doc1).unwrap(); + doc1.set(&list_id, 1, "starling").unwrap(); + doc2.del(&list_id, 1).unwrap(); + + assert_doc!( + doc2.document(), + map! { + "birds" => {list![ + {"blackbird"}, + {"goldfinch"}, + ]} + } + ); + + assert_doc!( + doc1.document(), + map! { + "birds" => {list![ + { "blackbird" }, + { "starling" }, + { "goldfinch" }, + ]} + } + ); + + doc1.merge(&mut doc2).unwrap(); + + assert_doc!( + doc1.document(), + map! { + "birds" => {list![ + { "blackbird" }, + { "starling" }, + { "goldfinch" }, + ]} + } + ); +} + +#[test] +fn insertion_after_a_deleted_list_element() { + let mut doc1 = new_doc(); + let mut doc2 = new_doc(); + let list_id = doc1 + .set_object(&automerge::ROOT, "birds", ObjType::List) + .unwrap(); + + doc1.insert(&list_id, 0, "blackbird").unwrap(); + doc1.insert(&list_id, 1, "thrush").unwrap(); + doc1.insert(&list_id, 2, "goldfinch").unwrap(); + + doc2.merge(&mut doc1).unwrap(); + + doc1.splice(&list_id, 1, 2, Vec::new()).unwrap(); + + doc2.splice(&list_id, 2, 0, vec!["starling".into()]) + .unwrap(); + + doc1.merge(&mut doc2).unwrap(); + + assert_doc!( + doc1.document(), + map! { + "birds" => {list![ + { "blackbird" }, + { "starling" } + ]} + } + ); + + doc2.merge(&mut doc1).unwrap(); + assert_doc!( + doc2.document(), + map! 
{ + "birds" => {list![ + { "blackbird" }, + { "starling" } + ]} + } + ); +} + +#[test] +fn concurrent_deletion_of_same_list_element() { + let mut doc1 = new_doc(); + let mut doc2 = new_doc(); + let list_id = doc1 + .set_object(&automerge::ROOT, "birds", ObjType::List) + .unwrap(); + + doc1.insert(&list_id, 0, "albatross").unwrap(); + doc1.insert(&list_id, 1, "buzzard").unwrap(); + doc1.insert(&list_id, 2, "cormorant").unwrap(); + + doc2.merge(&mut doc1).unwrap(); + + doc1.del(&list_id, 1).unwrap(); + + doc2.del(&list_id, 1).unwrap(); + + doc1.merge(&mut doc2).unwrap(); + + assert_doc!( + doc1.document(), + map! { + "birds" => {list![ + { "albatross" }, + { "cormorant" } + ]} + } + ); + + doc2.merge(&mut doc1).unwrap(); + assert_doc!( + doc2.document(), + map! { + "birds" => {list![ + { "albatross" }, + { "cormorant" } + ]} + } + ); +} + +#[test] +fn concurrent_updates_at_different_levels() { + let mut doc1 = new_doc(); + let mut doc2 = new_doc(); + + let animals = doc1 + .set_object(&automerge::ROOT, "animals", ObjType::Map) + .unwrap(); + let birds = doc1.set_object(&animals, "birds", ObjType::Map).unwrap(); + doc1.set(&birds, "pink", "flamingo").unwrap(); + doc1.set(&birds, "black", "starling").unwrap(); + + let mammals = doc1.set_object(&animals, "mammals", ObjType::List).unwrap(); + doc1.insert(&mammals, 0, "badger").unwrap(); + + doc2.merge(&mut doc1).unwrap(); + + doc1.set(&birds, "brown", "sparrow").unwrap(); + + doc2.del(&animals, "birds").unwrap(); + doc1.merge(&mut doc2).unwrap(); + + assert_obj!( + doc1.document(), + &automerge::ROOT, + "animals", + map! { + "mammals" => { + list![{ "badger" }], + } + } + ); + + assert_obj!( + doc2.document(), + &automerge::ROOT, + "animals", + map! 
{ + "mammals" => { + list![{ "badger" }], + } + } + ); +} + +#[test] +fn concurrent_updates_of_concurrently_deleted_objects() { + let mut doc1 = new_doc(); + let mut doc2 = new_doc(); + + let birds = doc1 + .set_object(&automerge::ROOT, "birds", ObjType::Map) + .unwrap(); + let blackbird = doc1.set_object(&birds, "blackbird", ObjType::Map).unwrap(); + doc1.set(&blackbird, "feathers", "black").unwrap(); + + doc2.merge(&mut doc1).unwrap(); + + doc1.del(&birds, "blackbird").unwrap(); + + doc2.set(&blackbird, "beak", "orange").unwrap(); + + doc1.merge(&mut doc2).unwrap(); + + assert_doc!( + doc1.document(), + map! { + "birds" => { + map!{}, + } + } + ); +} + +#[test] +fn does_not_interleave_sequence_insertions_at_same_position() { + let (actor1, actor2) = sorted_actors(); + let mut doc1 = new_doc_with_actor(actor1); + let mut doc2 = new_doc_with_actor(actor2); + + let wisdom = doc1 + .set_object(&automerge::ROOT, "wisdom", ObjType::List) + .unwrap(); + doc2.merge(&mut doc1).unwrap(); + + doc1.splice( + &wisdom, + 0, + 0, + vec![ + "to".into(), + "be".into(), + "is".into(), + "to".into(), + "do".into(), + ], + ) + .unwrap(); + + doc2.splice( + &wisdom, + 0, + 0, + vec![ + "to".into(), + "do".into(), + "is".into(), + "to".into(), + "be".into(), + ], + ) + .unwrap(); + + doc1.merge(&mut doc2).unwrap(); + + assert_doc!( + doc1.document(), + map! 
{ + "wisdom" => {list![ + {"to"}, + {"do"}, + {"is"}, + {"to"}, + {"be"}, + {"to"}, + {"be"}, + {"is"}, + {"to"}, + {"do"}, + ]} + } + ); +} + +#[test] +fn mutliple_insertions_at_same_list_position_with_insertion_by_greater_actor_id() { + let (actor1, actor2) = sorted_actors(); + assert!(actor2 > actor1); + let mut doc1 = new_doc_with_actor(actor1); + let mut doc2 = new_doc_with_actor(actor2); + + let list = doc1 + .set_object(&automerge::ROOT, "list", ObjType::List) + .unwrap(); + doc1.insert(&list, 0, "two").unwrap(); + doc2.merge(&mut doc1).unwrap(); + + doc2.insert(&list, 0, "one").unwrap(); + assert_doc!( + doc2.document(), + map! { + "list" => { list![ + { "one" }, + { "two" }, + ]} + } + ); +} + +#[test] +fn mutliple_insertions_at_same_list_position_with_insertion_by_lesser_actor_id() { + let (actor2, actor1) = sorted_actors(); + assert!(actor2 < actor1); + let mut doc1 = new_doc_with_actor(actor1); + let mut doc2 = new_doc_with_actor(actor2); + + let list = doc1 + .set_object(&automerge::ROOT, "list", ObjType::List) + .unwrap(); + doc1.insert(&list, 0, "two").unwrap(); + doc2.merge(&mut doc1).unwrap(); + + doc2.insert(&list, 0, "one").unwrap(); + assert_doc!( + doc2.document(), + map! { + "list" => { list![ + { "one" }, + { "two" }, + ]} + } + ); +} + +#[test] +fn insertion_consistent_with_causality() { + let mut doc1 = new_doc(); + let mut doc2 = new_doc(); + + let list = doc1 + .set_object(&automerge::ROOT, "list", ObjType::List) + .unwrap(); + doc1.insert(&list, 0, "four").unwrap(); + doc2.merge(&mut doc1).unwrap(); + doc2.insert(&list, 0, "three").unwrap(); + doc1.merge(&mut doc2).unwrap(); + doc1.insert(&list, 0, "two").unwrap(); + doc2.merge(&mut doc1).unwrap(); + doc2.insert(&list, 0, "one").unwrap(); + + assert_doc!( + doc2.document(), + map! 
{ + "list" => { list![ + {"one"}, + {"two"}, + {"three" }, + {"four"}, + ]} + } + ); +} + +#[test] +fn save_and_restore_empty() { + let mut doc = new_doc(); + let loaded = Automerge::load(&doc.save()).unwrap(); + + assert_doc!(&loaded, map! {}); +} + +#[test] +fn save_restore_complex() { + let mut doc1 = new_doc(); + let todos = doc1 + .set_object(&automerge::ROOT, "todos", ObjType::List) + .unwrap(); + + let first_todo = doc1.insert_object(&todos, 0, ObjType::Map).unwrap(); + doc1.set(&first_todo, "title", "water plants").unwrap(); + doc1.set(&first_todo, "done", false).unwrap(); + + let mut doc2 = new_doc(); + doc2.merge(&mut doc1).unwrap(); + doc2.set(&first_todo, "title", "weed plants").unwrap(); + + doc1.set(&first_todo, "title", "kill plants").unwrap(); + doc1.merge(&mut doc2).unwrap(); + + let reloaded = Automerge::load(&doc1.save()).unwrap(); + + assert_doc!( + &reloaded, + map! { + "todos" => {list![ + {map!{ + "title" => { + "weed plants", + "kill plants", + }, + "done" => {false}, + }} + ]} + } + ); +} + +#[test] +fn list_counter_del() -> Result<(), automerge::AutomergeError> { + let mut v = vec![ActorId::random(), ActorId::random(), ActorId::random()]; + v.sort(); + println!("{:?}", v); + let actor1 = v[0].clone(); + let actor2 = v[1].clone(); + let actor3 = v[2].clone(); + + let mut doc1 = new_doc_with_actor(actor1); + + let list = doc1.set_object(ROOT, "list", ObjType::List)?; + doc1.insert(&list, 0, "a")?; + doc1.insert(&list, 1, "b")?; + doc1.insert(&list, 2, "c")?; + + let mut doc2 = AutoCommit::load(&doc1.save())?; + doc2.set_actor(actor2); + + let mut doc3 = AutoCommit::load(&doc1.save())?; + doc3.set_actor(actor3); + + doc1.set(&list, 1, ScalarValue::counter(0))?; + doc2.set(&list, 1, ScalarValue::counter(10))?; + doc3.set(&list, 1, ScalarValue::counter(100))?; + + doc1.set(&list, 2, ScalarValue::counter(0))?; + doc2.set(&list, 2, ScalarValue::counter(10))?; + doc3.set(&list, 2, 100)?; + + doc1.inc(&list, 1, 1)?; + doc1.inc(&list, 2, 1)?; + + 
doc1.merge(&mut doc2).unwrap(); + doc1.merge(&mut doc3).unwrap(); + + let values = doc1.values(&list, 1)?; + assert_eq!(values.len(), 3); + assert_eq!(&values[0].0, &Value::counter(1)); + assert_eq!(&values[1].0, &Value::counter(10)); + assert_eq!(&values[2].0, &Value::counter(100)); + + let values = doc1.values(&list, 2)?; + assert_eq!(values.len(), 3); + assert_eq!(&values[0].0, &Value::counter(1)); + assert_eq!(&values[1].0, &Value::counter(10)); + assert_eq!(&values[2].0, &Value::int(100)); + + doc1.inc(&list, 1, 1)?; + doc1.inc(&list, 2, 1)?; + + let values = doc1.values(&list, 1)?; + assert_eq!(values.len(), 3); + assert_eq!(&values[0].0, &Value::counter(2)); + assert_eq!(&values[1].0, &Value::counter(11)); + assert_eq!(&values[2].0, &Value::counter(101)); + + let values = doc1.values(&list, 2)?; + assert_eq!(values.len(), 2); + assert_eq!(&values[0].0, &Value::counter(2)); + assert_eq!(&values[1].0, &Value::counter(11)); + + assert_eq!(doc1.length(&list), 3); + + doc1.del(&list, 2)?; + + assert_eq!(doc1.length(&list), 2); + + let doc4 = AutoCommit::load(&doc1.save())?; + + assert_eq!(doc4.length(&list), 2); + + doc1.del(&list, 1)?; + + assert_eq!(doc1.length(&list), 1); + + let doc5 = AutoCommit::load(&doc1.save())?; + + assert_eq!(doc5.length(&list), 1); + + Ok(()) +} diff --git a/rust/deny.toml b/deny.toml similarity index 87% rename from rust/deny.toml rename to deny.toml index 473cdae8..888b7c58 100644 --- a/rust/deny.toml +++ b/deny.toml @@ -46,6 +46,7 @@ notice = "warn" # output a note when they are encountered. ignore = [ #"RUSTSEC-0000-0000", + "RUSTSEC-2021-0127", # serde_cbor is unmaintained, but we only use it in criterion for benchmarks ] # Threshold for security vulnerabilities, any vulnerability with a CVSS score # lower than the range specified will be ignored. 
Note that ignored advisories @@ -99,20 +100,9 @@ confidence-threshold = 0.8 # Allow 1 or more licenses on a per-crate basis, so that particular licenses # aren't accepted for every possible crate as with the normal allow list exceptions = [ - # The Unicode-DFS--2016 license is necessary for unicode-ident because they - # use data from the unicode tables to generate the tables which are - # included in the application. We do not distribute those data files so - # this is not a problem for us. See https://github.com/dtolnay/unicode-ident/pull/9/files - # for more details. - { allow = ["MIT", "Apache-2.0", "Unicode-DFS-2016"], name = "unicode-ident" }, - - # these are needed by cbindgen and its dependancies - # should be revied more fully before release - { allow = ["MPL-2.0"], name = "cbindgen" }, - { allow = ["BSD-3-Clause"], name = "instant" }, - - # we only use prettytable in tests - { allow = ["BSD-3-Clause"], name = "prettytable" }, + # this is a LGPL like license in the CLI + # since this is an application not a library people would link to it should be fine + { allow = ["EPL-2.0"], name = "colored_json" }, ] # Some crates don't have (easily) machine readable licensing information, @@ -175,20 +165,17 @@ deny = [ ] # Certain crates/versions that will be skipped when doing duplicate detection. skip = [ - # duct, which we only depend on for integration tests in automerge-cli, - # pulls in a version of os_pipe which in turn pulls in a version of - # windows-sys which is different to the version in pulled in by is-terminal. - # This is fine to ignore for now because it doesn't end up in downstream - # dependencies. 
- { name = "windows-sys", version = "0.42.0" } + # These are transitive depdendencies of criterion, which is only included for benchmarking anyway + { name = "itoa", version = "0.4.8" }, + { name = "textwrap", version = "0.11.0" }, + { name = "clap", version = "2.34.0" }, ] # Similarly to `skip` allows you to skip certain crates during duplicate # detection. Unlike skip, it also includes the entire tree of transitive # dependencies starting at the specified crate, up to a certain depth, which is # by default infinite skip-tree = [ - # // We only ever use criterion in benchmarks - { name = "criterion", version = "0.4.0", depth=10}, + #{ name = "ansi_term", version = "=0.11.0", depth = 20 }, ] # This section is considered when running `cargo deny check sources`. diff --git a/rust/edit-trace/.gitignore b/edit-trace/.gitignore similarity index 90% rename from rust/edit-trace/.gitignore rename to edit-trace/.gitignore index 55778aca..bf54725a 100644 --- a/rust/edit-trace/.gitignore +++ b/edit-trace/.gitignore @@ -3,4 +3,3 @@ Cargo.lock node_modules yarn.lock flamegraph.svg -/prof diff --git a/rust/edit-trace/Cargo.toml b/edit-trace/Cargo.toml similarity index 63% rename from rust/edit-trace/Cargo.toml rename to edit-trace/Cargo.toml index eaebde46..7514e626 100644 --- a/rust/edit-trace/Cargo.toml +++ b/edit-trace/Cargo.toml @@ -4,20 +4,14 @@ version = "0.1.0" edition = "2021" license = "MIT" +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + [dependencies] automerge = { path = "../automerge" } -criterion = "0.4.0" +criterion = "0.3.5" json = "0.12.4" rand = "^0.8" - -[[bin]] -name = "edit-trace" -doc = false -bench = false - [[bench]] -debug = true name = "main" harness = false - diff --git a/edit-trace/README.md b/edit-trace/README.md new file mode 100644 index 00000000..58c65fe8 --- /dev/null +++ b/edit-trace/README.md @@ -0,0 +1,52 @@ +Try the different editing traces on different automerge implementations + +### 
Automerge Experiement - pure rust + +```code + # cargo --release run +``` + +#### Benchmarks + +There are some criterion benchmarks in the `benches` folder which can be run with `cargo bench` or `cargo criterion`. +For flamegraphing, `cargo flamegraph --bench main -- --bench "save" # or "load" or "replay" or nothing` can be useful. + +### Automerge Experiement - wasm api + +```code + # node automerge-wasm.js +``` + +### Automerge Experiment - JS wrapper + +```code + # node automerge-js.js +``` + +### Automerge 1.0 pure javascript - new fast backend + +This assume automerge has been checked out in a directory along side this repo + +```code + # node automerge-1.0.js +``` + +### Automerge 1.0 with rust backend + +This assume automerge has been checked out in a directory along side this repo + +```code + # node automerge-rs.js +``` + +### Automerge Experiment - JS wrapper + +```code + # node automerge-js.js +``` + +### Baseline Test. Javascript Array with no CRDT info + +```code + # node baseline.js +``` diff --git a/rust/edit-trace/automerge-1.0.js b/edit-trace/automerge-1.0.js similarity index 100% rename from rust/edit-trace/automerge-1.0.js rename to edit-trace/automerge-1.0.js diff --git a/rust/edit-trace/automerge-js.js b/edit-trace/automerge-js.js similarity index 52% rename from rust/edit-trace/automerge-js.js rename to edit-trace/automerge-js.js index 2956d5d5..bdfa8455 100644 --- a/rust/edit-trace/automerge-js.js +++ b/edit-trace/automerge-js.js @@ -1,29 +1,24 @@ // Apply the paper editing trace to an Automerge.Text object, one char at a time const { edits, finalText } = require('./editing-trace') -const Automerge = require('../../javascript') +const Automerge = require('../automerge-js') -let start = new Date() -let state = Automerge.from({text: ""}) +const start = new Date() +let state = Automerge.from({text: new Automerge.Text()}) state = Automerge.change(state, doc => { for (let i = 0; i < edits.length; i++) { - if (i % 10000 === 0) { + if (i % 1000 === 
0) { console.log(`Processed ${i} edits in ${new Date() - start} ms`) } let edit = edits[i] - Automerge.splice(doc, 'text', ... edit) + if (edit[1] > 0) doc.text.deleteAt(edit[0], edit[1]) + if (edit.length > 2) doc.text.insertAt(edit[0], ...edit.slice(2)) } }) + +let _ = Automerge.save(state) console.log(`Done in ${new Date() - start} ms`) -start = new Date() -let bytes = Automerge.save(state) -console.log(`Save in ${new Date() - start} ms`) - -start = new Date() -let _load = Automerge.load(bytes) -console.log(`Load in ${new Date() - start} ms`) - -if (state.text !== finalText) { +if (state.text.join('') !== finalText) { throw new RangeError('ERROR: final text did not match expectation') } diff --git a/edit-trace/automerge-rs.js b/edit-trace/automerge-rs.js new file mode 100644 index 00000000..8786b412 --- /dev/null +++ b/edit-trace/automerge-rs.js @@ -0,0 +1,31 @@ + +// this assumes that the automerge-rs folder is checked out along side this repo +// and someone has run + +// # cd automerge-rs/automerge-backend-wasm +// # yarn release + +const { edits, finalText } = require('./editing-trace') +const Automerge = require('../../automerge') +const path = require('path') +const wasmBackend = require(path.resolve("../../automerge-rs/automerge-backend-wasm")) +Automerge.setDefaultBackend(wasmBackend) + +const start = new Date() +let state = Automerge.from({text: new Automerge.Text()}) + +state = Automerge.change(state, doc => { + for (let i = 0; i < edits.length; i++) { + if (i % 1000 === 0) { + console.log(`Processed ${i} edits in ${new Date() - start} ms`) + } + if (edits[i][1] > 0) doc.text.deleteAt(edits[i][0], edits[i][1]) + if (edits[i].length > 2) doc.text.insertAt(edits[i][0], ...edits[i].slice(2)) + } +}) + +console.log(`Done in ${new Date() - start} ms`) + +if (state.text.join('') !== finalText) { + throw new RangeError('ERROR: final text did not match expectation') +} diff --git a/rust/edit-trace/automerge-wasm.js b/edit-trace/automerge-wasm.js similarity 
index 56% rename from rust/edit-trace/automerge-wasm.js rename to edit-trace/automerge-wasm.js index 8f6f51af..3680efc0 100644 --- a/rust/edit-trace/automerge-wasm.js +++ b/edit-trace/automerge-wasm.js @@ -1,40 +1,34 @@ + +// make sure to + +// # cd ../automerge-wasm +// # yarn release +// # yarn opt + const { edits, finalText } = require('./editing-trace') const Automerge = require('../automerge-wasm') const start = new Date() let doc = Automerge.create(); -doc.enablePatches(true) -let mat = doc.materialize("/") -let text = doc.putObject("_root", "text", "", "text") +let text = doc.set("_root", "text", "", "text") for (let i = 0; i < edits.length; i++) { let edit = edits[i] - if (i % 10000 === 0) { + if (i % 1000 === 0) { console.log(`Processed ${i} edits in ${new Date() - start} ms`) } doc.splice(text, ...edit) } +let _ = doc.save() + console.log(`Done in ${new Date() - start} ms`) let t_time = new Date() -let saved = doc.save() -console.log(`doc.save in ${new Date() - t_time} ms`) - -t_time = new Date() -Automerge.load(saved) -console.log(`doc.load in ${new Date() - t_time} ms`) - -t_time = new Date() let t = doc.text(text); console.log(`doc.text in ${new Date() - t_time} ms`) -t_time = new Date() -t = doc.text(text); -mat = doc.applyPatches(mat) -console.log(`doc.applyPatches() in ${new Date() - t_time} ms`) - if (doc.text(text) !== finalText) { throw new RangeError('ERROR: final text did not match expectation') } diff --git a/rust/edit-trace/baseline.js b/edit-trace/baseline.js similarity index 95% rename from rust/edit-trace/baseline.js rename to edit-trace/baseline.js index b99f0ae7..803ee122 100644 --- a/rust/edit-trace/baseline.js +++ b/edit-trace/baseline.js @@ -5,7 +5,7 @@ const start = new Date() let chars = [] for (let i = 0; i < edits.length; i++) { let edit = edits[i] - if (i % 10000 === 0) { + if (i % 1000 === 0) { console.log(`Processed ${i} edits in ${new Date() - start} ms`) } chars.splice(...edit) diff --git a/rust/edit-trace/benches/main.rs 
b/edit-trace/benches/main.rs similarity index 85% rename from rust/edit-trace/benches/main.rs rename to edit-trace/benches/main.rs index 00028945..6bb4d6c0 100644 --- a/rust/edit-trace/benches/main.rs +++ b/edit-trace/benches/main.rs @@ -1,23 +1,23 @@ -use automerge::{transaction::Transactable, AutoCommit, Automerge, ObjType, ROOT}; +use automerge::{transaction::Transactable, AutoCommit, Automerge, ObjType, ScalarValue, ROOT}; use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput}; use std::fs; -fn replay_trace_tx(commands: Vec<(usize, usize, String)>) -> Automerge { +fn replay_trace_tx(commands: Vec<(usize, usize, Vec)>) -> Automerge { let mut doc = Automerge::new(); let mut tx = doc.transaction(); - let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap(); + let text = tx.set_object(ROOT, "text", ObjType::Text).unwrap(); for (pos, del, vals) in commands { - tx.splice_text(&text, pos, del, &vals).unwrap(); + tx.splice(&text, pos, del, vals).unwrap(); } tx.commit(); doc } -fn replay_trace_autotx(commands: Vec<(usize, usize, String)>) -> AutoCommit { +fn replay_trace_autotx(commands: Vec<(usize, usize, Vec)>) -> AutoCommit { let mut doc = AutoCommit::new(); - let text = doc.put_object(ROOT, "text", ObjType::Text).unwrap(); + let text = doc.set_object(ROOT, "text", ObjType::Text).unwrap(); for (pos, del, vals) in commands { - doc.splice_text(&text, pos, del, &vals).unwrap(); + doc.splice(&text, pos, del, vals).unwrap(); } doc.commit(); doc @@ -46,10 +46,10 @@ fn bench(c: &mut Criterion) { for i in 0..edits.len() { let pos: usize = edits[i][0].as_usize().unwrap(); let del: usize = edits[i][1].as_usize().unwrap(); - let mut vals = String::new(); + let mut vals = vec![]; for j in 2..edits[i].len() { let v = edits[i][j].as_str().unwrap(); - vals.push_str(v); + vals.push(ScalarValue::Str(v.into())); } commands.push((pos, del, vals)); } diff --git a/rust/edit-trace/editing-trace.js b/edit-trace/editing-trace.js similarity index 100% 
rename from rust/edit-trace/editing-trace.js rename to edit-trace/editing-trace.js diff --git a/rust/edit-trace/edits.json b/edit-trace/edits.json similarity index 100% rename from rust/edit-trace/edits.json rename to edit-trace/edits.json diff --git a/rust/edit-trace/package.json b/edit-trace/package.json similarity index 69% rename from rust/edit-trace/package.json rename to edit-trace/package.json index acd37ac0..a9d1e0e0 100644 --- a/rust/edit-trace/package.json +++ b/edit-trace/package.json @@ -4,9 +4,9 @@ "main": "wasm-text.js", "license": "MIT", "scripts": { - "wasm": "0x -D prof automerge-wasm.js" + "wasm": "0x -D prof wasm-text.js" }, "devDependencies": { - "0x": "^5.4.1" + "0x": "^4.11.0" } } diff --git a/rust/edit-trace/src/main.rs b/edit-trace/src/main.rs similarity index 77% rename from rust/edit-trace/src/main.rs rename to edit-trace/src/main.rs index 9724a109..b6a452a2 100644 --- a/rust/edit-trace/src/main.rs +++ b/edit-trace/src/main.rs @@ -1,6 +1,5 @@ -use automerge::ObjType; -use automerge::ReadDoc; use automerge::{transaction::Transactable, Automerge, AutomergeError, ROOT}; +use automerge::{ObjType, ScalarValue}; use std::time::Instant; fn main() -> Result<(), AutomergeError> { @@ -10,10 +9,10 @@ fn main() -> Result<(), AutomergeError> { for i in 0..edits.len() { let pos: usize = edits[i][0].as_usize().unwrap(); let del: usize = edits[i][1].as_usize().unwrap(); - let mut vals = String::new(); + let mut vals = vec![]; for j in 2..edits[i].len() { let v = edits[i][j].as_str().unwrap(); - vals.push_str(v); + vals.push(ScalarValue::Str(v.into())); } commands.push((pos, del, vals)); } @@ -21,15 +20,14 @@ fn main() -> Result<(), AutomergeError> { let now = Instant::now(); let mut tx = doc.transaction(); - let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap(); + let text = tx.set_object(ROOT, "text", ObjType::Text).unwrap(); for (i, (pos, del, vals)) in commands.into_iter().enumerate() { if i % 1000 == 0 { println!("Processed {} edits in {} 
ms", i, now.elapsed().as_millis()); } - tx.splice_text(&text, pos, del, &vals)?; + tx.splice(&text, pos, del, vals)?; } tx.commit(); - println!("Done in {} ms", now.elapsed().as_millis()); let save = Instant::now(); let bytes = doc.save(); println!("Saved in {} ms", save.elapsed().as_millis()); @@ -38,9 +36,6 @@ fn main() -> Result<(), AutomergeError> { let _ = Automerge::load(&bytes).unwrap(); println!("Loaded in {} ms", load.elapsed().as_millis()); - let get_txt = Instant::now(); - doc.text(&text)?; - println!("Text in {} ms", get_txt.elapsed().as_millis()); - + println!("Done in {} ms", now.elapsed().as_millis()); Ok(()) } diff --git a/rust/automerge-wasm/examples/cra/.gitignore b/examples/cra/.gitignore similarity index 100% rename from rust/automerge-wasm/examples/cra/.gitignore rename to examples/cra/.gitignore diff --git a/rust/automerge-wasm/examples/cra/README.md b/examples/cra/README.md similarity index 100% rename from rust/automerge-wasm/examples/cra/README.md rename to examples/cra/README.md diff --git a/rust/automerge-wasm/examples/cra/package.json b/examples/cra/package.json similarity index 92% rename from rust/automerge-wasm/examples/cra/package.json rename to examples/cra/package.json index 0b465b94..ccf145a9 100644 --- a/rust/automerge-wasm/examples/cra/package.json +++ b/examples/cra/package.json @@ -10,7 +10,6 @@ "@types/node": "^16.11.21", "@types/react": "^17.0.38", "@types/react-dom": "^17.0.11", - "automerge-wasm": "file:../../automerge-wasm/automerge-wasm-0.0.24.tgz", "react": "^17.0.2", "react-dom": "^17.0.2", "react-scripts": "5.0.0", diff --git a/javascript/examples/create-react-app/public/favicon.ico b/examples/cra/public/favicon.ico similarity index 100% rename from javascript/examples/create-react-app/public/favicon.ico rename to examples/cra/public/favicon.ico diff --git a/javascript/examples/create-react-app/public/index.html b/examples/cra/public/index.html similarity index 100% rename from 
javascript/examples/create-react-app/public/index.html rename to examples/cra/public/index.html diff --git a/javascript/examples/create-react-app/public/logo192.png b/examples/cra/public/logo192.png similarity index 100% rename from javascript/examples/create-react-app/public/logo192.png rename to examples/cra/public/logo192.png diff --git a/javascript/examples/create-react-app/public/logo512.png b/examples/cra/public/logo512.png similarity index 100% rename from javascript/examples/create-react-app/public/logo512.png rename to examples/cra/public/logo512.png diff --git a/javascript/examples/create-react-app/public/manifest.json b/examples/cra/public/manifest.json similarity index 100% rename from javascript/examples/create-react-app/public/manifest.json rename to examples/cra/public/manifest.json diff --git a/javascript/examples/create-react-app/public/robots.txt b/examples/cra/public/robots.txt similarity index 100% rename from javascript/examples/create-react-app/public/robots.txt rename to examples/cra/public/robots.txt diff --git a/javascript/examples/create-react-app/src/App.css b/examples/cra/src/App.css similarity index 100% rename from javascript/examples/create-react-app/src/App.css rename to examples/cra/src/App.css diff --git a/rust/automerge-wasm/examples/cra/src/App.test.tsx b/examples/cra/src/App.test.tsx similarity index 100% rename from rust/automerge-wasm/examples/cra/src/App.test.tsx rename to examples/cra/src/App.test.tsx diff --git a/rust/automerge-wasm/examples/cra/src/App.tsx b/examples/cra/src/App.tsx similarity index 82% rename from rust/automerge-wasm/examples/cra/src/App.tsx rename to examples/cra/src/App.tsx index 177f50bd..fa6fba64 100644 --- a/rust/automerge-wasm/examples/cra/src/App.tsx +++ b/examples/cra/src/App.tsx @@ -5,14 +5,10 @@ import * as Automerge from "automerge-wasm" function App() { const [ doc, ] = useState(Automerge.create()) - const [ edits, ] = useState(doc.putObject("_root", "edits", "")) + const [ edits, ] = 
useState(doc.set("_root", "edits", Automerge.TEXT) || "") const [ val, setVal ] = useState(""); useEffect(() => { doc.splice(edits, 0, 0, "the quick fox jumps over the lazy dog") - let doc2 = Automerge.load(doc.save()); - console.log("LOAD",Automerge.load) - console.log("DOC",doc.materialize("/")) - console.log("DOC2",doc2.materialize("/")) let result = doc.text(edits) setVal(result) }, []) diff --git a/rust/automerge-wasm/examples/cra/src/index.css b/examples/cra/src/index.css similarity index 100% rename from rust/automerge-wasm/examples/cra/src/index.css rename to examples/cra/src/index.css diff --git a/rust/automerge-wasm/examples/cra/src/index.tsx b/examples/cra/src/index.tsx similarity index 100% rename from rust/automerge-wasm/examples/cra/src/index.tsx rename to examples/cra/src/index.tsx diff --git a/javascript/examples/create-react-app/src/logo.svg b/examples/cra/src/logo.svg similarity index 100% rename from javascript/examples/create-react-app/src/logo.svg rename to examples/cra/src/logo.svg diff --git a/rust/automerge-wasm/examples/cra/src/react-app-env.d.ts b/examples/cra/src/react-app-env.d.ts similarity index 100% rename from rust/automerge-wasm/examples/cra/src/react-app-env.d.ts rename to examples/cra/src/react-app-env.d.ts diff --git a/rust/automerge-wasm/examples/cra/src/reportWebVitals.ts b/examples/cra/src/reportWebVitals.ts similarity index 100% rename from rust/automerge-wasm/examples/cra/src/reportWebVitals.ts rename to examples/cra/src/reportWebVitals.ts diff --git a/rust/automerge-wasm/examples/cra/src/setupTests.ts b/examples/cra/src/setupTests.ts similarity index 100% rename from rust/automerge-wasm/examples/cra/src/setupTests.ts rename to examples/cra/src/setupTests.ts diff --git a/rust/automerge-wasm/examples/cra/tsconfig.json b/examples/cra/tsconfig.json similarity index 100% rename from rust/automerge-wasm/examples/cra/tsconfig.json rename to examples/cra/tsconfig.json diff --git a/flake.lock b/flake.lock index a052776b..b2070c2d 
100644 --- a/flake.lock +++ b/flake.lock @@ -2,11 +2,11 @@ "nodes": { "flake-utils": { "locked": { - "lastModified": 1667395993, - "narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=", + "lastModified": 1642700792, + "narHash": "sha256-XqHrk7hFb+zBvRg6Ghl+AZDq03ov6OshJLiSWOoX5es=", "owner": "numtide", "repo": "flake-utils", - "rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f", + "rev": "846b2ae0fc4cc943637d3d1def4454213e203cba", "type": "github" }, "original": { @@ -17,11 +17,11 @@ }, "flake-utils_2": { "locked": { - "lastModified": 1659877975, - "narHash": "sha256-zllb8aq3YO3h8B/U0/J1WBgAL8EX5yWf5pMj3G0NAmc=", + "lastModified": 1637014545, + "narHash": "sha256-26IZAc5yzlD9FlDT54io1oqG/bBoyka+FJk5guaX4x4=", "owner": "numtide", "repo": "flake-utils", - "rev": "c0e246b9b83f637f4681389ecabcb2681b4f3af0", + "rev": "bba5dcc8e0b20ab664967ad83d24d64cb64ec4f4", "type": "github" }, "original": { @@ -32,11 +32,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1669542132, - "narHash": "sha256-DRlg++NJAwPh8io3ExBJdNW7Djs3plVI5jgYQ+iXAZQ=", + "lastModified": 1643805626, + "narHash": "sha256-AXLDVMG+UaAGsGSpOtQHPIKB+IZ0KSd9WS77aanGzgc=", "owner": "nixos", "repo": "nixpkgs", - "rev": "a115bb9bd56831941be3776c8a94005867f316a7", + "rev": "554d2d8aa25b6e583575459c297ec23750adb6cb", "type": "github" }, "original": { @@ -48,11 +48,11 @@ }, "nixpkgs_2": { "locked": { - "lastModified": 1665296151, - "narHash": "sha256-uOB0oxqxN9K7XGF1hcnY+PQnlQJ+3bP2vCn/+Ru/bbc=", + "lastModified": 1637453606, + "narHash": "sha256-Gy6cwUswft9xqsjWxFYEnx/63/qzaFUwatcbV5GF/GQ=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "14ccaaedd95a488dd7ae142757884d8e125b3363", + "rev": "8afc4e543663ca0a6a4f496262cd05233737e732", "type": "github" }, "original": { @@ -75,11 +75,11 @@ "nixpkgs": "nixpkgs_2" }, "locked": { - "lastModified": 1669775522, - "narHash": "sha256-6xxGArBqssX38DdHpDoPcPvB/e79uXyQBwpBcaO/BwY=", + "lastModified": 1643941258, + "narHash": 
"sha256-uHyEuICSu8qQp6adPTqV33ajiwoF0sCh+Iazaz5r7fo=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "3158e47f6b85a288d12948aeb9a048e0ed4434d6", + "rev": "674156c4c2f46dd6a6846466cb8f9fee84c211ca", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 37835738..ea17d00b 100644 --- a/flake.nix +++ b/flake.nix @@ -3,67 +3,58 @@ inputs = { nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable"; - flake-utils.url = "github:numtide/flake-utils"; + flake-utils = { + url = "github:numtide/flake-utils"; + inputs.nixpkgs.follows = "nixpkgs"; + }; rust-overlay.url = "github:oxalica/rust-overlay"; }; - outputs = { - self, - nixpkgs, - flake-utils, - rust-overlay, - }: + outputs = { self, nixpkgs, flake-utils, rust-overlay }: flake-utils.lib.eachDefaultSystem - (system: let - pkgs = import nixpkgs { - overlays = [rust-overlay.overlays.default]; - inherit system; - }; - rust = pkgs.rust-bin.stable.latest.default; - in { - formatter = pkgs.alejandra; + (system: + let + pkgs = import nixpkgs { + overlays = [ rust-overlay.overlay ]; + inherit system; + }; + lib = pkgs.lib; + rust = pkgs.rust-bin.stable.latest.default; + cargoNix = pkgs.callPackage ./Cargo.nix { + inherit pkgs; + release = true; + }; + debugCargoNix = pkgs.callPackage ./Cargo.nix { + inherit pkgs; + release = false; + }; + in + { + devShell = pkgs.mkShell { + buildInputs = with pkgs; + [ + (rust.override { + extensions = [ "rust-src" ]; + targets = [ "wasm32-unknown-unknown" ]; + }) + cargo-edit + cargo-watch + cargo-criterion + cargo-fuzz + cargo-flamegraph + cargo-deny + crate2nix + wasm-pack + pkgconfig + openssl + gnuplot - packages = { - deadnix = pkgs.runCommand "deadnix" {} '' - ${pkgs.deadnix}/bin/deadnix --fail ${./.} - mkdir $out - ''; - }; + nodejs + yarn - checks = { - inherit (self.packages.${system}) deadnix; - }; - - devShells.default = pkgs.mkShell { - buildInputs = with pkgs; [ - (rust.override { - extensions = ["rust-src"]; - targets = ["wasm32-unknown-unknown"]; - }) - 
cargo-edit - cargo-watch - cargo-criterion - cargo-fuzz - cargo-flamegraph - cargo-deny - crate2nix - wasm-pack - pkgconfig - openssl - gnuplot - - nodejs - yarn - deno - - # c deps - cmake - cmocka - doxygen - - rnix-lsp - nixpkgs-fmt - ]; - }; - }); + rnix-lsp + nixpkgs-fmt + ]; + }; + }); } diff --git a/img/brandmark.png b/img/brandmark.png deleted file mode 100644 index 56e1c82d..00000000 Binary files a/img/brandmark.png and /dev/null differ diff --git a/img/brandmark.svg b/img/brandmark.svg deleted file mode 100644 index 1347dfac..00000000 --- a/img/brandmark.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/img/favicon.ico b/img/favicon.ico deleted file mode 100644 index 90486824..00000000 Binary files a/img/favicon.ico and /dev/null differ diff --git a/img/lockup.png b/img/lockup.png deleted file mode 100644 index 94e63a48..00000000 Binary files a/img/lockup.png and /dev/null differ diff --git a/img/lockup.svg b/img/lockup.svg deleted file mode 100644 index 34297ecf..00000000 --- a/img/lockup.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/img/sign.png b/img/sign.png deleted file mode 100644 index 772396cb..00000000 Binary files a/img/sign.png and /dev/null differ diff --git a/img/sign.svg b/img/sign.svg deleted file mode 100644 index df31316e..00000000 --- a/img/sign.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/javascript/.denoifyrc.json b/javascript/.denoifyrc.json deleted file mode 100644 index 9453a31f..00000000 --- a/javascript/.denoifyrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "replacer": "scripts/denoify-replacer.mjs" -} diff --git a/javascript/.eslintignore b/javascript/.eslintignore deleted file mode 100644 index 4d6880d3..00000000 --- a/javascript/.eslintignore +++ /dev/null @@ -1,2 +0,0 @@ -dist -examples diff --git a/javascript/.eslintrc.cjs b/javascript/.eslintrc.cjs deleted file mode 100644 index 88776271..00000000 --- a/javascript/.eslintrc.cjs +++ /dev/null @@ 
-1,15 +0,0 @@ -module.exports = { - root: true, - parser: "@typescript-eslint/parser", - plugins: ["@typescript-eslint"], - extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"], - rules: { - "@typescript-eslint/no-unused-vars": [ - "error", - { - argsIgnorePattern: "^_", - varsIgnorePattern: "^_", - }, - ], - }, -} diff --git a/javascript/.gitignore b/javascript/.gitignore deleted file mode 100644 index f98d9db2..00000000 --- a/javascript/.gitignore +++ /dev/null @@ -1,6 +0,0 @@ -/node_modules -/yarn.lock -dist -docs/ -.vim -deno_dist/ diff --git a/javascript/.prettierignore b/javascript/.prettierignore deleted file mode 100644 index 6ab2f796..00000000 --- a/javascript/.prettierignore +++ /dev/null @@ -1,4 +0,0 @@ -e2e/verdacciodb -dist -docs -deno_dist diff --git a/javascript/.prettierrc b/javascript/.prettierrc deleted file mode 100644 index 18b9c97f..00000000 --- a/javascript/.prettierrc +++ /dev/null @@ -1,4 +0,0 @@ -{ - "semi": false, - "arrowParens": "avoid" -} diff --git a/javascript/HACKING.md b/javascript/HACKING.md deleted file mode 100644 index b7e92eef..00000000 --- a/javascript/HACKING.md +++ /dev/null @@ -1,39 +0,0 @@ -## Architecture - -The `@automerge/automerge` package is a set of -[`Proxy`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Proxy) -objects which provide an idiomatic javascript interface built on top of the -lower level `@automerge/automerge-wasm` package (which is in turn built from the -Rust codebase and can be found in `~/automerge-wasm`). I.e. the responsibility -of this codebase is - -- To map from the javascript data model to the underlying `set`, `make`, - `insert`, and `delete` operations of Automerge. 
-- To expose a more convenient interface to functions in `automerge-wasm` which - generate messages to send over the network or compressed file formats to store - on disk - -## Building and testing - -Much of the functionality of this package depends on the -`@automerge/automerge-wasm` package and frequently you will be working on both -of them at the same time. It would be frustrating to have to push -`automerge-wasm` to NPM every time you want to test a change but I (Alex) also -don't trust `yarn link` to do the right thing here. Therefore, the `./e2e` -folder contains a little yarn package which spins up a local NPM registry. See -`./e2e/README` for details. In brief though: - -To build `automerge-wasm` and install it in the local `node_modules` - -```bash -cd e2e && yarn install && yarn run e2e buildjs -``` - -NOw that you've done this you can run the tests - -```bash -yarn test -``` - -If you make changes to the `automerge-wasm` package you will need to re-run -`yarn e2e buildjs` diff --git a/javascript/LICENSE b/javascript/LICENSE deleted file mode 100644 index 63b21502..00000000 --- a/javascript/LICENSE +++ /dev/null @@ -1,10 +0,0 @@ -MIT License - -Copyright 2022, Ink & Switch LLC - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - diff --git a/javascript/README.md b/javascript/README.md deleted file mode 100644 index af8306ac..00000000 --- a/javascript/README.md +++ /dev/null @@ -1,109 +0,0 @@ -## Automerge - -Automerge is a library of data structures for building collaborative -applications, this package is the javascript implementation. - -Detailed documentation is available at [automerge.org](http://automerge.org/) -but see the following for a short getting started guid. - -## Quickstart - -First, install the library. - -``` -yarn add @automerge/automerge -``` - -If you're writing a `node` application, you can skip straight to [Make some -data](#make-some-data). If you're in a browser you need a bundler - -### Bundler setup - -`@automerge/automerge` is a wrapper around a core library which is written in -rust, compiled to WebAssembly and distributed as a separate package called -`@automerge/automerge-wasm`. Browsers don't currently support WebAssembly -modules taking part in ESM module imports, so you must use a bundler to import -`@automerge/automerge` in the browser. There are a lot of bundlers out there, we -have examples for common bundlers in the `examples` folder. Here is a short -example using Webpack 5. - -Assuming a standard setup of a new webpack project, you'll need to enable the -`asyncWebAssembly` experiment. In a typical webpack project that means adding -something like this to `webpack.config.js` - -```javascript -module.exports = { - ... 
- experiments: { asyncWebAssembly: true }, - performance: { // we dont want the wasm blob to generate warnings - hints: false, - maxEntrypointSize: 512000, - maxAssetSize: 512000 - } -}; -``` - -### Make some data - -Automerge allows to separate threads of execution to make changes to some data -and always be able to merge their changes later. - -```javascript -import * as automerge from "@automerge/automerge" -import * as assert from "assert" - -let doc1 = automerge.from({ - tasks: [ - { description: "feed fish", done: false }, - { description: "water plants", done: false }, - ], -}) - -// Create a new thread of execution -let doc2 = automerge.clone(doc1) - -// Now we concurrently make changes to doc1 and doc2 - -// Complete a task in doc2 -doc2 = automerge.change(doc2, d => { - d.tasks[0].done = true -}) - -// Add a task in doc1 -doc1 = automerge.change(doc1, d => { - d.tasks.push({ - description: "water fish", - done: false, - }) -}) - -// Merge changes from both docs -doc1 = automerge.merge(doc1, doc2) -doc2 = automerge.merge(doc2, doc1) - -// Both docs are merged and identical -assert.deepEqual(doc1, { - tasks: [ - { description: "feed fish", done: true }, - { description: "water plants", done: false }, - { description: "water fish", done: false }, - ], -}) - -assert.deepEqual(doc2, { - tasks: [ - { description: "feed fish", done: true }, - { description: "water plants", done: false }, - { description: "water fish", done: false }, - ], -}) -``` - -## Development - -See [HACKING.md](./HACKING.md) - -## Meta - -Copyright 2017–present, the Automerge contributors. Released under the terms of the -MIT license (see `LICENSE`). 
diff --git a/javascript/config/cjs.json b/javascript/config/cjs.json deleted file mode 100644 index 0b135067..00000000 --- a/javascript/config/cjs.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "extends": "../tsconfig.json", - "exclude": [ - "../dist/**/*", - "../node_modules", - "../test/**/*", - "../src/**/*.deno.ts" - ], - "compilerOptions": { - "outDir": "../dist/cjs" - } -} diff --git a/javascript/config/declonly.json b/javascript/config/declonly.json deleted file mode 100644 index 7c1df687..00000000 --- a/javascript/config/declonly.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "extends": "../tsconfig.json", - "exclude": [ - "../dist/**/*", - "../node_modules", - "../test/**/*", - "../src/**/*.deno.ts" - ], - "emitDeclarationOnly": true, - "compilerOptions": { - "outDir": "../dist" - } -} diff --git a/javascript/config/mjs.json b/javascript/config/mjs.json deleted file mode 100644 index ecf3ce36..00000000 --- a/javascript/config/mjs.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "extends": "../tsconfig.json", - "exclude": [ - "../dist/**/*", - "../node_modules", - "../test/**/*", - "../src/**/*.deno.ts" - ], - "compilerOptions": { - "target": "es6", - "module": "es6", - "outDir": "../dist/mjs" - } -} diff --git a/javascript/deno-tests/deno.ts b/javascript/deno-tests/deno.ts deleted file mode 100644 index fc0a4dad..00000000 --- a/javascript/deno-tests/deno.ts +++ /dev/null @@ -1,10 +0,0 @@ -import * as Automerge from "../deno_dist/index.ts" - -Deno.test("It should create, clone and free", () => { - let doc1 = Automerge.init() - let doc2 = Automerge.clone(doc1) - - // this is only needed if weakrefs are not supported - Automerge.free(doc1) - Automerge.free(doc2) -}) diff --git a/javascript/e2e/.gitignore b/javascript/e2e/.gitignore deleted file mode 100644 index 3021843a..00000000 --- a/javascript/e2e/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -node_modules/ -verdacciodb/ -htpasswd diff --git a/javascript/e2e/README.md b/javascript/e2e/README.md deleted file mode 100644 index 
9dcee471..00000000 --- a/javascript/e2e/README.md +++ /dev/null @@ -1,70 +0,0 @@ -#End to end testing for javascript packaging - -The network of packages and bundlers we rely on to get the `automerge` package -working is a little complex. We have the `automerge-wasm` package, which the -`automerge` package depends upon, which means that anyone who depends on -`automerge` needs to either a) be using node or b) use a bundler in order to -load the underlying WASM module which is packaged in `automerge-wasm`. - -The various bundlers involved are complicated and capricious and so we need an -easy way of testing that everything is in fact working as expected. To do this -we run a custom NPM registry (namely [Verdaccio](https://verdaccio.org/)) and -build the `automerge-wasm` and `automerge` packages and publish them to this -registry. Once we have this registry running we are able to build the example -projects which depend on these packages and check that everything works as -expected. - -## Usage - -First, install everything: - -``` -yarn install -``` - -### Build `automerge-js` - -This builds the `automerge-wasm` package and then runs `yarn build` in the -`automerge-js` project with the `--registry` set to the verdaccio registry. The -end result is that you can run `yarn test` in the resulting `automerge-js` -directory in order to run tests against the current `automerge-wasm`. - -``` -yarn e2e buildjs -``` - -### Build examples - -This either builds or the examples in `automerge-js/examples` or just a subset -of them. Once this is complete you can run the relevant scripts (e.g. `vite dev` -for the Vite example) to check everything works. - -``` -yarn e2e buildexamples -``` - -Or, to just build the webpack example - -``` -yarn e2e buildexamples -e webpack -``` - -### Run Registry - -If you're experimenting with a project which is not in the `examples` folder -you'll need a running registry. 
`run-registry` builds and publishes -`automerge-js` and `automerge-wasm` and then runs the registry at -`localhost:4873`. - -``` -yarn e2e run-registry -``` - -You can now run `yarn install --registry http://localhost:4873` to experiment -with the built packages. - -## Using the `dev` build of `automerge-wasm` - -All the commands above take a `-p` flag which can be either `release` or -`debug`. The `debug` builds with additional debug symbols which makes errors -less cryptic. diff --git a/javascript/e2e/index.ts b/javascript/e2e/index.ts deleted file mode 100644 index fb0b1599..00000000 --- a/javascript/e2e/index.ts +++ /dev/null @@ -1,534 +0,0 @@ -import { once } from "events" -import { setTimeout } from "timers/promises" -import { spawn, ChildProcess } from "child_process" -import * as child_process from "child_process" -import { - command, - subcommands, - run, - array, - multioption, - option, - Type, -} from "cmd-ts" -import * as path from "path" -import * as fsPromises from "fs/promises" -import fetch from "node-fetch" - -const VERDACCIO_DB_PATH = path.normalize(`${__dirname}/verdacciodb`) -const VERDACCIO_CONFIG_PATH = path.normalize(`${__dirname}/verdaccio.yaml`) -const AUTOMERGE_WASM_PATH = path.normalize( - `${__dirname}/../../rust/automerge-wasm` -) -const AUTOMERGE_JS_PATH = path.normalize(`${__dirname}/..`) -const EXAMPLES_DIR = path.normalize(path.join(__dirname, "../", "examples")) - -// The different example projects in "../examples" -type Example = "webpack" | "vite" | "create-react-app" - -// Type to parse strings to `Example` so the types line up for the `buildExamples` commmand -const ReadExample: Type = { - async from(str) { - if (str === "webpack") { - return "webpack" - } else if (str === "vite") { - return "vite" - } else if (str === "create-react-app") { - return "create-react-app" - } else { - throw new Error(`Unknown example type ${str}`) - } - }, -} - -type Profile = "dev" | "release" - -const ReadProfile: Type = { - async from(str) { - 
if (str === "dev") { - return "dev" - } else if (str === "release") { - return "release" - } else { - throw new Error(`Unknown profile ${str}`) - } - }, -} - -const buildjs = command({ - name: "buildjs", - args: { - profile: option({ - type: ReadProfile, - long: "profile", - short: "p", - defaultValue: () => "dev" as Profile, - }), - }, - handler: ({ profile }) => { - console.log("building js") - withPublishedWasm(profile, async (registryUrl: string) => { - await buildAndPublishAutomergeJs(registryUrl) - }) - }, -}) - -const buildWasm = command({ - name: "buildwasm", - args: { - profile: option({ - type: ReadProfile, - long: "profile", - short: "p", - defaultValue: () => "dev" as Profile, - }), - }, - handler: ({ profile }) => { - console.log("building automerge-wasm") - withRegistry(buildAutomergeWasm(profile)) - }, -}) - -const buildexamples = command({ - name: "buildexamples", - args: { - examples: multioption({ - long: "example", - short: "e", - type: array(ReadExample), - }), - profile: option({ - type: ReadProfile, - long: "profile", - short: "p", - defaultValue: () => "dev" as Profile, - }), - }, - handler: ({ examples, profile }) => { - if (examples.length === 0) { - examples = ["webpack", "vite", "create-react-app"] - } - buildExamples(examples, profile) - }, -}) - -const runRegistry = command({ - name: "run-registry", - args: { - profile: option({ - type: ReadProfile, - long: "profile", - short: "p", - defaultValue: () => "dev" as Profile, - }), - }, - handler: ({ profile }) => { - withPublishedWasm(profile, async (registryUrl: string) => { - await buildAndPublishAutomergeJs(registryUrl) - console.log("\n************************") - console.log(` Verdaccio NPM registry is running at ${registryUrl}`) - console.log(" press CTRL-C to exit ") - console.log("************************") - await once(process, "SIGINT") - }).catch(e => { - console.error(`Failed: ${e}`) - }) - }, -}) - -const app = subcommands({ - name: "e2e", - cmds: { - buildjs, - buildexamples, 
- buildwasm: buildWasm, - "run-registry": runRegistry, - }, -}) - -run(app, process.argv.slice(2)) - -async function buildExamples(examples: Array, profile: Profile) { - await withPublishedWasm(profile, async registryUrl => { - printHeader("building and publishing automerge") - await buildAndPublishAutomergeJs(registryUrl) - for (const example of examples) { - printHeader(`building ${example} example`) - if (example === "webpack") { - const projectPath = path.join(EXAMPLES_DIR, example) - await removeExistingAutomerge(projectPath) - await fsPromises.rm(path.join(projectPath, "yarn.lock"), { - force: true, - }) - await spawnAndWait( - "yarn", - [ - "--cwd", - projectPath, - "install", - "--registry", - registryUrl, - "--check-files", - ], - { stdio: "inherit" } - ) - await spawnAndWait("yarn", ["--cwd", projectPath, "build"], { - stdio: "inherit", - }) - } else if (example === "vite") { - const projectPath = path.join(EXAMPLES_DIR, example) - await removeExistingAutomerge(projectPath) - await fsPromises.rm(path.join(projectPath, "yarn.lock"), { - force: true, - }) - await spawnAndWait( - "yarn", - [ - "--cwd", - projectPath, - "install", - "--registry", - registryUrl, - "--check-files", - ], - { stdio: "inherit" } - ) - await spawnAndWait("yarn", ["--cwd", projectPath, "build"], { - stdio: "inherit", - }) - } else if (example === "create-react-app") { - const projectPath = path.join(EXAMPLES_DIR, example) - await removeExistingAutomerge(projectPath) - await fsPromises.rm(path.join(projectPath, "yarn.lock"), { - force: true, - }) - await spawnAndWait( - "yarn", - [ - "--cwd", - projectPath, - "install", - "--registry", - registryUrl, - "--check-files", - ], - { stdio: "inherit" } - ) - await spawnAndWait("yarn", ["--cwd", projectPath, "build"], { - stdio: "inherit", - }) - } - } - }) -} - -type WithRegistryAction = (registryUrl: string) => Promise - -async function withRegistry( - action: WithRegistryAction, - ...actions: Array -) { - // First, start verdaccio - 
printHeader("Starting verdaccio NPM server") - const verd = await VerdaccioProcess.start() - actions.unshift(action) - - for (const action of actions) { - try { - type Step = "verd-died" | "action-completed" - const verdDied: () => Promise = async () => { - await verd.died() - return "verd-died" - } - const actionComplete: () => Promise = async () => { - await action("http://localhost:4873") - return "action-completed" - } - const result = await Promise.race([verdDied(), actionComplete()]) - if (result === "verd-died") { - throw new Error("verdaccio unexpectedly exited") - } - } catch (e) { - await verd.kill() - throw e - } - } - await verd.kill() -} - -async function withPublishedWasm(profile: Profile, action: WithRegistryAction) { - await withRegistry(buildAutomergeWasm(profile), publishAutomergeWasm, action) -} - -function buildAutomergeWasm(profile: Profile): WithRegistryAction { - return async (registryUrl: string) => { - printHeader("building automerge-wasm") - await spawnAndWait( - "yarn", - ["--cwd", AUTOMERGE_WASM_PATH, "--registry", registryUrl, "install"], - { stdio: "inherit" } - ) - const cmd = profile === "release" ? 
"release" : "debug" - await spawnAndWait("yarn", ["--cwd", AUTOMERGE_WASM_PATH, cmd], { - stdio: "inherit", - }) - } -} - -async function publishAutomergeWasm(registryUrl: string) { - printHeader("Publishing automerge-wasm to verdaccio") - await fsPromises.rm( - path.join(VERDACCIO_DB_PATH, "@automerge/automerge-wasm"), - { recursive: true, force: true } - ) - await yarnPublish(registryUrl, AUTOMERGE_WASM_PATH) -} - -async function buildAndPublishAutomergeJs(registryUrl: string) { - // Build the js package - printHeader("Building automerge") - await removeExistingAutomerge(AUTOMERGE_JS_PATH) - await removeFromVerdaccio("@automerge/automerge") - await fsPromises.rm(path.join(AUTOMERGE_JS_PATH, "yarn.lock"), { - force: true, - }) - await spawnAndWait( - "yarn", - [ - "--cwd", - AUTOMERGE_JS_PATH, - "install", - "--registry", - registryUrl, - "--check-files", - ], - { stdio: "inherit" } - ) - await spawnAndWait("yarn", ["--cwd", AUTOMERGE_JS_PATH, "build"], { - stdio: "inherit", - }) - await yarnPublish(registryUrl, AUTOMERGE_JS_PATH) -} - -/** - * A running verdaccio process - * - */ -class VerdaccioProcess { - child: ChildProcess - stdout: Array - stderr: Array - - constructor(child: ChildProcess) { - this.child = child - - // Collect stdout/stderr otherwise the subprocess gets blocked writing - this.stdout = [] - this.stderr = [] - this.child.stdout && - this.child.stdout.on("data", data => this.stdout.push(data)) - this.child.stderr && - this.child.stderr.on("data", data => this.stderr.push(data)) - - const errCallback = (e: any) => { - console.error("!!!!!!!!!ERROR IN VERDACCIO PROCESS!!!!!!!!!") - console.error(" ", e) - if (this.stdout.length > 0) { - console.log("\n**Verdaccio stdout**") - const stdout = Buffer.concat(this.stdout) - process.stdout.write(stdout) - } - - if (this.stderr.length > 0) { - console.log("\n**Verdaccio stderr**") - const stdout = Buffer.concat(this.stderr) - process.stdout.write(stdout) - } - process.exit(-1) - } - 
this.child.on("error", errCallback) - } - - /** - * Spawn a verdaccio process and wait for it to respond succesfully to http requests - * - * The returned `VerdaccioProcess` can be used to control the subprocess - */ - static async start() { - const child = spawn( - "yarn", - ["verdaccio", "--config", VERDACCIO_CONFIG_PATH], - { env: { ...process.env, FORCE_COLOR: "true" } } - ) - - // Forward stdout and stderr whilst waiting for startup to complete - const stdoutCallback = (data: Buffer) => process.stdout.write(data) - const stderrCallback = (data: Buffer) => process.stderr.write(data) - child.stdout && child.stdout.on("data", stdoutCallback) - child.stderr && child.stderr.on("data", stderrCallback) - - const healthCheck = async () => { - while (true) { - try { - const resp = await fetch("http://localhost:4873") - if (resp.status === 200) { - return - } else { - console.log(`Healthcheck failed: bad status ${resp.status}`) - } - } catch (e) { - console.error(`Healthcheck failed: ${e}`) - } - await setTimeout(500) - } - } - await withTimeout(healthCheck(), 10000) - - // Stop forwarding stdout/stderr - child.stdout && child.stdout.off("data", stdoutCallback) - child.stderr && child.stderr.off("data", stderrCallback) - return new VerdaccioProcess(child) - } - - /** - * Send a SIGKILL to the process and wait for it to stop - */ - async kill() { - this.child.stdout && this.child.stdout.destroy() - this.child.stderr && this.child.stderr.destroy() - this.child.kill() - try { - await withTimeout(once(this.child, "close"), 500) - } catch (e) { - console.error("unable to kill verdaccio subprocess, trying -9") - this.child.kill(9) - await withTimeout(once(this.child, "close"), 500) - } - } - - /** - * A promise which resolves if the subprocess exits for some reason - */ - async died(): Promise { - const [exit, _signal] = await once(this.child, "exit") - return exit - } -} - -function printHeader(header: string) { - console.log("\n===============================") - 
console.log(` ${header}`) - console.log("===============================") -} - -/** - * Removes the automerge, @automerge/automerge-wasm, and @automerge/automerge packages from - * `$packageDir/node_modules` - * - * This is useful to force refreshing a package by use in combination with - * `yarn install --check-files`, which checks if a package is present in - * `node_modules` and if it is not forces a reinstall. - * - * @param packageDir - The directory containing the package.json of the target project - */ -async function removeExistingAutomerge(packageDir: string) { - await fsPromises.rm(path.join(packageDir, "node_modules", "@automerge"), { - recursive: true, - force: true, - }) - await fsPromises.rm(path.join(packageDir, "node_modules", "automerge"), { - recursive: true, - force: true, - }) -} - -type SpawnResult = { - stdout?: Buffer - stderr?: Buffer -} - -async function spawnAndWait( - cmd: string, - args: Array, - options: child_process.SpawnOptions -): Promise { - const child = spawn(cmd, args, options) - let stdout = null - let stderr = null - if (child.stdout) { - stdout = [] - child.stdout.on("data", data => stdout.push(data)) - } - if (child.stderr) { - stderr = [] - child.stderr.on("data", data => stderr.push(data)) - } - - const [exit, _signal] = await once(child, "exit") - if (exit && exit !== 0) { - throw new Error("nonzero exit code") - } - return { - stderr: stderr ? Buffer.concat(stderr) : null, - stdout: stdout ? Buffer.concat(stdout) : null, - } -} - -/** - * Remove a package from the verdaccio registry. This is necessary because we - * often want to _replace_ a version rather than update the version number. - * Obviously this is very bad and verboten in normal circumastances, but the - * whole point here is to be able to test the entire packaging story so it's - * okay I Promise. 
- */ -async function removeFromVerdaccio(packageName: string) { - await fsPromises.rm(path.join(VERDACCIO_DB_PATH, packageName), { - force: true, - recursive: true, - }) -} - -async function yarnPublish(registryUrl: string, cwd: string) { - await spawnAndWait( - "yarn", - ["--registry", registryUrl, "--cwd", cwd, "publish", "--non-interactive"], - { - stdio: "inherit", - env: { - ...process.env, - FORCE_COLOR: "true", - // This is a fake token, it just has to be the right format - npm_config__auth: - "//localhost:4873/:_authToken=Gp2Mgxm4faa/7wp0dMSuRA==", - }, - } - ) -} - -/** - * Wait for a given delay to resolve a promise, throwing an error if the - * promise doesn't resolve with the timeout - * - * @param promise - the promise to wait for @param timeout - the delay in - * milliseconds to wait before throwing - */ -async function withTimeout( - promise: Promise, - timeout: number -): Promise { - type Step = "timed-out" | { result: T } - const timedOut: () => Promise = async () => { - await setTimeout(timeout) - return "timed-out" - } - const succeeded: () => Promise = async () => { - const result = await promise - return { result } - } - const result = await Promise.race([timedOut(), succeeded()]) - if (result === "timed-out") { - throw new Error("timed out") - } else { - return result.result - } -} diff --git a/javascript/e2e/package.json b/javascript/e2e/package.json deleted file mode 100644 index 7bb80852..00000000 --- a/javascript/e2e/package.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "name": "e2e", - "version": "0.0.1", - "description": "", - "main": "index.js", - "scripts": { - "e2e": "ts-node index.ts" - }, - "author": "", - "license": "ISC", - "dependencies": { - "@types/node": "^18.7.18", - "cmd-ts": "^0.11.0", - "node-fetch": "^2", - "ts-node": "^10.9.1", - "typed-emitter": "^2.1.0", - "typescript": "^4.8.3", - "verdaccio": "5" - }, - "devDependencies": { - "@types/node-fetch": "2.x" - } -} diff --git a/javascript/e2e/tsconfig.json 
b/javascript/e2e/tsconfig.json deleted file mode 100644 index a2109873..00000000 --- a/javascript/e2e/tsconfig.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "compilerOptions": { - "types": ["node"] - }, - "module": "nodenext" -} diff --git a/javascript/e2e/verdaccio.yaml b/javascript/e2e/verdaccio.yaml deleted file mode 100644 index 865f5f05..00000000 --- a/javascript/e2e/verdaccio.yaml +++ /dev/null @@ -1,25 +0,0 @@ -storage: "./verdacciodb" -auth: - htpasswd: - file: ./htpasswd -publish: - allow_offline: true -logs: { type: stdout, format: pretty, level: info } -packages: - "@automerge/automerge-wasm": - access: "$all" - publish: "$all" - "@automerge/automerge": - access: "$all" - publish: "$all" - "*": - access: "$all" - publish: "$all" - proxy: npmjs - "@*/*": - access: "$all" - publish: "$all" - proxy: npmjs -uplinks: - npmjs: - url: https://registry.npmjs.org/ diff --git a/javascript/e2e/yarn.lock b/javascript/e2e/yarn.lock deleted file mode 100644 index 46e2abf2..00000000 --- a/javascript/e2e/yarn.lock +++ /dev/null @@ -1,2130 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
-# yarn lockfile v1 - - -"@cspotcode/source-map-support@^0.8.0": - version "0.8.1" - resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1" - integrity sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw== - dependencies: - "@jridgewell/trace-mapping" "0.3.9" - -"@jridgewell/resolve-uri@^3.0.3": - version "3.1.0" - resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" - integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== - -"@jridgewell/sourcemap-codec@^1.4.10": - version "1.4.14" - resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" - integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== - -"@jridgewell/trace-mapping@0.3.9": - version "0.3.9" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9" - integrity sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ== - dependencies: - "@jridgewell/resolve-uri" "^3.0.3" - "@jridgewell/sourcemap-codec" "^1.4.10" - -"@tootallnate/once@1": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" - integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== - -"@tsconfig/node10@^1.0.7": - version "1.0.9" - resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2" - integrity sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA== - -"@tsconfig/node12@^1.0.7": - version "1.0.11" - resolved 
"https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d" - integrity sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag== - -"@tsconfig/node14@^1.0.0": - version "1.0.3" - resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1" - integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow== - -"@tsconfig/node16@^1.0.2": - version "1.0.3" - resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.3.tgz#472eaab5f15c1ffdd7f8628bd4c4f753995ec79e" - integrity sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ== - -"@types/node-fetch@2.x": - version "2.6.2" - resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.2.tgz#d1a9c5fd049d9415dce61571557104dec3ec81da" - integrity sha512-DHqhlq5jeESLy19TYhLakJ07kNumXWjcDdxXsLUMJZ6ue8VZJj4kLPQVE/2mdHh3xZziNF1xppu5lwmS53HR+A== - dependencies: - "@types/node" "*" - form-data "^3.0.0" - -"@types/node@*", "@types/node@^18.7.18": - version "18.7.23" - resolved "https://registry.yarnpkg.com/@types/node/-/node-18.7.23.tgz#75c580983846181ebe5f4abc40fe9dfb2d65665f" - integrity sha512-DWNcCHolDq0ZKGizjx2DZjR/PqsYwAcYUJmfMWqtVU2MBMG5Mo+xFZrhGId5r/O5HOuMPyQEcM6KUBp5lBZZBg== - -"@verdaccio/commons-api@10.2.0": - version "10.2.0" - resolved "https://registry.yarnpkg.com/@verdaccio/commons-api/-/commons-api-10.2.0.tgz#3b684c31749837b0574375bb2e10644ecea9fcca" - integrity sha512-F/YZANu4DmpcEV0jronzI7v2fGVWkQ5Mwi+bVmV+ACJ+EzR0c9Jbhtbe5QyLUuzR97t8R5E/Xe53O0cc2LukdQ== - dependencies: - http-errors "2.0.0" - http-status-codes "2.2.0" - -"@verdaccio/file-locking@10.3.0": - version "10.3.0" - resolved "https://registry.yarnpkg.com/@verdaccio/file-locking/-/file-locking-10.3.0.tgz#a4342665c549163817c267bfa451e32ed3009767" - integrity 
sha512-FE5D5H4wy/nhgR/d2J5e1Na9kScj2wMjlLPBHz7XF4XZAVSRdm45+kL3ZmrfA6b2HTADP/uH7H05/cnAYW8bhw== - dependencies: - lockfile "1.0.4" - -"@verdaccio/local-storage@10.3.1": - version "10.3.1" - resolved "https://registry.yarnpkg.com/@verdaccio/local-storage/-/local-storage-10.3.1.tgz#8cbdc6390a0eb532577ae217729cb0a4e062f299" - integrity sha512-f3oArjXPOAwUAA2dsBhfL/rSouqJ2sfml8k97RtnBPKOzisb28bgyAQW0mqwQvN4MTK5S/2xudmobFpvJAIatg== - dependencies: - "@verdaccio/commons-api" "10.2.0" - "@verdaccio/file-locking" "10.3.0" - "@verdaccio/streams" "10.2.0" - async "3.2.4" - debug "4.3.4" - lodash "4.17.21" - lowdb "1.0.0" - mkdirp "1.0.4" - -"@verdaccio/readme@10.4.1": - version "10.4.1" - resolved "https://registry.yarnpkg.com/@verdaccio/readme/-/readme-10.4.1.tgz#c568d158c36ca7dd742b1abef890383918f621b2" - integrity sha512-OZ6R+HF2bIU3WFFdPxgUgyglaIfZzGSqyUfM2m1TFNfDCK84qJvRIgQJ1HG/82KVOpGuz/nxVyw2ZyEZDkP1vA== - dependencies: - dompurify "2.3.9" - jsdom "16.7.0" - marked "4.0.18" - -"@verdaccio/streams@10.2.0": - version "10.2.0" - resolved "https://registry.yarnpkg.com/@verdaccio/streams/-/streams-10.2.0.tgz#e01d2bfdcfe8aa2389f31bc6b72a602628bd025b" - integrity sha512-FaIzCnDg0x0Js5kSQn1Le3YzDHl7XxrJ0QdIw5LrDUmLsH3VXNi4/NMlSHnw5RiTTMs4UbEf98V3RJRB8exqJA== - -"@verdaccio/ui-theme@6.0.0-6-next.28": - version "6.0.0-6-next.28" - resolved "https://registry.yarnpkg.com/@verdaccio/ui-theme/-/ui-theme-6.0.0-6-next.28.tgz#bf8ff0e90f3d292741440c7e6ab6744b97d96a98" - integrity sha512-1sJ28aVGMiRJrSz0e8f4t+IUgt/cyYmuDLhogXHOEjEIIEcfMNyQ5bVYqq03wLVoKWEh5D6gHo1hQnVKQl1L5g== - -JSONStream@1.3.5: - version "1.3.5" - resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.5.tgz#3208c1f08d3a4d99261ab64f92302bc15e111ca0" - integrity sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ== - dependencies: - jsonparse "^1.2.0" - through ">=2.2.7 <3" - -abab@^2.0.3, abab@^2.0.5: - version "2.0.6" - resolved 
"https://registry.yarnpkg.com/abab/-/abab-2.0.6.tgz#41b80f2c871d19686216b82309231cfd3cb3d291" - integrity sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA== - -accepts@~1.3.5, accepts@~1.3.8: - version "1.3.8" - resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" - integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== - dependencies: - mime-types "~2.1.34" - negotiator "0.6.3" - -acorn-globals@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-6.0.0.tgz#46cdd39f0f8ff08a876619b55f5ac8a6dc770b45" - integrity sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg== - dependencies: - acorn "^7.1.1" - acorn-walk "^7.1.1" - -acorn-walk@^7.1.1: - version "7.2.0" - resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" - integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== - -acorn-walk@^8.1.1: - version "8.2.0" - resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1" - integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== - -acorn@^7.1.1: - version "7.4.1" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" - integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== - -acorn@^8.2.4, acorn@^8.4.1: - version "8.8.0" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.0.tgz#88c0187620435c7f6015803f5539dae05a9dbea8" - integrity sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w== - -agent-base@6: - version "6.0.2" - resolved 
"https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" - integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== - dependencies: - debug "4" - -ajv@^6.12.3: - version "6.12.6" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" - integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== - dependencies: - fast-deep-equal "^3.1.1" - fast-json-stable-stringify "^2.0.0" - json-schema-traverse "^0.4.1" - uri-js "^4.2.2" - -ansi-regex@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" - integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== - -ansi-styles@^4.1.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" - integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== - dependencies: - color-convert "^2.0.1" - -apache-md5@1.1.7: - version "1.1.7" - resolved "https://registry.yarnpkg.com/apache-md5/-/apache-md5-1.1.7.tgz#dcef1802700cc231d60c5e08fd088f2f9b36375a" - integrity sha512-JtHjzZmJxtzfTSjsCyHgPR155HBe5WGyUyHTaEkfy46qhwCFKx1Epm6nAxgUG3WfUZP1dWhGqj9Z2NOBeZ+uBw== - -arg@^4.1.0: - version "4.1.3" - resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" - integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== - -argparse@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" - integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== - -array-flatten@1.1.1: - version 
"1.1.1" - resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" - integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== - -asn1@~0.2.3: - version "0.2.6" - resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.6.tgz#0d3a7bb6e64e02a90c0303b31f292868ea09a08d" - integrity sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ== - dependencies: - safer-buffer "~2.1.0" - -assert-plus@1.0.0, assert-plus@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" - integrity sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw== - -async@3.2.4: - version "3.2.4" - resolved "https://registry.yarnpkg.com/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c" - integrity sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ== - -asynckit@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" - integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== - -atomic-sleep@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/atomic-sleep/-/atomic-sleep-1.0.0.tgz#eb85b77a601fc932cfe432c5acd364a9e2c9075b" - integrity sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ== - -aws-sign2@~0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" - integrity sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA== - -aws4@^1.8.0: - version "1.11.0" - resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.11.0.tgz#d61f46d83b2519250e2784daf5b09479a8b41c59" - 
integrity sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA== - -balanced-match@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" - integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== - -bcrypt-pbkdf@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" - integrity sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w== - dependencies: - tweetnacl "^0.14.3" - -bcryptjs@2.4.3: - version "2.4.3" - resolved "https://registry.yarnpkg.com/bcryptjs/-/bcryptjs-2.4.3.tgz#9ab5627b93e60621ff7cdac5da9733027df1d0cb" - integrity sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ== - -body-parser@1.20.0: - version "1.20.0" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.0.tgz#3de69bd89011c11573d7bfee6a64f11b6bd27cc5" - integrity sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg== - dependencies: - bytes "3.1.2" - content-type "~1.0.4" - debug "2.6.9" - depd "2.0.0" - destroy "1.2.0" - http-errors "2.0.0" - iconv-lite "0.4.24" - on-finished "2.4.1" - qs "6.10.3" - raw-body "2.5.1" - type-is "~1.6.18" - unpipe "1.0.0" - -brace-expansion@^1.1.7: - version "1.1.11" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" - integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== - dependencies: - balanced-match "^1.0.0" - concat-map "0.0.1" - -brace-expansion@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" - integrity 
sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== - dependencies: - balanced-match "^1.0.0" - -browser-process-hrtime@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" - integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== - -buffer-equal-constant-time@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819" - integrity sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA== - -bytes@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" - integrity sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw== - -bytes@3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" - integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== - -call-bind@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" - integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== - dependencies: - function-bind "^1.1.1" - get-intrinsic "^1.0.2" - -caseless@~0.12.0: - version "0.12.0" - resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" - integrity sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw== - -chalk@^4.0.0: - version "4.1.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" - integrity 
sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== - dependencies: - ansi-styles "^4.1.0" - supports-color "^7.1.0" - -clipanion@3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/clipanion/-/clipanion-3.1.0.tgz#3e217dd6476bb9236638b07eb4673f7309839819" - integrity sha512-v025Hz+IDQ15FpOyK8p02h5bFznMu6rLFsJSyOPR+7WrbSnZ1Ek6pblPukV7K5tC/dsWfncQPIrJ4iUy2PXkbw== - dependencies: - typanion "^3.3.1" - -cmd-ts@^0.11.0: - version "0.11.0" - resolved "https://registry.yarnpkg.com/cmd-ts/-/cmd-ts-0.11.0.tgz#80926180f39665e35e321b72439f792a2b63b745" - integrity sha512-6RvjD+f9oGPeWoMS53oavafmQ9qC839PjP3CyvPkAIfqMEXTbrclni7t3fnyVJFNWxuBexnLshcotY0RuNrI8Q== - dependencies: - chalk "^4.0.0" - debug "^4.3.4" - didyoumean "^1.2.2" - strip-ansi "^6.0.0" - -color-convert@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" - integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== - dependencies: - color-name "~1.1.4" - -color-name@~1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" - integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== - -combined-stream@^1.0.6, combined-stream@^1.0.8, combined-stream@~1.0.6: - version "1.0.8" - resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" - integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== - dependencies: - delayed-stream "~1.0.0" - -compressible@~2.0.16: - version "2.0.18" - resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" - integrity 
sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg== - dependencies: - mime-db ">= 1.43.0 < 2" - -compression@1.7.4: - version "1.7.4" - resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" - integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== - dependencies: - accepts "~1.3.5" - bytes "3.0.0" - compressible "~2.0.16" - debug "2.6.9" - on-headers "~1.0.2" - safe-buffer "5.1.2" - vary "~1.1.2" - -concat-map@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" - integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== - -content-disposition@0.5.4: - version "0.5.4" - resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" - integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== - dependencies: - safe-buffer "5.2.1" - -content-type@~1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" - integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== - -cookie-signature@1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" - integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== - -cookie@0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" - integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== - -cookies@0.8.0: - version "0.8.0" - 
resolved "https://registry.yarnpkg.com/cookies/-/cookies-0.8.0.tgz#1293ce4b391740a8406e3c9870e828c4b54f3f90" - integrity sha512-8aPsApQfebXnuI+537McwYsDtjVxGm8gTIzQI3FDW6t5t/DAhERxtnbEPN/8RX+uZthoz4eCOgloXaE5cYyNow== - dependencies: - depd "~2.0.0" - keygrip "~1.1.0" - -core-util-is@1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" - integrity sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ== - -cors@2.8.5: - version "2.8.5" - resolved "https://registry.yarnpkg.com/cors/-/cors-2.8.5.tgz#eac11da51592dd86b9f06f6e7ac293b3df875d29" - integrity sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g== - dependencies: - object-assign "^4" - vary "^1" - -create-require@^1.1.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" - integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== - -cssom@^0.4.4: - version "0.4.4" - resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.4.4.tgz#5a66cf93d2d0b661d80bf6a44fb65f5c2e4e0a10" - integrity sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw== - -cssom@~0.3.6: - version "0.3.8" - resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a" - integrity sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg== - -cssstyle@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-2.3.0.tgz#ff665a0ddbdc31864b09647f34163443d90b0852" - integrity sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A== - dependencies: - cssom "~0.3.6" - -d@1, d@^1.0.1: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" - integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== - dependencies: - es5-ext "^0.10.50" - type "^1.0.1" - -dashdash@^1.12.0: - version "1.14.1" - resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" - integrity sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g== - dependencies: - assert-plus "^1.0.0" - -data-urls@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b" - integrity sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ== - dependencies: - abab "^2.0.3" - whatwg-mimetype "^2.3.0" - whatwg-url "^8.0.0" - -dayjs@1.11.5: - version "1.11.5" - resolved "https://registry.yarnpkg.com/dayjs/-/dayjs-1.11.5.tgz#00e8cc627f231f9499c19b38af49f56dc0ac5e93" - integrity sha512-CAdX5Q3YW3Gclyo5Vpqkgpj8fSdLQcRuzfX6mC6Phy0nfJ0eGYOeS7m4mt2plDWLAtA4TqTakvbboHvUxfe4iA== - -debug@2.6.9: - version "2.6.9" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" - integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== - dependencies: - ms "2.0.0" - -debug@4, debug@4.3.4, debug@^4.3.3, debug@^4.3.4: - version "4.3.4" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" - integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== - dependencies: - ms "2.1.2" - -debug@^3.2.7: - version "3.2.7" - resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" - integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== - 
dependencies: - ms "^2.1.1" - -decimal.js@^10.2.1: - version "10.4.1" - resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.4.1.tgz#be75eeac4a2281aace80c1a8753587c27ef053e7" - integrity sha512-F29o+vci4DodHYT9UrR5IEbfBw9pE5eSapIJdTqXK5+6hq+t8VRxwQyKlW2i+KDKFkkJQRvFyI/QXD83h8LyQw== - -deep-is@~0.1.3: - version "0.1.4" - resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" - integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== - -delayed-stream@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" - integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== - -depd@2.0.0, depd@~2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" - integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== - -destroy@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" - integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== - -didyoumean@^1.2.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037" - integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw== - -diff@^4.0.1: - version "4.0.2" - resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" - integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== - -domexception@^2.0.1: - version "2.0.1" - resolved 
"https://registry.yarnpkg.com/domexception/-/domexception-2.0.1.tgz#fb44aefba793e1574b0af6aed2801d057529f304" - integrity sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg== - dependencies: - webidl-conversions "^5.0.0" - -dompurify@2.3.9: - version "2.3.9" - resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-2.3.9.tgz#a4be5e7278338d6db09922dffcf6182cd099d70a" - integrity sha512-3zOnuTwup4lPV/GfGS6UzG4ub9nhSYagR/5tB3AvDEwqyy5dtyCM2dVjwGDCnrPerXifBKTYh/UWCGKK7ydhhw== - -ecc-jsbn@~0.1.1: - version "0.1.2" - resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" - integrity sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw== - dependencies: - jsbn "~0.1.0" - safer-buffer "^2.1.0" - -ecdsa-sig-formatter@1.0.11: - version "1.0.11" - resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf" - integrity sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ== - dependencies: - safe-buffer "^5.0.1" - -ee-first@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" - integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== - -encodeurl@~1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" - integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== - -envinfo@7.8.1: - version "7.8.1" - resolved "https://registry.yarnpkg.com/envinfo/-/envinfo-7.8.1.tgz#06377e3e5f4d379fea7ac592d5ad8927e0c4d475" - integrity sha512-/o+BXHmB7ocbHEAs6F2EnG0ogybVVUdkRunTT2glZU9XAaGmhqskrvKwqXuDfNjEO0LZKWdejEEpnq8aM0tOaw== - -es5-ext@^0.10.35, es5-ext@^0.10.46, 
es5-ext@^0.10.50, es5-ext@^0.10.53, es5-ext@~0.10.14, es5-ext@~0.10.2, es5-ext@~0.10.46: - version "0.10.62" - resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.62.tgz#5e6adc19a6da524bf3d1e02bbc8960e5eb49a9a5" - integrity sha512-BHLqn0klhEpnOKSrzn/Xsz2UIW8j+cGmo9JLzr8BiUapV8hPL9+FliFqjwr9ngW7jWdnxv6eO+/LqyhJVqgrjA== - dependencies: - es6-iterator "^2.0.3" - es6-symbol "^3.1.3" - next-tick "^1.1.0" - -es6-iterator@^2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" - integrity sha512-zw4SRzoUkd+cl+ZoE15A9o1oQd920Bb0iOJMQkQhl3jNc03YqVjAhG7scf9C5KWRU/R13Orf588uCC6525o02g== - dependencies: - d "1" - es5-ext "^0.10.35" - es6-symbol "^3.1.1" - -es6-symbol@^3.1.1, es6-symbol@^3.1.3: - version "3.1.3" - resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" - integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA== - dependencies: - d "^1.0.1" - ext "^1.1.2" - -es6-weak-map@^2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/es6-weak-map/-/es6-weak-map-2.0.3.tgz#b6da1f16cc2cc0d9be43e6bdbfc5e7dfcdf31d53" - integrity sha512-p5um32HOTO1kP+w7PRnB+5lQ43Z6muuMuIMffvDN8ZB4GcnjLBV6zGStpbASIMk4DCAvEaamhe2zhyCb/QXXsA== - dependencies: - d "1" - es5-ext "^0.10.46" - es6-iterator "^2.0.3" - es6-symbol "^3.1.1" - -escape-html@~1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" - integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== - -escodegen@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-2.0.0.tgz#5e32b12833e8aa8fa35e1bf0befa89380484c7dd" - integrity sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw== - dependencies: - esprima 
"^4.0.1" - estraverse "^5.2.0" - esutils "^2.0.2" - optionator "^0.8.1" - optionalDependencies: - source-map "~0.6.1" - -eslint-import-resolver-node@0.3.6: - version "0.3.6" - resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz#4048b958395da89668252001dbd9eca6b83bacbd" - integrity sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw== - dependencies: - debug "^3.2.7" - resolve "^1.20.0" - -esprima@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" - integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== - -estraverse@^5.2.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" - integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== - -esutils@^2.0.2: - version "2.0.3" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" - integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== - -etag@~1.8.1: - version "1.8.1" - resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" - integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== - -event-emitter@^0.3.5: - version "0.3.5" - resolved "https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.3.5.tgz#df8c69eef1647923c7157b9ce83840610b02cc39" - integrity sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA== - dependencies: - d "1" - es5-ext "~0.10.14" - -express-rate-limit@5.5.1: - version "5.5.1" - resolved 
"https://registry.yarnpkg.com/express-rate-limit/-/express-rate-limit-5.5.1.tgz#110c23f6a65dfa96ab468eda95e71697bc6987a2" - integrity sha512-MTjE2eIbHv5DyfuFz4zLYWxpqVhEhkTiwFGuB74Q9CSou2WHO52nlE5y3Zlg6SIsiYUIPj6ifFxnkPz6O3sIUg== - -express@4.18.1: - version "4.18.1" - resolved "https://registry.yarnpkg.com/express/-/express-4.18.1.tgz#7797de8b9c72c857b9cd0e14a5eea80666267caf" - integrity sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q== - dependencies: - accepts "~1.3.8" - array-flatten "1.1.1" - body-parser "1.20.0" - content-disposition "0.5.4" - content-type "~1.0.4" - cookie "0.5.0" - cookie-signature "1.0.6" - debug "2.6.9" - depd "2.0.0" - encodeurl "~1.0.2" - escape-html "~1.0.3" - etag "~1.8.1" - finalhandler "1.2.0" - fresh "0.5.2" - http-errors "2.0.0" - merge-descriptors "1.0.1" - methods "~1.1.2" - on-finished "2.4.1" - parseurl "~1.3.3" - path-to-regexp "0.1.7" - proxy-addr "~2.0.7" - qs "6.10.3" - range-parser "~1.2.1" - safe-buffer "5.2.1" - send "0.18.0" - serve-static "1.15.0" - setprototypeof "1.2.0" - statuses "2.0.1" - type-is "~1.6.18" - utils-merge "1.0.1" - vary "~1.1.2" - -ext@^1.1.2: - version "1.7.0" - resolved "https://registry.yarnpkg.com/ext/-/ext-1.7.0.tgz#0ea4383c0103d60e70be99e9a7f11027a33c4f5f" - integrity sha512-6hxeJYaL110a9b5TEJSj0gojyHQAmA2ch5Os+ySCiA1QGdS697XWY1pzsrSjqA9LDEEgdB/KypIlR59RcLuHYw== - dependencies: - type "^2.7.2" - -extend@~3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" - integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== - -extsprintf@1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" - integrity sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g== - -extsprintf@^1.2.0: - version "1.4.1" - 
resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.1.tgz#8d172c064867f235c0c84a596806d279bf4bcc07" - integrity sha512-Wrk35e8ydCKDj/ArClo1VrPVmN8zph5V4AtHwIuHhvMXsKf73UT3BOD+azBIW+3wOJ4FhEH7zyaJCFvChjYvMA== - -fast-deep-equal@^3.1.1: - version "3.1.3" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" - integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== - -fast-json-stable-stringify@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" - integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== - -fast-levenshtein@~2.0.6: - version "2.0.6" - resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" - integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== - -fast-redact@^3.0.0: - version "3.1.2" - resolved "https://registry.yarnpkg.com/fast-redact/-/fast-redact-3.1.2.tgz#d58e69e9084ce9fa4c1a6fa98a3e1ecf5d7839aa" - integrity sha512-+0em+Iya9fKGfEQGcd62Yv6onjBmmhV1uh86XVfOU8VwAe6kaFdQCWI9s0/Nnugx5Vd9tdbZ7e6gE2tR9dzXdw== - -fast-safe-stringify@2.1.1, fast-safe-stringify@^2.0.8: - version "2.1.1" - resolved "https://registry.yarnpkg.com/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz#c406a83b6e70d9e35ce3b30a81141df30aeba884" - integrity sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA== - -finalhandler@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" - integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== - dependencies: - debug "2.6.9" - encodeurl 
"~1.0.2" - escape-html "~1.0.3" - on-finished "2.4.1" - parseurl "~1.3.3" - statuses "2.0.1" - unpipe "~1.0.0" - -flatstr@^1.0.12: - version "1.0.12" - resolved "https://registry.yarnpkg.com/flatstr/-/flatstr-1.0.12.tgz#c2ba6a08173edbb6c9640e3055b95e287ceb5931" - integrity sha512-4zPxDyhCyiN2wIAtSLI6gc82/EjqZc1onI4Mz/l0pWrAlsSfYH/2ZIcU+e3oA2wDwbzIWNKwa23F8rh6+DRWkw== - -forever-agent@~0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" - integrity sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw== - -form-data@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" - integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.8" - mime-types "^2.1.12" - -form-data@~2.3.2: - version "2.3.3" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" - integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.6" - mime-types "^2.1.12" - -forwarded@0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" - integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== - -fresh@0.5.2: - version "0.5.2" - resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" - integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== - -function-bind@^1.1.1: - version "1.1.1" - resolved 
"https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" - integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== - -get-intrinsic@^1.0.2: - version "1.1.3" - resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.3.tgz#063c84329ad93e83893c7f4f243ef63ffa351385" - integrity sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A== - dependencies: - function-bind "^1.1.1" - has "^1.0.3" - has-symbols "^1.0.3" - -getpass@^0.1.1: - version "0.1.7" - resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" - integrity sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng== - dependencies: - assert-plus "^1.0.0" - -glob@^6.0.1: - version "6.0.4" - resolved "https://registry.yarnpkg.com/glob/-/glob-6.0.4.tgz#0f08860f6a155127b2fadd4f9ce24b1aab6e4d22" - integrity sha512-MKZeRNyYZAVVVG1oZeLaWie1uweH40m9AZwIwxyPbTSX4hHrVYSzLg0Ro5Z5R7XKkIX+Cc6oD1rqeDJnwsB8/A== - dependencies: - inflight "^1.0.4" - inherits "2" - minimatch "2 || 3" - once "^1.3.0" - path-is-absolute "^1.0.0" - -graceful-fs@^4.1.3: - version "4.2.10" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" - integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== - -handlebars@4.7.7: - version "4.7.7" - resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.7.tgz#9ce33416aad02dbd6c8fafa8240d5d98004945a1" - integrity sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA== - dependencies: - minimist "^1.2.5" - neo-async "^2.6.0" - source-map "^0.6.1" - wordwrap "^1.0.0" - optionalDependencies: - uglify-js "^3.1.4" - -har-schema@^2.0.0: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" - integrity sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q== - -har-validator@~5.1.0: - version "5.1.5" - resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.5.tgz#1f0803b9f8cb20c0fa13822df1ecddb36bde1efd" - integrity sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w== - dependencies: - ajv "^6.12.3" - har-schema "^2.0.0" - -has-flag@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" - integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== - -has-symbols@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" - integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== - -has@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" - integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== - dependencies: - function-bind "^1.1.1" - -html-encoding-sniffer@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" - integrity sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ== - dependencies: - whatwg-encoding "^1.0.5" - -http-errors@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" - integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== - dependencies: - depd "2.0.0" - 
inherits "2.0.4" - setprototypeof "1.2.0" - statuses "2.0.1" - toidentifier "1.0.1" - -http-proxy-agent@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz#8a8c8ef7f5932ccf953c296ca8291b95aa74aa3a" - integrity sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg== - dependencies: - "@tootallnate/once" "1" - agent-base "6" - debug "4" - -http-signature@~1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" - integrity sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ== - dependencies: - assert-plus "^1.0.0" - jsprim "^1.2.2" - sshpk "^1.7.0" - -http-status-codes@2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/http-status-codes/-/http-status-codes-2.2.0.tgz#bb2efe63d941dfc2be18e15f703da525169622be" - integrity sha512-feERVo9iWxvnejp3SEfm/+oNG517npqL2/PIA8ORjyOZjGC7TwCRQsZylciLS64i6pJ0wRYz3rkXLRwbtFa8Ng== - -https-proxy-agent@5.0.1, https-proxy-agent@^5.0.0: - version "5.0.1" - resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" - integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA== - dependencies: - agent-base "6" - debug "4" - -iconv-lite@0.4.24: - version "0.4.24" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" - integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== - dependencies: - safer-buffer ">= 2.1.2 < 3" - -inflight@^1.0.4: - version "1.0.6" - resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" - integrity 
sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== - dependencies: - once "^1.3.0" - wrappy "1" - -inherits@2, inherits@2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" - integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== - -ipaddr.js@1.9.1: - version "1.9.1" - resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" - integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== - -is-core-module@^2.9.0: - version "2.10.0" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.10.0.tgz#9012ede0a91c69587e647514e1d5277019e728ed" - integrity sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg== - dependencies: - has "^1.0.3" - -is-potential-custom-element-name@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" - integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== - -is-promise@^2.1.0, is-promise@^2.2.2: - version "2.2.2" - resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.2.2.tgz#39ab959ccbf9a774cf079f7b40c7a26f763135f1" - integrity sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ== - -is-typedarray@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" - integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA== - -isstream@~0.1.2: - version "0.1.2" - resolved 
"https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" - integrity sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g== - -js-yaml@4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" - integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== - dependencies: - argparse "^2.0.1" - -jsbn@~0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" - integrity sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg== - -jsdom@16.7.0: - version "16.7.0" - resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-16.7.0.tgz#918ae71965424b197c819f8183a754e18977b710" - integrity sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw== - dependencies: - abab "^2.0.5" - acorn "^8.2.4" - acorn-globals "^6.0.0" - cssom "^0.4.4" - cssstyle "^2.3.0" - data-urls "^2.0.0" - decimal.js "^10.2.1" - domexception "^2.0.1" - escodegen "^2.0.0" - form-data "^3.0.0" - html-encoding-sniffer "^2.0.1" - http-proxy-agent "^4.0.1" - https-proxy-agent "^5.0.0" - is-potential-custom-element-name "^1.0.1" - nwsapi "^2.2.0" - parse5 "6.0.1" - saxes "^5.0.1" - symbol-tree "^3.2.4" - tough-cookie "^4.0.0" - w3c-hr-time "^1.0.2" - w3c-xmlserializer "^2.0.0" - webidl-conversions "^6.1.0" - whatwg-encoding "^1.0.5" - whatwg-mimetype "^2.3.0" - whatwg-url "^8.5.0" - ws "^7.4.6" - xml-name-validator "^3.0.0" - -json-schema-traverse@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" - integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== - -json-schema@0.4.0: - version "0.4.0" - 
resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" - integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== - -json-stringify-safe@~5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" - integrity sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA== - -jsonparse@^1.2.0: - version "1.3.1" - resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-1.3.1.tgz#3f4dae4a91fac315f71062f8521cc239f1366280" - integrity sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg== - -jsonwebtoken@8.5.1: - version "8.5.1" - resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz#00e71e0b8df54c2121a1f26137df2280673bcc0d" - integrity sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w== - dependencies: - jws "^3.2.2" - lodash.includes "^4.3.0" - lodash.isboolean "^3.0.3" - lodash.isinteger "^4.0.4" - lodash.isnumber "^3.0.3" - lodash.isplainobject "^4.0.6" - lodash.isstring "^4.0.1" - lodash.once "^4.0.0" - ms "^2.1.1" - semver "^5.6.0" - -jsprim@^1.2.2: - version "1.4.2" - resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.2.tgz#712c65533a15c878ba59e9ed5f0e26d5b77c5feb" - integrity sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw== - dependencies: - assert-plus "1.0.0" - extsprintf "1.3.0" - json-schema "0.4.0" - verror "1.10.0" - -jwa@^1.4.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/jwa/-/jwa-1.4.1.tgz#743c32985cb9e98655530d53641b66c8645b039a" - integrity sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA== - dependencies: - buffer-equal-constant-time "1.0.1" - ecdsa-sig-formatter "1.0.11" - safe-buffer 
"^5.0.1" - -jws@^3.2.2: - version "3.2.2" - resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304" - integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA== - dependencies: - jwa "^1.4.1" - safe-buffer "^5.0.1" - -keygrip@~1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/keygrip/-/keygrip-1.1.0.tgz#871b1681d5e159c62a445b0c74b615e0917e7226" - integrity sha512-iYSchDJ+liQ8iwbSI2QqsQOvqv58eJCEanyJPJi+Khyu8smkcKSFUCbPwzFcL7YVtZ6eONjqRX/38caJ7QjRAQ== - dependencies: - tsscmp "1.0.6" - -kleur@4.1.5: - version "4.1.5" - resolved "https://registry.yarnpkg.com/kleur/-/kleur-4.1.5.tgz#95106101795f7050c6c650f350c683febddb1780" - integrity sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ== - -levn@~0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" - integrity sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA== - dependencies: - prelude-ls "~1.1.2" - type-check "~0.3.2" - -lockfile@1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/lockfile/-/lockfile-1.0.4.tgz#07f819d25ae48f87e538e6578b6964a4981a5609" - integrity sha512-cvbTwETRfsFh4nHsL1eGWapU1XFi5Ot9E85sWAwia7Y7EgB7vfqcZhTKZ+l7hCGxSPoushMv5GKhT5PdLv03WA== - dependencies: - signal-exit "^3.0.2" - -lodash.includes@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/lodash.includes/-/lodash.includes-4.3.0.tgz#60bb98a87cb923c68ca1e51325483314849f553f" - integrity sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w== - -lodash.isboolean@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz#6c2e171db2a257cd96802fd43b01b20d5f5870f6" - integrity 
sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg== - -lodash.isinteger@^4.0.4: - version "4.0.4" - resolved "https://registry.yarnpkg.com/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz#619c0af3d03f8b04c31f5882840b77b11cd68343" - integrity sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA== - -lodash.isnumber@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz#3ce76810c5928d03352301ac287317f11c0b1ffc" - integrity sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw== - -lodash.isplainobject@^4.0.6: - version "4.0.6" - resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb" - integrity sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA== - -lodash.isstring@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451" - integrity sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw== - -lodash.once@^4.0.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac" - integrity sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg== - -lodash@4, lodash@4.17.21, lodash@^4.7.0: - version "4.17.21" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" - integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== - -lowdb@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/lowdb/-/lowdb-1.0.0.tgz#5243be6b22786ccce30e50c9a33eac36b20c8064" - integrity 
sha512-2+x8esE/Wb9SQ1F9IHaYWfsC9FIecLOPrK4g17FGEayjUWH172H6nwicRovGvSE2CPZouc2MCIqCI7h9d+GftQ== - dependencies: - graceful-fs "^4.1.3" - is-promise "^2.1.0" - lodash "4" - pify "^3.0.0" - steno "^0.4.1" - -lru-cache@7.14.0: - version "7.14.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.14.0.tgz#21be64954a4680e303a09e9468f880b98a0b3c7f" - integrity sha512-EIRtP1GrSJny0dqb50QXRUNBxHJhcpxHC++M5tD7RYbvLLn5KVWKsbyswSSqDuU15UFi3bgTQIY8nhDMeF6aDQ== - -lru-cache@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" - integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== - dependencies: - yallist "^4.0.0" - -lru-queue@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/lru-queue/-/lru-queue-0.1.0.tgz#2738bd9f0d3cf4f84490c5736c48699ac632cda3" - integrity sha512-BpdYkt9EvGl8OfWHDQPISVpcl5xZthb+XPsbELj5AQXxIC8IriDZIQYjBJPEm5rS420sjZ0TLEzRcq5KdBhYrQ== - dependencies: - es5-ext "~0.10.2" - -lunr-mutable-indexes@2.3.2: - version "2.3.2" - resolved "https://registry.yarnpkg.com/lunr-mutable-indexes/-/lunr-mutable-indexes-2.3.2.tgz#864253489735d598c5140f3fb75c0a5c8be2e98c" - integrity sha512-Han6cdWAPPFM7C2AigS2Ofl3XjAT0yVMrUixodJEpyg71zCtZ2yzXc3s+suc/OaNt4ca6WJBEzVnEIjxCTwFMw== - dependencies: - lunr ">= 2.3.0 < 2.4.0" - -"lunr@>= 2.3.0 < 2.4.0": - version "2.3.9" - resolved "https://registry.yarnpkg.com/lunr/-/lunr-2.3.9.tgz#18b123142832337dd6e964df1a5a7707b25d35e1" - integrity sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow== - -make-error@^1.1.1: - version "1.3.6" - resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" - integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== - -marked@4.0.18: - version "4.0.18" - resolved 
"https://registry.yarnpkg.com/marked/-/marked-4.0.18.tgz#cd0ac54b2e5610cfb90e8fd46ccaa8292c9ed569" - integrity sha512-wbLDJ7Zh0sqA0Vdg6aqlbT+yPxqLblpAZh1mK2+AO2twQkPywvvqQNfEPVwSSRjZ7dZcdeVBIAgiO7MMp3Dszw== - -marked@4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/marked/-/marked-4.1.0.tgz#3fc6e7485f21c1ca5d6ec4a39de820e146954796" - integrity sha512-+Z6KDjSPa6/723PQYyc1axYZpYYpDnECDaU6hkaf5gqBieBkMKYReL5hteF2QizhlMbgbo8umXl/clZ67+GlsA== - -media-typer@0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" - integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== - -memoizee@0.4.15: - version "0.4.15" - resolved "https://registry.yarnpkg.com/memoizee/-/memoizee-0.4.15.tgz#e6f3d2da863f318d02225391829a6c5956555b72" - integrity sha512-UBWmJpLZd5STPm7PMUlOw/TSy972M+z8gcyQ5veOnSDRREz/0bmpyTfKt3/51DhEBqCZQn1udM/5flcSPYhkdQ== - dependencies: - d "^1.0.1" - es5-ext "^0.10.53" - es6-weak-map "^2.0.3" - event-emitter "^0.3.5" - is-promise "^2.2.2" - lru-queue "^0.1.0" - next-tick "^1.1.0" - timers-ext "^0.1.7" - -merge-descriptors@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" - integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== - -methods@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" - integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== - -mime-db@1.52.0, "mime-db@>= 1.43.0 < 2": - version "1.52.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" - integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== - 
-mime-types@^2.1.12, mime-types@~2.1.19, mime-types@~2.1.24, mime-types@~2.1.34: - version "2.1.35" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" - integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== - dependencies: - mime-db "1.52.0" - -mime@1.6.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" - integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== - -mime@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/mime/-/mime-3.0.0.tgz#b374550dca3a0c18443b0c950a6a58f1931cf7a7" - integrity sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A== - -"minimatch@2 || 3": - version "3.1.2" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" - integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== - dependencies: - brace-expansion "^1.1.7" - -minimatch@5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.0.tgz#1717b464f4971b144f6aabe8f2d0b8e4511e09c7" - integrity sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg== - dependencies: - brace-expansion "^2.0.1" - -minimist@^1.2.5, minimist@^1.2.6: - version "1.2.6" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" - integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== - -mkdirp@1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" - integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== - 
-mkdirp@~0.5.1: - version "0.5.6" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" - integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== - dependencies: - minimist "^1.2.6" - -ms@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" - integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== - -ms@2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" - integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== - -ms@2.1.3, ms@^2.1.1: - version "2.1.3" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" - integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== - -mv@2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/mv/-/mv-2.1.1.tgz#ae6ce0d6f6d5e0a4f7d893798d03c1ea9559b6a2" - integrity sha512-at/ZndSy3xEGJ8i0ygALh8ru9qy7gWW1cmkaqBN29JmMlIvM//MEO9y1sk/avxuwnPcfhkejkLsuPxH81BrkSg== - dependencies: - mkdirp "~0.5.1" - ncp "~2.0.0" - rimraf "~2.4.0" - -ncp@~2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ncp/-/ncp-2.0.0.tgz#195a21d6c46e361d2fb1281ba38b91e9df7bdbb3" - integrity sha512-zIdGUrPRFTUELUvr3Gmc7KZ2Sw/h1PiVM0Af/oHB6zgnV1ikqSfRk+TOufi79aHYCW3NiOXmr1BP5nWbzojLaA== - -negotiator@0.6.3: - version "0.6.3" - resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" - integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== - -neo-async@^2.6.0: - version "2.6.2" - resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" - integrity 
sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== - -next-tick@1, next-tick@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.1.0.tgz#1836ee30ad56d67ef281b22bd199f709449b35eb" - integrity sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ== - -node-fetch@2.6.7, node-fetch@^2: - version "2.6.7" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" - integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== - dependencies: - whatwg-url "^5.0.0" - -nwsapi@^2.2.0: - version "2.2.2" - resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.2.tgz#e5418863e7905df67d51ec95938d67bf801f0bb0" - integrity sha512-90yv+6538zuvUMnN+zCr8LuV6bPFdq50304114vJYJ8RDyK8D5O9Phpbd6SZWgI7PwzmmfN1upeOJlvybDSgCw== - -oauth-sign@~0.9.0: - version "0.9.0" - resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" - integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== - -object-assign@^4: - version "4.1.1" - resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" - integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== - -object-inspect@^1.9.0: - version "1.12.2" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea" - integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ== - -on-finished@2.4.1: - version "2.4.1" - resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" - integrity 
sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== - dependencies: - ee-first "1.1.1" - -on-headers@~1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" - integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== - -once@^1.3.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" - integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== - dependencies: - wrappy "1" - -optionator@^0.8.1: - version "0.8.3" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" - integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== - dependencies: - deep-is "~0.1.3" - fast-levenshtein "~2.0.6" - levn "~0.3.0" - prelude-ls "~1.1.2" - type-check "~0.3.2" - word-wrap "~1.2.3" - -parse-ms@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/parse-ms/-/parse-ms-2.1.0.tgz#348565a753d4391fa524029956b172cb7753097d" - integrity sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA== - -parse5@6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b" - integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== - -parseurl@~1.3.3: - version "1.3.3" - resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" - integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== - -path-is-absolute@^1.0.0: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" - integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== - -path-parse@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" - integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== - -path-to-regexp@0.1.7: - version "0.1.7" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" - integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== - -performance-now@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" - integrity sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow== - -pify@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" - integrity sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg== - -pino-std-serializers@^3.1.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/pino-std-serializers/-/pino-std-serializers-3.2.0.tgz#b56487c402d882eb96cd67c257868016b61ad671" - integrity sha512-EqX4pwDPrt3MuOAAUBMU0Tk5kR/YcCM5fNPEzgCO2zJ5HfX0vbiH9HbJglnyeQsN96Kznae6MWD47pZB5avTrg== - -pino@6.14.0: - version "6.14.0" - resolved "https://registry.yarnpkg.com/pino/-/pino-6.14.0.tgz#b745ea87a99a6c4c9b374e4f29ca7910d4c69f78" - integrity sha512-iuhEDel3Z3hF9Jfe44DPXR8l07bhjuFY3GMHIXbjnY9XcafbyDDwl2sN2vw2GjMPf5Nkoe+OFao7ffn9SXaKDg== - dependencies: - fast-redact "^3.0.0" - fast-safe-stringify "^2.0.8" - flatstr "^1.0.12" - pino-std-serializers "^3.1.0" - process-warning "^1.0.0" - 
quick-format-unescaped "^4.0.3" - sonic-boom "^1.0.2" - -pkginfo@0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/pkginfo/-/pkginfo-0.4.1.tgz#b5418ef0439de5425fc4995042dced14fb2a84ff" - integrity sha512-8xCNE/aT/EXKenuMDZ+xTVwkT8gsoHN2z/Q29l80u0ppGEXVvsKRzNMbtKhg8LS8k1tJLAHHylf6p4VFmP6XUQ== - -prelude-ls@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" - integrity sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w== - -prettier-bytes@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/prettier-bytes/-/prettier-bytes-1.0.4.tgz#994b02aa46f699c50b6257b5faaa7fe2557e62d6" - integrity sha512-dLbWOa4xBn+qeWeIF60qRoB6Pk2jX5P3DIVgOQyMyvBpu931Q+8dXz8X0snJiFkQdohDDLnZQECjzsAj75hgZQ== - -pretty-ms@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/pretty-ms/-/pretty-ms-7.0.1.tgz#7d903eaab281f7d8e03c66f867e239dc32fb73e8" - integrity sha512-973driJZvxiGOQ5ONsFhOF/DtzPMOMtgC11kCpUrPGMTgqp2q/1gwzCquocrN33is0VZ5GFHXZYMM9l6h67v2Q== - dependencies: - parse-ms "^2.1.0" - -process-warning@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/process-warning/-/process-warning-1.0.0.tgz#980a0b25dc38cd6034181be4b7726d89066b4616" - integrity sha512-du4wfLyj4yCZq1VupnVSZmRsPJsNuxoDQFdCFHLaYiEbFBD7QE0a+I4D7hOxrVnh78QE/YipFAj9lXHiXocV+Q== - -proxy-addr@~2.0.7: - version "2.0.7" - resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" - integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== - dependencies: - forwarded "0.2.0" - ipaddr.js "1.9.1" - -psl@^1.1.24, psl@^1.1.33: - version "1.9.0" - resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" - integrity 
sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag== - -punycode@^1.4.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" - integrity sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ== - -punycode@^2.1.0, punycode@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" - integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== - -qs@6.10.3: - version "6.10.3" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.10.3.tgz#d6cde1b2ffca87b5aa57889816c5f81535e22e8e" - integrity sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ== - dependencies: - side-channel "^1.0.4" - -qs@~6.5.2: - version "6.5.3" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.3.tgz#3aeeffc91967ef6e35c0e488ef46fb296ab76aad" - integrity sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA== - -querystringify@^2.1.1: - version "2.2.0" - resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" - integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== - -quick-format-unescaped@^4.0.3: - version "4.0.4" - resolved "https://registry.yarnpkg.com/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz#93ef6dd8d3453cbc7970dd614fad4c5954d6b5a7" - integrity sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg== - -range-parser@~1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" - integrity 
sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== - -raw-body@2.5.1: - version "2.5.1" - resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" - integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== - dependencies: - bytes "3.1.2" - http-errors "2.0.0" - iconv-lite "0.4.24" - unpipe "1.0.0" - -request@2.88.0: - version "2.88.0" - resolved "https://registry.yarnpkg.com/request/-/request-2.88.0.tgz#9c2fca4f7d35b592efe57c7f0a55e81052124fef" - integrity sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg== - dependencies: - aws-sign2 "~0.7.0" - aws4 "^1.8.0" - caseless "~0.12.0" - combined-stream "~1.0.6" - extend "~3.0.2" - forever-agent "~0.6.1" - form-data "~2.3.2" - har-validator "~5.1.0" - http-signature "~1.2.0" - is-typedarray "~1.0.0" - isstream "~0.1.2" - json-stringify-safe "~5.0.1" - mime-types "~2.1.19" - oauth-sign "~0.9.0" - performance-now "^2.1.0" - qs "~6.5.2" - safe-buffer "^5.1.2" - tough-cookie "~2.4.3" - tunnel-agent "^0.6.0" - uuid "^3.3.2" - -requires-port@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" - integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== - -resolve@^1.20.0: - version "1.22.1" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" - integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== - dependencies: - is-core-module "^2.9.0" - path-parse "^1.0.7" - supports-preserve-symlinks-flag "^1.0.0" - -rimraf@~2.4.0: - version "2.4.5" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.4.5.tgz#ee710ce5d93a8fdb856fb5ea8ff0e2d75934b2da" - integrity 
sha512-J5xnxTyqaiw06JjMftq7L9ouA448dw/E7dKghkP9WpKNuwmARNNg+Gk8/u5ryb9N/Yo2+z3MCwuqFK/+qPOPfQ== - dependencies: - glob "^6.0.1" - -rxjs@^7.5.2: - version "7.5.7" - resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.5.7.tgz#2ec0d57fdc89ece220d2e702730ae8f1e49def39" - integrity sha512-z9MzKh/UcOqB3i20H6rtrlaE/CgjLOvheWK/9ILrbhROGTweAi1BaFsTT9FbwZi5Trr1qNRs+MXkhmR06awzQA== - dependencies: - tslib "^2.1.0" - -safe-buffer@5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" - integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== - -safe-buffer@5.2.1, safe-buffer@^5.0.1, safe-buffer@^5.1.2: - version "5.2.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" - integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== - -"safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: - version "2.1.2" - resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" - integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== - -saxes@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/saxes/-/saxes-5.0.1.tgz#eebab953fa3b7608dbe94e5dadb15c888fa6696d" - integrity sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw== - dependencies: - xmlchars "^2.2.0" - -semver@7.3.7: - version "7.3.7" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f" - integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== - dependencies: - lru-cache "^6.0.0" - -semver@^5.6.0: - version "5.7.1" - resolved 
"https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" - integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== - -send@0.18.0: - version "0.18.0" - resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" - integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== - dependencies: - debug "2.6.9" - depd "2.0.0" - destroy "1.2.0" - encodeurl "~1.0.2" - escape-html "~1.0.3" - etag "~1.8.1" - fresh "0.5.2" - http-errors "2.0.0" - mime "1.6.0" - ms "2.1.3" - on-finished "2.4.1" - range-parser "~1.2.1" - statuses "2.0.1" - -serve-static@1.15.0: - version "1.15.0" - resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" - integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== - dependencies: - encodeurl "~1.0.2" - escape-html "~1.0.3" - parseurl "~1.3.3" - send "0.18.0" - -setprototypeof@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" - integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== - -side-channel@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" - integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== - dependencies: - call-bind "^1.0.0" - get-intrinsic "^1.0.2" - object-inspect "^1.9.0" - -signal-exit@^3.0.2: - version "3.0.7" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" - integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== - 
-sonic-boom@^1.0.2: - version "1.4.1" - resolved "https://registry.yarnpkg.com/sonic-boom/-/sonic-boom-1.4.1.tgz#d35d6a74076624f12e6f917ade7b9d75e918f53e" - integrity sha512-LRHh/A8tpW7ru89lrlkU4AszXt1dbwSjVWguGrmlxE7tawVmDBlI1PILMkXAxJTwqhgsEeTHzj36D5CmHgQmNg== - dependencies: - atomic-sleep "^1.0.0" - flatstr "^1.0.12" - -source-map@^0.6.1, source-map@~0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" - integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== - -sshpk@^1.7.0: - version "1.17.0" - resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.17.0.tgz#578082d92d4fe612b13007496e543fa0fbcbe4c5" - integrity sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ== - dependencies: - asn1 "~0.2.3" - assert-plus "^1.0.0" - bcrypt-pbkdf "^1.0.0" - dashdash "^1.12.0" - ecc-jsbn "~0.1.1" - getpass "^0.1.1" - jsbn "~0.1.0" - safer-buffer "^2.0.2" - tweetnacl "~0.14.0" - -statuses@2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" - integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== - -steno@^0.4.1: - version "0.4.4" - resolved "https://registry.yarnpkg.com/steno/-/steno-0.4.4.tgz#071105bdfc286e6615c0403c27e9d7b5dcb855cb" - integrity sha512-EEHMVYHNXFHfGtgjNITnka0aHhiAlo93F7z2/Pwd+g0teG9CnM3JIINM7hVVB5/rhw9voufD7Wukwgtw2uqh6w== - dependencies: - graceful-fs "^4.1.3" - -strip-ansi@^6.0.0: - version "6.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" - integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== - dependencies: - ansi-regex "^5.0.1" - -supports-color@^7.1.0: - version "7.2.0" - resolved 
"https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" - integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== - dependencies: - has-flag "^4.0.0" - -supports-preserve-symlinks-flag@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" - integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== - -symbol-tree@^3.2.4: - version "3.2.4" - resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" - integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== - -"through@>=2.2.7 <3": - version "2.3.8" - resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" - integrity sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg== - -timers-ext@^0.1.7: - version "0.1.7" - resolved "https://registry.yarnpkg.com/timers-ext/-/timers-ext-0.1.7.tgz#6f57ad8578e07a3fb9f91d9387d65647555e25c6" - integrity sha512-b85NUNzTSdodShTIbky6ZF02e8STtVVfD+fu4aXXShEELpozH+bCpJLYMPZbsABN2wDH7fJpqIoXxJpzbf0NqQ== - dependencies: - es5-ext "~0.10.46" - next-tick "1" - -toidentifier@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" - integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== - -tough-cookie@^4.0.0: - version "4.1.2" - resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.1.2.tgz#e53e84b85f24e0b65dd526f46628db6c85f6b874" - integrity sha512-G9fqXWoYFZgTc2z8Q5zaHy/vJMjm+WV0AkAeHxVCQiEB1b+dGvWzFW6QV07cY5jQ5gRkeid2qIkzkxUnmoQZUQ== - dependencies: - psl 
"^1.1.33" - punycode "^2.1.1" - universalify "^0.2.0" - url-parse "^1.5.3" - -tough-cookie@~2.4.3: - version "2.4.3" - resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.4.3.tgz#53f36da3f47783b0925afa06ff9f3b165280f781" - integrity sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ== - dependencies: - psl "^1.1.24" - punycode "^1.4.1" - -tr46@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240" - integrity sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw== - dependencies: - punycode "^2.1.1" - -tr46@~0.0.3: - version "0.0.3" - resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" - integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== - -ts-node@^10.9.1: - version "10.9.1" - resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.1.tgz#e73de9102958af9e1f0b168a6ff320e25adcff4b" - integrity sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw== - dependencies: - "@cspotcode/source-map-support" "^0.8.0" - "@tsconfig/node10" "^1.0.7" - "@tsconfig/node12" "^1.0.7" - "@tsconfig/node14" "^1.0.0" - "@tsconfig/node16" "^1.0.2" - acorn "^8.4.1" - acorn-walk "^8.1.1" - arg "^4.1.0" - create-require "^1.1.0" - diff "^4.0.1" - make-error "^1.1.1" - v8-compile-cache-lib "^3.0.1" - yn "3.1.1" - -tslib@^2.1.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3" - integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ== - -tsscmp@1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/tsscmp/-/tsscmp-1.0.6.tgz#85b99583ac3589ec4bfef825b5000aa911d605eb" - integrity 
sha512-LxhtAkPDTkVCMQjt2h6eBVY28KCjikZqZfMcC15YBeNjkgUpdCfBu5HoiOTDu86v6smE8yOjyEktJ8hlbANHQA== - -tunnel-agent@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" - integrity sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w== - dependencies: - safe-buffer "^5.0.1" - -tweetnacl@^0.14.3, tweetnacl@~0.14.0: - version "0.14.5" - resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" - integrity sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA== - -typanion@^3.3.1: - version "3.12.0" - resolved "https://registry.yarnpkg.com/typanion/-/typanion-3.12.0.tgz#8352830e5cf26ebfc5832da265886c9fb3ebb323" - integrity sha512-o59ZobUBsG+2dHnGVI2shscqqzHdzCOixCU0t8YXLxM2Su42J2ha7hY9V5+6SIBjVsw6aLqrlYznCgQGJN4Kag== - -type-check@~0.3.2: - version "0.3.2" - resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" - integrity sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg== - dependencies: - prelude-ls "~1.1.2" - -type-is@~1.6.18: - version "1.6.18" - resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" - integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== - dependencies: - media-typer "0.3.0" - mime-types "~2.1.24" - -type@^1.0.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" - integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== - -type@^2.7.2: - version "2.7.2" - resolved "https://registry.yarnpkg.com/type/-/type-2.7.2.tgz#2376a15a3a28b1efa0f5350dcf72d24df6ef98d0" - integrity 
sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw== - -typed-emitter@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/typed-emitter/-/typed-emitter-2.1.0.tgz#ca78e3d8ef1476f228f548d62e04e3d4d3fd77fb" - integrity sha512-g/KzbYKbH5C2vPkaXGu8DJlHrGKHLsM25Zg9WuC9pMGfuvT+X25tZQWo5fK1BjBm8+UrVE9LDCvaY0CQk+fXDA== - optionalDependencies: - rxjs "^7.5.2" - -typescript@^4.8.3: - version "4.8.4" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.8.4.tgz#c464abca159669597be5f96b8943500b238e60e6" - integrity sha512-QCh+85mCy+h0IGff8r5XWzOVSbBO+KfeYrMQh7NJ58QujwcE22u+NUSmUxqF+un70P9GXKxa2HCNiTTMJknyjQ== - -uglify-js@^3.1.4: - version "3.17.2" - resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.17.2.tgz#f55f668b9a64b213977ae688703b6bbb7ca861c6" - integrity sha512-bbxglRjsGQMchfvXZNusUcYgiB9Hx2K4AHYXQy2DITZ9Rd+JzhX7+hoocE5Winr7z2oHvPsekkBwXtigvxevXg== - -universalify@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.2.0.tgz#6451760566fa857534745ab1dde952d1b1761be0" - integrity sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg== - -unix-crypt-td-js@1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/unix-crypt-td-js/-/unix-crypt-td-js-1.1.4.tgz#4912dfad1c8aeb7d20fa0a39e4c31918c1d5d5dd" - integrity sha512-8rMeVYWSIyccIJscb9NdCfZKSRBKYTeVnwmiRYT2ulE3qd1RaDQ0xQDP+rI3ccIWbhu/zuo5cgN8z73belNZgw== - -unpipe@1.0.0, unpipe@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" - integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== - -uri-js@^4.2.2: - version "4.4.1" - resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" - integrity 
sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== - dependencies: - punycode "^2.1.0" - -url-parse@^1.5.3: - version "1.5.10" - resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1" - integrity sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ== - dependencies: - querystringify "^2.1.1" - requires-port "^1.0.0" - -utils-merge@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" - integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== - -uuid@^3.3.2: - version "3.4.0" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" - integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== - -v8-compile-cache-lib@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf" - integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg== - -validator@13.7.0: - version "13.7.0" - resolved "https://registry.yarnpkg.com/validator/-/validator-13.7.0.tgz#4f9658ba13ba8f3d82ee881d3516489ea85c0857" - integrity sha512-nYXQLCBkpJ8X6ltALua9dRrZDHVYxjJ1wgskNt1lH9fzGjs3tgojGSCBjmEPwkWS1y29+DrizMTW19Pr9uB2nw== - -vary@^1, vary@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" - integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== - -verdaccio-audit@10.2.2: - version "10.2.2" - resolved "https://registry.yarnpkg.com/verdaccio-audit/-/verdaccio-audit-10.2.2.tgz#254380e57932fda64b45cb739e9c42cc9fb2dfdf" - integrity 
sha512-f2uZlKD7vi0yEB0wN8WOf+eA/3SCyKD9cvK17Hh7Wm8f/bl7k1B3hHOTtUCn/yu85DGsj2pcNzrAfp2wMVgz9Q== - dependencies: - body-parser "1.20.0" - express "4.18.1" - https-proxy-agent "5.0.1" - node-fetch "2.6.7" - -verdaccio-htpasswd@10.5.0: - version "10.5.0" - resolved "https://registry.yarnpkg.com/verdaccio-htpasswd/-/verdaccio-htpasswd-10.5.0.tgz#de9ea2967856af765178b08485dc8e83f544a12c" - integrity sha512-olBsT3uy1TT2ZqmMCJUsMHrztJzoEpa8pxxvYrDZdWnEksl6mHV10lTeLbH9BUwbEheOeKkkdsERqUOs+if0jg== - dependencies: - "@verdaccio/file-locking" "10.3.0" - apache-md5 "1.1.7" - bcryptjs "2.4.3" - http-errors "2.0.0" - unix-crypt-td-js "1.1.4" - -verdaccio@5: - version "5.15.3" - resolved "https://registry.yarnpkg.com/verdaccio/-/verdaccio-5.15.3.tgz#4953471c0130c8e88b3d5562b5c63b38b575ed3d" - integrity sha512-8oEtepXF1oksGVYahi2HS1Yx9u6HD/4ukBDNDfwISmlNp7HVKJL2+kjzmDJWam88BpDNxOBU/LFXWSsEAFKFCQ== - dependencies: - "@verdaccio/commons-api" "10.2.0" - "@verdaccio/local-storage" "10.3.1" - "@verdaccio/readme" "10.4.1" - "@verdaccio/streams" "10.2.0" - "@verdaccio/ui-theme" "6.0.0-6-next.28" - JSONStream "1.3.5" - async "3.2.4" - body-parser "1.20.0" - clipanion "3.1.0" - compression "1.7.4" - cookies "0.8.0" - cors "2.8.5" - dayjs "1.11.5" - debug "^4.3.3" - envinfo "7.8.1" - eslint-import-resolver-node "0.3.6" - express "4.18.1" - express-rate-limit "5.5.1" - fast-safe-stringify "2.1.1" - handlebars "4.7.7" - http-errors "2.0.0" - js-yaml "4.1.0" - jsonwebtoken "8.5.1" - kleur "4.1.5" - lodash "4.17.21" - lru-cache "7.14.0" - lunr-mutable-indexes "2.3.2" - marked "4.1.0" - memoizee "0.4.15" - mime "3.0.0" - minimatch "5.1.0" - mkdirp "1.0.4" - mv "2.1.1" - pino "6.14.0" - pkginfo "0.4.1" - prettier-bytes "^1.0.4" - pretty-ms "^7.0.1" - request "2.88.0" - semver "7.3.7" - validator "13.7.0" - verdaccio-audit "10.2.2" - verdaccio-htpasswd "10.5.0" - -verror@1.10.0: - version "1.10.0" - resolved 
"https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" - integrity sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw== - dependencies: - assert-plus "^1.0.0" - core-util-is "1.0.2" - extsprintf "^1.2.0" - -w3c-hr-time@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" - integrity sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ== - dependencies: - browser-process-hrtime "^1.0.0" - -w3c-xmlserializer@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz#3e7104a05b75146cc60f564380b7f683acf1020a" - integrity sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA== - dependencies: - xml-name-validator "^3.0.0" - -webidl-conversions@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" - integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== - -webidl-conversions@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff" - integrity sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA== - -webidl-conversions@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514" - integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w== - -whatwg-encoding@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0" - integrity 
sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw== - dependencies: - iconv-lite "0.4.24" - -whatwg-mimetype@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" - integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== - -whatwg-url@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" - integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== - dependencies: - tr46 "~0.0.3" - webidl-conversions "^3.0.0" - -whatwg-url@^8.0.0, whatwg-url@^8.5.0: - version "8.7.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.7.0.tgz#656a78e510ff8f3937bc0bcbe9f5c0ac35941b77" - integrity sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg== - dependencies: - lodash "^4.7.0" - tr46 "^2.1.0" - webidl-conversions "^6.1.0" - -word-wrap@~1.2.3: - version "1.2.3" - resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" - integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== - -wordwrap@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" - integrity sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q== - -wrappy@1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" - integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== - -ws@^7.4.6: - version "7.5.9" - resolved 
"https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" - integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== - -xml-name-validator@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" - integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw== - -xmlchars@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" - integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== - -yallist@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" - integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== - -yn@3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" - integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== diff --git a/javascript/examples/create-react-app/.gitignore b/javascript/examples/create-react-app/.gitignore deleted file mode 100644 index c2658d7d..00000000 --- a/javascript/examples/create-react-app/.gitignore +++ /dev/null @@ -1 +0,0 @@ -node_modules/ diff --git a/javascript/examples/create-react-app/README.md b/javascript/examples/create-react-app/README.md deleted file mode 100644 index baa135ac..00000000 --- a/javascript/examples/create-react-app/README.md +++ /dev/null @@ -1,59 +0,0 @@ -# Automerge + `create-react-app` - -This is a little fiddly to get working. 
The problem is that `create-react-app` -hard codes a webpack configuration which does not support WASM modules, which we -require in order to bundle the WASM implementation of automerge. To get around -this we use [`craco`](https://github.com/dilanx/craco) which does some monkey -patching to allow us to modify the webpack config that `create-react-app` -bundles. Then we use a craco plugin called -[`craco-wasm`](https://www.npmjs.com/package/craco-wasm) to perform the -necessary modifications to the webpack config. It should be noted that this is -all quite fragile and ideally you probably don't want to use `create-react-app` -to do this in production. - -## Setup - -Assuming you have already run `create-react-app` and your working directory is -the project. - -### Install craco and craco-wasm - -```bash -yarn add craco craco-wasm -``` - -### Modify `package.json` to use `craco` for scripts - -In `package.json` the `scripts` section will look like this: - -```json - "scripts": { - "start": "craco start", - "build": "craco build", - "test": "craco test", - "eject": "craco eject" - }, -``` - -Replace that section with: - -```json - "scripts": { - "start": "craco start", - "build": "craco build", - "test": "craco test", - "eject": "craco eject" - }, -``` - -### Create `craco.config.js` - -In the root of the project add the following contents to `craco.config.js` - -```javascript -const cracoWasm = require("craco-wasm") - -module.exports = { - plugins: [cracoWasm()], -} -``` diff --git a/javascript/examples/create-react-app/craco.config.js b/javascript/examples/create-react-app/craco.config.js deleted file mode 100644 index 489dad8f..00000000 --- a/javascript/examples/create-react-app/craco.config.js +++ /dev/null @@ -1,5 +0,0 @@ -const cracoWasm = require("craco-wasm") - -module.exports = { - plugins: [cracoWasm()], -} diff --git a/javascript/examples/create-react-app/package.json b/javascript/examples/create-react-app/package.json deleted file mode 100644 index 
273d277b..00000000 --- a/javascript/examples/create-react-app/package.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "name": "automerge-create-react-app", - "version": "0.1.0", - "private": true, - "dependencies": { - "@craco/craco": "^7.0.0-alpha.8", - "craco-wasm": "0.0.1", - "@testing-library/jest-dom": "^5.16.5", - "@testing-library/react": "^13.4.0", - "@testing-library/user-event": "^13.5.0", - "@automerge/automerge": "2.0.0-alpha.7", - "react": "^18.2.0", - "react-dom": "^18.2.0", - "react-scripts": "5.0.1", - "web-vitals": "^2.1.4" - }, - "scripts": { - "start": "craco start", - "build": "craco build", - "test": "craco test", - "eject": "craco eject" - }, - "eslintConfig": { - "extends": [ - "react-app", - "react-app/jest" - ] - }, - "browserslist": { - "production": [ - ">0.2%", - "not dead", - "not op_mini all" - ], - "development": [ - "last 1 chrome version", - "last 1 firefox version", - "last 1 safari version" - ] - } -} diff --git a/javascript/examples/create-react-app/src/App.js b/javascript/examples/create-react-app/src/App.js deleted file mode 100644 index 7cfe997b..00000000 --- a/javascript/examples/create-react-app/src/App.js +++ /dev/null @@ -1,20 +0,0 @@ -import * as Automerge from "@automerge/automerge" -import logo from "./logo.svg" -import "./App.css" - -let doc = Automerge.init() -doc = Automerge.change(doc, d => (d.hello = "from automerge")) -const result = JSON.stringify(doc) - -function App() { - return ( -
-
- logo -

{result}

-
-
- ) -} - -export default App diff --git a/javascript/examples/create-react-app/src/App.test.js b/javascript/examples/create-react-app/src/App.test.js deleted file mode 100644 index ea796120..00000000 --- a/javascript/examples/create-react-app/src/App.test.js +++ /dev/null @@ -1,8 +0,0 @@ -import { render, screen } from "@testing-library/react" -import App from "./App" - -test("renders learn react link", () => { - render() - const linkElement = screen.getByText(/learn react/i) - expect(linkElement).toBeInTheDocument() -}) diff --git a/javascript/examples/create-react-app/src/index.css b/javascript/examples/create-react-app/src/index.css deleted file mode 100644 index 4a1df4db..00000000 --- a/javascript/examples/create-react-app/src/index.css +++ /dev/null @@ -1,13 +0,0 @@ -body { - margin: 0; - font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen", - "Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", - sans-serif; - -webkit-font-smoothing: antialiased; - -moz-osx-font-smoothing: grayscale; -} - -code { - font-family: source-code-pro, Menlo, Monaco, Consolas, "Courier New", - monospace; -} diff --git a/javascript/examples/create-react-app/src/index.js b/javascript/examples/create-react-app/src/index.js deleted file mode 100644 index 58c21edc..00000000 --- a/javascript/examples/create-react-app/src/index.js +++ /dev/null @@ -1,17 +0,0 @@ -import React from "react" -import ReactDOM from "react-dom/client" -import "./index.css" -import App from "./App" -import reportWebVitals from "./reportWebVitals" - -const root = ReactDOM.createRoot(document.getElementById("root")) -root.render( - - - -) - -// If you want to start measuring performance in your app, pass a function -// to log results (for example: reportWebVitals(console.log)) -// or send to an analytics endpoint. 
Learn more: https://bit.ly/CRA-vitals -reportWebVitals() diff --git a/javascript/examples/create-react-app/src/reportWebVitals.js b/javascript/examples/create-react-app/src/reportWebVitals.js deleted file mode 100644 index eee308db..00000000 --- a/javascript/examples/create-react-app/src/reportWebVitals.js +++ /dev/null @@ -1,13 +0,0 @@ -const reportWebVitals = onPerfEntry => { - if (onPerfEntry && onPerfEntry instanceof Function) { - import("web-vitals").then(({ getCLS, getFID, getFCP, getLCP, getTTFB }) => { - getCLS(onPerfEntry) - getFID(onPerfEntry) - getFCP(onPerfEntry) - getLCP(onPerfEntry) - getTTFB(onPerfEntry) - }) - } -} - -export default reportWebVitals diff --git a/javascript/examples/create-react-app/src/setupTests.js b/javascript/examples/create-react-app/src/setupTests.js deleted file mode 100644 index 6a0fd123..00000000 --- a/javascript/examples/create-react-app/src/setupTests.js +++ /dev/null @@ -1,5 +0,0 @@ -// jest-dom adds custom jest matchers for asserting on DOM nodes. -// allows you to do things like: -// expect(element).toHaveTextContent(/react/i) -// learn more: https://github.com/testing-library/jest-dom -import "@testing-library/jest-dom" diff --git a/javascript/examples/create-react-app/yarn.lock b/javascript/examples/create-react-app/yarn.lock deleted file mode 100644 index ec83af3b..00000000 --- a/javascript/examples/create-react-app/yarn.lock +++ /dev/null @@ -1,9120 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
-# yarn lockfile v1 - - -"@adobe/css-tools@^4.0.1": - version "4.0.1" - resolved "http://localhost:4873/@adobe%2fcss-tools/-/css-tools-4.0.1.tgz#b38b444ad3aa5fedbb15f2f746dcd934226a12dd" - integrity sha512-+u76oB43nOHrF4DDWRLWDCtci7f3QJoEBigemIdIeTi1ODqjx6Tad9NCVnPRwewWlKkVab5PlK8DCtPTyX7S8g== - -"@ampproject/remapping@^2.1.0": - version "2.2.0" - resolved "http://localhost:4873/@ampproject%2fremapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d" - integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== - dependencies: - "@jridgewell/gen-mapping" "^0.1.0" - "@jridgewell/trace-mapping" "^0.3.9" - -"@apideck/better-ajv-errors@^0.3.1": - version "0.3.6" - resolved "http://localhost:4873/@apideck%2fbetter-ajv-errors/-/better-ajv-errors-0.3.6.tgz#957d4c28e886a64a8141f7522783be65733ff097" - integrity sha512-P+ZygBLZtkp0qqOAJJVX4oX/sFo5JR3eBWwwuqHHhK0GIgQOKWrAfiAaWX0aArHkRWHMuggFEgAZNxVPwPZYaA== - dependencies: - json-schema "^0.4.0" - jsonpointer "^5.0.0" - leven "^3.1.0" - -"@automerge/automerge-wasm@0.1.12": - version "0.1.12" - resolved "https://registry.yarnpkg.com/@automerge/automerge-wasm/-/automerge-wasm-0.1.12.tgz#8ce25255d95d4ed6fb387de6858f7b7b7e2ed4a9" - integrity sha512-/xjX1217QYJ+QaoT6iHQw4hGNUIoc3xc65c9eCnfX5v9J9BkTOl05p2Cnr51O2rPc/M6TqZLmlvpvNVdcH9JpA== - -"@automerge/automerge@2.0.0-alpha.7": - version "2.0.0-alpha.7" - resolved "https://registry.yarnpkg.com/@automerge/automerge/-/automerge-2.0.0-alpha.7.tgz#2ee220d51bcd796074a18af74eeabb5f177e1f36" - integrity sha512-Wd2/GNeqtBybUtXclEE7bWBmmEkhv3q2ITQmLh18V0VvMPbqMBpcOKYzQFnKCyiPyRe5XcYeQAyGyunhE5V0ug== - dependencies: - "@automerge/automerge-wasm" "0.1.12" - uuid "^8.3" - -"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.18.6", "@babel/code-frame@^7.8.3": - version "7.18.6" - resolved 
"http://localhost:4873/@babel%2fcode-frame/-/code-frame-7.18.6.tgz#3b25d38c89600baa2dcc219edfa88a74eb2c427a" - integrity sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q== - dependencies: - "@babel/highlight" "^7.18.6" - -"@babel/compat-data@^7.17.7", "@babel/compat-data@^7.18.8", "@babel/compat-data@^7.19.3": - version "7.19.3" - resolved "http://localhost:4873/@babel%2fcompat-data/-/compat-data-7.19.3.tgz#707b939793f867f5a73b2666e6d9a3396eb03151" - integrity sha512-prBHMK4JYYK+wDjJF1q99KK4JLL+egWS4nmNqdlMUgCExMZ+iZW0hGhyC3VEbsPjvaN0TBhW//VIFwBrk8sEiw== - -"@babel/core@^7.1.0", "@babel/core@^7.11.1", "@babel/core@^7.12.3", "@babel/core@^7.16.0", "@babel/core@^7.7.2", "@babel/core@^7.8.0": - version "7.19.3" - resolved "http://localhost:4873/@babel%2fcore/-/core-7.19.3.tgz#2519f62a51458f43b682d61583c3810e7dcee64c" - integrity sha512-WneDJxdsjEvyKtXKsaBGbDeiyOjR5vYq4HcShxnIbG0qixpoHjI3MqeZM9NDvsojNCEBItQE4juOo/bU6e72gQ== - dependencies: - "@ampproject/remapping" "^2.1.0" - "@babel/code-frame" "^7.18.6" - "@babel/generator" "^7.19.3" - "@babel/helper-compilation-targets" "^7.19.3" - "@babel/helper-module-transforms" "^7.19.0" - "@babel/helpers" "^7.19.0" - "@babel/parser" "^7.19.3" - "@babel/template" "^7.18.10" - "@babel/traverse" "^7.19.3" - "@babel/types" "^7.19.3" - convert-source-map "^1.7.0" - debug "^4.1.0" - gensync "^1.0.0-beta.2" - json5 "^2.2.1" - semver "^6.3.0" - -"@babel/eslint-parser@^7.16.3": - version "7.19.1" - resolved "http://localhost:4873/@babel%2feslint-parser/-/eslint-parser-7.19.1.tgz#4f68f6b0825489e00a24b41b6a1ae35414ecd2f4" - integrity sha512-AqNf2QWt1rtu2/1rLswy6CDP7H9Oh3mMhk177Y67Rg8d7RD9WfOLLv8CGn6tisFvS2htm86yIe1yLF6I1UDaGQ== - dependencies: - "@nicolo-ribaudo/eslint-scope-5-internals" "5.1.1-v1" - eslint-visitor-keys "^2.1.0" - semver "^6.3.0" - -"@babel/generator@^7.19.3", "@babel/generator@^7.7.2": - version "7.19.3" - resolved 
"http://localhost:4873/@babel%2fgenerator/-/generator-7.19.3.tgz#d7f4d1300485b4547cb6f94b27d10d237b42bf59" - integrity sha512-fqVZnmp1ncvZU757UzDheKZpfPgatqY59XtW2/j/18H7u76akb8xqvjw82f+i2UKd/ksYsSick/BCLQUUtJ/qQ== - dependencies: - "@babel/types" "^7.19.3" - "@jridgewell/gen-mapping" "^0.3.2" - jsesc "^2.5.1" - -"@babel/helper-annotate-as-pure@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fhelper-annotate-as-pure/-/helper-annotate-as-pure-7.18.6.tgz#eaa49f6f80d5a33f9a5dd2276e6d6e451be0a6bb" - integrity sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-builder-binary-assignment-operator-visitor@^7.18.6": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fhelper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.18.9.tgz#acd4edfd7a566d1d51ea975dff38fd52906981bb" - integrity sha512-yFQ0YCHoIqarl8BCRwBL8ulYUaZpz3bNsA7oFepAzee+8/+ImtADXNOmO5vJvsPff3qi+hvpkY/NYBTrBQgdNw== - dependencies: - "@babel/helper-explode-assignable-expression" "^7.18.6" - "@babel/types" "^7.18.9" - -"@babel/helper-compilation-targets@^7.17.7", "@babel/helper-compilation-targets@^7.18.9", "@babel/helper-compilation-targets@^7.19.0", "@babel/helper-compilation-targets@^7.19.3": - version "7.19.3" - resolved "http://localhost:4873/@babel%2fhelper-compilation-targets/-/helper-compilation-targets-7.19.3.tgz#a10a04588125675d7c7ae299af86fa1b2ee038ca" - integrity sha512-65ESqLGyGmLvgR0mst5AdW1FkNlj9rQsCKduzEoEPhBCDFGXvz2jW6bXFG6i0/MrV2s7hhXjjb2yAzcPuQlLwg== - dependencies: - "@babel/compat-data" "^7.19.3" - "@babel/helper-validator-option" "^7.18.6" - browserslist "^4.21.3" - semver "^6.3.0" - -"@babel/helper-create-class-features-plugin@^7.18.6", "@babel/helper-create-class-features-plugin@^7.19.0": - version "7.19.0" - resolved 
"http://localhost:4873/@babel%2fhelper-create-class-features-plugin/-/helper-create-class-features-plugin-7.19.0.tgz#bfd6904620df4e46470bae4850d66be1054c404b" - integrity sha512-NRz8DwF4jT3UfrmUoZjd0Uph9HQnP30t7Ash+weACcyNkiYTywpIjDBgReJMKgr+n86sn2nPVVmJ28Dm053Kqw== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-function-name" "^7.19.0" - "@babel/helper-member-expression-to-functions" "^7.18.9" - "@babel/helper-optimise-call-expression" "^7.18.6" - "@babel/helper-replace-supers" "^7.18.9" - "@babel/helper-split-export-declaration" "^7.18.6" - -"@babel/helper-create-regexp-features-plugin@^7.18.6", "@babel/helper-create-regexp-features-plugin@^7.19.0": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fhelper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.19.0.tgz#7976aca61c0984202baca73d84e2337a5424a41b" - integrity sha512-htnV+mHX32DF81amCDrwIDr8nrp1PTm+3wfBN9/v8QJOLEioOCOG7qNyq0nHeFiWbT3Eb7gsPwEmV64UCQ1jzw== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - regexpu-core "^5.1.0" - -"@babel/helper-define-polyfill-provider@^0.3.3": - version "0.3.3" - resolved "http://localhost:4873/@babel%2fhelper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.3.tgz#8612e55be5d51f0cd1f36b4a5a83924e89884b7a" - integrity sha512-z5aQKU4IzbqCC1XH0nAqfsFLMVSo22SBKUc0BxGrLkolTdPTructy0ToNnlO2zA4j9Q/7pjMZf0DSY+DSTYzww== - dependencies: - "@babel/helper-compilation-targets" "^7.17.7" - "@babel/helper-plugin-utils" "^7.16.7" - debug "^4.1.1" - lodash.debounce "^4.0.8" - resolve "^1.14.2" - semver "^6.1.2" - -"@babel/helper-environment-visitor@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fhelper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz#0c0cee9b35d2ca190478756865bb3528422f51be" - integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg== - 
-"@babel/helper-explode-assignable-expression@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fhelper-explode-assignable-expression/-/helper-explode-assignable-expression-7.18.6.tgz#41f8228ef0a6f1a036b8dfdfec7ce94f9a6bc096" - integrity sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-function-name@^7.18.9", "@babel/helper-function-name@^7.19.0": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fhelper-function-name/-/helper-function-name-7.19.0.tgz#941574ed5390682e872e52d3f38ce9d1bef4648c" - integrity sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w== - dependencies: - "@babel/template" "^7.18.10" - "@babel/types" "^7.19.0" - -"@babel/helper-hoist-variables@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fhelper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678" - integrity sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-member-expression-to-functions@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fhelper-member-expression-to-functions/-/helper-member-expression-to-functions-7.18.9.tgz#1531661e8375af843ad37ac692c132841e2fd815" - integrity sha512-RxifAh2ZoVU67PyKIO4AMi1wTenGfMR/O/ae0CCRqwgBAt5v7xjdtRw7UoSbsreKrQn5t7r89eruK/9JjYHuDg== - dependencies: - "@babel/types" "^7.18.9" - -"@babel/helper-module-imports@^7.10.4", "@babel/helper-module-imports@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fhelper-module-imports/-/helper-module-imports-7.18.6.tgz#1e3ebdbbd08aad1437b428c50204db13c5a3ca6e" - integrity sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA== - dependencies: - "@babel/types" "^7.18.6" - 
-"@babel/helper-module-transforms@^7.18.6", "@babel/helper-module-transforms@^7.19.0": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fhelper-module-transforms/-/helper-module-transforms-7.19.0.tgz#309b230f04e22c58c6a2c0c0c7e50b216d350c30" - integrity sha512-3HBZ377Fe14RbLIA+ac3sY4PTgpxHVkFrESaWhoI5PuyXPBBX8+C34qblV9G89ZtycGJCmCI/Ut+VUDK4bltNQ== - dependencies: - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-module-imports" "^7.18.6" - "@babel/helper-simple-access" "^7.18.6" - "@babel/helper-split-export-declaration" "^7.18.6" - "@babel/helper-validator-identifier" "^7.18.6" - "@babel/template" "^7.18.10" - "@babel/traverse" "^7.19.0" - "@babel/types" "^7.19.0" - -"@babel/helper-optimise-call-expression@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fhelper-optimise-call-expression/-/helper-optimise-call-expression-7.18.6.tgz#9369aa943ee7da47edab2cb4e838acf09d290ffe" - integrity sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.16.7", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.18.9", "@babel/helper-plugin-utils@^7.19.0", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fhelper-plugin-utils/-/helper-plugin-utils-7.19.0.tgz#4796bb14961521f0f8715990bee2fb6e51ce21bf" - integrity sha512-40Ryx7I8mT+0gaNxm8JGTZFUITNqdLAgdg0hXzeVZxVD6nFsdhQvip6v8dqkRHzsz1VFpFAaOCHNn0vKBL7Czw== - -"@babel/helper-remap-async-to-generator@^7.18.6", "@babel/helper-remap-async-to-generator@^7.18.9": - version "7.18.9" - resolved 
"http://localhost:4873/@babel%2fhelper-remap-async-to-generator/-/helper-remap-async-to-generator-7.18.9.tgz#997458a0e3357080e54e1d79ec347f8a8cd28519" - integrity sha512-dI7q50YKd8BAv3VEfgg7PS7yD3Rtbi2J1XMXaalXO0W0164hYLnh8zpjRS0mte9MfVp/tltvr/cfdXPvJr1opA== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-wrap-function" "^7.18.9" - "@babel/types" "^7.18.9" - -"@babel/helper-replace-supers@^7.18.6", "@babel/helper-replace-supers@^7.18.9", "@babel/helper-replace-supers@^7.19.1": - version "7.19.1" - resolved "http://localhost:4873/@babel%2fhelper-replace-supers/-/helper-replace-supers-7.19.1.tgz#e1592a9b4b368aa6bdb8784a711e0bcbf0612b78" - integrity sha512-T7ahH7wV0Hfs46SFh5Jz3s0B6+o8g3c+7TMxu7xKfmHikg7EAZ3I2Qk9LFhjxXq8sL7UkP5JflezNwoZa8WvWw== - dependencies: - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-member-expression-to-functions" "^7.18.9" - "@babel/helper-optimise-call-expression" "^7.18.6" - "@babel/traverse" "^7.19.1" - "@babel/types" "^7.19.0" - -"@babel/helper-simple-access@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fhelper-simple-access/-/helper-simple-access-7.18.6.tgz#d6d8f51f4ac2978068df934b569f08f29788c7ea" - integrity sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-skip-transparent-expression-wrappers@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fhelper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.18.9.tgz#778d87b3a758d90b471e7b9918f34a9a02eb5818" - integrity sha512-imytd2gHi3cJPsybLRbmFrF7u5BIEuI2cNheyKi3/iOBC63kNn3q8Crn2xVuESli0aM4KYsyEqKyS7lFL8YVtw== - dependencies: - "@babel/types" "^7.18.9" - -"@babel/helper-split-export-declaration@^7.18.6": - version "7.18.6" - resolved 
"http://localhost:4873/@babel%2fhelper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz#7367949bc75b20c6d5a5d4a97bba2824ae8ef075" - integrity sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-string-parser@^7.18.10": - version "7.18.10" - resolved "http://localhost:4873/@babel%2fhelper-string-parser/-/helper-string-parser-7.18.10.tgz#181f22d28ebe1b3857fa575f5c290b1aaf659b56" - integrity sha512-XtIfWmeNY3i4t7t4D2t02q50HvqHybPqW2ki1kosnvWCwuCMeo81Jf0gwr85jy/neUdg5XDdeFE/80DXiO+njw== - -"@babel/helper-validator-identifier@^7.18.6", "@babel/helper-validator-identifier@^7.19.1": - version "7.19.1" - resolved "http://localhost:4873/@babel%2fhelper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz#7eea834cf32901ffdc1a7ee555e2f9c27e249ca2" - integrity sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w== - -"@babel/helper-validator-option@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fhelper-validator-option/-/helper-validator-option-7.18.6.tgz#bf0d2b5a509b1f336099e4ff36e1a63aa5db4db8" - integrity sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw== - -"@babel/helper-wrap-function@^7.18.9": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fhelper-wrap-function/-/helper-wrap-function-7.19.0.tgz#89f18335cff1152373222f76a4b37799636ae8b1" - integrity sha512-txX8aN8CZyYGTwcLhlk87KRqncAzhh5TpQamZUa0/u3an36NtDpUP6bQgBCBcLeBs09R/OwQu3OjK0k/HwfNDg== - dependencies: - "@babel/helper-function-name" "^7.19.0" - "@babel/template" "^7.18.10" - "@babel/traverse" "^7.19.0" - "@babel/types" "^7.19.0" - -"@babel/helpers@^7.19.0": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fhelpers/-/helpers-7.19.0.tgz#f30534657faf246ae96551d88dd31e9d1fa1fc18" - integrity 
sha512-DRBCKGwIEdqY3+rPJgG/dKfQy9+08rHIAJx8q2p+HSWP87s2HCrQmaAMMyMll2kIXKCW0cO1RdQskx15Xakftg== - dependencies: - "@babel/template" "^7.18.10" - "@babel/traverse" "^7.19.0" - "@babel/types" "^7.19.0" - -"@babel/highlight@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fhighlight/-/highlight-7.18.6.tgz#81158601e93e2563795adcbfbdf5d64be3f2ecdf" - integrity sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g== - dependencies: - "@babel/helper-validator-identifier" "^7.18.6" - chalk "^2.0.0" - js-tokens "^4.0.0" - -"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.18.10", "@babel/parser@^7.19.3": - version "7.19.3" - resolved "http://localhost:4873/@babel%2fparser/-/parser-7.19.3.tgz#8dd36d17c53ff347f9e55c328710321b49479a9a" - integrity sha512-pJ9xOlNWHiy9+FuFP09DEAFbAn4JskgRsVcc169w2xRBC3FRGuQEwjeIMMND9L2zc0iEhO/tGv4Zq+km+hxNpQ== - -"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.18.6.tgz#da5b8f9a580acdfbe53494dba45ea389fb09a4d2" - integrity sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.18.9.tgz#a11af19aa373d68d561f08e0a57242350ed0ec50" - integrity sha512-AHrP9jadvH7qlOj6PINbgSuphjQUAK7AOT7DPjBo9EHoLhQTnnK5u45e1Hd4DbSQEO9nqPWtQ89r+XEOWFScKg== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9" - 
"@babel/plugin-proposal-optional-chaining" "^7.18.9" - -"@babel/plugin-proposal-async-generator-functions@^7.19.1": - version "7.19.1" - resolved "http://localhost:4873/@babel%2fplugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.19.1.tgz#34f6f5174b688529342288cd264f80c9ea9fb4a7" - integrity sha512-0yu8vNATgLy4ivqMNBIwb1HebCelqN7YX8SL3FDXORv/RqT0zEEWUCH4GH44JsSrvCu6GqnAdR5EBFAPeNBB4Q== - dependencies: - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-plugin-utils" "^7.19.0" - "@babel/helper-remap-async-to-generator" "^7.18.9" - "@babel/plugin-syntax-async-generators" "^7.8.4" - -"@babel/plugin-proposal-class-properties@^7.16.0", "@babel/plugin-proposal-class-properties@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz#b110f59741895f7ec21a6fff696ec46265c446a3" - integrity sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-proposal-class-static-block@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.18.6.tgz#8aa81d403ab72d3962fc06c26e222dacfc9b9020" - integrity sha512-+I3oIiNxrCpup3Gi8n5IGMwj0gOCAjcJUSQEcotNnCCPMEnixawOQ+KeJPlgfjzx+FKQ1QSyZOWe7wmoJp7vhw== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-class-static-block" "^7.14.5" - -"@babel/plugin-proposal-decorators@^7.16.4": - version "7.19.3" - resolved "http://localhost:4873/@babel%2fplugin-proposal-decorators/-/plugin-proposal-decorators-7.19.3.tgz#c1977e4902a18cdf9051bf7bf08d97db2fd8b110" - integrity sha512-MbgXtNXqo7RTKYIXVchVJGPvaVufQH3pxvQyfbGvNw1DObIhph+PesYXJTcd8J4DdWibvf6Z2eanOyItX8WnJg== - 
dependencies: - "@babel/helper-create-class-features-plugin" "^7.19.0" - "@babel/helper-plugin-utils" "^7.19.0" - "@babel/helper-replace-supers" "^7.19.1" - "@babel/helper-split-export-declaration" "^7.18.6" - "@babel/plugin-syntax-decorators" "^7.19.0" - -"@babel/plugin-proposal-dynamic-import@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.18.6.tgz#72bcf8d408799f547d759298c3c27c7e7faa4d94" - integrity sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-dynamic-import" "^7.8.3" - -"@babel/plugin-proposal-export-namespace-from@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.18.9.tgz#5f7313ab348cdb19d590145f9247540e94761203" - integrity sha512-k1NtHyOMvlDDFeb9G5PhUXuGj8m/wiwojgQVEhJ/fsVsMCpLyOP4h0uGEjYJKrRI+EVPlb5Jk+Gt9P97lOGwtA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - "@babel/plugin-syntax-export-namespace-from" "^7.8.3" - -"@babel/plugin-proposal-json-strings@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-proposal-json-strings/-/plugin-proposal-json-strings-7.18.6.tgz#7e8788c1811c393aff762817e7dbf1ebd0c05f0b" - integrity sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-json-strings" "^7.8.3" - -"@babel/plugin-proposal-logical-assignment-operators@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.18.9.tgz#8148cbb350483bf6220af06fa6db3690e14b2e23" - integrity sha512-128YbMpjCrP35IOExw2Fq+x55LMP42DzhOhX2aNNIdI9avSWl2PI0yuBWarr3RYpZBSPtabfadkH2yeRiMD61Q== - 
dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" - -"@babel/plugin-proposal-nullish-coalescing-operator@^7.16.0", "@babel/plugin-proposal-nullish-coalescing-operator@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz#fdd940a99a740e577d6c753ab6fbb43fdb9467e1" - integrity sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - -"@babel/plugin-proposal-numeric-separator@^7.16.0", "@babel/plugin-proposal-numeric-separator@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz#899b14fbafe87f053d2c5ff05b36029c62e13c75" - integrity sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-numeric-separator" "^7.10.4" - -"@babel/plugin-proposal-object-rest-spread@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.18.9.tgz#f9434f6beb2c8cae9dfcf97d2a5941bbbf9ad4e7" - integrity sha512-kDDHQ5rflIeY5xl69CEqGEZ0KY369ehsCIEbTGb4siHG5BE9sga/T0r0OUwyZNLMmZE79E1kbsqAjwFCW4ds6Q== - dependencies: - "@babel/compat-data" "^7.18.8" - "@babel/helper-compilation-targets" "^7.18.9" - "@babel/helper-plugin-utils" "^7.18.9" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-transform-parameters" "^7.18.8" - -"@babel/plugin-proposal-optional-catch-binding@^7.18.6": - version "7.18.6" - resolved 
"http://localhost:4873/@babel%2fplugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.18.6.tgz#f9400d0e6a3ea93ba9ef70b09e72dd6da638a2cb" - integrity sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - -"@babel/plugin-proposal-optional-chaining@^7.16.0", "@babel/plugin-proposal-optional-chaining@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.18.9.tgz#e8e8fe0723f2563960e4bf5e9690933691915993" - integrity sha512-v5nwt4IqBXihxGsW2QmCWMDS3B3bzGIk/EQVZz2ei7f3NJl8NzAJVvUmpDW5q1CRNY+Beb/k58UAH1Km1N411w== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - -"@babel/plugin-proposal-private-methods@^7.16.0", "@babel/plugin-proposal-private-methods@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz#5209de7d213457548a98436fa2882f52f4be6bea" - integrity sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-proposal-private-property-in-object@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.18.6.tgz#a64137b232f0aca3733a67eb1a144c192389c503" - integrity sha512-9Rysx7FOctvT5ouj5JODjAFAkgGoudQuLPamZb0v1TGLpapdNaftzifU8NTWQm0IRjqoYypdrSmyWgkocDQ8Dw== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-create-class-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" 
"^7.18.6" - "@babel/plugin-syntax-private-property-in-object" "^7.14.5" - -"@babel/plugin-proposal-unicode-property-regex@^7.18.6", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.18.6.tgz#af613d2cd5e643643b65cded64207b15c85cb78e" - integrity sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-syntax-async-generators@^7.8.4": - version "7.8.4" - resolved "http://localhost:4873/@babel%2fplugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" - integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-bigint@^7.8.3": - version "7.8.3" - resolved "http://localhost:4873/@babel%2fplugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" - integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-class-properties@^7.12.13", "@babel/plugin-syntax-class-properties@^7.8.3": - version "7.12.13" - resolved "http://localhost:4873/@babel%2fplugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" - integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== - dependencies: - "@babel/helper-plugin-utils" "^7.12.13" - -"@babel/plugin-syntax-class-static-block@^7.14.5": - version "7.14.5" - resolved 
"http://localhost:4873/@babel%2fplugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz#195df89b146b4b78b3bf897fd7a257c84659d406" - integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw== - dependencies: - "@babel/helper-plugin-utils" "^7.14.5" - -"@babel/plugin-syntax-decorators@^7.19.0": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fplugin-syntax-decorators/-/plugin-syntax-decorators-7.19.0.tgz#5f13d1d8fce96951bea01a10424463c9a5b3a599" - integrity sha512-xaBZUEDntt4faL1yN8oIFlhfXeQAWJW7CLKYsHTUqriCUbj8xOra8bfxxKGi/UwExPFBuPdH4XfHc9rGQhrVkQ== - dependencies: - "@babel/helper-plugin-utils" "^7.19.0" - -"@babel/plugin-syntax-dynamic-import@^7.8.3": - version "7.8.3" - resolved "http://localhost:4873/@babel%2fplugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3" - integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-export-namespace-from@^7.8.3": - version "7.8.3" - resolved "http://localhost:4873/@babel%2fplugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a" - integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== - dependencies: - "@babel/helper-plugin-utils" "^7.8.3" - -"@babel/plugin-syntax-flow@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-syntax-flow/-/plugin-syntax-flow-7.18.6.tgz#774d825256f2379d06139be0c723c4dd444f3ca1" - integrity sha512-LUbR+KNTBWCUAqRG9ex5Gnzu2IOkt8jRJbHHXFT9q+L9zm7M/QQbEqXyw1n1pohYvOyWC8CjeyjrSaIwiYjK7A== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-syntax-import-assertions@^7.18.6": - version "7.18.6" - resolved 
"http://localhost:4873/@babel%2fplugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.18.6.tgz#cd6190500a4fa2fe31990a963ffab4b63e4505e4" - integrity sha512-/DU3RXad9+bZwrgWJQKbr39gYbJpLJHezqEzRzi/BHRlJ9zsQb4CK2CA/5apllXNomwA1qHwzvHl+AdEmC5krQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-syntax-import-meta@^7.8.3": - version "7.10.4" - resolved "http://localhost:4873/@babel%2fplugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" - integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-json-strings@^7.8.3": - version "7.8.3" - resolved "http://localhost:4873/@babel%2fplugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" - integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-jsx@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz#a8feef63b010150abd97f1649ec296e849943ca0" - integrity sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-syntax-logical-assignment-operators@^7.10.4", "@babel/plugin-syntax-logical-assignment-operators@^7.8.3": - version "7.10.4" - resolved "http://localhost:4873/@babel%2fplugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" - integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": - version 
"7.8.3" - resolved "http://localhost:4873/@babel%2fplugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" - integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-numeric-separator@^7.10.4", "@babel/plugin-syntax-numeric-separator@^7.8.3": - version "7.10.4" - resolved "http://localhost:4873/@babel%2fplugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" - integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-object-rest-spread@^7.8.3": - version "7.8.3" - resolved "http://localhost:4873/@babel%2fplugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" - integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-optional-catch-binding@^7.8.3": - version "7.8.3" - resolved "http://localhost:4873/@babel%2fplugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" - integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-optional-chaining@^7.8.3": - version "7.8.3" - resolved "http://localhost:4873/@babel%2fplugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" - integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== - dependencies: - "@babel/helper-plugin-utils" 
"^7.8.0" - -"@babel/plugin-syntax-private-property-in-object@^7.14.5": - version "7.14.5" - resolved "http://localhost:4873/@babel%2fplugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz#0dc6671ec0ea22b6e94a1114f857970cd39de1ad" - integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg== - dependencies: - "@babel/helper-plugin-utils" "^7.14.5" - -"@babel/plugin-syntax-top-level-await@^7.14.5", "@babel/plugin-syntax-top-level-await@^7.8.3": - version "7.14.5" - resolved "http://localhost:4873/@babel%2fplugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" - integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== - dependencies: - "@babel/helper-plugin-utils" "^7.14.5" - -"@babel/plugin-syntax-typescript@^7.18.6", "@babel/plugin-syntax-typescript@^7.7.2": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-syntax-typescript/-/plugin-syntax-typescript-7.18.6.tgz#1c09cd25795c7c2b8a4ba9ae49394576d4133285" - integrity sha512-mAWAuq4rvOepWCBid55JuRNvpTNf2UGVgoz4JV0fXEKolsVZDzsa4NqCef758WZJj/GDu0gVGItjKFiClTAmZA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-arrow-functions@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.18.6.tgz#19063fcf8771ec7b31d742339dac62433d0611fe" - integrity sha512-9S9X9RUefzrsHZmKMbDXxweEH+YlE8JJEuat9FdvW9Qh1cw7W64jELCtWNkPBPX5En45uy28KGvA/AySqUh8CQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-async-to-generator@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.18.6.tgz#ccda3d1ab9d5ced5265fdb13f1882d5476c71615" - integrity 
sha512-ARE5wZLKnTgPW7/1ftQmSi1CmkqqHo2DNmtztFhvgtOWSDfq0Cq9/9L+KnZNYSNrydBekhW3rwShduf59RoXag== - dependencies: - "@babel/helper-module-imports" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-remap-async-to-generator" "^7.18.6" - -"@babel/plugin-transform-block-scoped-functions@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.18.6.tgz#9187bf4ba302635b9d70d986ad70f038726216a8" - integrity sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-block-scoping@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-transform-block-scoping/-/plugin-transform-block-scoping-7.18.9.tgz#f9b7e018ac3f373c81452d6ada8bd5a18928926d" - integrity sha512-5sDIJRV1KtQVEbt/EIBwGy4T01uYIo4KRB3VUqzkhrAIOGx7AoctL9+Ux88btY0zXdDyPJ9mW+bg+v+XEkGmtw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - -"@babel/plugin-transform-classes@^7.19.0": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fplugin-transform-classes/-/plugin-transform-classes-7.19.0.tgz#0e61ec257fba409c41372175e7c1e606dc79bb20" - integrity sha512-YfeEE9kCjqTS9IitkgfJuxjcEtLUHMqa8yUJ6zdz8vR7hKuo6mOy2C05P0F1tdMmDCeuyidKnlrw/iTppHcr2A== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-compilation-targets" "^7.19.0" - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-function-name" "^7.19.0" - "@babel/helper-optimise-call-expression" "^7.18.6" - "@babel/helper-plugin-utils" "^7.19.0" - "@babel/helper-replace-supers" "^7.18.9" - "@babel/helper-split-export-declaration" "^7.18.6" - globals "^11.1.0" - -"@babel/plugin-transform-computed-properties@^7.18.9": - version "7.18.9" - resolved 
"http://localhost:4873/@babel%2fplugin-transform-computed-properties/-/plugin-transform-computed-properties-7.18.9.tgz#2357a8224d402dad623caf6259b611e56aec746e" - integrity sha512-+i0ZU1bCDymKakLxn5srGHrsAPRELC2WIbzwjLhHW9SIE1cPYkLCL0NlnXMZaM1vhfgA2+M7hySk42VBvrkBRw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - -"@babel/plugin-transform-destructuring@^7.18.13": - version "7.18.13" - resolved "http://localhost:4873/@babel%2fplugin-transform-destructuring/-/plugin-transform-destructuring-7.18.13.tgz#9e03bc4a94475d62b7f4114938e6c5c33372cbf5" - integrity sha512-TodpQ29XekIsex2A+YJPj5ax2plkGa8YYY6mFjCohk/IG9IY42Rtuj1FuDeemfg2ipxIFLzPeA83SIBnlhSIow== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - -"@babel/plugin-transform-dotall-regex@^7.18.6", "@babel/plugin-transform-dotall-regex@^7.4.4": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.18.6.tgz#b286b3e7aae6c7b861e45bed0a2fafd6b1a4fef8" - integrity sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-duplicate-keys@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.18.9.tgz#687f15ee3cdad6d85191eb2a372c4528eaa0ae0e" - integrity sha512-d2bmXCtZXYc59/0SanQKbiWINadaJXqtvIQIzd4+hNwkWBgyCd5F/2t1kXoUdvPMrxzPvhK6EMQRROxsue+mfw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - -"@babel/plugin-transform-exponentiation-operator@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.18.6.tgz#421c705f4521888c65e91fdd1af951bfefd4dacd" - integrity sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw== - 
dependencies: - "@babel/helper-builder-binary-assignment-operator-visitor" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-flow-strip-types@^7.16.0": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fplugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.19.0.tgz#e9e8606633287488216028719638cbbb2f2dde8f" - integrity sha512-sgeMlNaQVbCSpgLSKP4ZZKfsJVnFnNQlUSk6gPYzR/q7tzCgQF2t8RBKAP6cKJeZdveei7Q7Jm527xepI8lNLg== - dependencies: - "@babel/helper-plugin-utils" "^7.19.0" - "@babel/plugin-syntax-flow" "^7.18.6" - -"@babel/plugin-transform-for-of@^7.18.8": - version "7.18.8" - resolved "http://localhost:4873/@babel%2fplugin-transform-for-of/-/plugin-transform-for-of-7.18.8.tgz#6ef8a50b244eb6a0bdbad0c7c61877e4e30097c1" - integrity sha512-yEfTRnjuskWYo0k1mHUqrVWaZwrdq8AYbfrpqULOJOaucGSp4mNMVps+YtA8byoevxS/urwU75vyhQIxcCgiBQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-function-name@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-transform-function-name/-/plugin-transform-function-name-7.18.9.tgz#cc354f8234e62968946c61a46d6365440fc764e0" - integrity sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ== - dependencies: - "@babel/helper-compilation-targets" "^7.18.9" - "@babel/helper-function-name" "^7.18.9" - "@babel/helper-plugin-utils" "^7.18.9" - -"@babel/plugin-transform-literals@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-transform-literals/-/plugin-transform-literals-7.18.9.tgz#72796fdbef80e56fba3c6a699d54f0de557444bc" - integrity sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - -"@babel/plugin-transform-member-expression-literals@^7.18.6": - version "7.18.6" - resolved 
"http://localhost:4873/@babel%2fplugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.18.6.tgz#ac9fdc1a118620ac49b7e7a5d2dc177a1bfee88e" - integrity sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-modules-amd@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-modules-amd/-/plugin-transform-modules-amd-7.18.6.tgz#8c91f8c5115d2202f277549848874027d7172d21" - integrity sha512-Pra5aXsmTsOnjM3IajS8rTaLCy++nGM4v3YR4esk5PCsyg9z8NA5oQLwxzMUtDBd8F+UmVza3VxoAaWCbzH1rg== - dependencies: - "@babel/helper-module-transforms" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - babel-plugin-dynamic-import-node "^2.3.3" - -"@babel/plugin-transform-modules-commonjs@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.18.6.tgz#afd243afba166cca69892e24a8fd8c9f2ca87883" - integrity sha512-Qfv2ZOWikpvmedXQJDSbxNqy7Xr/j2Y8/KfijM0iJyKkBTmWuvCA1yeH1yDM7NJhBW/2aXxeucLj6i80/LAJ/Q== - dependencies: - "@babel/helper-module-transforms" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-simple-access" "^7.18.6" - babel-plugin-dynamic-import-node "^2.3.3" - -"@babel/plugin-transform-modules-systemjs@^7.19.0": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fplugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.19.0.tgz#5f20b471284430f02d9c5059d9b9a16d4b085a1f" - integrity sha512-x9aiR0WXAWmOWsqcsnrzGR+ieaTMVyGyffPVA7F8cXAGt/UxefYv6uSHZLkAFChN5M5Iy1+wjE+xJuPt22H39A== - dependencies: - "@babel/helper-hoist-variables" "^7.18.6" - "@babel/helper-module-transforms" "^7.19.0" - "@babel/helper-plugin-utils" "^7.19.0" - "@babel/helper-validator-identifier" "^7.18.6" - babel-plugin-dynamic-import-node "^2.3.3" - -"@babel/plugin-transform-modules-umd@^7.18.6": - 
version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-modules-umd/-/plugin-transform-modules-umd-7.18.6.tgz#81d3832d6034b75b54e62821ba58f28ed0aab4b9" - integrity sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ== - dependencies: - "@babel/helper-module-transforms" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-named-capturing-groups-regex@^7.19.1": - version "7.19.1" - resolved "http://localhost:4873/@babel%2fplugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.19.1.tgz#ec7455bab6cd8fb05c525a94876f435a48128888" - integrity sha512-oWk9l9WItWBQYS4FgXD4Uyy5kq898lvkXpXQxoJEY1RnvPk4R/Dvu2ebXU9q8lP+rlMwUQTFf2Ok6d78ODa0kw== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.19.0" - "@babel/helper-plugin-utils" "^7.19.0" - -"@babel/plugin-transform-new-target@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-new-target/-/plugin-transform-new-target-7.18.6.tgz#d128f376ae200477f37c4ddfcc722a8a1b3246a8" - integrity sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-object-super@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-object-super/-/plugin-transform-object-super-7.18.6.tgz#fb3c6ccdd15939b6ff7939944b51971ddc35912c" - integrity sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-replace-supers" "^7.18.6" - -"@babel/plugin-transform-parameters@^7.18.8": - version "7.18.8" - resolved "http://localhost:4873/@babel%2fplugin-transform-parameters/-/plugin-transform-parameters-7.18.8.tgz#ee9f1a0ce6d78af58d0956a9378ea3427cccb48a" - integrity 
sha512-ivfbE3X2Ss+Fj8nnXvKJS6sjRG4gzwPMsP+taZC+ZzEGjAYlvENixmt1sZ5Ca6tWls+BlKSGKPJ6OOXvXCbkFg== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-property-literals@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-property-literals/-/plugin-transform-property-literals-7.18.6.tgz#e22498903a483448e94e032e9bbb9c5ccbfc93a3" - integrity sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-react-constant-elements@^7.12.1": - version "7.18.12" - resolved "http://localhost:4873/@babel%2fplugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.18.12.tgz#edf3bec47eb98f14e84fa0af137fcc6aad8e0443" - integrity sha512-Q99U9/ttiu+LMnRU8psd23HhvwXmKWDQIpocm0JKaICcZHnw+mdQbHm6xnSy7dOl8I5PELakYtNBubNQlBXbZw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - -"@babel/plugin-transform-react-display-name@^7.16.0", "@babel/plugin-transform-react-display-name@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-react-display-name/-/plugin-transform-react-display-name-7.18.6.tgz#8b1125f919ef36ebdfff061d664e266c666b9415" - integrity sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-react-jsx-development@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.18.6.tgz#dbe5c972811e49c7405b630e4d0d2e1380c0ddc5" - integrity sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA== - dependencies: - "@babel/plugin-transform-react-jsx" "^7.18.6" - -"@babel/plugin-transform-react-jsx@^7.18.6": - version "7.19.0" - resolved 
"http://localhost:4873/@babel%2fplugin-transform-react-jsx/-/plugin-transform-react-jsx-7.19.0.tgz#b3cbb7c3a00b92ec8ae1027910e331ba5c500eb9" - integrity sha512-UVEvX3tXie3Szm3emi1+G63jyw1w5IcMY0FSKM+CRnKRI5Mr1YbCNgsSTwoTwKphQEG9P+QqmuRFneJPZuHNhg== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-module-imports" "^7.18.6" - "@babel/helper-plugin-utils" "^7.19.0" - "@babel/plugin-syntax-jsx" "^7.18.6" - "@babel/types" "^7.19.0" - -"@babel/plugin-transform-react-pure-annotations@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.18.6.tgz#561af267f19f3e5d59291f9950fd7b9663d0d844" - integrity sha512-I8VfEPg9r2TRDdvnHgPepTKvuRomzA8+u+nhY7qSI1fR2hRNebasZEETLyM5mAUr0Ku56OkXJ0I7NHJnO6cJiQ== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-regenerator@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-regenerator/-/plugin-transform-regenerator-7.18.6.tgz#585c66cb84d4b4bf72519a34cfce761b8676ca73" - integrity sha512-poqRI2+qiSdeldcz4wTSTXBRryoq3Gc70ye7m7UD5Ww0nE29IXqMl6r7Nd15WBgRd74vloEMlShtH6CKxVzfmQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - regenerator-transform "^0.15.0" - -"@babel/plugin-transform-reserved-words@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-reserved-words/-/plugin-transform-reserved-words-7.18.6.tgz#b1abd8ebf8edaa5f7fe6bbb8d2133d23b6a6f76a" - integrity sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-runtime@^7.16.4": - version "7.19.1" - resolved "http://localhost:4873/@babel%2fplugin-transform-runtime/-/plugin-transform-runtime-7.19.1.tgz#a3df2d7312eea624c7889a2dcd37fd1dfd25b2c6" - integrity 
sha512-2nJjTUFIzBMP/f/miLxEK9vxwW/KUXsdvN4sR//TmuDhe6yU2h57WmIOE12Gng3MDP/xpjUV/ToZRdcf8Yj4fA== - dependencies: - "@babel/helper-module-imports" "^7.18.6" - "@babel/helper-plugin-utils" "^7.19.0" - babel-plugin-polyfill-corejs2 "^0.3.3" - babel-plugin-polyfill-corejs3 "^0.6.0" - babel-plugin-polyfill-regenerator "^0.4.1" - semver "^6.3.0" - -"@babel/plugin-transform-shorthand-properties@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.18.6.tgz#6d6df7983d67b195289be24909e3f12a8f664dc9" - integrity sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-spread@^7.19.0": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fplugin-transform-spread/-/plugin-transform-spread-7.19.0.tgz#dd60b4620c2fec806d60cfaae364ec2188d593b6" - integrity sha512-RsuMk7j6n+r752EtzyScnWkQyuJdli6LdO5Klv8Yx0OfPVTcQkIUfS8clx5e9yHXzlnhOZF3CbQ8C2uP5j074w== - dependencies: - "@babel/helper-plugin-utils" "^7.19.0" - "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9" - -"@babel/plugin-transform-sticky-regex@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.18.6.tgz#c6706eb2b1524028e317720339583ad0f444adcc" - integrity sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-template-literals@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-transform-template-literals/-/plugin-transform-template-literals-7.18.9.tgz#04ec6f10acdaa81846689d63fae117dd9c243a5e" - integrity sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" 
- -"@babel/plugin-transform-typeof-symbol@^7.18.9": - version "7.18.9" - resolved "http://localhost:4873/@babel%2fplugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.18.9.tgz#c8cea68263e45addcd6afc9091429f80925762c0" - integrity sha512-SRfwTtF11G2aemAZWivL7PD+C9z52v9EvMqH9BuYbabyPuKUvSWks3oCg6041pT925L4zVFqaVBeECwsmlguEw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - -"@babel/plugin-transform-typescript@^7.18.6": - version "7.19.3" - resolved "http://localhost:4873/@babel%2fplugin-transform-typescript/-/plugin-transform-typescript-7.19.3.tgz#4f1db1e0fe278b42ddbc19ec2f6cd2f8262e35d6" - integrity sha512-z6fnuK9ve9u/0X0rRvI9MY0xg+DOUaABDYOe+/SQTxtlptaBB/V9JIUxJn6xp3lMBeb9qe8xSFmHU35oZDXD+w== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.19.0" - "@babel/helper-plugin-utils" "^7.19.0" - "@babel/plugin-syntax-typescript" "^7.18.6" - -"@babel/plugin-transform-unicode-escapes@^7.18.10": - version "7.18.10" - resolved "http://localhost:4873/@babel%2fplugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.18.10.tgz#1ecfb0eda83d09bbcb77c09970c2dd55832aa246" - integrity sha512-kKAdAI+YzPgGY/ftStBFXTI1LZFju38rYThnfMykS+IXy8BVx+res7s2fxf1l8I35DV2T97ezo6+SGrXz6B3iQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.9" - -"@babel/plugin-transform-unicode-regex@^7.18.6": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fplugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.18.6.tgz#194317225d8c201bbae103364ffe9e2cea36cdca" - integrity sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/preset-env@^7.11.0", "@babel/preset-env@^7.12.1", "@babel/preset-env@^7.16.4": - version "7.19.3" - resolved "http://localhost:4873/@babel%2fpreset-env/-/preset-env-7.19.3.tgz#52cd19abaecb3f176a4ff9cc5e15b7bf06bec754" - integrity 
sha512-ziye1OTc9dGFOAXSWKUqQblYHNlBOaDl8wzqf2iKXJAltYiR3hKHUKmkt+S9PppW7RQpq4fFCrwwpIDj/f5P4w== - dependencies: - "@babel/compat-data" "^7.19.3" - "@babel/helper-compilation-targets" "^7.19.3" - "@babel/helper-plugin-utils" "^7.19.0" - "@babel/helper-validator-option" "^7.18.6" - "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.18.6" - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.18.9" - "@babel/plugin-proposal-async-generator-functions" "^7.19.1" - "@babel/plugin-proposal-class-properties" "^7.18.6" - "@babel/plugin-proposal-class-static-block" "^7.18.6" - "@babel/plugin-proposal-dynamic-import" "^7.18.6" - "@babel/plugin-proposal-export-namespace-from" "^7.18.9" - "@babel/plugin-proposal-json-strings" "^7.18.6" - "@babel/plugin-proposal-logical-assignment-operators" "^7.18.9" - "@babel/plugin-proposal-nullish-coalescing-operator" "^7.18.6" - "@babel/plugin-proposal-numeric-separator" "^7.18.6" - "@babel/plugin-proposal-object-rest-spread" "^7.18.9" - "@babel/plugin-proposal-optional-catch-binding" "^7.18.6" - "@babel/plugin-proposal-optional-chaining" "^7.18.9" - "@babel/plugin-proposal-private-methods" "^7.18.6" - "@babel/plugin-proposal-private-property-in-object" "^7.18.6" - "@babel/plugin-proposal-unicode-property-regex" "^7.18.6" - "@babel/plugin-syntax-async-generators" "^7.8.4" - "@babel/plugin-syntax-class-properties" "^7.12.13" - "@babel/plugin-syntax-class-static-block" "^7.14.5" - "@babel/plugin-syntax-dynamic-import" "^7.8.3" - "@babel/plugin-syntax-export-namespace-from" "^7.8.3" - "@babel/plugin-syntax-import-assertions" "^7.18.6" - "@babel/plugin-syntax-json-strings" "^7.8.3" - "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - "@babel/plugin-syntax-numeric-separator" "^7.10.4" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - 
"@babel/plugin-syntax-optional-chaining" "^7.8.3" - "@babel/plugin-syntax-private-property-in-object" "^7.14.5" - "@babel/plugin-syntax-top-level-await" "^7.14.5" - "@babel/plugin-transform-arrow-functions" "^7.18.6" - "@babel/plugin-transform-async-to-generator" "^7.18.6" - "@babel/plugin-transform-block-scoped-functions" "^7.18.6" - "@babel/plugin-transform-block-scoping" "^7.18.9" - "@babel/plugin-transform-classes" "^7.19.0" - "@babel/plugin-transform-computed-properties" "^7.18.9" - "@babel/plugin-transform-destructuring" "^7.18.13" - "@babel/plugin-transform-dotall-regex" "^7.18.6" - "@babel/plugin-transform-duplicate-keys" "^7.18.9" - "@babel/plugin-transform-exponentiation-operator" "^7.18.6" - "@babel/plugin-transform-for-of" "^7.18.8" - "@babel/plugin-transform-function-name" "^7.18.9" - "@babel/plugin-transform-literals" "^7.18.9" - "@babel/plugin-transform-member-expression-literals" "^7.18.6" - "@babel/plugin-transform-modules-amd" "^7.18.6" - "@babel/plugin-transform-modules-commonjs" "^7.18.6" - "@babel/plugin-transform-modules-systemjs" "^7.19.0" - "@babel/plugin-transform-modules-umd" "^7.18.6" - "@babel/plugin-transform-named-capturing-groups-regex" "^7.19.1" - "@babel/plugin-transform-new-target" "^7.18.6" - "@babel/plugin-transform-object-super" "^7.18.6" - "@babel/plugin-transform-parameters" "^7.18.8" - "@babel/plugin-transform-property-literals" "^7.18.6" - "@babel/plugin-transform-regenerator" "^7.18.6" - "@babel/plugin-transform-reserved-words" "^7.18.6" - "@babel/plugin-transform-shorthand-properties" "^7.18.6" - "@babel/plugin-transform-spread" "^7.19.0" - "@babel/plugin-transform-sticky-regex" "^7.18.6" - "@babel/plugin-transform-template-literals" "^7.18.9" - "@babel/plugin-transform-typeof-symbol" "^7.18.9" - "@babel/plugin-transform-unicode-escapes" "^7.18.10" - "@babel/plugin-transform-unicode-regex" "^7.18.6" - "@babel/preset-modules" "^0.1.5" - "@babel/types" "^7.19.3" - babel-plugin-polyfill-corejs2 "^0.3.3" - 
babel-plugin-polyfill-corejs3 "^0.6.0" - babel-plugin-polyfill-regenerator "^0.4.1" - core-js-compat "^3.25.1" - semver "^6.3.0" - -"@babel/preset-modules@^0.1.5": - version "0.1.5" - resolved "http://localhost:4873/@babel%2fpreset-modules/-/preset-modules-0.1.5.tgz#ef939d6e7f268827e1841638dc6ff95515e115d9" - integrity sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - "@babel/plugin-proposal-unicode-property-regex" "^7.4.4" - "@babel/plugin-transform-dotall-regex" "^7.4.4" - "@babel/types" "^7.4.4" - esutils "^2.0.2" - -"@babel/preset-react@^7.12.5", "@babel/preset-react@^7.16.0": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fpreset-react/-/preset-react-7.18.6.tgz#979f76d6277048dc19094c217b507f3ad517dd2d" - integrity sha512-zXr6atUmyYdiWRVLOZahakYmOBHtWc2WGCkP8PYTgZi0iJXDY2CN180TdrIW4OGOAdLc7TifzDIvtx6izaRIzg== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-validator-option" "^7.18.6" - "@babel/plugin-transform-react-display-name" "^7.18.6" - "@babel/plugin-transform-react-jsx" "^7.18.6" - "@babel/plugin-transform-react-jsx-development" "^7.18.6" - "@babel/plugin-transform-react-pure-annotations" "^7.18.6" - -"@babel/preset-typescript@^7.16.0": - version "7.18.6" - resolved "http://localhost:4873/@babel%2fpreset-typescript/-/preset-typescript-7.18.6.tgz#ce64be3e63eddc44240c6358daefac17b3186399" - integrity sha512-s9ik86kXBAnD760aybBucdpnLsAt0jK1xqJn2juOn9lkOvSHV60os5hxoVJsPzMQxvnUJFAlkont2DvvaYEBtQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-validator-option" "^7.18.6" - "@babel/plugin-transform-typescript" "^7.18.6" - -"@babel/runtime-corejs3@^7.10.2": - version "7.19.1" - resolved "http://localhost:4873/@babel%2fruntime-corejs3/-/runtime-corejs3-7.19.1.tgz#f0cbbe7edda7c4109cd253bb1dee99aba4594ad9" - integrity 
sha512-j2vJGnkopRzH+ykJ8h68wrHnEUmtK//E723jjixiAl/PPf6FhqY/vYRcMVlNydRKQjQsTsYEjpx+DZMIvnGk/g== - dependencies: - core-js-pure "^3.25.1" - regenerator-runtime "^0.13.4" - -"@babel/runtime@^7.10.2", "@babel/runtime@^7.11.2", "@babel/runtime@^7.12.5", "@babel/runtime@^7.16.3", "@babel/runtime@^7.18.9", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2": - version "7.19.0" - resolved "http://localhost:4873/@babel%2fruntime/-/runtime-7.19.0.tgz#22b11c037b094d27a8a2504ea4dcff00f50e2259" - integrity sha512-eR8Lo9hnDS7tqkO7NsV+mKvCmv5boaXFSZ70DnfhcgiEne8hv9oCEd36Klw74EtizEqLsy4YnW8UWwpBVolHZA== - dependencies: - regenerator-runtime "^0.13.4" - -"@babel/template@^7.18.10", "@babel/template@^7.3.3": - version "7.18.10" - resolved "http://localhost:4873/@babel%2ftemplate/-/template-7.18.10.tgz#6f9134835970d1dbf0835c0d100c9f38de0c5e71" - integrity sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA== - dependencies: - "@babel/code-frame" "^7.18.6" - "@babel/parser" "^7.18.10" - "@babel/types" "^7.18.10" - -"@babel/traverse@^7.19.0", "@babel/traverse@^7.19.1", "@babel/traverse@^7.19.3", "@babel/traverse@^7.7.2": - version "7.19.3" - resolved "http://localhost:4873/@babel%2ftraverse/-/traverse-7.19.3.tgz#3a3c5348d4988ba60884e8494b0592b2f15a04b4" - integrity sha512-qh5yf6149zhq2sgIXmwjnsvmnNQC2iw70UFjp4olxucKrWd/dvlUsBI88VSLUsnMNF7/vnOiA+nk1+yLoCqROQ== - dependencies: - "@babel/code-frame" "^7.18.6" - "@babel/generator" "^7.19.3" - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-function-name" "^7.19.0" - "@babel/helper-hoist-variables" "^7.18.6" - "@babel/helper-split-export-declaration" "^7.18.6" - "@babel/parser" "^7.19.3" - "@babel/types" "^7.19.3" - debug "^4.1.0" - globals "^11.1.0" - -"@babel/types@^7.0.0", "@babel/types@^7.12.6", "@babel/types@^7.18.10", "@babel/types@^7.18.6", "@babel/types@^7.18.9", "@babel/types@^7.19.0", "@babel/types@^7.19.3", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4": 
- version "7.19.3" - resolved "http://localhost:4873/@babel%2ftypes/-/types-7.19.3.tgz#fc420e6bbe54880bce6779ffaf315f5e43ec9624" - integrity sha512-hGCaQzIY22DJlDh9CH7NOxgKkFjBk0Cw9xDO1Xmh2151ti7wiGfQ3LauXzL4HP1fmFlTX6XjpRETTpUcv7wQLw== - dependencies: - "@babel/helper-string-parser" "^7.18.10" - "@babel/helper-validator-identifier" "^7.19.1" - to-fast-properties "^2.0.0" - -"@bcoe/v8-coverage@^0.2.3": - version "0.2.3" - resolved "http://localhost:4873/@bcoe%2fv8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" - integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== - -"@craco/craco@^7.0.0-alpha.8": - version "7.0.0-alpha.8" - resolved "http://localhost:4873/@craco%2fcraco/-/craco-7.0.0-alpha.8.tgz#40f19f44198ff2341b40654c8c6b4f54c2217972" - integrity sha512-IN3/ldPaktGflPu342cg7n8LYa2c3x9H2XzngUkDzTjro25ig1GyVcUdnG1U0X6wrRTF9K1AxZ5su9jLbdyFUw== - dependencies: - autoprefixer "^10.4.12" - cosmiconfig "^7.0.1" - cosmiconfig-typescript-loader "^4.1.1" - cross-spawn "^7.0.3" - lodash "^4.17.21" - semver "^7.3.7" - webpack-merge "^5.8.0" - -"@csstools/normalize.css@*": - version "12.0.0" - resolved "http://localhost:4873/@csstools%2fnormalize.css/-/normalize.css-12.0.0.tgz#a9583a75c3f150667771f30b60d9f059473e62c4" - integrity sha512-M0qqxAcwCsIVfpFQSlGN5XjXWu8l5JDZN+fPt1LeW5SZexQTgnaEvgXAY+CeygRw0EeppWHi12JxESWiWrB0Sg== - -"@csstools/postcss-cascade-layers@^1.1.0": - version "1.1.1" - resolved "http://localhost:4873/@csstools%2fpostcss-cascade-layers/-/postcss-cascade-layers-1.1.1.tgz#8a997edf97d34071dd2e37ea6022447dd9e795ad" - integrity sha512-+KdYrpKC5TgomQr2DlZF4lDEpHcoxnj5IGddYYfBWJAKfj1JtuHUIqMa+E1pJJ+z3kvDViWMqyqPlG4Ja7amQA== - dependencies: - "@csstools/selector-specificity" "^2.0.2" - postcss-selector-parser "^6.0.10" - -"@csstools/postcss-color-function@^1.1.1": - version "1.1.1" - resolved 
"http://localhost:4873/@csstools%2fpostcss-color-function/-/postcss-color-function-1.1.1.tgz#2bd36ab34f82d0497cfacdc9b18d34b5e6f64b6b" - integrity sha512-Bc0f62WmHdtRDjf5f3e2STwRAl89N2CLb+9iAwzrv4L2hncrbDwnQD9PCq0gtAt7pOI2leIV08HIBUd4jxD8cw== - dependencies: - "@csstools/postcss-progressive-custom-properties" "^1.1.0" - postcss-value-parser "^4.2.0" - -"@csstools/postcss-font-format-keywords@^1.0.1": - version "1.0.1" - resolved "http://localhost:4873/@csstools%2fpostcss-font-format-keywords/-/postcss-font-format-keywords-1.0.1.tgz#677b34e9e88ae997a67283311657973150e8b16a" - integrity sha512-ZgrlzuUAjXIOc2JueK0X5sZDjCtgimVp/O5CEqTcs5ShWBa6smhWYbS0x5cVc/+rycTDbjjzoP0KTDnUneZGOg== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-hwb-function@^1.0.2": - version "1.0.2" - resolved "http://localhost:4873/@csstools%2fpostcss-hwb-function/-/postcss-hwb-function-1.0.2.tgz#ab54a9fce0ac102c754854769962f2422ae8aa8b" - integrity sha512-YHdEru4o3Rsbjmu6vHy4UKOXZD+Rn2zmkAmLRfPet6+Jz4Ojw8cbWxe1n42VaXQhD3CQUXXTooIy8OkVbUcL+w== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-ic-unit@^1.0.1": - version "1.0.1" - resolved "http://localhost:4873/@csstools%2fpostcss-ic-unit/-/postcss-ic-unit-1.0.1.tgz#28237d812a124d1a16a5acc5c3832b040b303e58" - integrity sha512-Ot1rcwRAaRHNKC9tAqoqNZhjdYBzKk1POgWfhN4uCOE47ebGcLRqXjKkApVDpjifL6u2/55ekkpnFcp+s/OZUw== - dependencies: - "@csstools/postcss-progressive-custom-properties" "^1.1.0" - postcss-value-parser "^4.2.0" - -"@csstools/postcss-is-pseudo-class@^2.0.7": - version "2.0.7" - resolved "http://localhost:4873/@csstools%2fpostcss-is-pseudo-class/-/postcss-is-pseudo-class-2.0.7.tgz#846ae6c0d5a1eaa878fce352c544f9c295509cd1" - integrity sha512-7JPeVVZHd+jxYdULl87lvjgvWldYu+Bc62s9vD/ED6/QTGjy0jy0US/f6BG53sVMTBJ1lzKZFpYmofBN9eaRiA== - dependencies: - "@csstools/selector-specificity" "^2.0.0" - postcss-selector-parser "^6.0.10" - -"@csstools/postcss-nested-calc@^1.0.0": - version "1.0.0" - resolved 
"http://localhost:4873/@csstools%2fpostcss-nested-calc/-/postcss-nested-calc-1.0.0.tgz#d7e9d1d0d3d15cf5ac891b16028af2a1044d0c26" - integrity sha512-JCsQsw1wjYwv1bJmgjKSoZNvf7R6+wuHDAbi5f/7MbFhl2d/+v+TvBTU4BJH3G1X1H87dHl0mh6TfYogbT/dJQ== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-normalize-display-values@^1.0.1": - version "1.0.1" - resolved "http://localhost:4873/@csstools%2fpostcss-normalize-display-values/-/postcss-normalize-display-values-1.0.1.tgz#15da54a36e867b3ac5163ee12c1d7f82d4d612c3" - integrity sha512-jcOanIbv55OFKQ3sYeFD/T0Ti7AMXc9nM1hZWu8m/2722gOTxFg7xYu4RDLJLeZmPUVQlGzo4jhzvTUq3x4ZUw== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-oklab-function@^1.1.1": - version "1.1.1" - resolved "http://localhost:4873/@csstools%2fpostcss-oklab-function/-/postcss-oklab-function-1.1.1.tgz#88cee0fbc8d6df27079ebd2fa016ee261eecf844" - integrity sha512-nJpJgsdA3dA9y5pgyb/UfEzE7W5Ka7u0CX0/HIMVBNWzWemdcTH3XwANECU6anWv/ao4vVNLTMxhiPNZsTK6iA== - dependencies: - "@csstools/postcss-progressive-custom-properties" "^1.1.0" - postcss-value-parser "^4.2.0" - -"@csstools/postcss-progressive-custom-properties@^1.1.0", "@csstools/postcss-progressive-custom-properties@^1.3.0": - version "1.3.0" - resolved "http://localhost:4873/@csstools%2fpostcss-progressive-custom-properties/-/postcss-progressive-custom-properties-1.3.0.tgz#542292558384361776b45c85226b9a3a34f276fa" - integrity sha512-ASA9W1aIy5ygskZYuWams4BzafD12ULvSypmaLJT2jvQ8G0M3I8PRQhC0h7mG0Z3LI05+agZjqSR9+K9yaQQjA== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-stepped-value-functions@^1.0.1": - version "1.0.1" - resolved "http://localhost:4873/@csstools%2fpostcss-stepped-value-functions/-/postcss-stepped-value-functions-1.0.1.tgz#f8772c3681cc2befed695e2b0b1d68e22f08c4f4" - integrity sha512-dz0LNoo3ijpTOQqEJLY8nyaapl6umbmDcgj4AD0lgVQ572b2eqA1iGZYTTWhrcrHztWDDRAX2DGYyw2VBjvCvQ== - dependencies: - postcss-value-parser "^4.2.0" - 
-"@csstools/postcss-text-decoration-shorthand@^1.0.0": - version "1.0.0" - resolved "http://localhost:4873/@csstools%2fpostcss-text-decoration-shorthand/-/postcss-text-decoration-shorthand-1.0.0.tgz#ea96cfbc87d921eca914d3ad29340d9bcc4c953f" - integrity sha512-c1XwKJ2eMIWrzQenN0XbcfzckOLLJiczqy+YvfGmzoVXd7pT9FfObiSEfzs84bpE/VqfpEuAZ9tCRbZkZxxbdw== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-trigonometric-functions@^1.0.2": - version "1.0.2" - resolved "http://localhost:4873/@csstools%2fpostcss-trigonometric-functions/-/postcss-trigonometric-functions-1.0.2.tgz#94d3e4774c36d35dcdc88ce091336cb770d32756" - integrity sha512-woKaLO///4bb+zZC2s80l+7cm07M7268MsyG3M0ActXXEFi6SuhvriQYcb58iiKGbjwwIU7n45iRLEHypB47Og== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-unset-value@^1.0.2": - version "1.0.2" - resolved "http://localhost:4873/@csstools%2fpostcss-unset-value/-/postcss-unset-value-1.0.2.tgz#c99bb70e2cdc7312948d1eb41df2412330b81f77" - integrity sha512-c8J4roPBILnelAsdLr4XOAR/GsTm0GJi4XpcfvoWk3U6KiTCqiFYc63KhRMQQX35jYMp4Ao8Ij9+IZRgMfJp1g== - -"@csstools/selector-specificity@^2.0.0", "@csstools/selector-specificity@^2.0.2": - version "2.0.2" - resolved "http://localhost:4873/@csstools%2fselector-specificity/-/selector-specificity-2.0.2.tgz#1bfafe4b7ed0f3e4105837e056e0a89b108ebe36" - integrity sha512-IkpVW/ehM1hWKln4fCA3NzJU8KwD+kIOvPZA4cqxoJHtE21CCzjyp+Kxbu0i5I4tBNOlXPL9mjwnWlL0VEG4Fg== - -"@eslint/eslintrc@^1.3.2": - version "1.3.2" - resolved "http://localhost:4873/@eslint%2feslintrc/-/eslintrc-1.3.2.tgz#58b69582f3b7271d8fa67fe5251767a5b38ea356" - integrity sha512-AXYd23w1S/bv3fTs3Lz0vjiYemS08jWkI3hYyS9I1ry+0f+Yjs1wm+sU0BS8qDOPrBIkp4qHYC16I8uVtpLajQ== - dependencies: - ajv "^6.12.4" - debug "^4.3.2" - espree "^9.4.0" - globals "^13.15.0" - ignore "^5.2.0" - import-fresh "^3.2.1" - js-yaml "^4.1.0" - minimatch "^3.1.2" - strip-json-comments "^3.1.1" - -"@humanwhocodes/config-array@^0.10.5": - version "0.10.7" - resolved 
"http://localhost:4873/@humanwhocodes%2fconfig-array/-/config-array-0.10.7.tgz#6d53769fd0c222767e6452e8ebda825c22e9f0dc" - integrity sha512-MDl6D6sBsaV452/QSdX+4CXIjZhIcI0PELsxUjk4U828yd58vk3bTIvk/6w5FY+4hIy9sLW0sfrV7K7Kc++j/w== - dependencies: - "@humanwhocodes/object-schema" "^1.2.1" - debug "^4.1.1" - minimatch "^3.0.4" - -"@humanwhocodes/gitignore-to-minimatch@^1.0.2": - version "1.0.2" - resolved "http://localhost:4873/@humanwhocodes%2fgitignore-to-minimatch/-/gitignore-to-minimatch-1.0.2.tgz#316b0a63b91c10e53f242efb4ace5c3b34e8728d" - integrity sha512-rSqmMJDdLFUsyxR6FMtD00nfQKKLFb1kv+qBbOVKqErvloEIJLo5bDTJTQNTYgeyp78JsA7u/NPi5jT1GR/MuA== - -"@humanwhocodes/module-importer@^1.0.1": - version "1.0.1" - resolved "http://localhost:4873/@humanwhocodes%2fmodule-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c" - integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== - -"@humanwhocodes/object-schema@^1.2.1": - version "1.2.1" - resolved "http://localhost:4873/@humanwhocodes%2fobject-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" - integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== - -"@istanbuljs/load-nyc-config@^1.0.0": - version "1.1.0" - resolved "http://localhost:4873/@istanbuljs%2fload-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" - integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== - dependencies: - camelcase "^5.3.1" - find-up "^4.1.0" - get-package-type "^0.1.0" - js-yaml "^3.13.1" - resolve-from "^5.0.0" - -"@istanbuljs/schema@^0.1.2": - version "0.1.3" - resolved "http://localhost:4873/@istanbuljs%2fschema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" - integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== - -"@jest/console@^27.5.1": - 
version "27.5.1" - resolved "http://localhost:4873/@jest%2fconsole/-/console-27.5.1.tgz#260fe7239602fe5130a94f1aa386eff54b014bba" - integrity sha512-kZ/tNpS3NXn0mlXXXPNuDZnb4c0oZ20r4K5eemM2k30ZC3G0T02nXUvyhf5YdbXWHPEJLc9qGLxEZ216MdL+Zg== - dependencies: - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - jest-message-util "^27.5.1" - jest-util "^27.5.1" - slash "^3.0.0" - -"@jest/console@^28.1.3": - version "28.1.3" - resolved "http://localhost:4873/@jest%2fconsole/-/console-28.1.3.tgz#2030606ec03a18c31803b8a36382762e447655df" - integrity sha512-QPAkP5EwKdK/bxIr6C1I4Vs0rm2nHiANzj/Z5X2JQkrZo6IqvC4ldZ9K95tF0HdidhA8Bo6egxSzUFPYKcEXLw== - dependencies: - "@jest/types" "^28.1.3" - "@types/node" "*" - chalk "^4.0.0" - jest-message-util "^28.1.3" - jest-util "^28.1.3" - slash "^3.0.0" - -"@jest/core@^27.5.1": - version "27.5.1" - resolved "http://localhost:4873/@jest%2fcore/-/core-27.5.1.tgz#267ac5f704e09dc52de2922cbf3af9edcd64b626" - integrity sha512-AK6/UTrvQD0Cd24NSqmIA6rKsu0tKIxfiCducZvqxYdmMisOYAsdItspT+fQDQYARPf8XgjAFZi0ogW2agH5nQ== - dependencies: - "@jest/console" "^27.5.1" - "@jest/reporters" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - ansi-escapes "^4.2.1" - chalk "^4.0.0" - emittery "^0.8.1" - exit "^0.1.2" - graceful-fs "^4.2.9" - jest-changed-files "^27.5.1" - jest-config "^27.5.1" - jest-haste-map "^27.5.1" - jest-message-util "^27.5.1" - jest-regex-util "^27.5.1" - jest-resolve "^27.5.1" - jest-resolve-dependencies "^27.5.1" - jest-runner "^27.5.1" - jest-runtime "^27.5.1" - jest-snapshot "^27.5.1" - jest-util "^27.5.1" - jest-validate "^27.5.1" - jest-watcher "^27.5.1" - micromatch "^4.0.4" - rimraf "^3.0.0" - slash "^3.0.0" - strip-ansi "^6.0.0" - -"@jest/environment@^27.5.1": - version "27.5.1" - resolved "http://localhost:4873/@jest%2fenvironment/-/environment-27.5.1.tgz#d7425820511fe7158abbecc010140c3fd3be9c74" - integrity 
sha512-/WQjhPJe3/ghaol/4Bq480JKXV/Rfw8nQdN7f41fM8VDHLcxKXou6QyXAh3EFr9/bVG3x74z1NWDkP87EiY8gA== - dependencies: - "@jest/fake-timers" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - jest-mock "^27.5.1" - -"@jest/expect-utils@^29.1.2": - version "29.1.2" - resolved "http://localhost:4873/@jest%2fexpect-utils/-/expect-utils-29.1.2.tgz#66dbb514d38f7d21456bc774419c9ae5cca3f88d" - integrity sha512-4a48bhKfGj/KAH39u0ppzNTABXQ8QPccWAFUFobWBaEMSMp+sB31Z2fK/l47c4a/Mu1po2ffmfAIPxXbVTXdtg== - dependencies: - jest-get-type "^29.0.0" - -"@jest/fake-timers@^27.5.1": - version "27.5.1" - resolved "http://localhost:4873/@jest%2ffake-timers/-/fake-timers-27.5.1.tgz#76979745ce0579c8a94a4678af7a748eda8ada74" - integrity sha512-/aPowoolwa07k7/oM3aASneNeBGCmGQsc3ugN4u6s4C/+s5M64MFo/+djTdiwcbQlRfFElGuDXWzaWj6QgKObQ== - dependencies: - "@jest/types" "^27.5.1" - "@sinonjs/fake-timers" "^8.0.1" - "@types/node" "*" - jest-message-util "^27.5.1" - jest-mock "^27.5.1" - jest-util "^27.5.1" - -"@jest/globals@^27.5.1": - version "27.5.1" - resolved "http://localhost:4873/@jest%2fglobals/-/globals-27.5.1.tgz#7ac06ce57ab966566c7963431cef458434601b2b" - integrity sha512-ZEJNB41OBQQgGzgyInAv0UUfDDj3upmHydjieSxFvTRuZElrx7tXg/uVQ5hYVEwiXs3+aMsAeEc9X7xiSKCm4Q== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/types" "^27.5.1" - expect "^27.5.1" - -"@jest/reporters@^27.5.1": - version "27.5.1" - resolved "http://localhost:4873/@jest%2freporters/-/reporters-27.5.1.tgz#ceda7be96170b03c923c37987b64015812ffec04" - integrity sha512-cPXh9hWIlVJMQkVk84aIvXuBB4uQQmFqZiacloFuGiP3ah1sbCxCosidXFDfqG8+6fO1oR2dTJTlsOy4VFmUfw== - dependencies: - "@bcoe/v8-coverage" "^0.2.3" - "@jest/console" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - collect-v8-coverage "^1.0.0" - exit "^0.1.2" - glob "^7.1.2" - graceful-fs "^4.2.9" - istanbul-lib-coverage "^3.0.0" - istanbul-lib-instrument "^5.1.0" - istanbul-lib-report 
"^3.0.0" - istanbul-lib-source-maps "^4.0.0" - istanbul-reports "^3.1.3" - jest-haste-map "^27.5.1" - jest-resolve "^27.5.1" - jest-util "^27.5.1" - jest-worker "^27.5.1" - slash "^3.0.0" - source-map "^0.6.0" - string-length "^4.0.1" - terminal-link "^2.0.0" - v8-to-istanbul "^8.1.0" - -"@jest/schemas@^28.1.3": - version "28.1.3" - resolved "http://localhost:4873/@jest%2fschemas/-/schemas-28.1.3.tgz#ad8b86a66f11f33619e3d7e1dcddd7f2d40ff905" - integrity sha512-/l/VWsdt/aBXgjshLWOFyFt3IVdYypu5y2Wn2rOO1un6nkqIn8SLXzgIMYXFyYsRWDyF5EthmKJMIdJvk08grg== - dependencies: - "@sinclair/typebox" "^0.24.1" - -"@jest/schemas@^29.0.0": - version "29.0.0" - resolved "http://localhost:4873/@jest%2fschemas/-/schemas-29.0.0.tgz#5f47f5994dd4ef067fb7b4188ceac45f77fe952a" - integrity sha512-3Ab5HgYIIAnS0HjqJHQYZS+zXc4tUmTmBH3z83ajI6afXp8X3ZtdLX+nXx+I7LNkJD7uN9LAVhgnjDgZa2z0kA== - dependencies: - "@sinclair/typebox" "^0.24.1" - -"@jest/source-map@^27.5.1": - version "27.5.1" - resolved "http://localhost:4873/@jest%2fsource-map/-/source-map-27.5.1.tgz#6608391e465add4205eae073b55e7f279e04e8cf" - integrity sha512-y9NIHUYF3PJRlHk98NdC/N1gl88BL08aQQgu4k4ZopQkCw9t9cV8mtl3TV8b/YCB8XaVTFrmUTAJvjsntDireg== - dependencies: - callsites "^3.0.0" - graceful-fs "^4.2.9" - source-map "^0.6.0" - -"@jest/test-result@^27.5.1": - version "27.5.1" - resolved "http://localhost:4873/@jest%2ftest-result/-/test-result-27.5.1.tgz#56a6585fa80f7cdab72b8c5fc2e871d03832f5bb" - integrity sha512-EW35l2RYFUcUQxFJz5Cv5MTOxlJIQs4I7gxzi2zVU7PJhOwfYq1MdC5nhSmYjX1gmMmLPvB3sIaC+BkcHRBfag== - dependencies: - "@jest/console" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/istanbul-lib-coverage" "^2.0.0" - collect-v8-coverage "^1.0.0" - -"@jest/test-result@^28.1.3": - version "28.1.3" - resolved "http://localhost:4873/@jest%2ftest-result/-/test-result-28.1.3.tgz#5eae945fd9f4b8fcfce74d239e6f725b6bf076c5" - integrity sha512-kZAkxnSE+FqE8YjW8gNuoVkkC9I7S1qmenl8sGcDOLropASP+BkcGKwhXoyqQuGOGeYY0y/ixjrd/iERpEXHNg== - dependencies: - 
"@jest/console" "^28.1.3" - "@jest/types" "^28.1.3" - "@types/istanbul-lib-coverage" "^2.0.0" - collect-v8-coverage "^1.0.0" - -"@jest/test-sequencer@^27.5.1": - version "27.5.1" - resolved "http://localhost:4873/@jest%2ftest-sequencer/-/test-sequencer-27.5.1.tgz#4057e0e9cea4439e544c6353c6affe58d095745b" - integrity sha512-LCheJF7WB2+9JuCS7VB/EmGIdQuhtqjRNI9A43idHv3E4KltCTsPsLxvdaubFHSYwY/fNjMWjl6vNRhDiN7vpQ== - dependencies: - "@jest/test-result" "^27.5.1" - graceful-fs "^4.2.9" - jest-haste-map "^27.5.1" - jest-runtime "^27.5.1" - -"@jest/transform@^27.5.1": - version "27.5.1" - resolved "http://localhost:4873/@jest%2ftransform/-/transform-27.5.1.tgz#6c3501dcc00c4c08915f292a600ece5ecfe1f409" - integrity sha512-ipON6WtYgl/1329g5AIJVbUuEh0wZVbdpGwC99Jw4LwuoBNS95MVphU6zOeD9pDkon+LLbFL7lOQRapbB8SCHw== - dependencies: - "@babel/core" "^7.1.0" - "@jest/types" "^27.5.1" - babel-plugin-istanbul "^6.1.1" - chalk "^4.0.0" - convert-source-map "^1.4.0" - fast-json-stable-stringify "^2.0.0" - graceful-fs "^4.2.9" - jest-haste-map "^27.5.1" - jest-regex-util "^27.5.1" - jest-util "^27.5.1" - micromatch "^4.0.4" - pirates "^4.0.4" - slash "^3.0.0" - source-map "^0.6.1" - write-file-atomic "^3.0.0" - -"@jest/types@^27.5.1": - version "27.5.1" - resolved "http://localhost:4873/@jest%2ftypes/-/types-27.5.1.tgz#3c79ec4a8ba61c170bf937bcf9e98a9df175ec80" - integrity sha512-Cx46iJ9QpwQTjIdq5VJu2QTMMs3QlEjI0x1QbBP5W1+nMzyc2XmimiRR/CbX9TO0cPTeUlxWMOu8mslYsJ8DEw== - dependencies: - "@types/istanbul-lib-coverage" "^2.0.0" - "@types/istanbul-reports" "^3.0.0" - "@types/node" "*" - "@types/yargs" "^16.0.0" - chalk "^4.0.0" - -"@jest/types@^28.1.3": - version "28.1.3" - resolved "http://localhost:4873/@jest%2ftypes/-/types-28.1.3.tgz#b05de80996ff12512bc5ceb1d208285a7d11748b" - integrity sha512-RyjiyMUZrKz/c+zlMFO1pm70DcIlST8AeWTkoUdZevew44wcNZQHsEVOiCVtgVnlFFD82FPaXycys58cf2muVQ== - dependencies: - "@jest/schemas" "^28.1.3" - "@types/istanbul-lib-coverage" "^2.0.0" - 
"@types/istanbul-reports" "^3.0.0" - "@types/node" "*" - "@types/yargs" "^17.0.8" - chalk "^4.0.0" - -"@jest/types@^29.1.2": - version "29.1.2" - resolved "http://localhost:4873/@jest%2ftypes/-/types-29.1.2.tgz#7442d32b16bcd7592d9614173078b8c334ec730a" - integrity sha512-DcXGtoTykQB5jiwCmVr8H4vdg2OJhQex3qPkG+ISyDO7xQXbt/4R6dowcRyPemRnkH7JoHvZuxPBdlq+9JxFCg== - dependencies: - "@jest/schemas" "^29.0.0" - "@types/istanbul-lib-coverage" "^2.0.0" - "@types/istanbul-reports" "^3.0.0" - "@types/node" "*" - "@types/yargs" "^17.0.8" - chalk "^4.0.0" - -"@jridgewell/gen-mapping@^0.1.0": - version "0.1.1" - resolved "http://localhost:4873/@jridgewell%2fgen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" - integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== - dependencies: - "@jridgewell/set-array" "^1.0.0" - "@jridgewell/sourcemap-codec" "^1.4.10" - -"@jridgewell/gen-mapping@^0.3.0", "@jridgewell/gen-mapping@^0.3.2": - version "0.3.2" - resolved "http://localhost:4873/@jridgewell%2fgen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" - integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== - dependencies: - "@jridgewell/set-array" "^1.0.1" - "@jridgewell/sourcemap-codec" "^1.4.10" - "@jridgewell/trace-mapping" "^0.3.9" - -"@jridgewell/resolve-uri@^3.0.3": - version "3.1.0" - resolved "http://localhost:4873/@jridgewell%2fresolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" - integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== - -"@jridgewell/set-array@^1.0.0", "@jridgewell/set-array@^1.0.1": - version "1.1.2" - resolved "http://localhost:4873/@jridgewell%2fset-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" - integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== - 
-"@jridgewell/source-map@^0.3.2": - version "0.3.2" - resolved "http://localhost:4873/@jridgewell%2fsource-map/-/source-map-0.3.2.tgz#f45351aaed4527a298512ec72f81040c998580fb" - integrity sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw== - dependencies: - "@jridgewell/gen-mapping" "^0.3.0" - "@jridgewell/trace-mapping" "^0.3.9" - -"@jridgewell/sourcemap-codec@^1.4.10": - version "1.4.14" - resolved "http://localhost:4873/@jridgewell%2fsourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" - integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== - -"@jridgewell/trace-mapping@^0.3.14", "@jridgewell/trace-mapping@^0.3.9": - version "0.3.15" - resolved "http://localhost:4873/@jridgewell%2ftrace-mapping/-/trace-mapping-0.3.15.tgz#aba35c48a38d3fd84b37e66c9c0423f9744f9774" - integrity sha512-oWZNOULl+UbhsgB51uuZzglikfIKSUBO/M9W2OfEjn7cmqoAiCgmv9lyACTUacZwBz0ITnJ2NqjU8Tx0DHL88g== - dependencies: - "@jridgewell/resolve-uri" "^3.0.3" - "@jridgewell/sourcemap-codec" "^1.4.10" - -"@leichtgewicht/ip-codec@^2.0.1": - version "2.0.4" - resolved "http://localhost:4873/@leichtgewicht%2fip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b" - integrity sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A== - -"@nicolo-ribaudo/eslint-scope-5-internals@5.1.1-v1": - version "5.1.1-v1" - resolved "http://localhost:4873/@nicolo-ribaudo%2feslint-scope-5-internals/-/eslint-scope-5-internals-5.1.1-v1.tgz#dbf733a965ca47b1973177dc0bb6c889edcfb129" - integrity sha512-54/JRvkLIzzDWshCWfuhadfrfZVPiElY8Fcgmg1HroEly/EDSszzhBAsarCux+D/kOslTRquNzuyGSmUSTTHGg== - dependencies: - eslint-scope "5.1.1" - -"@nodelib/fs.scandir@2.1.5": - version "2.1.5" - resolved "http://localhost:4873/@nodelib%2ffs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" - integrity 
sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== - dependencies: - "@nodelib/fs.stat" "2.0.5" - run-parallel "^1.1.9" - -"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": - version "2.0.5" - resolved "http://localhost:4873/@nodelib%2ffs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" - integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== - -"@nodelib/fs.walk@^1.2.3": - version "1.2.8" - resolved "http://localhost:4873/@nodelib%2ffs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" - integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== - dependencies: - "@nodelib/fs.scandir" "2.1.5" - fastq "^1.6.0" - -"@pmmmwh/react-refresh-webpack-plugin@^0.5.3": - version "0.5.7" - resolved "http://localhost:4873/@pmmmwh%2freact-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.5.7.tgz#58f8217ba70069cc6a73f5d7e05e85b458c150e2" - integrity sha512-bcKCAzF0DV2IIROp9ZHkRJa6O4jy7NlnHdWL3GmcUxYWNjLXkK5kfELELwEfSP5hXPfVL/qOGMAROuMQb9GG8Q== - dependencies: - ansi-html-community "^0.0.8" - common-path-prefix "^3.0.0" - core-js-pure "^3.8.1" - error-stack-parser "^2.0.6" - find-up "^5.0.0" - html-entities "^2.1.0" - loader-utils "^2.0.0" - schema-utils "^3.0.0" - source-map "^0.7.3" - -"@rollup/plugin-babel@^5.2.0": - version "5.3.1" - resolved "http://localhost:4873/@rollup%2fplugin-babel/-/plugin-babel-5.3.1.tgz#04bc0608f4aa4b2e4b1aebf284344d0f68fda283" - integrity sha512-WFfdLWU/xVWKeRQnKmIAQULUI7Il0gZnBIH/ZFO069wYIfPu+8zrfp/KMW0atmELoRDq8FbiP3VCss9MhCut7Q== - dependencies: - "@babel/helper-module-imports" "^7.10.4" - "@rollup/pluginutils" "^3.1.0" - -"@rollup/plugin-node-resolve@^11.2.1": - version "11.2.1" - resolved "http://localhost:4873/@rollup%2fplugin-node-resolve/-/plugin-node-resolve-11.2.1.tgz#82aa59397a29cd4e13248b106e6a4a1880362a60" - integrity 
sha512-yc2n43jcqVyGE2sqV5/YCmocy9ArjVAP/BeXyTtADTBBX6V0e5UMqwO8CdQ0kzjb6zu5P1qMzsScCMRvE9OlVg== - dependencies: - "@rollup/pluginutils" "^3.1.0" - "@types/resolve" "1.17.1" - builtin-modules "^3.1.0" - deepmerge "^4.2.2" - is-module "^1.0.0" - resolve "^1.19.0" - -"@rollup/plugin-replace@^2.4.1": - version "2.4.2" - resolved "http://localhost:4873/@rollup%2fplugin-replace/-/plugin-replace-2.4.2.tgz#a2d539314fbc77c244858faa523012825068510a" - integrity sha512-IGcu+cydlUMZ5En85jxHH4qj2hta/11BHq95iHEyb2sbgiN0eCdzvUcHw5gt9pBL5lTi4JDYJ1acCoMGpTvEZg== - dependencies: - "@rollup/pluginutils" "^3.1.0" - magic-string "^0.25.7" - -"@rollup/pluginutils@^3.1.0": - version "3.1.0" - resolved "http://localhost:4873/@rollup%2fpluginutils/-/pluginutils-3.1.0.tgz#706b4524ee6dc8b103b3c995533e5ad680c02b9b" - integrity sha512-GksZ6pr6TpIjHm8h9lSQ8pi8BE9VeubNT0OMJ3B5uZJ8pz73NPiqOtCog/x2/QzM1ENChPKxMDhiQuRHsqc+lg== - dependencies: - "@types/estree" "0.0.39" - estree-walker "^1.0.1" - picomatch "^2.2.2" - -"@rushstack/eslint-patch@^1.1.0": - version "1.2.0" - resolved "http://localhost:4873/@rushstack%2feslint-patch/-/eslint-patch-1.2.0.tgz#8be36a1f66f3265389e90b5f9c9962146758f728" - integrity sha512-sXo/qW2/pAcmT43VoRKOJbDOfV3cYpq3szSVfIThQXNt+E4DfKj361vaAt3c88U5tPUxzEswam7GW48PJqtKAg== - -"@sinclair/typebox@^0.24.1": - version "0.24.44" - resolved "http://localhost:4873/@sinclair%2ftypebox/-/typebox-0.24.44.tgz#0a0aa3bf4a155a678418527342a3ee84bd8caa5c" - integrity sha512-ka0W0KN5i6LfrSocduwliMMpqVgohtPFidKdMEOUjoOFCHcOOYkKsPRxfs5f15oPNHTm6ERAm0GV/+/LTKeiWg== - -"@sinonjs/commons@^1.7.0": - version "1.8.3" - resolved "http://localhost:4873/@sinonjs%2fcommons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" - integrity sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ== - dependencies: - type-detect "4.0.8" - -"@sinonjs/fake-timers@^8.0.1": - version "8.1.0" - resolved 
"http://localhost:4873/@sinonjs%2ffake-timers/-/fake-timers-8.1.0.tgz#3fdc2b6cb58935b21bfb8d1625eb1300484316e7" - integrity sha512-OAPJUAtgeINhh/TAlUID4QTs53Njm7xzddaVlEs/SXwgtiD1tW22zAB/W1wdqfrpmikgaWQ9Fw6Ws+hsiRm5Vg== - dependencies: - "@sinonjs/commons" "^1.7.0" - -"@surma/rollup-plugin-off-main-thread@^2.2.3": - version "2.2.3" - resolved "http://localhost:4873/@surma%2frollup-plugin-off-main-thread/-/rollup-plugin-off-main-thread-2.2.3.tgz#ee34985952ca21558ab0d952f00298ad2190c053" - integrity sha512-lR8q/9W7hZpMWweNiAKU7NQerBnzQQLvi8qnTDU/fxItPhtZVMbPV3lbCwjhIlNBe9Bbr5V+KHshvWmVSG9cxQ== - dependencies: - ejs "^3.1.6" - json5 "^2.2.0" - magic-string "^0.25.0" - string.prototype.matchall "^4.0.6" - -"@svgr/babel-plugin-add-jsx-attribute@^5.4.0": - version "5.4.0" - resolved "http://localhost:4873/@svgr%2fbabel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-5.4.0.tgz#81ef61947bb268eb9d50523446f9c638fb355906" - integrity sha512-ZFf2gs/8/6B8PnSofI0inYXr2SDNTDScPXhN7k5EqD4aZ3gi6u+rbmZHVB8IM3wDyx8ntKACZbtXSm7oZGRqVg== - -"@svgr/babel-plugin-remove-jsx-attribute@^5.4.0": - version "5.4.0" - resolved "http://localhost:4873/@svgr%2fbabel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-5.4.0.tgz#6b2c770c95c874654fd5e1d5ef475b78a0a962ef" - integrity sha512-yaS4o2PgUtwLFGTKbsiAy6D0o3ugcUhWK0Z45umJ66EPWunAz9fuFw2gJuje6wqQvQWOTJvIahUwndOXb7QCPg== - -"@svgr/babel-plugin-remove-jsx-empty-expression@^5.0.1": - version "5.0.1" - resolved "http://localhost:4873/@svgr%2fbabel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-5.0.1.tgz#25621a8915ed7ad70da6cea3d0a6dbc2ea933efd" - integrity sha512-LA72+88A11ND/yFIMzyuLRSMJ+tRKeYKeQ+mR3DcAZ5I4h5CPWN9AHyUzJbWSYp/u2u0xhmgOe0+E41+GjEueA== - -"@svgr/babel-plugin-replace-jsx-attribute-value@^5.0.1": - version "5.0.1" - resolved 
"http://localhost:4873/@svgr%2fbabel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-5.0.1.tgz#0b221fc57f9fcd10e91fe219e2cd0dd03145a897" - integrity sha512-PoiE6ZD2Eiy5mK+fjHqwGOS+IXX0wq/YDtNyIgOrc6ejFnxN4b13pRpiIPbtPwHEc+NT2KCjteAcq33/F1Y9KQ== - -"@svgr/babel-plugin-svg-dynamic-title@^5.4.0": - version "5.4.0" - resolved "http://localhost:4873/@svgr%2fbabel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-5.4.0.tgz#139b546dd0c3186b6e5db4fefc26cb0baea729d7" - integrity sha512-zSOZH8PdZOpuG1ZVx/cLVePB2ibo3WPpqo7gFIjLV9a0QsuQAzJiwwqmuEdTaW2pegyBE17Uu15mOgOcgabQZg== - -"@svgr/babel-plugin-svg-em-dimensions@^5.4.0": - version "5.4.0" - resolved "http://localhost:4873/@svgr%2fbabel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-5.4.0.tgz#6543f69526632a133ce5cabab965deeaea2234a0" - integrity sha512-cPzDbDA5oT/sPXDCUYoVXEmm3VIoAWAPT6mSPTJNbQaBNUuEKVKyGH93oDY4e42PYHRW67N5alJx/eEol20abw== - -"@svgr/babel-plugin-transform-react-native-svg@^5.4.0": - version "5.4.0" - resolved "http://localhost:4873/@svgr%2fbabel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-5.4.0.tgz#00bf9a7a73f1cad3948cdab1f8dfb774750f8c80" - integrity sha512-3eYP/SaopZ41GHwXma7Rmxcv9uRslRDTY1estspeB1w1ueZWd/tPlMfEOoccYpEMZU3jD4OU7YitnXcF5hLW2Q== - -"@svgr/babel-plugin-transform-svg-component@^5.5.0": - version "5.5.0" - resolved "http://localhost:4873/@svgr%2fbabel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-5.5.0.tgz#583a5e2a193e214da2f3afeb0b9e8d3250126b4a" - integrity sha512-q4jSH1UUvbrsOtlo/tKcgSeiCHRSBdXoIoqX1pgcKK/aU3JD27wmMKwGtpB8qRYUYoyXvfGxUVKchLuR5pB3rQ== - -"@svgr/babel-preset@^5.5.0": - version "5.5.0" - resolved "http://localhost:4873/@svgr%2fbabel-preset/-/babel-preset-5.5.0.tgz#8af54f3e0a8add7b1e2b0fcd5a882c55393df327" - integrity sha512-4FiXBjvQ+z2j7yASeGPEi8VD/5rrGQk4Xrq3EdJmoZgz/tpqChpo5hgXDvmEauwtvOc52q8ghhZK4Oy7qph4ig== - dependencies: - "@svgr/babel-plugin-add-jsx-attribute" 
"^5.4.0" - "@svgr/babel-plugin-remove-jsx-attribute" "^5.4.0" - "@svgr/babel-plugin-remove-jsx-empty-expression" "^5.0.1" - "@svgr/babel-plugin-replace-jsx-attribute-value" "^5.0.1" - "@svgr/babel-plugin-svg-dynamic-title" "^5.4.0" - "@svgr/babel-plugin-svg-em-dimensions" "^5.4.0" - "@svgr/babel-plugin-transform-react-native-svg" "^5.4.0" - "@svgr/babel-plugin-transform-svg-component" "^5.5.0" - -"@svgr/core@^5.5.0": - version "5.5.0" - resolved "http://localhost:4873/@svgr%2fcore/-/core-5.5.0.tgz#82e826b8715d71083120fe8f2492ec7d7874a579" - integrity sha512-q52VOcsJPvV3jO1wkPtzTuKlvX7Y3xIcWRpCMtBF3MrteZJtBfQw/+u0B1BHy5ColpQc1/YVTrPEtSYIMNZlrQ== - dependencies: - "@svgr/plugin-jsx" "^5.5.0" - camelcase "^6.2.0" - cosmiconfig "^7.0.0" - -"@svgr/hast-util-to-babel-ast@^5.5.0": - version "5.5.0" - resolved "http://localhost:4873/@svgr%2fhast-util-to-babel-ast/-/hast-util-to-babel-ast-5.5.0.tgz#5ee52a9c2533f73e63f8f22b779f93cd432a5461" - integrity sha512-cAaR/CAiZRB8GP32N+1jocovUtvlj0+e65TB50/6Lcime+EA49m/8l+P2ko+XPJ4dw3xaPS3jOL4F2X4KWxoeQ== - dependencies: - "@babel/types" "^7.12.6" - -"@svgr/plugin-jsx@^5.5.0": - version "5.5.0" - resolved "http://localhost:4873/@svgr%2fplugin-jsx/-/plugin-jsx-5.5.0.tgz#1aa8cd798a1db7173ac043466d7b52236b369000" - integrity sha512-V/wVh33j12hGh05IDg8GpIUXbjAPnTdPTKuP4VNLggnwaHMPNQNae2pRnyTAILWCQdz5GyMqtO488g7CKM8CBA== - dependencies: - "@babel/core" "^7.12.3" - "@svgr/babel-preset" "^5.5.0" - "@svgr/hast-util-to-babel-ast" "^5.5.0" - svg-parser "^2.0.2" - -"@svgr/plugin-svgo@^5.5.0": - version "5.5.0" - resolved "http://localhost:4873/@svgr%2fplugin-svgo/-/plugin-svgo-5.5.0.tgz#02da55d85320549324e201c7b2e53bf431fcc246" - integrity sha512-r5swKk46GuQl4RrVejVwpeeJaydoxkdwkM1mBKOgJLBUJPGaLci6ylg/IjhrRsREKDkr4kbMWdgOtbXEh0fyLQ== - dependencies: - cosmiconfig "^7.0.0" - deepmerge "^4.2.2" - svgo "^1.2.2" - -"@svgr/webpack@^5.5.0": - version "5.5.0" - resolved 
"http://localhost:4873/@svgr%2fwebpack/-/webpack-5.5.0.tgz#aae858ee579f5fa8ce6c3166ef56c6a1b381b640" - integrity sha512-DOBOK255wfQxguUta2INKkzPj6AIS6iafZYiYmHn6W3pHlycSRRlvWKCfLDG10fXfLWqE3DJHgRUOyJYmARa7g== - dependencies: - "@babel/core" "^7.12.3" - "@babel/plugin-transform-react-constant-elements" "^7.12.1" - "@babel/preset-env" "^7.12.1" - "@babel/preset-react" "^7.12.5" - "@svgr/core" "^5.5.0" - "@svgr/plugin-jsx" "^5.5.0" - "@svgr/plugin-svgo" "^5.5.0" - loader-utils "^2.0.0" - -"@testing-library/dom@^8.5.0": - version "8.18.1" - resolved "http://localhost:4873/@testing-library%2fdom/-/dom-8.18.1.tgz#80f91be02bc171fe5a3a7003f88207be31ac2cf3" - integrity sha512-oEvsm2B/WtcHKE+IcEeeCqNU/ltFGaVyGbpcm4g/2ytuT49jrlH9x5qRKL/H3A6yfM4YAbSbC0ceT5+9CEXnLg== - dependencies: - "@babel/code-frame" "^7.10.4" - "@babel/runtime" "^7.12.5" - "@types/aria-query" "^4.2.0" - aria-query "^5.0.0" - chalk "^4.1.0" - dom-accessibility-api "^0.5.9" - lz-string "^1.4.4" - pretty-format "^27.0.2" - -"@testing-library/jest-dom@^5.16.5": - version "5.16.5" - resolved "http://localhost:4873/@testing-library%2fjest-dom/-/jest-dom-5.16.5.tgz#3912846af19a29b2dbf32a6ae9c31ef52580074e" - integrity sha512-N5ixQ2qKpi5OLYfwQmUb/5mSV9LneAcaUfp32pn4yCnpb8r/Yz0pXFPck21dIicKmi+ta5WRAknkZCfA8refMA== - dependencies: - "@adobe/css-tools" "^4.0.1" - "@babel/runtime" "^7.9.2" - "@types/testing-library__jest-dom" "^5.9.1" - aria-query "^5.0.0" - chalk "^3.0.0" - css.escape "^1.5.1" - dom-accessibility-api "^0.5.6" - lodash "^4.17.15" - redent "^3.0.0" - -"@testing-library/react@^13.4.0": - version "13.4.0" - resolved "http://localhost:4873/@testing-library%2freact/-/react-13.4.0.tgz#6a31e3bf5951615593ad984e96b9e5e2d9380966" - integrity sha512-sXOGON+WNTh3MLE9rve97ftaZukN3oNf2KjDy7YTx6hcTO2uuLHuCGynMDhFwGw/jYf4OJ2Qk0i4i79qMNNkyw== - dependencies: - "@babel/runtime" "^7.12.5" - "@testing-library/dom" "^8.5.0" - "@types/react-dom" "^18.0.0" - -"@testing-library/user-event@^13.5.0": - version "13.5.0" - 
resolved "http://localhost:4873/@testing-library%2fuser-event/-/user-event-13.5.0.tgz#69d77007f1e124d55314a2b73fd204b333b13295" - integrity sha512-5Kwtbo3Y/NowpkbRuSepbyMFkZmHgD+vPzYB/RJ4oxt5Gj/avFFBYjhw27cqSVPVw/3a67NK1PbiIr9k4Gwmdg== - dependencies: - "@babel/runtime" "^7.12.5" - -"@tootallnate/once@1": - version "1.1.2" - resolved "http://localhost:4873/@tootallnate%2fonce/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" - integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== - -"@trysound/sax@0.2.0": - version "0.2.0" - resolved "http://localhost:4873/@trysound%2fsax/-/sax-0.2.0.tgz#cccaab758af56761eb7bf37af6f03f326dd798ad" - integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA== - -"@types/aria-query@^4.2.0": - version "4.2.2" - resolved "http://localhost:4873/@types%2faria-query/-/aria-query-4.2.2.tgz#ed4e0ad92306a704f9fb132a0cfcf77486dbe2bc" - integrity sha512-HnYpAE1Y6kRyKM/XkEuiRQhTHvkzMBurTHnpFLYLBGPIylZNPs9jJcuOOYWxPLJCSEtmZT0Y8rHDokKN7rRTig== - -"@types/babel__core@^7.0.0", "@types/babel__core@^7.1.14": - version "7.1.19" - resolved "http://localhost:4873/@types%2fbabel__core/-/babel__core-7.1.19.tgz#7b497495b7d1b4812bdb9d02804d0576f43ee460" - integrity sha512-WEOTgRsbYkvA/KCsDwVEGkd7WAr1e3g31VHQ8zy5gul/V1qKullU/BU5I68X5v7V3GnB9eotmom4v5a5gjxorw== - dependencies: - "@babel/parser" "^7.1.0" - "@babel/types" "^7.0.0" - "@types/babel__generator" "*" - "@types/babel__template" "*" - "@types/babel__traverse" "*" - -"@types/babel__generator@*": - version "7.6.4" - resolved "http://localhost:4873/@types%2fbabel__generator/-/babel__generator-7.6.4.tgz#1f20ce4c5b1990b37900b63f050182d28c2439b7" - integrity sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg== - dependencies: - "@babel/types" "^7.0.0" - -"@types/babel__template@*": - version "7.4.1" - resolved 
"http://localhost:4873/@types%2fbabel__template/-/babel__template-7.4.1.tgz#3d1a48fd9d6c0edfd56f2ff578daed48f36c8969" - integrity sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g== - dependencies: - "@babel/parser" "^7.1.0" - "@babel/types" "^7.0.0" - -"@types/babel__traverse@*", "@types/babel__traverse@^7.0.4", "@types/babel__traverse@^7.0.6": - version "7.18.2" - resolved "http://localhost:4873/@types%2fbabel__traverse/-/babel__traverse-7.18.2.tgz#235bf339d17185bdec25e024ca19cce257cc7309" - integrity sha512-FcFaxOr2V5KZCviw1TnutEMVUVsGt4D2hP1TAfXZAMKuHYW3xQhe3jTxNPWutgCJ3/X1c5yX8ZoGVEItxKbwBg== - dependencies: - "@babel/types" "^7.3.0" - -"@types/body-parser@*": - version "1.19.2" - resolved "http://localhost:4873/@types%2fbody-parser/-/body-parser-1.19.2.tgz#aea2059e28b7658639081347ac4fab3de166e6f0" - integrity sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g== - dependencies: - "@types/connect" "*" - "@types/node" "*" - -"@types/bonjour@^3.5.9": - version "3.5.10" - resolved "http://localhost:4873/@types%2fbonjour/-/bonjour-3.5.10.tgz#0f6aadfe00ea414edc86f5d106357cda9701e275" - integrity sha512-p7ienRMiS41Nu2/igbJxxLDWrSZ0WxM8UQgCeO9KhoVF7cOVFkrKsiDr1EsJIla8vV3oEEjGcz11jc5yimhzZw== - dependencies: - "@types/node" "*" - -"@types/connect-history-api-fallback@^1.3.5": - version "1.3.5" - resolved "http://localhost:4873/@types%2fconnect-history-api-fallback/-/connect-history-api-fallback-1.3.5.tgz#d1f7a8a09d0ed5a57aee5ae9c18ab9b803205dae" - integrity sha512-h8QJa8xSb1WD4fpKBDcATDNGXghFj6/3GRWG6dhmRcu0RX1Ubasur2Uvx5aeEwlf0MwblEC2bMzzMQntxnw/Cw== - dependencies: - "@types/express-serve-static-core" "*" - "@types/node" "*" - -"@types/connect@*": - version "3.4.35" - resolved "http://localhost:4873/@types%2fconnect/-/connect-3.4.35.tgz#5fcf6ae445e4021d1fc2219a4873cc73a3bb2ad1" - integrity 
sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ== - dependencies: - "@types/node" "*" - -"@types/eslint-scope@^3.7.3": - version "3.7.4" - resolved "http://localhost:4873/@types%2feslint-scope/-/eslint-scope-3.7.4.tgz#37fc1223f0786c39627068a12e94d6e6fc61de16" - integrity sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA== - dependencies: - "@types/eslint" "*" - "@types/estree" "*" - -"@types/eslint@*", "@types/eslint@^7.29.0 || ^8.4.1": - version "8.4.6" - resolved "http://localhost:4873/@types%2feslint/-/eslint-8.4.6.tgz#7976f054c1bccfcf514bff0564c0c41df5c08207" - integrity sha512-/fqTbjxyFUaYNO7VcW5g+4npmqVACz1bB7RTHYuLj+PRjw9hrCwrUXVQFpChUS0JsyEFvMZ7U/PfmvWgxJhI9g== - dependencies: - "@types/estree" "*" - "@types/json-schema" "*" - -"@types/estree@*": - version "1.0.0" - resolved "http://localhost:4873/@types%2festree/-/estree-1.0.0.tgz#5fb2e536c1ae9bf35366eed879e827fa59ca41c2" - integrity sha512-WulqXMDUTYAXCjZnk6JtIHPigp55cVtDgDrO2gHRwhyJto21+1zbVCtOYB2L1F9w4qCQ0rOGWBnBe0FNTiEJIQ== - -"@types/estree@0.0.39": - version "0.0.39" - resolved "http://localhost:4873/@types%2festree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f" - integrity sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== - -"@types/estree@^0.0.51": - version "0.0.51" - resolved "http://localhost:4873/@types%2festree/-/estree-0.0.51.tgz#cfd70924a25a3fd32b218e5e420e6897e1ac4f40" - integrity sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ== - -"@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.18": - version "4.17.31" - resolved "http://localhost:4873/@types%2fexpress-serve-static-core/-/express-serve-static-core-4.17.31.tgz#a1139efeab4e7323834bb0226e62ac019f474b2f" - integrity sha512-DxMhY+NAsTwMMFHBTtJFNp5qiHKJ7TeqOo23zVEM9alT1Ml27Q3xcTH0xwxn7Q0BbMcVEJOs/7aQtUWupUQN3Q== - 
dependencies: - "@types/node" "*" - "@types/qs" "*" - "@types/range-parser" "*" - -"@types/express@*", "@types/express@^4.17.13": - version "4.17.14" - resolved "http://localhost:4873/@types%2fexpress/-/express-4.17.14.tgz#143ea0557249bc1b3b54f15db4c81c3d4eb3569c" - integrity sha512-TEbt+vaPFQ+xpxFLFssxUDXj5cWCxZJjIcB7Yg0k0GMHGtgtQgpvx/MUQUeAkNbA9AAGrwkAsoeItdTgS7FMyg== - dependencies: - "@types/body-parser" "*" - "@types/express-serve-static-core" "^4.17.18" - "@types/qs" "*" - "@types/serve-static" "*" - -"@types/graceful-fs@^4.1.2": - version "4.1.5" - resolved "http://localhost:4873/@types%2fgraceful-fs/-/graceful-fs-4.1.5.tgz#21ffba0d98da4350db64891f92a9e5db3cdb4e15" - integrity sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw== - dependencies: - "@types/node" "*" - -"@types/html-minifier-terser@^6.0.0": - version "6.1.0" - resolved "http://localhost:4873/@types%2fhtml-minifier-terser/-/html-minifier-terser-6.1.0.tgz#4fc33a00c1d0c16987b1a20cf92d20614c55ac35" - integrity sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg== - -"@types/http-proxy@^1.17.8": - version "1.17.9" - resolved "http://localhost:4873/@types%2fhttp-proxy/-/http-proxy-1.17.9.tgz#7f0e7931343761efde1e2bf48c40f02f3f75705a" - integrity sha512-QsbSjA/fSk7xB+UXlCT3wHBy5ai9wOcNDWwZAtud+jXhwOM3l+EYZh8Lng4+/6n8uar0J7xILzqftJdJ/Wdfkw== - dependencies: - "@types/node" "*" - -"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": - version "2.0.4" - resolved "http://localhost:4873/@types%2fistanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" - integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== - -"@types/istanbul-lib-report@*": - version "3.0.0" - resolved 
"http://localhost:4873/@types%2fistanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" - integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== - dependencies: - "@types/istanbul-lib-coverage" "*" - -"@types/istanbul-reports@^3.0.0": - version "3.0.1" - resolved "http://localhost:4873/@types%2fistanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff" - integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw== - dependencies: - "@types/istanbul-lib-report" "*" - -"@types/jest@*": - version "29.1.2" - resolved "http://localhost:4873/@types%2fjest/-/jest-29.1.2.tgz#7ad8077043ab5f6c108c8111bcc1d224e5600a87" - integrity sha512-y+nlX0h87U0R+wsGn6EBuoRWYyv3KFtwRNP3QWp9+k2tJ2/bqcGS3UxD7jgT+tiwJWWq3UsyV4Y+T6rsMT4XMg== - dependencies: - expect "^29.0.0" - pretty-format "^29.0.0" - -"@types/json-schema@*", "@types/json-schema@^7.0.4", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": - version "7.0.11" - resolved "http://localhost:4873/@types%2fjson-schema/-/json-schema-7.0.11.tgz#d421b6c527a3037f7c84433fd2c4229e016863d3" - integrity sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ== - -"@types/json5@^0.0.29": - version "0.0.29" - resolved "http://localhost:4873/@types%2fjson5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" - integrity sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ== - -"@types/mime@*": - version "3.0.1" - resolved "http://localhost:4873/@types%2fmime/-/mime-3.0.1.tgz#5f8f2bca0a5863cb69bc0b0acd88c96cb1d4ae10" - integrity sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA== - -"@types/node@*": - version "18.8.3" - resolved 
"http://localhost:4873/@types%2fnode/-/node-18.8.3.tgz#ce750ab4017effa51aed6a7230651778d54e327c" - integrity sha512-0os9vz6BpGwxGe9LOhgP/ncvYN5Tx1fNcd2TM3rD/aCGBkysb+ZWpXEocG24h6ZzOi13+VB8HndAQFezsSOw1w== - -"@types/parse-json@^4.0.0": - version "4.0.0" - resolved "http://localhost:4873/@types%2fparse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0" - integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== - -"@types/prettier@^2.1.5": - version "2.7.1" - resolved "http://localhost:4873/@types%2fprettier/-/prettier-2.7.1.tgz#dfd20e2dc35f027cdd6c1908e80a5ddc7499670e" - integrity sha512-ri0UmynRRvZiiUJdiz38MmIblKK+oH30MztdBVR95dv/Ubw6neWSb8u1XpRb72L4qsZOhz+L+z9JD40SJmfWow== - -"@types/prop-types@*": - version "15.7.5" - resolved "http://localhost:4873/@types%2fprop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf" - integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w== - -"@types/q@^1.5.1": - version "1.5.5" - resolved "http://localhost:4873/@types%2fq/-/q-1.5.5.tgz#75a2a8e7d8ab4b230414505d92335d1dcb53a6df" - integrity sha512-L28j2FcJfSZOnL1WBjDYp2vUHCeIFlyYI/53EwD/rKUBQ7MtUUfbQWiyKJGpcnv4/WgrhWsFKrcPstcAt/J0tQ== - -"@types/qs@*": - version "6.9.7" - resolved "http://localhost:4873/@types%2fqs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb" - integrity sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw== - -"@types/range-parser@*": - version "1.2.4" - resolved "http://localhost:4873/@types%2frange-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc" - integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw== - -"@types/react-dom@^18.0.0": - version "18.0.6" - resolved "http://localhost:4873/@types%2freact-dom/-/react-dom-18.0.6.tgz#36652900024842b74607a17786b6662dd1e103a1" - integrity 
sha512-/5OFZgfIPSwy+YuIBP/FgJnQnsxhZhjjrnxudMddeblOouIodEQ75X14Rr4wGSG/bknL+Omy9iWlLo1u/9GzAA== - dependencies: - "@types/react" "*" - -"@types/react@*": - version "18.0.21" - resolved "http://localhost:4873/@types%2freact/-/react-18.0.21.tgz#b8209e9626bb00a34c76f55482697edd2b43cc67" - integrity sha512-7QUCOxvFgnD5Jk8ZKlUAhVcRj7GuJRjnjjiY/IUBWKgOlnvDvTMLD4RTF7NPyVmbRhNrbomZiOepg7M/2Kj1mA== - dependencies: - "@types/prop-types" "*" - "@types/scheduler" "*" - csstype "^3.0.2" - -"@types/resolve@1.17.1": - version "1.17.1" - resolved "http://localhost:4873/@types%2fresolve/-/resolve-1.17.1.tgz#3afd6ad8967c77e4376c598a82ddd58f46ec45d6" - integrity sha512-yy7HuzQhj0dhGpD8RLXSZWEkLsV9ibvxvi6EiJ3bkqLAO1RGo0WbkWQiwpRlSFymTJRz0d3k5LM3kkx8ArDbLw== - dependencies: - "@types/node" "*" - -"@types/retry@0.12.0": - version "0.12.0" - resolved "http://localhost:4873/@types%2fretry/-/retry-0.12.0.tgz#2b35eccfcee7d38cd72ad99232fbd58bffb3c84d" - integrity sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA== - -"@types/scheduler@*": - version "0.16.2" - resolved "http://localhost:4873/@types%2fscheduler/-/scheduler-0.16.2.tgz#1a62f89525723dde24ba1b01b092bf5df8ad4d39" - integrity sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew== - -"@types/serve-index@^1.9.1": - version "1.9.1" - resolved "http://localhost:4873/@types%2fserve-index/-/serve-index-1.9.1.tgz#1b5e85370a192c01ec6cec4735cf2917337a6278" - integrity sha512-d/Hs3nWDxNL2xAczmOVZNj92YZCS6RGxfBPjKzuu/XirCgXdpKEb88dYNbrYGint6IVWLNP+yonwVAuRC0T2Dg== - dependencies: - "@types/express" "*" - -"@types/serve-static@*", "@types/serve-static@^1.13.10": - version "1.15.0" - resolved "http://localhost:4873/@types%2fserve-static/-/serve-static-1.15.0.tgz#c7930ff61afb334e121a9da780aac0d9b8f34155" - integrity sha512-z5xyF6uh8CbjAu9760KDKsH2FcDxZ2tFCsA4HIMWE6IkiYMXfVoa+4f9KX+FN0ZLsaMw1WNG2ETLA6N+/YA+cg== - dependencies: - "@types/mime" "*" - "@types/node" 
"*" - -"@types/sockjs@^0.3.33": - version "0.3.33" - resolved "http://localhost:4873/@types%2fsockjs/-/sockjs-0.3.33.tgz#570d3a0b99ac995360e3136fd6045113b1bd236f" - integrity sha512-f0KEEe05NvUnat+boPTZ0dgaLZ4SfSouXUgv5noUiefG2ajgKjmETo9ZJyuqsl7dfl2aHlLJUiki6B4ZYldiiw== - dependencies: - "@types/node" "*" - -"@types/stack-utils@^2.0.0": - version "2.0.1" - resolved "http://localhost:4873/@types%2fstack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c" - integrity sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw== - -"@types/testing-library__jest-dom@^5.9.1": - version "5.14.5" - resolved "http://localhost:4873/@types%2ftesting-library__jest-dom/-/testing-library__jest-dom-5.14.5.tgz#d113709c90b3c75fdb127ec338dad7d5f86c974f" - integrity sha512-SBwbxYoyPIvxHbeHxTZX2Pe/74F/tX2/D3mMvzabdeJ25bBojfW0TyB8BHrbq/9zaaKICJZjLP+8r6AeZMFCuQ== - dependencies: - "@types/jest" "*" - -"@types/trusted-types@^2.0.2": - version "2.0.2" - resolved "http://localhost:4873/@types%2ftrusted-types/-/trusted-types-2.0.2.tgz#fc25ad9943bcac11cceb8168db4f275e0e72e756" - integrity sha512-F5DIZ36YVLE+PN+Zwws4kJogq47hNgX3Nx6WyDJ3kcplxyke3XIzB8uK5n/Lpm1HBsbGzd6nmGehL8cPekP+Tg== - -"@types/ws@^8.5.1": - version "8.5.3" - resolved "http://localhost:4873/@types%2fws/-/ws-8.5.3.tgz#7d25a1ffbecd3c4f2d35068d0b283c037003274d" - integrity sha512-6YOoWjruKj1uLf3INHH7D3qTXwFfEsg1kf3c0uDdSBJwfa/llkwIjrAGV7j7mVgGNbzTQ3HiHKKDXl6bJPD97w== - dependencies: - "@types/node" "*" - -"@types/yargs-parser@*": - version "21.0.0" - resolved "http://localhost:4873/@types%2fyargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" - integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA== - -"@types/yargs@^16.0.0": - version "16.0.4" - resolved "http://localhost:4873/@types%2fyargs/-/yargs-16.0.4.tgz#26aad98dd2c2a38e421086ea9ad42b9e51642977" - integrity 
sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw== - dependencies: - "@types/yargs-parser" "*" - -"@types/yargs@^17.0.8": - version "17.0.13" - resolved "http://localhost:4873/@types%2fyargs/-/yargs-17.0.13.tgz#34cced675ca1b1d51fcf4d34c3c6f0fa142a5c76" - integrity sha512-9sWaruZk2JGxIQU+IhI1fhPYRcQ0UuTNuKuCW9bR5fp7qi2Llf7WDzNa17Cy7TKnh3cdxDOiyTu6gaLS0eDatg== - dependencies: - "@types/yargs-parser" "*" - -"@typescript-eslint/eslint-plugin@^5.5.0": - version "5.39.0" - resolved "http://localhost:4873/@typescript-eslint%2feslint-plugin/-/eslint-plugin-5.39.0.tgz#778b2d9e7f293502c7feeea6c74dca8eb3e67511" - integrity sha512-xVfKOkBm5iWMNGKQ2fwX5GVgBuHmZBO1tCRwXmY5oAIsPscfwm2UADDuNB8ZVYCtpQvJK4xpjrK7jEhcJ0zY9A== - dependencies: - "@typescript-eslint/scope-manager" "5.39.0" - "@typescript-eslint/type-utils" "5.39.0" - "@typescript-eslint/utils" "5.39.0" - debug "^4.3.4" - ignore "^5.2.0" - regexpp "^3.2.0" - semver "^7.3.7" - tsutils "^3.21.0" - -"@typescript-eslint/experimental-utils@^5.0.0": - version "5.39.0" - resolved "http://localhost:4873/@typescript-eslint%2fexperimental-utils/-/experimental-utils-5.39.0.tgz#9263bb72b57449cc2f07ffb7fd4e12d0160b7f5e" - integrity sha512-n5N9kG/oGu2xXhHzsWzn94s6CWoiUj59FPU2dF2IQZxPftw+q6Jm5sV2vj5qTgAElRooHhrgtl2gxBQDCPt6WA== - dependencies: - "@typescript-eslint/utils" "5.39.0" - -"@typescript-eslint/parser@^5.5.0": - version "5.39.0" - resolved "http://localhost:4873/@typescript-eslint%2fparser/-/parser-5.39.0.tgz#93fa0bc980a3a501e081824f6097f7ca30aaa22b" - integrity sha512-PhxLjrZnHShe431sBAGHaNe6BDdxAASDySgsBCGxcBecVCi8NQWxQZMcizNA4g0pN51bBAn/FUfkWG3SDVcGlA== - dependencies: - "@typescript-eslint/scope-manager" "5.39.0" - "@typescript-eslint/types" "5.39.0" - "@typescript-eslint/typescript-estree" "5.39.0" - debug "^4.3.4" - -"@typescript-eslint/scope-manager@5.39.0": - version "5.39.0" - resolved 
"http://localhost:4873/@typescript-eslint%2fscope-manager/-/scope-manager-5.39.0.tgz#873e1465afa3d6c78d8ed2da68aed266a08008d0" - integrity sha512-/I13vAqmG3dyqMVSZPjsbuNQlYS082Y7OMkwhCfLXYsmlI0ca4nkL7wJ/4gjX70LD4P8Hnw1JywUVVAwepURBw== - dependencies: - "@typescript-eslint/types" "5.39.0" - "@typescript-eslint/visitor-keys" "5.39.0" - -"@typescript-eslint/type-utils@5.39.0": - version "5.39.0" - resolved "http://localhost:4873/@typescript-eslint%2ftype-utils/-/type-utils-5.39.0.tgz#0a8c00f95dce4335832ad2dc6bc431c14e32a0a6" - integrity sha512-KJHJkOothljQWzR3t/GunL0TPKY+fGJtnpl+pX+sJ0YiKTz3q2Zr87SGTmFqsCMFrLt5E0+o+S6eQY0FAXj9uA== - dependencies: - "@typescript-eslint/typescript-estree" "5.39.0" - "@typescript-eslint/utils" "5.39.0" - debug "^4.3.4" - tsutils "^3.21.0" - -"@typescript-eslint/types@5.39.0": - version "5.39.0" - resolved "http://localhost:4873/@typescript-eslint%2ftypes/-/types-5.39.0.tgz#f4e9f207ebb4579fd854b25c0bf64433bb5ed78d" - integrity sha512-gQMZrnfEBFXK38hYqt8Lkwt8f4U6yq+2H5VDSgP/qiTzC8Nw8JO3OuSUOQ2qW37S/dlwdkHDntkZM6SQhKyPhw== - -"@typescript-eslint/typescript-estree@5.39.0": - version "5.39.0" - resolved "http://localhost:4873/@typescript-eslint%2ftypescript-estree/-/typescript-estree-5.39.0.tgz#c0316aa04a1a1f4f7f9498e3c13ef1d3dc4cf88b" - integrity sha512-qLFQP0f398sdnogJoLtd43pUgB18Q50QSA+BTE5h3sUxySzbWDpTSdgt4UyxNSozY/oDK2ta6HVAzvGgq8JYnA== - dependencies: - "@typescript-eslint/types" "5.39.0" - "@typescript-eslint/visitor-keys" "5.39.0" - debug "^4.3.4" - globby "^11.1.0" - is-glob "^4.0.3" - semver "^7.3.7" - tsutils "^3.21.0" - -"@typescript-eslint/utils@5.39.0", "@typescript-eslint/utils@^5.13.0": - version "5.39.0" - resolved "http://localhost:4873/@typescript-eslint%2futils/-/utils-5.39.0.tgz#b7063cca1dcf08d1d21b0d91db491161ad0be110" - integrity sha512-+DnY5jkpOpgj+EBtYPyHRjXampJfC0yUZZzfzLuUWVZvCuKqSdJVC8UhdWipIw7VKNTfwfAPiOWzYkAwuIhiAg== - dependencies: - "@types/json-schema" "^7.0.9" - "@typescript-eslint/scope-manager" "5.39.0" - 
"@typescript-eslint/types" "5.39.0" - "@typescript-eslint/typescript-estree" "5.39.0" - eslint-scope "^5.1.1" - eslint-utils "^3.0.0" - -"@typescript-eslint/visitor-keys@5.39.0": - version "5.39.0" - resolved "http://localhost:4873/@typescript-eslint%2fvisitor-keys/-/visitor-keys-5.39.0.tgz#8f41f7d241b47257b081ddba5d3ce80deaae61e2" - integrity sha512-yyE3RPwOG+XJBLrhvsxAidUgybJVQ/hG8BhiJo0k8JSAYfk/CshVcxf0HwP4Jt7WZZ6vLmxdo1p6EyN3tzFTkg== - dependencies: - "@typescript-eslint/types" "5.39.0" - eslint-visitor-keys "^3.3.0" - -"@webassemblyjs/ast@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fast/-/ast-1.11.1.tgz#2bfd767eae1a6996f432ff7e8d7fc75679c0b6a7" - integrity sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw== - dependencies: - "@webassemblyjs/helper-numbers" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - -"@webassemblyjs/floating-point-hex-parser@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2ffloating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz#f6c61a705f0fd7a6aecaa4e8198f23d9dc179e4f" - integrity sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ== - -"@webassemblyjs/helper-api-error@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fhelper-api-error/-/helper-api-error-1.11.1.tgz#1a63192d8788e5c012800ba6a7a46c705288fd16" - integrity sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg== - -"@webassemblyjs/helper-buffer@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fhelper-buffer/-/helper-buffer-1.11.1.tgz#832a900eb444884cde9a7cad467f81500f5e5ab5" - integrity sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA== - -"@webassemblyjs/helper-numbers@1.11.1": - version "1.11.1" - resolved 
"http://localhost:4873/@webassemblyjs%2fhelper-numbers/-/helper-numbers-1.11.1.tgz#64d81da219fbbba1e3bd1bfc74f6e8c4e10a62ae" - integrity sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ== - dependencies: - "@webassemblyjs/floating-point-hex-parser" "1.11.1" - "@webassemblyjs/helper-api-error" "1.11.1" - "@xtuc/long" "4.2.2" - -"@webassemblyjs/helper-wasm-bytecode@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fhelper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz#f328241e41e7b199d0b20c18e88429c4433295e1" - integrity sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q== - -"@webassemblyjs/helper-wasm-section@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fhelper-wasm-section/-/helper-wasm-section-1.11.1.tgz#21ee065a7b635f319e738f0dd73bfbda281c097a" - integrity sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" - -"@webassemblyjs/ieee754@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fieee754/-/ieee754-1.11.1.tgz#963929e9bbd05709e7e12243a099180812992614" - integrity sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ== - dependencies: - "@xtuc/ieee754" "^1.2.0" - -"@webassemblyjs/leb128@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fleb128/-/leb128-1.11.1.tgz#ce814b45574e93d76bae1fb2644ab9cdd9527aa5" - integrity sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw== - dependencies: - "@xtuc/long" "4.2.2" - -"@webassemblyjs/utf8@1.11.1": - version "1.11.1" - resolved 
"http://localhost:4873/@webassemblyjs%2futf8/-/utf8-1.11.1.tgz#d1f8b764369e7c6e6bae350e854dec9a59f0a3ff" - integrity sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ== - -"@webassemblyjs/wasm-edit@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fwasm-edit/-/wasm-edit-1.11.1.tgz#ad206ebf4bf95a058ce9880a8c092c5dec8193d6" - integrity sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/helper-wasm-section" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" - "@webassemblyjs/wasm-opt" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" - "@webassemblyjs/wast-printer" "1.11.1" - -"@webassemblyjs/wasm-gen@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fwasm-gen/-/wasm-gen-1.11.1.tgz#86c5ea304849759b7d88c47a32f4f039ae3c8f76" - integrity sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/ieee754" "1.11.1" - "@webassemblyjs/leb128" "1.11.1" - "@webassemblyjs/utf8" "1.11.1" - -"@webassemblyjs/wasm-opt@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fwasm-opt/-/wasm-opt-1.11.1.tgz#657b4c2202f4cf3b345f8a4c6461c8c2418985f2" - integrity sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-buffer" "1.11.1" - "@webassemblyjs/wasm-gen" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" - -"@webassemblyjs/wasm-parser@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fwasm-parser/-/wasm-parser-1.11.1.tgz#86ca734534f417e9bd3c67c7a1c75d8be41fb199" - integrity 
sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/helper-api-error" "1.11.1" - "@webassemblyjs/helper-wasm-bytecode" "1.11.1" - "@webassemblyjs/ieee754" "1.11.1" - "@webassemblyjs/leb128" "1.11.1" - "@webassemblyjs/utf8" "1.11.1" - -"@webassemblyjs/wast-printer@1.11.1": - version "1.11.1" - resolved "http://localhost:4873/@webassemblyjs%2fwast-printer/-/wast-printer-1.11.1.tgz#d0c73beda8eec5426f10ae8ef55cee5e7084c2f0" - integrity sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg== - dependencies: - "@webassemblyjs/ast" "1.11.1" - "@xtuc/long" "4.2.2" - -"@xtuc/ieee754@^1.2.0": - version "1.2.0" - resolved "http://localhost:4873/@xtuc%2fieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" - integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== - -"@xtuc/long@4.2.2": - version "4.2.2" - resolved "http://localhost:4873/@xtuc%2flong/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" - integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== - -abab@^2.0.3, abab@^2.0.5: - version "2.0.6" - resolved "http://localhost:4873/abab/-/abab-2.0.6.tgz#41b80f2c871d19686216b82309231cfd3cb3d291" - integrity sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA== - -accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: - version "1.3.8" - resolved "http://localhost:4873/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" - integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== - dependencies: - mime-types "~2.1.34" - negotiator "0.6.3" - -acorn-globals@^6.0.0: - version "6.0.0" - resolved "http://localhost:4873/acorn-globals/-/acorn-globals-6.0.0.tgz#46cdd39f0f8ff08a876619b55f5ac8a6dc770b45" - integrity 
sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg== - dependencies: - acorn "^7.1.1" - acorn-walk "^7.1.1" - -acorn-import-assertions@^1.7.6: - version "1.8.0" - resolved "http://localhost:4873/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz#ba2b5939ce62c238db6d93d81c9b111b29b855e9" - integrity sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw== - -acorn-jsx@^5.3.2: - version "5.3.2" - resolved "http://localhost:4873/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" - integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== - -acorn-node@^1.8.2: - version "1.8.2" - resolved "http://localhost:4873/acorn-node/-/acorn-node-1.8.2.tgz#114c95d64539e53dede23de8b9d96df7c7ae2af8" - integrity sha512-8mt+fslDufLYntIoPAaIMUe/lrbrehIiwmR3t2k9LljIzoigEPF27eLk2hy8zSGzmR/ogr7zbRKINMo1u0yh5A== - dependencies: - acorn "^7.0.0" - acorn-walk "^7.0.0" - xtend "^4.0.2" - -acorn-walk@^7.0.0, acorn-walk@^7.1.1: - version "7.2.0" - resolved "http://localhost:4873/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" - integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== - -acorn@^7.0.0, acorn@^7.1.1: - version "7.4.1" - resolved "http://localhost:4873/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" - integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== - -acorn@^8.2.4, acorn@^8.5.0, acorn@^8.7.1, acorn@^8.8.0: - version "8.8.0" - resolved "http://localhost:4873/acorn/-/acorn-8.8.0.tgz#88c0187620435c7f6015803f5539dae05a9dbea8" - integrity sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w== - -address@^1.0.1, address@^1.1.2: - version "1.2.1" - resolved 
"http://localhost:4873/address/-/address-1.2.1.tgz#25bb61095b7522d65b357baa11bc05492d4c8acd" - integrity sha512-B+6bi5D34+fDYENiH5qOlA0cV2rAGKuWZ9LeyUUehbXy8e0VS9e498yO0Jeeh+iM+6KbfudHTFjXw2MmJD4QRA== - -adjust-sourcemap-loader@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/adjust-sourcemap-loader/-/adjust-sourcemap-loader-4.0.0.tgz#fc4a0fd080f7d10471f30a7320f25560ade28c99" - integrity sha512-OXwN5b9pCUXNQHJpwwD2qP40byEmSgzj8B4ydSN0uMNYWiFmJ6x6KwUllMmfk8Rwu/HJDFR7U8ubsWBoN0Xp0A== - dependencies: - loader-utils "^2.0.0" - regex-parser "^2.2.11" - -agent-base@6: - version "6.0.2" - resolved "http://localhost:4873/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" - integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== - dependencies: - debug "4" - -ajv-formats@^2.1.1: - version "2.1.1" - resolved "http://localhost:4873/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" - integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA== - dependencies: - ajv "^8.0.0" - -ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: - version "3.5.2" - resolved "http://localhost:4873/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" - integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== - -ajv-keywords@^5.0.0: - version "5.1.0" - resolved "http://localhost:4873/ajv-keywords/-/ajv-keywords-5.1.0.tgz#69d4d385a4733cdbeab44964a1170a88f87f0e16" - integrity sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw== - dependencies: - fast-deep-equal "^3.1.3" - -ajv@^6.10.0, ajv@^6.12.2, ajv@^6.12.4, ajv@^6.12.5: - version "6.12.6" - resolved "http://localhost:4873/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" - integrity 
sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== - dependencies: - fast-deep-equal "^3.1.1" - fast-json-stable-stringify "^2.0.0" - json-schema-traverse "^0.4.1" - uri-js "^4.2.2" - -ajv@^8.0.0, ajv@^8.6.0, ajv@^8.8.0: - version "8.11.0" - resolved "http://localhost:4873/ajv/-/ajv-8.11.0.tgz#977e91dd96ca669f54a11e23e378e33b884a565f" - integrity sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg== - dependencies: - fast-deep-equal "^3.1.1" - json-schema-traverse "^1.0.0" - require-from-string "^2.0.2" - uri-js "^4.2.2" - -ansi-escapes@^4.2.1, ansi-escapes@^4.3.1: - version "4.3.2" - resolved "http://localhost:4873/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" - integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== - dependencies: - type-fest "^0.21.3" - -ansi-html-community@^0.0.8: - version "0.0.8" - resolved "http://localhost:4873/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41" - integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw== - -ansi-regex@^5.0.1: - version "5.0.1" - resolved "http://localhost:4873/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" - integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== - -ansi-regex@^6.0.1: - version "6.0.1" - resolved "http://localhost:4873/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a" - integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA== - -ansi-styles@^3.2.1: - version "3.2.1" - resolved "http://localhost:4873/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" - integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== 
- dependencies: - color-convert "^1.9.0" - -ansi-styles@^4.0.0, ansi-styles@^4.1.0: - version "4.3.0" - resolved "http://localhost:4873/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" - integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== - dependencies: - color-convert "^2.0.1" - -ansi-styles@^5.0.0: - version "5.2.0" - resolved "http://localhost:4873/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" - integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== - -anymatch@^3.0.3, anymatch@~3.1.2: - version "3.1.2" - resolved "http://localhost:4873/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" - integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== - dependencies: - normalize-path "^3.0.0" - picomatch "^2.0.4" - -arg@^5.0.2: - version "5.0.2" - resolved "http://localhost:4873/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c" - integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg== - -argparse@^1.0.7: - version "1.0.10" - resolved "http://localhost:4873/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" - integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== - dependencies: - sprintf-js "~1.0.2" - -argparse@^2.0.1: - version "2.0.1" - resolved "http://localhost:4873/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" - integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== - -aria-query@^4.2.2: - version "4.2.2" - resolved "http://localhost:4873/aria-query/-/aria-query-4.2.2.tgz#0d2ca6c9aceb56b8977e9fed6aed7e15bbd2f83b" - integrity sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA== - 
dependencies: - "@babel/runtime" "^7.10.2" - "@babel/runtime-corejs3" "^7.10.2" - -aria-query@^5.0.0: - version "5.0.2" - resolved "http://localhost:4873/aria-query/-/aria-query-5.0.2.tgz#0b8a744295271861e1d933f8feca13f9b70cfdc1" - integrity sha512-eigU3vhqSO+Z8BKDnVLN/ompjhf3pYzecKXz8+whRy+9gZu8n1TCGfwzQUUPnqdHl9ax1Hr9031orZ+UOEYr7Q== - -array-flatten@1.1.1: - version "1.1.1" - resolved "http://localhost:4873/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" - integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== - -array-flatten@^2.1.2: - version "2.1.2" - resolved "http://localhost:4873/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" - integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== - -array-includes@^3.1.4, array-includes@^3.1.5: - version "3.1.5" - resolved "http://localhost:4873/array-includes/-/array-includes-3.1.5.tgz#2c320010db8d31031fd2a5f6b3bbd4b1aad31bdb" - integrity sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.4" - es-abstract "^1.19.5" - get-intrinsic "^1.1.1" - is-string "^1.0.7" - -array-union@^2.1.0: - version "2.1.0" - resolved "http://localhost:4873/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" - integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== - -array.prototype.flat@^1.2.5: - version "1.3.0" - resolved "http://localhost:4873/array.prototype.flat/-/array.prototype.flat-1.3.0.tgz#0b0c1567bf57b38b56b4c97b8aa72ab45e4adc7b" - integrity sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.2" - es-shim-unscopables "^1.0.0" - 
-array.prototype.flatmap@^1.3.0: - version "1.3.0" - resolved "http://localhost:4873/array.prototype.flatmap/-/array.prototype.flatmap-1.3.0.tgz#a7e8ed4225f4788a70cd910abcf0791e76a5534f" - integrity sha512-PZC9/8TKAIxcWKdyeb77EzULHPrIX/tIZebLJUQOMR1OwYosT8yggdfWScfTBCDj5utONvOuPQQumYsU2ULbkg== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.2" - es-shim-unscopables "^1.0.0" - -array.prototype.reduce@^1.0.4: - version "1.0.4" - resolved "http://localhost:4873/array.prototype.reduce/-/array.prototype.reduce-1.0.4.tgz#8167e80089f78bff70a99e20bd4201d4663b0a6f" - integrity sha512-WnM+AjG/DvLRLo4DDl+r+SvCzYtD2Jd9oeBYMcEaI7t3fFrHY9M53/wdLcTvmZNQ70IU6Htj0emFkZ5TS+lrdw== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.2" - es-array-method-boxes-properly "^1.0.0" - is-string "^1.0.7" - -asap@~2.0.6: - version "2.0.6" - resolved "http://localhost:4873/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" - integrity sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA== - -ast-types-flow@^0.0.7: - version "0.0.7" - resolved "http://localhost:4873/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad" - integrity sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag== - -async@^3.2.3: - version "3.2.4" - resolved "http://localhost:4873/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c" - integrity sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ== - -asynckit@^0.4.0: - version "0.4.0" - resolved "http://localhost:4873/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" - integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== - -at-least-node@^1.0.0: - version "1.0.0" - resolved 
"http://localhost:4873/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" - integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== - -autoprefixer@^10.4.11, autoprefixer@^10.4.12: - version "10.4.12" - resolved "http://localhost:4873/autoprefixer/-/autoprefixer-10.4.12.tgz#183f30bf0b0722af54ee5ef257f7d4320bb33129" - integrity sha512-WrCGV9/b97Pa+jtwf5UGaRjgQIg7OK3D06GnoYoZNcG1Xb8Gt3EfuKjlhh9i/VtT16g6PYjZ69jdJ2g8FxSC4Q== - dependencies: - browserslist "^4.21.4" - caniuse-lite "^1.0.30001407" - fraction.js "^4.2.0" - normalize-range "^0.1.2" - picocolors "^1.0.0" - postcss-value-parser "^4.2.0" - -axe-core@^4.4.3: - version "4.4.3" - resolved "http://localhost:4873/axe-core/-/axe-core-4.4.3.tgz#11c74d23d5013c0fa5d183796729bc3482bd2f6f" - integrity sha512-32+ub6kkdhhWick/UjvEwRchgoetXqTK14INLqbGm5U2TzBkBNF3nQtLYm8ovxSkQWArjEQvftCKryjZaATu3w== - -axobject-query@^2.2.0: - version "2.2.0" - resolved "http://localhost:4873/axobject-query/-/axobject-query-2.2.0.tgz#943d47e10c0b704aa42275e20edf3722648989be" - integrity sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA== - -babel-jest@^27.4.2, babel-jest@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/babel-jest/-/babel-jest-27.5.1.tgz#a1bf8d61928edfefd21da27eb86a695bfd691444" - integrity sha512-cdQ5dXjGRd0IBRATiQ4mZGlGlRE8kJpjPOixdNRdT+m3UcNqmYWN6rK6nvtXYfY3D76cb8s/O1Ss8ea24PIwcg== - dependencies: - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/babel__core" "^7.1.14" - babel-plugin-istanbul "^6.1.1" - babel-preset-jest "^27.5.1" - chalk "^4.0.0" - graceful-fs "^4.2.9" - slash "^3.0.0" - -babel-loader@^8.2.3: - version "8.2.5" - resolved "http://localhost:4873/babel-loader/-/babel-loader-8.2.5.tgz#d45f585e654d5a5d90f5350a779d7647c5ed512e" - integrity sha512-OSiFfH89LrEMiWd4pLNqGz4CwJDtbs2ZVc+iGu2HrkRfPxId9F2anQj38IxWpmRfsUY0aBZYi1EFcd3mhtRMLQ== - dependencies: - 
find-cache-dir "^3.3.1" - loader-utils "^2.0.0" - make-dir "^3.1.0" - schema-utils "^2.6.5" - -babel-plugin-dynamic-import-node@^2.3.3: - version "2.3.3" - resolved "http://localhost:4873/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz#84fda19c976ec5c6defef57f9427b3def66e17a3" - integrity sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ== - dependencies: - object.assign "^4.1.0" - -babel-plugin-istanbul@^6.1.1: - version "6.1.1" - resolved "http://localhost:4873/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" - integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - "@istanbuljs/load-nyc-config" "^1.0.0" - "@istanbuljs/schema" "^0.1.2" - istanbul-lib-instrument "^5.0.4" - test-exclude "^6.0.0" - -babel-plugin-jest-hoist@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-27.5.1.tgz#9be98ecf28c331eb9f5df9c72d6f89deb8181c2e" - integrity sha512-50wCwD5EMNW4aRpOwtqzyZHIewTYNxLA4nhB+09d8BIssfNfzBRhkBIHiaPv1Si226TQSvp8gxAJm2iY2qs2hQ== - dependencies: - "@babel/template" "^7.3.3" - "@babel/types" "^7.3.3" - "@types/babel__core" "^7.0.0" - "@types/babel__traverse" "^7.0.6" - -babel-plugin-macros@^3.1.0: - version "3.1.0" - resolved "http://localhost:4873/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz#9ef6dc74deb934b4db344dc973ee851d148c50c1" - integrity sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg== - dependencies: - "@babel/runtime" "^7.12.5" - cosmiconfig "^7.0.0" - resolve "^1.19.0" - -babel-plugin-named-asset-import@^0.3.8: - version "0.3.8" - resolved "http://localhost:4873/babel-plugin-named-asset-import/-/babel-plugin-named-asset-import-0.3.8.tgz#6b7fa43c59229685368683c28bc9734f24524cc2" - integrity 
sha512-WXiAc++qo7XcJ1ZnTYGtLxmBCVbddAml3CEXgWaBzNzLNoxtQ8AiGEFDMOhot9XjTCQbvP5E77Fj9Gk924f00Q== - -babel-plugin-polyfill-corejs2@^0.3.3: - version "0.3.3" - resolved "http://localhost:4873/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.3.tgz#5d1bd3836d0a19e1b84bbf2d9640ccb6f951c122" - integrity sha512-8hOdmFYFSZhqg2C/JgLUQ+t52o5nirNwaWM2B9LWteozwIvM14VSwdsCAUET10qT+kmySAlseadmfeeSWFCy+Q== - dependencies: - "@babel/compat-data" "^7.17.7" - "@babel/helper-define-polyfill-provider" "^0.3.3" - semver "^6.1.1" - -babel-plugin-polyfill-corejs3@^0.6.0: - version "0.6.0" - resolved "http://localhost:4873/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.6.0.tgz#56ad88237137eade485a71b52f72dbed57c6230a" - integrity sha512-+eHqR6OPcBhJOGgsIar7xoAB1GcSwVUA3XjAd7HJNzOXT4wv6/H7KIdA/Nc60cvUlDbKApmqNvD1B1bzOt4nyA== - dependencies: - "@babel/helper-define-polyfill-provider" "^0.3.3" - core-js-compat "^3.25.1" - -babel-plugin-polyfill-regenerator@^0.4.1: - version "0.4.1" - resolved "http://localhost:4873/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.4.1.tgz#390f91c38d90473592ed43351e801a9d3e0fd747" - integrity sha512-NtQGmyQDXjQqQ+IzRkBVwEOz9lQ4zxAQZgoAYEtU9dJjnl1Oc98qnN7jcp+bE7O7aYzVpavXE3/VKXNzUbh7aw== - dependencies: - "@babel/helper-define-polyfill-provider" "^0.3.3" - -babel-plugin-transform-react-remove-prop-types@^0.4.24: - version "0.4.24" - resolved "http://localhost:4873/babel-plugin-transform-react-remove-prop-types/-/babel-plugin-transform-react-remove-prop-types-0.4.24.tgz#f2edaf9b4c6a5fbe5c1d678bfb531078c1555f3a" - integrity sha512-eqj0hVcJUR57/Ug2zE1Yswsw4LhuqqHhD+8v120T1cl3kjg76QwtyBrdIk4WVwK+lAhBJVYCd/v+4nc4y+8JsA== - -babel-preset-current-node-syntax@^1.0.0: - version "1.0.1" - resolved "http://localhost:4873/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" - integrity 
sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ== - dependencies: - "@babel/plugin-syntax-async-generators" "^7.8.4" - "@babel/plugin-syntax-bigint" "^7.8.3" - "@babel/plugin-syntax-class-properties" "^7.8.3" - "@babel/plugin-syntax-import-meta" "^7.8.3" - "@babel/plugin-syntax-json-strings" "^7.8.3" - "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - "@babel/plugin-syntax-numeric-separator" "^7.8.3" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - "@babel/plugin-syntax-top-level-await" "^7.8.3" - -babel-preset-jest@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/babel-preset-jest/-/babel-preset-jest-27.5.1.tgz#91f10f58034cb7989cb4f962b69fa6eef6a6bc81" - integrity sha512-Nptf2FzlPCWYuJg41HBqXVT8ym6bXOevuCTbhxlUpjwtysGaIWFvDEjp4y+G7fl13FgOdjs7P/DmErqH7da0Ag== - dependencies: - babel-plugin-jest-hoist "^27.5.1" - babel-preset-current-node-syntax "^1.0.0" - -babel-preset-react-app@^10.0.1: - version "10.0.1" - resolved "http://localhost:4873/babel-preset-react-app/-/babel-preset-react-app-10.0.1.tgz#ed6005a20a24f2c88521809fa9aea99903751584" - integrity sha512-b0D9IZ1WhhCWkrTXyFuIIgqGzSkRIH5D5AmB0bXbzYAB1OBAwHcUeyWW2LorutLWF5btNo/N7r/cIdmvvKJlYg== - dependencies: - "@babel/core" "^7.16.0" - "@babel/plugin-proposal-class-properties" "^7.16.0" - "@babel/plugin-proposal-decorators" "^7.16.4" - "@babel/plugin-proposal-nullish-coalescing-operator" "^7.16.0" - "@babel/plugin-proposal-numeric-separator" "^7.16.0" - "@babel/plugin-proposal-optional-chaining" "^7.16.0" - "@babel/plugin-proposal-private-methods" "^7.16.0" - "@babel/plugin-transform-flow-strip-types" "^7.16.0" - "@babel/plugin-transform-react-display-name" "^7.16.0" - "@babel/plugin-transform-runtime" "^7.16.4" - "@babel/preset-env" "^7.16.4" - "@babel/preset-react" 
"^7.16.0" - "@babel/preset-typescript" "^7.16.0" - "@babel/runtime" "^7.16.3" - babel-plugin-macros "^3.1.0" - babel-plugin-transform-react-remove-prop-types "^0.4.24" - -balanced-match@^1.0.0: - version "1.0.2" - resolved "http://localhost:4873/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" - integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== - -batch@0.6.1: - version "0.6.1" - resolved "http://localhost:4873/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" - integrity sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw== - -bfj@^7.0.2: - version "7.0.2" - resolved "http://localhost:4873/bfj/-/bfj-7.0.2.tgz#1988ce76f3add9ac2913fd8ba47aad9e651bfbb2" - integrity sha512-+e/UqUzwmzJamNF50tBV6tZPTORow7gQ96iFow+8b562OdMpEK0BcJEq2OSPEDmAbSMBQ7PKZ87ubFkgxpYWgw== - dependencies: - bluebird "^3.5.5" - check-types "^11.1.1" - hoopy "^0.1.4" - tryer "^1.0.1" - -big.js@^5.2.2: - version "5.2.2" - resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" - integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== - -binary-extensions@^2.0.0: - version "2.2.0" - resolved "http://localhost:4873/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" - integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== - -bluebird@^3.5.5: - version "3.7.2" - resolved "http://localhost:4873/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" - integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== - -body-parser@1.20.0: - version "1.20.0" - resolved "http://localhost:4873/body-parser/-/body-parser-1.20.0.tgz#3de69bd89011c11573d7bfee6a64f11b6bd27cc5" - integrity 
sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg== - dependencies: - bytes "3.1.2" - content-type "~1.0.4" - debug "2.6.9" - depd "2.0.0" - destroy "1.2.0" - http-errors "2.0.0" - iconv-lite "0.4.24" - on-finished "2.4.1" - qs "6.10.3" - raw-body "2.5.1" - type-is "~1.6.18" - unpipe "1.0.0" - -bonjour-service@^1.0.11: - version "1.0.14" - resolved "http://localhost:4873/bonjour-service/-/bonjour-service-1.0.14.tgz#c346f5bc84e87802d08f8d5a60b93f758e514ee7" - integrity sha512-HIMbgLnk1Vqvs6B4Wq5ep7mxvj9sGz5d1JJyDNSGNIdA/w2MCz6GTjWTdjqOJV1bEPj+6IkxDvWNFKEBxNt4kQ== - dependencies: - array-flatten "^2.1.2" - dns-equal "^1.0.0" - fast-deep-equal "^3.1.3" - multicast-dns "^7.2.5" - -boolbase@^1.0.0, boolbase@~1.0.0: - version "1.0.0" - resolved "http://localhost:4873/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" - integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww== - -brace-expansion@^1.1.7: - version "1.1.11" - resolved "http://localhost:4873/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" - integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== - dependencies: - balanced-match "^1.0.0" - concat-map "0.0.1" - -brace-expansion@^2.0.1: - version "2.0.1" - resolved "http://localhost:4873/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" - integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== - dependencies: - balanced-match "^1.0.0" - -braces@^3.0.2, braces@~3.0.2: - version "3.0.2" - resolved "http://localhost:4873/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" - integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== - dependencies: - fill-range "^7.0.1" - -browser-process-hrtime@^1.0.0: - version "1.0.0" 
- resolved "http://localhost:4873/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" - integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== - -browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.16.6, browserslist@^4.18.1, browserslist@^4.20.3, browserslist@^4.21.3, browserslist@^4.21.4: - version "4.21.4" - resolved "http://localhost:4873/browserslist/-/browserslist-4.21.4.tgz#e7496bbc67b9e39dd0f98565feccdcb0d4ff6987" - integrity sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw== - dependencies: - caniuse-lite "^1.0.30001400" - electron-to-chromium "^1.4.251" - node-releases "^2.0.6" - update-browserslist-db "^1.0.9" - -bser@2.1.1: - version "2.1.1" - resolved "http://localhost:4873/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" - integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== - dependencies: - node-int64 "^0.4.0" - -buffer-from@^1.0.0: - version "1.1.2" - resolved "http://localhost:4873/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" - integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== - -builtin-modules@^3.1.0: - version "3.3.0" - resolved "http://localhost:4873/builtin-modules/-/builtin-modules-3.3.0.tgz#cae62812b89801e9656336e46223e030386be7b6" - integrity sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw== - -bytes@3.0.0: - version "3.0.0" - resolved "http://localhost:4873/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" - integrity sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw== - -bytes@3.1.2: - version "3.1.2" - resolved "http://localhost:4873/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" - integrity 
sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== - -call-bind@^1.0.0, call-bind@^1.0.2: - version "1.0.2" - resolved "http://localhost:4873/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" - integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== - dependencies: - function-bind "^1.1.1" - get-intrinsic "^1.0.2" - -callsites@^3.0.0: - version "3.1.0" - resolved "http://localhost:4873/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" - integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== - -camel-case@^4.1.2: - version "4.1.2" - resolved "http://localhost:4873/camel-case/-/camel-case-4.1.2.tgz#9728072a954f805228225a6deea6b38461e1bd5a" - integrity sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw== - dependencies: - pascal-case "^3.1.2" - tslib "^2.0.3" - -camelcase-css@^2.0.1: - version "2.0.1" - resolved "http://localhost:4873/camelcase-css/-/camelcase-css-2.0.1.tgz#ee978f6947914cc30c6b44741b6ed1df7f043fd5" - integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA== - -camelcase@^5.3.1: - version "5.3.1" - resolved "http://localhost:4873/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" - integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== - -camelcase@^6.2.0, camelcase@^6.2.1: - version "6.3.0" - resolved "http://localhost:4873/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" - integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== - -caniuse-api@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/caniuse-api/-/caniuse-api-3.0.0.tgz#5e4d90e2274961d46291997df599e3ed008ee4c0" - integrity 
sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw== - dependencies: - browserslist "^4.0.0" - caniuse-lite "^1.0.0" - lodash.memoize "^4.1.2" - lodash.uniq "^4.5.0" - -caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001400, caniuse-lite@^1.0.30001407: - version "1.0.30001416" - resolved "http://localhost:4873/caniuse-lite/-/caniuse-lite-1.0.30001416.tgz#29692af8a6a11412f2d3cf9a59d588fcdd21ce4c" - integrity sha512-06wzzdAkCPZO+Qm4e/eNghZBDfVNDsCgw33T27OwBH9unE9S478OYw//Q2L7Npf/zBzs7rjZOszIFQkwQKAEqA== - -case-sensitive-paths-webpack-plugin@^2.4.0: - version "2.4.0" - resolved "http://localhost:4873/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz#db64066c6422eed2e08cc14b986ca43796dbc6d4" - integrity sha512-roIFONhcxog0JSSWbvVAh3OocukmSgpqOH6YpMkCvav/ySIV3JKg4Dc8vYtQjYi/UxpNE36r/9v+VqTQqgkYmw== - -chalk@^2.0.0, chalk@^2.4.1: - version "2.4.2" - resolved "http://localhost:4873/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" - integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== - dependencies: - ansi-styles "^3.2.1" - escape-string-regexp "^1.0.5" - supports-color "^5.3.0" - -chalk@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4" - integrity sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg== - dependencies: - ansi-styles "^4.1.0" - supports-color "^7.1.0" - -chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.2: - version "4.1.2" - resolved "http://localhost:4873/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" - integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== - dependencies: - ansi-styles "^4.1.0" - supports-color "^7.1.0" - -char-regex@^1.0.2: - version "1.0.2" - resolved 
"http://localhost:4873/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" - integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== - -char-regex@^2.0.0: - version "2.0.1" - resolved "http://localhost:4873/char-regex/-/char-regex-2.0.1.tgz#6dafdb25f9d3349914079f010ba8d0e6ff9cd01e" - integrity sha512-oSvEeo6ZUD7NepqAat3RqoucZ5SeqLJgOvVIwkafu6IP3V0pO38s/ypdVUmDDK6qIIHNlYHJAKX9E7R7HoKElw== - -check-types@^11.1.1: - version "11.1.2" - resolved "http://localhost:4873/check-types/-/check-types-11.1.2.tgz#86a7c12bf5539f6324eb0e70ca8896c0e38f3e2f" - integrity sha512-tzWzvgePgLORb9/3a0YenggReLKAIb2owL03H2Xdoe5pKcUyWRSEQ8xfCar8t2SIAuEDwtmx2da1YB52YuHQMQ== - -chokidar@^3.4.2, chokidar@^3.5.3: - version "3.5.3" - resolved "http://localhost:4873/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" - integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== - dependencies: - anymatch "~3.1.2" - braces "~3.0.2" - glob-parent "~5.1.2" - is-binary-path "~2.1.0" - is-glob "~4.0.1" - normalize-path "~3.0.0" - readdirp "~3.6.0" - optionalDependencies: - fsevents "~2.3.2" - -chrome-trace-event@^1.0.2: - version "1.0.3" - resolved "http://localhost:4873/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz#1015eced4741e15d06664a957dbbf50d041e26ac" - integrity sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg== - -ci-info@^3.2.0: - version "3.4.0" - resolved "http://localhost:4873/ci-info/-/ci-info-3.4.0.tgz#b28484fd436cbc267900364f096c9dc185efb251" - integrity sha512-t5QdPT5jq3o262DOQ8zA6E1tlH2upmUc4Hlvrbx1pGYJuiiHl7O7rvVNI+l8HTVhd/q3Qc9vqimkNk5yiXsAug== - -cjs-module-lexer@^1.0.0: - version "1.2.2" - resolved "http://localhost:4873/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz#9f84ba3244a512f3a54e5277e8eef4c489864e40" - integrity 
sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA== - -clean-css@^5.2.2: - version "5.3.1" - resolved "http://localhost:4873/clean-css/-/clean-css-5.3.1.tgz#d0610b0b90d125196a2894d35366f734e5d7aa32" - integrity sha512-lCr8OHhiWCTw4v8POJovCoh4T7I9U11yVsPjMWWnnMmp9ZowCxyad1Pathle/9HjaDp+fdQKjO9fQydE6RHTZg== - dependencies: - source-map "~0.6.0" - -cliui@^7.0.2: - version "7.0.4" - resolved "http://localhost:4873/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" - integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== - dependencies: - string-width "^4.2.0" - strip-ansi "^6.0.0" - wrap-ansi "^7.0.0" - -clone-deep@^4.0.1: - version "4.0.1" - resolved "http://localhost:4873/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387" - integrity sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ== - dependencies: - is-plain-object "^2.0.4" - kind-of "^6.0.2" - shallow-clone "^3.0.0" - -co@^4.6.0: - version "4.6.0" - resolved "http://localhost:4873/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" - integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ== - -coa@^2.0.2: - version "2.0.2" - resolved "http://localhost:4873/coa/-/coa-2.0.2.tgz#43f6c21151b4ef2bf57187db0d73de229e3e7ec3" - integrity sha512-q5/jG+YQnSy4nRTV4F7lPepBJZ8qBNJJDBuJdoejDyLXgmL7IEo+Le2JDZudFTFt7mrCqIRaSjws4ygRCTCAXA== - dependencies: - "@types/q" "^1.5.1" - chalk "^2.4.1" - q "^1.1.2" - -collect-v8-coverage@^1.0.0: - version "1.0.1" - resolved "http://localhost:4873/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" - integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== - -color-convert@^1.9.0: - version "1.9.3" - resolved 
"http://localhost:4873/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" - integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== - dependencies: - color-name "1.1.3" - -color-convert@^2.0.1: - version "2.0.1" - resolved "http://localhost:4873/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" - integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== - dependencies: - color-name "~1.1.4" - -color-name@1.1.3: - version "1.1.3" - resolved "http://localhost:4873/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" - integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== - -color-name@^1.1.4, color-name@~1.1.4: - version "1.1.4" - resolved "http://localhost:4873/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" - integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== - -colord@^2.9.1: - version "2.9.3" - resolved "http://localhost:4873/colord/-/colord-2.9.3.tgz#4f8ce919de456f1d5c1c368c307fe20f3e59fb43" - integrity sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw== - -colorette@^2.0.10: - version "2.0.19" - resolved "http://localhost:4873/colorette/-/colorette-2.0.19.tgz#cdf044f47ad41a0f4b56b3a0d5b4e6e1a2d5a798" - integrity sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ== - -combined-stream@^1.0.8: - version "1.0.8" - resolved "http://localhost:4873/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" - integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== - dependencies: - delayed-stream "~1.0.0" - -commander@^2.20.0: - version "2.20.3" - resolved 
"http://localhost:4873/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" - integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== - -commander@^7.2.0: - version "7.2.0" - resolved "http://localhost:4873/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" - integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== - -commander@^8.3.0: - version "8.3.0" - resolved "http://localhost:4873/commander/-/commander-8.3.0.tgz#4837ea1b2da67b9c616a67afbb0fafee567bca66" - integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== - -common-path-prefix@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/common-path-prefix/-/common-path-prefix-3.0.0.tgz#7d007a7e07c58c4b4d5f433131a19141b29f11e0" - integrity sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w== - -common-tags@^1.8.0: - version "1.8.2" - resolved "http://localhost:4873/common-tags/-/common-tags-1.8.2.tgz#94ebb3c076d26032745fd54face7f688ef5ac9c6" - integrity sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA== - -commondir@^1.0.1: - version "1.0.1" - resolved "http://localhost:4873/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" - integrity sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg== - -compressible@~2.0.16: - version "2.0.18" - resolved "http://localhost:4873/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" - integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg== - dependencies: - mime-db ">= 1.43.0 < 2" - -compression@^1.7.4: - version "1.7.4" - resolved "http://localhost:4873/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" - integrity 
sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== - dependencies: - accepts "~1.3.5" - bytes "3.0.0" - compressible "~2.0.16" - debug "2.6.9" - on-headers "~1.0.2" - safe-buffer "5.1.2" - vary "~1.1.2" - -concat-map@0.0.1: - version "0.0.1" - resolved "http://localhost:4873/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" - integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== - -confusing-browser-globals@^1.0.11: - version "1.0.11" - resolved "http://localhost:4873/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz#ae40e9b57cdd3915408a2805ebd3a5585608dc81" - integrity sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA== - -connect-history-api-fallback@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz#647264845251a0daf25b97ce87834cace0f5f1c8" - integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA== - -content-disposition@0.5.4: - version "0.5.4" - resolved "http://localhost:4873/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" - integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== - dependencies: - safe-buffer "5.2.1" - -content-type@~1.0.4: - version "1.0.4" - resolved "http://localhost:4873/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" - integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== - -convert-source-map@^1.4.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: - version "1.8.0" - resolved "http://localhost:4873/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" - integrity 
sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== - dependencies: - safe-buffer "~5.1.1" - -cookie-signature@1.0.6: - version "1.0.6" - resolved "http://localhost:4873/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" - integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== - -cookie@0.5.0: - version "0.5.0" - resolved "http://localhost:4873/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" - integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== - -core-js-compat@^3.25.1: - version "3.25.5" - resolved "http://localhost:4873/core-js-compat/-/core-js-compat-3.25.5.tgz#0016e8158c904f7b059486639e6e82116eafa7d9" - integrity sha512-ovcyhs2DEBUIE0MGEKHP4olCUW/XYte3Vroyxuh38rD1wAO4dHohsovUC4eAOuzFxE6b+RXvBU3UZ9o0YhUTkA== - dependencies: - browserslist "^4.21.4" - -core-js-pure@^3.25.1, core-js-pure@^3.8.1: - version "3.25.5" - resolved "http://localhost:4873/core-js-pure/-/core-js-pure-3.25.5.tgz#79716ba54240c6aa9ceba6eee08cf79471ba184d" - integrity sha512-oml3M22pHM+igfWHDfdLVq2ShWmjM2V4L+dQEBs0DWVIqEm9WHCwGAlZ6BmyBQGy5sFrJmcx+856D9lVKyGWYg== - -core-js@^3.19.2: - version "3.25.5" - resolved "http://localhost:4873/core-js/-/core-js-3.25.5.tgz#e86f651a2ca8a0237a5f064c2fe56cef89646e27" - integrity sha512-nbm6eZSjm+ZuBQxCUPQKQCoUEfFOXjUZ8dTTyikyKaWrTYmAVbykQfwsKE5dBK88u3QCkCrzsx/PPlKfhsvgpw== - -core-util-is@~1.0.0: - version "1.0.3" - resolved "http://localhost:4873/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" - integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== - -cosmiconfig-typescript-loader@^4.1.1: - version "4.1.1" - resolved "http://localhost:4873/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-4.1.1.tgz#38dd3578344038dae40fdf09792bc2e9df529f78" - integrity 
sha512-9DHpa379Gp0o0Zefii35fcmuuin6q92FnLDffzdZ0l9tVd3nEobG3O+MZ06+kuBvFTSVScvNb/oHA13Nd4iipg== - -cosmiconfig@^6.0.0: - version "6.0.0" - resolved "http://localhost:4873/cosmiconfig/-/cosmiconfig-6.0.0.tgz#da4fee853c52f6b1e6935f41c1a2fc50bd4a9982" - integrity sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg== - dependencies: - "@types/parse-json" "^4.0.0" - import-fresh "^3.1.0" - parse-json "^5.0.0" - path-type "^4.0.0" - yaml "^1.7.2" - -cosmiconfig@^7.0.0, cosmiconfig@^7.0.1: - version "7.0.1" - resolved "http://localhost:4873/cosmiconfig/-/cosmiconfig-7.0.1.tgz#714d756522cace867867ccb4474c5d01bbae5d6d" - integrity sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ== - dependencies: - "@types/parse-json" "^4.0.0" - import-fresh "^3.2.1" - parse-json "^5.0.0" - path-type "^4.0.0" - yaml "^1.10.0" - -craco-wasm@0.0.1: - version "0.0.1" - resolved "http://localhost:4873/craco-wasm/-/craco-wasm-0.0.1.tgz#a7edbf7ff64e7569909b15684c00de13209985c6" - integrity sha512-0vwZLtkQocS7UlPg9IF4TsG/6gKXcd9O0ISomjRoBMvR2XvtZN4yxvU8/WlY0Vf42PtOcWvhSx9i4oVNxLVE6w== - -cross-spawn@^7.0.2, cross-spawn@^7.0.3: - version "7.0.3" - resolved "http://localhost:4873/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" - integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== - dependencies: - path-key "^3.1.0" - shebang-command "^2.0.0" - which "^2.0.1" - -crypto-random-string@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5" - integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== - -css-blank-pseudo@^3.0.3: - version "3.0.3" - resolved "http://localhost:4873/css-blank-pseudo/-/css-blank-pseudo-3.0.3.tgz#36523b01c12a25d812df343a32c322d2a2324561" - integrity 
sha512-VS90XWtsHGqoM0t4KpH053c4ehxZ2E6HtGI7x68YFV0pTo/QmkV/YFA+NnlvK8guxZVNWGQhVNJGC39Q8XF4OQ== - dependencies: - postcss-selector-parser "^6.0.9" - -css-declaration-sorter@^6.3.0: - version "6.3.1" - resolved "http://localhost:4873/css-declaration-sorter/-/css-declaration-sorter-6.3.1.tgz#be5e1d71b7a992433fb1c542c7a1b835e45682ec" - integrity sha512-fBffmak0bPAnyqc/HO8C3n2sHrp9wcqQz6ES9koRF2/mLOVAx9zIQ3Y7R29sYCteTPqMCwns4WYQoCX91Xl3+w== - -css-has-pseudo@^3.0.4: - version "3.0.4" - resolved "http://localhost:4873/css-has-pseudo/-/css-has-pseudo-3.0.4.tgz#57f6be91ca242d5c9020ee3e51bbb5b89fc7af73" - integrity sha512-Vse0xpR1K9MNlp2j5w1pgWIJtm1a8qS0JwS9goFYcImjlHEmywP9VUF05aGBXzGpDJF86QXk4L0ypBmwPhGArw== - dependencies: - postcss-selector-parser "^6.0.9" - -css-loader@^6.5.1: - version "6.7.1" - resolved "http://localhost:4873/css-loader/-/css-loader-6.7.1.tgz#e98106f154f6e1baf3fc3bc455cb9981c1d5fd2e" - integrity sha512-yB5CNFa14MbPJcomwNh3wLThtkZgcNyI2bNMRt8iE5Z8Vwl7f8vQXFAzn2HDOJvtDq2NTZBUGMSUNNyrv3/+cw== - dependencies: - icss-utils "^5.1.0" - postcss "^8.4.7" - postcss-modules-extract-imports "^3.0.0" - postcss-modules-local-by-default "^4.0.0" - postcss-modules-scope "^3.0.0" - postcss-modules-values "^4.0.0" - postcss-value-parser "^4.2.0" - semver "^7.3.5" - -css-minimizer-webpack-plugin@^3.2.0: - version "3.4.1" - resolved "http://localhost:4873/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-3.4.1.tgz#ab78f781ced9181992fe7b6e4f3422e76429878f" - integrity sha512-1u6D71zeIfgngN2XNRJefc/hY7Ybsxd74Jm4qngIXyUEk7fss3VUzuHxLAq/R8NAba4QU9OUSaMZlbpRc7bM4Q== - dependencies: - cssnano "^5.0.6" - jest-worker "^27.0.2" - postcss "^8.3.5" - schema-utils "^4.0.0" - serialize-javascript "^6.0.0" - source-map "^0.6.1" - -css-prefers-color-scheme@^6.0.3: - version "6.0.3" - resolved "http://localhost:4873/css-prefers-color-scheme/-/css-prefers-color-scheme-6.0.3.tgz#ca8a22e5992c10a5b9d315155e7caee625903349" - integrity 
sha512-4BqMbZksRkJQx2zAjrokiGMd07RqOa2IxIrrN10lyBe9xhn9DEvjUK79J6jkeiv9D9hQFXKb6g1jwU62jziJZA== - -css-select-base-adapter@^0.1.1: - version "0.1.1" - resolved "http://localhost:4873/css-select-base-adapter/-/css-select-base-adapter-0.1.1.tgz#3b2ff4972cc362ab88561507a95408a1432135d7" - integrity sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w== - -css-select@^2.0.0: - version "2.1.0" - resolved "http://localhost:4873/css-select/-/css-select-2.1.0.tgz#6a34653356635934a81baca68d0255432105dbef" - integrity sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ== - dependencies: - boolbase "^1.0.0" - css-what "^3.2.1" - domutils "^1.7.0" - nth-check "^1.0.2" - -css-select@^4.1.3: - version "4.3.0" - resolved "http://localhost:4873/css-select/-/css-select-4.3.0.tgz#db7129b2846662fd8628cfc496abb2b59e41529b" - integrity sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ== - dependencies: - boolbase "^1.0.0" - css-what "^6.0.1" - domhandler "^4.3.1" - domutils "^2.8.0" - nth-check "^2.0.1" - -css-tree@1.0.0-alpha.37: - version "1.0.0-alpha.37" - resolved "http://localhost:4873/css-tree/-/css-tree-1.0.0-alpha.37.tgz#98bebd62c4c1d9f960ec340cf9f7522e30709a22" - integrity sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg== - dependencies: - mdn-data "2.0.4" - source-map "^0.6.1" - -css-tree@^1.1.2, css-tree@^1.1.3: - version "1.1.3" - resolved "http://localhost:4873/css-tree/-/css-tree-1.1.3.tgz#eb4870fb6fd7707327ec95c2ff2ab09b5e8db91d" - integrity sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q== - dependencies: - mdn-data "2.0.14" - source-map "^0.6.1" - -css-what@^3.2.1: - version "3.4.2" - resolved "http://localhost:4873/css-what/-/css-what-3.4.2.tgz#ea7026fcb01777edbde52124e21f327e7ae950e4" - integrity 
sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ== - -css-what@^6.0.1: - version "6.1.0" - resolved "http://localhost:4873/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4" - integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw== - -css.escape@^1.5.1: - version "1.5.1" - resolved "http://localhost:4873/css.escape/-/css.escape-1.5.1.tgz#42e27d4fa04ae32f931a4b4d4191fa9cddee97cb" - integrity sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg== - -cssdb@^7.0.1: - version "7.0.1" - resolved "http://localhost:4873/cssdb/-/cssdb-7.0.1.tgz#3810a0c67ae06362982dfe965dbedf57a0f26617" - integrity sha512-pT3nzyGM78poCKLAEy2zWIVX2hikq6dIrjuZzLV98MumBg+xMTNYfHx7paUlfiRTgg91O/vR889CIf+qiv79Rw== - -cssesc@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" - integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== - -cssnano-preset-default@^5.2.12: - version "5.2.12" - resolved "http://localhost:4873/cssnano-preset-default/-/cssnano-preset-default-5.2.12.tgz#ebe6596ec7030e62c3eb2b3c09f533c0644a9a97" - integrity sha512-OyCBTZi+PXgylz9HAA5kHyoYhfGcYdwFmyaJzWnzxuGRtnMw/kR6ilW9XzlzlRAtB6PLT/r+prYgkef7hngFew== - dependencies: - css-declaration-sorter "^6.3.0" - cssnano-utils "^3.1.0" - postcss-calc "^8.2.3" - postcss-colormin "^5.3.0" - postcss-convert-values "^5.1.2" - postcss-discard-comments "^5.1.2" - postcss-discard-duplicates "^5.1.0" - postcss-discard-empty "^5.1.1" - postcss-discard-overridden "^5.1.0" - postcss-merge-longhand "^5.1.6" - postcss-merge-rules "^5.1.2" - postcss-minify-font-values "^5.1.0" - postcss-minify-gradients "^5.1.1" - postcss-minify-params "^5.1.3" - postcss-minify-selectors "^5.2.1" - postcss-normalize-charset "^5.1.0" - postcss-normalize-display-values "^5.1.0" - 
postcss-normalize-positions "^5.1.1" - postcss-normalize-repeat-style "^5.1.1" - postcss-normalize-string "^5.1.0" - postcss-normalize-timing-functions "^5.1.0" - postcss-normalize-unicode "^5.1.0" - postcss-normalize-url "^5.1.0" - postcss-normalize-whitespace "^5.1.1" - postcss-ordered-values "^5.1.3" - postcss-reduce-initial "^5.1.0" - postcss-reduce-transforms "^5.1.0" - postcss-svgo "^5.1.0" - postcss-unique-selectors "^5.1.1" - -cssnano-utils@^3.1.0: - version "3.1.0" - resolved "http://localhost:4873/cssnano-utils/-/cssnano-utils-3.1.0.tgz#95684d08c91511edfc70d2636338ca37ef3a6861" - integrity sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA== - -cssnano@^5.0.6: - version "5.1.13" - resolved "http://localhost:4873/cssnano/-/cssnano-5.1.13.tgz#83d0926e72955332dc4802a7070296e6258efc0a" - integrity sha512-S2SL2ekdEz6w6a2epXn4CmMKU4K3KpcyXLKfAYc9UQQqJRkD/2eLUG0vJ3Db/9OvO5GuAdgXw3pFbR6abqghDQ== - dependencies: - cssnano-preset-default "^5.2.12" - lilconfig "^2.0.3" - yaml "^1.10.2" - -csso@^4.0.2, csso@^4.2.0: - version "4.2.0" - resolved "http://localhost:4873/csso/-/csso-4.2.0.tgz#ea3a561346e8dc9f546d6febedd50187cf389529" - integrity sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA== - dependencies: - css-tree "^1.1.2" - -cssom@^0.4.4: - version "0.4.4" - resolved "http://localhost:4873/cssom/-/cssom-0.4.4.tgz#5a66cf93d2d0b661d80bf6a44fb65f5c2e4e0a10" - integrity sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw== - -cssom@~0.3.6: - version "0.3.8" - resolved "http://localhost:4873/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a" - integrity sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg== - -cssstyle@^2.3.0: - version "2.3.0" - resolved "http://localhost:4873/cssstyle/-/cssstyle-2.3.0.tgz#ff665a0ddbdc31864b09647f34163443d90b0852" - integrity 
sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A== - dependencies: - cssom "~0.3.6" - -csstype@^3.0.2: - version "3.1.1" - resolved "http://localhost:4873/csstype/-/csstype-3.1.1.tgz#841b532c45c758ee546a11d5bd7b7b473c8c30b9" - integrity sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw== - -damerau-levenshtein@^1.0.8: - version "1.0.8" - resolved "http://localhost:4873/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz#b43d286ccbd36bc5b2f7ed41caf2d0aba1f8a6e7" - integrity sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA== - -data-urls@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b" - integrity sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ== - dependencies: - abab "^2.0.3" - whatwg-mimetype "^2.3.0" - whatwg-url "^8.0.0" - -debug@2.6.9, debug@^2.6.0, debug@^2.6.9: - version "2.6.9" - resolved "http://localhost:4873/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" - integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== - dependencies: - ms "2.0.0" - -debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.4: - version "4.3.4" - resolved "http://localhost:4873/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" - integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== - dependencies: - ms "2.1.2" - -debug@^3.2.7: - version "3.2.7" - resolved "http://localhost:4873/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" - integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== - dependencies: - ms "^2.1.1" - -decimal.js@^10.2.1: - version "10.4.1" - resolved 
"http://localhost:4873/decimal.js/-/decimal.js-10.4.1.tgz#be75eeac4a2281aace80c1a8753587c27ef053e7" - integrity sha512-F29o+vci4DodHYT9UrR5IEbfBw9pE5eSapIJdTqXK5+6hq+t8VRxwQyKlW2i+KDKFkkJQRvFyI/QXD83h8LyQw== - -dedent@^0.7.0: - version "0.7.0" - resolved "http://localhost:4873/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" - integrity sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA== - -deep-is@^0.1.3, deep-is@~0.1.3: - version "0.1.4" - resolved "http://localhost:4873/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" - integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== - -deepmerge@^4.2.2: - version "4.2.2" - resolved "http://localhost:4873/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" - integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== - -default-gateway@^6.0.3: - version "6.0.3" - resolved "http://localhost:4873/default-gateway/-/default-gateway-6.0.3.tgz#819494c888053bdb743edbf343d6cdf7f2943a71" - integrity sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg== - dependencies: - execa "^5.0.0" - -define-lazy-prop@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" - integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== - -define-properties@^1.1.3, define-properties@^1.1.4: - version "1.1.4" - resolved "http://localhost:4873/define-properties/-/define-properties-1.1.4.tgz#0b14d7bd7fbeb2f3572c3a7eda80ea5d57fb05b1" - integrity sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA== - dependencies: - has-property-descriptors "^1.0.0" - object-keys "^1.1.1" - -defined@^1.0.0: - version "1.0.0" - resolved 
"http://localhost:4873/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693" - integrity sha512-Y2caI5+ZwS5c3RiNDJ6u53VhQHv+hHKwhkI1iHvceKUHw9Df6EK2zRLfjejRgMuCuxK7PfSWIMwWecceVvThjQ== - -delayed-stream@~1.0.0: - version "1.0.0" - resolved "http://localhost:4873/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" - integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== - -depd@2.0.0: - version "2.0.0" - resolved "http://localhost:4873/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" - integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== - -depd@~1.1.2: - version "1.1.2" - resolved "http://localhost:4873/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" - integrity sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ== - -destroy@1.2.0: - version "1.2.0" - resolved "http://localhost:4873/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" - integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== - -detect-newline@^3.0.0: - version "3.1.0" - resolved "http://localhost:4873/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" - integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== - -detect-node@^2.0.4: - version "2.1.0" - resolved "http://localhost:4873/detect-node/-/detect-node-2.1.0.tgz#c9c70775a49c3d03bc2c06d9a73be550f978f8b1" - integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g== - -detect-port-alt@^1.1.6: - version "1.1.6" - resolved "http://localhost:4873/detect-port-alt/-/detect-port-alt-1.1.6.tgz#24707deabe932d4a3cf621302027c2b266568275" - integrity 
sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q== - dependencies: - address "^1.0.1" - debug "^2.6.0" - -detective@^5.2.1: - version "5.2.1" - resolved "http://localhost:4873/detective/-/detective-5.2.1.tgz#6af01eeda11015acb0e73f933242b70f24f91034" - integrity sha512-v9XE1zRnz1wRtgurGu0Bs8uHKFSTdteYZNbIPFVhUZ39L/S79ppMpdmVOZAnoz1jfEFodc48n6MX483Xo3t1yw== - dependencies: - acorn-node "^1.8.2" - defined "^1.0.0" - minimist "^1.2.6" - -didyoumean@^1.2.2: - version "1.2.2" - resolved "http://localhost:4873/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037" - integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw== - -diff-sequences@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/diff-sequences/-/diff-sequences-27.5.1.tgz#eaecc0d327fd68c8d9672a1e64ab8dccb2ef5327" - integrity sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ== - -diff-sequences@^29.0.0: - version "29.0.0" - resolved "http://localhost:4873/diff-sequences/-/diff-sequences-29.0.0.tgz#bae49972ef3933556bcb0800b72e8579d19d9e4f" - integrity sha512-7Qe/zd1wxSDL4D/X/FPjOMB+ZMDt71W94KYaq05I2l0oQqgXgs7s4ftYYmV38gBSrPz2vcygxfs1xn0FT+rKNA== - -dir-glob@^3.0.1: - version "3.0.1" - resolved "http://localhost:4873/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" - integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== - dependencies: - path-type "^4.0.0" - -dlv@^1.1.3: - version "1.1.3" - resolved "http://localhost:4873/dlv/-/dlv-1.1.3.tgz#5c198a8a11453596e751494d49874bc7732f2e79" - integrity sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA== - -dns-equal@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" - integrity 
sha512-z+paD6YUQsk+AbGCEM4PrOXSss5gd66QfcVBFTKR/HpFL9jCqikS94HYwKww6fQyO7IxrIIyUu+g0Ka9tUS2Cg== - -dns-packet@^5.2.2: - version "5.4.0" - resolved "http://localhost:4873/dns-packet/-/dns-packet-5.4.0.tgz#1f88477cf9f27e78a213fb6d118ae38e759a879b" - integrity sha512-EgqGeaBB8hLiHLZtp/IbaDQTL8pZ0+IvwzSHA6d7VyMDM+B9hgddEMa9xjK5oYnw0ci0JQ6g2XCD7/f6cafU6g== - dependencies: - "@leichtgewicht/ip-codec" "^2.0.1" - -doctrine@^2.1.0: - version "2.1.0" - resolved "http://localhost:4873/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" - integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== - dependencies: - esutils "^2.0.2" - -doctrine@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" - integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== - dependencies: - esutils "^2.0.2" - -dom-accessibility-api@^0.5.6, dom-accessibility-api@^0.5.9: - version "0.5.14" - resolved "http://localhost:4873/dom-accessibility-api/-/dom-accessibility-api-0.5.14.tgz#56082f71b1dc7aac69d83c4285eef39c15d93f56" - integrity sha512-NMt+m9zFMPZe0JcY9gN224Qvk6qLIdqex29clBvc/y75ZBX9YA9wNK3frsYvu2DI1xcCIwxwnX+TlsJ2DSOADg== - -dom-converter@^0.2.0: - version "0.2.0" - resolved "http://localhost:4873/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" - integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== - dependencies: - utila "~0.4" - -dom-serializer@0: - version "0.2.2" - resolved "http://localhost:4873/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" - integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== - dependencies: - domelementtype "^2.0.1" - entities "^2.0.0" - -dom-serializer@^1.0.1: - version "1.4.1" - resolved 
"http://localhost:4873/dom-serializer/-/dom-serializer-1.4.1.tgz#de5d41b1aea290215dc45a6dae8adcf1d32e2d30" - integrity sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag== - dependencies: - domelementtype "^2.0.1" - domhandler "^4.2.0" - entities "^2.0.0" - -domelementtype@1: - version "1.3.1" - resolved "http://localhost:4873/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" - integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== - -domelementtype@^2.0.1, domelementtype@^2.2.0: - version "2.3.0" - resolved "http://localhost:4873/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d" - integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== - -domexception@^2.0.1: - version "2.0.1" - resolved "http://localhost:4873/domexception/-/domexception-2.0.1.tgz#fb44aefba793e1574b0af6aed2801d057529f304" - integrity sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg== - dependencies: - webidl-conversions "^5.0.0" - -domhandler@^4.0.0, domhandler@^4.2.0, domhandler@^4.3.1: - version "4.3.1" - resolved "http://localhost:4873/domhandler/-/domhandler-4.3.1.tgz#8d792033416f59d68bc03a5aa7b018c1ca89279c" - integrity sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ== - dependencies: - domelementtype "^2.2.0" - -domutils@^1.7.0: - version "1.7.0" - resolved "http://localhost:4873/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" - integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== - dependencies: - dom-serializer "0" - domelementtype "1" - -domutils@^2.5.2, domutils@^2.8.0: - version "2.8.0" - resolved "http://localhost:4873/domutils/-/domutils-2.8.0.tgz#4437def5db6e2d1f5d6ee859bd95ca7d02048135" - integrity 
sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A== - dependencies: - dom-serializer "^1.0.1" - domelementtype "^2.2.0" - domhandler "^4.2.0" - -dot-case@^3.0.4: - version "3.0.4" - resolved "http://localhost:4873/dot-case/-/dot-case-3.0.4.tgz#9b2b670d00a431667a8a75ba29cd1b98809ce751" - integrity sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w== - dependencies: - no-case "^3.0.4" - tslib "^2.0.3" - -dotenv-expand@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/dotenv-expand/-/dotenv-expand-5.1.0.tgz#3fbaf020bfd794884072ea26b1e9791d45a629f0" - integrity sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA== - -dotenv@^10.0.0: - version "10.0.0" - resolved "http://localhost:4873/dotenv/-/dotenv-10.0.0.tgz#3d4227b8fb95f81096cdd2b66653fb2c7085ba81" - integrity sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q== - -duplexer@^0.1.2: - version "0.1.2" - resolved "http://localhost:4873/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" - integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== - -ee-first@1.1.1: - version "1.1.1" - resolved "http://localhost:4873/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" - integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== - -ejs@^3.1.6: - version "3.1.8" - resolved "http://localhost:4873/ejs/-/ejs-3.1.8.tgz#758d32910c78047585c7ef1f92f9ee041c1c190b" - integrity sha512-/sXZeMlhS0ArkfX2Aw780gJzXSMPnKjtspYZv+f3NiKLlubezAHDU5+9xz6gd3/NhG3txQCo6xlglmTS+oTGEQ== - dependencies: - jake "^10.8.5" - -electron-to-chromium@^1.4.251: - version "1.4.274" - resolved "http://localhost:4873/electron-to-chromium/-/electron-to-chromium-1.4.274.tgz#74369ac6f020c3cea7c77ec040ddf159fe226233" - integrity 
sha512-Fgn7JZQzq85I81FpKUNxVLAzoghy8JZJ4NIue+YfUYBbu1AkpgzFvNwzF/ZNZH9ElkmJD0TSWu1F2gTpw/zZlg== - -emittery@^0.10.2: - version "0.10.2" - resolved "http://localhost:4873/emittery/-/emittery-0.10.2.tgz#902eec8aedb8c41938c46e9385e9db7e03182933" - integrity sha512-aITqOwnLanpHLNXZJENbOgjUBeHocD+xsSJmNrjovKBW5HbSpW3d1pEls7GFQPUWXiwG9+0P4GtHfEqC/4M0Iw== - -emittery@^0.8.1: - version "0.8.1" - resolved "http://localhost:4873/emittery/-/emittery-0.8.1.tgz#bb23cc86d03b30aa75a7f734819dee2e1ba70860" - integrity sha512-uDfvUjVrfGJJhymx/kz6prltenw1u7WrCg1oa94zYY8xxVpLLUu045LAT0dhDZdXG58/EpPL/5kA180fQ/qudg== - -emoji-regex@^8.0.0: - version "8.0.0" - resolved "http://localhost:4873/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" - integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== - -emoji-regex@^9.2.2: - version "9.2.2" - resolved "http://localhost:4873/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" - integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== - -emojis-list@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" - integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== - -encodeurl@~1.0.2: - version "1.0.2" - resolved "http://localhost:4873/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" - integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== - -enhanced-resolve@^5.10.0: - version "5.10.0" - resolved "http://localhost:4873/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz#0dc579c3bb2a1032e357ac45b8f3a6f3ad4fb1e6" - integrity sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ== - dependencies: - graceful-fs "^4.2.4" - tapable "^2.2.0" - 
-entities@^2.0.0: - version "2.2.0" - resolved "http://localhost:4873/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55" - integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== - -error-ex@^1.3.1: - version "1.3.2" - resolved "http://localhost:4873/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" - integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== - dependencies: - is-arrayish "^0.2.1" - -error-stack-parser@^2.0.6: - version "2.1.4" - resolved "http://localhost:4873/error-stack-parser/-/error-stack-parser-2.1.4.tgz#229cb01cdbfa84440bfa91876285b94680188286" - integrity sha512-Sk5V6wVazPhq5MhpO+AUxJn5x7XSXGl1R93Vn7i+zS15KDVxQijejNCrz8340/2bgLBjR9GtEG8ZVKONDjcqGQ== - dependencies: - stackframe "^1.3.4" - -es-abstract@^1.17.2, es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.2, es-abstract@^1.19.5, es-abstract@^1.20.1: - version "1.20.4" - resolved "http://localhost:4873/es-abstract/-/es-abstract-1.20.4.tgz#1d103f9f8d78d4cf0713edcd6d0ed1a46eed5861" - integrity sha512-0UtvRN79eMe2L+UNEF1BwRe364sj/DXhQ/k5FmivgoSdpM90b8Jc0mDzKMGo7QS0BVbOP/bTwBKNnDc9rNzaPA== - dependencies: - call-bind "^1.0.2" - es-to-primitive "^1.2.1" - function-bind "^1.1.1" - function.prototype.name "^1.1.5" - get-intrinsic "^1.1.3" - get-symbol-description "^1.0.0" - has "^1.0.3" - has-property-descriptors "^1.0.0" - has-symbols "^1.0.3" - internal-slot "^1.0.3" - is-callable "^1.2.7" - is-negative-zero "^2.0.2" - is-regex "^1.1.4" - is-shared-array-buffer "^1.0.2" - is-string "^1.0.7" - is-weakref "^1.0.2" - object-inspect "^1.12.2" - object-keys "^1.1.1" - object.assign "^4.1.4" - regexp.prototype.flags "^1.4.3" - safe-regex-test "^1.0.0" - string.prototype.trimend "^1.0.5" - string.prototype.trimstart "^1.0.5" - unbox-primitive "^1.0.2" - -es-array-method-boxes-properly@^1.0.0: - version "1.0.0" - resolved 
"http://localhost:4873/es-array-method-boxes-properly/-/es-array-method-boxes-properly-1.0.0.tgz#873f3e84418de4ee19c5be752990b2e44718d09e" - integrity sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA== - -es-module-lexer@^0.9.0: - version "0.9.3" - resolved "http://localhost:4873/es-module-lexer/-/es-module-lexer-0.9.3.tgz#6f13db00cc38417137daf74366f535c8eb438f19" - integrity sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ== - -es-shim-unscopables@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz#702e632193201e3edf8713635d083d378e510241" - integrity sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w== - dependencies: - has "^1.0.3" - -es-to-primitive@^1.2.1: - version "1.2.1" - resolved "http://localhost:4873/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" - integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== - dependencies: - is-callable "^1.1.4" - is-date-object "^1.0.1" - is-symbol "^1.0.2" - -escalade@^3.1.1: - version "3.1.1" - resolved "http://localhost:4873/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" - integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== - -escape-html@~1.0.3: - version "1.0.3" - resolved "http://localhost:4873/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" - integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== - -escape-string-regexp@^1.0.5: - version "1.0.5" - resolved "http://localhost:4873/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" - integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== - 
-escape-string-regexp@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" - integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== - -escape-string-regexp@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" - integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== - -escodegen@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/escodegen/-/escodegen-2.0.0.tgz#5e32b12833e8aa8fa35e1bf0befa89380484c7dd" - integrity sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw== - dependencies: - esprima "^4.0.1" - estraverse "^5.2.0" - esutils "^2.0.2" - optionator "^0.8.1" - optionalDependencies: - source-map "~0.6.1" - -eslint-config-react-app@^7.0.1: - version "7.0.1" - resolved "http://localhost:4873/eslint-config-react-app/-/eslint-config-react-app-7.0.1.tgz#73ba3929978001c5c86274c017ea57eb5fa644b4" - integrity sha512-K6rNzvkIeHaTd8m/QEh1Zko0KI7BACWkkneSs6s9cKZC/J27X3eZR6Upt1jkmZ/4FK+XUOPPxMEN7+lbUXfSlA== - dependencies: - "@babel/core" "^7.16.0" - "@babel/eslint-parser" "^7.16.3" - "@rushstack/eslint-patch" "^1.1.0" - "@typescript-eslint/eslint-plugin" "^5.5.0" - "@typescript-eslint/parser" "^5.5.0" - babel-preset-react-app "^10.0.1" - confusing-browser-globals "^1.0.11" - eslint-plugin-flowtype "^8.0.3" - eslint-plugin-import "^2.25.3" - eslint-plugin-jest "^25.3.0" - eslint-plugin-jsx-a11y "^6.5.1" - eslint-plugin-react "^7.27.1" - eslint-plugin-react-hooks "^4.3.0" - eslint-plugin-testing-library "^5.0.1" - -eslint-import-resolver-node@^0.3.6: - version "0.3.6" - resolved "http://localhost:4873/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz#4048b958395da89668252001dbd9eca6b83bacbd" - integrity 
sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw== - dependencies: - debug "^3.2.7" - resolve "^1.20.0" - -eslint-module-utils@^2.7.3: - version "2.7.4" - resolved "http://localhost:4873/eslint-module-utils/-/eslint-module-utils-2.7.4.tgz#4f3e41116aaf13a20792261e61d3a2e7e0583974" - integrity sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA== - dependencies: - debug "^3.2.7" - -eslint-plugin-flowtype@^8.0.3: - version "8.0.3" - resolved "http://localhost:4873/eslint-plugin-flowtype/-/eslint-plugin-flowtype-8.0.3.tgz#e1557e37118f24734aa3122e7536a038d34a4912" - integrity sha512-dX8l6qUL6O+fYPtpNRideCFSpmWOUVx5QcaGLVqe/vlDiBSe4vYljDWDETwnyFzpl7By/WVIu6rcrniCgH9BqQ== - dependencies: - lodash "^4.17.21" - string-natural-compare "^3.0.1" - -eslint-plugin-import@^2.25.3: - version "2.26.0" - resolved "http://localhost:4873/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz#f812dc47be4f2b72b478a021605a59fc6fe8b88b" - integrity sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA== - dependencies: - array-includes "^3.1.4" - array.prototype.flat "^1.2.5" - debug "^2.6.9" - doctrine "^2.1.0" - eslint-import-resolver-node "^0.3.6" - eslint-module-utils "^2.7.3" - has "^1.0.3" - is-core-module "^2.8.1" - is-glob "^4.0.3" - minimatch "^3.1.2" - object.values "^1.1.5" - resolve "^1.22.0" - tsconfig-paths "^3.14.1" - -eslint-plugin-jest@^25.3.0: - version "25.7.0" - resolved "http://localhost:4873/eslint-plugin-jest/-/eslint-plugin-jest-25.7.0.tgz#ff4ac97520b53a96187bad9c9814e7d00de09a6a" - integrity sha512-PWLUEXeeF7C9QGKqvdSbzLOiLTx+bno7/HC9eefePfEb257QFHg7ye3dh80AZVkaa/RQsBB1Q/ORQvg2X7F0NQ== - dependencies: - "@typescript-eslint/experimental-utils" "^5.0.0" - -eslint-plugin-jsx-a11y@^6.5.1: - version "6.6.1" - resolved "http://localhost:4873/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.6.1.tgz#93736fc91b83fdc38cc8d115deedfc3091aef1ff" - 
integrity sha512-sXgFVNHiWffBq23uiS/JaP6eVR622DqwB4yTzKvGZGcPq6/yZ3WmOZfuBks/vHWo9GaFOqC2ZK4i6+C35knx7Q== - dependencies: - "@babel/runtime" "^7.18.9" - aria-query "^4.2.2" - array-includes "^3.1.5" - ast-types-flow "^0.0.7" - axe-core "^4.4.3" - axobject-query "^2.2.0" - damerau-levenshtein "^1.0.8" - emoji-regex "^9.2.2" - has "^1.0.3" - jsx-ast-utils "^3.3.2" - language-tags "^1.0.5" - minimatch "^3.1.2" - semver "^6.3.0" - -eslint-plugin-react-hooks@^4.3.0: - version "4.6.0" - resolved "http://localhost:4873/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz#4c3e697ad95b77e93f8646aaa1630c1ba607edd3" - integrity sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g== - -eslint-plugin-react@^7.27.1: - version "7.31.8" - resolved "http://localhost:4873/eslint-plugin-react/-/eslint-plugin-react-7.31.8.tgz#3a4f80c10be1bcbc8197be9e8b641b2a3ef219bf" - integrity sha512-5lBTZmgQmARLLSYiwI71tiGVTLUuqXantZM6vlSY39OaDSV0M7+32K5DnLkmFrwTe+Ksz0ffuLUC91RUviVZfw== - dependencies: - array-includes "^3.1.5" - array.prototype.flatmap "^1.3.0" - doctrine "^2.1.0" - estraverse "^5.3.0" - jsx-ast-utils "^2.4.1 || ^3.0.0" - minimatch "^3.1.2" - object.entries "^1.1.5" - object.fromentries "^2.0.5" - object.hasown "^1.1.1" - object.values "^1.1.5" - prop-types "^15.8.1" - resolve "^2.0.0-next.3" - semver "^6.3.0" - string.prototype.matchall "^4.0.7" - -eslint-plugin-testing-library@^5.0.1: - version "5.7.2" - resolved "http://localhost:4873/eslint-plugin-testing-library/-/eslint-plugin-testing-library-5.7.2.tgz#c1b2112a40aab61f93e10859e8b2d81e54f0ce84" - integrity sha512-0ZmHeR/DUUgEzW8rwUBRWxuqntipDtpvxK0hymdHnLlABryJkzd+CAHr+XnISaVsTisZ5MLHp6nQF+8COHLLTA== - dependencies: - "@typescript-eslint/utils" "^5.13.0" - -eslint-scope@5.1.1, eslint-scope@^5.1.1: - version "5.1.1" - resolved "http://localhost:4873/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" - integrity 
sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== - dependencies: - esrecurse "^4.3.0" - estraverse "^4.1.1" - -eslint-scope@^7.1.1: - version "7.1.1" - resolved "http://localhost:4873/eslint-scope/-/eslint-scope-7.1.1.tgz#fff34894c2f65e5226d3041ac480b4513a163642" - integrity sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw== - dependencies: - esrecurse "^4.3.0" - estraverse "^5.2.0" - -eslint-utils@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/eslint-utils/-/eslint-utils-3.0.0.tgz#8aebaface7345bb33559db0a1f13a1d2d48c3672" - integrity sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA== - dependencies: - eslint-visitor-keys "^2.0.0" - -eslint-visitor-keys@^2.0.0, eslint-visitor-keys@^2.1.0: - version "2.1.0" - resolved "http://localhost:4873/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" - integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== - -eslint-visitor-keys@^3.3.0: - version "3.3.0" - resolved "http://localhost:4873/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826" - integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA== - -eslint-webpack-plugin@^3.1.1: - version "3.2.0" - resolved "http://localhost:4873/eslint-webpack-plugin/-/eslint-webpack-plugin-3.2.0.tgz#1978cdb9edc461e4b0195a20da950cf57988347c" - integrity sha512-avrKcGncpPbPSUHX6B3stNGzkKFto3eL+DKM4+VyMrVnhPc3vRczVlCq3uhuFOdRvDHTVXuzwk1ZKUrqDQHQ9w== - dependencies: - "@types/eslint" "^7.29.0 || ^8.4.1" - jest-worker "^28.0.2" - micromatch "^4.0.5" - normalize-path "^3.0.0" - schema-utils "^4.0.0" - -eslint@^8.3.0: - version "8.24.0" - resolved "http://localhost:4873/eslint/-/eslint-8.24.0.tgz#489516c927a5da11b3979dbfb2679394523383c8" - integrity 
sha512-dWFaPhGhTAiPcCgm3f6LI2MBWbogMnTJzFBbhXVRQDJPkr9pGZvVjlVfXd+vyDcWPA2Ic9L2AXPIQM0+vk/cSQ== - dependencies: - "@eslint/eslintrc" "^1.3.2" - "@humanwhocodes/config-array" "^0.10.5" - "@humanwhocodes/gitignore-to-minimatch" "^1.0.2" - "@humanwhocodes/module-importer" "^1.0.1" - ajv "^6.10.0" - chalk "^4.0.0" - cross-spawn "^7.0.2" - debug "^4.3.2" - doctrine "^3.0.0" - escape-string-regexp "^4.0.0" - eslint-scope "^7.1.1" - eslint-utils "^3.0.0" - eslint-visitor-keys "^3.3.0" - espree "^9.4.0" - esquery "^1.4.0" - esutils "^2.0.2" - fast-deep-equal "^3.1.3" - file-entry-cache "^6.0.1" - find-up "^5.0.0" - glob-parent "^6.0.1" - globals "^13.15.0" - globby "^11.1.0" - grapheme-splitter "^1.0.4" - ignore "^5.2.0" - import-fresh "^3.0.0" - imurmurhash "^0.1.4" - is-glob "^4.0.0" - js-sdsl "^4.1.4" - js-yaml "^4.1.0" - json-stable-stringify-without-jsonify "^1.0.1" - levn "^0.4.1" - lodash.merge "^4.6.2" - minimatch "^3.1.2" - natural-compare "^1.4.0" - optionator "^0.9.1" - regexpp "^3.2.0" - strip-ansi "^6.0.1" - strip-json-comments "^3.1.0" - text-table "^0.2.0" - -espree@^9.4.0: - version "9.4.0" - resolved "http://localhost:4873/espree/-/espree-9.4.0.tgz#cd4bc3d6e9336c433265fc0aa016fc1aaf182f8a" - integrity sha512-DQmnRpLj7f6TgN/NYb0MTzJXL+vJF9h3pHy4JhCIs3zwcgez8xmGg3sXHcEO97BrmO2OSvCwMdfdlyl+E9KjOw== - dependencies: - acorn "^8.8.0" - acorn-jsx "^5.3.2" - eslint-visitor-keys "^3.3.0" - -esprima@^4.0.0, esprima@^4.0.1: - version "4.0.1" - resolved "http://localhost:4873/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" - integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== - -esquery@^1.4.0: - version "1.4.0" - resolved "http://localhost:4873/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" - integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== - dependencies: - estraverse "^5.1.0" - -esrecurse@^4.3.0: - version 
"4.3.0" - resolved "http://localhost:4873/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" - integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== - dependencies: - estraverse "^5.2.0" - -estraverse@^4.1.1: - version "4.3.0" - resolved "http://localhost:4873/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" - integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== - -estraverse@^5.1.0, estraverse@^5.2.0, estraverse@^5.3.0: - version "5.3.0" - resolved "http://localhost:4873/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" - integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== - -estree-walker@^1.0.1: - version "1.0.1" - resolved "http://localhost:4873/estree-walker/-/estree-walker-1.0.1.tgz#31bc5d612c96b704106b477e6dd5d8aa138cb700" - integrity sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg== - -esutils@^2.0.2: - version "2.0.3" - resolved "http://localhost:4873/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" - integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== - -etag@~1.8.1: - version "1.8.1" - resolved "http://localhost:4873/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" - integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== - -eventemitter3@^4.0.0: - version "4.0.7" - resolved "http://localhost:4873/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" - integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== - -events@^3.2.0: - version "3.3.0" - resolved "http://localhost:4873/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" - integrity 
sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== - -execa@^5.0.0: - version "5.1.1" - resolved "http://localhost:4873/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" - integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== - dependencies: - cross-spawn "^7.0.3" - get-stream "^6.0.0" - human-signals "^2.1.0" - is-stream "^2.0.0" - merge-stream "^2.0.0" - npm-run-path "^4.0.1" - onetime "^5.1.2" - signal-exit "^3.0.3" - strip-final-newline "^2.0.0" - -exit@^0.1.2: - version "0.1.2" - resolved "http://localhost:4873/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" - integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== - -expect@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/expect/-/expect-27.5.1.tgz#83ce59f1e5bdf5f9d2b94b61d2050db48f3fef74" - integrity sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw== - dependencies: - "@jest/types" "^27.5.1" - jest-get-type "^27.5.1" - jest-matcher-utils "^27.5.1" - jest-message-util "^27.5.1" - -expect@^29.0.0: - version "29.1.2" - resolved "http://localhost:4873/expect/-/expect-29.1.2.tgz#82f8f28d7d408c7c68da3a386a490ee683e1eced" - integrity sha512-AuAGn1uxva5YBbBlXb+2JPxJRuemZsmlGcapPXWNSBNsQtAULfjioREGBWuI0EOvYUKjDnrCy8PW5Zlr1md5mw== - dependencies: - "@jest/expect-utils" "^29.1.2" - jest-get-type "^29.0.0" - jest-matcher-utils "^29.1.2" - jest-message-util "^29.1.2" - jest-util "^29.1.2" - -express@^4.17.3: - version "4.18.1" - resolved "http://localhost:4873/express/-/express-4.18.1.tgz#7797de8b9c72c857b9cd0e14a5eea80666267caf" - integrity sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q== - dependencies: - accepts "~1.3.8" - array-flatten "1.1.1" - body-parser "1.20.0" - content-disposition "0.5.4" - content-type "~1.0.4" - cookie "0.5.0" - 
cookie-signature "1.0.6" - debug "2.6.9" - depd "2.0.0" - encodeurl "~1.0.2" - escape-html "~1.0.3" - etag "~1.8.1" - finalhandler "1.2.0" - fresh "0.5.2" - http-errors "2.0.0" - merge-descriptors "1.0.1" - methods "~1.1.2" - on-finished "2.4.1" - parseurl "~1.3.3" - path-to-regexp "0.1.7" - proxy-addr "~2.0.7" - qs "6.10.3" - range-parser "~1.2.1" - safe-buffer "5.2.1" - send "0.18.0" - serve-static "1.15.0" - setprototypeof "1.2.0" - statuses "2.0.1" - type-is "~1.6.18" - utils-merge "1.0.1" - vary "~1.1.2" - -fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: - version "3.1.3" - resolved "http://localhost:4873/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" - integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== - -fast-glob@^3.2.11, fast-glob@^3.2.9: - version "3.2.12" - resolved "http://localhost:4873/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" - integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== - dependencies: - "@nodelib/fs.stat" "^2.0.2" - "@nodelib/fs.walk" "^1.2.3" - glob-parent "^5.1.2" - merge2 "^1.3.0" - micromatch "^4.0.4" - -fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0: - version "2.1.0" - resolved "http://localhost:4873/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" - integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== - -fast-levenshtein@^2.0.6, fast-levenshtein@~2.0.6: - version "2.0.6" - resolved "http://localhost:4873/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" - integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== - -fastq@^1.6.0: - version "1.13.0" - resolved 
"http://localhost:4873/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" - integrity sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw== - dependencies: - reusify "^1.0.4" - -faye-websocket@^0.11.3: - version "0.11.4" - resolved "http://localhost:4873/faye-websocket/-/faye-websocket-0.11.4.tgz#7f0d9275cfdd86a1c963dc8b65fcc451edcbb1da" - integrity sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g== - dependencies: - websocket-driver ">=0.5.1" - -fb-watchman@^2.0.0: - version "2.0.2" - resolved "http://localhost:4873/fb-watchman/-/fb-watchman-2.0.2.tgz#e9524ee6b5c77e9e5001af0f85f3adbb8623255c" - integrity sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA== - dependencies: - bser "2.1.1" - -file-entry-cache@^6.0.1: - version "6.0.1" - resolved "http://localhost:4873/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" - integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== - dependencies: - flat-cache "^3.0.4" - -file-loader@^6.2.0: - version "6.2.0" - resolved "http://localhost:4873/file-loader/-/file-loader-6.2.0.tgz#baef7cf8e1840df325e4390b4484879480eebe4d" - integrity sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw== - dependencies: - loader-utils "^2.0.0" - schema-utils "^3.0.0" - -filelist@^1.0.1: - version "1.0.4" - resolved "http://localhost:4873/filelist/-/filelist-1.0.4.tgz#f78978a1e944775ff9e62e744424f215e58352b5" - integrity sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q== - dependencies: - minimatch "^5.0.1" - -filesize@^8.0.6: - version "8.0.7" - resolved "http://localhost:4873/filesize/-/filesize-8.0.7.tgz#695e70d80f4e47012c132d57a059e80c6b580bd8" - integrity 
sha512-pjmC+bkIF8XI7fWaH8KxHcZL3DPybs1roSKP4rKDvy20tAWwIObE4+JIseG2byfGKhud5ZnM4YSGKBz7Sh0ndQ== - -fill-range@^7.0.1: - version "7.0.1" - resolved "http://localhost:4873/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" - integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== - dependencies: - to-regex-range "^5.0.1" - -finalhandler@1.2.0: - version "1.2.0" - resolved "http://localhost:4873/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" - integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== - dependencies: - debug "2.6.9" - encodeurl "~1.0.2" - escape-html "~1.0.3" - on-finished "2.4.1" - parseurl "~1.3.3" - statuses "2.0.1" - unpipe "~1.0.0" - -find-cache-dir@^3.3.1: - version "3.3.2" - resolved "http://localhost:4873/find-cache-dir/-/find-cache-dir-3.3.2.tgz#b30c5b6eff0730731aea9bbd9dbecbd80256d64b" - integrity sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig== - dependencies: - commondir "^1.0.1" - make-dir "^3.0.2" - pkg-dir "^4.1.0" - -find-up@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" - integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== - dependencies: - locate-path "^3.0.0" - -find-up@^4.0.0, find-up@^4.1.0: - version "4.1.0" - resolved "http://localhost:4873/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" - integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== - dependencies: - locate-path "^5.0.0" - path-exists "^4.0.0" - -find-up@^5.0.0: - version "5.0.0" - resolved "http://localhost:4873/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" - integrity 
sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== - dependencies: - locate-path "^6.0.0" - path-exists "^4.0.0" - -flat-cache@^3.0.4: - version "3.0.4" - resolved "http://localhost:4873/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" - integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== - dependencies: - flatted "^3.1.0" - rimraf "^3.0.2" - -flatted@^3.1.0: - version "3.2.7" - resolved "http://localhost:4873/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787" - integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ== - -follow-redirects@^1.0.0: - version "1.15.2" - resolved "http://localhost:4873/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" - integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== - -fork-ts-checker-webpack-plugin@^6.5.0: - version "6.5.2" - resolved "http://localhost:4873/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.2.tgz#4f67183f2f9eb8ba7df7177ce3cf3e75cdafb340" - integrity sha512-m5cUmF30xkZ7h4tWUgTAcEaKmUW7tfyUyTqNNOz7OxWJ0v1VWKTcOvH8FWHUwSjlW/356Ijc9vi3XfcPstpQKA== - dependencies: - "@babel/code-frame" "^7.8.3" - "@types/json-schema" "^7.0.5" - chalk "^4.1.0" - chokidar "^3.4.2" - cosmiconfig "^6.0.0" - deepmerge "^4.2.2" - fs-extra "^9.0.0" - glob "^7.1.6" - memfs "^3.1.2" - minimatch "^3.0.4" - schema-utils "2.7.0" - semver "^7.3.2" - tapable "^1.0.0" - -form-data@^3.0.0: - version "3.0.1" - resolved "http://localhost:4873/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" - integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.8" - mime-types "^2.1.12" - -forwarded@0.2.0: - version "0.2.0" - resolved 
"http://localhost:4873/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" - integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== - -fraction.js@^4.2.0: - version "4.2.0" - resolved "http://localhost:4873/fraction.js/-/fraction.js-4.2.0.tgz#448e5109a313a3527f5a3ab2119ec4cf0e0e2950" - integrity sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA== - -fresh@0.5.2: - version "0.5.2" - resolved "http://localhost:4873/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" - integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== - -fs-extra@^10.0.0: - version "10.1.0" - resolved "http://localhost:4873/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf" - integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== - dependencies: - graceful-fs "^4.2.0" - jsonfile "^6.0.1" - universalify "^2.0.0" - -fs-extra@^9.0.0, fs-extra@^9.0.1: - version "9.1.0" - resolved "http://localhost:4873/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d" - integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== - dependencies: - at-least-node "^1.0.0" - graceful-fs "^4.2.0" - jsonfile "^6.0.1" - universalify "^2.0.0" - -fs-monkey@^1.0.3: - version "1.0.3" - resolved "http://localhost:4873/fs-monkey/-/fs-monkey-1.0.3.tgz#ae3ac92d53bb328efe0e9a1d9541f6ad8d48e2d3" - integrity sha512-cybjIfiiE+pTWicSCLFHSrXZ6EilF30oh91FDP9S2B051prEa7QWfrVTQm10/dDpswBDXZugPa1Ogu8Yh+HV0Q== - -fs.realpath@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" - integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== - -fsevents@^2.3.2, fsevents@~2.3.2: - version "2.3.2" - 
resolved "http://localhost:4873/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" - integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== - -function-bind@^1.1.1: - version "1.1.1" - resolved "http://localhost:4873/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" - integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== - -function.prototype.name@^1.1.5: - version "1.1.5" - resolved "http://localhost:4873/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" - integrity sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.0" - functions-have-names "^1.2.2" - -functions-have-names@^1.2.2: - version "1.2.3" - resolved "http://localhost:4873/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" - integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== - -gensync@^1.0.0-beta.2: - version "1.0.0-beta.2" - resolved "http://localhost:4873/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" - integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== - -get-caller-file@^2.0.5: - version "2.0.5" - resolved "http://localhost:4873/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" - integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== - -get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3: - version "1.1.3" - resolved "http://localhost:4873/get-intrinsic/-/get-intrinsic-1.1.3.tgz#063c84329ad93e83893c7f4f243ef63ffa351385" - integrity 
sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A== - dependencies: - function-bind "^1.1.1" - has "^1.0.3" - has-symbols "^1.0.3" - -get-own-enumerable-property-symbols@^3.0.0: - version "3.0.2" - resolved "http://localhost:4873/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664" - integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g== - -get-package-type@^0.1.0: - version "0.1.0" - resolved "http://localhost:4873/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" - integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== - -get-stream@^6.0.0: - version "6.0.1" - resolved "http://localhost:4873/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" - integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== - -get-symbol-description@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" - integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== - dependencies: - call-bind "^1.0.2" - get-intrinsic "^1.1.1" - -glob-parent@^5.1.2, glob-parent@~5.1.2: - version "5.1.2" - resolved "http://localhost:4873/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" - integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== - dependencies: - is-glob "^4.0.1" - -glob-parent@^6.0.1, glob-parent@^6.0.2: - version "6.0.2" - resolved "http://localhost:4873/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" - integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== - 
dependencies: - is-glob "^4.0.3" - -glob-to-regexp@^0.4.1: - version "0.4.1" - resolved "http://localhost:4873/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e" - integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== - -glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: - version "7.2.3" - resolved "http://localhost:4873/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" - integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.1.1" - once "^1.3.0" - path-is-absolute "^1.0.0" - -global-modules@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" - integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== - dependencies: - global-prefix "^3.0.0" - -global-prefix@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97" - integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== - dependencies: - ini "^1.3.5" - kind-of "^6.0.2" - which "^1.3.1" - -globals@^11.1.0: - version "11.12.0" - resolved "http://localhost:4873/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" - integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== - -globals@^13.15.0: - version "13.17.0" - resolved "http://localhost:4873/globals/-/globals-13.17.0.tgz#902eb1e680a41da93945adbdcb5a9f361ba69bd4" - integrity sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw== - dependencies: - type-fest "^0.20.2" - -globby@^11.0.4, globby@^11.1.0: - version "11.1.0" - resolved 
"http://localhost:4873/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" - integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== - dependencies: - array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.2.9" - ignore "^5.2.0" - merge2 "^1.4.1" - slash "^3.0.0" - -graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: - version "4.2.10" - resolved "http://localhost:4873/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" - integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== - -grapheme-splitter@^1.0.4: - version "1.0.4" - resolved "http://localhost:4873/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz#9cf3a665c6247479896834af35cf1dbb4400767e" - integrity sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ== - -gzip-size@^6.0.0: - version "6.0.0" - resolved "http://localhost:4873/gzip-size/-/gzip-size-6.0.0.tgz#065367fd50c239c0671cbcbad5be3e2eeb10e462" - integrity sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q== - dependencies: - duplexer "^0.1.2" - -handle-thing@^2.0.0: - version "2.0.1" - resolved "http://localhost:4873/handle-thing/-/handle-thing-2.0.1.tgz#857f79ce359580c340d43081cc648970d0bb234e" - integrity sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg== - -harmony-reflect@^1.4.6: - version "1.6.2" - resolved "http://localhost:4873/harmony-reflect/-/harmony-reflect-1.6.2.tgz#31ecbd32e648a34d030d86adb67d4d47547fe710" - integrity sha512-HIp/n38R9kQjDEziXyDTuW3vvoxxyxjxFzXLrBr18uB47GnSt+G9D29fqrpM5ZkspMcPICud3XsBJQ4Y2URg8g== - -has-bigints@^1.0.1, has-bigints@^1.0.2: - version "1.0.2" - resolved "http://localhost:4873/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" - integrity 
sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ== - -has-flag@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" - integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== - -has-flag@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" - integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== - -has-property-descriptors@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz#610708600606d36961ed04c196193b6a607fa861" - integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ== - dependencies: - get-intrinsic "^1.1.1" - -has-symbols@^1.0.1, has-symbols@^1.0.2, has-symbols@^1.0.3: - version "1.0.3" - resolved "http://localhost:4873/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" - integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== - -has-tostringtag@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" - integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== - dependencies: - has-symbols "^1.0.2" - -has@^1.0.3: - version "1.0.3" - resolved "http://localhost:4873/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" - integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== - dependencies: - function-bind "^1.1.1" - -he@^1.2.0: - version "1.2.0" - resolved "http://localhost:4873/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" - integrity 
sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== - -hoopy@^0.1.4: - version "0.1.4" - resolved "http://localhost:4873/hoopy/-/hoopy-0.1.4.tgz#609207d661100033a9a9402ad3dea677381c1b1d" - integrity sha512-HRcs+2mr52W0K+x8RzcLzuPPmVIKMSv97RGHy0Ea9y/mpcaK+xTrjICA04KAHi4GRzxliNqNJEFYWHghy3rSfQ== - -hpack.js@^2.1.6: - version "2.1.6" - resolved "http://localhost:4873/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" - integrity sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ== - dependencies: - inherits "^2.0.1" - obuf "^1.0.0" - readable-stream "^2.0.1" - wbuf "^1.1.0" - -html-encoding-sniffer@^2.0.1: - version "2.0.1" - resolved "http://localhost:4873/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" - integrity sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ== - dependencies: - whatwg-encoding "^1.0.5" - -html-entities@^2.1.0, html-entities@^2.3.2: - version "2.3.3" - resolved "http://localhost:4873/html-entities/-/html-entities-2.3.3.tgz#117d7626bece327fc8baace8868fa6f5ef856e46" - integrity sha512-DV5Ln36z34NNTDgnz0EWGBLZENelNAtkiFA4kyNOG2tDI6Mz1uSWiq1wAKdyjnJwyDiDO7Fa2SO1CTxPXL8VxA== - -html-escaper@^2.0.0: - version "2.0.2" - resolved "http://localhost:4873/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" - integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== - -html-minifier-terser@^6.0.2: - version "6.1.0" - resolved "http://localhost:4873/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#bfc818934cc07918f6b3669f5774ecdfd48f32ab" - integrity sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw== - dependencies: - camel-case "^4.1.2" - clean-css "^5.2.2" - commander "^8.3.0" - he "^1.2.0" - param-case "^3.0.4" - relateurl "^0.2.7" - 
terser "^5.10.0" - -html-webpack-plugin@^5.5.0: - version "5.5.0" - resolved "http://localhost:4873/html-webpack-plugin/-/html-webpack-plugin-5.5.0.tgz#c3911936f57681c1f9f4d8b68c158cd9dfe52f50" - integrity sha512-sy88PC2cRTVxvETRgUHFrL4No3UxvcH8G1NepGhqaTT+GXN2kTamqasot0inS5hXeg1cMbFDt27zzo9p35lZVw== - dependencies: - "@types/html-minifier-terser" "^6.0.0" - html-minifier-terser "^6.0.2" - lodash "^4.17.21" - pretty-error "^4.0.0" - tapable "^2.0.0" - -htmlparser2@^6.1.0: - version "6.1.0" - resolved "http://localhost:4873/htmlparser2/-/htmlparser2-6.1.0.tgz#c4d762b6c3371a05dbe65e94ae43a9f845fb8fb7" - integrity sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A== - dependencies: - domelementtype "^2.0.1" - domhandler "^4.0.0" - domutils "^2.5.2" - entities "^2.0.0" - -http-deceiver@^1.2.7: - version "1.2.7" - resolved "http://localhost:4873/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" - integrity sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw== - -http-errors@2.0.0: - version "2.0.0" - resolved "http://localhost:4873/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" - integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== - dependencies: - depd "2.0.0" - inherits "2.0.4" - setprototypeof "1.2.0" - statuses "2.0.1" - toidentifier "1.0.1" - -http-errors@~1.6.2: - version "1.6.3" - resolved "http://localhost:4873/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" - integrity sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A== - dependencies: - depd "~1.1.2" - inherits "2.0.3" - setprototypeof "1.1.0" - statuses ">= 1.4.0 < 2" - -http-parser-js@>=0.5.1: - version "0.5.8" - resolved "http://localhost:4873/http-parser-js/-/http-parser-js-0.5.8.tgz#af23090d9ac4e24573de6f6aecc9d84a48bf20e3" - 
integrity sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q== - -http-proxy-agent@^4.0.1: - version "4.0.1" - resolved "http://localhost:4873/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz#8a8c8ef7f5932ccf953c296ca8291b95aa74aa3a" - integrity sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg== - dependencies: - "@tootallnate/once" "1" - agent-base "6" - debug "4" - -http-proxy-middleware@^2.0.3: - version "2.0.6" - resolved "http://localhost:4873/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz#e1a4dd6979572c7ab5a4e4b55095d1f32a74963f" - integrity sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw== - dependencies: - "@types/http-proxy" "^1.17.8" - http-proxy "^1.18.1" - is-glob "^4.0.1" - is-plain-obj "^3.0.0" - micromatch "^4.0.2" - -http-proxy@^1.18.1: - version "1.18.1" - resolved "http://localhost:4873/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" - integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== - dependencies: - eventemitter3 "^4.0.0" - follow-redirects "^1.0.0" - requires-port "^1.0.0" - -https-proxy-agent@^5.0.0: - version "5.0.1" - resolved "http://localhost:4873/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" - integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA== - dependencies: - agent-base "6" - debug "4" - -human-signals@^2.1.0: - version "2.1.0" - resolved "http://localhost:4873/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" - integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== - -iconv-lite@0.4.24: - version "0.4.24" - resolved "http://localhost:4873/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" - integrity 
sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== - dependencies: - safer-buffer ">= 2.1.2 < 3" - -iconv-lite@^0.6.3: - version "0.6.3" - resolved "http://localhost:4873/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" - integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== - dependencies: - safer-buffer ">= 2.1.2 < 3.0.0" - -icss-utils@^5.0.0, icss-utils@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/icss-utils/-/icss-utils-5.1.0.tgz#c6be6858abd013d768e98366ae47e25d5887b1ae" - integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA== - -idb@^7.0.1: - version "7.1.0" - resolved "http://localhost:4873/idb/-/idb-7.1.0.tgz#2cc886be57738419e57f9aab58f647e5e2160270" - integrity sha512-Wsk07aAxDsntgYJY4h0knZJuTxM73eQ4reRAO+Z1liOh8eMCJ/MoDS8fCui1vGT9mnjtl1sOu3I2i/W1swPYZg== - -identity-obj-proxy@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/identity-obj-proxy/-/identity-obj-proxy-3.0.0.tgz#94d2bda96084453ef36fbc5aaec37e0f79f1fc14" - integrity sha512-00n6YnVHKrinT9t0d9+5yZC6UBNJANpYEQvL2LlX6Ab9lnmxzIRcEmTPuyGScvl1+jKuCICX1Z0Ab1pPKKdikA== - dependencies: - harmony-reflect "^1.4.6" - -ignore@^5.2.0: - version "5.2.0" - resolved "http://localhost:4873/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" - integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== - -immer@^9.0.7: - version "9.0.15" - resolved "http://localhost:4873/immer/-/immer-9.0.15.tgz#0b9169e5b1d22137aba7d43f8a81a495dd1b62dc" - integrity sha512-2eB/sswms9AEUSkOm4SbV5Y7Vmt/bKRwByd52jfLkW4OLYeaTP3EEiJ9agqU0O/tq6Dk62Zfj+TJSqfm1rLVGQ== - -import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1: - version "3.3.0" - resolved "http://localhost:4873/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" - integrity 
sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== - dependencies: - parent-module "^1.0.0" - resolve-from "^4.0.0" - -import-local@^3.0.2: - version "3.1.0" - resolved "http://localhost:4873/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" - integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg== - dependencies: - pkg-dir "^4.2.0" - resolve-cwd "^3.0.0" - -imurmurhash@^0.1.4: - version "0.1.4" - resolved "http://localhost:4873/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" - integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== - -indent-string@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" - integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== - -inflight@^1.0.4: - version "1.0.6" - resolved "http://localhost:4873/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" - integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== - dependencies: - once "^1.3.0" - wrappy "1" - -inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.3: - version "2.0.4" - resolved "http://localhost:4873/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" - integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== - -inherits@2.0.3: - version "2.0.3" - resolved "http://localhost:4873/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" - integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw== - -ini@^1.3.5: - version "1.3.8" - resolved 
"http://localhost:4873/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" - integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== - -internal-slot@^1.0.3: - version "1.0.3" - resolved "http://localhost:4873/internal-slot/-/internal-slot-1.0.3.tgz#7347e307deeea2faac2ac6205d4bc7d34967f59c" - integrity sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA== - dependencies: - get-intrinsic "^1.1.0" - has "^1.0.3" - side-channel "^1.0.4" - -ipaddr.js@1.9.1: - version "1.9.1" - resolved "http://localhost:4873/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" - integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== - -ipaddr.js@^2.0.1: - version "2.0.1" - resolved "http://localhost:4873/ipaddr.js/-/ipaddr.js-2.0.1.tgz#eca256a7a877e917aeb368b0a7497ddf42ef81c0" - integrity sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng== - -is-arrayish@^0.2.1: - version "0.2.1" - resolved "http://localhost:4873/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" - integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== - -is-bigint@^1.0.1: - version "1.0.4" - resolved "http://localhost:4873/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" - integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== - dependencies: - has-bigints "^1.0.1" - -is-binary-path@~2.1.0: - version "2.1.0" - resolved "http://localhost:4873/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" - integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== - dependencies: - binary-extensions "^2.0.0" - -is-boolean-object@^1.1.0: - version "1.1.2" - resolved 
"http://localhost:4873/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" - integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== - dependencies: - call-bind "^1.0.2" - has-tostringtag "^1.0.0" - -is-callable@^1.1.4, is-callable@^1.2.7: - version "1.2.7" - resolved "http://localhost:4873/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" - integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== - -is-core-module@^2.8.1, is-core-module@^2.9.0: - version "2.10.0" - resolved "http://localhost:4873/is-core-module/-/is-core-module-2.10.0.tgz#9012ede0a91c69587e647514e1d5277019e728ed" - integrity sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg== - dependencies: - has "^1.0.3" - -is-date-object@^1.0.1: - version "1.0.5" - resolved "http://localhost:4873/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" - integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== - dependencies: - has-tostringtag "^1.0.0" - -is-docker@^2.0.0, is-docker@^2.1.1: - version "2.2.1" - resolved "http://localhost:4873/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" - integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== - -is-extglob@^2.1.1: - version "2.1.1" - resolved "http://localhost:4873/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" - integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== - -is-fullwidth-code-point@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" - integrity 
sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== - -is-generator-fn@^2.0.0: - version "2.1.0" - resolved "http://localhost:4873/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" - integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== - -is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: - version "4.0.3" - resolved "http://localhost:4873/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" - integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== - dependencies: - is-extglob "^2.1.1" - -is-module@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591" - integrity sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g== - -is-negative-zero@^2.0.2: - version "2.0.2" - resolved "http://localhost:4873/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" - integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== - -is-number-object@^1.0.4: - version "1.0.7" - resolved "http://localhost:4873/is-number-object/-/is-number-object-1.0.7.tgz#59d50ada4c45251784e9904f5246c742f07a42fc" - integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ== - dependencies: - has-tostringtag "^1.0.0" - -is-number@^7.0.0: - version "7.0.0" - resolved "http://localhost:4873/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" - integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== - -is-obj@^1.0.1: - version "1.0.1" - resolved "http://localhost:4873/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" - integrity 
sha512-l4RyHgRqGN4Y3+9JHVrNqO+tN0rV5My76uW5/nuO4K1b6vw5G8d/cmFjP9tRfEsdhZNt0IFdZuK/c2Vr4Nb+Qg== - -is-plain-obj@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/is-plain-obj/-/is-plain-obj-3.0.0.tgz#af6f2ea14ac5a646183a5bbdb5baabbc156ad9d7" - integrity sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA== - -is-plain-object@^2.0.4: - version "2.0.4" - resolved "http://localhost:4873/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" - integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== - dependencies: - isobject "^3.0.1" - -is-potential-custom-element-name@^1.0.1: - version "1.0.1" - resolved "http://localhost:4873/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" - integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== - -is-regex@^1.1.4: - version "1.1.4" - resolved "http://localhost:4873/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" - integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== - dependencies: - call-bind "^1.0.2" - has-tostringtag "^1.0.0" - -is-regexp@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/is-regexp/-/is-regexp-1.0.0.tgz#fd2d883545c46bac5a633e7b9a09e87fa2cb5069" - integrity sha512-7zjFAPO4/gwyQAAgRRmqeEeyIICSdmCqa3tsVHMdBzaXXRiqopZL4Cyghg/XulGWrtABTpbnYYzzIRffLkP4oA== - -is-root@^2.1.0: - version "2.1.0" - resolved "http://localhost:4873/is-root/-/is-root-2.1.0.tgz#809e18129cf1129644302a4f8544035d51984a9c" - integrity sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg== - -is-shared-array-buffer@^1.0.2: - version "1.0.2" - resolved "http://localhost:4873/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" 
- integrity sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA== - dependencies: - call-bind "^1.0.2" - -is-stream@^2.0.0: - version "2.0.1" - resolved "http://localhost:4873/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" - integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== - -is-string@^1.0.5, is-string@^1.0.7: - version "1.0.7" - resolved "http://localhost:4873/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" - integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== - dependencies: - has-tostringtag "^1.0.0" - -is-symbol@^1.0.2, is-symbol@^1.0.3: - version "1.0.4" - resolved "http://localhost:4873/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" - integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== - dependencies: - has-symbols "^1.0.2" - -is-typedarray@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" - integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA== - -is-weakref@^1.0.2: - version "1.0.2" - resolved "http://localhost:4873/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" - integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== - dependencies: - call-bind "^1.0.2" - -is-wsl@^2.2.0: - version "2.2.0" - resolved "http://localhost:4873/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" - integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== - dependencies: - is-docker "^2.0.0" - -isarray@~1.0.0: - version "1.0.0" - resolved 
"http://localhost:4873/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" - integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== - -isexe@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" - integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== - -isobject@^3.0.1: - version "3.0.1" - resolved "http://localhost:4873/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" - integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== - -istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: - version "3.2.0" - resolved "http://localhost:4873/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" - integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== - -istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0: - version "5.2.1" - resolved "http://localhost:4873/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz#d10c8885c2125574e1c231cacadf955675e1ce3d" - integrity sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg== - dependencies: - "@babel/core" "^7.12.3" - "@babel/parser" "^7.14.7" - "@istanbuljs/schema" "^0.1.2" - istanbul-lib-coverage "^3.2.0" - semver "^6.3.0" - -istanbul-lib-report@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" - integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== - dependencies: - istanbul-lib-coverage "^3.0.0" - make-dir "^3.0.0" - supports-color "^7.1.0" - -istanbul-lib-source-maps@^4.0.0: - version "4.0.1" - resolved 
"http://localhost:4873/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" - integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== - dependencies: - debug "^4.1.1" - istanbul-lib-coverage "^3.0.0" - source-map "^0.6.1" - -istanbul-reports@^3.1.3: - version "3.1.5" - resolved "http://localhost:4873/istanbul-reports/-/istanbul-reports-3.1.5.tgz#cc9a6ab25cb25659810e4785ed9d9fb742578bae" - integrity sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w== - dependencies: - html-escaper "^2.0.0" - istanbul-lib-report "^3.0.0" - -jake@^10.8.5: - version "10.8.5" - resolved "http://localhost:4873/jake/-/jake-10.8.5.tgz#f2183d2c59382cb274226034543b9c03b8164c46" - integrity sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw== - dependencies: - async "^3.2.3" - chalk "^4.0.2" - filelist "^1.0.1" - minimatch "^3.0.4" - -jest-changed-files@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-changed-files/-/jest-changed-files-27.5.1.tgz#a348aed00ec9bf671cc58a66fcbe7c3dfd6a68f5" - integrity sha512-buBLMiByfWGCoMsLLzGUUSpAmIAGnbR2KJoMN10ziLhOLvP4e0SlypHnAel8iqQXTrcbmfEY9sSqae5sgUsTvw== - dependencies: - "@jest/types" "^27.5.1" - execa "^5.0.0" - throat "^6.0.1" - -jest-circus@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-circus/-/jest-circus-27.5.1.tgz#37a5a4459b7bf4406e53d637b49d22c65d125ecc" - integrity sha512-D95R7x5UtlMA5iBYsOHFFbMD/GVA4R/Kdq15f7xYWUfWHBto9NYRsOvnSauTgdF+ogCpJ4tyKOXhUifxS65gdw== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - co "^4.6.0" - dedent "^0.7.0" - expect "^27.5.1" - is-generator-fn "^2.0.0" - jest-each "^27.5.1" - jest-matcher-utils "^27.5.1" - jest-message-util "^27.5.1" - jest-runtime "^27.5.1" - jest-snapshot "^27.5.1" - jest-util 
"^27.5.1" - pretty-format "^27.5.1" - slash "^3.0.0" - stack-utils "^2.0.3" - throat "^6.0.1" - -jest-cli@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-cli/-/jest-cli-27.5.1.tgz#278794a6e6458ea8029547e6c6cbf673bd30b145" - integrity sha512-Hc6HOOwYq4/74/c62dEE3r5elx8wjYqxY0r0G/nFrLDPMFRu6RA/u8qINOIkvhxG7mMQ5EJsOGfRpI8L6eFUVw== - dependencies: - "@jest/core" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/types" "^27.5.1" - chalk "^4.0.0" - exit "^0.1.2" - graceful-fs "^4.2.9" - import-local "^3.0.2" - jest-config "^27.5.1" - jest-util "^27.5.1" - jest-validate "^27.5.1" - prompts "^2.0.1" - yargs "^16.2.0" - -jest-config@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-config/-/jest-config-27.5.1.tgz#5c387de33dca3f99ad6357ddeccd91bf3a0e4a41" - integrity sha512-5sAsjm6tGdsVbW9ahcChPAFCk4IlkQUknH5AvKjuLTSlcO/wCZKyFdn7Rg0EkC+OGgWODEy2hDpWB1PgzH0JNA== - dependencies: - "@babel/core" "^7.8.0" - "@jest/test-sequencer" "^27.5.1" - "@jest/types" "^27.5.1" - babel-jest "^27.5.1" - chalk "^4.0.0" - ci-info "^3.2.0" - deepmerge "^4.2.2" - glob "^7.1.1" - graceful-fs "^4.2.9" - jest-circus "^27.5.1" - jest-environment-jsdom "^27.5.1" - jest-environment-node "^27.5.1" - jest-get-type "^27.5.1" - jest-jasmine2 "^27.5.1" - jest-regex-util "^27.5.1" - jest-resolve "^27.5.1" - jest-runner "^27.5.1" - jest-util "^27.5.1" - jest-validate "^27.5.1" - micromatch "^4.0.4" - parse-json "^5.2.0" - pretty-format "^27.5.1" - slash "^3.0.0" - strip-json-comments "^3.1.1" - -jest-diff@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-diff/-/jest-diff-27.5.1.tgz#a07f5011ac9e6643cf8a95a462b7b1ecf6680def" - integrity sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw== - dependencies: - chalk "^4.0.0" - diff-sequences "^27.5.1" - jest-get-type "^27.5.1" - pretty-format "^27.5.1" - -jest-diff@^29.1.2: - version "29.1.2" - resolved 
"http://localhost:4873/jest-diff/-/jest-diff-29.1.2.tgz#bb7aaf5353227d6f4f96c5e7e8713ce576a607dc" - integrity sha512-4GQts0aUopVvecIT4IwD/7xsBaMhKTYoM4/njE/aVw9wpw+pIUVp8Vab/KnSzSilr84GnLBkaP3JLDnQYCKqVQ== - dependencies: - chalk "^4.0.0" - diff-sequences "^29.0.0" - jest-get-type "^29.0.0" - pretty-format "^29.1.2" - -jest-docblock@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-docblock/-/jest-docblock-27.5.1.tgz#14092f364a42c6108d42c33c8cf30e058e25f6c0" - integrity sha512-rl7hlABeTsRYxKiUfpHrQrG4e2obOiTQWfMEH3PxPjOtdsfLQO4ReWSZaQ7DETm4xu07rl4q/h4zcKXyU0/OzQ== - dependencies: - detect-newline "^3.0.0" - -jest-each@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-each/-/jest-each-27.5.1.tgz#5bc87016f45ed9507fed6e4702a5b468a5b2c44e" - integrity sha512-1Ff6p+FbhT/bXQnEouYy00bkNSY7OUpfIcmdl8vZ31A1UUaurOLPA8a8BbJOF2RDUElwJhmeaV7LnagI+5UwNQ== - dependencies: - "@jest/types" "^27.5.1" - chalk "^4.0.0" - jest-get-type "^27.5.1" - jest-util "^27.5.1" - pretty-format "^27.5.1" - -jest-environment-jsdom@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-environment-jsdom/-/jest-environment-jsdom-27.5.1.tgz#ea9ccd1fc610209655a77898f86b2b559516a546" - integrity sha512-TFBvkTC1Hnnnrka/fUb56atfDtJ9VMZ94JkjTbggl1PEpwrYtUBKMezB3inLmWqQsXYLcMwNoDQwoBTAvFfsfw== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/fake-timers" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - jest-mock "^27.5.1" - jest-util "^27.5.1" - jsdom "^16.6.0" - -jest-environment-node@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-environment-node/-/jest-environment-node-27.5.1.tgz#dedc2cfe52fab6b8f5714b4808aefa85357a365e" - integrity sha512-Jt4ZUnxdOsTGwSRAfKEnE6BcwsSPNOijjwifq5sDFSA2kesnXTvNqKHYgM0hDq3549Uf/KzdXNYn4wMZJPlFLw== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/fake-timers" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - jest-mock "^27.5.1" - jest-util "^27.5.1" - -jest-get-type@^27.5.1: - 
version "27.5.1" - resolved "http://localhost:4873/jest-get-type/-/jest-get-type-27.5.1.tgz#3cd613c507b0f7ace013df407a1c1cd578bcb4f1" - integrity sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw== - -jest-get-type@^29.0.0: - version "29.0.0" - resolved "http://localhost:4873/jest-get-type/-/jest-get-type-29.0.0.tgz#843f6c50a1b778f7325df1129a0fd7aa713aef80" - integrity sha512-83X19z/HuLKYXYHskZlBAShO7UfLFXu/vWajw9ZNJASN32li8yHMaVGAQqxFW1RCFOkB7cubaL6FaJVQqqJLSw== - -jest-haste-map@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-haste-map/-/jest-haste-map-27.5.1.tgz#9fd8bd7e7b4fa502d9c6164c5640512b4e811e7f" - integrity sha512-7GgkZ4Fw4NFbMSDSpZwXeBiIbx+t/46nJ2QitkOjvwPYyZmqttu2TDSimMHP1EkPOi4xUZAN1doE5Vd25H4Jng== - dependencies: - "@jest/types" "^27.5.1" - "@types/graceful-fs" "^4.1.2" - "@types/node" "*" - anymatch "^3.0.3" - fb-watchman "^2.0.0" - graceful-fs "^4.2.9" - jest-regex-util "^27.5.1" - jest-serializer "^27.5.1" - jest-util "^27.5.1" - jest-worker "^27.5.1" - micromatch "^4.0.4" - walker "^1.0.7" - optionalDependencies: - fsevents "^2.3.2" - -jest-jasmine2@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-jasmine2/-/jest-jasmine2-27.5.1.tgz#a037b0034ef49a9f3d71c4375a796f3b230d1ac4" - integrity sha512-jtq7VVyG8SqAorDpApwiJJImd0V2wv1xzdheGHRGyuT7gZm6gG47QEskOlzsN1PG/6WNaCo5pmwMHDf3AkG2pQ== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/source-map" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - co "^4.6.0" - expect "^27.5.1" - is-generator-fn "^2.0.0" - jest-each "^27.5.1" - jest-matcher-utils "^27.5.1" - jest-message-util "^27.5.1" - jest-runtime "^27.5.1" - jest-snapshot "^27.5.1" - jest-util "^27.5.1" - pretty-format "^27.5.1" - throat "^6.0.1" - -jest-leak-detector@^27.5.1: - version "27.5.1" - resolved 
"http://localhost:4873/jest-leak-detector/-/jest-leak-detector-27.5.1.tgz#6ec9d54c3579dd6e3e66d70e3498adf80fde3fb8" - integrity sha512-POXfWAMvfU6WMUXftV4HolnJfnPOGEu10fscNCA76KBpRRhcMN2c8d3iT2pxQS3HLbA+5X4sOUPzYO2NUyIlHQ== - dependencies: - jest-get-type "^27.5.1" - pretty-format "^27.5.1" - -jest-matcher-utils@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz#9c0cdbda8245bc22d2331729d1091308b40cf8ab" - integrity sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw== - dependencies: - chalk "^4.0.0" - jest-diff "^27.5.1" - jest-get-type "^27.5.1" - pretty-format "^27.5.1" - -jest-matcher-utils@^29.1.2: - version "29.1.2" - resolved "http://localhost:4873/jest-matcher-utils/-/jest-matcher-utils-29.1.2.tgz#e68c4bcc0266e70aa1a5c13fb7b8cd4695e318a1" - integrity sha512-MV5XrD3qYSW2zZSHRRceFzqJ39B2z11Qv0KPyZYxnzDHFeYZGJlgGi0SW+IXSJfOewgJp/Km/7lpcFT+cgZypw== - dependencies: - chalk "^4.0.0" - jest-diff "^29.1.2" - jest-get-type "^29.0.0" - pretty-format "^29.1.2" - -jest-message-util@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-message-util/-/jest-message-util-27.5.1.tgz#bdda72806da10d9ed6425e12afff38cd1458b6cf" - integrity sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g== - dependencies: - "@babel/code-frame" "^7.12.13" - "@jest/types" "^27.5.1" - "@types/stack-utils" "^2.0.0" - chalk "^4.0.0" - graceful-fs "^4.2.9" - micromatch "^4.0.4" - pretty-format "^27.5.1" - slash "^3.0.0" - stack-utils "^2.0.3" - -jest-message-util@^28.1.3: - version "28.1.3" - resolved "http://localhost:4873/jest-message-util/-/jest-message-util-28.1.3.tgz#232def7f2e333f1eecc90649b5b94b0055e7c43d" - integrity sha512-PFdn9Iewbt575zKPf1286Ht9EPoJmYT7P0kY+RibeYZ2XtOr53pDLEFoTWXbd1h4JiGiWpTBC84fc8xMXQMb7g== - dependencies: - "@babel/code-frame" "^7.12.13" - "@jest/types" "^28.1.3" - "@types/stack-utils" "^2.0.0" - chalk "^4.0.0" 
- graceful-fs "^4.2.9" - micromatch "^4.0.4" - pretty-format "^28.1.3" - slash "^3.0.0" - stack-utils "^2.0.3" - -jest-message-util@^29.1.2: - version "29.1.2" - resolved "http://localhost:4873/jest-message-util/-/jest-message-util-29.1.2.tgz#c21a33c25f9dc1ebfcd0f921d89438847a09a501" - integrity sha512-9oJ2Os+Qh6IlxLpmvshVbGUiSkZVc2FK+uGOm6tghafnB2RyjKAxMZhtxThRMxfX1J1SOMhTn9oK3/MutRWQJQ== - dependencies: - "@babel/code-frame" "^7.12.13" - "@jest/types" "^29.1.2" - "@types/stack-utils" "^2.0.0" - chalk "^4.0.0" - graceful-fs "^4.2.9" - micromatch "^4.0.4" - pretty-format "^29.1.2" - slash "^3.0.0" - stack-utils "^2.0.3" - -jest-mock@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-mock/-/jest-mock-27.5.1.tgz#19948336d49ef4d9c52021d34ac7b5f36ff967d6" - integrity sha512-K4jKbY1d4ENhbrG2zuPWaQBvDly+iZ2yAW+T1fATN78hc0sInwn7wZB8XtlNnvHug5RMwV897Xm4LqmPM4e2Og== - dependencies: - "@jest/types" "^27.5.1" - "@types/node" "*" - -jest-pnp-resolver@^1.2.2: - version "1.2.2" - resolved "http://localhost:4873/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c" - integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== - -jest-regex-util@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-regex-util/-/jest-regex-util-27.5.1.tgz#4da143f7e9fd1e542d4aa69617b38e4a78365b95" - integrity sha512-4bfKq2zie+x16okqDXjXn9ql2B0dScQu+vcwe4TvFVhkVyuWLqpZrZtXxLLWoXYgn0E87I6r6GRYHF7wFZBUvg== - -jest-regex-util@^28.0.0: - version "28.0.2" - resolved "http://localhost:4873/jest-regex-util/-/jest-regex-util-28.0.2.tgz#afdc377a3b25fb6e80825adcf76c854e5bf47ead" - integrity sha512-4s0IgyNIy0y9FK+cjoVYoxamT7Zeo7MhzqRGx7YDYmaQn1wucY9rotiGkBzzcMXTtjrCAP/f7f+E0F7+fxPNdw== - -jest-resolve-dependencies@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-resolve-dependencies/-/jest-resolve-dependencies-27.5.1.tgz#d811ecc8305e731cc86dd79741ee98fed06f1da8" - integrity 
sha512-QQOOdY4PE39iawDn5rzbIePNigfe5B9Z91GDD1ae/xNDlu9kaat8QQ5EKnNmVWPV54hUdxCVwwj6YMgR2O7IOg== - dependencies: - "@jest/types" "^27.5.1" - jest-regex-util "^27.5.1" - jest-snapshot "^27.5.1" - -jest-resolve@^27.4.2, jest-resolve@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-resolve/-/jest-resolve-27.5.1.tgz#a2f1c5a0796ec18fe9eb1536ac3814c23617b384" - integrity sha512-FFDy8/9E6CV83IMbDpcjOhumAQPDyETnU2KZ1O98DwTnz8AOBsW/Xv3GySr1mOZdItLR+zDZ7I/UdTFbgSOVCw== - dependencies: - "@jest/types" "^27.5.1" - chalk "^4.0.0" - graceful-fs "^4.2.9" - jest-haste-map "^27.5.1" - jest-pnp-resolver "^1.2.2" - jest-util "^27.5.1" - jest-validate "^27.5.1" - resolve "^1.20.0" - resolve.exports "^1.1.0" - slash "^3.0.0" - -jest-runner@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-runner/-/jest-runner-27.5.1.tgz#071b27c1fa30d90540805c5645a0ec167c7b62e5" - integrity sha512-g4NPsM4mFCOwFKXO4p/H/kWGdJp9V8kURY2lX8Me2drgXqG7rrZAx5kv+5H7wtt/cdFIjhqYx1HrlqWHaOvDaQ== - dependencies: - "@jest/console" "^27.5.1" - "@jest/environment" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - emittery "^0.8.1" - graceful-fs "^4.2.9" - jest-docblock "^27.5.1" - jest-environment-jsdom "^27.5.1" - jest-environment-node "^27.5.1" - jest-haste-map "^27.5.1" - jest-leak-detector "^27.5.1" - jest-message-util "^27.5.1" - jest-resolve "^27.5.1" - jest-runtime "^27.5.1" - jest-util "^27.5.1" - jest-worker "^27.5.1" - source-map-support "^0.5.6" - throat "^6.0.1" - -jest-runtime@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-runtime/-/jest-runtime-27.5.1.tgz#4896003d7a334f7e8e4a53ba93fb9bcd3db0a1af" - integrity sha512-o7gxw3Gf+H2IGt8fv0RiyE1+r83FJBRruoA+FXrlHw6xEyBsU8ugA6IPfTdVyA0w8HClpbK+DGJxH59UrNMx8A== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/fake-timers" "^27.5.1" - "@jest/globals" "^27.5.1" - "@jest/source-map" "^27.5.1" - "@jest/test-result" 
"^27.5.1" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - chalk "^4.0.0" - cjs-module-lexer "^1.0.0" - collect-v8-coverage "^1.0.0" - execa "^5.0.0" - glob "^7.1.3" - graceful-fs "^4.2.9" - jest-haste-map "^27.5.1" - jest-message-util "^27.5.1" - jest-mock "^27.5.1" - jest-regex-util "^27.5.1" - jest-resolve "^27.5.1" - jest-snapshot "^27.5.1" - jest-util "^27.5.1" - slash "^3.0.0" - strip-bom "^4.0.0" - -jest-serializer@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-serializer/-/jest-serializer-27.5.1.tgz#81438410a30ea66fd57ff730835123dea1fb1f64" - integrity sha512-jZCyo6iIxO1aqUxpuBlwTDMkzOAJS4a3eYz3YzgxxVQFwLeSA7Jfq5cbqCY+JLvTDrWirgusI/0KwxKMgrdf7w== - dependencies: - "@types/node" "*" - graceful-fs "^4.2.9" - -jest-snapshot@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-snapshot/-/jest-snapshot-27.5.1.tgz#b668d50d23d38054a51b42c4039cab59ae6eb6a1" - integrity sha512-yYykXI5a0I31xX67mgeLw1DZ0bJB+gpq5IpSuCAoyDi0+BhgU/RIrL+RTzDmkNTchvDFWKP8lp+w/42Z3us5sA== - dependencies: - "@babel/core" "^7.7.2" - "@babel/generator" "^7.7.2" - "@babel/plugin-syntax-typescript" "^7.7.2" - "@babel/traverse" "^7.7.2" - "@babel/types" "^7.0.0" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/babel__traverse" "^7.0.4" - "@types/prettier" "^2.1.5" - babel-preset-current-node-syntax "^1.0.0" - chalk "^4.0.0" - expect "^27.5.1" - graceful-fs "^4.2.9" - jest-diff "^27.5.1" - jest-get-type "^27.5.1" - jest-haste-map "^27.5.1" - jest-matcher-utils "^27.5.1" - jest-message-util "^27.5.1" - jest-util "^27.5.1" - natural-compare "^1.4.0" - pretty-format "^27.5.1" - semver "^7.3.2" - -jest-util@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-util/-/jest-util-27.5.1.tgz#3ba9771e8e31a0b85da48fe0b0891fb86c01c2f9" - integrity sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw== - dependencies: - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - ci-info 
"^3.2.0" - graceful-fs "^4.2.9" - picomatch "^2.2.3" - -jest-util@^28.1.3: - version "28.1.3" - resolved "http://localhost:4873/jest-util/-/jest-util-28.1.3.tgz#f4f932aa0074f0679943220ff9cbba7e497028b0" - integrity sha512-XdqfpHwpcSRko/C35uLYFM2emRAltIIKZiJ9eAmhjsj0CqZMa0p1ib0R5fWIqGhn1a103DebTbpqIaP1qCQ6tQ== - dependencies: - "@jest/types" "^28.1.3" - "@types/node" "*" - chalk "^4.0.0" - ci-info "^3.2.0" - graceful-fs "^4.2.9" - picomatch "^2.2.3" - -jest-util@^29.1.2: - version "29.1.2" - resolved "http://localhost:4873/jest-util/-/jest-util-29.1.2.tgz#ac5798e93cb6a6703084e194cfa0898d66126df1" - integrity sha512-vPCk9F353i0Ymx3WQq3+a4lZ07NXu9Ca8wya6o4Fe4/aO1e1awMMprZ3woPFpKwghEOW+UXgd15vVotuNN9ONQ== - dependencies: - "@jest/types" "^29.1.2" - "@types/node" "*" - chalk "^4.0.0" - ci-info "^3.2.0" - graceful-fs "^4.2.9" - picomatch "^2.2.3" - -jest-validate@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-validate/-/jest-validate-27.5.1.tgz#9197d54dc0bdb52260b8db40b46ae668e04df067" - integrity sha512-thkNli0LYTmOI1tDB3FI1S1RTp/Bqyd9pTarJwL87OIBFuqEb5Apv5EaApEudYg4g86e3CT6kM0RowkhtEnCBQ== - dependencies: - "@jest/types" "^27.5.1" - camelcase "^6.2.0" - chalk "^4.0.0" - jest-get-type "^27.5.1" - leven "^3.1.0" - pretty-format "^27.5.1" - -jest-watch-typeahead@^1.0.0: - version "1.1.0" - resolved "http://localhost:4873/jest-watch-typeahead/-/jest-watch-typeahead-1.1.0.tgz#b4a6826dfb9c9420da2f7bc900de59dad11266a9" - integrity sha512-Va5nLSJTN7YFtC2jd+7wsoe1pNe5K4ShLux/E5iHEwlB9AxaxmggY7to9KUqKojhaJw3aXqt5WAb4jGPOolpEw== - dependencies: - ansi-escapes "^4.3.1" - chalk "^4.0.0" - jest-regex-util "^28.0.0" - jest-watcher "^28.0.0" - slash "^4.0.0" - string-length "^5.0.1" - strip-ansi "^7.0.1" - -jest-watcher@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-watcher/-/jest-watcher-27.5.1.tgz#71bd85fb9bde3a2c2ec4dc353437971c43c642a2" - integrity 
sha512-z676SuD6Z8o8qbmEGhoEUFOM1+jfEiL3DXHK/xgEiG2EyNYfFG60jluWcupY6dATjfEsKQuibReS1djInQnoVw== - dependencies: - "@jest/test-result" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - ansi-escapes "^4.2.1" - chalk "^4.0.0" - jest-util "^27.5.1" - string-length "^4.0.1" - -jest-watcher@^28.0.0: - version "28.1.3" - resolved "http://localhost:4873/jest-watcher/-/jest-watcher-28.1.3.tgz#c6023a59ba2255e3b4c57179fc94164b3e73abd4" - integrity sha512-t4qcqj9hze+jviFPUN3YAtAEeFnr/azITXQEMARf5cMwKY2SMBRnCQTXLixTl20OR6mLh9KLMrgVJgJISym+1g== - dependencies: - "@jest/test-result" "^28.1.3" - "@jest/types" "^28.1.3" - "@types/node" "*" - ansi-escapes "^4.2.1" - chalk "^4.0.0" - emittery "^0.10.2" - jest-util "^28.1.3" - string-length "^4.0.1" - -jest-worker@^26.2.1: - version "26.6.2" - resolved "http://localhost:4873/jest-worker/-/jest-worker-26.6.2.tgz#7f72cbc4d643c365e27b9fd775f9d0eaa9c7a8ed" - integrity sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ== - dependencies: - "@types/node" "*" - merge-stream "^2.0.0" - supports-color "^7.0.0" - -jest-worker@^27.0.2, jest-worker@^27.4.5, jest-worker@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0" - integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg== - dependencies: - "@types/node" "*" - merge-stream "^2.0.0" - supports-color "^8.0.0" - -jest-worker@^28.0.2: - version "28.1.3" - resolved "http://localhost:4873/jest-worker/-/jest-worker-28.1.3.tgz#7e3c4ce3fa23d1bb6accb169e7f396f98ed4bb98" - integrity sha512-CqRA220YV/6jCo8VWvAt1KKx6eek1VIHMPeLEbpcfSfkEeWyBNppynM/o6q+Wmw+sOhos2ml34wZbSX3G13//g== - dependencies: - "@types/node" "*" - merge-stream "^2.0.0" - supports-color "^8.0.0" - -jest@^27.4.3: - version "27.5.1" - resolved "http://localhost:4873/jest/-/jest-27.5.1.tgz#dadf33ba70a779be7a6fc33015843b51494f63fc" - integrity 
sha512-Yn0mADZB89zTtjkPJEXwrac3LHudkQMR+Paqa8uxJHCBr9agxztUifWCyiYrjhMPBoUVBjyny0I7XH6ozDr7QQ== - dependencies: - "@jest/core" "^27.5.1" - import-local "^3.0.2" - jest-cli "^27.5.1" - -js-sdsl@^4.1.4: - version "4.1.5" - resolved "http://localhost:4873/js-sdsl/-/js-sdsl-4.1.5.tgz#1ff1645e6b4d1b028cd3f862db88c9d887f26e2a" - integrity sha512-08bOAKweV2NUC1wqTtf3qZlnpOX/R2DU9ikpjOHs0H+ibQv3zpncVQg6um4uYtRtrwIX8M4Nh3ytK4HGlYAq7Q== - -"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" - integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== - -js-yaml@^3.13.1: - version "3.14.1" - resolved "http://localhost:4873/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" - integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== - dependencies: - argparse "^1.0.7" - esprima "^4.0.0" - -js-yaml@^4.1.0: - version "4.1.0" - resolved "http://localhost:4873/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" - integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== - dependencies: - argparse "^2.0.1" - -jsdom@^16.6.0: - version "16.7.0" - resolved "http://localhost:4873/jsdom/-/jsdom-16.7.0.tgz#918ae71965424b197c819f8183a754e18977b710" - integrity sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw== - dependencies: - abab "^2.0.5" - acorn "^8.2.4" - acorn-globals "^6.0.0" - cssom "^0.4.4" - cssstyle "^2.3.0" - data-urls "^2.0.0" - decimal.js "^10.2.1" - domexception "^2.0.1" - escodegen "^2.0.0" - form-data "^3.0.0" - html-encoding-sniffer "^2.0.1" - http-proxy-agent "^4.0.1" - https-proxy-agent "^5.0.0" - is-potential-custom-element-name "^1.0.1" - nwsapi "^2.2.0" - parse5 "6.0.1" - saxes "^5.0.1" - symbol-tree "^3.2.4" - tough-cookie 
"^4.0.0" - w3c-hr-time "^1.0.2" - w3c-xmlserializer "^2.0.0" - webidl-conversions "^6.1.0" - whatwg-encoding "^1.0.5" - whatwg-mimetype "^2.3.0" - whatwg-url "^8.5.0" - ws "^7.4.6" - xml-name-validator "^3.0.0" - -jsesc@^2.5.1: - version "2.5.2" - resolved "http://localhost:4873/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" - integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== - -jsesc@~0.5.0: - version "0.5.0" - resolved "http://localhost:4873/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" - integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA== - -json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1: - version "2.3.1" - resolved "http://localhost:4873/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" - integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== - -json-schema-traverse@^0.4.1: - version "0.4.1" - resolved "http://localhost:4873/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" - integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== - -json-schema-traverse@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" - integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== - -json-schema@^0.4.0: - version "0.4.0" - resolved "http://localhost:4873/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" - integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== - -json-stable-stringify-without-jsonify@^1.0.1: - version "1.0.1" - resolved 
"http://localhost:4873/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" - integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== - -json5@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593" - integrity sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA== - dependencies: - minimist "^1.2.0" - -json5@^2.1.2, json5@^2.2.0, json5@^2.2.1: - version "2.2.1" - resolved "http://localhost:4873/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c" - integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA== - -jsonfile@^6.0.1: - version "6.1.0" - resolved "http://localhost:4873/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" - integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== - dependencies: - universalify "^2.0.0" - optionalDependencies: - graceful-fs "^4.1.6" - -jsonpointer@^5.0.0: - version "5.0.1" - resolved "http://localhost:4873/jsonpointer/-/jsonpointer-5.0.1.tgz#2110e0af0900fd37467b5907ecd13a7884a1b559" - integrity sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ== - -"jsx-ast-utils@^2.4.1 || ^3.0.0", jsx-ast-utils@^3.3.2: - version "3.3.3" - resolved "http://localhost:4873/jsx-ast-utils/-/jsx-ast-utils-3.3.3.tgz#76b3e6e6cece5c69d49a5792c3d01bd1a0cdc7ea" - integrity sha512-fYQHZTZ8jSfmWZ0iyzfwiU4WDX4HpHbMCZ3gPlWYiCl3BoeOTsqKBqnTVfH2rYT7eP5c3sVbeSPHnnJOaTrWiw== - dependencies: - array-includes "^3.1.5" - object.assign "^4.1.3" - -kind-of@^6.0.2: - version "6.0.3" - resolved "http://localhost:4873/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" - integrity 
sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== - -kleur@^3.0.3: - version "3.0.3" - resolved "http://localhost:4873/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" - integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== - -klona@^2.0.4, klona@^2.0.5: - version "2.0.5" - resolved "http://localhost:4873/klona/-/klona-2.0.5.tgz#d166574d90076395d9963aa7a928fabb8d76afbc" - integrity sha512-pJiBpiXMbt7dkzXe8Ghj/u4FfXOOa98fPW+bihOJ4SjnoijweJrNThJfd3ifXpXhREjpoF2mZVH1GfS9LV3kHQ== - -language-subtag-registry@~0.3.2: - version "0.3.22" - resolved "http://localhost:4873/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz#2e1500861b2e457eba7e7ae86877cbd08fa1fd1d" - integrity sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w== - -language-tags@^1.0.5: - version "1.0.5" - resolved "http://localhost:4873/language-tags/-/language-tags-1.0.5.tgz#d321dbc4da30ba8bf3024e040fa5c14661f9193a" - integrity sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ== - dependencies: - language-subtag-registry "~0.3.2" - -leven@^3.1.0: - version "3.1.0" - resolved "http://localhost:4873/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" - integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== - -levn@^0.4.1: - version "0.4.1" - resolved "http://localhost:4873/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" - integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== - dependencies: - prelude-ls "^1.2.1" - type-check "~0.4.0" - -levn@~0.3.0: - version "0.3.0" - resolved "http://localhost:4873/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" - integrity sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA== 
- dependencies: - prelude-ls "~1.1.2" - type-check "~0.3.2" - -lilconfig@^2.0.3, lilconfig@^2.0.5, lilconfig@^2.0.6: - version "2.0.6" - resolved "http://localhost:4873/lilconfig/-/lilconfig-2.0.6.tgz#32a384558bd58af3d4c6e077dd1ad1d397bc69d4" - integrity sha512-9JROoBW7pobfsx+Sq2JsASvCo6Pfo6WWoUW79HuB1BCoBXD4PLWJPqDF6fNj67pqBYTbAHkE57M1kS/+L1neOg== - -lines-and-columns@^1.1.6: - version "1.2.4" - resolved "http://localhost:4873/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" - integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== - -loader-runner@^4.2.0: - version "4.3.0" - resolved "http://localhost:4873/loader-runner/-/loader-runner-4.3.0.tgz#c1b4a163b99f614830353b16755e7149ac2314e1" - integrity sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg== - -loader-utils@^2.0.0: - version "2.0.4" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-2.0.4.tgz#8b5cb38b5c34a9a018ee1fc0e6a066d1dfcc528c" - integrity sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw== - dependencies: - big.js "^5.2.2" - emojis-list "^3.0.0" - json5 "^2.1.2" - -loader-utils@^3.2.0: - version "3.2.0" - resolved "http://localhost:4873/loader-utils/-/loader-utils-3.2.0.tgz#bcecc51a7898bee7473d4bc6b845b23af8304d4f" - integrity sha512-HVl9ZqccQihZ7JM85dco1MvO9G+ONvxoGa9rkhzFsneGLKSUg1gJf9bWzhRhcvm2qChhWpebQhP44qxjKIUCaQ== - -locate-path@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" - integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== - dependencies: - p-locate "^3.0.0" - path-exists "^3.0.0" - -locate-path@^5.0.0: - version "5.0.0" - resolved "http://localhost:4873/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" - integrity 
sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== - dependencies: - p-locate "^4.1.0" - -locate-path@^6.0.0: - version "6.0.0" - resolved "http://localhost:4873/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" - integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== - dependencies: - p-locate "^5.0.0" - -lodash.debounce@^4.0.8: - version "4.0.8" - resolved "http://localhost:4873/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" - integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow== - -lodash.memoize@^4.1.2: - version "4.1.2" - resolved "http://localhost:4873/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" - integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag== - -lodash.merge@^4.6.2: - version "4.6.2" - resolved "http://localhost:4873/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" - integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== - -lodash.sortby@^4.7.0: - version "4.7.0" - resolved "http://localhost:4873/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" - integrity sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA== - -lodash.uniq@^4.5.0: - version "4.5.0" - resolved "http://localhost:4873/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" - integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ== - -lodash@^4.17.15, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0: - version "4.17.21" - resolved "http://localhost:4873/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" - integrity 
sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== - -loose-envify@^1.1.0, loose-envify@^1.4.0: - version "1.4.0" - resolved "http://localhost:4873/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" - integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== - dependencies: - js-tokens "^3.0.0 || ^4.0.0" - -lower-case@^2.0.2: - version "2.0.2" - resolved "http://localhost:4873/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28" - integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg== - dependencies: - tslib "^2.0.3" - -lru-cache@^6.0.0: - version "6.0.0" - resolved "http://localhost:4873/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" - integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== - dependencies: - yallist "^4.0.0" - -lz-string@^1.4.4: - version "1.4.4" - resolved "http://localhost:4873/lz-string/-/lz-string-1.4.4.tgz#c0d8eaf36059f705796e1e344811cf4c498d3a26" - integrity sha512-0ckx7ZHRPqb0oUm8zNr+90mtf9DQB60H1wMCjBtfi62Kl3a7JbHob6gA2bC+xRvZoOL+1hzUK8jeuEIQE8svEQ== - -magic-string@^0.25.0, magic-string@^0.25.7: - version "0.25.9" - resolved "http://localhost:4873/magic-string/-/magic-string-0.25.9.tgz#de7f9faf91ef8a1c91d02c2e5314c8277dbcdd1c" - integrity sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ== - dependencies: - sourcemap-codec "^1.4.8" - -make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0: - version "3.1.0" - resolved "http://localhost:4873/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" - integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== - dependencies: - semver "^6.0.0" - -makeerror@1.0.12: - version "1.0.12" - resolved 
"http://localhost:4873/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" - integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== - dependencies: - tmpl "1.0.5" - -mdn-data@2.0.14: - version "2.0.14" - resolved "http://localhost:4873/mdn-data/-/mdn-data-2.0.14.tgz#7113fc4281917d63ce29b43446f701e68c25ba50" - integrity sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow== - -mdn-data@2.0.4: - version "2.0.4" - resolved "http://localhost:4873/mdn-data/-/mdn-data-2.0.4.tgz#699b3c38ac6f1d728091a64650b65d388502fd5b" - integrity sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA== - -media-typer@0.3.0: - version "0.3.0" - resolved "http://localhost:4873/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" - integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== - -memfs@^3.1.2, memfs@^3.4.3: - version "3.4.7" - resolved "http://localhost:4873/memfs/-/memfs-3.4.7.tgz#e5252ad2242a724f938cb937e3c4f7ceb1f70e5a" - integrity sha512-ygaiUSNalBX85388uskeCyhSAoOSgzBbtVCr9jA2RROssFL9Q19/ZXFqS+2Th2sr1ewNIWgFdLzLC3Yl1Zv+lw== - dependencies: - fs-monkey "^1.0.3" - -merge-descriptors@1.0.1: - version "1.0.1" - resolved "http://localhost:4873/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" - integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== - -merge-stream@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" - integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== - -merge2@^1.3.0, merge2@^1.4.1: - version "1.4.1" - resolved "http://localhost:4873/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" - 
integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== - -methods@~1.1.2: - version "1.1.2" - resolved "http://localhost:4873/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" - integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== - -micromatch@^4.0.2, micromatch@^4.0.4, micromatch@^4.0.5: - version "4.0.5" - resolved "http://localhost:4873/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" - integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== - dependencies: - braces "^3.0.2" - picomatch "^2.3.1" - -mime-db@1.52.0, "mime-db@>= 1.43.0 < 2": - version "1.52.0" - resolved "http://localhost:4873/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" - integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== - -mime-types@^2.1.12, mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.17, mime-types@~2.1.24, mime-types@~2.1.34: - version "2.1.35" - resolved "http://localhost:4873/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" - integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== - dependencies: - mime-db "1.52.0" - -mime@1.6.0: - version "1.6.0" - resolved "http://localhost:4873/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" - integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== - -mimic-fn@^2.1.0: - version "2.1.0" - resolved "http://localhost:4873/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" - integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== - -min-indent@^1.0.0: - version "1.0.1" - resolved 
"http://localhost:4873/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869" - integrity sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg== - -mini-css-extract-plugin@^2.4.5: - version "2.6.1" - resolved "http://localhost:4873/mini-css-extract-plugin/-/mini-css-extract-plugin-2.6.1.tgz#9a1251d15f2035c342d99a468ab9da7a0451b71e" - integrity sha512-wd+SD57/K6DiV7jIR34P+s3uckTRuQvx0tKPcvjFlrEylk6P4mQ2KSWk1hblj1Kxaqok7LogKOieygXqBczNlg== - dependencies: - schema-utils "^4.0.0" - -minimalistic-assert@^1.0.0: - version "1.0.1" - resolved "http://localhost:4873/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" - integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== - -minimatch@3.0.4: - version "3.0.4" - resolved "http://localhost:4873/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" - integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== - dependencies: - brace-expansion "^1.1.7" - -minimatch@^3.0.4, minimatch@^3.1.1, minimatch@^3.1.2: - version "3.1.2" - resolved "http://localhost:4873/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" - integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== - dependencies: - brace-expansion "^1.1.7" - -minimatch@^5.0.1: - version "5.1.0" - resolved "http://localhost:4873/minimatch/-/minimatch-5.1.0.tgz#1717b464f4971b144f6aabe8f2d0b8e4511e09c7" - integrity sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg== - dependencies: - brace-expansion "^2.0.1" - -minimist@^1.2.0, minimist@^1.2.6: - version "1.2.7" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.7.tgz#daa1c4d91f507390437c6a8bc01078e7000c4d18" - integrity 
sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g== - -mkdirp@~0.5.1: - version "0.5.6" - resolved "http://localhost:4873/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" - integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== - dependencies: - minimist "^1.2.6" - -ms@2.0.0: - version "2.0.0" - resolved "http://localhost:4873/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" - integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== - -ms@2.1.2: - version "2.1.2" - resolved "http://localhost:4873/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" - integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== - -ms@2.1.3, ms@^2.1.1: - version "2.1.3" - resolved "http://localhost:4873/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" - integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== - -multicast-dns@^7.2.5: - version "7.2.5" - resolved "http://localhost:4873/multicast-dns/-/multicast-dns-7.2.5.tgz#77eb46057f4d7adbd16d9290fa7299f6fa64cced" - integrity sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg== - dependencies: - dns-packet "^5.2.2" - thunky "^1.0.2" - -nanoid@^3.3.4: - version "3.3.4" - resolved "http://localhost:4873/nanoid/-/nanoid-3.3.4.tgz#730b67e3cd09e2deacf03c027c81c9d9dbc5e8ab" - integrity sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw== - -natural-compare@^1.4.0: - version "1.4.0" - resolved "http://localhost:4873/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" - integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== - -negotiator@0.6.3: - version "0.6.3" - resolved 
"http://localhost:4873/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" - integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== - -neo-async@^2.6.2: - version "2.6.2" - resolved "http://localhost:4873/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" - integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== - -no-case@^3.0.4: - version "3.0.4" - resolved "http://localhost:4873/no-case/-/no-case-3.0.4.tgz#d361fd5c9800f558551a8369fc0dcd4662b6124d" - integrity sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg== - dependencies: - lower-case "^2.0.2" - tslib "^2.0.3" - -node-forge@^1: - version "1.3.1" - resolved "http://localhost:4873/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" - integrity sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA== - -node-int64@^0.4.0: - version "0.4.0" - resolved "http://localhost:4873/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" - integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== - -node-releases@^2.0.6: - version "2.0.6" - resolved "http://localhost:4873/node-releases/-/node-releases-2.0.6.tgz#8a7088c63a55e493845683ebf3c828d8c51c5503" - integrity sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg== - -normalize-path@^3.0.0, normalize-path@~3.0.0: - version "3.0.0" - resolved "http://localhost:4873/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" - integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== - -normalize-range@^0.1.2: - version "0.1.2" - resolved 
"http://localhost:4873/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" - integrity sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA== - -normalize-url@^6.0.1: - version "6.1.0" - resolved "http://localhost:4873/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" - integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== - -npm-run-path@^4.0.1: - version "4.0.1" - resolved "http://localhost:4873/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" - integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== - dependencies: - path-key "^3.0.0" - -nth-check@^1.0.2: - version "1.0.2" - resolved "http://localhost:4873/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" - integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== - dependencies: - boolbase "~1.0.0" - -nth-check@^2.0.1: - version "2.1.1" - resolved "http://localhost:4873/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d" - integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w== - dependencies: - boolbase "^1.0.0" - -nwsapi@^2.2.0: - version "2.2.2" - resolved "http://localhost:4873/nwsapi/-/nwsapi-2.2.2.tgz#e5418863e7905df67d51ec95938d67bf801f0bb0" - integrity sha512-90yv+6538zuvUMnN+zCr8LuV6bPFdq50304114vJYJ8RDyK8D5O9Phpbd6SZWgI7PwzmmfN1upeOJlvybDSgCw== - -object-assign@^4.1.1: - version "4.1.1" - resolved "http://localhost:4873/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" - integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== - -object-hash@^3.0.0: - version "3.0.0" - resolved 
"http://localhost:4873/object-hash/-/object-hash-3.0.0.tgz#73f97f753e7baffc0e2cc9d6e079079744ac82e9" - integrity sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw== - -object-inspect@^1.12.2, object-inspect@^1.9.0: - version "1.12.2" - resolved "http://localhost:4873/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea" - integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ== - -object-keys@^1.1.1: - version "1.1.1" - resolved "http://localhost:4873/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" - integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== - -object.assign@^4.1.0, object.assign@^4.1.3, object.assign@^4.1.4: - version "4.1.4" - resolved "http://localhost:4873/object.assign/-/object.assign-4.1.4.tgz#9673c7c7c351ab8c4d0b516f4343ebf4dfb7799f" - integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.4" - has-symbols "^1.0.3" - object-keys "^1.1.1" - -object.entries@^1.1.5: - version "1.1.5" - resolved "http://localhost:4873/object.entries/-/object.entries-1.1.5.tgz#e1acdd17c4de2cd96d5a08487cfb9db84d881861" - integrity sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.1" - -object.fromentries@^2.0.5: - version "2.0.5" - resolved "http://localhost:4873/object.fromentries/-/object.fromentries-2.0.5.tgz#7b37b205109c21e741e605727fe8b0ad5fa08251" - integrity sha512-CAyG5mWQRRiBU57Re4FKoTBjXfDoNwdFVH2Y1tS9PqCsfUTymAohOkEMSG3aRNKmv4lV3O7p1et7c187q6bynw== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.1" - -object.getownpropertydescriptors@^2.1.0: - version "2.1.4" - resolved 
"http://localhost:4873/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.4.tgz#7965e6437a57278b587383831a9b829455a4bc37" - integrity sha512-sccv3L/pMModT6dJAYF3fzGMVcb38ysQ0tEE6ixv2yXJDtEIPph268OlAdJj5/qZMZDq2g/jqvwppt36uS/uQQ== - dependencies: - array.prototype.reduce "^1.0.4" - call-bind "^1.0.2" - define-properties "^1.1.4" - es-abstract "^1.20.1" - -object.hasown@^1.1.1: - version "1.1.1" - resolved "http://localhost:4873/object.hasown/-/object.hasown-1.1.1.tgz#ad1eecc60d03f49460600430d97f23882cf592a3" - integrity sha512-LYLe4tivNQzq4JdaWW6WO3HMZZJWzkkH8fnI6EebWl0VZth2wL2Lovm74ep2/gZzlaTdV62JZHEqHQ2yVn8Q/A== - dependencies: - define-properties "^1.1.4" - es-abstract "^1.19.5" - -object.values@^1.1.0, object.values@^1.1.5: - version "1.1.5" - resolved "http://localhost:4873/object.values/-/object.values-1.1.5.tgz#959f63e3ce9ef108720333082131e4a459b716ac" - integrity sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.1" - -obuf@^1.0.0, obuf@^1.1.2: - version "1.1.2" - resolved "http://localhost:4873/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" - integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== - -on-finished@2.4.1: - version "2.4.1" - resolved "http://localhost:4873/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" - integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== - dependencies: - ee-first "1.1.1" - -on-headers@~1.0.2: - version "1.0.2" - resolved "http://localhost:4873/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" - integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== - -once@^1.3.0: - version "1.4.0" - resolved 
"http://localhost:4873/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" - integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== - dependencies: - wrappy "1" - -onetime@^5.1.2: - version "5.1.2" - resolved "http://localhost:4873/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" - integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== - dependencies: - mimic-fn "^2.1.0" - -open@^8.0.9, open@^8.4.0: - version "8.4.0" - resolved "http://localhost:4873/open/-/open-8.4.0.tgz#345321ae18f8138f82565a910fdc6b39e8c244f8" - integrity sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q== - dependencies: - define-lazy-prop "^2.0.0" - is-docker "^2.1.1" - is-wsl "^2.2.0" - -optionator@^0.8.1: - version "0.8.3" - resolved "http://localhost:4873/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" - integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== - dependencies: - deep-is "~0.1.3" - fast-levenshtein "~2.0.6" - levn "~0.3.0" - prelude-ls "~1.1.2" - type-check "~0.3.2" - word-wrap "~1.2.3" - -optionator@^0.9.1: - version "0.9.1" - resolved "http://localhost:4873/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" - integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== - dependencies: - deep-is "^0.1.3" - fast-levenshtein "^2.0.6" - levn "^0.4.1" - prelude-ls "^1.2.1" - type-check "^0.4.0" - word-wrap "^1.2.3" - -p-limit@^2.0.0, p-limit@^2.2.0: - version "2.3.0" - resolved "http://localhost:4873/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" - integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== - dependencies: - p-try "^2.0.0" - -p-limit@^3.0.2: - version "3.1.0" - resolved 
"http://localhost:4873/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" - integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== - dependencies: - yocto-queue "^0.1.0" - -p-locate@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" - integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== - dependencies: - p-limit "^2.0.0" - -p-locate@^4.1.0: - version "4.1.0" - resolved "http://localhost:4873/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" - integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== - dependencies: - p-limit "^2.2.0" - -p-locate@^5.0.0: - version "5.0.0" - resolved "http://localhost:4873/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" - integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== - dependencies: - p-limit "^3.0.2" - -p-retry@^4.5.0: - version "4.6.2" - resolved "http://localhost:4873/p-retry/-/p-retry-4.6.2.tgz#9baae7184057edd4e17231cee04264106e092a16" - integrity sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ== - dependencies: - "@types/retry" "0.12.0" - retry "^0.13.1" - -p-try@^2.0.0: - version "2.2.0" - resolved "http://localhost:4873/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" - integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== - -param-case@^3.0.4: - version "3.0.4" - resolved "http://localhost:4873/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5" - integrity sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A== - dependencies: - dot-case "^3.0.4" - tslib "^2.0.3" - -parent-module@^1.0.0: - version 
"1.0.1" - resolved "http://localhost:4873/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" - integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== - dependencies: - callsites "^3.0.0" - -parse-json@^5.0.0, parse-json@^5.2.0: - version "5.2.0" - resolved "http://localhost:4873/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" - integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== - dependencies: - "@babel/code-frame" "^7.0.0" - error-ex "^1.3.1" - json-parse-even-better-errors "^2.3.0" - lines-and-columns "^1.1.6" - -parse5@6.0.1: - version "6.0.1" - resolved "http://localhost:4873/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b" - integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== - -parseurl@~1.3.2, parseurl@~1.3.3: - version "1.3.3" - resolved "http://localhost:4873/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" - integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== - -pascal-case@^3.1.2: - version "3.1.2" - resolved "http://localhost:4873/pascal-case/-/pascal-case-3.1.2.tgz#b48e0ef2b98e205e7c1dae747d0b1508237660eb" - integrity sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g== - dependencies: - no-case "^3.0.4" - tslib "^2.0.3" - -path-exists@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" - integrity sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ== - -path-exists@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" - integrity 
sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== - -path-is-absolute@^1.0.0: - version "1.0.1" - resolved "http://localhost:4873/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" - integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== - -path-key@^3.0.0, path-key@^3.1.0: - version "3.1.1" - resolved "http://localhost:4873/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" - integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== - -path-parse@^1.0.7: - version "1.0.7" - resolved "http://localhost:4873/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" - integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== - -path-to-regexp@0.1.7: - version "0.1.7" - resolved "http://localhost:4873/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" - integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== - -path-type@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" - integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== - -performance-now@^2.1.0: - version "2.1.0" - resolved "http://localhost:4873/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" - integrity sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow== - -picocolors@^0.2.1: - version "0.2.1" - resolved "http://localhost:4873/picocolors/-/picocolors-0.2.1.tgz#570670f793646851d1ba135996962abad587859f" - integrity sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA== - -picocolors@^1.0.0: - version "1.0.0" - 
resolved "http://localhost:4873/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" - integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== - -picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3, picomatch@^2.3.1: - version "2.3.1" - resolved "http://localhost:4873/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" - integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== - -pify@^2.3.0: - version "2.3.0" - resolved "http://localhost:4873/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" - integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog== - -pirates@^4.0.4: - version "4.0.5" - resolved "http://localhost:4873/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" - integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== - -pkg-dir@^4.1.0, pkg-dir@^4.2.0: - version "4.2.0" - resolved "http://localhost:4873/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" - integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== - dependencies: - find-up "^4.0.0" - -pkg-up@^3.1.0: - version "3.1.0" - resolved "http://localhost:4873/pkg-up/-/pkg-up-3.1.0.tgz#100ec235cc150e4fd42519412596a28512a0def5" - integrity sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA== - dependencies: - find-up "^3.0.0" - -postcss-attribute-case-insensitive@^5.0.2: - version "5.0.2" - resolved "http://localhost:4873/postcss-attribute-case-insensitive/-/postcss-attribute-case-insensitive-5.0.2.tgz#03d761b24afc04c09e757e92ff53716ae8ea2741" - integrity sha512-XIidXV8fDr0kKt28vqki84fRK8VW8eTuIa4PChv2MqKuT6C9UjmSKzen6KaWhWEoYvwxFCa7n/tC1SZ3tyq4SQ== - dependencies: - postcss-selector-parser "^6.0.10" - 
-postcss-browser-comments@^4: - version "4.0.0" - resolved "http://localhost:4873/postcss-browser-comments/-/postcss-browser-comments-4.0.0.tgz#bcfc86134df5807f5d3c0eefa191d42136b5e72a" - integrity sha512-X9X9/WN3KIvY9+hNERUqX9gncsgBA25XaeR+jshHz2j8+sYyHktHw1JdKuMjeLpGktXidqDhA7b/qm1mrBDmgg== - -postcss-calc@^8.2.3: - version "8.2.4" - resolved "http://localhost:4873/postcss-calc/-/postcss-calc-8.2.4.tgz#77b9c29bfcbe8a07ff6693dc87050828889739a5" - integrity sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q== - dependencies: - postcss-selector-parser "^6.0.9" - postcss-value-parser "^4.2.0" - -postcss-clamp@^4.1.0: - version "4.1.0" - resolved "http://localhost:4873/postcss-clamp/-/postcss-clamp-4.1.0.tgz#7263e95abadd8c2ba1bd911b0b5a5c9c93e02363" - integrity sha512-ry4b1Llo/9zz+PKC+030KUnPITTJAHeOwjfAyyB60eT0AorGLdzp52s31OsPRHRf8NchkgFoG2y6fCfn1IV1Ow== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-color-functional-notation@^4.2.4: - version "4.2.4" - resolved "http://localhost:4873/postcss-color-functional-notation/-/postcss-color-functional-notation-4.2.4.tgz#21a909e8d7454d3612d1659e471ce4696f28caec" - integrity sha512-2yrTAUZUab9s6CpxkxC4rVgFEVaR6/2Pipvi6qcgvnYiVqZcbDHEoBDhrXzyb7Efh2CCfHQNtcqWcIruDTIUeg== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-color-hex-alpha@^8.0.4: - version "8.0.4" - resolved "http://localhost:4873/postcss-color-hex-alpha/-/postcss-color-hex-alpha-8.0.4.tgz#c66e2980f2fbc1a63f5b079663340ce8b55f25a5" - integrity sha512-nLo2DCRC9eE4w2JmuKgVA3fGL3d01kGq752pVALF68qpGLmx2Qrk91QTKkdUqqp45T1K1XV8IhQpcu1hoAQflQ== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-color-rebeccapurple@^7.1.1: - version "7.1.1" - resolved "http://localhost:4873/postcss-color-rebeccapurple/-/postcss-color-rebeccapurple-7.1.1.tgz#63fdab91d878ebc4dd4b7c02619a0c3d6a56ced0" - integrity sha512-pGxkuVEInwLHgkNxUc4sdg4g3py7zUeCQ9sMfwyHAT+Ezk8a4OaaVZ8lIY5+oNqA/BXXgLyXv0+5wHP68R79hg== - 
dependencies: - postcss-value-parser "^4.2.0" - -postcss-colormin@^5.3.0: - version "5.3.0" - resolved "http://localhost:4873/postcss-colormin/-/postcss-colormin-5.3.0.tgz#3cee9e5ca62b2c27e84fce63affc0cfb5901956a" - integrity sha512-WdDO4gOFG2Z8n4P8TWBpshnL3JpmNmJwdnfP2gbk2qBA8PWwOYcmjmI/t3CmMeL72a7Hkd+x/Mg9O2/0rD54Pg== - dependencies: - browserslist "^4.16.6" - caniuse-api "^3.0.0" - colord "^2.9.1" - postcss-value-parser "^4.2.0" - -postcss-convert-values@^5.1.2: - version "5.1.2" - resolved "http://localhost:4873/postcss-convert-values/-/postcss-convert-values-5.1.2.tgz#31586df4e184c2e8890e8b34a0b9355313f503ab" - integrity sha512-c6Hzc4GAv95B7suy4udszX9Zy4ETyMCgFPUDtWjdFTKH1SE9eFY/jEpHSwTH1QPuwxHpWslhckUQWbNRM4ho5g== - dependencies: - browserslist "^4.20.3" - postcss-value-parser "^4.2.0" - -postcss-custom-media@^8.0.2: - version "8.0.2" - resolved "http://localhost:4873/postcss-custom-media/-/postcss-custom-media-8.0.2.tgz#c8f9637edf45fef761b014c024cee013f80529ea" - integrity sha512-7yi25vDAoHAkbhAzX9dHx2yc6ntS4jQvejrNcC+csQJAXjj15e7VcWfMgLqBNAbOvqi5uIa9huOVwdHbf+sKqg== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-custom-properties@^12.1.9: - version "12.1.9" - resolved "http://localhost:4873/postcss-custom-properties/-/postcss-custom-properties-12.1.9.tgz#0883429a7ef99f1ba239d1fea29ce84906daa8bd" - integrity sha512-/E7PRvK8DAVljBbeWrcEQJPG72jaImxF3vvCNFwv9cC8CzigVoNIpeyfnJzphnN3Fd8/auBf5wvkw6W9MfmTyg== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-custom-selectors@^6.0.3: - version "6.0.3" - resolved "http://localhost:4873/postcss-custom-selectors/-/postcss-custom-selectors-6.0.3.tgz#1ab4684d65f30fed175520f82d223db0337239d9" - integrity sha512-fgVkmyiWDwmD3JbpCmB45SvvlCD6z9CG6Ie6Iere22W5aHea6oWa7EM2bpnv2Fj3I94L3VbtvX9KqwSi5aFzSg== - dependencies: - postcss-selector-parser "^6.0.4" - -postcss-dir-pseudo-class@^6.0.5: - version "6.0.5" - resolved 
"http://localhost:4873/postcss-dir-pseudo-class/-/postcss-dir-pseudo-class-6.0.5.tgz#2bf31de5de76added44e0a25ecf60ae9f7c7c26c" - integrity sha512-eqn4m70P031PF7ZQIvSgy9RSJ5uI2171O/OO/zcRNYpJbvaeKFUlar1aJ7rmgiQtbm0FSPsRewjpdS0Oew7MPA== - dependencies: - postcss-selector-parser "^6.0.10" - -postcss-discard-comments@^5.1.2: - version "5.1.2" - resolved "http://localhost:4873/postcss-discard-comments/-/postcss-discard-comments-5.1.2.tgz#8df5e81d2925af2780075840c1526f0660e53696" - integrity sha512-+L8208OVbHVF2UQf1iDmRcbdjJkuBF6IS29yBDSiWUIzpYaAhtNl6JYnYm12FnkeCwQqF5LeklOu6rAqgfBZqQ== - -postcss-discard-duplicates@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-discard-duplicates/-/postcss-discard-duplicates-5.1.0.tgz#9eb4fe8456706a4eebd6d3b7b777d07bad03e848" - integrity sha512-zmX3IoSI2aoenxHV6C7plngHWWhUOV3sP1T8y2ifzxzbtnuhk1EdPwm0S1bIUNaJ2eNbWeGLEwzw8huPD67aQw== - -postcss-discard-empty@^5.1.1: - version "5.1.1" - resolved "http://localhost:4873/postcss-discard-empty/-/postcss-discard-empty-5.1.1.tgz#e57762343ff7f503fe53fca553d18d7f0c369c6c" - integrity sha512-zPz4WljiSuLWsI0ir4Mcnr4qQQ5e1Ukc3i7UfE2XcrwKK2LIPIqE5jxMRxO6GbI3cv//ztXDsXwEWT3BHOGh3A== - -postcss-discard-overridden@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-discard-overridden/-/postcss-discard-overridden-5.1.0.tgz#7e8c5b53325747e9d90131bb88635282fb4a276e" - integrity sha512-21nOL7RqWR1kasIVdKs8HNqQJhFxLsyRfAnUDm4Fe4t4mCWL9OJiHvlHPjcd8zc5Myu89b/7wZDnOSjFgeWRtw== - -postcss-double-position-gradients@^3.1.2: - version "3.1.2" - resolved "http://localhost:4873/postcss-double-position-gradients/-/postcss-double-position-gradients-3.1.2.tgz#b96318fdb477be95997e86edd29c6e3557a49b91" - integrity sha512-GX+FuE/uBR6eskOK+4vkXgT6pDkexLokPaz/AbJna9s5Kzp/yl488pKPjhy0obB475ovfT1Wv8ho7U/cHNaRgQ== - dependencies: - "@csstools/postcss-progressive-custom-properties" "^1.1.0" - postcss-value-parser "^4.2.0" - -postcss-env-function@^4.0.6: - version "4.0.6" - resolved 
"http://localhost:4873/postcss-env-function/-/postcss-env-function-4.0.6.tgz#7b2d24c812f540ed6eda4c81f6090416722a8e7a" - integrity sha512-kpA6FsLra+NqcFnL81TnsU+Z7orGtDTxcOhl6pwXeEq1yFPpRMkCDpHhrz8CFQDr/Wfm0jLiNQ1OsGGPjlqPwA== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-flexbugs-fixes@^5.0.2: - version "5.0.2" - resolved "http://localhost:4873/postcss-flexbugs-fixes/-/postcss-flexbugs-fixes-5.0.2.tgz#2028e145313074fc9abe276cb7ca14e5401eb49d" - integrity sha512-18f9voByak7bTktR2QgDveglpn9DTbBWPUzSOe9g0N4WR/2eSt6Vrcbf0hmspvMI6YWGywz6B9f7jzpFNJJgnQ== - -postcss-focus-visible@^6.0.4: - version "6.0.4" - resolved "http://localhost:4873/postcss-focus-visible/-/postcss-focus-visible-6.0.4.tgz#50c9ea9afa0ee657fb75635fabad25e18d76bf9e" - integrity sha512-QcKuUU/dgNsstIK6HELFRT5Y3lbrMLEOwG+A4s5cA+fx3A3y/JTq3X9LaOj3OC3ALH0XqyrgQIgey/MIZ8Wczw== - dependencies: - postcss-selector-parser "^6.0.9" - -postcss-focus-within@^5.0.4: - version "5.0.4" - resolved "http://localhost:4873/postcss-focus-within/-/postcss-focus-within-5.0.4.tgz#5b1d2ec603195f3344b716c0b75f61e44e8d2e20" - integrity sha512-vvjDN++C0mu8jz4af5d52CB184ogg/sSxAFS+oUJQq2SuCe7T5U2iIsVJtsCp2d6R4j0jr5+q3rPkBVZkXD9fQ== - dependencies: - postcss-selector-parser "^6.0.9" - -postcss-font-variant@^5.0.0: - version "5.0.0" - resolved "http://localhost:4873/postcss-font-variant/-/postcss-font-variant-5.0.0.tgz#efd59b4b7ea8bb06127f2d031bfbb7f24d32fa66" - integrity sha512-1fmkBaCALD72CK2a9i468mA/+tr9/1cBxRRMXOUaZqO43oWPR5imcyPjXwuv7PXbCid4ndlP5zWhidQVVa3hmA== - -postcss-gap-properties@^3.0.5: - version "3.0.5" - resolved "http://localhost:4873/postcss-gap-properties/-/postcss-gap-properties-3.0.5.tgz#f7e3cddcf73ee19e94ccf7cb77773f9560aa2fff" - integrity sha512-IuE6gKSdoUNcvkGIqdtjtcMtZIFyXZhmFd5RUlg97iVEvp1BZKV5ngsAjCjrVy+14uhGBQl9tzmi1Qwq4kqVOg== - -postcss-image-set-function@^4.0.7: - version "4.0.7" - resolved 
"http://localhost:4873/postcss-image-set-function/-/postcss-image-set-function-4.0.7.tgz#08353bd756f1cbfb3b6e93182c7829879114481f" - integrity sha512-9T2r9rsvYzm5ndsBE8WgtrMlIT7VbtTfE7b3BQnudUqnBcBo7L758oc+o+pdj/dUV0l5wjwSdjeOH2DZtfv8qw== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-import@^14.1.0: - version "14.1.0" - resolved "http://localhost:4873/postcss-import/-/postcss-import-14.1.0.tgz#a7333ffe32f0b8795303ee9e40215dac922781f0" - integrity sha512-flwI+Vgm4SElObFVPpTIT7SU7R3qk2L7PyduMcokiaVKuWv9d/U+Gm/QAd8NDLuykTWTkcrjOeD2Pp1rMeBTGw== - dependencies: - postcss-value-parser "^4.0.0" - read-cache "^1.0.0" - resolve "^1.1.7" - -postcss-initial@^4.0.1: - version "4.0.1" - resolved "http://localhost:4873/postcss-initial/-/postcss-initial-4.0.1.tgz#529f735f72c5724a0fb30527df6fb7ac54d7de42" - integrity sha512-0ueD7rPqX8Pn1xJIjay0AZeIuDoF+V+VvMt/uOnn+4ezUKhZM/NokDeP6DwMNyIoYByuN/94IQnt5FEkaN59xQ== - -postcss-js@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/postcss-js/-/postcss-js-4.0.0.tgz#31db79889531b80dc7bc9b0ad283e418dce0ac00" - integrity sha512-77QESFBwgX4irogGVPgQ5s07vLvFqWr228qZY+w6lW599cRlK/HmnlivnnVUxkjHnCu4J16PDMHcH+e+2HbvTQ== - dependencies: - camelcase-css "^2.0.1" - -postcss-lab-function@^4.2.1: - version "4.2.1" - resolved "http://localhost:4873/postcss-lab-function/-/postcss-lab-function-4.2.1.tgz#6fe4c015102ff7cd27d1bd5385582f67ebdbdc98" - integrity sha512-xuXll4isR03CrQsmxyz92LJB2xX9n+pZJ5jE9JgcnmsCammLyKdlzrBin+25dy6wIjfhJpKBAN80gsTlCgRk2w== - dependencies: - "@csstools/postcss-progressive-custom-properties" "^1.1.0" - postcss-value-parser "^4.2.0" - -postcss-load-config@^3.1.4: - version "3.1.4" - resolved "http://localhost:4873/postcss-load-config/-/postcss-load-config-3.1.4.tgz#1ab2571faf84bb078877e1d07905eabe9ebda855" - integrity sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg== - dependencies: - lilconfig "^2.0.5" - yaml "^1.10.2" - -postcss-loader@^6.2.1: - version 
"6.2.1" - resolved "http://localhost:4873/postcss-loader/-/postcss-loader-6.2.1.tgz#0895f7346b1702103d30fdc66e4d494a93c008ef" - integrity sha512-WbbYpmAaKcux/P66bZ40bpWsBucjx/TTgVVzRZ9yUO8yQfVBlameJ0ZGVaPfH64hNSBh63a+ICP5nqOpBA0w+Q== - dependencies: - cosmiconfig "^7.0.0" - klona "^2.0.5" - semver "^7.3.5" - -postcss-logical@^5.0.4: - version "5.0.4" - resolved "http://localhost:4873/postcss-logical/-/postcss-logical-5.0.4.tgz#ec75b1ee54421acc04d5921576b7d8db6b0e6f73" - integrity sha512-RHXxplCeLh9VjinvMrZONq7im4wjWGlRJAqmAVLXyZaXwfDWP73/oq4NdIp+OZwhQUMj0zjqDfM5Fj7qby+B4g== - -postcss-media-minmax@^5.0.0: - version "5.0.0" - resolved "http://localhost:4873/postcss-media-minmax/-/postcss-media-minmax-5.0.0.tgz#7140bddec173e2d6d657edbd8554a55794e2a5b5" - integrity sha512-yDUvFf9QdFZTuCUg0g0uNSHVlJ5X1lSzDZjPSFaiCWvjgsvu8vEVxtahPrLMinIDEEGnx6cBe6iqdx5YWz08wQ== - -postcss-merge-longhand@^5.1.6: - version "5.1.6" - resolved "http://localhost:4873/postcss-merge-longhand/-/postcss-merge-longhand-5.1.6.tgz#f378a8a7e55766b7b644f48e5d8c789ed7ed51ce" - integrity sha512-6C/UGF/3T5OE2CEbOuX7iNO63dnvqhGZeUnKkDeifebY0XqkkvrctYSZurpNE902LDf2yKwwPFgotnfSoPhQiw== - dependencies: - postcss-value-parser "^4.2.0" - stylehacks "^5.1.0" - -postcss-merge-rules@^5.1.2: - version "5.1.2" - resolved "http://localhost:4873/postcss-merge-rules/-/postcss-merge-rules-5.1.2.tgz#7049a14d4211045412116d79b751def4484473a5" - integrity sha512-zKMUlnw+zYCWoPN6yhPjtcEdlJaMUZ0WyVcxTAmw3lkkN/NDMRkOkiuctQEoWAOvH7twaxUUdvBWl0d4+hifRQ== - dependencies: - browserslist "^4.16.6" - caniuse-api "^3.0.0" - cssnano-utils "^3.1.0" - postcss-selector-parser "^6.0.5" - -postcss-minify-font-values@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-minify-font-values/-/postcss-minify-font-values-5.1.0.tgz#f1df0014a726083d260d3bd85d7385fb89d1f01b" - integrity sha512-el3mYTgx13ZAPPirSVsHqFzl+BBBDrXvbySvPGFnQcTI4iNslrPaFq4muTkLZmKlGk4gyFAYUBMH30+HurREyA== - dependencies: - postcss-value-parser "^4.2.0" - 
-postcss-minify-gradients@^5.1.1: - version "5.1.1" - resolved "http://localhost:4873/postcss-minify-gradients/-/postcss-minify-gradients-5.1.1.tgz#f1fe1b4f498134a5068240c2f25d46fcd236ba2c" - integrity sha512-VGvXMTpCEo4qHTNSa9A0a3D+dxGFZCYwR6Jokk+/3oB6flu2/PnPXAh2x7x52EkY5xlIHLm+Le8tJxe/7TNhzw== - dependencies: - colord "^2.9.1" - cssnano-utils "^3.1.0" - postcss-value-parser "^4.2.0" - -postcss-minify-params@^5.1.3: - version "5.1.3" - resolved "http://localhost:4873/postcss-minify-params/-/postcss-minify-params-5.1.3.tgz#ac41a6465be2db735099bbd1798d85079a6dc1f9" - integrity sha512-bkzpWcjykkqIujNL+EVEPOlLYi/eZ050oImVtHU7b4lFS82jPnsCb44gvC6pxaNt38Els3jWYDHTjHKf0koTgg== - dependencies: - browserslist "^4.16.6" - cssnano-utils "^3.1.0" - postcss-value-parser "^4.2.0" - -postcss-minify-selectors@^5.2.1: - version "5.2.1" - resolved "http://localhost:4873/postcss-minify-selectors/-/postcss-minify-selectors-5.2.1.tgz#d4e7e6b46147b8117ea9325a915a801d5fe656c6" - integrity sha512-nPJu7OjZJTsVUmPdm2TcaiohIwxP+v8ha9NehQ2ye9szv4orirRU3SDdtUmKH+10nzn0bAyOXZ0UEr7OpvLehg== - dependencies: - postcss-selector-parser "^6.0.5" - -postcss-modules-extract-imports@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz#cda1f047c0ae80c97dbe28c3e76a43b88025741d" - integrity sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw== - -postcss-modules-local-by-default@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.0.tgz#ebbb54fae1598eecfdf691a02b3ff3b390a5a51c" - integrity sha512-sT7ihtmGSF9yhm6ggikHdV0hlziDTX7oFoXtuVWeDd3hHObNkcHRo9V3yg7vCAY7cONyxJC/XXCmmiHHcvX7bQ== - dependencies: - icss-utils "^5.0.0" - postcss-selector-parser "^6.0.2" - postcss-value-parser "^4.1.0" - -postcss-modules-scope@^3.0.0: - version "3.0.0" - resolved 
"http://localhost:4873/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz#9ef3151456d3bbfa120ca44898dfca6f2fa01f06" - integrity sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg== - dependencies: - postcss-selector-parser "^6.0.4" - -postcss-modules-values@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz#d7c5e7e68c3bb3c9b27cbf48ca0bb3ffb4602c9c" - integrity sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ== - dependencies: - icss-utils "^5.0.0" - -postcss-nested@5.0.6: - version "5.0.6" - resolved "http://localhost:4873/postcss-nested/-/postcss-nested-5.0.6.tgz#466343f7fc8d3d46af3e7dba3fcd47d052a945bc" - integrity sha512-rKqm2Fk0KbA8Vt3AdGN0FB9OBOMDVajMG6ZCf/GoHgdxUJ4sBFp0A/uMIRm+MJUdo33YXEtjqIz8u7DAp8B7DA== - dependencies: - postcss-selector-parser "^6.0.6" - -postcss-nesting@^10.2.0: - version "10.2.0" - resolved "http://localhost:4873/postcss-nesting/-/postcss-nesting-10.2.0.tgz#0b12ce0db8edfd2d8ae0aaf86427370b898890be" - integrity sha512-EwMkYchxiDiKUhlJGzWsD9b2zvq/r2SSubcRrgP+jujMXFzqvANLt16lJANC+5uZ6hjI7lpRmI6O8JIl+8l1KA== - dependencies: - "@csstools/selector-specificity" "^2.0.0" - postcss-selector-parser "^6.0.10" - -postcss-normalize-charset@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-normalize-charset/-/postcss-normalize-charset-5.1.0.tgz#9302de0b29094b52c259e9b2cf8dc0879879f0ed" - integrity sha512-mSgUJ+pd/ldRGVx26p2wz9dNZ7ji6Pn8VWBajMXFf8jk7vUoSrZ2lt/wZR7DtlZYKesmZI680qjr2CeFF2fbUg== - -postcss-normalize-display-values@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-normalize-display-values/-/postcss-normalize-display-values-5.1.0.tgz#72abbae58081960e9edd7200fcf21ab8325c3da8" - integrity sha512-WP4KIM4o2dazQXWmFaqMmcvsKmhdINFblgSeRgn8BJ6vxaMyaJkwAzpPpuvSIoG/rmX3M+IrRZEz2H0glrQNEA== - dependencies: - postcss-value-parser "^4.2.0" - 
-postcss-normalize-positions@^5.1.1: - version "5.1.1" - resolved "http://localhost:4873/postcss-normalize-positions/-/postcss-normalize-positions-5.1.1.tgz#ef97279d894087b59325b45c47f1e863daefbb92" - integrity sha512-6UpCb0G4eofTCQLFVuI3EVNZzBNPiIKcA1AKVka+31fTVySphr3VUgAIULBhxZkKgwLImhzMR2Bw1ORK+37INg== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize-repeat-style@^5.1.1: - version "5.1.1" - resolved "http://localhost:4873/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.1.1.tgz#e9eb96805204f4766df66fd09ed2e13545420fb2" - integrity sha512-mFpLspGWkQtBcWIRFLmewo8aC3ImN2i/J3v8YCFUwDnPu3Xz4rLohDO26lGjwNsQxB3YF0KKRwspGzE2JEuS0g== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize-string@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-normalize-string/-/postcss-normalize-string-5.1.0.tgz#411961169e07308c82c1f8c55f3e8a337757e228" - integrity sha512-oYiIJOf4T9T1N4i+abeIc7Vgm/xPCGih4bZz5Nm0/ARVJ7K6xrDlLwvwqOydvyL3RHNf8qZk6vo3aatiw/go3w== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize-timing-functions@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.1.0.tgz#d5614410f8f0b2388e9f240aa6011ba6f52dafbb" - integrity sha512-DOEkzJ4SAXv5xkHl0Wa9cZLF3WCBhF3o1SKVxKQAa+0pYKlueTpCgvkFAHfk+Y64ezX9+nITGrDZeVGgITJXjg== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize-unicode@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-normalize-unicode/-/postcss-normalize-unicode-5.1.0.tgz#3d23aede35e160089a285e27bf715de11dc9db75" - integrity sha512-J6M3MizAAZ2dOdSjy2caayJLQT8E8K9XjLce8AUQMwOrCvjCHv24aLC/Lps1R1ylOfol5VIDMaM/Lo9NGlk1SQ== - dependencies: - browserslist "^4.16.6" - postcss-value-parser "^4.2.0" - -postcss-normalize-url@^5.1.0: - version "5.1.0" - resolved 
"http://localhost:4873/postcss-normalize-url/-/postcss-normalize-url-5.1.0.tgz#ed9d88ca82e21abef99f743457d3729a042adcdc" - integrity sha512-5upGeDO+PVthOxSmds43ZeMeZfKH+/DKgGRD7TElkkyS46JXAUhMzIKiCa7BabPeIy3AQcTkXwVVN7DbqsiCew== - dependencies: - normalize-url "^6.0.1" - postcss-value-parser "^4.2.0" - -postcss-normalize-whitespace@^5.1.1: - version "5.1.1" - resolved "http://localhost:4873/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz#08a1a0d1ffa17a7cc6efe1e6c9da969cc4493cfa" - integrity sha512-83ZJ4t3NUDETIHTa3uEg6asWjSBYL5EdkVB0sDncx9ERzOKBVJIUeDO9RyA9Zwtig8El1d79HBp0JEi8wvGQnA== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize@^10.0.1: - version "10.0.1" - resolved "http://localhost:4873/postcss-normalize/-/postcss-normalize-10.0.1.tgz#464692676b52792a06b06880a176279216540dd7" - integrity sha512-+5w18/rDev5mqERcG3W5GZNMJa1eoYYNGo8gB7tEwaos0ajk3ZXAI4mHGcNT47NE+ZnZD1pEpUOFLvltIwmeJA== - dependencies: - "@csstools/normalize.css" "*" - postcss-browser-comments "^4" - sanitize.css "*" - -postcss-opacity-percentage@^1.1.2: - version "1.1.2" - resolved "http://localhost:4873/postcss-opacity-percentage/-/postcss-opacity-percentage-1.1.2.tgz#bd698bb3670a0a27f6d657cc16744b3ebf3b1145" - integrity sha512-lyUfF7miG+yewZ8EAk9XUBIlrHyUE6fijnesuz+Mj5zrIHIEw6KcIZSOk/elVMqzLvREmXB83Zi/5QpNRYd47w== - -postcss-ordered-values@^5.1.3: - version "5.1.3" - resolved "http://localhost:4873/postcss-ordered-values/-/postcss-ordered-values-5.1.3.tgz#b6fd2bd10f937b23d86bc829c69e7732ce76ea38" - integrity sha512-9UO79VUhPwEkzbb3RNpqqghc6lcYej1aveQteWY+4POIwlqkYE21HKWaLDF6lWNuqCobEAyTovVhtI32Rbv2RQ== - dependencies: - cssnano-utils "^3.1.0" - postcss-value-parser "^4.2.0" - -postcss-overflow-shorthand@^3.0.4: - version "3.0.4" - resolved "http://localhost:4873/postcss-overflow-shorthand/-/postcss-overflow-shorthand-3.0.4.tgz#7ed6486fec44b76f0eab15aa4866cda5d55d893e" - integrity 
sha512-otYl/ylHK8Y9bcBnPLo3foYFLL6a6Ak+3EQBPOTR7luMYCOsiVTUk1iLvNf6tVPNGXcoL9Hoz37kpfriRIFb4A== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-page-break@^3.0.4: - version "3.0.4" - resolved "http://localhost:4873/postcss-page-break/-/postcss-page-break-3.0.4.tgz#7fbf741c233621622b68d435babfb70dd8c1ee5f" - integrity sha512-1JGu8oCjVXLa9q9rFTo4MbeeA5FMe00/9C7lN4va606Rdb+HkxXtXsmEDrIraQ11fGz/WvKWa8gMuCKkrXpTsQ== - -postcss-place@^7.0.5: - version "7.0.5" - resolved "http://localhost:4873/postcss-place/-/postcss-place-7.0.5.tgz#95dbf85fd9656a3a6e60e832b5809914236986c4" - integrity sha512-wR8igaZROA6Z4pv0d+bvVrvGY4GVHihBCBQieXFY3kuSuMyOmEnnfFzHl/tQuqHZkfkIVBEbDvYcFfHmpSet9g== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-preset-env@^7.0.1: - version "7.8.2" - resolved "http://localhost:4873/postcss-preset-env/-/postcss-preset-env-7.8.2.tgz#4c834d5cbd2e29df2abf59118947c456922b79ba" - integrity sha512-rSMUEaOCnovKnwc5LvBDHUDzpGP+nrUeWZGWt9M72fBvckCi45JmnJigUr4QG4zZeOHmOCNCZnd2LKDvP++ZuQ== - dependencies: - "@csstools/postcss-cascade-layers" "^1.1.0" - "@csstools/postcss-color-function" "^1.1.1" - "@csstools/postcss-font-format-keywords" "^1.0.1" - "@csstools/postcss-hwb-function" "^1.0.2" - "@csstools/postcss-ic-unit" "^1.0.1" - "@csstools/postcss-is-pseudo-class" "^2.0.7" - "@csstools/postcss-nested-calc" "^1.0.0" - "@csstools/postcss-normalize-display-values" "^1.0.1" - "@csstools/postcss-oklab-function" "^1.1.1" - "@csstools/postcss-progressive-custom-properties" "^1.3.0" - "@csstools/postcss-stepped-value-functions" "^1.0.1" - "@csstools/postcss-text-decoration-shorthand" "^1.0.0" - "@csstools/postcss-trigonometric-functions" "^1.0.2" - "@csstools/postcss-unset-value" "^1.0.2" - autoprefixer "^10.4.11" - browserslist "^4.21.3" - css-blank-pseudo "^3.0.3" - css-has-pseudo "^3.0.4" - css-prefers-color-scheme "^6.0.3" - cssdb "^7.0.1" - postcss-attribute-case-insensitive "^5.0.2" - postcss-clamp "^4.1.0" - postcss-color-functional-notation 
"^4.2.4" - postcss-color-hex-alpha "^8.0.4" - postcss-color-rebeccapurple "^7.1.1" - postcss-custom-media "^8.0.2" - postcss-custom-properties "^12.1.9" - postcss-custom-selectors "^6.0.3" - postcss-dir-pseudo-class "^6.0.5" - postcss-double-position-gradients "^3.1.2" - postcss-env-function "^4.0.6" - postcss-focus-visible "^6.0.4" - postcss-focus-within "^5.0.4" - postcss-font-variant "^5.0.0" - postcss-gap-properties "^3.0.5" - postcss-image-set-function "^4.0.7" - postcss-initial "^4.0.1" - postcss-lab-function "^4.2.1" - postcss-logical "^5.0.4" - postcss-media-minmax "^5.0.0" - postcss-nesting "^10.2.0" - postcss-opacity-percentage "^1.1.2" - postcss-overflow-shorthand "^3.0.4" - postcss-page-break "^3.0.4" - postcss-place "^7.0.5" - postcss-pseudo-class-any-link "^7.1.6" - postcss-replace-overflow-wrap "^4.0.0" - postcss-selector-not "^6.0.1" - postcss-value-parser "^4.2.0" - -postcss-pseudo-class-any-link@^7.1.6: - version "7.1.6" - resolved "http://localhost:4873/postcss-pseudo-class-any-link/-/postcss-pseudo-class-any-link-7.1.6.tgz#2693b221902da772c278def85a4d9a64b6e617ab" - integrity sha512-9sCtZkO6f/5ML9WcTLcIyV1yz9D1rf0tWc+ulKcvV30s0iZKS/ONyETvoWsr6vnrmW+X+KmuK3gV/w5EWnT37w== - dependencies: - postcss-selector-parser "^6.0.10" - -postcss-reduce-initial@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-reduce-initial/-/postcss-reduce-initial-5.1.0.tgz#fc31659ea6e85c492fb2a7b545370c215822c5d6" - integrity sha512-5OgTUviz0aeH6MtBjHfbr57tml13PuedK/Ecg8szzd4XRMbYxH4572JFG067z+FqBIf6Zp/d+0581glkvvWMFw== - dependencies: - browserslist "^4.16.6" - caniuse-api "^3.0.0" - -postcss-reduce-transforms@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-reduce-transforms/-/postcss-reduce-transforms-5.1.0.tgz#333b70e7758b802f3dd0ddfe98bb1ccfef96b6e9" - integrity sha512-2fbdbmgir5AvpW9RLtdONx1QoYG2/EtqpNQbFASDlixBbAYuTcJ0dECwlqNqH7VbaUnEnh8SrxOe2sRIn24XyQ== - dependencies: - postcss-value-parser "^4.2.0" - 
-postcss-replace-overflow-wrap@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/postcss-replace-overflow-wrap/-/postcss-replace-overflow-wrap-4.0.0.tgz#d2df6bed10b477bf9c52fab28c568b4b29ca4319" - integrity sha512-KmF7SBPphT4gPPcKZc7aDkweHiKEEO8cla/GjcBK+ckKxiZslIu3C4GCRW3DNfL0o7yW7kMQu9xlZ1kXRXLXtw== - -postcss-selector-not@^6.0.1: - version "6.0.1" - resolved "http://localhost:4873/postcss-selector-not/-/postcss-selector-not-6.0.1.tgz#8f0a709bf7d4b45222793fc34409be407537556d" - integrity sha512-1i9affjAe9xu/y9uqWH+tD4r6/hDaXJruk8xn2x1vzxC2U3J3LKO3zJW4CyxlNhA56pADJ/djpEwpH1RClI2rQ== - dependencies: - postcss-selector-parser "^6.0.10" - -postcss-selector-parser@^6.0.10, postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector-parser@^6.0.5, postcss-selector-parser@^6.0.6, postcss-selector-parser@^6.0.9: - version "6.0.10" - resolved "http://localhost:4873/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz#79b61e2c0d1bfc2602d549e11d0876256f8df88d" - integrity sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w== - dependencies: - cssesc "^3.0.0" - util-deprecate "^1.0.2" - -postcss-svgo@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/postcss-svgo/-/postcss-svgo-5.1.0.tgz#0a317400ced789f233a28826e77523f15857d80d" - integrity sha512-D75KsH1zm5ZrHyxPakAxJWtkyXew5qwS70v56exwvw542d9CRtTo78K0WeFxZB4G7JXKKMbEZtZayTGdIky/eA== - dependencies: - postcss-value-parser "^4.2.0" - svgo "^2.7.0" - -postcss-unique-selectors@^5.1.1: - version "5.1.1" - resolved "http://localhost:4873/postcss-unique-selectors/-/postcss-unique-selectors-5.1.1.tgz#a9f273d1eacd09e9aa6088f4b0507b18b1b541b6" - integrity sha512-5JiODlELrz8L2HwxfPnhOWZYWDxVHWL83ufOv84NrcgipI7TaeRsatAhK4Tr2/ZiYldpK/wBvw5BD3qfaK96GA== - dependencies: - postcss-selector-parser "^6.0.5" - -postcss-value-parser@^4.0.0, postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0: - version "4.2.0" - resolved 
"http://localhost:4873/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz#723c09920836ba6d3e5af019f92bc0971c02e514" - integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== - -postcss@^7.0.35: - version "7.0.39" - resolved "http://localhost:4873/postcss/-/postcss-7.0.39.tgz#9624375d965630e2e1f2c02a935c82a59cb48309" - integrity sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA== - dependencies: - picocolors "^0.2.1" - source-map "^0.6.1" - -postcss@^8.3.5, postcss@^8.4.14, postcss@^8.4.4, postcss@^8.4.7: - version "8.4.17" - resolved "http://localhost:4873/postcss/-/postcss-8.4.17.tgz#f87863ec7cd353f81f7ab2dec5d67d861bbb1be5" - integrity sha512-UNxNOLQydcOFi41yHNMcKRZ39NeXlr8AxGuZJsdub8vIb12fHzcq37DTU/QtbI6WLxNg2gF9Z+8qtRwTj1UI1Q== - dependencies: - nanoid "^3.3.4" - picocolors "^1.0.0" - source-map-js "^1.0.2" - -prelude-ls@^1.2.1: - version "1.2.1" - resolved "http://localhost:4873/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" - integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== - -prelude-ls@~1.1.2: - version "1.1.2" - resolved "http://localhost:4873/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" - integrity sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w== - -pretty-bytes@^5.3.0, pretty-bytes@^5.4.1: - version "5.6.0" - resolved "http://localhost:4873/pretty-bytes/-/pretty-bytes-5.6.0.tgz#356256f643804773c82f64723fe78c92c62beaeb" - integrity sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg== - -pretty-error@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/pretty-error/-/pretty-error-4.0.0.tgz#90a703f46dd7234adb46d0f84823e9d1cb8f10d6" - integrity sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw== - 
dependencies: - lodash "^4.17.20" - renderkid "^3.0.0" - -pretty-format@^27.0.2, pretty-format@^27.5.1: - version "27.5.1" - resolved "http://localhost:4873/pretty-format/-/pretty-format-27.5.1.tgz#2181879fdea51a7a5851fb39d920faa63f01d88e" - integrity sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ== - dependencies: - ansi-regex "^5.0.1" - ansi-styles "^5.0.0" - react-is "^17.0.1" - -pretty-format@^28.1.3: - version "28.1.3" - resolved "http://localhost:4873/pretty-format/-/pretty-format-28.1.3.tgz#c9fba8cedf99ce50963a11b27d982a9ae90970d5" - integrity sha512-8gFb/To0OmxHR9+ZTb14Df2vNxdGCX8g1xWGUTqUw5TiZvcQf5sHKObd5UcPyLLyowNwDAMTF3XWOG1B6mxl1Q== - dependencies: - "@jest/schemas" "^28.1.3" - ansi-regex "^5.0.1" - ansi-styles "^5.0.0" - react-is "^18.0.0" - -pretty-format@^29.0.0, pretty-format@^29.1.2: - version "29.1.2" - resolved "http://localhost:4873/pretty-format/-/pretty-format-29.1.2.tgz#b1f6b75be7d699be1a051f5da36e8ae9e76a8e6a" - integrity sha512-CGJ6VVGXVRP2o2Dorl4mAwwvDWT25luIsYhkyVQW32E4nL+TgW939J7LlKT/npq5Cpq6j3s+sy+13yk7xYpBmg== - dependencies: - "@jest/schemas" "^29.0.0" - ansi-styles "^5.0.0" - react-is "^18.0.0" - -process-nextick-args@~2.0.0: - version "2.0.1" - resolved "http://localhost:4873/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" - integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== - -promise@^8.1.0: - version "8.2.0" - resolved "http://localhost:4873/promise/-/promise-8.2.0.tgz#a1f6280ab67457fbfc8aad2b198c9497e9e5c806" - integrity sha512-+CMAlLHqwRYwBMXKCP+o8ns7DN+xHDUiI+0nArsiJ9y+kJVPLFxEaSw6Ha9s9H0tftxg2Yzl25wqj9G7m5wLZg== - dependencies: - asap "~2.0.6" - -prompts@^2.0.1, prompts@^2.4.2: - version "2.4.2" - resolved "http://localhost:4873/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" - integrity 
sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== - dependencies: - kleur "^3.0.3" - sisteransi "^1.0.5" - -prop-types@^15.8.1: - version "15.8.1" - resolved "http://localhost:4873/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5" - integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== - dependencies: - loose-envify "^1.4.0" - object-assign "^4.1.1" - react-is "^16.13.1" - -proxy-addr@~2.0.7: - version "2.0.7" - resolved "http://localhost:4873/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" - integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== - dependencies: - forwarded "0.2.0" - ipaddr.js "1.9.1" - -psl@^1.1.33: - version "1.9.0" - resolved "http://localhost:4873/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" - integrity sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag== - -punycode@^2.1.0, punycode@^2.1.1: - version "2.1.1" - resolved "http://localhost:4873/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" - integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== - -q@^1.1.2: - version "1.5.1" - resolved "http://localhost:4873/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" - integrity sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw== - -qs@6.10.3: - version "6.10.3" - resolved "http://localhost:4873/qs/-/qs-6.10.3.tgz#d6cde1b2ffca87b5aa57889816c5f81535e22e8e" - integrity sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ== - dependencies: - side-channel "^1.0.4" - -querystringify@^2.1.1: - version "2.2.0" - resolved "http://localhost:4873/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" - 
integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== - -queue-microtask@^1.2.2: - version "1.2.3" - resolved "http://localhost:4873/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" - integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== - -quick-lru@^5.1.1: - version "5.1.1" - resolved "http://localhost:4873/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932" - integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== - -raf@^3.4.1: - version "3.4.1" - resolved "http://localhost:4873/raf/-/raf-3.4.1.tgz#0742e99a4a6552f445d73e3ee0328af0ff1ede39" - integrity sha512-Sq4CW4QhwOHE8ucn6J34MqtZCeWFP2aQSmrlroYgqAV1PjStIhJXxYuTgUIfkEk7zTLjmIjLmU5q+fbD1NnOJA== - dependencies: - performance-now "^2.1.0" - -randombytes@^2.1.0: - version "2.1.0" - resolved "http://localhost:4873/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" - integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== - dependencies: - safe-buffer "^5.1.0" - -range-parser@^1.2.1, range-parser@~1.2.1: - version "1.2.1" - resolved "http://localhost:4873/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" - integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== - -raw-body@2.5.1: - version "2.5.1" - resolved "http://localhost:4873/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" - integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== - dependencies: - bytes "3.1.2" - http-errors "2.0.0" - iconv-lite "0.4.24" - unpipe "1.0.0" - -react-app-polyfill@^3.0.0: - version "3.0.0" - resolved 
"http://localhost:4873/react-app-polyfill/-/react-app-polyfill-3.0.0.tgz#95221e0a9bd259e5ca6b177c7bb1cb6768f68fd7" - integrity sha512-sZ41cxiU5llIB003yxxQBYrARBqe0repqPTTYBTmMqTz9szeBbE37BehCE891NZsmdZqqP+xWKdT3eo3vOzN8w== - dependencies: - core-js "^3.19.2" - object-assign "^4.1.1" - promise "^8.1.0" - raf "^3.4.1" - regenerator-runtime "^0.13.9" - whatwg-fetch "^3.6.2" - -react-dev-utils@^12.0.1: - version "12.0.1" - resolved "http://localhost:4873/react-dev-utils/-/react-dev-utils-12.0.1.tgz#ba92edb4a1f379bd46ccd6bcd4e7bc398df33e73" - integrity sha512-84Ivxmr17KjUupyqzFode6xKhjwuEJDROWKJy/BthkL7Wn6NJ8h4WE6k/exAv6ImS+0oZLRRW5j/aINMHyeGeQ== - dependencies: - "@babel/code-frame" "^7.16.0" - address "^1.1.2" - browserslist "^4.18.1" - chalk "^4.1.2" - cross-spawn "^7.0.3" - detect-port-alt "^1.1.6" - escape-string-regexp "^4.0.0" - filesize "^8.0.6" - find-up "^5.0.0" - fork-ts-checker-webpack-plugin "^6.5.0" - global-modules "^2.0.0" - globby "^11.0.4" - gzip-size "^6.0.0" - immer "^9.0.7" - is-root "^2.1.0" - loader-utils "^3.2.0" - open "^8.4.0" - pkg-up "^3.1.0" - prompts "^2.4.2" - react-error-overlay "^6.0.11" - recursive-readdir "^2.2.2" - shell-quote "^1.7.3" - strip-ansi "^6.0.1" - text-table "^0.2.0" - -react-dom@^18.2.0: - version "18.2.0" - resolved "http://localhost:4873/react-dom/-/react-dom-18.2.0.tgz#22aaf38708db2674ed9ada224ca4aa708d821e3d" - integrity sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g== - dependencies: - loose-envify "^1.1.0" - scheduler "^0.23.0" - -react-error-overlay@^6.0.11: - version "6.0.11" - resolved "http://localhost:4873/react-error-overlay/-/react-error-overlay-6.0.11.tgz#92835de5841c5cf08ba00ddd2d677b6d17ff9adb" - integrity sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg== - -react-is@^16.13.1: - version "16.13.1" - resolved "http://localhost:4873/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" - integrity 
sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== - -react-is@^17.0.1: - version "17.0.2" - resolved "http://localhost:4873/react-is/-/react-is-17.0.2.tgz#e691d4a8e9c789365655539ab372762b0efb54f0" - integrity sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w== - -react-is@^18.0.0: - version "18.2.0" - resolved "http://localhost:4873/react-is/-/react-is-18.2.0.tgz#199431eeaaa2e09f86427efbb4f1473edb47609b" - integrity sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w== - -react-refresh@^0.11.0: - version "0.11.0" - resolved "http://localhost:4873/react-refresh/-/react-refresh-0.11.0.tgz#77198b944733f0f1f1a90e791de4541f9f074046" - integrity sha512-F27qZr8uUqwhWZboondsPx8tnC3Ct3SxZA3V5WyEvujRyyNv0VYPhoBg1gZ8/MV5tubQp76Trw8lTv9hzRBa+A== - -react-scripts@5.0.1: - version "5.0.1" - resolved "http://localhost:4873/react-scripts/-/react-scripts-5.0.1.tgz#6285dbd65a8ba6e49ca8d651ce30645a6d980003" - integrity sha512-8VAmEm/ZAwQzJ+GOMLbBsTdDKOpuZh7RPs0UymvBR2vRk4iZWCskjbFnxqjrzoIvlNNRZ3QJFx6/qDSi6zSnaQ== - dependencies: - "@babel/core" "^7.16.0" - "@pmmmwh/react-refresh-webpack-plugin" "^0.5.3" - "@svgr/webpack" "^5.5.0" - babel-jest "^27.4.2" - babel-loader "^8.2.3" - babel-plugin-named-asset-import "^0.3.8" - babel-preset-react-app "^10.0.1" - bfj "^7.0.2" - browserslist "^4.18.1" - camelcase "^6.2.1" - case-sensitive-paths-webpack-plugin "^2.4.0" - css-loader "^6.5.1" - css-minimizer-webpack-plugin "^3.2.0" - dotenv "^10.0.0" - dotenv-expand "^5.1.0" - eslint "^8.3.0" - eslint-config-react-app "^7.0.1" - eslint-webpack-plugin "^3.1.1" - file-loader "^6.2.0" - fs-extra "^10.0.0" - html-webpack-plugin "^5.5.0" - identity-obj-proxy "^3.0.0" - jest "^27.4.3" - jest-resolve "^27.4.2" - jest-watch-typeahead "^1.0.0" - mini-css-extract-plugin "^2.4.5" - postcss "^8.4.4" - postcss-flexbugs-fixes "^5.0.2" - postcss-loader "^6.2.1" - postcss-normalize 
"^10.0.1" - postcss-preset-env "^7.0.1" - prompts "^2.4.2" - react-app-polyfill "^3.0.0" - react-dev-utils "^12.0.1" - react-refresh "^0.11.0" - resolve "^1.20.0" - resolve-url-loader "^4.0.0" - sass-loader "^12.3.0" - semver "^7.3.5" - source-map-loader "^3.0.0" - style-loader "^3.3.1" - tailwindcss "^3.0.2" - terser-webpack-plugin "^5.2.5" - webpack "^5.64.4" - webpack-dev-server "^4.6.0" - webpack-manifest-plugin "^4.0.2" - workbox-webpack-plugin "^6.4.1" - optionalDependencies: - fsevents "^2.3.2" - -react@^18.2.0: - version "18.2.0" - resolved "http://localhost:4873/react/-/react-18.2.0.tgz#555bd98592883255fa00de14f1151a917b5d77d5" - integrity sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ== - dependencies: - loose-envify "^1.1.0" - -read-cache@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/read-cache/-/read-cache-1.0.0.tgz#e664ef31161166c9751cdbe8dbcf86b5fb58f774" - integrity sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA== - dependencies: - pify "^2.3.0" - -readable-stream@^2.0.1: - version "2.3.7" - resolved "http://localhost:4873/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" - integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.3" - isarray "~1.0.0" - process-nextick-args "~2.0.0" - safe-buffer "~5.1.1" - string_decoder "~1.1.1" - util-deprecate "~1.0.1" - -readable-stream@^3.0.6: - version "3.6.0" - resolved "http://localhost:4873/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" - integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== - dependencies: - inherits "^2.0.3" - string_decoder "^1.1.1" - util-deprecate "^1.0.1" - -readdirp@~3.6.0: - version "3.6.0" - resolved 
"http://localhost:4873/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" - integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== - dependencies: - picomatch "^2.2.1" - -recursive-readdir@^2.2.2: - version "2.2.2" - resolved "http://localhost:4873/recursive-readdir/-/recursive-readdir-2.2.2.tgz#9946fb3274e1628de6e36b2f6714953b4845094f" - integrity sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg== - dependencies: - minimatch "3.0.4" - -redent@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f" - integrity sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg== - dependencies: - indent-string "^4.0.0" - strip-indent "^3.0.0" - -regenerate-unicode-properties@^10.1.0: - version "10.1.0" - resolved "http://localhost:4873/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.0.tgz#7c3192cab6dd24e21cb4461e5ddd7dd24fa8374c" - integrity sha512-d1VudCLoIGitcU/hEg2QqvyGZQmdC0Lf8BqdOMXGFSvJP4bNV1+XqbPQeHHLD51Jh4QJJ225dlIFvY4Ly6MXmQ== - dependencies: - regenerate "^1.4.2" - -regenerate@^1.4.2: - version "1.4.2" - resolved "http://localhost:4873/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" - integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== - -regenerator-runtime@^0.13.4, regenerator-runtime@^0.13.9: - version "0.13.9" - resolved "http://localhost:4873/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52" - integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA== - -regenerator-transform@^0.15.0: - version "0.15.0" - resolved "http://localhost:4873/regenerator-transform/-/regenerator-transform-0.15.0.tgz#cbd9ead5d77fae1a48d957cf889ad0586adb6537" - integrity 
sha512-LsrGtPmbYg19bcPHwdtmXwbW+TqNvtY4riE3P83foeHRroMbH6/2ddFBfab3t7kbzc7v7p4wbkIecHImqt0QNg== - dependencies: - "@babel/runtime" "^7.8.4" - -regex-parser@^2.2.11: - version "2.2.11" - resolved "http://localhost:4873/regex-parser/-/regex-parser-2.2.11.tgz#3b37ec9049e19479806e878cabe7c1ca83ccfe58" - integrity sha512-jbD/FT0+9MBU2XAZluI7w2OBs1RBi6p9M83nkoZayQXXU9e8Robt69FcZc7wU4eJD/YFTjn1JdCk3rbMJajz8Q== - -regexp.prototype.flags@^1.4.1, regexp.prototype.flags@^1.4.3: - version "1.4.3" - resolved "http://localhost:4873/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz#87cab30f80f66660181a3bb7bf5981a872b367ac" - integrity sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - functions-have-names "^1.2.2" - -regexpp@^3.2.0: - version "3.2.0" - resolved "http://localhost:4873/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2" - integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg== - -regexpu-core@^5.1.0: - version "5.2.1" - resolved "http://localhost:4873/regexpu-core/-/regexpu-core-5.2.1.tgz#a69c26f324c1e962e9ffd0b88b055caba8089139" - integrity sha512-HrnlNtpvqP1Xkb28tMhBUO2EbyUHdQlsnlAhzWcwHy8WJR53UWr7/MAvqrsQKMbV4qdpv03oTMG8iIhfsPFktQ== - dependencies: - regenerate "^1.4.2" - regenerate-unicode-properties "^10.1.0" - regjsgen "^0.7.1" - regjsparser "^0.9.1" - unicode-match-property-ecmascript "^2.0.0" - unicode-match-property-value-ecmascript "^2.0.0" - -regjsgen@^0.7.1: - version "0.7.1" - resolved "http://localhost:4873/regjsgen/-/regjsgen-0.7.1.tgz#ee5ef30e18d3f09b7c369b76e7c2373ed25546f6" - integrity sha512-RAt+8H2ZEzHeYWxZ3H2z6tF18zyyOnlcdaafLrm21Bguj7uZy6ULibiAFdXEtKQY4Sy7wDTwDiOazasMLc4KPA== - -regjsparser@^0.9.1: - version "0.9.1" - resolved "http://localhost:4873/regjsparser/-/regjsparser-0.9.1.tgz#272d05aa10c7c1f67095b1ff0addae8442fc5709" - integrity 
sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ== - dependencies: - jsesc "~0.5.0" - -relateurl@^0.2.7: - version "0.2.7" - resolved "http://localhost:4873/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" - integrity sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog== - -renderkid@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/renderkid/-/renderkid-3.0.0.tgz#5fd823e4d6951d37358ecc9a58b1f06836b6268a" - integrity sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg== - dependencies: - css-select "^4.1.3" - dom-converter "^0.2.0" - htmlparser2 "^6.1.0" - lodash "^4.17.21" - strip-ansi "^6.0.1" - -require-directory@^2.1.1: - version "2.1.1" - resolved "http://localhost:4873/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" - integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== - -require-from-string@^2.0.2: - version "2.0.2" - resolved "http://localhost:4873/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" - integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== - -requires-port@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" - integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== - -resolve-cwd@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" - integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== - dependencies: - resolve-from "^5.0.0" - -resolve-from@^4.0.0: - version "4.0.0" - resolved 
"http://localhost:4873/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" - integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== - -resolve-from@^5.0.0: - version "5.0.0" - resolved "http://localhost:4873/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" - integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== - -resolve-url-loader@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/resolve-url-loader/-/resolve-url-loader-4.0.0.tgz#d50d4ddc746bb10468443167acf800dcd6c3ad57" - integrity sha512-05VEMczVREcbtT7Bz+C+96eUO5HDNvdthIiMB34t7FcF8ehcu4wC0sSgPUubs3XW2Q3CNLJk/BJrCU9wVRymiA== - dependencies: - adjust-sourcemap-loader "^4.0.0" - convert-source-map "^1.7.0" - loader-utils "^2.0.0" - postcss "^7.0.35" - source-map "0.6.1" - -resolve.exports@^1.1.0: - version "1.1.0" - resolved "http://localhost:4873/resolve.exports/-/resolve.exports-1.1.0.tgz#5ce842b94b05146c0e03076985d1d0e7e48c90c9" - integrity sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ== - -resolve@^1.1.7, resolve@^1.14.2, resolve@^1.19.0, resolve@^1.20.0, resolve@^1.22.0, resolve@^1.22.1: - version "1.22.1" - resolved "http://localhost:4873/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" - integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== - dependencies: - is-core-module "^2.9.0" - path-parse "^1.0.7" - supports-preserve-symlinks-flag "^1.0.0" - -resolve@^2.0.0-next.3: - version "2.0.0-next.4" - resolved "http://localhost:4873/resolve/-/resolve-2.0.0-next.4.tgz#3d37a113d6429f496ec4752d2a2e58efb1fd4660" - integrity sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ== - dependencies: - is-core-module "^2.9.0" - path-parse "^1.0.7" - supports-preserve-symlinks-flag "^1.0.0" - 
-retry@^0.13.1: - version "0.13.1" - resolved "http://localhost:4873/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658" - integrity sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg== - -reusify@^1.0.4: - version "1.0.4" - resolved "http://localhost:4873/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" - integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== - -rimraf@^3.0.0, rimraf@^3.0.2: - version "3.0.2" - resolved "http://localhost:4873/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" - integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== - dependencies: - glob "^7.1.3" - -rollup-plugin-terser@^7.0.0: - version "7.0.2" - resolved "http://localhost:4873/rollup-plugin-terser/-/rollup-plugin-terser-7.0.2.tgz#e8fbba4869981b2dc35ae7e8a502d5c6c04d324d" - integrity sha512-w3iIaU4OxcF52UUXiZNsNeuXIMDvFrr+ZXK6bFZ0Q60qyVfq4uLptoS4bbq3paG3x216eQllFZX7zt6TIImguQ== - dependencies: - "@babel/code-frame" "^7.10.4" - jest-worker "^26.2.1" - serialize-javascript "^4.0.0" - terser "^5.0.0" - -rollup@^2.43.1: - version "2.79.1" - resolved "http://localhost:4873/rollup/-/rollup-2.79.1.tgz#bedee8faef7c9f93a2647ac0108748f497f081c7" - integrity sha512-uKxbd0IhMZOhjAiD5oAFp7BqvkA4Dv47qpOCtaNvng4HBwdbWtdOh8f5nZNuk2rp51PMGk3bzfWu5oayNEuYnw== - optionalDependencies: - fsevents "~2.3.2" - -run-parallel@^1.1.9: - version "1.2.0" - resolved "http://localhost:4873/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" - integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== - dependencies: - queue-microtask "^1.2.2" - -safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: - version "5.1.2" - resolved "http://localhost:4873/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" - integrity 
sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== - -safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.1.0, safe-buffer@~5.2.0: - version "5.2.1" - resolved "http://localhost:4873/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" - integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== - -safe-regex-test@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/safe-regex-test/-/safe-regex-test-1.0.0.tgz#793b874d524eb3640d1873aad03596db2d4f2295" - integrity sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA== - dependencies: - call-bind "^1.0.2" - get-intrinsic "^1.1.3" - is-regex "^1.1.4" - -"safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0": - version "2.1.2" - resolved "http://localhost:4873/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" - integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== - -sanitize.css@*: - version "13.0.0" - resolved "http://localhost:4873/sanitize.css/-/sanitize.css-13.0.0.tgz#2675553974b27964c75562ade3bd85d79879f173" - integrity sha512-ZRwKbh/eQ6w9vmTjkuG0Ioi3HBwPFce0O+v//ve+aOq1oeCy7jMV2qzzAlpsNuqpqCBjjriM1lbtZbF/Q8jVyA== - -sass-loader@^12.3.0: - version "12.6.0" - resolved "http://localhost:4873/sass-loader/-/sass-loader-12.6.0.tgz#5148362c8e2cdd4b950f3c63ac5d16dbfed37bcb" - integrity sha512-oLTaH0YCtX4cfnJZxKSLAyglED0naiYfNG1iXfU5w1LNZ+ukoA5DtyDIN5zmKVZwYNJP4KRc5Y3hkWga+7tYfA== - dependencies: - klona "^2.0.4" - neo-async "^2.6.2" - -sax@~1.2.4: - version "1.2.4" - resolved "http://localhost:4873/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" - integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== - -saxes@^5.0.1: - version "5.0.1" - resolved 
"http://localhost:4873/saxes/-/saxes-5.0.1.tgz#eebab953fa3b7608dbe94e5dadb15c888fa6696d" - integrity sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw== - dependencies: - xmlchars "^2.2.0" - -scheduler@^0.23.0: - version "0.23.0" - resolved "http://localhost:4873/scheduler/-/scheduler-0.23.0.tgz#ba8041afc3d30eb206a487b6b384002e4e61fdfe" - integrity sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw== - dependencies: - loose-envify "^1.1.0" - -schema-utils@2.7.0: - version "2.7.0" - resolved "http://localhost:4873/schema-utils/-/schema-utils-2.7.0.tgz#17151f76d8eae67fbbf77960c33c676ad9f4efc7" - integrity sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A== - dependencies: - "@types/json-schema" "^7.0.4" - ajv "^6.12.2" - ajv-keywords "^3.4.1" - -schema-utils@^2.6.5: - version "2.7.1" - resolved "http://localhost:4873/schema-utils/-/schema-utils-2.7.1.tgz#1ca4f32d1b24c590c203b8e7a50bf0ea4cd394d7" - integrity sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg== - dependencies: - "@types/json-schema" "^7.0.5" - ajv "^6.12.4" - ajv-keywords "^3.5.2" - -schema-utils@^3.0.0, schema-utils@^3.1.0, schema-utils@^3.1.1: - version "3.1.1" - resolved "http://localhost:4873/schema-utils/-/schema-utils-3.1.1.tgz#bc74c4b6b6995c1d88f76a8b77bea7219e0c8281" - integrity sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw== - dependencies: - "@types/json-schema" "^7.0.8" - ajv "^6.12.5" - ajv-keywords "^3.5.2" - -schema-utils@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/schema-utils/-/schema-utils-4.0.0.tgz#60331e9e3ae78ec5d16353c467c34b3a0a1d3df7" - integrity sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg== - dependencies: - "@types/json-schema" "^7.0.9" - ajv "^8.8.0" - ajv-formats "^2.1.1" - ajv-keywords 
"^5.0.0" - -select-hose@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" - integrity sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg== - -selfsigned@^2.1.1: - version "2.1.1" - resolved "http://localhost:4873/selfsigned/-/selfsigned-2.1.1.tgz#18a7613d714c0cd3385c48af0075abf3f266af61" - integrity sha512-GSL3aowiF7wa/WtSFwnUrludWFoNhftq8bUkH9pkzjpN2XSPOAYEgg6e0sS9s0rZwgJzJiQRPU18A6clnoW5wQ== - dependencies: - node-forge "^1" - -semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0: - version "6.3.0" - resolved "http://localhost:4873/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" - integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== - -semver@^7.3.2, semver@^7.3.5, semver@^7.3.7: - version "7.3.8" - resolved "http://localhost:4873/semver/-/semver-7.3.8.tgz#07a78feafb3f7b32347d725e33de7e2a2df67798" - integrity sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A== - dependencies: - lru-cache "^6.0.0" - -send@0.18.0: - version "0.18.0" - resolved "http://localhost:4873/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" - integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== - dependencies: - debug "2.6.9" - depd "2.0.0" - destroy "1.2.0" - encodeurl "~1.0.2" - escape-html "~1.0.3" - etag "~1.8.1" - fresh "0.5.2" - http-errors "2.0.0" - mime "1.6.0" - ms "2.1.3" - on-finished "2.4.1" - range-parser "~1.2.1" - statuses "2.0.1" - -serialize-javascript@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/serialize-javascript/-/serialize-javascript-4.0.0.tgz#b525e1238489a5ecfc42afacc3fe99e666f4b1aa" - integrity sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw== - dependencies: - randombytes "^2.1.0" - 
-serialize-javascript@^6.0.0: - version "6.0.0" - resolved "http://localhost:4873/serialize-javascript/-/serialize-javascript-6.0.0.tgz#efae5d88f45d7924141da8b5c3a7a7e663fefeb8" - integrity sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag== - dependencies: - randombytes "^2.1.0" - -serve-index@^1.9.1: - version "1.9.1" - resolved "http://localhost:4873/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" - integrity sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw== - dependencies: - accepts "~1.3.4" - batch "0.6.1" - debug "2.6.9" - escape-html "~1.0.3" - http-errors "~1.6.2" - mime-types "~2.1.17" - parseurl "~1.3.2" - -serve-static@1.15.0: - version "1.15.0" - resolved "http://localhost:4873/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" - integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== - dependencies: - encodeurl "~1.0.2" - escape-html "~1.0.3" - parseurl "~1.3.3" - send "0.18.0" - -setprototypeof@1.1.0: - version "1.1.0" - resolved "http://localhost:4873/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" - integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== - -setprototypeof@1.2.0: - version "1.2.0" - resolved "http://localhost:4873/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" - integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== - -shallow-clone@^3.0.0: - version "3.0.1" - resolved "http://localhost:4873/shallow-clone/-/shallow-clone-3.0.1.tgz#8f2981ad92531f55035b01fb230769a40e02efa3" - integrity sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA== - dependencies: - kind-of "^6.0.2" - -shebang-command@^2.0.0: - version "2.0.0" - resolved 
"http://localhost:4873/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" - integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== - dependencies: - shebang-regex "^3.0.0" - -shebang-regex@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" - integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== - -shell-quote@^1.7.3: - version "1.7.3" - resolved "http://localhost:4873/shell-quote/-/shell-quote-1.7.3.tgz#aa40edac170445b9a431e17bb62c0b881b9c4123" - integrity sha512-Vpfqwm4EnqGdlsBFNmHhxhElJYrdfcxPThu+ryKS5J8L/fhAwLazFZtq+S+TWZ9ANj2piSQLGj6NQg+lKPmxrw== - -side-channel@^1.0.4: - version "1.0.4" - resolved "http://localhost:4873/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" - integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== - dependencies: - call-bind "^1.0.0" - get-intrinsic "^1.0.2" - object-inspect "^1.9.0" - -signal-exit@^3.0.2, signal-exit@^3.0.3: - version "3.0.7" - resolved "http://localhost:4873/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" - integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== - -sisteransi@^1.0.5: - version "1.0.5" - resolved "http://localhost:4873/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" - integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== - -slash@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" - integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== - -slash@^4.0.0: - version "4.0.0" - resolved 
"http://localhost:4873/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7" - integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew== - -sockjs@^0.3.24: - version "0.3.24" - resolved "http://localhost:4873/sockjs/-/sockjs-0.3.24.tgz#c9bc8995f33a111bea0395ec30aa3206bdb5ccce" - integrity sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ== - dependencies: - faye-websocket "^0.11.3" - uuid "^8.3.2" - websocket-driver "^0.7.4" - -source-list-map@^2.0.0, source-list-map@^2.0.1: - version "2.0.1" - resolved "http://localhost:4873/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" - integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== - -source-map-js@^1.0.1, source-map-js@^1.0.2: - version "1.0.2" - resolved "http://localhost:4873/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" - integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== - -source-map-loader@^3.0.0: - version "3.0.1" - resolved "http://localhost:4873/source-map-loader/-/source-map-loader-3.0.1.tgz#9ae5edc7c2d42570934be4c95d1ccc6352eba52d" - integrity sha512-Vp1UsfyPvgujKQzi4pyDiTOnE3E4H+yHvkVRN3c/9PJmQS4CQJExvcDvaX/D+RV+xQben9HJ56jMJS3CgUeWyA== - dependencies: - abab "^2.0.5" - iconv-lite "^0.6.3" - source-map-js "^1.0.1" - -source-map-support@^0.5.6, source-map-support@~0.5.20: - version "0.5.21" - resolved "http://localhost:4873/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" - integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== - dependencies: - buffer-from "^1.0.0" - source-map "^0.6.0" - -source-map@0.6.1, source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: - version "0.6.1" - resolved 
"http://localhost:4873/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" - integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== - -source-map@^0.7.3: - version "0.7.4" - resolved "http://localhost:4873/source-map/-/source-map-0.7.4.tgz#a9bbe705c9d8846f4e08ff6765acf0f1b0898656" - integrity sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA== - -source-map@^0.8.0-beta.0: - version "0.8.0-beta.0" - resolved "http://localhost:4873/source-map/-/source-map-0.8.0-beta.0.tgz#d4c1bb42c3f7ee925f005927ba10709e0d1d1f11" - integrity sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA== - dependencies: - whatwg-url "^7.0.0" - -sourcemap-codec@^1.4.8: - version "1.4.8" - resolved "http://localhost:4873/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4" - integrity sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA== - -spdy-transport@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" - integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== - dependencies: - debug "^4.1.0" - detect-node "^2.0.4" - hpack.js "^2.1.6" - obuf "^1.1.2" - readable-stream "^3.0.6" - wbuf "^1.7.3" - -spdy@^4.0.2: - version "4.0.2" - resolved "http://localhost:4873/spdy/-/spdy-4.0.2.tgz#b74f466203a3eda452c02492b91fb9e84a27677b" - integrity sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA== - dependencies: - debug "^4.1.0" - handle-thing "^2.0.0" - http-deceiver "^1.2.7" - select-hose "^2.0.0" - spdy-transport "^3.0.0" - -sprintf-js@~1.0.2: - version "1.0.3" - resolved "http://localhost:4873/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" - integrity 
sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== - -stable@^0.1.8: - version "0.1.8" - resolved "http://localhost:4873/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf" - integrity sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w== - -stack-utils@^2.0.3: - version "2.0.5" - resolved "http://localhost:4873/stack-utils/-/stack-utils-2.0.5.tgz#d25265fca995154659dbbfba3b49254778d2fdd5" - integrity sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA== - dependencies: - escape-string-regexp "^2.0.0" - -stackframe@^1.3.4: - version "1.3.4" - resolved "http://localhost:4873/stackframe/-/stackframe-1.3.4.tgz#b881a004c8c149a5e8efef37d51b16e412943310" - integrity sha512-oeVtt7eWQS+Na6F//S4kJ2K2VbRlS9D43mAlMyVpVWovy9o+jfgH8O9agzANzaiLjclA0oYzUXEM4PurhSUChw== - -statuses@2.0.1: - version "2.0.1" - resolved "http://localhost:4873/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" - integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== - -"statuses@>= 1.4.0 < 2": - version "1.5.0" - resolved "http://localhost:4873/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" - integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== - -string-length@^4.0.1: - version "4.0.2" - resolved "http://localhost:4873/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" - integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== - dependencies: - char-regex "^1.0.2" - strip-ansi "^6.0.0" - -string-length@^5.0.1: - version "5.0.1" - resolved "http://localhost:4873/string-length/-/string-length-5.0.1.tgz#3d647f497b6e8e8d41e422f7e0b23bc536c8381e" - integrity 
sha512-9Ep08KAMUn0OadnVaBuRdE2l615CQ508kr0XMadjClfYpdCyvrbFp6Taebo8yyxokQ4viUd/xPPUA4FGgUa0ow== - dependencies: - char-regex "^2.0.0" - strip-ansi "^7.0.1" - -string-natural-compare@^3.0.1: - version "3.0.1" - resolved "http://localhost:4873/string-natural-compare/-/string-natural-compare-3.0.1.tgz#7a42d58474454963759e8e8b7ae63d71c1e7fdf4" - integrity sha512-n3sPwynL1nwKi3WJ6AIsClwBMa0zTi54fn2oLU6ndfTSIO05xaznjSf15PcBZU6FNWbmN5Q6cxT4V5hGvB4taw== - -string-width@^4.1.0, string-width@^4.2.0: - version "4.2.3" - resolved "http://localhost:4873/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" - integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.1" - -string.prototype.matchall@^4.0.6, string.prototype.matchall@^4.0.7: - version "4.0.7" - resolved "http://localhost:4873/string.prototype.matchall/-/string.prototype.matchall-4.0.7.tgz#8e6ecb0d8a1fb1fda470d81acecb2dba057a481d" - integrity sha512-f48okCX7JiwVi1NXCVWcFnZgADDC/n2vePlQ/KUCNqCikLLilQvwjMO8+BHVKvgzH0JB0J9LEPgxOGT02RoETg== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.1" - get-intrinsic "^1.1.1" - has-symbols "^1.0.3" - internal-slot "^1.0.3" - regexp.prototype.flags "^1.4.1" - side-channel "^1.0.4" - -string.prototype.trimend@^1.0.5: - version "1.0.5" - resolved "http://localhost:4873/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz#914a65baaab25fbdd4ee291ca7dde57e869cb8d0" - integrity sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.4" - es-abstract "^1.19.5" - -string.prototype.trimstart@^1.0.5: - version "1.0.5" - resolved "http://localhost:4873/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz#5466d93ba58cfa2134839f81d7f42437e8c01fef" - integrity 
sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.4" - es-abstract "^1.19.5" - -string_decoder@^1.1.1: - version "1.3.0" - resolved "http://localhost:4873/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" - integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== - dependencies: - safe-buffer "~5.2.0" - -string_decoder@~1.1.1: - version "1.1.1" - resolved "http://localhost:4873/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" - integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== - dependencies: - safe-buffer "~5.1.0" - -stringify-object@^3.3.0: - version "3.3.0" - resolved "http://localhost:4873/stringify-object/-/stringify-object-3.3.0.tgz#703065aefca19300d3ce88af4f5b3956d7556629" - integrity sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw== - dependencies: - get-own-enumerable-property-symbols "^3.0.0" - is-obj "^1.0.1" - is-regexp "^1.0.0" - -strip-ansi@^6.0.0, strip-ansi@^6.0.1: - version "6.0.1" - resolved "http://localhost:4873/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" - integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== - dependencies: - ansi-regex "^5.0.1" - -strip-ansi@^7.0.1: - version "7.0.1" - resolved "http://localhost:4873/strip-ansi/-/strip-ansi-7.0.1.tgz#61740a08ce36b61e50e65653f07060d000975fb2" - integrity sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw== - dependencies: - ansi-regex "^6.0.1" - -strip-bom@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" - integrity 
sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA== - -strip-bom@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" - integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== - -strip-comments@^2.0.1: - version "2.0.1" - resolved "http://localhost:4873/strip-comments/-/strip-comments-2.0.1.tgz#4ad11c3fbcac177a67a40ac224ca339ca1c1ba9b" - integrity sha512-ZprKx+bBLXv067WTCALv8SSz5l2+XhpYCsVtSqlMnkAXMWDq+/ekVbl1ghqP9rUHTzv6sm/DwCOiYutU/yp1fw== - -strip-final-newline@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" - integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== - -strip-indent@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001" - integrity sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ== - dependencies: - min-indent "^1.0.0" - -strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: - version "3.1.1" - resolved "http://localhost:4873/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" - integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== - -style-loader@^3.3.1: - version "3.3.1" - resolved "http://localhost:4873/style-loader/-/style-loader-3.3.1.tgz#057dfa6b3d4d7c7064462830f9113ed417d38575" - integrity sha512-GPcQ+LDJbrcxHORTRes6Jy2sfvK2kS6hpSfI/fXhPt+spVzxF6LJ1dHLN9zIGmVaaP044YKaIatFaufENRiDoQ== - -stylehacks@^5.1.0: - version "5.1.0" - resolved "http://localhost:4873/stylehacks/-/stylehacks-5.1.0.tgz#a40066490ca0caca04e96c6b02153ddc39913520" - integrity 
sha512-SzLmvHQTrIWfSgljkQCw2++C9+Ne91d/6Sp92I8c5uHTcy/PgeHamwITIbBW9wnFTY/3ZfSXR9HIL6Ikqmcu6Q== - dependencies: - browserslist "^4.16.6" - postcss-selector-parser "^6.0.4" - -supports-color@^5.3.0: - version "5.5.0" - resolved "http://localhost:4873/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" - integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== - dependencies: - has-flag "^3.0.0" - -supports-color@^7.0.0, supports-color@^7.1.0: - version "7.2.0" - resolved "http://localhost:4873/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" - integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== - dependencies: - has-flag "^4.0.0" - -supports-color@^8.0.0: - version "8.1.1" - resolved "http://localhost:4873/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" - integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== - dependencies: - has-flag "^4.0.0" - -supports-hyperlinks@^2.0.0: - version "2.3.0" - resolved "http://localhost:4873/supports-hyperlinks/-/supports-hyperlinks-2.3.0.tgz#3943544347c1ff90b15effb03fc14ae45ec10624" - integrity sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA== - dependencies: - has-flag "^4.0.0" - supports-color "^7.0.0" - -supports-preserve-symlinks-flag@^1.0.0: - version "1.0.0" - resolved "http://localhost:4873/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" - integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== - -svg-parser@^2.0.2: - version "2.0.4" - resolved "http://localhost:4873/svg-parser/-/svg-parser-2.0.4.tgz#fdc2e29e13951736140b76cb122c8ee6630eb6b5" - integrity 
sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ== - -svgo@^1.2.2: - version "1.3.2" - resolved "http://localhost:4873/svgo/-/svgo-1.3.2.tgz#b6dc511c063346c9e415b81e43401145b96d4167" - integrity sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw== - dependencies: - chalk "^2.4.1" - coa "^2.0.2" - css-select "^2.0.0" - css-select-base-adapter "^0.1.1" - css-tree "1.0.0-alpha.37" - csso "^4.0.2" - js-yaml "^3.13.1" - mkdirp "~0.5.1" - object.values "^1.1.0" - sax "~1.2.4" - stable "^0.1.8" - unquote "~1.1.1" - util.promisify "~1.0.0" - -svgo@^2.7.0: - version "2.8.0" - resolved "http://localhost:4873/svgo/-/svgo-2.8.0.tgz#4ff80cce6710dc2795f0c7c74101e6764cfccd24" - integrity sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg== - dependencies: - "@trysound/sax" "0.2.0" - commander "^7.2.0" - css-select "^4.1.3" - css-tree "^1.1.3" - csso "^4.2.0" - picocolors "^1.0.0" - stable "^0.1.8" - -symbol-tree@^3.2.4: - version "3.2.4" - resolved "http://localhost:4873/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" - integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== - -tailwindcss@^3.0.2: - version "3.1.8" - resolved "http://localhost:4873/tailwindcss/-/tailwindcss-3.1.8.tgz#4f8520550d67a835d32f2f4021580f9fddb7b741" - integrity sha512-YSneUCZSFDYMwk+TGq8qYFdCA3yfBRdBlS7txSq0LUmzyeqRe3a8fBQzbz9M3WS/iFT4BNf/nmw9mEzrnSaC0g== - dependencies: - arg "^5.0.2" - chokidar "^3.5.3" - color-name "^1.1.4" - detective "^5.2.1" - didyoumean "^1.2.2" - dlv "^1.1.3" - fast-glob "^3.2.11" - glob-parent "^6.0.2" - is-glob "^4.0.3" - lilconfig "^2.0.6" - normalize-path "^3.0.0" - object-hash "^3.0.0" - picocolors "^1.0.0" - postcss "^8.4.14" - postcss-import "^14.1.0" - postcss-js "^4.0.0" - postcss-load-config "^3.1.4" - postcss-nested "5.0.6" - postcss-selector-parser "^6.0.10" - 
postcss-value-parser "^4.2.0" - quick-lru "^5.1.1" - resolve "^1.22.1" - -tapable@^1.0.0: - version "1.1.3" - resolved "http://localhost:4873/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" - integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== - -tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0: - version "2.2.1" - resolved "http://localhost:4873/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" - integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== - -temp-dir@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/temp-dir/-/temp-dir-2.0.0.tgz#bde92b05bdfeb1516e804c9c00ad45177f31321e" - integrity sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg== - -tempy@^0.6.0: - version "0.6.0" - resolved "http://localhost:4873/tempy/-/tempy-0.6.0.tgz#65e2c35abc06f1124a97f387b08303442bde59f3" - integrity sha512-G13vtMYPT/J8A4X2SjdtBTphZlrp1gKv6hZiOjw14RCWg6GbHuQBGtjlx75xLbYV/wEc0D7G5K4rxKP/cXk8Bw== - dependencies: - is-stream "^2.0.0" - temp-dir "^2.0.0" - type-fest "^0.16.0" - unique-string "^2.0.0" - -terminal-link@^2.0.0: - version "2.1.1" - resolved "http://localhost:4873/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994" - integrity sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ== - dependencies: - ansi-escapes "^4.2.1" - supports-hyperlinks "^2.0.0" - -terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.5: - version "5.3.6" - resolved "http://localhost:4873/terser-webpack-plugin/-/terser-webpack-plugin-5.3.6.tgz#5590aec31aa3c6f771ce1b1acca60639eab3195c" - integrity sha512-kfLFk+PoLUQIbLmB1+PZDMRSZS99Mp+/MHqDNmMA6tOItzRt+Npe3E+fsMs5mfcM0wCtrrdU387UnV+vnSffXQ== - dependencies: - "@jridgewell/trace-mapping" "^0.3.14" - jest-worker "^27.4.5" - schema-utils "^3.1.1" - serialize-javascript "^6.0.0" - 
terser "^5.14.1" - -terser@^5.0.0, terser@^5.10.0, terser@^5.14.1: - version "5.15.1" - resolved "http://localhost:4873/terser/-/terser-5.15.1.tgz#8561af6e0fd6d839669c73b92bdd5777d870ed6c" - integrity sha512-K1faMUvpm/FBxjBXud0LWVAGxmvoPbZbfTCYbSgaaYQaIXI3/TdI7a7ZGA73Zrou6Q8Zmz3oeUTsp/dj+ag2Xw== - dependencies: - "@jridgewell/source-map" "^0.3.2" - acorn "^8.5.0" - commander "^2.20.0" - source-map-support "~0.5.20" - -test-exclude@^6.0.0: - version "6.0.0" - resolved "http://localhost:4873/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" - integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== - dependencies: - "@istanbuljs/schema" "^0.1.2" - glob "^7.1.4" - minimatch "^3.0.4" - -text-table@^0.2.0: - version "0.2.0" - resolved "http://localhost:4873/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" - integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== - -throat@^6.0.1: - version "6.0.1" - resolved "http://localhost:4873/throat/-/throat-6.0.1.tgz#d514fedad95740c12c2d7fc70ea863eb51ade375" - integrity sha512-8hmiGIJMDlwjg7dlJ4yKGLK8EsYqKgPWbG3b4wjJddKNwc7N7Dpn08Df4szr/sZdMVeOstrdYSsqzX6BYbcB+w== - -thunky@^1.0.2: - version "1.1.0" - resolved "http://localhost:4873/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" - integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== - -tmpl@1.0.5: - version "1.0.5" - resolved "http://localhost:4873/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" - integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== - -to-fast-properties@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" - integrity 
sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== - -to-regex-range@^5.0.1: - version "5.0.1" - resolved "http://localhost:4873/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" - integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== - dependencies: - is-number "^7.0.0" - -toidentifier@1.0.1: - version "1.0.1" - resolved "http://localhost:4873/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" - integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== - -tough-cookie@^4.0.0: - version "4.1.2" - resolved "http://localhost:4873/tough-cookie/-/tough-cookie-4.1.2.tgz#e53e84b85f24e0b65dd526f46628db6c85f6b874" - integrity sha512-G9fqXWoYFZgTc2z8Q5zaHy/vJMjm+WV0AkAeHxVCQiEB1b+dGvWzFW6QV07cY5jQ5gRkeid2qIkzkxUnmoQZUQ== - dependencies: - psl "^1.1.33" - punycode "^2.1.1" - universalify "^0.2.0" - url-parse "^1.5.3" - -tr46@^1.0.1: - version "1.0.1" - resolved "http://localhost:4873/tr46/-/tr46-1.0.1.tgz#a8b13fd6bfd2489519674ccde55ba3693b706d09" - integrity sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA== - dependencies: - punycode "^2.1.0" - -tr46@^2.1.0: - version "2.1.0" - resolved "http://localhost:4873/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240" - integrity sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw== - dependencies: - punycode "^2.1.1" - -tryer@^1.0.1: - version "1.0.1" - resolved "http://localhost:4873/tryer/-/tryer-1.0.1.tgz#f2c85406800b9b0f74c9f7465b81eaad241252f8" - integrity sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA== - -tsconfig-paths@^3.14.1: - version "3.14.1" - resolved "http://localhost:4873/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz#ba0734599e8ea36c862798e920bcf163277b137a" - integrity 
sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ== - dependencies: - "@types/json5" "^0.0.29" - json5 "^1.0.1" - minimist "^1.2.6" - strip-bom "^3.0.0" - -tslib@^1.8.1: - version "1.14.1" - resolved "http://localhost:4873/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" - integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== - -tslib@^2.0.3: - version "2.4.0" - resolved "http://localhost:4873/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3" - integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ== - -tsutils@^3.21.0: - version "3.21.0" - resolved "http://localhost:4873/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" - integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== - dependencies: - tslib "^1.8.1" - -type-check@^0.4.0, type-check@~0.4.0: - version "0.4.0" - resolved "http://localhost:4873/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" - integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== - dependencies: - prelude-ls "^1.2.1" - -type-check@~0.3.2: - version "0.3.2" - resolved "http://localhost:4873/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" - integrity sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg== - dependencies: - prelude-ls "~1.1.2" - -type-detect@4.0.8: - version "4.0.8" - resolved "http://localhost:4873/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" - integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== - -type-fest@^0.16.0: - version "0.16.0" - resolved "http://localhost:4873/type-fest/-/type-fest-0.16.0.tgz#3240b891a78b0deae910dbeb86553e552a148860" - 
integrity sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg== - -type-fest@^0.20.2: - version "0.20.2" - resolved "http://localhost:4873/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" - integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== - -type-fest@^0.21.3: - version "0.21.3" - resolved "http://localhost:4873/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" - integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== - -type-is@~1.6.18: - version "1.6.18" - resolved "http://localhost:4873/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" - integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== - dependencies: - media-typer "0.3.0" - mime-types "~2.1.24" - -typedarray-to-buffer@^3.1.5: - version "3.1.5" - resolved "http://localhost:4873/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" - integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== - dependencies: - is-typedarray "^1.0.0" - -unbox-primitive@^1.0.2: - version "1.0.2" - resolved "http://localhost:4873/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" - integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== - dependencies: - call-bind "^1.0.2" - has-bigints "^1.0.2" - has-symbols "^1.0.3" - which-boxed-primitive "^1.0.2" - -unicode-canonical-property-names-ecmascript@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz#301acdc525631670d39f6146e0e77ff6bbdebddc" - integrity 
sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ== - -unicode-match-property-ecmascript@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz#54fd16e0ecb167cf04cf1f756bdcc92eba7976c3" - integrity sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q== - dependencies: - unicode-canonical-property-names-ecmascript "^2.0.0" - unicode-property-aliases-ecmascript "^2.0.0" - -unicode-match-property-value-ecmascript@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz#1a01aa57247c14c568b89775a54938788189a714" - integrity sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw== - -unicode-property-aliases-ecmascript@^2.0.0: - version "2.1.0" - resolved "http://localhost:4873/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz#43d41e3be698bd493ef911077c9b131f827e8ccd" - integrity sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w== - -unique-string@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d" - integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg== - dependencies: - crypto-random-string "^2.0.0" - -universalify@^0.2.0: - version "0.2.0" - resolved "http://localhost:4873/universalify/-/universalify-0.2.0.tgz#6451760566fa857534745ab1dde952d1b1761be0" - integrity sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg== - -universalify@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" - integrity 
sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== - -unpipe@1.0.0, unpipe@~1.0.0: - version "1.0.0" - resolved "http://localhost:4873/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" - integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== - -unquote@~1.1.1: - version "1.1.1" - resolved "http://localhost:4873/unquote/-/unquote-1.1.1.tgz#8fded7324ec6e88a0ff8b905e7c098cdc086d544" - integrity sha512-vRCqFv6UhXpWxZPyGDh/F3ZpNv8/qo7w6iufLpQg9aKnQ71qM4B5KiI7Mia9COcjEhrO9LueHpMYjYzsWH3OIg== - -upath@^1.2.0: - version "1.2.0" - resolved "http://localhost:4873/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" - integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== - -update-browserslist-db@^1.0.9: - version "1.0.10" - resolved "http://localhost:4873/update-browserslist-db/-/update-browserslist-db-1.0.10.tgz#0f54b876545726f17d00cd9a2561e6dade943ff3" - integrity sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ== - dependencies: - escalade "^3.1.1" - picocolors "^1.0.0" - -uri-js@^4.2.2: - version "4.4.1" - resolved "http://localhost:4873/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" - integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== - dependencies: - punycode "^2.1.0" - -url-parse@^1.5.3: - version "1.5.10" - resolved "http://localhost:4873/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1" - integrity sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ== - dependencies: - querystringify "^2.1.1" - requires-port "^1.0.0" - -util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: - version "1.0.2" - resolved 
"http://localhost:4873/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" - integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== - -util.promisify@~1.0.0: - version "1.0.1" - resolved "http://localhost:4873/util.promisify/-/util.promisify-1.0.1.tgz#6baf7774b80eeb0f7520d8b81d07982a59abbaee" - integrity sha512-g9JpC/3He3bm38zsLupWryXHoEcS22YHthuPQSJdMy6KNrzIRzWqcsHzD/WUnqe45whVou4VIsPew37DoXWNrA== - dependencies: - define-properties "^1.1.3" - es-abstract "^1.17.2" - has-symbols "^1.0.1" - object.getownpropertydescriptors "^2.1.0" - -utila@~0.4: - version "0.4.0" - resolved "http://localhost:4873/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" - integrity sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA== - -utils-merge@1.0.1: - version "1.0.1" - resolved "http://localhost:4873/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" - integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== - -uuid@^8.3, uuid@^8.3.2: - version "8.3.2" - resolved "http://localhost:4873/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" - integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== - -v8-to-istanbul@^8.1.0: - version "8.1.1" - resolved "http://localhost:4873/v8-to-istanbul/-/v8-to-istanbul-8.1.1.tgz#77b752fd3975e31bbcef938f85e9bd1c7a8d60ed" - integrity sha512-FGtKtv3xIpR6BYhvgH8MI/y78oT7d8Au3ww4QIxymrCtZEh5b8gCw2siywE+puhEmuWKDtmfrvF5UlB298ut3w== - dependencies: - "@types/istanbul-lib-coverage" "^2.0.1" - convert-source-map "^1.6.0" - source-map "^0.7.3" - -vary@~1.1.2: - version "1.1.2" - resolved "http://localhost:4873/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" - integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== - 
-w3c-hr-time@^1.0.2: - version "1.0.2" - resolved "http://localhost:4873/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" - integrity sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ== - dependencies: - browser-process-hrtime "^1.0.0" - -w3c-xmlserializer@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz#3e7104a05b75146cc60f564380b7f683acf1020a" - integrity sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA== - dependencies: - xml-name-validator "^3.0.0" - -walker@^1.0.7: - version "1.0.8" - resolved "http://localhost:4873/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" - integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== - dependencies: - makeerror "1.0.12" - -watchpack@^2.4.0: - version "2.4.0" - resolved "http://localhost:4873/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d" - integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg== - dependencies: - glob-to-regexp "^0.4.1" - graceful-fs "^4.1.2" - -wbuf@^1.1.0, wbuf@^1.7.3: - version "1.7.3" - resolved "http://localhost:4873/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" - integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== - dependencies: - minimalistic-assert "^1.0.0" - -web-vitals@^2.1.4: - version "2.1.4" - resolved "http://localhost:4873/web-vitals/-/web-vitals-2.1.4.tgz#76563175a475a5e835264d373704f9dde718290c" - integrity sha512-sVWcwhU5mX6crfI5Vd2dC4qchyTqxV8URinzt25XqVh+bHEPGH4C3NPrNionCP7Obx59wrYEbNlw4Z8sjALzZg== - -webidl-conversions@^4.0.2: - version "4.0.2" - resolved "http://localhost:4873/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" - integrity 
sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== - -webidl-conversions@^5.0.0: - version "5.0.0" - resolved "http://localhost:4873/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff" - integrity sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA== - -webidl-conversions@^6.1.0: - version "6.1.0" - resolved "http://localhost:4873/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514" - integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w== - -webpack-dev-middleware@^5.3.1: - version "5.3.3" - resolved "http://localhost:4873/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz#efae67c2793908e7311f1d9b06f2a08dcc97e51f" - integrity sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA== - dependencies: - colorette "^2.0.10" - memfs "^3.4.3" - mime-types "^2.1.31" - range-parser "^1.2.1" - schema-utils "^4.0.0" - -webpack-dev-server@^4.6.0: - version "4.11.1" - resolved "http://localhost:4873/webpack-dev-server/-/webpack-dev-server-4.11.1.tgz#ae07f0d71ca0438cf88446f09029b92ce81380b5" - integrity sha512-lILVz9tAUy1zGFwieuaQtYiadImb5M3d+H+L1zDYalYoDl0cksAB1UNyuE5MMWJrG6zR1tXkCP2fitl7yoUJiw== - dependencies: - "@types/bonjour" "^3.5.9" - "@types/connect-history-api-fallback" "^1.3.5" - "@types/express" "^4.17.13" - "@types/serve-index" "^1.9.1" - "@types/serve-static" "^1.13.10" - "@types/sockjs" "^0.3.33" - "@types/ws" "^8.5.1" - ansi-html-community "^0.0.8" - bonjour-service "^1.0.11" - chokidar "^3.5.3" - colorette "^2.0.10" - compression "^1.7.4" - connect-history-api-fallback "^2.0.0" - default-gateway "^6.0.3" - express "^4.17.3" - graceful-fs "^4.2.6" - html-entities "^2.3.2" - http-proxy-middleware "^2.0.3" - ipaddr.js "^2.0.1" - open "^8.0.9" - p-retry "^4.5.0" - rimraf "^3.0.2" - schema-utils "^4.0.0" - 
selfsigned "^2.1.1" - serve-index "^1.9.1" - sockjs "^0.3.24" - spdy "^4.0.2" - webpack-dev-middleware "^5.3.1" - ws "^8.4.2" - -webpack-manifest-plugin@^4.0.2: - version "4.1.1" - resolved "http://localhost:4873/webpack-manifest-plugin/-/webpack-manifest-plugin-4.1.1.tgz#10f8dbf4714ff93a215d5a45bcc416d80506f94f" - integrity sha512-YXUAwxtfKIJIKkhg03MKuiFAD72PlrqCiwdwO4VEXdRO5V0ORCNwaOwAZawPZalCbmH9kBDmXnNeQOw+BIEiow== - dependencies: - tapable "^2.0.0" - webpack-sources "^2.2.0" - -webpack-merge@^5.8.0: - version "5.8.0" - resolved "http://localhost:4873/webpack-merge/-/webpack-merge-5.8.0.tgz#2b39dbf22af87776ad744c390223731d30a68f61" - integrity sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q== - dependencies: - clone-deep "^4.0.1" - wildcard "^2.0.0" - -webpack-sources@^1.4.3: - version "1.4.3" - resolved "http://localhost:4873/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" - integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== - dependencies: - source-list-map "^2.0.0" - source-map "~0.6.1" - -webpack-sources@^2.2.0: - version "2.3.1" - resolved "http://localhost:4873/webpack-sources/-/webpack-sources-2.3.1.tgz#570de0af163949fe272233c2cefe1b56f74511fd" - integrity sha512-y9EI9AO42JjEcrTJFOYmVywVZdKVUfOvDUPsJea5GIr1JOEGFVqwlY2K098fFoIjOkDzHn2AjRvM8dsBZu+gCA== - dependencies: - source-list-map "^2.0.1" - source-map "^0.6.1" - -webpack-sources@^3.2.3: - version "3.2.3" - resolved "http://localhost:4873/webpack-sources/-/webpack-sources-3.2.3.tgz#2d4daab8451fd4b240cc27055ff6a0c2ccea0cde" - integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== - -webpack@^5.64.4: - version "5.74.0" - resolved "http://localhost:4873/webpack/-/webpack-5.74.0.tgz#02a5dac19a17e0bb47093f2be67c695102a55980" - integrity 
sha512-A2InDwnhhGN4LYctJj6M1JEaGL7Luj6LOmyBHjcI8529cm5p6VXiTIW2sn6ffvEAKmveLzvu4jrihwXtPojlAA== - dependencies: - "@types/eslint-scope" "^3.7.3" - "@types/estree" "^0.0.51" - "@webassemblyjs/ast" "1.11.1" - "@webassemblyjs/wasm-edit" "1.11.1" - "@webassemblyjs/wasm-parser" "1.11.1" - acorn "^8.7.1" - acorn-import-assertions "^1.7.6" - browserslist "^4.14.5" - chrome-trace-event "^1.0.2" - enhanced-resolve "^5.10.0" - es-module-lexer "^0.9.0" - eslint-scope "5.1.1" - events "^3.2.0" - glob-to-regexp "^0.4.1" - graceful-fs "^4.2.9" - json-parse-even-better-errors "^2.3.1" - loader-runner "^4.2.0" - mime-types "^2.1.27" - neo-async "^2.6.2" - schema-utils "^3.1.0" - tapable "^2.1.1" - terser-webpack-plugin "^5.1.3" - watchpack "^2.4.0" - webpack-sources "^3.2.3" - -websocket-driver@>=0.5.1, websocket-driver@^0.7.4: - version "0.7.4" - resolved "http://localhost:4873/websocket-driver/-/websocket-driver-0.7.4.tgz#89ad5295bbf64b480abcba31e4953aca706f5760" - integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg== - dependencies: - http-parser-js ">=0.5.1" - safe-buffer ">=5.1.0" - websocket-extensions ">=0.1.1" - -websocket-extensions@>=0.1.1: - version "0.1.4" - resolved "http://localhost:4873/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42" - integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== - -whatwg-encoding@^1.0.5: - version "1.0.5" - resolved "http://localhost:4873/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0" - integrity sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw== - dependencies: - iconv-lite "0.4.24" - -whatwg-fetch@^3.6.2: - version "3.6.2" - resolved "http://localhost:4873/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz#dced24f37f2624ed0281725d51d0e2e3fe677f8c" - integrity 
sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA== - -whatwg-mimetype@^2.3.0: - version "2.3.0" - resolved "http://localhost:4873/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" - integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== - -whatwg-url@^7.0.0: - version "7.1.0" - resolved "http://localhost:4873/whatwg-url/-/whatwg-url-7.1.0.tgz#c2c492f1eca612988efd3d2266be1b9fc6170d06" - integrity sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg== - dependencies: - lodash.sortby "^4.7.0" - tr46 "^1.0.1" - webidl-conversions "^4.0.2" - -whatwg-url@^8.0.0, whatwg-url@^8.5.0: - version "8.7.0" - resolved "http://localhost:4873/whatwg-url/-/whatwg-url-8.7.0.tgz#656a78e510ff8f3937bc0bcbe9f5c0ac35941b77" - integrity sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg== - dependencies: - lodash "^4.7.0" - tr46 "^2.1.0" - webidl-conversions "^6.1.0" - -which-boxed-primitive@^1.0.2: - version "1.0.2" - resolved "http://localhost:4873/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" - integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== - dependencies: - is-bigint "^1.0.1" - is-boolean-object "^1.1.0" - is-number-object "^1.0.4" - is-string "^1.0.5" - is-symbol "^1.0.3" - -which@^1.3.1: - version "1.3.1" - resolved "http://localhost:4873/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" - integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== - dependencies: - isexe "^2.0.0" - -which@^2.0.1: - version "2.0.2" - resolved "http://localhost:4873/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" - integrity 
sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== - dependencies: - isexe "^2.0.0" - -wildcard@^2.0.0: - version "2.0.0" - resolved "http://localhost:4873/wildcard/-/wildcard-2.0.0.tgz#a77d20e5200c6faaac979e4b3aadc7b3dd7f8fec" - integrity sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw== - -word-wrap@^1.2.3, word-wrap@~1.2.3: - version "1.2.3" - resolved "http://localhost:4873/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" - integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== - -workbox-background-sync@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-background-sync/-/workbox-background-sync-6.5.4.tgz#3141afba3cc8aa2ae14c24d0f6811374ba8ff6a9" - integrity sha512-0r4INQZMyPky/lj4Ou98qxcThrETucOde+7mRGJl13MPJugQNKeZQOdIJe/1AchOP23cTqHcN/YVpD6r8E6I8g== - dependencies: - idb "^7.0.1" - workbox-core "6.5.4" - -workbox-broadcast-update@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-broadcast-update/-/workbox-broadcast-update-6.5.4.tgz#8441cff5417cd41f384ba7633ca960a7ffe40f66" - integrity sha512-I/lBERoH1u3zyBosnpPEtcAVe5lwykx9Yg1k6f8/BGEPGaMMgZrwVrqL1uA9QZ1NGGFoyE6t9i7lBjOlDhFEEw== - dependencies: - workbox-core "6.5.4" - -workbox-build@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-build/-/workbox-build-6.5.4.tgz#7d06d31eb28a878817e1c991c05c5b93409f0389" - integrity sha512-kgRevLXEYvUW9WS4XoziYqZ8Q9j/2ziJYEtTrjdz5/L/cTUa2XfyMP2i7c3p34lgqJ03+mTiz13SdFef2POwbA== - dependencies: - "@apideck/better-ajv-errors" "^0.3.1" - "@babel/core" "^7.11.1" - "@babel/preset-env" "^7.11.0" - "@babel/runtime" "^7.11.2" - "@rollup/plugin-babel" "^5.2.0" - "@rollup/plugin-node-resolve" "^11.2.1" - "@rollup/plugin-replace" "^2.4.1" - "@surma/rollup-plugin-off-main-thread" "^2.2.3" - ajv "^8.6.0" - common-tags "^1.8.0" - fast-json-stable-stringify "^2.1.0" - fs-extra 
"^9.0.1" - glob "^7.1.6" - lodash "^4.17.20" - pretty-bytes "^5.3.0" - rollup "^2.43.1" - rollup-plugin-terser "^7.0.0" - source-map "^0.8.0-beta.0" - stringify-object "^3.3.0" - strip-comments "^2.0.1" - tempy "^0.6.0" - upath "^1.2.0" - workbox-background-sync "6.5.4" - workbox-broadcast-update "6.5.4" - workbox-cacheable-response "6.5.4" - workbox-core "6.5.4" - workbox-expiration "6.5.4" - workbox-google-analytics "6.5.4" - workbox-navigation-preload "6.5.4" - workbox-precaching "6.5.4" - workbox-range-requests "6.5.4" - workbox-recipes "6.5.4" - workbox-routing "6.5.4" - workbox-strategies "6.5.4" - workbox-streams "6.5.4" - workbox-sw "6.5.4" - workbox-window "6.5.4" - -workbox-cacheable-response@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-cacheable-response/-/workbox-cacheable-response-6.5.4.tgz#a5c6ec0c6e2b6f037379198d4ef07d098f7cf137" - integrity sha512-DCR9uD0Fqj8oB2TSWQEm1hbFs/85hXXoayVwFKLVuIuxwJaihBsLsp4y7J9bvZbqtPJ1KlCkmYVGQKrBU4KAug== - dependencies: - workbox-core "6.5.4" - -workbox-core@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-core/-/workbox-core-6.5.4.tgz#df48bf44cd58bb1d1726c49b883fb1dffa24c9ba" - integrity sha512-OXYb+m9wZm8GrORlV2vBbE5EC1FKu71GGp0H4rjmxmF4/HLbMCoTFws87M3dFwgpmg0v00K++PImpNQ6J5NQ6Q== - -workbox-expiration@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-expiration/-/workbox-expiration-6.5.4.tgz#501056f81e87e1d296c76570bb483ce5e29b4539" - integrity sha512-jUP5qPOpH1nXtjGGh1fRBa1wJL2QlIb5mGpct3NzepjGG2uFFBn4iiEBiI9GUmfAFR2ApuRhDydjcRmYXddiEQ== - dependencies: - idb "^7.0.1" - workbox-core "6.5.4" - -workbox-google-analytics@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-google-analytics/-/workbox-google-analytics-6.5.4.tgz#c74327f80dfa4c1954cbba93cd7ea640fe7ece7d" - integrity sha512-8AU1WuaXsD49249Wq0B2zn4a/vvFfHkpcFfqAFHNHwln3jK9QUYmzdkKXGIZl9wyKNP+RRX30vcgcyWMcZ9VAg== - dependencies: - workbox-background-sync "6.5.4" - workbox-core "6.5.4" - 
workbox-routing "6.5.4" - workbox-strategies "6.5.4" - -workbox-navigation-preload@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-navigation-preload/-/workbox-navigation-preload-6.5.4.tgz#ede56dd5f6fc9e860a7e45b2c1a8f87c1c793212" - integrity sha512-IIwf80eO3cr8h6XSQJF+Hxj26rg2RPFVUmJLUlM0+A2GzB4HFbQyKkrgD5y2d84g2IbJzP4B4j5dPBRzamHrng== - dependencies: - workbox-core "6.5.4" - -workbox-precaching@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-precaching/-/workbox-precaching-6.5.4.tgz#740e3561df92c6726ab5f7471e6aac89582cab72" - integrity sha512-hSMezMsW6btKnxHB4bFy2Qfwey/8SYdGWvVIKFaUm8vJ4E53JAY+U2JwLTRD8wbLWoP6OVUdFlXsTdKu9yoLTg== - dependencies: - workbox-core "6.5.4" - workbox-routing "6.5.4" - workbox-strategies "6.5.4" - -workbox-range-requests@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-range-requests/-/workbox-range-requests-6.5.4.tgz#86b3d482e090433dab38d36ae031b2bb0bd74399" - integrity sha512-Je2qR1NXCFC8xVJ/Lux6saH6IrQGhMpDrPXWZWWS8n/RD+WZfKa6dSZwU+/QksfEadJEr/NfY+aP/CXFFK5JFg== - dependencies: - workbox-core "6.5.4" - -workbox-recipes@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-recipes/-/workbox-recipes-6.5.4.tgz#cca809ee63b98b158b2702dcfb741b5cc3e24acb" - integrity sha512-QZNO8Ez708NNwzLNEXTG4QYSKQ1ochzEtRLGaq+mr2PyoEIC1xFW7MrWxrONUxBFOByksds9Z4//lKAX8tHyUA== - dependencies: - workbox-cacheable-response "6.5.4" - workbox-core "6.5.4" - workbox-expiration "6.5.4" - workbox-precaching "6.5.4" - workbox-routing "6.5.4" - workbox-strategies "6.5.4" - -workbox-routing@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-routing/-/workbox-routing-6.5.4.tgz#6a7fbbd23f4ac801038d9a0298bc907ee26fe3da" - integrity sha512-apQswLsbrrOsBUWtr9Lf80F+P1sHnQdYodRo32SjiByYi36IDyL2r7BH1lJtFX8fwNHDa1QOVY74WKLLS6o5Pg== - dependencies: - workbox-core "6.5.4" - -workbox-strategies@6.5.4: - version "6.5.4" - resolved 
"http://localhost:4873/workbox-strategies/-/workbox-strategies-6.5.4.tgz#4edda035b3c010fc7f6152918370699334cd204d" - integrity sha512-DEtsxhx0LIYWkJBTQolRxG4EI0setTJkqR4m7r4YpBdxtWJH1Mbg01Cj8ZjNOO8etqfA3IZaOPHUxCs8cBsKLw== - dependencies: - workbox-core "6.5.4" - -workbox-streams@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-streams/-/workbox-streams-6.5.4.tgz#1cb3c168a6101df7b5269d0353c19e36668d7d69" - integrity sha512-FXKVh87d2RFXkliAIheBojBELIPnWbQdyDvsH3t74Cwhg0fDheL1T8BqSM86hZvC0ZESLsznSYWw+Va+KVbUzg== - dependencies: - workbox-core "6.5.4" - workbox-routing "6.5.4" - -workbox-sw@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-sw/-/workbox-sw-6.5.4.tgz#d93e9c67924dd153a61367a4656ff4d2ae2ed736" - integrity sha512-vo2RQo7DILVRoH5LjGqw3nphavEjK4Qk+FenXeUsknKn14eCNedHOXWbmnvP4ipKhlE35pvJ4yl4YYf6YsJArA== - -workbox-webpack-plugin@^6.4.1: - version "6.5.4" - resolved "http://localhost:4873/workbox-webpack-plugin/-/workbox-webpack-plugin-6.5.4.tgz#baf2d3f4b8f435f3469887cf4fba2b7fac3d0fd7" - integrity sha512-LmWm/zoaahe0EGmMTrSLUi+BjyR3cdGEfU3fS6PN1zKFYbqAKuQ+Oy/27e4VSXsyIwAw8+QDfk1XHNGtZu9nQg== - dependencies: - fast-json-stable-stringify "^2.1.0" - pretty-bytes "^5.4.1" - upath "^1.2.0" - webpack-sources "^1.4.3" - workbox-build "6.5.4" - -workbox-window@6.5.4: - version "6.5.4" - resolved "http://localhost:4873/workbox-window/-/workbox-window-6.5.4.tgz#d991bc0a94dff3c2dbb6b84558cff155ca878e91" - integrity sha512-HnLZJDwYBE+hpG25AQBO8RUWBJRaCsI9ksQJEp3aCOFCaG5kqaToAYXFRAHxzRluM2cQbGzdQF5rjKPWPA1fug== - dependencies: - "@types/trusted-types" "^2.0.2" - workbox-core "6.5.4" - -wrap-ansi@^7.0.0: - version "7.0.0" - resolved "http://localhost:4873/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" - integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== - dependencies: - ansi-styles "^4.0.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - -wrappy@1: - 
version "1.0.2" - resolved "http://localhost:4873/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" - integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== - -write-file-atomic@^3.0.0: - version "3.0.3" - resolved "http://localhost:4873/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8" - integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== - dependencies: - imurmurhash "^0.1.4" - is-typedarray "^1.0.0" - signal-exit "^3.0.2" - typedarray-to-buffer "^3.1.5" - -ws@^7.4.6: - version "7.5.9" - resolved "http://localhost:4873/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" - integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== - -ws@^8.4.2: - version "8.9.0" - resolved "http://localhost:4873/ws/-/ws-8.9.0.tgz#2a994bb67144be1b53fe2d23c53c028adeb7f45e" - integrity sha512-Ja7nszREasGaYUYCI2k4lCKIRTt+y7XuqVoHR44YpI49TtryyqbqvDMn5eqfW7e6HzTukDRIsXqzVHScqRcafg== - -xml-name-validator@^3.0.0: - version "3.0.0" - resolved "http://localhost:4873/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" - integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw== - -xmlchars@^2.2.0: - version "2.2.0" - resolved "http://localhost:4873/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" - integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== - -xtend@^4.0.2: - version "4.0.2" - resolved "http://localhost:4873/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" - integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== - -y18n@^5.0.5: - version "5.0.8" - resolved 
"http://localhost:4873/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" - integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== - -yallist@^4.0.0: - version "4.0.0" - resolved "http://localhost:4873/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" - integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== - -yaml@^1.10.0, yaml@^1.10.2, yaml@^1.7.2: - version "1.10.2" - resolved "http://localhost:4873/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" - integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== - -yargs-parser@^20.2.2: - version "20.2.9" - resolved "http://localhost:4873/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" - integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== - -yargs@^16.2.0: - version "16.2.0" - resolved "http://localhost:4873/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" - integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== - dependencies: - cliui "^7.0.2" - escalade "^3.1.1" - get-caller-file "^2.0.5" - require-directory "^2.1.1" - string-width "^4.2.0" - y18n "^5.0.5" - yargs-parser "^20.2.2" - -yocto-queue@^0.1.0: - version "0.1.0" - resolved "http://localhost:4873/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" - integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== diff --git a/javascript/examples/vite/.gitignore b/javascript/examples/vite/.gitignore deleted file mode 100644 index 23d67fc1..00000000 --- a/javascript/examples/vite/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -node_modules/ -yarn.lock diff --git a/javascript/examples/vite/README.md b/javascript/examples/vite/README.md deleted file mode 
100644 index c84594f5..00000000 --- a/javascript/examples/vite/README.md +++ /dev/null @@ -1,54 +0,0 @@ -# Vite + Automerge - -There are three things you need to do to get WASM packaging working with vite: - -1. Install the top level await plugin -2. Install the `vite-plugin-wasm` plugin -3. Exclude `automerge-wasm` from the optimizer - -First, install the packages we need: - -```bash -yarn add vite-plugin-top-level-await -yarn add vite-plugin-wasm -``` - -In `vite.config.js` - -```javascript -import { defineConfig } from "vite" -import wasm from "vite-plugin-wasm" -import topLevelAwait from "vite-plugin-top-level-await" - -export default defineConfig({ - plugins: [topLevelAwait(), wasm()], - - // This is only necessary if you are using `SharedWorker` or `WebWorker`, as - // documented in https://vitejs.dev/guide/features.html#import-with-constructors - worker: { - format: "es", - plugins: [topLevelAwait(), wasm()], - }, - - optimizeDeps: { - // This is necessary because otherwise `vite dev` includes two separate - // versions of the JS wrapper. This causes problems because the JS - // wrapper has a module level variable to track JS side heap - // allocations, initializing this twice causes horrible breakage - exclude: ["@automerge/automerge-wasm"], - }, -}) -``` - -Now start the dev server: - -```bash -yarn vite -``` - -## Running the example - -```bash -yarn install -yarn dev -``` diff --git a/javascript/examples/vite/index.html b/javascript/examples/vite/index.html deleted file mode 100644 index f86e483c..00000000 --- a/javascript/examples/vite/index.html +++ /dev/null @@ -1,13 +0,0 @@ - - - - - - - Vite + TS - - -
- - - diff --git a/javascript/examples/vite/main.ts b/javascript/examples/vite/main.ts deleted file mode 100644 index 0ba18f48..00000000 --- a/javascript/examples/vite/main.ts +++ /dev/null @@ -1,15 +0,0 @@ -import * as Automerge from "/node_modules/.vite/deps/automerge-js.js?v=6e973f28" -console.log(Automerge) -let doc = Automerge.init() -doc = Automerge.change(doc, d => (d.hello = "from automerge-js")) -console.log(doc) -const result = JSON.stringify(doc) -if (typeof document !== "undefined") { - const element = document.createElement("div") - element.innerHTML = JSON.stringify(result) - document.body.appendChild(element) -} else { - console.log("node:", result) -} - -//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIi9ob21lL2FsZXgvUHJvamVjdHMvYXV0b21lcmdlL2F1dG9tZXJnZS1ycy9hdXRvbWVyZ2UtanMvZXhhbXBsZXMvdml0ZS9zcmMvbWFpbi50cyJdLCJzb3VyY2VzQ29udGVudCI6WyJpbXBvcnQgKiBhcyBBdXRvbWVyZ2UgZnJvbSBcImF1dG9tZXJnZS1qc1wiXG5cbi8vIGhlbGxvIHdvcmxkIGNvZGUgdGhhdCB3aWxsIHJ1biBjb3JyZWN0bHkgb24gd2ViIG9yIG5vZGVcblxuY29uc29sZS5sb2coQXV0b21lcmdlKVxubGV0IGRvYyA9IEF1dG9tZXJnZS5pbml0KClcbmRvYyA9IEF1dG9tZXJnZS5jaGFuZ2UoZG9jLCAoZDogYW55KSA9PiBkLmhlbGxvID0gXCJmcm9tIGF1dG9tZXJnZS1qc1wiKVxuY29uc29sZS5sb2coZG9jKVxuY29uc3QgcmVzdWx0ID0gSlNPTi5zdHJpbmdpZnkoZG9jKVxuXG5pZiAodHlwZW9mIGRvY3VtZW50ICE9PSAndW5kZWZpbmVkJykge1xuICAgIC8vIGJyb3dzZXJcbiAgICBjb25zdCBlbGVtZW50ID0gZG9jdW1lbnQuY3JlYXRlRWxlbWVudCgnZGl2Jyk7XG4gICAgZWxlbWVudC5pbm5lckhUTUwgPSBKU09OLnN0cmluZ2lmeShyZXN1bHQpXG4gICAgZG9jdW1lbnQuYm9keS5hcHBlbmRDaGlsZChlbGVtZW50KTtcbn0gZWxzZSB7XG4gICAgLy8gc2VydmVyXG4gICAgY29uc29sZS5sb2coXCJub2RlOlwiLCByZXN1bHQpXG59XG5cbiJdLCJtYXBwaW5ncyI6IkFBQUEsWUFBWSxlQUFlO0FBSTNCLFFBQVEsSUFBSSxTQUFTO0FBQ3JCLElBQUksTUFBTSxVQUFVLEtBQUs7QUFDekIsTUFBTSxVQUFVLE9BQU8sS0FBSyxDQUFDLE1BQVcsRUFBRSxRQUFRLG1CQUFtQjtBQUNyRSxRQUFRLElBQUksR0FBRztBQUNmLE1BQU0sU0FBUyxLQUFLLFVBQVUsR0FBRztBQUVqQyxJQUFJLE9BQU8sYUFBYSxhQUFhO0FBRWpDLFFBQU0sVUFBVSxTQUFTLGNBQWMsS0FBSztBQUM1QyxVQUFRLFlBQVksS0FBSyxVQUFVLE1BQU07QUF
DekMsV0FBUyxLQUFLLFlBQVksT0FBTztBQUNyQyxPQUFPO0FBRUgsVUFBUSxJQUFJLFNBQVMsTUFBTTtBQUMvQjsiLCJuYW1lcyI6W119 diff --git a/javascript/examples/vite/package.json b/javascript/examples/vite/package.json deleted file mode 100644 index d9a13681..00000000 --- a/javascript/examples/vite/package.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "name": "autovite", - "private": true, - "version": "0.0.0", - "type": "module", - "scripts": { - "dev": "vite", - "build": "tsc && vite build", - "preview": "vite preview" - }, - "dependencies": { - "@automerge/automerge": "2.0.0-alpha.7" - }, - "devDependencies": { - "typescript": "^4.6.4", - "vite": "^3.1.0", - "vite-plugin-top-level-await": "^1.1.1", - "vite-plugin-wasm": "^2.1.0" - } -} diff --git a/javascript/examples/vite/public/vite.svg b/javascript/examples/vite/public/vite.svg deleted file mode 100644 index e7b8dfb1..00000000 --- a/javascript/examples/vite/public/vite.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/javascript/examples/vite/src/counter.ts b/javascript/examples/vite/src/counter.ts deleted file mode 100644 index 3e516b6d..00000000 --- a/javascript/examples/vite/src/counter.ts +++ /dev/null @@ -1,9 +0,0 @@ -export function setupCounter(element: HTMLButtonElement) { - let counter = 0 - const setCounter = (count: number) => { - counter = count - element.innerHTML = `count is ${counter}` - } - element.addEventListener("click", () => setCounter(++counter)) - setCounter(0) -} diff --git a/javascript/examples/vite/src/main.ts b/javascript/examples/vite/src/main.ts deleted file mode 100644 index 8dc8f92c..00000000 --- a/javascript/examples/vite/src/main.ts +++ /dev/null @@ -1,17 +0,0 @@ -import * as Automerge from "@automerge/automerge" - -// hello world code that will run correctly on web or node - -let doc = Automerge.init() -doc = Automerge.change(doc, (d: any) => (d.hello = "from automerge")) -const result = JSON.stringify(doc) - -if (typeof document !== "undefined") { - // browser - const element 
= document.createElement("div") - element.innerHTML = JSON.stringify(result) - document.body.appendChild(element) -} else { - // server - console.log("node:", result) -} diff --git a/javascript/examples/vite/src/style.css b/javascript/examples/vite/src/style.css deleted file mode 100644 index ac37d84b..00000000 --- a/javascript/examples/vite/src/style.css +++ /dev/null @@ -1,97 +0,0 @@ -:root { - font-family: Inter, Avenir, Helvetica, Arial, sans-serif; - font-size: 16px; - line-height: 24px; - font-weight: 400; - - color-scheme: light dark; - color: rgba(255, 255, 255, 0.87); - background-color: #242424; - - font-synthesis: none; - text-rendering: optimizeLegibility; - -webkit-font-smoothing: antialiased; - -moz-osx-font-smoothing: grayscale; - -webkit-text-size-adjust: 100%; -} - -a { - font-weight: 500; - color: #646cff; - text-decoration: inherit; -} -a:hover { - color: #535bf2; -} - -body { - margin: 0; - display: flex; - place-items: center; - min-width: 320px; - min-height: 100vh; -} - -h1 { - font-size: 3.2em; - line-height: 1.1; -} - -#app { - max-width: 1280px; - margin: 0 auto; - padding: 2rem; - text-align: center; -} - -.logo { - height: 6em; - padding: 1.5em; - will-change: filter; -} -.logo:hover { - filter: drop-shadow(0 0 2em #646cffaa); -} -.logo.vanilla:hover { - filter: drop-shadow(0 0 2em #3178c6aa); -} - -.card { - padding: 2em; -} - -.read-the-docs { - color: #888; -} - -button { - border-radius: 8px; - border: 1px solid transparent; - padding: 0.6em 1.2em; - font-size: 1em; - font-weight: 500; - font-family: inherit; - background-color: #1a1a1a; - cursor: pointer; - transition: border-color 0.25s; -} -button:hover { - border-color: #646cff; -} -button:focus, -button:focus-visible { - outline: 4px auto -webkit-focus-ring-color; -} - -@media (prefers-color-scheme: light) { - :root { - color: #213547; - background-color: #ffffff; - } - a:hover { - color: #747bff; - } - button { - background-color: #f9f9f9; - } -} diff --git 
a/javascript/examples/vite/src/typescript.svg b/javascript/examples/vite/src/typescript.svg deleted file mode 100644 index d91c910c..00000000 --- a/javascript/examples/vite/src/typescript.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/javascript/examples/vite/src/vite-env.d.ts b/javascript/examples/vite/src/vite-env.d.ts deleted file mode 100644 index 11f02fe2..00000000 --- a/javascript/examples/vite/src/vite-env.d.ts +++ /dev/null @@ -1 +0,0 @@ -/// diff --git a/javascript/examples/vite/tsconfig.json b/javascript/examples/vite/tsconfig.json deleted file mode 100644 index fbd02253..00000000 --- a/javascript/examples/vite/tsconfig.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "compilerOptions": { - "target": "ESNext", - "useDefineForClassFields": true, - "module": "ESNext", - "lib": ["ESNext", "DOM"], - "moduleResolution": "Node", - "strict": true, - "sourceMap": true, - "resolveJsonModule": true, - "isolatedModules": true, - "esModuleInterop": true, - "noEmit": true, - "noUnusedLocals": true, - "noUnusedParameters": true, - "noImplicitReturns": true, - "skipLibCheck": true - }, - "include": ["src"] -} diff --git a/javascript/examples/vite/vite.config.js b/javascript/examples/vite/vite.config.js deleted file mode 100644 index d80981bf..00000000 --- a/javascript/examples/vite/vite.config.js +++ /dev/null @@ -1,22 +0,0 @@ -import { defineConfig } from "vite" -import wasm from "vite-plugin-wasm" -import topLevelAwait from "vite-plugin-top-level-await" - -export default defineConfig({ - plugins: [topLevelAwait(), wasm()], - - // This is only necessary if you are using `SharedWorker` or `WebWorker`, as - // documented in https://vitejs.dev/guide/features.html#import-with-constructors - worker: { - format: "es", - plugins: [topLevelAwait(), wasm()], - }, - - optimizeDeps: { - // This is necessary because otherwise `vite dev` includes two separate - // versions of the JS wrapper. 
This causes problems because the JS - // wrapper has a module level variable to track JS side heap - // allocations, initializing this twice causes horrible breakage - exclude: ["@automerge/automerge-wasm"], - }, -}) diff --git a/javascript/examples/webpack/.gitignore b/javascript/examples/webpack/.gitignore deleted file mode 100644 index da9d3ff5..00000000 --- a/javascript/examples/webpack/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -yarn.lock -node_modules -public/*.wasm -public/main.js -dist diff --git a/javascript/examples/webpack/README.md b/javascript/examples/webpack/README.md deleted file mode 100644 index 7563f27d..00000000 --- a/javascript/examples/webpack/README.md +++ /dev/null @@ -1,35 +0,0 @@ -# Webpack + Automerge - -Getting WASM working in webpack 5 is very easy. You just need to enable the -`asyncWebAssembly` -[experiment](https://webpack.js.org/configuration/experiments/). For example: - -```javascript -const path = require("path") - -const clientConfig = { - experiments: { asyncWebAssembly: true }, - target: "web", - entry: "./src/index.js", - output: { - filename: "main.js", - path: path.resolve(__dirname, "public"), - }, - mode: "development", // or production - performance: { - // we dont want the wasm blob to generate warnings - hints: false, - maxEntrypointSize: 512000, - maxAssetSize: 512000, - }, -} - -module.exports = clientConfig -``` - -## Running the example - -```bash -yarn install -yarn start -``` diff --git a/javascript/examples/webpack/package.json b/javascript/examples/webpack/package.json deleted file mode 100644 index 2b63e7cc..00000000 --- a/javascript/examples/webpack/package.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "name": "webpack-automerge-example", - "version": "0.1.0", - "description": "", - "private": true, - "scripts": { - "build": "webpack", - "start": "serve public", - "test": "node dist/node.js" - }, - "author": "", - "dependencies": { - "@automerge/automerge": "2.0.0-alpha.7" - }, - "devDependencies": { - "serve": 
"^13.0.2", - "webpack": "^5.72.1", - "webpack-cli": "^4.9.2", - "webpack-dev-server": "^4.11.1", - "webpack-node-externals": "^3.0.0" - } -} diff --git a/javascript/examples/webpack/public/index.html b/javascript/examples/webpack/public/index.html deleted file mode 100644 index 5003393a..00000000 --- a/javascript/examples/webpack/public/index.html +++ /dev/null @@ -1,10 +0,0 @@ - - - - - Simple Webpack for automerge-wasm - - - - - diff --git a/javascript/examples/webpack/src/index.js b/javascript/examples/webpack/src/index.js deleted file mode 100644 index 3a9086e4..00000000 --- a/javascript/examples/webpack/src/index.js +++ /dev/null @@ -1,17 +0,0 @@ -import * as Automerge from "@automerge/automerge" - -// hello world code that will run correctly on web or node - -let doc = Automerge.init() -doc = Automerge.change(doc, d => (d.hello = "from automerge")) -const result = JSON.stringify(doc) - -if (typeof document !== "undefined") { - // browser - const element = document.createElement("div") - element.innerHTML = JSON.stringify(result) - document.body.appendChild(element) -} else { - // server - console.log("node:", result) -} diff --git a/javascript/examples/webpack/webpack.config.js b/javascript/examples/webpack/webpack.config.js deleted file mode 100644 index 51fd5127..00000000 --- a/javascript/examples/webpack/webpack.config.js +++ /dev/null @@ -1,37 +0,0 @@ -const path = require("path") -const nodeExternals = require("webpack-node-externals") - -// the most basic webpack config for node or web targets for automerge-wasm - -const serverConfig = { - // basic setup for bundling a node package - target: "node", - externals: [nodeExternals()], - externalsPresets: { node: true }, - - entry: "./src/index.js", - output: { - filename: "node.js", - path: path.resolve(__dirname, "dist"), - }, - mode: "development", // or production -} - -const clientConfig = { - experiments: { asyncWebAssembly: true }, - target: "web", - entry: "./src/index.js", - output: { - filename: 
"main.js", - path: path.resolve(__dirname, "public"), - }, - mode: "development", // or production - performance: { - // we dont want the wasm blob to generate warnings - hints: false, - maxEntrypointSize: 512000, - maxAssetSize: 512000, - }, -} - -module.exports = [serverConfig, clientConfig] diff --git a/javascript/package.json b/javascript/package.json deleted file mode 100644 index 79309907..00000000 --- a/javascript/package.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "name": "@automerge/automerge", - "collaborators": [ - "Orion Henry ", - "Martin Kleppmann" - ], - "version": "2.0.2", - "description": "Javascript implementation of automerge, backed by @automerge/automerge-wasm", - "homepage": "https://github.com/automerge/automerge-rs/tree/main/wrappers/javascript", - "repository": "github:automerge/automerge-rs", - "files": [ - "README.md", - "LICENSE", - "package.json", - "dist/index.d.ts", - "dist/cjs/**/*.js", - "dist/mjs/**/*.js", - "dist/*.d.ts" - ], - "types": "./dist/index.d.ts", - "module": "./dist/mjs/index.js", - "main": "./dist/cjs/index.js", - "license": "MIT", - "scripts": { - "lint": "eslint src", - "build": "tsc -p config/mjs.json && tsc -p config/cjs.json && tsc -p config/declonly.json --emitDeclarationOnly", - "test": "ts-mocha test/*.ts", - "deno:build": "denoify && node ./scripts/deno-prefixer.mjs", - "deno:test": "deno test ./deno-tests/deno.ts --allow-read --allow-net", - "watch-docs": "typedoc src/index.ts --watch --readme none" - }, - "devDependencies": { - "@types/expect": "^24.3.0", - "@types/mocha": "^10.0.1", - "@types/uuid": "^9.0.0", - "@typescript-eslint/eslint-plugin": "^5.46.0", - "@typescript-eslint/parser": "^5.46.0", - "denoify": "^1.4.5", - "eslint": "^8.29.0", - "fast-sha256": "^1.3.0", - "mocha": "^10.2.0", - "pako": "^2.1.0", - "prettier": "^2.8.1", - "ts-mocha": "^10.0.0", - "ts-node": "^10.9.1", - "typedoc": "^0.23.22", - "typescript": "^4.9.4" - }, - "dependencies": { - "@automerge/automerge-wasm": "0.1.25", - "uuid": 
"^9.0.0" - } -} diff --git a/javascript/scripts/deno-prefixer.mjs b/javascript/scripts/deno-prefixer.mjs deleted file mode 100644 index 28544102..00000000 --- a/javascript/scripts/deno-prefixer.mjs +++ /dev/null @@ -1,9 +0,0 @@ -import * as fs from "fs" - -const files = ["./deno_dist/proxies.ts"] -for (const filepath of files) { - const data = fs.readFileSync(filepath) - fs.writeFileSync(filepath, "// @ts-nocheck \n" + data) - - console.log('Prepended "// @ts-nocheck" to ' + filepath) -} diff --git a/javascript/scripts/denoify-replacer.mjs b/javascript/scripts/denoify-replacer.mjs deleted file mode 100644 index e183ba0d..00000000 --- a/javascript/scripts/denoify-replacer.mjs +++ /dev/null @@ -1,42 +0,0 @@ -// @denoify-ignore - -import { makeThisModuleAnExecutableReplacer } from "denoify" -// import { assert } from "tsafe"; -// import * as path from "path"; - -makeThisModuleAnExecutableReplacer( - async ({ parsedImportExportStatement, destDirPath, version }) => { - version = process.env.VERSION || version - - switch (parsedImportExportStatement.parsedArgument.nodeModuleName) { - case "@automerge/automerge-wasm": - { - const moduleRoot = - process.env.ROOT_MODULE || - `https://deno.land/x/automerge_wasm@${version}` - /* - *We expect not to run against statements like - *import(..).then(...) - *or - *export * from "..." - *in our code. 
- */ - if ( - !parsedImportExportStatement.isAsyncImport && - (parsedImportExportStatement.statementType === "import" || - parsedImportExportStatement.statementType === "export") - ) { - if (parsedImportExportStatement.isTypeOnly) { - return `${parsedImportExportStatement.statementType} type ${parsedImportExportStatement.target} from "${moduleRoot}/index.d.ts";` - } else { - return `${parsedImportExportStatement.statementType} ${parsedImportExportStatement.target} from "${moduleRoot}/automerge_wasm.js";` - } - } - } - break - } - - //The replacer should return undefined when we want to let denoify replace the statement - return undefined - } -) diff --git a/javascript/src/conflicts.ts b/javascript/src/conflicts.ts deleted file mode 100644 index 52af23e1..00000000 --- a/javascript/src/conflicts.ts +++ /dev/null @@ -1,100 +0,0 @@ -import { Counter, type AutomergeValue } from "./types" -import { Text } from "./text" -import { type AutomergeValue as UnstableAutomergeValue } from "./unstable_types" -import { type Target, Text1Target, Text2Target } from "./proxies" -import { mapProxy, listProxy, ValueType } from "./proxies" -import type { Prop, ObjID } from "@automerge/automerge-wasm" -import { Automerge } from "@automerge/automerge-wasm" - -export type ConflictsF = { [key: string]: ValueType } -export type Conflicts = ConflictsF -export type UnstableConflicts = ConflictsF - -export function stableConflictAt( - context: Automerge, - objectId: ObjID, - prop: Prop -): Conflicts | undefined { - return conflictAt( - context, - objectId, - prop, - true, - (context: Automerge, conflictId: ObjID): AutomergeValue => { - return new Text(context.text(conflictId)) - } - ) -} - -export function unstableConflictAt( - context: Automerge, - objectId: ObjID, - prop: Prop -): UnstableConflicts | undefined { - return conflictAt( - context, - objectId, - prop, - true, - (context: Automerge, conflictId: ObjID): UnstableAutomergeValue => { - return context.text(conflictId) - } - ) -} - 
-function conflictAt( - context: Automerge, - objectId: ObjID, - prop: Prop, - textV2: boolean, - handleText: (a: Automerge, conflictId: ObjID) => ValueType -): ConflictsF | undefined { - const values = context.getAll(objectId, prop) - if (values.length <= 1) { - return - } - const result: ConflictsF = {} - for (const fullVal of values) { - switch (fullVal[0]) { - case "map": - result[fullVal[1]] = mapProxy( - context, - fullVal[1], - textV2, - [prop], - true - ) - break - case "list": - result[fullVal[1]] = listProxy( - context, - fullVal[1], - textV2, - [prop], - true - ) - break - case "text": - result[fullVal[1]] = handleText(context, fullVal[1] as ObjID) - break - case "str": - case "uint": - case "int": - case "f64": - case "boolean": - case "bytes": - case "null": - result[fullVal[2]] = fullVal[1] as ValueType - break - case "counter": - result[fullVal[2]] = new Counter(fullVal[1]) as ValueType - break - case "timestamp": - result[fullVal[2]] = new Date(fullVal[1]) as ValueType - break - default: - throw RangeError(`datatype ${fullVal[0]} unimplemented`) - } - } - return result -} diff --git a/javascript/src/constants.ts b/javascript/src/constants.ts deleted file mode 100644 index 7b714772..00000000 --- a/javascript/src/constants.ts +++ /dev/null @@ -1,12 +0,0 @@ -// Properties of the document root object - -export const STATE = Symbol.for("_am_meta") // symbol used to hide application metadata on automerge objects -export const TRACE = Symbol.for("_am_trace") // used for debugging -export const OBJECT_ID = Symbol.for("_am_objectId") // symbol used to hide the object id on automerge objects -export const IS_PROXY = Symbol.for("_am_isProxy") // symbol used to test if the document is a proxy object - -export const UINT = Symbol.for("_am_uint") -export const INT = Symbol.for("_am_int") -export const F64 = Symbol.for("_am_f64") -export const COUNTER = Symbol.for("_am_counter") -export const TEXT = Symbol.for("_am_text") diff --git a/javascript/src/index.ts 
b/javascript/src/index.ts deleted file mode 100644 index bf84c68d..00000000 --- a/javascript/src/index.ts +++ /dev/null @@ -1,242 +0,0 @@ -/** - * # Automerge - * - * This library provides the core automerge data structure and sync algorithms. - * Other libraries can be built on top of this one which provide IO and - * persistence. - * - * An automerge document can be though of an immutable POJO (plain old javascript - * object) which `automerge` tracks the history of, allowing it to be merged with - * any other automerge document. - * - * ## Creating and modifying a document - * - * You can create a document with {@link init} or {@link from} and then make - * changes to it with {@link change}, you can merge two documents with {@link - * merge}. - * - * ```ts - * import * as automerge from "@automerge/automerge" - * - * type DocType = {ideas: Array} - * - * let doc1 = automerge.init() - * doc1 = automerge.change(doc1, d => { - * d.ideas = [new automerge.Text("an immutable document")] - * }) - * - * let doc2 = automerge.init() - * doc2 = automerge.merge(doc2, automerge.clone(doc1)) - * doc2 = automerge.change(doc2, d => { - * d.ideas.push(new automerge.Text("which records it's history")) - * }) - * - * // Note the `automerge.clone` call, see the "cloning" section of this readme for - * // more detail - * doc1 = automerge.merge(doc1, automerge.clone(doc2)) - * doc1 = automerge.change(doc1, d => { - * d.ideas[0].deleteAt(13, 8) - * d.ideas[0].insertAt(13, "object") - * }) - * - * let doc3 = automerge.merge(doc1, doc2) - * // doc3 is now {ideas: ["an immutable object", "which records it's history"]} - * ``` - * - * ## Applying changes from another document - * - * You can get a representation of the result of the last {@link change} you made - * to a document with {@link getLastLocalChange} and you can apply that change to - * another document using {@link applyChanges}. 
- * - * If you need to get just the changes which are in one document but not in another - * you can use {@link getHeads} to get the heads of the document without the - * changes and then {@link getMissingDeps}, passing the result of {@link getHeads} - * on the document with the changes. - * - * ## Saving and loading documents - * - * You can {@link save} a document to generate a compresed binary representation of - * the document which can be loaded with {@link load}. If you have a document which - * you have recently made changes to you can generate recent changes with {@link - * saveIncremental}, this will generate all the changes since you last called - * `saveIncremental`, the changes generated can be applied to another document with - * {@link loadIncremental}. - * - * ## Viewing different versions of a document - * - * Occasionally you may wish to explicitly step to a different point in a document - * history. One common reason to do this is if you need to obtain a set of changes - * which take the document from one state to another in order to send those changes - * to another peer (or to save them somewhere). You can use {@link view} to do this. - * - * ```ts - * import * as automerge from "@automerge/automerge" - * import * as assert from "assert" - * - * let doc = automerge.from({ - * key1: "value1", - * }) - * - * // Make a clone of the document at this point, maybe this is actually on another - * // peer. 
- * let doc2 = automerge.clone < any > doc - * - * let heads = automerge.getHeads(doc) - * - * doc = - * automerge.change < - * any > - * (doc, - * d => { - * d.key2 = "value2" - * }) - * - * doc = - * automerge.change < - * any > - * (doc, - * d => { - * d.key3 = "value3" - * }) - * - * // At this point we've generated two separate changes, now we want to send - * // just those changes to someone else - * - * // view is a cheap reference based copy of a document at a given set of heads - * let before = automerge.view(doc, heads) - * - * // This view doesn't show the last two changes in the document state - * assert.deepEqual(before, { - * key1: "value1", - * }) - * - * // Get the changes to send to doc2 - * let changes = automerge.getChanges(before, doc) - * - * // Apply the changes at doc2 - * doc2 = automerge.applyChanges < any > (doc2, changes)[0] - * assert.deepEqual(doc2, { - * key1: "value1", - * key2: "value2", - * key3: "value3", - * }) - * ``` - * - * If you have a {@link view} of a document which you want to make changes to you - * can {@link clone} the viewed document. - * - * ## Syncing - * - * The sync protocol is stateful. This means that we start by creating a {@link - * SyncState} for each peer we are communicating with using {@link initSyncState}. - * Then we generate a message to send to the peer by calling {@link - * generateSyncMessage}. When we receive a message from the peer we call {@link - * receiveSyncMessage}. Here's a simple example of a loop which just keeps two - * peers in sync. 
- * - * ```ts - * let sync1 = automerge.initSyncState() - * let msg: Uint8Array | null - * ;[sync1, msg] = automerge.generateSyncMessage(doc1, sync1) - * - * while (true) { - * if (msg != null) { - * network.send(msg) - * } - * let resp: Uint8Array = - * (network.receive()[(doc1, sync1, _ignore)] = - * automerge.receiveSyncMessage(doc1, sync1, resp)[(sync1, msg)] = - * automerge.generateSyncMessage(doc1, sync1)) - * } - * ``` - * - * ## Conflicts - * - * The only time conflicts occur in automerge documents is in concurrent - * assignments to the same key in an object. In this case automerge - * deterministically chooses an arbitrary value to present to the application but - * you can examine the conflicts using {@link getConflicts}. - * - * ``` - * import * as automerge from "@automerge/automerge" - * - * type Profile = { - * pets: Array<{name: string, type: string}> - * } - * - * let doc1 = automerge.init("aaaa") - * doc1 = automerge.change(doc1, d => { - * d.pets = [{name: "Lassie", type: "dog"}] - * }) - * let doc2 = automerge.init("bbbb") - * doc2 = automerge.merge(doc2, automerge.clone(doc1)) - * - * doc2 = automerge.change(doc2, d => { - * d.pets[0].name = "Beethoven" - * }) - * - * doc1 = automerge.change(doc1, d => { - * d.pets[0].name = "Babe" - * }) - * - * const doc3 = automerge.merge(doc1, doc2) - * - * // Note that here we pass `doc3.pets`, not `doc3` - * let conflicts = automerge.getConflicts(doc3.pets[0], "name") - * - * // The two conflicting values are the keys of the conflicts object - * assert.deepEqual(Object.values(conflicts), ["Babe", Beethoven"]) - * ``` - * - * ## Actor IDs - * - * By default automerge will generate a random actor ID for you, but most methods - * for creating a document allow you to set the actor ID. You can get the actor ID - * associated with the document by calling {@link getActorId}. Actor IDs must not - * be used in concurrent threads of executiong - all changes by a given actor ID - * are expected to be sequential. 
- * - * ## Listening to patches - * - * Sometimes you want to respond to changes made to an automerge document. In this - * case you can use the {@link PatchCallback} type to receive notifications when - * changes have been made. - * - * ## Cloning - * - * Currently you cannot make mutating changes (i.e. call {@link change}) to a - * document which you have two pointers to. For example, in this code: - * - * ```javascript - * let doc1 = automerge.init() - * let doc2 = automerge.change(doc1, d => (d.key = "value")) - * ``` - * - * `doc1` and `doc2` are both pointers to the same state. Any attempt to call - * mutating methods on `doc1` will now result in an error like - * - * Attempting to change an out of date document - * - * If you encounter this you need to clone the original document, the above sample - * would work as: - * - * ```javascript - * let doc1 = automerge.init() - * let doc2 = automerge.change(automerge.clone(doc1), d => (d.key = "value")) - * ``` - * @packageDocumentation - * - * ## The {@link unstable} module - * - * We are working on some changes to automerge which are not yet complete and - * will result in backwards incompatible API changes. Once these changes are - * ready for production use we will release a new major version of automerge. - * However, until that point you can use the {@link unstable} module to try out - * the new features, documents from the {@link unstable} module are - * interoperable with documents from the main module. Please see the docs for - * the {@link unstable} module for more details. 
- */ -export * from "./stable" -import * as unstable from "./unstable" -export { unstable } diff --git a/javascript/src/internal_state.ts b/javascript/src/internal_state.ts deleted file mode 100644 index f3da49b1..00000000 --- a/javascript/src/internal_state.ts +++ /dev/null @@ -1,43 +0,0 @@ -import { type ObjID, type Heads, Automerge } from "@automerge/automerge-wasm" - -import { STATE, OBJECT_ID, TRACE, IS_PROXY } from "./constants" - -import type { Doc, PatchCallback } from "./types" - -export interface InternalState { - handle: Automerge - heads: Heads | undefined - freeze: boolean - patchCallback?: PatchCallback - textV2: boolean -} - -export function _state(doc: Doc, checkroot = true): InternalState { - if (typeof doc !== "object") { - throw new RangeError("must be the document root") - } - const state = Reflect.get(doc, STATE) as InternalState - if ( - state === undefined || - state == null || - (checkroot && _obj(doc) !== "_root") - ) { - throw new RangeError("must be the document root") - } - return state -} - -export function _trace(doc: Doc): string | undefined { - return Reflect.get(doc, TRACE) as string -} - -export function _obj(doc: Doc): ObjID | null { - if (!(typeof doc === "object") || doc === null) { - return null - } - return Reflect.get(doc, OBJECT_ID) as ObjID -} - -export function _is_proxy(doc: Doc): boolean { - return !!Reflect.get(doc, IS_PROXY) -} diff --git a/javascript/src/low_level.ts b/javascript/src/low_level.ts deleted file mode 100644 index f44f3a32..00000000 --- a/javascript/src/low_level.ts +++ /dev/null @@ -1,58 +0,0 @@ -import { - type API, - Automerge, - type Change, - type DecodedChange, - type Actor, - SyncState, - type SyncMessage, - type JsSyncState, - type DecodedSyncMessage, - type ChangeToEncode, -} from "@automerge/automerge-wasm" -export type { ChangeToEncode } from "@automerge/automerge-wasm" - -export function UseApi(api: API) { - for (const k in api) { - // eslint-disable-next-line 
@typescript-eslint/no-extra-semi,@typescript-eslint/no-explicit-any - ;(ApiHandler as any)[k] = (api as any)[k] - } -} - -/* eslint-disable */ -export const ApiHandler: API = { - create(textV2: boolean, actor?: Actor): Automerge { - throw new RangeError("Automerge.use() not called") - }, - load(data: Uint8Array, textV2: boolean, actor?: Actor): Automerge { - throw new RangeError("Automerge.use() not called (load)") - }, - encodeChange(change: ChangeToEncode): Change { - throw new RangeError("Automerge.use() not called (encodeChange)") - }, - decodeChange(change: Change): DecodedChange { - throw new RangeError("Automerge.use() not called (decodeChange)") - }, - initSyncState(): SyncState { - throw new RangeError("Automerge.use() not called (initSyncState)") - }, - encodeSyncMessage(message: DecodedSyncMessage): SyncMessage { - throw new RangeError("Automerge.use() not called (encodeSyncMessage)") - }, - decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage { - throw new RangeError("Automerge.use() not called (decodeSyncMessage)") - }, - encodeSyncState(state: SyncState): Uint8Array { - throw new RangeError("Automerge.use() not called (encodeSyncState)") - }, - decodeSyncState(data: Uint8Array): SyncState { - throw new RangeError("Automerge.use() not called (decodeSyncState)") - }, - exportSyncState(state: SyncState): JsSyncState { - throw new RangeError("Automerge.use() not called (exportSyncState)") - }, - importSyncState(state: JsSyncState): SyncState { - throw new RangeError("Automerge.use() not called (importSyncState)") - }, -} -/* eslint-enable */ diff --git a/javascript/src/numbers.ts b/javascript/src/numbers.ts deleted file mode 100644 index 7ad95998..00000000 --- a/javascript/src/numbers.ts +++ /dev/null @@ -1,54 +0,0 @@ -// Convenience classes to allow users to strictly specify the number type they want - -import { INT, UINT, F64 } from "./constants" - -export class Int { - value: number - - constructor(value: number) { - if ( - !( - 
Number.isInteger(value) && - value <= Number.MAX_SAFE_INTEGER && - value >= Number.MIN_SAFE_INTEGER - ) - ) { - throw new RangeError(`Value ${value} cannot be a uint`) - } - this.value = value - Reflect.defineProperty(this, INT, { value: true }) - Object.freeze(this) - } -} - -export class Uint { - value: number - - constructor(value: number) { - if ( - !( - Number.isInteger(value) && - value <= Number.MAX_SAFE_INTEGER && - value >= 0 - ) - ) { - throw new RangeError(`Value ${value} cannot be a uint`) - } - this.value = value - Reflect.defineProperty(this, UINT, { value: true }) - Object.freeze(this) - } -} - -export class Float64 { - value: number - - constructor(value: number) { - if (typeof value !== "number") { - throw new RangeError(`Value ${value} cannot be a float64`) - } - this.value = value || 0.0 - Reflect.defineProperty(this, F64, { value: true }) - Object.freeze(this) - } -} diff --git a/javascript/src/proxies.ts b/javascript/src/proxies.ts deleted file mode 100644 index 54a8dd71..00000000 --- a/javascript/src/proxies.ts +++ /dev/null @@ -1,1005 +0,0 @@ -/* eslint-disable @typescript-eslint/no-explicit-any */ -import { Text } from "./text" -import { - Automerge, - type Heads, - type ObjID, - type Prop, -} from "@automerge/automerge-wasm" - -import type { AutomergeValue, ScalarValue, MapValue, ListValue } from "./types" -import { - type AutomergeValue as UnstableAutomergeValue, - MapValue as UnstableMapValue, - ListValue as UnstableListValue, -} from "./unstable_types" -import { Counter, getWriteableCounter } from "./counter" -import { - STATE, - TRACE, - IS_PROXY, - OBJECT_ID, - COUNTER, - INT, - UINT, - F64, -} from "./constants" -import { RawString } from "./raw_string" - -type TargetCommon = { - context: Automerge - objectId: ObjID - path: Array - readonly: boolean - heads?: Array - cache: object - trace?: any - frozen: boolean -} - -export type Text2Target = TargetCommon & { textV2: true } -export type Text1Target = TargetCommon & { textV2: false } 
-export type Target = Text1Target | Text2Target - -export type ValueType = T extends Text2Target - ? UnstableAutomergeValue - : T extends Text1Target - ? AutomergeValue - : never -type MapValueType = T extends Text2Target - ? UnstableMapValue - : T extends Text1Target - ? MapValue - : never -type ListValueType = T extends Text2Target - ? UnstableListValue - : T extends Text1Target - ? ListValue - : never - -function parseListIndex(key: any) { - if (typeof key === "string" && /^[0-9]+$/.test(key)) key = parseInt(key, 10) - if (typeof key !== "number") { - return key - } - if (key < 0 || isNaN(key) || key === Infinity || key === -Infinity) { - throw new RangeError("A list index must be positive, but you passed " + key) - } - return key -} - -function valueAt( - target: T, - prop: Prop -): ValueType | undefined { - const { context, objectId, path, readonly, heads, textV2 } = target - const value = context.getWithType(objectId, prop, heads) - if (value === null) { - return - } - const datatype = value[0] - const val = value[1] - switch (datatype) { - case undefined: - return - case "map": - return mapProxy( - context, - val as ObjID, - textV2, - [...path, prop], - readonly, - heads - ) - case "list": - return listProxy( - context, - val as ObjID, - textV2, - [...path, prop], - readonly, - heads - ) - case "text": - if (textV2) { - return context.text(val as ObjID, heads) as ValueType - } else { - return textProxy( - context, - val as ObjID, - [...path, prop], - readonly, - heads - ) as unknown as ValueType - } - case "str": - return val as ValueType - case "uint": - return val as ValueType - case "int": - return val as ValueType - case "f64": - return val as ValueType - case "boolean": - return val as ValueType - case "null": - return null as ValueType - case "bytes": - return val as ValueType - case "timestamp": - return val as ValueType - case "counter": { - if (readonly) { - return new Counter(val as number) as ValueType - } else { - const counter: Counter = 
getWriteableCounter( - val as number, - context, - path, - objectId, - prop - ) - return counter as ValueType - } - } - default: - throw RangeError(`datatype ${datatype} unimplemented`) - } -} - -type ImportedValue = - | [null, "null"] - | [number, "uint"] - | [number, "int"] - | [number, "f64"] - | [number, "counter"] - | [number, "timestamp"] - | [string, "str"] - | [Text | string, "text"] - | [Uint8Array, "bytes"] - | [Array, "list"] - | [Record, "map"] - | [boolean, "boolean"] - -function import_value(value: any, textV2: boolean): ImportedValue { - switch (typeof value) { - case "object": - if (value == null) { - return [null, "null"] - } else if (value[UINT]) { - return [value.value, "uint"] - } else if (value[INT]) { - return [value.value, "int"] - } else if (value[F64]) { - return [value.value, "f64"] - } else if (value[COUNTER]) { - return [value.value, "counter"] - } else if (value instanceof Date) { - return [value.getTime(), "timestamp"] - } else if (value instanceof RawString) { - return [value.val, "str"] - } else if (value instanceof Text) { - return [value, "text"] - } else if (value instanceof Uint8Array) { - return [value, "bytes"] - } else if (value instanceof Array) { - return [value, "list"] - } else if (Object.getPrototypeOf(value) === Object.getPrototypeOf({})) { - return [value, "map"] - } else if (value[OBJECT_ID]) { - throw new RangeError( - "Cannot create a reference to an existing document object" - ) - } else { - throw new RangeError(`Cannot assign unknown object: ${value}`) - } - case "boolean": - return [value, "boolean"] - case "number": - if (Number.isInteger(value)) { - return [value, "int"] - } else { - return [value, "f64"] - } - case "string": - if (textV2) { - return [value, "text"] - } else { - return [value, "str"] - } - default: - throw new RangeError(`Unsupported type of value: ${typeof value}`) - } -} - -const MapHandler = { - get( - target: T, - key: any - ): ValueType | ObjID | boolean | { handle: Automerge } { - const { 
context, objectId, cache } = target - if (key === Symbol.toStringTag) { - return target[Symbol.toStringTag] - } - if (key === OBJECT_ID) return objectId - if (key === IS_PROXY) return true - if (key === TRACE) return target.trace - if (key === STATE) return { handle: context } - if (!cache[key]) { - cache[key] = valueAt(target, key) - } - return cache[key] - }, - - set(target: Target, key: any, val: any) { - const { context, objectId, path, readonly, frozen, textV2 } = target - target.cache = {} // reset cache on set - if (val && val[OBJECT_ID]) { - throw new RangeError( - "Cannot create a reference to an existing document object" - ) - } - if (key === TRACE) { - target.trace = val - return true - } - const [value, datatype] = import_value(val, textV2) - if (frozen) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (readonly) { - throw new RangeError(`Object property "${key}" cannot be modified`) - } - switch (datatype) { - case "list": { - const list = context.putObject(objectId, key, []) - const proxyList = listProxy( - context, - list, - textV2, - [...path, key], - readonly - ) - for (let i = 0; i < value.length; i++) { - proxyList[i] = value[i] - } - break - } - case "text": { - if (textV2) { - assertString(value) - context.putObject(objectId, key, value) - } else { - assertText(value) - const text = context.putObject(objectId, key, "") - const proxyText = textProxy(context, text, [...path, key], readonly) - for (let i = 0; i < value.length; i++) { - proxyText[i] = value.get(i) - } - } - break - } - case "map": { - const map = context.putObject(objectId, key, {}) - const proxyMap = mapProxy( - context, - map, - textV2, - [...path, key], - readonly - ) - for (const key in value) { - proxyMap[key] = value[key] - } - break - } - default: - context.put(objectId, key, value, datatype) - } - return true - }, - - deleteProperty(target: Target, key: any) { - const { context, objectId, readonly } = target - target.cache = {} // reset 
cache on delete - if (readonly) { - throw new RangeError(`Object property "${key}" cannot be modified`) - } - context.delete(objectId, key) - return true - }, - - has(target: Target, key: any) { - const value = this.get(target, key) - return value !== undefined - }, - - getOwnPropertyDescriptor(target: Target, key: any) { - // const { context, objectId } = target - const value = this.get(target, key) - if (typeof value !== "undefined") { - return { - configurable: true, - enumerable: true, - value, - } - } - }, - - ownKeys(target: Target) { - const { context, objectId, heads } = target - // FIXME - this is a tmp workaround until fix the dupe key bug in keys() - const keys = context.keys(objectId, heads) - return [...new Set(keys)] - }, -} - -const ListHandler = { - get( - target: T, - index: any - ): - | ValueType - | boolean - | ObjID - | { handle: Automerge } - | number - | ((_: any) => boolean) { - const { context, objectId, heads } = target - index = parseListIndex(index) - if (index === Symbol.hasInstance) { - return (instance: any) => { - return Array.isArray(instance) - } - } - if (index === Symbol.toStringTag) { - return target[Symbol.toStringTag] - } - if (index === OBJECT_ID) return objectId - if (index === IS_PROXY) return true - if (index === TRACE) return target.trace - if (index === STATE) return { handle: context } - if (index === "length") return context.length(objectId, heads) - if (typeof index === "number") { - return valueAt(target, index) as ValueType - } else { - return listMethods(target)[index] - } - }, - - set(target: Target, index: any, val: any) { - const { context, objectId, path, readonly, frozen, textV2 } = target - index = parseListIndex(index) - if (val && val[OBJECT_ID]) { - throw new RangeError( - "Cannot create a reference to an existing document object" - ) - } - if (index === TRACE) { - target.trace = val - return true - } - if (typeof index == "string") { - throw new RangeError("list index must be a number") - } - const [value, 
datatype] = import_value(val, textV2) - if (frozen) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (readonly) { - throw new RangeError(`Object property "${index}" cannot be modified`) - } - switch (datatype) { - case "list": { - let list: ObjID - if (index >= context.length(objectId)) { - list = context.insertObject(objectId, index, []) - } else { - list = context.putObject(objectId, index, []) - } - const proxyList = listProxy( - context, - list, - textV2, - [...path, index], - readonly - ) - proxyList.splice(0, 0, ...value) - break - } - case "text": { - if (textV2) { - assertString(value) - if (index >= context.length(objectId)) { - context.insertObject(objectId, index, value) - } else { - context.putObject(objectId, index, value) - } - } else { - let text: ObjID - assertText(value) - if (index >= context.length(objectId)) { - text = context.insertObject(objectId, index, "") - } else { - text = context.putObject(objectId, index, "") - } - const proxyText = textProxy(context, text, [...path, index], readonly) - proxyText.splice(0, 0, ...value) - } - break - } - case "map": { - let map: ObjID - if (index >= context.length(objectId)) { - map = context.insertObject(objectId, index, {}) - } else { - map = context.putObject(objectId, index, {}) - } - const proxyMap = mapProxy( - context, - map, - textV2, - [...path, index], - readonly - ) - for (const key in value) { - proxyMap[key] = value[key] - } - break - } - default: - if (index >= context.length(objectId)) { - context.insert(objectId, index, value, datatype) - } else { - context.put(objectId, index, value, datatype) - } - } - return true - }, - - deleteProperty(target: Target, index: any) { - const { context, objectId } = target - index = parseListIndex(index) - const elem = context.get(objectId, index) - if (elem != null && elem[0] == "counter") { - throw new TypeError( - "Unsupported operation: deleting a counter from a list" - ) - } - context.delete(objectId, index) - 
return true - }, - - has(target: Target, index: any) { - const { context, objectId, heads } = target - index = parseListIndex(index) - if (typeof index === "number") { - return index < context.length(objectId, heads) - } - return index === "length" - }, - - getOwnPropertyDescriptor(target: Target, index: any) { - const { context, objectId, heads } = target - - if (index === "length") - return { writable: true, value: context.length(objectId, heads) } - if (index === OBJECT_ID) - return { configurable: false, enumerable: false, value: objectId } - - index = parseListIndex(index) - - const value = valueAt(target, index) - return { configurable: true, enumerable: true, value } - }, - - getPrototypeOf(target: Target) { - return Object.getPrototypeOf(target) - }, - ownKeys(/*target*/): string[] { - const keys: string[] = [] - // uncommenting this causes assert.deepEqual() to fail when comparing to a pojo array - // but not uncommenting it causes for (i in list) {} to not enumerate values properly - //const {context, objectId, heads } = target - //for (let i = 0; i < target.context.length(objectId, heads); i++) { keys.push(i.toString()) } - keys.push("length") - return keys - }, -} - -const TextHandler = Object.assign({}, ListHandler, { - get(target: Target, index: any) { - const { context, objectId, heads } = target - index = parseListIndex(index) - if (index === Symbol.hasInstance) { - return (instance: any) => { - return Array.isArray(instance) - } - } - if (index === Symbol.toStringTag) { - return target[Symbol.toStringTag] - } - if (index === OBJECT_ID) return objectId - if (index === IS_PROXY) return true - if (index === TRACE) return target.trace - if (index === STATE) return { handle: context } - if (index === "length") return context.length(objectId, heads) - if (typeof index === "number") { - return valueAt(target, index) - } else { - return textMethods(target)[index] || listMethods(target)[index] - } - }, - getPrototypeOf(/*target*/) { - return 
Object.getPrototypeOf(new Text()) - }, -}) - -export function mapProxy( - context: Automerge, - objectId: ObjID, - textV2: boolean, - path?: Prop[], - readonly?: boolean, - heads?: Heads -): MapValueType { - const target: Target = { - context, - objectId, - path: path || [], - readonly: !!readonly, - frozen: false, - heads, - cache: {}, - textV2, - } - const proxied = {} - Object.assign(proxied, target) - const result = new Proxy(proxied, MapHandler) - // conversion through unknown is necessary because the types are so different - return result as unknown as MapValueType -} - -export function listProxy( - context: Automerge, - objectId: ObjID, - textV2: boolean, - path?: Prop[], - readonly?: boolean, - heads?: Heads -): ListValueType { - const target: Target = { - context, - objectId, - path: path || [], - readonly: !!readonly, - frozen: false, - heads, - cache: {}, - textV2, - } - const proxied = [] - Object.assign(proxied, target) - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - return new Proxy(proxied, ListHandler) as unknown as ListValue -} - -interface TextProxy extends Text { - splice: (index: any, del: any, ...vals: any[]) => void -} - -export function textProxy( - context: Automerge, - objectId: ObjID, - path?: Prop[], - readonly?: boolean, - heads?: Heads -): TextProxy { - const target: Target = { - context, - objectId, - path: path || [], - readonly: !!readonly, - frozen: false, - heads, - cache: {}, - textV2: false, - } - const proxied = {} - Object.assign(proxied, target) - return new Proxy(proxied, TextHandler) as unknown as TextProxy -} - -export function rootProxy( - context: Automerge, - textV2: boolean, - readonly?: boolean -): T { - /* eslint-disable-next-line */ - return mapProxy(context, "_root", textV2, [], !!readonly) -} - -function listMethods(target: T) { - const { context, objectId, path, readonly, frozen, heads, textV2 } = target - const methods = { - deleteAt(index: number, numDelete: number) { - if 
(typeof numDelete === "number") { - context.splice(objectId, index, numDelete) - } else { - context.delete(objectId, index) - } - return this - }, - - fill(val: ScalarValue, start: number, end: number) { - const [value, datatype] = import_value(val, textV2) - const length = context.length(objectId) - start = parseListIndex(start || 0) - end = parseListIndex(end || length) - for (let i = start; i < Math.min(end, length); i++) { - if (datatype === "list" || datatype === "map") { - context.putObject(objectId, i, value) - } else if (datatype === "text") { - if (textV2) { - assertString(value) - context.putObject(objectId, i, value) - } else { - assertText(value) - const text = context.putObject(objectId, i, "") - const proxyText = textProxy(context, text, [...path, i], readonly) - for (let i = 0; i < value.length; i++) { - proxyText[i] = value.get(i) - } - } - } else { - context.put(objectId, i, value, datatype) - } - } - return this - }, - - indexOf(o: any, start = 0) { - const length = context.length(objectId) - for (let i = start; i < length; i++) { - const value = context.getWithType(objectId, i, heads) - if (value && (value[1] === o[OBJECT_ID] || value[1] === o)) { - return i - } - } - return -1 - }, - - insertAt(index: number, ...values: any[]) { - this.splice(index, 0, ...values) - return this - }, - - pop() { - const length = context.length(objectId) - if (length == 0) { - return undefined - } - const last = valueAt(target, length - 1) - context.delete(objectId, length - 1) - return last - }, - - push(...values: any[]) { - const len = context.length(objectId) - this.splice(len, 0, ...values) - return context.length(objectId) - }, - - shift() { - if (context.length(objectId) == 0) return - const first = valueAt(target, 0) - context.delete(objectId, 0) - return first - }, - - splice(index: any, del: any, ...vals: any[]) { - index = parseListIndex(index) - del = parseListIndex(del) - for (const val of vals) { - if (val && val[OBJECT_ID]) { - throw new RangeError( 
- "Cannot create a reference to an existing document object" - ) - } - } - if (frozen) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (readonly) { - throw new RangeError( - "Sequence object cannot be modified outside of a change block" - ) - } - const result: ValueType[] = [] - for (let i = 0; i < del; i++) { - const value = valueAt(target, index) - if (value !== undefined) { - result.push(value) - } - context.delete(objectId, index) - } - const values = vals.map(val => import_value(val, textV2)) - for (const [value, datatype] of values) { - switch (datatype) { - case "list": { - const list = context.insertObject(objectId, index, []) - const proxyList = listProxy( - context, - list, - textV2, - [...path, index], - readonly - ) - proxyList.splice(0, 0, ...value) - break - } - case "text": { - if (textV2) { - assertString(value) - context.insertObject(objectId, index, value) - } else { - const text = context.insertObject(objectId, index, "") - const proxyText = textProxy( - context, - text, - [...path, index], - readonly - ) - proxyText.splice(0, 0, ...value) - } - break - } - case "map": { - const map = context.insertObject(objectId, index, {}) - const proxyMap = mapProxy( - context, - map, - textV2, - [...path, index], - readonly - ) - for (const key in value) { - proxyMap[key] = value[key] - } - break - } - default: - context.insert(objectId, index, value, datatype) - } - index += 1 - } - return result - }, - - unshift(...values: any) { - this.splice(0, 0, ...values) - return context.length(objectId) - }, - - entries() { - const i = 0 - const iterator = { - next: () => { - const value = valueAt(target, i) - if (value === undefined) { - return { value: undefined, done: true } - } else { - return { value: [i, value], done: false } - } - }, - } - return iterator - }, - - keys() { - let i = 0 - const len = context.length(objectId, heads) - const iterator = { - next: () => { - let value: undefined | number = undefined - if (i < 
len) { - value = i - i++ - } - return { value, done: true } - }, - } - return iterator - }, - - values() { - const i = 0 - const iterator = { - next: () => { - const value = valueAt(target, i) - if (value === undefined) { - return { value: undefined, done: true } - } else { - return { value, done: false } - } - }, - } - return iterator - }, - - toArray(): ValueType[] { - const list: Array> = [] - let value: ValueType | undefined - do { - value = valueAt(target, list.length) - if (value !== undefined) { - list.push(value) - } - } while (value !== undefined) - - return list - }, - - map(f: (_a: ValueType, _n: number) => U): U[] { - return this.toArray().map(f) - }, - - toString(): string { - return this.toArray().toString() - }, - - toLocaleString(): string { - return this.toArray().toLocaleString() - }, - - forEach(f: (_a: ValueType, _n: number) => undefined) { - return this.toArray().forEach(f) - }, - - // todo: real concat function is different - concat(other: ValueType[]): ValueType[] { - return this.toArray().concat(other) - }, - - every(f: (_a: ValueType, _n: number) => boolean): boolean { - return this.toArray().every(f) - }, - - filter(f: (_a: ValueType, _n: number) => boolean): ValueType[] { - return this.toArray().filter(f) - }, - - find( - f: (_a: ValueType, _n: number) => boolean - ): ValueType | undefined { - let index = 0 - for (const v of this) { - if (f(v, index)) { - return v - } - index += 1 - } - }, - - findIndex(f: (_a: ValueType, _n: number) => boolean): number { - let index = 0 - for (const v of this) { - if (f(v, index)) { - return index - } - index += 1 - } - return -1 - }, - - includes(elem: ValueType): boolean { - return this.find(e => e === elem) !== undefined - }, - - join(sep?: string): string { - return this.toArray().join(sep) - }, - - reduce( - f: (acc: U, currentValue: ValueType) => U, - initialValue: U - ): U | undefined { - return this.toArray().reduce(f, initialValue) - }, - - reduceRight( - f: (acc: U, item: ValueType) => U, - 
initialValue: U - ): U | undefined { - return this.toArray().reduceRight(f, initialValue) - }, - - lastIndexOf(search: ValueType, fromIndex = +Infinity): number { - // this can be faster - return this.toArray().lastIndexOf(search, fromIndex) - }, - - slice(index?: number, num?: number): ValueType[] { - return this.toArray().slice(index, num) - }, - - some(f: (v: ValueType, i: number) => boolean): boolean { - let index = 0 - for (const v of this) { - if (f(v, index)) { - return true - } - index += 1 - } - return false - }, - - [Symbol.iterator]: function* () { - let i = 0 - let value = valueAt(target, i) - while (value !== undefined) { - yield value - i += 1 - value = valueAt(target, i) - } - }, - } - return methods -} - -function textMethods(target: Target) { - const { context, objectId, heads } = target - const methods = { - set(index: number, value: any) { - return (this[index] = value) - }, - get(index: number): AutomergeValue { - return this[index] - }, - toString(): string { - return context.text(objectId, heads).replace(//g, "") - }, - toSpans(): AutomergeValue[] { - const spans: AutomergeValue[] = [] - let chars = "" - const length = context.length(objectId) - for (let i = 0; i < length; i++) { - const value = this[i] - if (typeof value === "string") { - chars += value - } else { - if (chars.length > 0) { - spans.push(chars) - chars = "" - } - spans.push(value) - } - } - if (chars.length > 0) { - spans.push(chars) - } - return spans - }, - toJSON(): string { - return this.toString() - }, - indexOf(o: any, start = 0) { - const text = context.text(objectId) - return text.indexOf(o, start) - }, - } - return methods -} - -function assertText(value: Text | string): asserts value is Text { - if (!(value instanceof Text)) { - throw new Error("value was not a Text instance") - } -} - -function assertString(value: Text | string): asserts value is string { - if (typeof value !== "string") { - throw new Error("value was not a string") - } -} diff --git 
a/javascript/src/raw_string.ts b/javascript/src/raw_string.ts deleted file mode 100644 index 7fc02084..00000000 --- a/javascript/src/raw_string.ts +++ /dev/null @@ -1,6 +0,0 @@ -export class RawString { - val: string - constructor(val: string) { - this.val = val - } -} diff --git a/javascript/src/stable.ts b/javascript/src/stable.ts deleted file mode 100644 index e83b127f..00000000 --- a/javascript/src/stable.ts +++ /dev/null @@ -1,944 +0,0 @@ -/** @hidden **/ -export { /** @hidden */ uuid } from "./uuid" - -import { rootProxy } from "./proxies" -import { STATE } from "./constants" - -import { - type AutomergeValue, - Counter, - type Doc, - type PatchCallback, -} from "./types" -export { - type AutomergeValue, - Counter, - type Doc, - Int, - Uint, - Float64, - type Patch, - type PatchCallback, - type ScalarValue, -} from "./types" - -import { Text } from "./text" -export { Text } from "./text" - -import type { - API as WasmAPI, - Actor as ActorId, - Prop, - ObjID, - Change, - DecodedChange, - Heads, - MaterializeValue, - JsSyncState, - SyncMessage, - DecodedSyncMessage, -} from "@automerge/automerge-wasm" -export type { - PutPatch, - DelPatch, - SpliceTextPatch, - InsertPatch, - IncPatch, - SyncMessage, -} from "@automerge/automerge-wasm" - -/** @hidden **/ -type API = WasmAPI - -const SyncStateSymbol = Symbol("_syncstate") - -/** - * An opaque type tracking the state of sync with a remote peer - */ -type SyncState = JsSyncState & { _opaque: typeof SyncStateSymbol } - -import { ApiHandler, type ChangeToEncode, UseApi } from "./low_level" - -import { Automerge } from "@automerge/automerge-wasm" - -import { RawString } from "./raw_string" - -import { _state, _is_proxy, _trace, _obj } from "./internal_state" - -import { stableConflictAt } from "./conflicts" - -/** Options passed to {@link change}, and {@link emptyChange} - * @typeParam T - The type of value contained in the document - */ -export type ChangeOptions = { - /** A message which describes the changes */ - 
message?: string - /** The unix timestamp of the change (purely advisory, not used in conflict resolution) */ - time?: number - /** A callback which will be called to notify the caller of any changes to the document */ - patchCallback?: PatchCallback -} - -/** Options passed to {@link loadIncremental}, {@link applyChanges}, and {@link receiveSyncMessage} - * @typeParam T - The type of value contained in the document - */ -export type ApplyOptions = { patchCallback?: PatchCallback } - -/** - * A List is an extended Array that adds the two helper methods `deleteAt` and `insertAt`. - */ -export interface List extends Array { - insertAt(index: number, ...args: T[]): List - deleteAt(index: number, numDelete?: number): List -} - -/** - * To extend an arbitrary type, we have to turn any arrays that are part of the type's definition into Lists. - * So we recurse through the properties of T, turning any Arrays we find into Lists. - */ -export type Extend = - // is it an array? make it a list (we recursively extend the type of the array's elements as well) - T extends Array - ? List> - : // is it an object? recursively extend all of its properties - // eslint-disable-next-line @typescript-eslint/ban-types - T extends Object - ? 
{ [P in keyof T]: Extend } - : // otherwise leave the type alone - T - -/** - * Function which is called by {@link change} when making changes to a `Doc` - * @typeParam T - The type of value contained in the document - * - * This function may mutate `doc` - */ -export type ChangeFn = (doc: Extend) => void - -/** @hidden **/ -export interface State { - change: DecodedChange - snapshot: T -} - -/** @hidden **/ -export function use(api: API) { - UseApi(api) -} - -import * as wasm from "@automerge/automerge-wasm" -use(wasm) - -/** - * Options to be passed to {@link init} or {@link load} - * @typeParam T - The type of the value the document contains - */ -export type InitOptions = { - /** The actor ID to use for this document, a random one will be generated if `null` is passed */ - actor?: ActorId - freeze?: boolean - /** A callback which will be called with the initial patch once the document has finished loading */ - patchCallback?: PatchCallback - /** @hidden */ - enableTextV2?: boolean -} - -/** @hidden */ -export function getBackend(doc: Doc): Automerge { - return _state(doc).handle -} - -function importOpts(_actor?: ActorId | InitOptions): InitOptions { - if (typeof _actor === "object") { - return _actor - } else { - return { actor: _actor } - } -} - -/** - * Create a new automerge document - * - * @typeParam T - The type of value contained in the document. This will be the - * type that is passed to the change closure in {@link change} - * @param _opts - Either an actorId or an {@link InitOptions} (which may - * contain an actorId). 
If this is null the document will be initialised with a - * random actor ID - */ -export function init(_opts?: ActorId | InitOptions): Doc { - const opts = importOpts(_opts) - const freeze = !!opts.freeze - const patchCallback = opts.patchCallback - const handle = ApiHandler.create(opts.enableTextV2 || false, opts.actor) - handle.enablePatches(true) - handle.enableFreeze(!!opts.freeze) - handle.registerDatatype("counter", (n: number) => new Counter(n)) - const textV2 = opts.enableTextV2 || false - if (textV2) { - handle.registerDatatype("str", (n: string) => new RawString(n)) - } else { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - handle.registerDatatype("text", (n: any) => new Text(n)) - } - const doc = handle.materialize("/", undefined, { - handle, - heads: undefined, - freeze, - patchCallback, - textV2, - }) as Doc - return doc -} - -/** - * Make an immutable view of an automerge document as at `heads` - * - * @remarks - * The document returned from this function cannot be passed to {@link change}. - * This is because it shares the same underlying memory as `doc`, but it is - * consequently a very cheap copy. - * - * Note that this function will throw an error if any of the hashes in `heads` - * are not in the document. - * - * @typeParam T - The type of the value contained in the document - * @param doc - The document to create a view of - * @param heads - The hashes of the heads to create a view at - */ -export function view(doc: Doc, heads: Heads): Doc { - const state = _state(doc) - const handle = state.handle - return state.handle.materialize("/", heads, { - ...state, - handle, - heads, - }) as Doc -} - -/** - * Make a full writable copy of an automerge document - * - * @remarks - * Unlike {@link view} this function makes a full copy of the memory backing - * the document and can thus be passed to {@link change}. 
It also generates a - * new actor ID so that changes made in the new document do not create duplicate - * sequence numbers with respect to the old document. If you need control over - * the actor ID which is generated you can pass the actor ID as the second - * argument - * - * @typeParam T - The type of the value contained in the document - * @param doc - The document to clone - * @param _opts - Either an actor ID to use for the new doc or an {@link InitOptions} - */ -export function clone( - doc: Doc, - _opts?: ActorId | InitOptions -): Doc { - const state = _state(doc) - const heads = state.heads - const opts = importOpts(_opts) - const handle = state.handle.fork(opts.actor, heads) - - // `change` uses the presence of state.heads to determine if we are in a view - // set it to undefined to indicate that this is a full fat document - const { heads: _oldHeads, ...stateSansHeads } = state - return handle.applyPatches(doc, { ...stateSansHeads, handle }) -} - -/** Explicity free the memory backing a document. 
Note that this is note - * necessary in environments which support - * [`FinalizationRegistry`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/FinalizationRegistry) - */ -export function free(doc: Doc) { - return _state(doc).handle.free() -} - -/** - * Create an automerge document from a POJO - * - * @param initialState - The initial state which will be copied into the document - * @typeParam T - The type of the value passed to `from` _and_ the type the resulting document will contain - * @typeParam actor - The actor ID of the resulting document, if this is null a random actor ID will be used - * - * @example - * ``` - * const doc = automerge.from({ - * tasks: [ - * {description: "feed dogs", done: false} - * ] - * }) - * ``` - */ -export function from>( - initialState: T | Doc, - _opts?: ActorId | InitOptions -): Doc { - return change(init(_opts), d => Object.assign(d, initialState)) -} - -/** - * Update the contents of an automerge document - * @typeParam T - The type of the value contained in the document - * @param doc - The document to update - * @param options - Either a message, an {@link ChangeOptions}, or a {@link ChangeFn} - * @param callback - A `ChangeFn` to be used if `options` was a `string` - * - * Note that if the second argument is a function it will be used as the `ChangeFn` regardless of what the third argument is. 
- * - * @example A simple change - * ``` - * let doc1 = automerge.init() - * doc1 = automerge.change(doc1, d => { - * d.key = "value" - * }) - * assert.equal(doc1.key, "value") - * ``` - * - * @example A change with a message - * - * ``` - * doc1 = automerge.change(doc1, "add another value", d => { - * d.key2 = "value2" - * }) - * ``` - * - * @example A change with a message and a timestamp - * - * ``` - * doc1 = automerge.change(doc1, {message: "add another value", time: 1640995200}, d => { - * d.key2 = "value2" - * }) - * ``` - * - * @example responding to a patch callback - * ``` - * let patchedPath - * let patchCallback = patch => { - * patchedPath = patch.path - * } - * doc1 = automerge.change(doc1, {message, "add another value", time: 1640995200, patchCallback}, d => { - * d.key2 = "value2" - * }) - * assert.equal(patchedPath, ["key2"]) - * ``` - */ -export function change( - doc: Doc, - options: string | ChangeOptions | ChangeFn, - callback?: ChangeFn -): Doc { - if (typeof options === "function") { - return _change(doc, {}, options) - } else if (typeof callback === "function") { - if (typeof options === "string") { - options = { message: options } - } - return _change(doc, options, callback) - } else { - throw RangeError("Invalid args for change") - } -} - -function progressDocument( - doc: Doc, - heads: Heads | null, - callback?: PatchCallback -): Doc { - if (heads == null) { - return doc - } - const state = _state(doc) - const nextState = { ...state, heads: undefined } - const nextDoc = state.handle.applyPatches(doc, nextState, callback) - state.heads = heads - return nextDoc -} - -function _change( - doc: Doc, - options: ChangeOptions, - callback: ChangeFn -): Doc { - if (typeof callback !== "function") { - throw new RangeError("invalid change function") - } - - const state = _state(doc) - - if (doc === undefined || state === undefined) { - throw new RangeError("must be the document root") - } - if (state.heads) { - throw new RangeError( - "Attempting to 
change an outdated document. Use Automerge.clone() if you wish to make a writable copy." - ) - } - if (_is_proxy(doc)) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const heads = state.handle.getHeads() - try { - state.heads = heads - const root: T = rootProxy(state.handle, state.textV2) - callback(root as Extend) - if (state.handle.pendingOps() === 0) { - state.heads = undefined - return doc - } else { - state.handle.commit(options.message, options.time) - return progressDocument( - doc, - heads, - options.patchCallback || state.patchCallback - ) - } - } catch (e) { - state.heads = undefined - state.handle.rollback() - throw e - } -} - -/** - * Make a change to a document which does not modify the document - * - * @param doc - The doc to add the empty change to - * @param options - Either a message or a {@link ChangeOptions} for the new change - * - * Why would you want to do this? One reason might be that you have merged - * changes from some other peers and you want to generate a change which - * depends on those merged changes so that you can sign the new change with all - * of the merged changes as part of the new change. - */ -export function emptyChange( - doc: Doc, - options: string | ChangeOptions | void -) { - if (options === undefined) { - options = {} - } - if (typeof options === "string") { - options = { message: options } - } - - const state = _state(doc) - - if (state.heads) { - throw new RangeError( - "Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy." - ) - } - if (_is_proxy(doc)) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - - const heads = state.handle.getHeads() - state.handle.emptyChange(options.message, options.time) - return progressDocument(doc, heads) -} - -/** - * Load an automerge document from a compressed document produce by {@link save} - * - * @typeParam T - The type of the value which is contained in the document. 
- * Note that no validation is done to make sure this type is in - * fact the type of the contained value so be a bit careful - * @param data - The compressed document - * @param _opts - Either an actor ID or some {@link InitOptions}, if the actor - * ID is null a random actor ID will be created - * - * Note that `load` will throw an error if passed incomplete content (for - * example if you are receiving content over the network and don't know if you - * have the complete document yet). If you need to handle incomplete content use - * {@link init} followed by {@link loadIncremental}. - */ -export function load( - data: Uint8Array, - _opts?: ActorId | InitOptions -): Doc { - const opts = importOpts(_opts) - const actor = opts.actor - const patchCallback = opts.patchCallback - const handle = ApiHandler.load(data, opts.enableTextV2 || false, actor) - handle.enablePatches(true) - handle.enableFreeze(!!opts.freeze) - handle.registerDatatype("counter", (n: number) => new Counter(n)) - const textV2 = opts.enableTextV2 || false - if (textV2) { - handle.registerDatatype("str", (n: string) => new RawString(n)) - } else { - handle.registerDatatype("text", (n: string) => new Text(n)) - } - const doc = handle.materialize("/", undefined, { - handle, - heads: undefined, - patchCallback, - textV2, - }) as Doc - return doc -} - -/** - * Load changes produced by {@link saveIncremental}, or partial changes - * - * @typeParam T - The type of the value which is contained in the document. - * Note that no validation is done to make sure this type is in - * fact the type of the contained value so be a bit careful - * @param data - The compressedchanges - * @param opts - an {@link ApplyOptions} - * - * This function is useful when staying up to date with a connected peer. - * Perhaps the other end sent you a full compresed document which you loaded - * with {@link load} and they're sending you the result of - * {@link getLastLocalChange} every time they make a change. 
- * - * Note that this function will succesfully load the results of {@link save} as - * well as {@link getLastLocalChange} or any other incremental change. - */ -export function loadIncremental( - doc: Doc, - data: Uint8Array, - opts?: ApplyOptions -): Doc { - if (!opts) { - opts = {} - } - const state = _state(doc) - if (state.heads) { - throw new RangeError( - "Attempting to change an out of date document - set at: " + _trace(doc) - ) - } - if (_is_proxy(doc)) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const heads = state.handle.getHeads() - state.handle.loadIncremental(data) - return progressDocument(doc, heads, opts.patchCallback || state.patchCallback) -} - -/** - * Export the contents of a document to a compressed format - * - * @param doc - The doc to save - * - * The returned bytes can be passed to {@link load} or {@link loadIncremental} - */ -export function save(doc: Doc): Uint8Array { - return _state(doc).handle.save() -} - -/** - * Merge `local` into `remote` - * @typeParam T - The type of values contained in each document - * @param local - The document to merge changes into - * @param remote - The document to merge changes from - * - * @returns - The merged document - * - * Often when you are merging documents you will also need to clone them. Both - * arguments to `merge` are frozen after the call so you can no longer call - * mutating methods (such as {@link change}) on them. The symtom of this will be - * an error which says "Attempting to change an out of date document". To - * overcome this call {@link clone} on the argument before passing it to {@link - * merge}. 
- */ -export function merge(local: Doc, remote: Doc): Doc { - const localState = _state(local) - - if (localState.heads) { - throw new RangeError( - "Attempting to change an out of date document - set at: " + _trace(local) - ) - } - const heads = localState.handle.getHeads() - const remoteState = _state(remote) - const changes = localState.handle.getChangesAdded(remoteState.handle) - localState.handle.applyChanges(changes) - return progressDocument(local, heads, localState.patchCallback) -} - -/** - * Get the actor ID associated with the document - */ -export function getActorId(doc: Doc): ActorId { - const state = _state(doc) - return state.handle.getActorId() -} - -/** - * The type of conflicts for particular key or index - * - * Maps and sequences in automerge can contain conflicting values for a - * particular key or index. In this case {@link getConflicts} can be used to - * obtain a `Conflicts` representing the multiple values present for the property - * - * A `Conflicts` is a map from a unique (per property or index) key to one of - * the possible conflicting values for the given property. - */ -type Conflicts = { [key: string]: AutomergeValue } - -/** - * Get the conflicts associated with a property - * - * The values of properties in a map in automerge can be conflicted if there - * are concurrent "put" operations to the same key. Automerge chooses one value - * arbitrarily (but deterministically, any two nodes who have the same set of - * changes will choose the same value) from the set of conflicting values to - * present as the value of the key. - * - * Sometimes you may want to examine these conflicts, in this case you can use - * {@link getConflicts} to get the conflicts for the key. 
- * - * @example - * ``` - * import * as automerge from "@automerge/automerge" - * - * type Profile = { - * pets: Array<{name: string, type: string}> - * } - * - * let doc1 = automerge.init("aaaa") - * doc1 = automerge.change(doc1, d => { - * d.pets = [{name: "Lassie", type: "dog"}] - * }) - * let doc2 = automerge.init("bbbb") - * doc2 = automerge.merge(doc2, automerge.clone(doc1)) - * - * doc2 = automerge.change(doc2, d => { - * d.pets[0].name = "Beethoven" - * }) - * - * doc1 = automerge.change(doc1, d => { - * d.pets[0].name = "Babe" - * }) - * - * const doc3 = automerge.merge(doc1, doc2) - * - * // Note that here we pass `doc3.pets`, not `doc3` - * let conflicts = automerge.getConflicts(doc3.pets[0], "name") - * - * // The two conflicting values are the keys of the conflicts object - * assert.deepEqual(Object.values(conflicts), ["Babe", Beethoven"]) - * ``` - */ -export function getConflicts( - doc: Doc, - prop: Prop -): Conflicts | undefined { - const state = _state(doc, false) - if (state.textV2) { - throw new Error("use unstable.getConflicts for an unstable document") - } - const objectId = _obj(doc) - if (objectId != null) { - return stableConflictAt(state.handle, objectId, prop) - } else { - return undefined - } -} - -/** - * Get the binary representation of the last change which was made to this doc - * - * This is most useful when staying in sync with other peers, every time you - * make a change locally via {@link change} you immediately call {@link - * getLastLocalChange} and send the result over the network to other peers. - */ -export function getLastLocalChange(doc: Doc): Change | undefined { - const state = _state(doc) - return state.handle.getLastLocalChange() || undefined -} - -/** - * Return the object ID of an arbitrary javascript value - * - * This is useful to determine if something is actually an automerge document, - * if `doc` is not an automerge document this will return null. 
- */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -export function getObjectId(doc: any, prop?: Prop): ObjID | null { - if (prop) { - const state = _state(doc, false) - const objectId = _obj(doc) - if (!state || !objectId) { - return null - } - return state.handle.get(objectId, prop) as ObjID - } else { - return _obj(doc) - } -} - -/** - * Get the changes which are in `newState` but not in `oldState`. The returned - * changes can be loaded in `oldState` via {@link applyChanges}. - * - * Note that this will crash if there are changes in `oldState` which are not in `newState`. - */ -export function getChanges(oldState: Doc, newState: Doc): Change[] { - const n = _state(newState) - return n.handle.getChanges(getHeads(oldState)) -} - -/** - * Get all the changes in a document - * - * This is different to {@link save} because the output is an array of changes - * which can be individually applied via {@link applyChanges}` - * - */ -export function getAllChanges(doc: Doc): Change[] { - const state = _state(doc) - return state.handle.getChanges([]) -} - -/** - * Apply changes received from another document - * - * `doc` will be updated to reflect the `changes`. If there are changes which - * we do not have dependencies for yet those will be stored in the document and - * applied when the depended on changes arrive. - * - * You can use the {@link ApplyOptions} to pass a patchcallback which will be - * informed of any changes which occur as a result of applying the changes - * - */ -export function applyChanges( - doc: Doc, - changes: Change[], - opts?: ApplyOptions -): [Doc] { - const state = _state(doc) - if (!opts) { - opts = {} - } - if (state.heads) { - throw new RangeError( - "Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy." 
- ) - } - if (_is_proxy(doc)) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const heads = state.handle.getHeads() - state.handle.applyChanges(changes) - state.heads = heads - return [ - progressDocument(doc, heads, opts.patchCallback || state.patchCallback), - ] -} - -/** @hidden */ -export function getHistory(doc: Doc): State[] { - const textV2 = _state(doc).textV2 - const history = getAllChanges(doc) - return history.map((change, index) => ({ - get change() { - return decodeChange(change) - }, - get snapshot() { - const [state] = applyChanges( - init({ enableTextV2: textV2 }), - history.slice(0, index + 1) - ) - return state - }, - })) -} - -/** @hidden */ -// FIXME : no tests -// FIXME can we just use deep equals now? -export function equals(val1: unknown, val2: unknown): boolean { - if (!isObject(val1) || !isObject(val2)) return val1 === val2 - const keys1 = Object.keys(val1).sort(), - keys2 = Object.keys(val2).sort() - if (keys1.length !== keys2.length) return false - for (let i = 0; i < keys1.length; i++) { - if (keys1[i] !== keys2[i]) return false - if (!equals(val1[keys1[i]], val2[keys2[i]])) return false - } - return true -} - -/** - * encode a {@link SyncState} into binary to send over the network - * - * @group sync - * */ -export function encodeSyncState(state: SyncState): Uint8Array { - const sync = ApiHandler.importSyncState(state) - const result = ApiHandler.encodeSyncState(sync) - sync.free() - return result -} - -/** - * Decode some binary data into a {@link SyncState} - * - * @group sync - */ -export function decodeSyncState(state: Uint8Array): SyncState { - const sync = ApiHandler.decodeSyncState(state) - const result = ApiHandler.exportSyncState(sync) - sync.free() - return result as SyncState -} - -/** - * Generate a sync message to send to the peer represented by `inState` - * @param doc - The doc to generate messages about - * @param inState - The {@link SyncState} representing the peer we are talking to - * - * 
@group sync - * - * @returns An array of `[newSyncState, syncMessage | null]` where - * `newSyncState` should replace `inState` and `syncMessage` should be sent to - * the peer if it is not null. If `syncMessage` is null then we are up to date. - */ -export function generateSyncMessage( - doc: Doc, - inState: SyncState -): [SyncState, SyncMessage | null] { - const state = _state(doc) - const syncState = ApiHandler.importSyncState(inState) - const message = state.handle.generateSyncMessage(syncState) - const outState = ApiHandler.exportSyncState(syncState) as SyncState - return [outState, message] -} - -/** - * Update a document and our sync state on receiving a sync message - * - * @group sync - * - * @param doc - The doc the sync message is about - * @param inState - The {@link SyncState} for the peer we are communicating with - * @param message - The message which was received - * @param opts - Any {@link ApplyOption}s, used for passing a - * {@link PatchCallback} which will be informed of any changes - * in `doc` which occur because of the received sync message. - * - * @returns An array of `[newDoc, newSyncState, syncMessage | null]` where - * `newDoc` is the updated state of `doc`, `newSyncState` should replace - * `inState` and `syncMessage` should be sent to the peer if it is not null. If - * `syncMessage` is null then we are up to date. - */ -export function receiveSyncMessage( - doc: Doc, - inState: SyncState, - message: SyncMessage, - opts?: ApplyOptions -): [Doc, SyncState, null] { - const syncState = ApiHandler.importSyncState(inState) - if (!opts) { - opts = {} - } - const state = _state(doc) - if (state.heads) { - throw new RangeError( - "Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy." 
- ) - } - if (_is_proxy(doc)) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const heads = state.handle.getHeads() - state.handle.receiveSyncMessage(syncState, message) - const outSyncState = ApiHandler.exportSyncState(syncState) as SyncState - return [ - progressDocument(doc, heads, opts.patchCallback || state.patchCallback), - outSyncState, - null, - ] -} - -/** - * Create a new, blank {@link SyncState} - * - * When communicating with a peer for the first time use this to generate a new - * {@link SyncState} for them - * - * @group sync - */ -export function initSyncState(): SyncState { - return ApiHandler.exportSyncState(ApiHandler.initSyncState()) as SyncState -} - -/** @hidden */ -export function encodeChange(change: ChangeToEncode): Change { - return ApiHandler.encodeChange(change) -} - -/** @hidden */ -export function decodeChange(data: Change): DecodedChange { - return ApiHandler.decodeChange(data) -} - -/** @hidden */ -export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage { - return ApiHandler.encodeSyncMessage(message) -} - -/** @hidden */ -export function decodeSyncMessage(message: SyncMessage): DecodedSyncMessage { - return ApiHandler.decodeSyncMessage(message) -} - -/** - * Get any changes in `doc` which are not dependencies of `heads` - */ -export function getMissingDeps(doc: Doc, heads: Heads): Heads { - const state = _state(doc) - return state.handle.getMissingDeps(heads) -} - -/** - * Get the hashes of the heads of this document - */ -export function getHeads(doc: Doc): Heads { - const state = _state(doc) - return state.heads || state.handle.getHeads() -} - -/** @hidden */ -export function dump(doc: Doc) { - const state = _state(doc) - state.handle.dump() -} - -/** @hidden */ -export function toJS(doc: Doc): T { - const state = _state(doc) - const enabled = state.handle.enableFreeze(false) - const result = state.handle.materialize() - state.handle.enableFreeze(enabled) - return result as T -} - 
-export function isAutomerge(doc: unknown): boolean { - if (typeof doc == "object" && doc !== null) { - return getObjectId(doc) === "_root" && !!Reflect.get(doc, STATE) - } else { - return false - } -} - -function isObject(obj: unknown): obj is Record { - return typeof obj === "object" && obj !== null -} - -export type { - API, - SyncState, - ActorId, - Conflicts, - Prop, - Change, - ObjID, - DecodedChange, - DecodedSyncMessage, - Heads, - MaterializeValue, -} diff --git a/javascript/src/text.ts b/javascript/src/text.ts deleted file mode 100644 index b01bd7db..00000000 --- a/javascript/src/text.ts +++ /dev/null @@ -1,224 +0,0 @@ -import type { Value } from "@automerge/automerge-wasm" -import { TEXT, STATE } from "./constants" -import type { InternalState } from "./internal_state" - -export class Text { - //eslint-disable-next-line @typescript-eslint/no-explicit-any - elems: Array - str: string | undefined - //eslint-disable-next-line @typescript-eslint/no-explicit-any - spans: Array | undefined; - //eslint-disable-next-line @typescript-eslint/no-explicit-any - [STATE]?: InternalState - - constructor(text?: string | string[] | Value[]) { - if (typeof text === "string") { - this.elems = [...text] - } else if (Array.isArray(text)) { - this.elems = text - } else if (text === undefined) { - this.elems = [] - } else { - throw new TypeError(`Unsupported initial value for Text: ${text}`) - } - Reflect.defineProperty(this, TEXT, { value: true }) - } - - get length(): number { - return this.elems.length - } - - //eslint-disable-next-line @typescript-eslint/no-explicit-any - get(index: number): any { - return this.elems[index] - } - - /** - * Iterates over the text elements character by character, including any - * inline objects. 
- */ - [Symbol.iterator]() { - const elems = this.elems - let index = -1 - return { - next() { - index += 1 - if (index < elems.length) { - return { done: false, value: elems[index] } - } else { - return { done: true } - } - }, - } - } - - /** - * Returns the content of the Text object as a simple string, ignoring any - * non-character elements. - */ - toString(): string { - if (!this.str) { - // Concatting to a string is faster than creating an array and then - // .join()ing for small (<100KB) arrays. - // https://jsperf.com/join-vs-loop-w-type-test - this.str = "" - for (const elem of this.elems) { - if (typeof elem === "string") this.str += elem - else this.str += "\uFFFC" - } - } - return this.str - } - - /** - * Returns the content of the Text object as a sequence of strings, - * interleaved with non-character elements. - * - * For example, the value `['a', 'b', {x: 3}, 'c', 'd']` has spans: - * `=> ['ab', {x: 3}, 'cd']` - */ - toSpans(): Array { - if (!this.spans) { - this.spans = [] - let chars = "" - for (const elem of this.elems) { - if (typeof elem === "string") { - chars += elem - } else { - if (chars.length > 0) { - this.spans.push(chars) - chars = "" - } - this.spans.push(elem) - } - } - if (chars.length > 0) { - this.spans.push(chars) - } - } - return this.spans - } - - /** - * Returns the content of the Text object as a simple string, so that the - * JSON serialization of an Automerge document represents text nicely. - */ - toJSON(): string { - return this.toString() - } - - /** - * Updates the list item at position `index` to a new value `value`. - */ - set(index: number, value: Value) { - if (this[STATE]) { - throw new RangeError( - "object cannot be modified outside of a change block" - ) - } - this.elems[index] = value - } - - /** - * Inserts new list items `values` starting at position `index`. 
- */ - insertAt(index: number, ...values: Array) { - if (this[STATE]) { - throw new RangeError( - "object cannot be modified outside of a change block" - ) - } - this.elems.splice(index, 0, ...values) - } - - /** - * Deletes `numDelete` list items starting at position `index`. - * if `numDelete` is not given, one item is deleted. - */ - deleteAt(index: number, numDelete = 1) { - if (this[STATE]) { - throw new RangeError( - "object cannot be modified outside of a change block" - ) - } - this.elems.splice(index, numDelete) - } - - map(callback: (e: Value | object) => T) { - this.elems.map(callback) - } - - lastIndexOf(searchElement: Value, fromIndex?: number) { - this.elems.lastIndexOf(searchElement, fromIndex) - } - - concat(other: Text): Text { - return new Text(this.elems.concat(other.elems)) - } - - every(test: (v: Value) => boolean): boolean { - return this.elems.every(test) - } - - filter(test: (v: Value) => boolean): Text { - return new Text(this.elems.filter(test)) - } - - find(test: (v: Value) => boolean): Value | undefined { - return this.elems.find(test) - } - - findIndex(test: (v: Value) => boolean): number | undefined { - return this.elems.findIndex(test) - } - - forEach(f: (v: Value) => undefined) { - this.elems.forEach(f) - } - - includes(elem: Value): boolean { - return this.elems.includes(elem) - } - - indexOf(elem: Value) { - return this.elems.indexOf(elem) - } - - join(sep?: string): string { - return this.elems.join(sep) - } - - reduce( - f: ( - previousValue: Value, - currentValue: Value, - currentIndex: number, - array: Value[] - ) => Value - ) { - this.elems.reduce(f) - } - - reduceRight( - f: ( - previousValue: Value, - currentValue: Value, - currentIndex: number, - array: Value[] - ) => Value - ) { - this.elems.reduceRight(f) - } - - slice(start?: number, end?: number) { - new Text(this.elems.slice(start, end)) - } - - some(test: (arg: Value) => boolean): boolean { - return this.elems.some(test) - } - - toLocaleString() { - this.toString() - 
} -} diff --git a/javascript/src/types.ts b/javascript/src/types.ts deleted file mode 100644 index beb5cf70..00000000 --- a/javascript/src/types.ts +++ /dev/null @@ -1,46 +0,0 @@ -export { Text } from "./text" -import { Text } from "./text" -export { Counter } from "./counter" -export { Int, Uint, Float64 } from "./numbers" - -import { Counter } from "./counter" -import type { Patch } from "@automerge/automerge-wasm" -export type { Patch } from "@automerge/automerge-wasm" - -export type AutomergeValue = - | ScalarValue - | { [key: string]: AutomergeValue } - | Array - | Text -export type MapValue = { [key: string]: AutomergeValue } -export type ListValue = Array -export type ScalarValue = - | string - | number - | null - | boolean - | Date - | Counter - | Uint8Array - -/** - * An automerge document. - * @typeParam T - The type of the value contained in this document - * - * Note that this provides read only access to the fields of the value. To - * modify the value use {@link change} - */ -export type Doc = { readonly [P in keyof T]: T[P] } - -/** - * Callback which is called by various methods in this library to notify the - * user of what changes have been made. - * @param patch - A description of the changes made - * @param before - The document before the change was made - * @param after - The document after the change was made - */ -export type PatchCallback = ( - patches: Array, - before: Doc, - after: Doc -) => void diff --git a/javascript/src/unstable.ts b/javascript/src/unstable.ts deleted file mode 100644 index 7c73afb9..00000000 --- a/javascript/src/unstable.ts +++ /dev/null @@ -1,294 +0,0 @@ -/** - * # The unstable API - * - * This module contains new features we are working on which are either not yet - * ready for a stable release and/or which will result in backwards incompatible - * API changes. 
The API of this module may change in arbitrary ways between - * point releases - we will always document what these changes are in the - * [CHANGELOG](#changelog) below, but only depend on this module if you are prepared to deal - * with frequent changes. - * - * ## Differences from stable - * - * In the stable API text objects are represented using the {@link Text} class. - * This means you must decide up front whether your string data might need - * concurrent merges in the future and if you change your mind you have to - * figure out how to migrate your data. In the unstable API the `Text` class is - * gone and all `string`s are represented using the text CRDT, allowing for - * concurrent changes. Modifying a string is done using the {@link splice} - * function. You can still access the old behaviour of strings which do not - * support merging behaviour via the {@link RawString} class. - * - * This leads to the following differences from `stable`: - * - * * There is no `unstable.Text` class, all strings are text objects - * * Reading strings in an `unstable` document is the same as reading any other - * javascript string - * * To modify strings in an `unstable` document use {@link splice} - * * The {@link AutomergeValue} type does not include the {@link Text} - * class but the {@link RawString} class is included in the {@link ScalarValue} - * type - * - * ## CHANGELOG - * * Introduce this module to expose the new API which has no `Text` class - * - * - * @module - */ - -export { - Counter, - type Doc, - Int, - Uint, - Float64, - type Patch, - type PatchCallback, - type AutomergeValue, - type ScalarValue, -} from "./unstable_types" - -import type { PatchCallback } from "./stable" - -import { type UnstableConflicts as Conflicts } from "./conflicts" -import { unstableConflictAt } from "./conflicts" - -export type { - PutPatch, - DelPatch, - SpliceTextPatch, - InsertPatch, - IncPatch, - SyncMessage, -} from "@automerge/automerge-wasm" - -export type { ChangeOptions, 
ApplyOptions, ChangeFn } from "./stable" -export { - view, - free, - getHeads, - change, - emptyChange, - loadIncremental, - save, - merge, - getActorId, - getLastLocalChange, - getChanges, - getAllChanges, - applyChanges, - getHistory, - equals, - encodeSyncState, - decodeSyncState, - generateSyncMessage, - receiveSyncMessage, - initSyncState, - encodeChange, - decodeChange, - encodeSyncMessage, - decodeSyncMessage, - getMissingDeps, - dump, - toJS, - isAutomerge, - getObjectId, -} from "./stable" - -export type InitOptions = { - /** The actor ID to use for this document, a random one will be generated if `null` is passed */ - actor?: ActorId - freeze?: boolean - /** A callback which will be called with the initial patch once the document has finished loading */ - patchCallback?: PatchCallback -} - -import { ActorId, Doc } from "./stable" -import * as stable from "./stable" -export { RawString } from "./raw_string" - -/** @hidden */ -export const getBackend = stable.getBackend - -import { _is_proxy, _state, _obj } from "./internal_state" - -/** - * Create a new automerge document - * - * @typeParam T - The type of value contained in the document. This will be the - * type that is passed to the change closure in {@link change} - * @param _opts - Either an actorId or an {@link InitOptions} (which may - * contain an actorId). If this is null the document will be initialised with a - * random actor ID - */ -export function init(_opts?: ActorId | InitOptions): Doc { - const opts = importOpts(_opts) - opts.enableTextV2 = true - return stable.init(opts) -} - -/** - * Make a full writable copy of an automerge document - * - * @remarks - * Unlike {@link view} this function makes a full copy of the memory backing - * the document and can thus be passed to {@link change}. It also generates a - * new actor ID so that changes made in the new document do not create duplicate - * sequence numbers with respect to the old document. 
If you need control over - * the actor ID which is generated you can pass the actor ID as the second - * argument - * - * @typeParam T - The type of the value contained in the document - * @param doc - The document to clone - * @param _opts - Either an actor ID to use for the new doc or an {@link InitOptions} - */ -export function clone( - doc: Doc, - _opts?: ActorId | InitOptions -): Doc { - const opts = importOpts(_opts) - opts.enableTextV2 = true - return stable.clone(doc, opts) -} - -/** - * Create an automerge document from a POJO - * - * @param initialState - The initial state which will be copied into the document - * @typeParam T - The type of the value passed to `from` _and_ the type the resulting document will contain - * @typeParam actor - The actor ID of the resulting document, if this is null a random actor ID will be used - * - * @example - * ``` - * const doc = automerge.from({ - * tasks: [ - * {description: "feed dogs", done: false} - * ] - * }) - * ``` - */ -export function from>( - initialState: T | Doc, - _opts?: ActorId | InitOptions -): Doc { - const opts = importOpts(_opts) - opts.enableTextV2 = true - return stable.from(initialState, opts) -} - -/** - * Load an automerge document from a compressed document produce by {@link save} - * - * @typeParam T - The type of the value which is contained in the document. - * Note that no validation is done to make sure this type is in - * fact the type of the contained value so be a bit careful - * @param data - The compressed document - * @param _opts - Either an actor ID or some {@link InitOptions}, if the actor - * ID is null a random actor ID will be created - * - * Note that `load` will throw an error if passed incomplete content (for - * example if you are receiving content over the network and don't know if you - * have the complete document yet). If you need to handle incomplete content use - * {@link init} followed by {@link loadIncremental}. 
- */ -export function load( - data: Uint8Array, - _opts?: ActorId | InitOptions -): Doc { - const opts = importOpts(_opts) - opts.enableTextV2 = true - return stable.load(data, opts) -} - -function importOpts( - _actor?: ActorId | InitOptions -): stable.InitOptions { - if (typeof _actor === "object") { - return _actor - } else { - return { actor: _actor } - } -} - -export function splice( - doc: Doc, - prop: stable.Prop, - index: number, - del: number, - newText?: string -) { - if (!_is_proxy(doc)) { - throw new RangeError("object cannot be modified outside of a change block") - } - const state = _state(doc, false) - const objectId = _obj(doc) - if (!objectId) { - throw new RangeError("invalid object for splice") - } - const value = `${objectId}/${prop}` - try { - return state.handle.splice(value, index, del, newText) - } catch (e) { - throw new RangeError(`Cannot splice: ${e}`) - } -} - -/** - * Get the conflicts associated with a property - * - * The values of properties in a map in automerge can be conflicted if there - * are concurrent "put" operations to the same key. Automerge chooses one value - * arbitrarily (but deterministically, any two nodes who have the same set of - * changes will choose the same value) from the set of conflicting values to - * present as the value of the key. - * - * Sometimes you may want to examine these conflicts, in this case you can use - * {@link getConflicts} to get the conflicts for the key. 
- * - * @example - * ``` - * import * as automerge from "@automerge/automerge" - * - * type Profile = { - * pets: Array<{name: string, type: string}> - * } - * - * let doc1 = automerge.init("aaaa") - * doc1 = automerge.change(doc1, d => { - * d.pets = [{name: "Lassie", type: "dog"}] - * }) - * let doc2 = automerge.init("bbbb") - * doc2 = automerge.merge(doc2, automerge.clone(doc1)) - * - * doc2 = automerge.change(doc2, d => { - * d.pets[0].name = "Beethoven" - * }) - * - * doc1 = automerge.change(doc1, d => { - * d.pets[0].name = "Babe" - * }) - * - * const doc3 = automerge.merge(doc1, doc2) - * - * // Note that here we pass `doc3.pets`, not `doc3` - * let conflicts = automerge.getConflicts(doc3.pets[0], "name") - * - * // The two conflicting values are the keys of the conflicts object - * assert.deepEqual(Object.values(conflicts), ["Babe", Beethoven"]) - * ``` - */ -export function getConflicts( - doc: Doc, - prop: stable.Prop -): Conflicts | undefined { - const state = _state(doc, false) - if (!state.textV2) { - throw new Error("use getConflicts for a stable document") - } - const objectId = _obj(doc) - if (objectId != null) { - return unstableConflictAt(state.handle, objectId, prop) - } else { - return undefined - } -} diff --git a/javascript/src/unstable_types.ts b/javascript/src/unstable_types.ts deleted file mode 100644 index 071e2cc4..00000000 --- a/javascript/src/unstable_types.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { Counter } from "./types" - -export { - Counter, - type Doc, - Int, - Uint, - Float64, - type Patch, - type PatchCallback, -} from "./types" - -import { RawString } from "./raw_string" -export { RawString } from "./raw_string" - -export type AutomergeValue = - | ScalarValue - | { [key: string]: AutomergeValue } - | Array -export type MapValue = { [key: string]: AutomergeValue } -export type ListValue = Array -export type ScalarValue = - | string - | number - | null - | boolean - | Date - | Counter - | Uint8Array - | RawString diff --git 
a/javascript/src/uuid.deno.ts b/javascript/src/uuid.deno.ts deleted file mode 100644 index 04c9b93d..00000000 --- a/javascript/src/uuid.deno.ts +++ /dev/null @@ -1,26 +0,0 @@ -import * as v4 from "https://deno.land/x/uuid@v0.1.2/mod.ts" - -// this file is a deno only port of the uuid module - -function defaultFactory() { - return v4.uuid().replace(/-/g, "") -} - -let factory = defaultFactory - -interface UUIDFactory extends Function { - setFactory(f: typeof factory): void - reset(): void -} - -export const uuid: UUIDFactory = () => { - return factory() -} - -uuid.setFactory = newFactory => { - factory = newFactory -} - -uuid.reset = () => { - factory = defaultFactory -} diff --git a/javascript/src/uuid.ts b/javascript/src/uuid.ts deleted file mode 100644 index 421ddf9d..00000000 --- a/javascript/src/uuid.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { v4 } from "uuid" - -function defaultFactory() { - return v4().replace(/-/g, "") -} - -let factory = defaultFactory - -interface UUIDFactory extends Function { - setFactory(f: typeof factory): void - reset(): void -} - -export const uuid: UUIDFactory = () => { - return factory() -} - -uuid.setFactory = newFactory => { - factory = newFactory -} - -uuid.reset = () => { - factory = defaultFactory -} diff --git a/javascript/test/basic_test.ts b/javascript/test/basic_test.ts deleted file mode 100644 index e34484c4..00000000 --- a/javascript/test/basic_test.ts +++ /dev/null @@ -1,488 +0,0 @@ -import * as assert from "assert" -import { unstable as Automerge } from "../src" -import * as WASM from "@automerge/automerge-wasm" - -describe("Automerge", () => { - describe("basics", () => { - it("should init clone and free", () => { - let doc1 = Automerge.init() - let doc2 = Automerge.clone(doc1) - - // this is only needed if weakrefs are not supported - Automerge.free(doc1) - Automerge.free(doc2) - }) - - it("should be able to make a view with specifc heads", () => { - let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, d 
=> (d.value = 1)) - let heads2 = Automerge.getHeads(doc2) - let doc3 = Automerge.change(doc2, d => (d.value = 2)) - let doc2_v2 = Automerge.view(doc3, heads2) - assert.deepEqual(doc2, doc2_v2) - let doc2_v2_clone = Automerge.clone(doc2, "aabbcc") - assert.deepEqual(doc2, doc2_v2_clone) - assert.equal(Automerge.getActorId(doc2_v2_clone), "aabbcc") - }) - - it("should allow you to change a clone of a view", () => { - let doc1 = Automerge.init() - doc1 = Automerge.change(doc1, d => (d.key = "value")) - let heads = Automerge.getHeads(doc1) - doc1 = Automerge.change(doc1, d => (d.key = "value2")) - let fork = Automerge.clone(Automerge.view(doc1, heads)) - assert.deepEqual(fork, { key: "value" }) - fork = Automerge.change(fork, d => (d.key = "value3")) - assert.deepEqual(fork, { key: "value3" }) - }) - - it("handle basic set and read on root object", () => { - let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, d => { - d.hello = "world" - d.big = "little" - d.zip = "zop" - d.app = "dap" - assert.deepEqual(d, { - hello: "world", - big: "little", - zip: "zop", - app: "dap", - }) - }) - assert.deepEqual(doc2, { - hello: "world", - big: "little", - zip: "zop", - app: "dap", - }) - }) - - it("should be able to insert and delete a large number of properties", () => { - let doc = Automerge.init() - - doc = Automerge.change(doc, doc => { - doc["k1"] = true - }) - - for (let idx = 1; idx <= 200; idx++) { - doc = Automerge.change(doc, doc => { - delete doc["k" + idx] - doc["k" + (idx + 1)] = true - assert(Object.keys(doc).length == 1) - }) - } - }) - - it("can detect an automerge doc with isAutomerge()", () => { - const doc1 = Automerge.from({ sub: { object: true } }) - assert(Automerge.isAutomerge(doc1)) - assert(!Automerge.isAutomerge(doc1.sub)) - assert(!Automerge.isAutomerge("String")) - assert(!Automerge.isAutomerge({ sub: { object: true } })) - assert(!Automerge.isAutomerge(undefined)) - const jsObj = Automerge.toJS(doc1) - assert(!Automerge.isAutomerge(jsObj)) 
- assert.deepEqual(jsObj, doc1) - }) - - it("it should recursively freeze the document if requested", () => { - let doc1 = Automerge.init({ freeze: true }) - let doc2 = Automerge.init() - - assert(Object.isFrozen(doc1)) - assert(!Object.isFrozen(doc2)) - - // will also freeze sub objects - doc1 = Automerge.change( - doc1, - doc => (doc.book = { title: "how to win friends" }) - ) - doc2 = Automerge.merge(doc2, doc1) - assert(Object.isFrozen(doc1)) - assert(Object.isFrozen(doc1.book)) - assert(!Object.isFrozen(doc2)) - assert(!Object.isFrozen(doc2.book)) - - // works on from - let doc3 = Automerge.from({ sub: { obj: "inner" } }, { freeze: true }) - assert(Object.isFrozen(doc3)) - assert(Object.isFrozen(doc3.sub)) - - // works on load - let doc4 = Automerge.load(Automerge.save(doc3), { freeze: true }) - assert(Object.isFrozen(doc4)) - assert(Object.isFrozen(doc4.sub)) - - // follows clone - let doc5 = Automerge.clone(doc4) - assert(Object.isFrozen(doc5)) - assert(Object.isFrozen(doc5.sub)) - - // toJS does not freeze - let exported = Automerge.toJS(doc5) - assert(!Object.isFrozen(exported)) - }) - - it("handle basic sets over many changes", () => { - let doc1 = Automerge.init() - let timestamp = new Date() - let counter = new Automerge.Counter(100) - let bytes = new Uint8Array([10, 11, 12]) - let doc2 = Automerge.change(doc1, d => { - d.hello = "world" - }) - let doc3 = Automerge.change(doc2, d => { - d.counter1 = counter - }) - let doc4 = Automerge.change(doc3, d => { - d.timestamp1 = timestamp - }) - let doc5 = Automerge.change(doc4, d => { - d.app = null - }) - let doc6 = Automerge.change(doc5, d => { - d.bytes1 = bytes - }) - let doc7 = Automerge.change(doc6, d => { - d.uint = new Automerge.Uint(1) - d.int = new Automerge.Int(-1) - d.float64 = new Automerge.Float64(5.5) - d.number1 = 100 - d.number2 = -45.67 - d.true = true - d.false = false - }) - - assert.deepEqual(doc7, { - hello: "world", - true: true, - false: false, - int: -1, - uint: 1, - float64: 5.5, - 
number1: 100, - number2: -45.67, - counter1: counter, - timestamp1: timestamp, - bytes1: bytes, - app: null, - }) - - let changes = Automerge.getAllChanges(doc7) - let t1 = Automerge.init() - let [t2] = Automerge.applyChanges(t1, changes) - assert.deepEqual(doc7, t2) - }) - - it("handle overwrites to values", () => { - let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, d => { - d.hello = "world1" - }) - let doc3 = Automerge.change(doc2, d => { - d.hello = "world2" - }) - let doc4 = Automerge.change(doc3, d => { - d.hello = "world3" - }) - let doc5 = Automerge.change(doc4, d => { - d.hello = "world4" - }) - assert.deepEqual(doc5, { hello: "world4" }) - }) - - it("handle set with object value", () => { - let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, d => { - d.subobj = { hello: "world", subsubobj: { zip: "zop" } } - }) - assert.deepEqual(doc2, { - subobj: { hello: "world", subsubobj: { zip: "zop" } }, - }) - }) - - it("handle simple list creation", () => { - let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, d => (d.list = [])) - assert.deepEqual(doc2, { list: [] }) - }) - - it("handle simple lists", () => { - let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, d => { - d.list = [1, 2, 3] - }) - assert.deepEqual(doc2.list.length, 3) - assert.deepEqual(doc2.list[0], 1) - assert.deepEqual(doc2.list[1], 2) - assert.deepEqual(doc2.list[2], 3) - assert.deepEqual(doc2, { list: [1, 2, 3] }) - // assert.deepStrictEqual(Automerge.toJS(doc2), { list: [1,2,3] }) - - let doc3 = Automerge.change(doc2, d => { - d.list[1] = "a" - }) - - assert.deepEqual(doc3.list.length, 3) - assert.deepEqual(doc3.list[0], 1) - assert.deepEqual(doc3.list[1], "a") - assert.deepEqual(doc3.list[2], 3) - assert.deepEqual(doc3, { list: [1, "a", 3] }) - }) - it("handle simple lists", () => { - let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, d => { - d.list = [1, 2, 3] - }) - let changes = Automerge.getChanges(doc1, doc2) - let docB1 
= Automerge.init() - let [docB2] = Automerge.applyChanges(docB1, changes) - assert.deepEqual(docB2, doc2) - }) - it("handle text", () => { - let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, d => { - d.list = "hello" - Automerge.splice(d, "list", 2, 0, "Z") - }) - let changes = Automerge.getChanges(doc1, doc2) - let docB1 = Automerge.init() - let [docB2] = Automerge.applyChanges(docB1, changes) - assert.deepEqual(docB2, doc2) - }) - - it("handle non-text strings", () => { - let doc1 = WASM.create(true) - doc1.put("_root", "text", "hello world") - let doc2 = Automerge.load(doc1.save()) - assert.throws(() => { - Automerge.change(doc2, d => { - Automerge.splice(d, "text", 1, 0, "Z") - }) - }, /Cannot splice/) - }) - - it("have many list methods", () => { - let doc1 = Automerge.from({ list: [1, 2, 3] }) - assert.deepEqual(doc1, { list: [1, 2, 3] }) - let doc2 = Automerge.change(doc1, d => { - d.list.splice(1, 1, 9, 10) - }) - assert.deepEqual(doc2, { list: [1, 9, 10, 3] }) - let doc3 = Automerge.change(doc2, d => { - d.list.push(11, 12) - }) - assert.deepEqual(doc3, { list: [1, 9, 10, 3, 11, 12] }) - let doc4 = Automerge.change(doc3, d => { - d.list.unshift(2, 2) - }) - assert.deepEqual(doc4, { list: [2, 2, 1, 9, 10, 3, 11, 12] }) - let doc5 = Automerge.change(doc4, d => { - d.list.shift() - }) - assert.deepEqual(doc5, { list: [2, 1, 9, 10, 3, 11, 12] }) - let doc6 = Automerge.change(doc5, d => { - d.list.insertAt(3, 100, 101) - }) - assert.deepEqual(doc6, { list: [2, 1, 9, 100, 101, 10, 3, 11, 12] }) - }) - - it("allows access to the backend", () => { - let doc = Automerge.init() - assert.deepEqual(Object.keys(Automerge.getBackend(doc)), ["ptr"]) - }) - - it("lists and text have indexof", () => { - let doc = Automerge.from({ - list: [0, 1, 2, 3, 4, 5, 6], - text: "hello world", - }) - assert.deepEqual(doc.list.indexOf(5), 5) - assert.deepEqual(doc.text.indexOf("world"), 6) - }) - }) - - describe("emptyChange", () => { - it("should generate a hash", () => 
{ - let doc = Automerge.init() - doc = Automerge.change(doc, d => { - d.key = "value" - }) - Automerge.save(doc) - let headsBefore = Automerge.getHeads(doc) - headsBefore.sort() - doc = Automerge.emptyChange(doc, "empty change") - let headsAfter = Automerge.getHeads(doc) - headsAfter.sort() - assert.notDeepEqual(headsBefore, headsAfter) - }) - }) - - describe("proxy lists", () => { - it("behave like arrays", () => { - let doc = Automerge.from({ - chars: ["a", "b", "c"], - numbers: [20, 3, 100], - repeats: [20, 20, 3, 3, 3, 3, 100, 100], - }) - let r1: Array = [] - doc = Automerge.change(doc, d => { - assert.deepEqual((d.chars as any[]).concat([1, 2]), [ - "a", - "b", - "c", - 1, - 2, - ]) - assert.deepEqual( - d.chars.map(n => n + "!"), - ["a!", "b!", "c!"] - ) - assert.deepEqual( - d.numbers.map(n => n + 10), - [30, 13, 110] - ) - assert.deepEqual(d.numbers.toString(), "20,3,100") - assert.deepEqual(d.numbers.toLocaleString(), "20,3,100") - assert.deepEqual( - d.numbers.forEach((n: number) => r1.push(n)), - undefined - ) - assert.deepEqual( - d.numbers.every(n => n > 1), - true - ) - assert.deepEqual( - d.numbers.every(n => n > 10), - false - ) - assert.deepEqual( - d.numbers.filter(n => n > 10), - [20, 100] - ) - assert.deepEqual( - d.repeats.find(n => n < 10), - 3 - ) - assert.deepEqual( - d.repeats.find(n => n < 10), - 3 - ) - assert.deepEqual( - d.repeats.find(n => n < 0), - undefined - ) - assert.deepEqual( - d.repeats.findIndex(n => n < 10), - 2 - ) - assert.deepEqual( - d.repeats.findIndex(n => n < 0), - -1 - ) - assert.deepEqual( - d.repeats.findIndex(n => n < 10), - 2 - ) - assert.deepEqual( - d.repeats.findIndex(n => n < 0), - -1 - ) - assert.deepEqual(d.numbers.includes(3), true) - assert.deepEqual(d.numbers.includes(-3), false) - assert.deepEqual(d.numbers.join("|"), "20|3|100") - assert.deepEqual(d.numbers.join(), "20,3,100") - assert.deepEqual( - d.numbers.some(f => f === 3), - true - ) - assert.deepEqual( - d.numbers.some(f => f < 0), - false - ) - 
assert.deepEqual( - d.numbers.reduce((sum, n) => sum + n, 100), - 223 - ) - assert.deepEqual( - d.repeats.reduce((sum, n) => sum + n, 100), - 352 - ) - assert.deepEqual( - d.chars.reduce((sum, n) => sum + n, "="), - "=abc" - ) - assert.deepEqual( - d.chars.reduceRight((sum, n) => sum + n, "="), - "=cba" - ) - assert.deepEqual( - d.numbers.reduceRight((sum, n) => sum + n, 100), - 223 - ) - assert.deepEqual(d.repeats.lastIndexOf(3), 5) - assert.deepEqual(d.repeats.lastIndexOf(3, 3), 3) - }) - doc = Automerge.change(doc, d => { - assert.deepEqual(d.numbers.fill(-1, 1, 2), [20, -1, 100]) - assert.deepEqual(d.chars.fill("z", 1, 100), ["a", "z", "z"]) - }) - assert.deepEqual(r1, [20, 3, 100]) - assert.deepEqual(doc.numbers, [20, -1, 100]) - assert.deepEqual(doc.chars, ["a", "z", "z"]) - }) - }) - - it("should obtain the same conflicts, regardless of merge order", () => { - let s1 = Automerge.init() - let s2 = Automerge.init() - s1 = Automerge.change(s1, doc => { - doc.x = 1 - doc.y = 2 - }) - s2 = Automerge.change(s2, doc => { - doc.x = 3 - doc.y = 4 - }) - const m1 = Automerge.merge(Automerge.clone(s1), Automerge.clone(s2)) - const m2 = Automerge.merge(Automerge.clone(s2), Automerge.clone(s1)) - assert.deepStrictEqual( - Automerge.getConflicts(m1, "x"), - Automerge.getConflicts(m2, "x") - ) - }) - - describe("getObjectId", () => { - let s1 = Automerge.from({ - string: "string", - number: 1, - null: null, - date: new Date(), - counter: new Automerge.Counter(), - bytes: new Uint8Array(10), - text: "", - list: [], - map: {}, - }) - - it("should return null for scalar values", () => { - assert.equal(Automerge.getObjectId(s1.string), null) - assert.equal(Automerge.getObjectId(s1.number), null) - assert.equal(Automerge.getObjectId(s1.null!), null) - assert.equal(Automerge.getObjectId(s1.date), null) - assert.equal(Automerge.getObjectId(s1.counter), null) - assert.equal(Automerge.getObjectId(s1.bytes), null) - }) - - it("should return _root for the root object", () => { - 
assert.equal(Automerge.getObjectId(s1), "_root") - }) - - it("should return non-null for map, list, text, and objects", () => { - assert.equal(Automerge.getObjectId(s1.text), null) - assert.notEqual(Automerge.getObjectId(s1.list), null) - assert.notEqual(Automerge.getObjectId(s1.map), null) - }) - }) -}) diff --git a/javascript/test/extra_api_tests.ts b/javascript/test/extra_api_tests.ts deleted file mode 100644 index 84fa4c39..00000000 --- a/javascript/test/extra_api_tests.ts +++ /dev/null @@ -1,28 +0,0 @@ -import * as assert from "assert" -import { unstable as Automerge } from "../src" - -describe("Automerge", () => { - describe("basics", () => { - it("should allow you to load incrementally", () => { - let doc1 = Automerge.from({ foo: "bar" }) - let doc2 = Automerge.init() - doc2 = Automerge.loadIncremental(doc2, Automerge.save(doc1)) - doc1 = Automerge.change(doc1, d => (d.foo2 = "bar2")) - doc2 = Automerge.loadIncremental( - doc2, - Automerge.getBackend(doc1).saveIncremental() - ) - doc1 = Automerge.change(doc1, d => (d.foo = "bar2")) - doc2 = Automerge.loadIncremental( - doc2, - Automerge.getBackend(doc1).saveIncremental() - ) - doc1 = Automerge.change(doc1, d => (d.x = "y")) - doc2 = Automerge.loadIncremental( - doc2, - Automerge.getBackend(doc1).saveIncremental() - ) - assert.deepEqual(doc1, doc2) - }) - }) -}) diff --git a/javascript/test/legacy/columnar.js b/javascript/test/legacy/columnar.js deleted file mode 100644 index 6a9b5874..00000000 --- a/javascript/test/legacy/columnar.js +++ /dev/null @@ -1,1315 +0,0 @@ -const pako = require("pako") -const { copyObject, parseOpId, equalBytes } = require("./common") -const { - utf8ToString, - hexStringToBytes, - bytesToHexString, - Encoder, - Decoder, - RLEEncoder, - RLEDecoder, - DeltaEncoder, - DeltaDecoder, - BooleanEncoder, - BooleanDecoder, -} = require("./encoding") - -// Maybe we should be using the platform's built-in hash implementation? 
-// Node has the crypto module: https://nodejs.org/api/crypto.html and browsers have -// https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/digest -// However, the WebCrypto API is asynchronous (returns promises), which would -// force all our APIs to become asynchronous as well, which would be annoying. -// -// I think on balance, it's safe enough to use a random library off npm: -// - We only need one hash function (not a full suite of crypto algorithms); -// - SHA256 is quite simple and has fairly few opportunities for subtle bugs -// (compared to asymmetric cryptography anyway); -// - It does not need a secure source of random bits and does not need to be -// constant-time; -// - I have reviewed the source code and it seems pretty reasonable. -const { Hash } = require("fast-sha256") - -// These bytes don't mean anything, they were generated randomly -const MAGIC_BYTES = new Uint8Array([0x85, 0x6f, 0x4a, 0x83]) - -const CHUNK_TYPE_DOCUMENT = 0 -const CHUNK_TYPE_CHANGE = 1 -const CHUNK_TYPE_DEFLATE = 2 // like CHUNK_TYPE_CHANGE but with DEFLATE compression - -// Minimum number of bytes in a value before we enable DEFLATE compression (there is no point -// compressing very short values since compression may actually make them bigger) -const DEFLATE_MIN_SIZE = 256 - -// The least-significant 3 bits of a columnId indicate its datatype -const COLUMN_TYPE = { - GROUP_CARD: 0, - ACTOR_ID: 1, - INT_RLE: 2, - INT_DELTA: 3, - BOOLEAN: 4, - STRING_RLE: 5, - VALUE_LEN: 6, - VALUE_RAW: 7, -} - -// The 4th-least-significant bit of a columnId is set if the column is DEFLATE-compressed -const COLUMN_TYPE_DEFLATE = 8 - -// In the values in a column of type VALUE_LEN, the bottom four bits indicate the type of the value, -// one of the following types in VALUE_TYPE. The higher bits indicate the length of the value in the -// associated VALUE_RAW column (in bytes). 
-const VALUE_TYPE = { - NULL: 0, - FALSE: 1, - TRUE: 2, - LEB128_UINT: 3, - LEB128_INT: 4, - IEEE754: 5, - UTF8: 6, - BYTES: 7, - COUNTER: 8, - TIMESTAMP: 9, - MIN_UNKNOWN: 10, - MAX_UNKNOWN: 15, -} - -// make* actions must be at even-numbered indexes in this list -const ACTIONS = [ - "makeMap", - "set", - "makeList", - "del", - "makeText", - "inc", - "makeTable", - "link", -] - -const OBJECT_TYPE = { - makeMap: "map", - makeList: "list", - makeText: "text", - makeTable: "table", -} - -const COMMON_COLUMNS = [ - { columnName: "objActor", columnId: (0 << 4) | COLUMN_TYPE.ACTOR_ID }, - { columnName: "objCtr", columnId: (0 << 4) | COLUMN_TYPE.INT_RLE }, - { columnName: "keyActor", columnId: (1 << 4) | COLUMN_TYPE.ACTOR_ID }, - { columnName: "keyCtr", columnId: (1 << 4) | COLUMN_TYPE.INT_DELTA }, - { columnName: "keyStr", columnId: (1 << 4) | COLUMN_TYPE.STRING_RLE }, - { columnName: "idActor", columnId: (2 << 4) | COLUMN_TYPE.ACTOR_ID }, - { columnName: "idCtr", columnId: (2 << 4) | COLUMN_TYPE.INT_DELTA }, - { columnName: "insert", columnId: (3 << 4) | COLUMN_TYPE.BOOLEAN }, - { columnName: "action", columnId: (4 << 4) | COLUMN_TYPE.INT_RLE }, - { columnName: "valLen", columnId: (5 << 4) | COLUMN_TYPE.VALUE_LEN }, - { columnName: "valRaw", columnId: (5 << 4) | COLUMN_TYPE.VALUE_RAW }, - { columnName: "chldActor", columnId: (6 << 4) | COLUMN_TYPE.ACTOR_ID }, - { columnName: "chldCtr", columnId: (6 << 4) | COLUMN_TYPE.INT_DELTA }, -] - -const CHANGE_COLUMNS = COMMON_COLUMNS.concat([ - { columnName: "predNum", columnId: (7 << 4) | COLUMN_TYPE.GROUP_CARD }, - { columnName: "predActor", columnId: (7 << 4) | COLUMN_TYPE.ACTOR_ID }, - { columnName: "predCtr", columnId: (7 << 4) | COLUMN_TYPE.INT_DELTA }, -]) - -const DOC_OPS_COLUMNS = COMMON_COLUMNS.concat([ - { columnName: "succNum", columnId: (8 << 4) | COLUMN_TYPE.GROUP_CARD }, - { columnName: "succActor", columnId: (8 << 4) | COLUMN_TYPE.ACTOR_ID }, - { columnName: "succCtr", columnId: (8 << 4) | COLUMN_TYPE.INT_DELTA 
}, -]) - -const DOCUMENT_COLUMNS = [ - { columnName: "actor", columnId: (0 << 4) | COLUMN_TYPE.ACTOR_ID }, - { columnName: "seq", columnId: (0 << 4) | COLUMN_TYPE.INT_DELTA }, - { columnName: "maxOp", columnId: (1 << 4) | COLUMN_TYPE.INT_DELTA }, - { columnName: "time", columnId: (2 << 4) | COLUMN_TYPE.INT_DELTA }, - { columnName: "message", columnId: (3 << 4) | COLUMN_TYPE.STRING_RLE }, - { columnName: "depsNum", columnId: (4 << 4) | COLUMN_TYPE.GROUP_CARD }, - { columnName: "depsIndex", columnId: (4 << 4) | COLUMN_TYPE.INT_DELTA }, - { columnName: "extraLen", columnId: (5 << 4) | COLUMN_TYPE.VALUE_LEN }, - { columnName: "extraRaw", columnId: (5 << 4) | COLUMN_TYPE.VALUE_RAW }, -] - -/** - * Maps an opId of the form {counter: 12345, actorId: 'someActorId'} to the form - * {counter: 12345, actorNum: 123, actorId: 'someActorId'}, where the actorNum - * is the index into the `actorIds` array. - */ -function actorIdToActorNum(opId, actorIds) { - if (!opId || !opId.actorId) return opId - const counter = opId.counter - const actorNum = actorIds.indexOf(opId.actorId) - if (actorNum < 0) throw new RangeError("missing actorId") // should not happen - return { counter, actorNum, actorId: opId.actorId } -} - -/** - * Comparison function to pass to Array.sort(), which compares two opIds in the - * form produced by `actorIdToActorNum` so that they are sorted in increasing - * Lamport timestamp order (sorted first by counter, then by actorId). - */ -function compareParsedOpIds(id1, id2) { - if (id1.counter < id2.counter) return -1 - if (id1.counter > id2.counter) return +1 - if (id1.actorId < id2.actorId) return -1 - if (id1.actorId > id2.actorId) return +1 - return 0 -} - -/** - * Takes `changes`, an array of changes (represented as JS objects). 
Returns an - * object `{changes, actorIds}`, where `changes` is a copy of the argument in - * which all string opIds have been replaced with `{counter, actorNum}` objects, - * and where `actorIds` is a lexicographically sorted array of actor IDs occurring - * in any of the operations. `actorNum` is an index into that array of actorIds. - * If `single` is true, the actorId of the author of the change is moved to the - * beginning of the array of actorIds, so that `actorNum` is zero when referencing - * the author of the change itself. This special-casing is omitted if `single` is - * false. - */ -function parseAllOpIds(changes, single) { - const actors = {}, - newChanges = [] - for (let change of changes) { - change = copyObject(change) - actors[change.actor] = true - change.ops = expandMultiOps(change.ops, change.startOp, change.actor) - change.ops = change.ops.map(op => { - op = copyObject(op) - if (op.obj !== "_root") op.obj = parseOpId(op.obj) - if (op.elemId && op.elemId !== "_head") op.elemId = parseOpId(op.elemId) - if (op.child) op.child = parseOpId(op.child) - if (op.pred) op.pred = op.pred.map(parseOpId) - if (op.obj.actorId) actors[op.obj.actorId] = true - if (op.elemId && op.elemId.actorId) actors[op.elemId.actorId] = true - if (op.child && op.child.actorId) actors[op.child.actorId] = true - for (let pred of op.pred) actors[pred.actorId] = true - return op - }) - newChanges.push(change) - } - - let actorIds = Object.keys(actors).sort() - if (single) { - actorIds = [changes[0].actor].concat( - actorIds.filter(actor => actor !== changes[0].actor) - ) - } - for (let change of newChanges) { - change.actorNum = actorIds.indexOf(change.actor) - for (let i = 0; i < change.ops.length; i++) { - let op = change.ops[i] - op.id = { - counter: change.startOp + i, - actorNum: change.actorNum, - actorId: change.actor, - } - op.obj = actorIdToActorNum(op.obj, actorIds) - op.elemId = actorIdToActorNum(op.elemId, actorIds) - op.child = actorIdToActorNum(op.child, 
actorIds) - op.pred = op.pred.map(pred => actorIdToActorNum(pred, actorIds)) - } - } - return { changes: newChanges, actorIds } -} - -/** - * Encodes the `obj` property of operation `op` into the two columns - * `objActor` and `objCtr`. - */ -function encodeObjectId(op, columns) { - if (op.obj === "_root") { - columns.objActor.appendValue(null) - columns.objCtr.appendValue(null) - } else if (op.obj.actorNum >= 0 && op.obj.counter > 0) { - columns.objActor.appendValue(op.obj.actorNum) - columns.objCtr.appendValue(op.obj.counter) - } else { - throw new RangeError( - `Unexpected objectId reference: ${JSON.stringify(op.obj)}` - ) - } -} - -/** - * Encodes the `key` and `elemId` properties of operation `op` into the three - * columns `keyActor`, `keyCtr`, and `keyStr`. - */ -function encodeOperationKey(op, columns) { - if (op.key) { - columns.keyActor.appendValue(null) - columns.keyCtr.appendValue(null) - columns.keyStr.appendValue(op.key) - } else if (op.elemId === "_head" && op.insert) { - columns.keyActor.appendValue(null) - columns.keyCtr.appendValue(0) - columns.keyStr.appendValue(null) - } else if (op.elemId && op.elemId.actorNum >= 0 && op.elemId.counter > 0) { - columns.keyActor.appendValue(op.elemId.actorNum) - columns.keyCtr.appendValue(op.elemId.counter) - columns.keyStr.appendValue(null) - } else { - throw new RangeError(`Unexpected operation key: ${JSON.stringify(op)}`) - } -} - -/** - * Encodes the `action` property of operation `op` into the `action` column. 
- */ -function encodeOperationAction(op, columns) { - const actionCode = ACTIONS.indexOf(op.action) - if (actionCode >= 0) { - columns.action.appendValue(actionCode) - } else if (typeof op.action === "number") { - columns.action.appendValue(op.action) - } else { - throw new RangeError(`Unexpected operation action: ${op.action}`) - } -} - -/** - * Given the datatype for a number, determine the typeTag and the value to encode - * otherwise guess - */ -function getNumberTypeAndValue(op) { - switch (op.datatype) { - case "counter": - return [VALUE_TYPE.COUNTER, op.value] - case "timestamp": - return [VALUE_TYPE.TIMESTAMP, op.value] - case "uint": - return [VALUE_TYPE.LEB128_UINT, op.value] - case "int": - return [VALUE_TYPE.LEB128_INT, op.value] - case "float64": { - const buf64 = new ArrayBuffer(8), - view64 = new DataView(buf64) - view64.setFloat64(0, op.value, true) - return [VALUE_TYPE.IEEE754, new Uint8Array(buf64)] - } - default: - // increment operators get resolved here ... - if ( - Number.isInteger(op.value) && - op.value <= Number.MAX_SAFE_INTEGER && - op.value >= Number.MIN_SAFE_INTEGER - ) { - return [VALUE_TYPE.LEB128_INT, op.value] - } else { - const buf64 = new ArrayBuffer(8), - view64 = new DataView(buf64) - view64.setFloat64(0, op.value, true) - return [VALUE_TYPE.IEEE754, new Uint8Array(buf64)] - } - } -} - -/** - * Encodes the `value` property of operation `op` into the two columns - * `valLen` and `valRaw`. 
- */ -function encodeValue(op, columns) { - if ((op.action !== "set" && op.action !== "inc") || op.value === null) { - columns.valLen.appendValue(VALUE_TYPE.NULL) - } else if (op.value === false) { - columns.valLen.appendValue(VALUE_TYPE.FALSE) - } else if (op.value === true) { - columns.valLen.appendValue(VALUE_TYPE.TRUE) - } else if (typeof op.value === "string") { - const numBytes = columns.valRaw.appendRawString(op.value) - columns.valLen.appendValue((numBytes << 4) | VALUE_TYPE.UTF8) - } else if (ArrayBuffer.isView(op.value)) { - const numBytes = columns.valRaw.appendRawBytes( - new Uint8Array(op.value.buffer) - ) - columns.valLen.appendValue((numBytes << 4) | VALUE_TYPE.BYTES) - } else if (typeof op.value === "number") { - let [typeTag, value] = getNumberTypeAndValue(op) - let numBytes - if (typeTag === VALUE_TYPE.LEB128_UINT) { - numBytes = columns.valRaw.appendUint53(value) - } else if (typeTag === VALUE_TYPE.IEEE754) { - numBytes = columns.valRaw.appendRawBytes(value) - } else { - numBytes = columns.valRaw.appendInt53(value) - } - columns.valLen.appendValue((numBytes << 4) | typeTag) - } else if ( - typeof op.datatype === "number" && - op.datatype >= VALUE_TYPE.MIN_UNKNOWN && - op.datatype <= VALUE_TYPE.MAX_UNKNOWN && - op.value instanceof Uint8Array - ) { - const numBytes = columns.valRaw.appendRawBytes(op.value) - columns.valLen.appendValue((numBytes << 4) | op.datatype) - } else if (op.datatype) { - throw new RangeError( - `Unknown datatype ${op.datatype} for value ${op.value}` - ) - } else { - throw new RangeError(`Unsupported value in operation: ${op.value}`) - } -} - -/** - * Given `sizeTag` (an unsigned integer read from a VALUE_LEN column) and `bytes` (a Uint8Array - * read from a VALUE_RAW column, with length `sizeTag >> 4`), this function returns an object of the - * form `{value: value, datatype: datatypeTag}` where `value` is a JavaScript primitive datatype - * corresponding to the value, and `datatypeTag` is a datatype annotation such as 
'counter'. - */ -function decodeValue(sizeTag, bytes) { - if (sizeTag === VALUE_TYPE.NULL) { - return { value: null } - } else if (sizeTag === VALUE_TYPE.FALSE) { - return { value: false } - } else if (sizeTag === VALUE_TYPE.TRUE) { - return { value: true } - } else if (sizeTag % 16 === VALUE_TYPE.UTF8) { - return { value: utf8ToString(bytes) } - } else { - if (sizeTag % 16 === VALUE_TYPE.LEB128_UINT) { - return { value: new Decoder(bytes).readUint53(), datatype: "uint" } - } else if (sizeTag % 16 === VALUE_TYPE.LEB128_INT) { - return { value: new Decoder(bytes).readInt53(), datatype: "int" } - } else if (sizeTag % 16 === VALUE_TYPE.IEEE754) { - const view = new DataView( - bytes.buffer, - bytes.byteOffset, - bytes.byteLength - ) - if (bytes.byteLength === 8) { - return { value: view.getFloat64(0, true), datatype: "float64" } - } else { - throw new RangeError( - `Invalid length for floating point number: ${bytes.byteLength}` - ) - } - } else if (sizeTag % 16 === VALUE_TYPE.COUNTER) { - return { value: new Decoder(bytes).readInt53(), datatype: "counter" } - } else if (sizeTag % 16 === VALUE_TYPE.TIMESTAMP) { - return { value: new Decoder(bytes).readInt53(), datatype: "timestamp" } - } else { - return { value: bytes, datatype: sizeTag % 16 } - } - } -} - -/** - * Reads one value from the column `columns[colIndex]` and interprets it based - * on the column type. `actorIds` is a list of actors that appear in the change; - * `actorIds[0]` is the actorId of the change's author. Mutates the `result` - * object with the value, and returns the number of columns processed (this is 2 - * in the case of a pair of VALUE_LEN and VALUE_RAW columns, which are processed - * in one go). 
- */ -function decodeValueColumns(columns, colIndex, actorIds, result) { - const { columnId, columnName, decoder } = columns[colIndex] - if ( - columnId % 8 === COLUMN_TYPE.VALUE_LEN && - colIndex + 1 < columns.length && - columns[colIndex + 1].columnId === columnId + 1 - ) { - const sizeTag = decoder.readValue() - const rawValue = columns[colIndex + 1].decoder.readRawBytes(sizeTag >> 4) - const { value, datatype } = decodeValue(sizeTag, rawValue) - result[columnName] = value - if (datatype) result[columnName + "_datatype"] = datatype - return 2 - } else if (columnId % 8 === COLUMN_TYPE.ACTOR_ID) { - const actorNum = decoder.readValue() - if (actorNum === null) { - result[columnName] = null - } else { - if (!actorIds[actorNum]) - throw new RangeError(`No actor index ${actorNum}`) - result[columnName] = actorIds[actorNum] - } - } else { - result[columnName] = decoder.readValue() - } - return 1 -} - -/** - * Encodes an array of operations in a set of columns. The operations need to - * be parsed with `parseAllOpIds()` beforehand. If `forDocument` is true, we use - * the column structure of a whole document, otherwise we use the column - * structure for an individual change. Returns an array of - * `{columnId, columnName, encoder}` objects. 
- */ -function encodeOps(ops, forDocument) { - const columns = { - objActor: new RLEEncoder("uint"), - objCtr: new RLEEncoder("uint"), - keyActor: new RLEEncoder("uint"), - keyCtr: new DeltaEncoder(), - keyStr: new RLEEncoder("utf8"), - insert: new BooleanEncoder(), - action: new RLEEncoder("uint"), - valLen: new RLEEncoder("uint"), - valRaw: new Encoder(), - chldActor: new RLEEncoder("uint"), - chldCtr: new DeltaEncoder(), - } - - if (forDocument) { - columns.idActor = new RLEEncoder("uint") - columns.idCtr = new DeltaEncoder() - columns.succNum = new RLEEncoder("uint") - columns.succActor = new RLEEncoder("uint") - columns.succCtr = new DeltaEncoder() - } else { - columns.predNum = new RLEEncoder("uint") - columns.predCtr = new DeltaEncoder() - columns.predActor = new RLEEncoder("uint") - } - - for (let op of ops) { - encodeObjectId(op, columns) - encodeOperationKey(op, columns) - columns.insert.appendValue(!!op.insert) - encodeOperationAction(op, columns) - encodeValue(op, columns) - - if (op.child && op.child.counter) { - columns.chldActor.appendValue(op.child.actorNum) - columns.chldCtr.appendValue(op.child.counter) - } else { - columns.chldActor.appendValue(null) - columns.chldCtr.appendValue(null) - } - - if (forDocument) { - columns.idActor.appendValue(op.id.actorNum) - columns.idCtr.appendValue(op.id.counter) - columns.succNum.appendValue(op.succ.length) - op.succ.sort(compareParsedOpIds) - for (let i = 0; i < op.succ.length; i++) { - columns.succActor.appendValue(op.succ[i].actorNum) - columns.succCtr.appendValue(op.succ[i].counter) - } - } else { - columns.predNum.appendValue(op.pred.length) - op.pred.sort(compareParsedOpIds) - for (let i = 0; i < op.pred.length; i++) { - columns.predActor.appendValue(op.pred[i].actorNum) - columns.predCtr.appendValue(op.pred[i].counter) - } - } - } - - let columnList = [] - for (let { columnName, columnId } of forDocument - ? 
DOC_OPS_COLUMNS - : CHANGE_COLUMNS) { - if (columns[columnName]) - columnList.push({ columnId, columnName, encoder: columns[columnName] }) - } - return columnList.sort((a, b) => a.columnId - b.columnId) -} - -function validDatatype(value, datatype) { - if (datatype === undefined) { - return ( - typeof value === "string" || typeof value === "boolean" || value === null - ) - } else { - return typeof value === "number" - } -} - -function expandMultiOps(ops, startOp, actor) { - let opNum = startOp - let expandedOps = [] - for (const op of ops) { - if (op.action === "set" && op.values && op.insert) { - if (op.pred.length !== 0) - throw new RangeError("multi-insert pred must be empty") - let lastElemId = op.elemId - const datatype = op.datatype - for (const value of op.values) { - if (!validDatatype(value, datatype)) - throw new RangeError( - `Decode failed: bad value/datatype association (${value},${datatype})` - ) - expandedOps.push({ - action: "set", - obj: op.obj, - elemId: lastElemId, - datatype, - value, - pred: [], - insert: true, - }) - lastElemId = `${opNum}@${actor}` - opNum += 1 - } - } else if (op.action === "del" && op.multiOp > 1) { - if (op.pred.length !== 1) - throw new RangeError("multiOp deletion must have exactly one pred") - const startElemId = parseOpId(op.elemId), - startPred = parseOpId(op.pred[0]) - for (let i = 0; i < op.multiOp; i++) { - const elemId = `${startElemId.counter + i}@${startElemId.actorId}` - const pred = [`${startPred.counter + i}@${startPred.actorId}`] - expandedOps.push({ action: "del", obj: op.obj, elemId, pred }) - opNum += 1 - } - } else { - expandedOps.push(op) - opNum += 1 - } - } - return expandedOps -} - -/** - * Takes a change as decoded by `decodeColumns`, and changes it into the form - * expected by the rest of the backend. If `forDocument` is true, we use the op - * structure of a whole document, otherwise we use the op structure for an - * individual change. 
- */ -function decodeOps(ops, forDocument) { - const newOps = [] - for (let op of ops) { - const obj = op.objCtr === null ? "_root" : `${op.objCtr}@${op.objActor}` - const elemId = op.keyStr - ? undefined - : op.keyCtr === 0 - ? "_head" - : `${op.keyCtr}@${op.keyActor}` - const action = ACTIONS[op.action] || op.action - const newOp = elemId - ? { obj, elemId, action } - : { obj, key: op.keyStr, action } - newOp.insert = !!op.insert - if (ACTIONS[op.action] === "set" || ACTIONS[op.action] === "inc") { - newOp.value = op.valLen - if (op.valLen_datatype) newOp.datatype = op.valLen_datatype - } - if (!!op.chldCtr !== !!op.chldActor) { - throw new RangeError( - `Mismatched child columns: ${op.chldCtr} and ${op.chldActor}` - ) - } - if (op.chldCtr !== null) newOp.child = `${op.chldCtr}@${op.chldActor}` - if (forDocument) { - newOp.id = `${op.idCtr}@${op.idActor}` - newOp.succ = op.succNum.map(succ => `${succ.succCtr}@${succ.succActor}`) - checkSortedOpIds( - op.succNum.map(succ => ({ - counter: succ.succCtr, - actorId: succ.succActor, - })) - ) - } else { - newOp.pred = op.predNum.map(pred => `${pred.predCtr}@${pred.predActor}`) - checkSortedOpIds( - op.predNum.map(pred => ({ - counter: pred.predCtr, - actorId: pred.predActor, - })) - ) - } - newOps.push(newOp) - } - return newOps -} - -/** - * Throws an exception if the opIds in the given array are not in sorted order. 
- */ -function checkSortedOpIds(opIds) { - let last = null - for (let opId of opIds) { - if (last && compareParsedOpIds(last, opId) !== -1) { - throw new RangeError("operation IDs are not in ascending order") - } - last = opId - } -} - -function encoderByColumnId(columnId) { - if ((columnId & 7) === COLUMN_TYPE.INT_DELTA) { - return new DeltaEncoder() - } else if ((columnId & 7) === COLUMN_TYPE.BOOLEAN) { - return new BooleanEncoder() - } else if ((columnId & 7) === COLUMN_TYPE.STRING_RLE) { - return new RLEEncoder("utf8") - } else if ((columnId & 7) === COLUMN_TYPE.VALUE_RAW) { - return new Encoder() - } else { - return new RLEEncoder("uint") - } -} - -function decoderByColumnId(columnId, buffer) { - if ((columnId & 7) === COLUMN_TYPE.INT_DELTA) { - return new DeltaDecoder(buffer) - } else if ((columnId & 7) === COLUMN_TYPE.BOOLEAN) { - return new BooleanDecoder(buffer) - } else if ((columnId & 7) === COLUMN_TYPE.STRING_RLE) { - return new RLEDecoder("utf8", buffer) - } else if ((columnId & 7) === COLUMN_TYPE.VALUE_RAW) { - return new Decoder(buffer) - } else { - return new RLEDecoder("uint", buffer) - } -} - -function makeDecoders(columns, columnSpec) { - const emptyBuf = new Uint8Array(0) - let decoders = [], - columnIndex = 0, - specIndex = 0 - - while (columnIndex < columns.length || specIndex < columnSpec.length) { - if ( - columnIndex === columns.length || - (specIndex < columnSpec.length && - columnSpec[specIndex].columnId < columns[columnIndex].columnId) - ) { - const { columnId, columnName } = columnSpec[specIndex] - decoders.push({ - columnId, - columnName, - decoder: decoderByColumnId(columnId, emptyBuf), - }) - specIndex++ - } else if ( - specIndex === columnSpec.length || - columns[columnIndex].columnId < columnSpec[specIndex].columnId - ) { - const { columnId, buffer } = columns[columnIndex] - decoders.push({ columnId, decoder: decoderByColumnId(columnId, buffer) }) - columnIndex++ - } else { - // columns[columnIndex].columnId === 
columnSpec[specIndex].columnId - const { columnId, buffer } = columns[columnIndex], - { columnName } = columnSpec[specIndex] - decoders.push({ - columnId, - columnName, - decoder: decoderByColumnId(columnId, buffer), - }) - columnIndex++ - specIndex++ - } - } - return decoders -} - -function decodeColumns(columns, actorIds, columnSpec) { - columns = makeDecoders(columns, columnSpec) - let parsedRows = [] - while (columns.some(col => !col.decoder.done)) { - let row = {}, - col = 0 - while (col < columns.length) { - const columnId = columns[col].columnId - let groupId = columnId >> 4, - groupCols = 1 - while ( - col + groupCols < columns.length && - columns[col + groupCols].columnId >> 4 === groupId - ) { - groupCols++ - } - - if (columnId % 8 === COLUMN_TYPE.GROUP_CARD) { - const values = [], - count = columns[col].decoder.readValue() - for (let i = 0; i < count; i++) { - let value = {} - for (let colOffset = 1; colOffset < groupCols; colOffset++) { - decodeValueColumns(columns, col + colOffset, actorIds, value) - } - values.push(value) - } - row[columns[col].columnName] = values - col += groupCols - } else { - col += decodeValueColumns(columns, col, actorIds, row) - } - } - parsedRows.push(row) - } - return parsedRows -} - -function decodeColumnInfo(decoder) { - // A number that is all 1 bits except for the bit that indicates whether a column is - // deflate-compressed. We ignore this bit when checking whether columns are sorted by ID. 
- const COLUMN_ID_MASK = (-1 ^ COLUMN_TYPE_DEFLATE) >>> 0 - - let lastColumnId = -1, - columns = [], - numColumns = decoder.readUint53() - for (let i = 0; i < numColumns; i++) { - const columnId = decoder.readUint53(), - bufferLen = decoder.readUint53() - if ((columnId & COLUMN_ID_MASK) <= (lastColumnId & COLUMN_ID_MASK)) { - throw new RangeError("Columns must be in ascending order") - } - lastColumnId = columnId - columns.push({ columnId, bufferLen }) - } - return columns -} - -function encodeColumnInfo(encoder, columns) { - const nonEmptyColumns = columns.filter( - column => column.encoder.buffer.byteLength > 0 - ) - encoder.appendUint53(nonEmptyColumns.length) - for (let column of nonEmptyColumns) { - encoder.appendUint53(column.columnId) - encoder.appendUint53(column.encoder.buffer.byteLength) - } -} - -function decodeChangeHeader(decoder) { - const numDeps = decoder.readUint53(), - deps = [] - for (let i = 0; i < numDeps; i++) { - deps.push(bytesToHexString(decoder.readRawBytes(32))) - } - let change = { - actor: decoder.readHexString(), - seq: decoder.readUint53(), - startOp: decoder.readUint53(), - time: decoder.readInt53(), - message: decoder.readPrefixedString(), - deps, - } - const actorIds = [change.actor], - numActorIds = decoder.readUint53() - for (let i = 0; i < numActorIds; i++) actorIds.push(decoder.readHexString()) - change.actorIds = actorIds - return change -} - -/** - * Assembles a chunk of encoded data containing a checksum, headers, and a - * series of encoded columns. Calls `encodeHeaderCallback` with an encoder that - * should be used to add the headers. The columns should be given as `columns`. 
- */ -function encodeContainer(chunkType, encodeContentsCallback) { - const CHECKSUM_SIZE = 4 // checksum is first 4 bytes of SHA-256 hash of the rest of the data - const HEADER_SPACE = MAGIC_BYTES.byteLength + CHECKSUM_SIZE + 1 + 5 // 1 byte type + 5 bytes length - const body = new Encoder() - // Make space for the header at the beginning of the body buffer. We will - // copy the header in here later. This is cheaper than copying the body since - // the body is likely to be much larger than the header. - body.appendRawBytes(new Uint8Array(HEADER_SPACE)) - encodeContentsCallback(body) - - const bodyBuf = body.buffer - const header = new Encoder() - header.appendByte(chunkType) - header.appendUint53(bodyBuf.byteLength - HEADER_SPACE) - - // Compute the hash over chunkType, length, and body - const headerBuf = header.buffer - const sha256 = new Hash() - sha256.update(headerBuf) - sha256.update(bodyBuf.subarray(HEADER_SPACE)) - const hash = sha256.digest(), - checksum = hash.subarray(0, CHECKSUM_SIZE) - - // Copy header into the body buffer so that they are contiguous - bodyBuf.set( - MAGIC_BYTES, - HEADER_SPACE - headerBuf.byteLength - CHECKSUM_SIZE - MAGIC_BYTES.byteLength - ) - bodyBuf.set(checksum, HEADER_SPACE - headerBuf.byteLength - CHECKSUM_SIZE) - bodyBuf.set(headerBuf, HEADER_SPACE - headerBuf.byteLength) - return { - hash, - bytes: bodyBuf.subarray( - HEADER_SPACE - - headerBuf.byteLength - - CHECKSUM_SIZE - - MAGIC_BYTES.byteLength - ), - } -} - -function decodeContainerHeader(decoder, computeHash) { - if (!equalBytes(decoder.readRawBytes(MAGIC_BYTES.byteLength), MAGIC_BYTES)) { - throw new RangeError("Data does not begin with magic bytes 85 6f 4a 83") - } - const expectedHash = decoder.readRawBytes(4) - const hashStartOffset = decoder.offset - const chunkType = decoder.readByte() - const chunkLength = decoder.readUint53() - const header = { - chunkType, - chunkLength, - chunkData: decoder.readRawBytes(chunkLength), - } - - if (computeHash) { - const 
sha256 = new Hash() - sha256.update(decoder.buf.subarray(hashStartOffset, decoder.offset)) - const binaryHash = sha256.digest() - if (!equalBytes(binaryHash.subarray(0, 4), expectedHash)) { - throw new RangeError("checksum does not match data") - } - header.hash = bytesToHexString(binaryHash) - } - return header -} - -function encodeChange(changeObj) { - const { changes, actorIds } = parseAllOpIds([changeObj], true) - const change = changes[0] - - const { hash, bytes } = encodeContainer(CHUNK_TYPE_CHANGE, encoder => { - if (!Array.isArray(change.deps)) throw new TypeError("deps is not an array") - encoder.appendUint53(change.deps.length) - for (let hash of change.deps.slice().sort()) { - encoder.appendRawBytes(hexStringToBytes(hash)) - } - encoder.appendHexString(change.actor) - encoder.appendUint53(change.seq) - encoder.appendUint53(change.startOp) - encoder.appendInt53(change.time) - encoder.appendPrefixedString(change.message || "") - encoder.appendUint53(actorIds.length - 1) - for (let actor of actorIds.slice(1)) encoder.appendHexString(actor) - - const columns = encodeOps(change.ops, false) - encodeColumnInfo(encoder, columns) - for (let column of columns) encoder.appendRawBytes(column.encoder.buffer) - if (change.extraBytes) encoder.appendRawBytes(change.extraBytes) - }) - - const hexHash = bytesToHexString(hash) - if (changeObj.hash && changeObj.hash !== hexHash) { - throw new RangeError( - `Change hash does not match encoding: ${changeObj.hash} != ${hexHash}` - ) - } - return bytes.byteLength >= DEFLATE_MIN_SIZE ? 
deflateChange(bytes) : bytes -} - -function decodeChangeColumns(buffer) { - if (buffer[8] === CHUNK_TYPE_DEFLATE) buffer = inflateChange(buffer) - const decoder = new Decoder(buffer) - const header = decodeContainerHeader(decoder, true) - const chunkDecoder = new Decoder(header.chunkData) - if (!decoder.done) throw new RangeError("Encoded change has trailing data") - if (header.chunkType !== CHUNK_TYPE_CHANGE) - throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) - - const change = decodeChangeHeader(chunkDecoder) - const columns = decodeColumnInfo(chunkDecoder) - for (let i = 0; i < columns.length; i++) { - if ((columns[i].columnId & COLUMN_TYPE_DEFLATE) !== 0) { - throw new RangeError("change must not contain deflated columns") - } - columns[i].buffer = chunkDecoder.readRawBytes(columns[i].bufferLen) - } - if (!chunkDecoder.done) { - const restLen = chunkDecoder.buf.byteLength - chunkDecoder.offset - change.extraBytes = chunkDecoder.readRawBytes(restLen) - } - - change.columns = columns - change.hash = header.hash - return change -} - -/** - * Decodes one change in binary format into its JS object representation. - */ -function decodeChange(buffer) { - const change = decodeChangeColumns(buffer) - change.ops = decodeOps( - decodeColumns(change.columns, change.actorIds, CHANGE_COLUMNS), - false - ) - delete change.actorIds - delete change.columns - return change -} - -/** - * Decodes the header fields of a change in binary format, but does not decode - * the operations. Saves work when we only need to inspect the headers. Only - * computes the hash of the change if `computeHash` is true. 
- */ -function decodeChangeMeta(buffer, computeHash) { - if (buffer[8] === CHUNK_TYPE_DEFLATE) buffer = inflateChange(buffer) - const header = decodeContainerHeader(new Decoder(buffer), computeHash) - if (header.chunkType !== CHUNK_TYPE_CHANGE) { - throw new RangeError("Buffer chunk type is not a change") - } - const meta = decodeChangeHeader(new Decoder(header.chunkData)) - meta.change = buffer - if (computeHash) meta.hash = header.hash - return meta -} - -/** - * Compresses a binary change using DEFLATE. - */ -function deflateChange(buffer) { - const header = decodeContainerHeader(new Decoder(buffer), false) - if (header.chunkType !== CHUNK_TYPE_CHANGE) - throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) - const compressed = pako.deflateRaw(header.chunkData) - const encoder = new Encoder() - encoder.appendRawBytes(buffer.subarray(0, 8)) // copy MAGIC_BYTES and checksum - encoder.appendByte(CHUNK_TYPE_DEFLATE) - encoder.appendUint53(compressed.byteLength) - encoder.appendRawBytes(compressed) - return encoder.buffer -} - -/** - * Decompresses a binary change that has been compressed with DEFLATE. - */ -function inflateChange(buffer) { - const header = decodeContainerHeader(new Decoder(buffer), false) - if (header.chunkType !== CHUNK_TYPE_DEFLATE) - throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) - const decompressed = pako.inflateRaw(header.chunkData) - const encoder = new Encoder() - encoder.appendRawBytes(buffer.subarray(0, 8)) // copy MAGIC_BYTES and checksum - encoder.appendByte(CHUNK_TYPE_CHANGE) - encoder.appendUint53(decompressed.byteLength) - encoder.appendRawBytes(decompressed) - return encoder.buffer -} - -/** - * Takes an Uint8Array that may contain multiple concatenated changes, and - * returns an array of subarrays, each subarray containing one change. 
- */ -function splitContainers(buffer) { - let decoder = new Decoder(buffer), - chunks = [], - startOffset = 0 - while (!decoder.done) { - decodeContainerHeader(decoder, false) - chunks.push(buffer.subarray(startOffset, decoder.offset)) - startOffset = decoder.offset - } - return chunks -} - -/** - * Decodes a list of changes from the binary format into JS objects. - * `binaryChanges` is an array of `Uint8Array` objects. - */ -function decodeChanges(binaryChanges) { - let decoded = [] - for (let binaryChange of binaryChanges) { - for (let chunk of splitContainers(binaryChange)) { - if (chunk[8] === CHUNK_TYPE_DOCUMENT) { - decoded = decoded.concat(decodeDocument(chunk)) - } else if ( - chunk[8] === CHUNK_TYPE_CHANGE || - chunk[8] === CHUNK_TYPE_DEFLATE - ) { - decoded.push(decodeChange(chunk)) - } else { - // ignoring chunk of unknown type - } - } - } - return decoded -} - -function sortOpIds(a, b) { - if (a === b) return 0 - if (a === "_root") return -1 - if (b === "_root") return +1 - const a_ = parseOpId(a), - b_ = parseOpId(b) - if (a_.counter < b_.counter) return -1 - if (a_.counter > b_.counter) return +1 - if (a_.actorId < b_.actorId) return -1 - if (a_.actorId > b_.actorId) return +1 - return 0 -} - -/** - * Takes a set of operations `ops` loaded from an encoded document, and - * reconstructs the changes that they originally came from. - * Does not return anything, only mutates `changes`. 
- */ -function groupChangeOps(changes, ops) { - let changesByActor = {} // map from actorId to array of changes by that actor - for (let change of changes) { - change.ops = [] - if (!changesByActor[change.actor]) changesByActor[change.actor] = [] - if (change.seq !== changesByActor[change.actor].length + 1) { - throw new RangeError( - `Expected seq = ${changesByActor[change.actor].length + 1}, got ${ - change.seq - }` - ) - } - if ( - change.seq > 1 && - changesByActor[change.actor][change.seq - 2].maxOp > change.maxOp - ) { - throw new RangeError("maxOp must increase monotonically per actor") - } - changesByActor[change.actor].push(change) - } - - let opsById = {} - for (let op of ops) { - if (op.action === "del") - throw new RangeError("document should not contain del operations") - op.pred = opsById[op.id] ? opsById[op.id].pred : [] - opsById[op.id] = op - for (let succ of op.succ) { - if (!opsById[succ]) { - if (op.elemId) { - const elemId = op.insert ? op.id : op.elemId - opsById[succ] = { - id: succ, - action: "del", - obj: op.obj, - elemId, - pred: [], - } - } else { - opsById[succ] = { - id: succ, - action: "del", - obj: op.obj, - key: op.key, - pred: [], - } - } - } - opsById[succ].pred.push(op.id) - } - delete op.succ - } - for (let op of Object.values(opsById)) { - if (op.action === "del") ops.push(op) - } - - for (let op of ops) { - const { counter, actorId } = parseOpId(op.id) - const actorChanges = changesByActor[actorId] - // Binary search to find the change that should contain this operation - let left = 0, - right = actorChanges.length - while (left < right) { - const index = Math.floor((left + right) / 2) - if (actorChanges[index].maxOp < counter) { - left = index + 1 - } else { - right = index - } - } - if (left >= actorChanges.length) { - throw new RangeError(`Operation ID ${op.id} outside of allowed range`) - } - actorChanges[left].ops.push(op) - } - - for (let change of changes) { - change.ops.sort((op1, op2) => sortOpIds(op1.id, op2.id)) - 
change.startOp = change.maxOp - change.ops.length + 1 - delete change.maxOp - for (let i = 0; i < change.ops.length; i++) { - const op = change.ops[i], - expectedId = `${change.startOp + i}@${change.actor}` - if (op.id !== expectedId) { - throw new RangeError(`Expected opId ${expectedId}, got ${op.id}`) - } - delete op.id - } - } -} - -function decodeDocumentChanges(changes, expectedHeads) { - let heads = {} // change hashes that are not a dependency of any other change - for (let i = 0; i < changes.length; i++) { - let change = changes[i] - change.deps = [] - for (let index of change.depsNum.map(d => d.depsIndex)) { - if (!changes[index] || !changes[index].hash) { - throw new RangeError( - `No hash for index ${index} while processing index ${i}` - ) - } - const hash = changes[index].hash - change.deps.push(hash) - if (heads[hash]) delete heads[hash] - } - change.deps.sort() - delete change.depsNum - - if (change.extraLen_datatype !== VALUE_TYPE.BYTES) { - throw new RangeError(`Bad datatype for extra bytes: ${VALUE_TYPE.BYTES}`) - } - change.extraBytes = change.extraLen - delete change.extraLen_datatype - - // Encoding and decoding again to compute the hash of the change - changes[i] = decodeChange(encodeChange(change)) - heads[changes[i].hash] = true - } - - const actualHeads = Object.keys(heads).sort() - let headsEqual = actualHeads.length === expectedHeads.length, - i = 0 - while (headsEqual && i < actualHeads.length) { - headsEqual = actualHeads[i] === expectedHeads[i] - i++ - } - if (!headsEqual) { - throw new RangeError( - `Mismatched heads hashes: expected ${expectedHeads.join( - ", " - )}, got ${actualHeads.join(", ")}` - ) - } -} - -function encodeDocumentHeader(doc) { - const { - changesColumns, - opsColumns, - actorIds, - heads, - headsIndexes, - extraBytes, - } = doc - for (let column of changesColumns) deflateColumn(column) - for (let column of opsColumns) deflateColumn(column) - - return encodeContainer(CHUNK_TYPE_DOCUMENT, encoder => { - 
encoder.appendUint53(actorIds.length) - for (let actor of actorIds) { - encoder.appendHexString(actor) - } - encoder.appendUint53(heads.length) - for (let head of heads.sort()) { - encoder.appendRawBytes(hexStringToBytes(head)) - } - encodeColumnInfo(encoder, changesColumns) - encodeColumnInfo(encoder, opsColumns) - for (let column of changesColumns) - encoder.appendRawBytes(column.encoder.buffer) - for (let column of opsColumns) encoder.appendRawBytes(column.encoder.buffer) - for (let index of headsIndexes) encoder.appendUint53(index) - if (extraBytes) encoder.appendRawBytes(extraBytes) - }).bytes -} - -function decodeDocumentHeader(buffer) { - const documentDecoder = new Decoder(buffer) - const header = decodeContainerHeader(documentDecoder, true) - const decoder = new Decoder(header.chunkData) - if (!documentDecoder.done) - throw new RangeError("Encoded document has trailing data") - if (header.chunkType !== CHUNK_TYPE_DOCUMENT) - throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) - - const actorIds = [], - numActors = decoder.readUint53() - for (let i = 0; i < numActors; i++) { - actorIds.push(decoder.readHexString()) - } - const heads = [], - headsIndexes = [], - numHeads = decoder.readUint53() - for (let i = 0; i < numHeads; i++) { - heads.push(bytesToHexString(decoder.readRawBytes(32))) - } - - const changesColumns = decodeColumnInfo(decoder) - const opsColumns = decodeColumnInfo(decoder) - for (let i = 0; i < changesColumns.length; i++) { - changesColumns[i].buffer = decoder.readRawBytes(changesColumns[i].bufferLen) - inflateColumn(changesColumns[i]) - } - for (let i = 0; i < opsColumns.length; i++) { - opsColumns[i].buffer = decoder.readRawBytes(opsColumns[i].bufferLen) - inflateColumn(opsColumns[i]) - } - if (!decoder.done) { - for (let i = 0; i < numHeads; i++) headsIndexes.push(decoder.readUint53()) - } - - const extraBytes = decoder.readRawBytes( - decoder.buf.byteLength - decoder.offset - ) - return { - changesColumns, - opsColumns, - 
actorIds, - heads, - headsIndexes, - extraBytes, - } -} - -function decodeDocument(buffer) { - const { changesColumns, opsColumns, actorIds, heads } = - decodeDocumentHeader(buffer) - const changes = decodeColumns(changesColumns, actorIds, DOCUMENT_COLUMNS) - const ops = decodeOps( - decodeColumns(opsColumns, actorIds, DOC_OPS_COLUMNS), - true - ) - groupChangeOps(changes, ops) - decodeDocumentChanges(changes, heads) - return changes -} - -/** - * DEFLATE-compresses the given column if it is large enough to make the compression worthwhile. - */ -function deflateColumn(column) { - if (column.encoder.buffer.byteLength >= DEFLATE_MIN_SIZE) { - column.encoder = { buffer: pako.deflateRaw(column.encoder.buffer) } - column.columnId |= COLUMN_TYPE_DEFLATE - } -} - -/** - * Decompresses the given column if it is DEFLATE-compressed. - */ -function inflateColumn(column) { - if ((column.columnId & COLUMN_TYPE_DEFLATE) !== 0) { - column.buffer = pako.inflateRaw(column.buffer) - column.columnId ^= COLUMN_TYPE_DEFLATE - } -} - -module.exports = { - COLUMN_TYPE, - VALUE_TYPE, - ACTIONS, - OBJECT_TYPE, - DOC_OPS_COLUMNS, - CHANGE_COLUMNS, - DOCUMENT_COLUMNS, - encoderByColumnId, - decoderByColumnId, - makeDecoders, - decodeValue, - splitContainers, - encodeChange, - decodeChangeColumns, - decodeChange, - decodeChangeMeta, - decodeChanges, - encodeDocumentHeader, - decodeDocumentHeader, - decodeDocument, -} diff --git a/javascript/test/legacy_tests.ts b/javascript/test/legacy_tests.ts deleted file mode 100644 index 8c2e552e..00000000 --- a/javascript/test/legacy_tests.ts +++ /dev/null @@ -1,1874 +0,0 @@ -import * as assert from "assert" -import { unstable as Automerge } from "../src" -import { assertEqualsOneOf } from "./helpers" -import { decodeChange } from "./legacy/columnar" - -const UUID_PATTERN = /^[0-9a-f]{32}$/ -const OPID_PATTERN = /^[0-9]+@([0-9a-f][0-9a-f])*$/ - -// CORE FEATURES -// -// TODO - Cursors -// TODO - Tables -// TODO - on-pass load() & reconstruct change from 
opset -// TODO - micro-patches (needed for fully hydrated object in js) -// TODO - valueAt(heads) / GC -// -// AUTOMERGE UNSUPPORTED -// -// TODO - patchCallback - -describe("Automerge", () => { - describe("initialization ", () => { - it("should initially be an empty map", () => { - const doc = Automerge.init() - assert.deepStrictEqual(doc, {}) - }) - - it("should allow instantiating from an existing object", () => { - const initialState = { birds: { wrens: 3, magpies: 4 } } - const doc = Automerge.from(initialState) - assert.deepStrictEqual(doc, initialState) - }) - - it("should allow merging of an object initialized with `from`", () => { - let doc1 = Automerge.from({ cards: [] }) - let doc2 = Automerge.merge(Automerge.init(), doc1) - assert.deepStrictEqual(doc2, { cards: [] }) - }) - - it("should allow passing an actorId when instantiating from an existing object", () => { - const actorId = "1234" - let doc = Automerge.from({ foo: 1 }, actorId) - assert.strictEqual(Automerge.getActorId(doc), "1234") - }) - - it("accepts an empty object as initial state", () => { - const doc = Automerge.from({}) - assert.deepStrictEqual(doc, {}) - }) - - it("accepts an array as initial state, but converts it to an object", () => { - // @ts-ignore - const doc = Automerge.from(["a", "b", "c"]) - assert.deepStrictEqual(doc, { "0": "a", "1": "b", "2": "c" }) - }) - - it("accepts strings as initial values, but treats them as an array of characters", () => { - // @ts-ignore - const doc = Automerge.from("abc") - assert.deepStrictEqual(doc, { "0": "a", "1": "b", "2": "c" }) - }) - - it("ignores numbers provided as initial values", () => { - // @ts-ignore - const doc = Automerge.from(123) - assert.deepStrictEqual(doc, {}) - }) - - it("ignores booleans provided as initial values", () => { - // @ts-ignore - const doc1 = Automerge.from(false) - assert.deepStrictEqual(doc1, {}) - // @ts-ignore - const doc2 = Automerge.from(true) - assert.deepStrictEqual(doc2, {}) - }) - }) - - 
describe("sequential use", () => { - let s1: Automerge.Doc, s2: Automerge.Doc - beforeEach(() => { - s1 = Automerge.init("aabbcc") - }) - - it("should not mutate objects", () => { - s2 = Automerge.change(s1, doc => (doc.foo = "bar")) - assert.strictEqual(s1.foo, undefined) - assert.strictEqual(s2.foo, "bar") - }) - - it("changes should be retrievable", () => { - const change1 = Automerge.getLastLocalChange(s1) - s2 = Automerge.change(s1, doc => (doc.foo = "bar")) - const change2 = Automerge.getLastLocalChange(s2) - assert.strictEqual(change1, undefined) - const change = Automerge.decodeChange(change2!) - assert.deepStrictEqual(change, { - actor: change.actor, - deps: [], - seq: 1, - startOp: 1, - hash: change.hash, - message: null, - time: change.time, - ops: [ - { obj: "_root", key: "foo", action: "makeText", pred: [] }, - { - action: "set", - elemId: "_head", - insert: true, - obj: "1@aabbcc", - pred: [], - value: "b", - }, - { - action: "set", - elemId: "2@aabbcc", - insert: true, - obj: "1@aabbcc", - pred: [], - value: "a", - }, - { - action: "set", - elemId: "3@aabbcc", - insert: true, - obj: "1@aabbcc", - pred: [], - value: "r", - }, - ], - }) - }) - - it("should not register any conflicts on repeated assignment", () => { - assert.strictEqual(Automerge.getConflicts(s1, "foo"), undefined) - s1 = Automerge.change(s1, "change", doc => (doc.foo = "one")) - assert.strictEqual(Automerge.getConflicts(s1, "foo"), undefined) - s1 = Automerge.change(s1, "change", doc => (doc.foo = "two")) - assert.strictEqual(Automerge.getConflicts(s1, "foo"), undefined) - }) - - describe("changes", () => { - it("should group several changes", () => { - s2 = Automerge.change(s1, "change message", doc => { - doc.first = "one" - assert.strictEqual(doc.first, "one") - doc.second = "two" - assert.deepStrictEqual(doc, { - first: "one", - second: "two", - }) - }) - assert.deepStrictEqual(s1, {}) - assert.deepStrictEqual(s2, { first: "one", second: "two" }) - }) - - it("should freeze objects 
if desired", () => { - s1 = Automerge.init({ freeze: true }) - s2 = Automerge.change(s1, doc => (doc.foo = "bar")) - try { - // @ts-ignore - s2.foo = "lemon" - } catch (e) {} - assert.strictEqual(s2.foo, "bar") - - let deleted = false - try { - // @ts-ignore - deleted = delete s2.foo - } catch (e) {} - assert.strictEqual(s2.foo, "bar") - assert.strictEqual(deleted, false) - - Automerge.change(s2, () => { - try { - // @ts-ignore - s2.foo = "lemon" - } catch (e) {} - assert.strictEqual(s2.foo, "bar") - }) - - assert.throws(() => { - Object.assign(s2, { x: 4 }) - }) - assert.strictEqual(s2.x, undefined) - }) - - it("should allow repeated reading and writing of values", () => { - s2 = Automerge.change(s1, "change message", doc => { - doc.value = "a" - assert.strictEqual(doc.value, "a") - doc.value = "b" - doc.value = "c" - assert.strictEqual(doc.value, "c") - }) - assert.deepStrictEqual(s1, {}) - assert.deepStrictEqual(s2, { value: "c" }) - }) - - it("should not record conflicts when writing the same field several times within one change", () => { - s1 = Automerge.change(s1, "change message", doc => { - doc.value = "a" - doc.value = "b" - doc.value = "c" - }) - assert.strictEqual(s1.value, "c") - assert.strictEqual(Automerge.getConflicts(s1, "value"), undefined) - }) - - it("should return the unchanged state object if nothing changed", () => { - s2 = Automerge.change(s1, () => {}) - assert.strictEqual(s2, s1) - }) - - it("should ignore field updates that write the existing value", () => { - s1 = Automerge.change(s1, doc => (doc.field = 123)) - s2 = Automerge.change(s1, doc => (doc.field = 123)) - assert.strictEqual(s2, s1) - }) - - it("should not ignore field updates that resolve a conflict", () => { - s2 = Automerge.merge(Automerge.init(), s1) - s1 = Automerge.change(s1, doc => (doc.field = 123)) - s2 = Automerge.change(s2, doc => (doc.field = 321)) - s1 = Automerge.merge(s1, s2) - assert.strictEqual( - Object.keys(Automerge.getConflicts(s1, "field")!).length, - 2 - ) 
- const resolved = Automerge.change(s1, doc => (doc.field = s1.field)) - assert.notStrictEqual(resolved, s1) - assert.deepStrictEqual(resolved, { field: s1.field }) - assert.strictEqual(Automerge.getConflicts(resolved, "field"), undefined) - }) - - it("should ignore list element updates that write the existing value", () => { - s1 = Automerge.change(s1, doc => (doc.list = [123])) - s2 = Automerge.change(s1, doc => (doc.list[0] = 123)) - assert.strictEqual(s2, s1) - }) - - it("should not ignore list element updates that resolve a conflict", () => { - s1 = Automerge.change(s1, doc => (doc.list = [1])) - s2 = Automerge.merge(Automerge.init(), s1) - s1 = Automerge.change(s1, doc => (doc.list[0] = 123)) - s2 = Automerge.change(s2, doc => (doc.list[0] = 321)) - s1 = Automerge.merge(s1, s2) - assert.deepStrictEqual(Automerge.getConflicts(s1.list, 0), { - [`3@${Automerge.getActorId(s1)}`]: 123, - [`3@${Automerge.getActorId(s2)}`]: 321, - }) - const resolved = Automerge.change(s1, doc => (doc.list[0] = s1.list[0])) - assert.deepStrictEqual(resolved, s1) - assert.notStrictEqual(resolved, s1) - assert.strictEqual(Automerge.getConflicts(resolved.list, 0), undefined) - }) - - it("should sanity-check arguments", () => { - s1 = Automerge.change(s1, doc => (doc.nested = {})) - assert.throws(() => { - // @ts-ignore - Automerge.change({}, doc => (doc.foo = "bar")) - }, /must be the document root/) - assert.throws(() => { - // @ts-ignore - Automerge.change(s1.nested, doc => (doc.foo = "bar")) - }, /must be the document root/) - }) - - it("should not allow nested change blocks", () => { - assert.throws(() => { - Automerge.change(s1, doc1 => { - Automerge.change(doc1, doc2 => { - // @ts-ignore - doc2.foo = "bar" - }) - }) - }, /Calls to Automerge.change cannot be nested/) - assert.throws(() => { - s1 = Automerge.change(s1, doc1 => { - s2 = Automerge.change(s1, doc2 => (doc2.two = 2)) - doc1.one = 1 - }) - }, /Attempting to change an outdated document/) - }) - - it("should not allow the 
same base document to be used for multiple changes", () => { - assert.throws(() => { - Automerge.change(s1, doc => (doc.one = 1)) - Automerge.change(s1, doc => (doc.two = 2)) - }, /Attempting to change an outdated document/) - }) - - it("should allow a document to be cloned", () => { - s1 = Automerge.change(s1, doc => (doc.zero = 0)) - s2 = Automerge.clone(s1) - s1 = Automerge.change(s1, doc => (doc.one = 1)) - s2 = Automerge.change(s2, doc => (doc.two = 2)) - assert.deepStrictEqual(s1, { zero: 0, one: 1 }) - assert.deepStrictEqual(s2, { zero: 0, two: 2 }) - Automerge.free(s1) - Automerge.free(s2) - }) - - it("should work with Object.assign merges", () => { - s1 = Automerge.change(s1, doc1 => { - doc1.stuff = { foo: "bar", baz: "blur" } - }) - s1 = Automerge.change(s1, doc1 => { - doc1.stuff = Object.assign({}, doc1.stuff, { baz: "updated!" }) - }) - assert.deepStrictEqual(s1, { stuff: { foo: "bar", baz: "updated!" } }) - }) - - it("should support Date objects in maps", () => { - const now = new Date() - s1 = Automerge.change(s1, doc => (doc.now = now)) - let changes = Automerge.getAllChanges(s1) - ;[s2] = Automerge.applyChanges(Automerge.init(), changes) - assert.strictEqual(s2.now instanceof Date, true) - assert.strictEqual(s2.now.getTime(), now.getTime()) - }) - - it("should support Date objects in lists", () => { - const now = new Date() - s1 = Automerge.change(s1, doc => (doc.list = [now])) - let changes = Automerge.getAllChanges(s1) - ;[s2] = Automerge.applyChanges(Automerge.init(), changes) - assert.strictEqual(s2.list[0] instanceof Date, true) - assert.strictEqual(s2.list[0].getTime(), now.getTime()) - }) - - it("should call patchCallback if supplied", () => { - const callbacks: Array<{ - patches: Array - before: Automerge.Doc - after: Automerge.Doc - }> = [] - const s2 = Automerge.change( - s1, - { - patchCallback: (patches, before, after) => - callbacks.push({ patches, before, after }), - }, - doc => { - doc.birds = ["Goldfinch"] - } - ) - 
assert.strictEqual(callbacks.length, 1) - assert.deepStrictEqual(callbacks[0].patches[0], { - action: "put", - path: ["birds"], - value: [], - }) - assert.deepStrictEqual(callbacks[0].patches[1], { - action: "insert", - path: ["birds", 0], - values: [""], - }) - assert.deepStrictEqual(callbacks[0].patches[2], { - action: "splice", - path: ["birds", 0, 0], - value: "Goldfinch", - }) - assert.strictEqual(callbacks[0].before, s1) - assert.strictEqual(callbacks[0].after, s2) - }) - - it("should call a patchCallback set up on document initialisation", () => { - const callbacks: Array<{ - patches: Array - before: Automerge.Doc - after: Automerge.Doc - }> = [] - s1 = Automerge.init({ - patchCallback: (patches, before, after) => - callbacks.push({ patches, before, after }), - }) - const s2 = Automerge.change(s1, doc => (doc.bird = "Goldfinch")) - assert.strictEqual(callbacks.length, 1) - assert.deepStrictEqual(callbacks[0].patches[0], { - action: "put", - path: ["bird"], - value: "", - }) - assert.deepStrictEqual(callbacks[0].patches[1], { - action: "splice", - path: ["bird", 0], - value: "Goldfinch", - }) - assert.strictEqual(callbacks[0].before, s1) - assert.strictEqual(callbacks[0].after, s2) - }) - }) - - describe("emptyChange()", () => { - it("should append an empty change to the history", () => { - s1 = Automerge.change(s1, "first change", doc => (doc.field = 123)) - s2 = Automerge.emptyChange(s1, "empty change") - assert.notStrictEqual(s2, s1) - assert.deepStrictEqual(s2, s1) - assert.deepStrictEqual( - Automerge.getHistory(s2).map(state => state.change.message), - ["first change", "empty change"] - ) - }) - - it("should reference dependencies", () => { - s1 = Automerge.change(s1, doc => (doc.field = 123)) - s2 = Automerge.merge(Automerge.init(), s1) - s2 = Automerge.change(s2, doc => (doc.other = "hello")) - s1 = Automerge.emptyChange(Automerge.merge(s1, s2)) - const history = Automerge.getHistory(s1) - const emptyChange = history[2].change - 
assert.deepStrictEqual( - emptyChange.deps, - [history[0].change.hash, history[1].change.hash].sort() - ) - assert.deepStrictEqual(emptyChange.ops, []) - }) - }) - - describe("root object", () => { - it("should handle single-property assignment", () => { - s1 = Automerge.change(s1, "set bar", doc => (doc.foo = "bar")) - s1 = Automerge.change(s1, "set zap", doc => (doc.zip = "zap")) - assert.strictEqual(s1.foo, "bar") - assert.strictEqual(s1.zip, "zap") - assert.deepStrictEqual(s1, { foo: "bar", zip: "zap" }) - }) - - it("should allow floating-point values", () => { - s1 = Automerge.change(s1, doc => (doc.number = 1589032171.1)) - assert.strictEqual(s1.number, 1589032171.1) - }) - - it("should handle multi-property assignment", () => { - s1 = Automerge.change(s1, "multi-assign", doc => { - Object.assign(doc, { foo: "bar", answer: 42 }) - }) - assert.strictEqual(s1.foo, "bar") - assert.strictEqual(s1.answer, 42) - assert.deepStrictEqual(s1, { foo: "bar", answer: 42 }) - }) - - it("should handle root property deletion", () => { - s1 = Automerge.change(s1, "set foo", doc => { - doc.foo = "bar" - doc.something = null - }) - s1 = Automerge.change(s1, "del foo", doc => { - delete doc.foo - }) - assert.strictEqual(s1.foo, undefined) - assert.strictEqual(s1.something, null) - assert.deepStrictEqual(s1, { something: null }) - }) - - it("should follow JS delete behavior", () => { - s1 = Automerge.change(s1, "set foo", doc => { - doc.foo = "bar" - }) - let deleted: any - s1 = Automerge.change(s1, "del foo", doc => { - deleted = delete doc.foo - }) - assert.strictEqual(deleted, true) - let deleted2: any - assert.doesNotThrow(() => { - s1 = Automerge.change(s1, "del baz", doc => { - deleted2 = delete doc.baz - }) - }) - assert.strictEqual(deleted2, true) - }) - - it("should allow the type of a property to be changed", () => { - s1 = Automerge.change(s1, "set number", doc => (doc.prop = 123)) - assert.strictEqual(s1.prop, 123) - s1 = Automerge.change(s1, "set string", doc => 
(doc.prop = "123")) - assert.strictEqual(s1.prop, "123") - s1 = Automerge.change(s1, "set null", doc => (doc.prop = null)) - assert.strictEqual(s1.prop, null) - s1 = Automerge.change(s1, "set bool", doc => (doc.prop = true)) - assert.strictEqual(s1.prop, true) - }) - - it("should require property names to be valid", () => { - assert.throws(() => { - Automerge.change(s1, "foo", doc => (doc[""] = "x")) - }, /must not be an empty string/) - }) - - it("should not allow assignment of unsupported datatypes", () => { - Automerge.change(s1, doc => { - assert.throws(() => { - doc.foo = undefined - }, /Unsupported type of value: undefined/) - assert.throws(() => { - doc.foo = { prop: undefined } - }, /Unsupported type of value: undefined/) - assert.throws(() => { - doc.foo = () => {} - }, /Unsupported type of value: function/) - assert.throws(() => { - doc.foo = Symbol("foo") - }, /Unsupported type of value: symbol/) - }) - }) - }) - - describe("nested maps", () => { - it("should assign an objectId to nested maps", () => { - s1 = Automerge.change(s1, doc => { - doc.nested = {} - }) - Automerge.getObjectId(s1.nested) - assert.strictEqual( - OPID_PATTERN.test(Automerge.getObjectId(s1.nested)!), - true - ) - assert.notEqual(Automerge.getObjectId(s1.nested), "_root") - }) - - it("should handle assignment of a nested property", () => { - s1 = Automerge.change(s1, "first change", doc => { - doc.nested = {} - doc.nested.foo = "bar" - }) - s1 = Automerge.change(s1, "second change", doc => { - doc.nested.one = 1 - }) - assert.deepStrictEqual(s1, { nested: { foo: "bar", one: 1 } }) - assert.deepStrictEqual(s1.nested, { foo: "bar", one: 1 }) - assert.strictEqual(s1.nested.foo, "bar") - assert.strictEqual(s1.nested.one, 1) - }) - - it("should handle assignment of an object literal", () => { - s1 = Automerge.change(s1, doc => { - doc.textStyle = { bold: false, fontSize: 12 } - }) - assert.deepStrictEqual(s1, { - textStyle: { bold: false, fontSize: 12 }, - }) - 
assert.deepStrictEqual(s1.textStyle, { bold: false, fontSize: 12 }) - assert.strictEqual(s1.textStyle.bold, false) - assert.strictEqual(s1.textStyle.fontSize, 12) - }) - - it("should handle assignment of multiple nested properties", () => { - s1 = Automerge.change(s1, doc => { - doc.textStyle = { bold: false, fontSize: 12 } - Object.assign(doc.textStyle, { typeface: "Optima", fontSize: 14 }) - }) - assert.strictEqual(s1.textStyle.typeface, "Optima") - assert.strictEqual(s1.textStyle.bold, false) - assert.strictEqual(s1.textStyle.fontSize, 14) - assert.deepStrictEqual(s1.textStyle, { - typeface: "Optima", - bold: false, - fontSize: 14, - }) - }) - - it("should handle arbitrary-depth nesting", () => { - s1 = Automerge.change(s1, doc => { - doc.a = { b: { c: { d: { e: { f: { g: "h" } } } } } } - }) - s1 = Automerge.change(s1, doc => { - doc.a.b.c.d.e.f.i = "j" - }) - assert.deepStrictEqual(s1, { - a: { b: { c: { d: { e: { f: { g: "h", i: "j" } } } } } }, - }) - assert.strictEqual(s1.a.b.c.d.e.f.g, "h") - assert.strictEqual(s1.a.b.c.d.e.f.i, "j") - }) - - it("should allow an old object to be replaced with a new one", () => { - s1 = Automerge.change(s1, "change 1", doc => { - doc.myPet = { species: "dog", legs: 4, breed: "dachshund" } - }) - let s2 = Automerge.change(s1, "change 2", doc => { - doc.myPet = { - species: "koi", - variety: "紅白", - colors: { red: true, white: true, black: false }, - } - }) - assert.deepStrictEqual(s1.myPet, { - species: "dog", - legs: 4, - breed: "dachshund", - }) - assert.strictEqual(s1.myPet.breed, "dachshund") - assert.deepStrictEqual(s2.myPet, { - species: "koi", - variety: "紅白", - colors: { red: true, white: true, black: false }, - }) - // @ts-ignore - assert.strictEqual(s2.myPet.breed, undefined) - assert.strictEqual(s2.myPet.variety, "紅白") - }) - - it("should allow fields to be changed between primitive and nested map", () => { - s1 = Automerge.change(s1, doc => (doc.color = "#ff7f00")) - assert.strictEqual(s1.color, "#ff7f00") - s1 = 
Automerge.change( - s1, - doc => (doc.color = { red: 255, green: 127, blue: 0 }) - ) - assert.deepStrictEqual(s1.color, { red: 255, green: 127, blue: 0 }) - s1 = Automerge.change(s1, doc => (doc.color = "#ff7f00")) - assert.strictEqual(s1.color, "#ff7f00") - }) - - it("should not allow several references to the same map object", () => { - s1 = Automerge.change(s1, doc => (doc.object = {})) - assert.throws(() => { - Automerge.change(s1, doc => { - doc.x = doc.object - }) - }, /Cannot create a reference to an existing document object/) - assert.throws(() => { - Automerge.change(s1, doc => { - doc.x = s1.object - }) - }, /Cannot create a reference to an existing document object/) - assert.throws(() => { - Automerge.change(s1, doc => { - doc.x = {} - doc.y = doc.x - }) - }, /Cannot create a reference to an existing document object/) - }) - - it("should not allow object-copying idioms", () => { - s1 = Automerge.change(s1, doc => { - doc.items = [ - { id: "id1", name: "one" }, - { id: "id2", name: "two" }, - ] - }) - // People who have previously worked with immutable state in JavaScript may be tempted - // to use idioms like this, which don't work well with Automerge -- see e.g. 
- // https://github.com/automerge/automerge/issues/260 - assert.throws(() => { - Automerge.change(s1, doc => { - doc.items = [...doc.items, { id: "id3", name: "three" }] - }) - }, /Cannot create a reference to an existing document object/) - }) - - it("should handle deletion of properties within a map", () => { - s1 = Automerge.change(s1, "set style", doc => { - doc.textStyle = { typeface: "Optima", bold: false, fontSize: 12 } - }) - s1 = Automerge.change(s1, "non-bold", doc => delete doc.textStyle.bold) - assert.strictEqual(s1.textStyle.bold, undefined) - assert.deepStrictEqual(s1.textStyle, { - typeface: "Optima", - fontSize: 12, - }) - }) - - it("should handle deletion of references to a map", () => { - s1 = Automerge.change(s1, "make rich text doc", doc => { - Object.assign(doc, { - title: "Hello", - textStyle: { typeface: "Optima", fontSize: 12 }, - }) - }) - s1 = Automerge.change(s1, doc => delete doc.textStyle) - assert.strictEqual(s1.textStyle, undefined) - assert.deepStrictEqual(s1, { title: "Hello" }) - }) - - it("should validate field names", () => { - s1 = Automerge.change(s1, doc => (doc.nested = {})) - assert.throws(() => { - Automerge.change(s1, doc => (doc.nested[""] = "x")) - }, /must not be an empty string/) - assert.throws(() => { - Automerge.change(s1, doc => (doc.nested = { "": "x" })) - }, /must not be an empty string/) - }) - }) - - describe("lists", () => { - it("should allow elements to be inserted", () => { - s1 = Automerge.change(s1, doc => (doc.noodles = [])) - s1 = Automerge.change(s1, doc => - doc.noodles.insertAt(0, "udon", "soba") - ) - s1 = Automerge.change(s1, doc => doc.noodles.insertAt(1, "ramen")) - assert.deepStrictEqual(s1, { noodles: ["udon", "ramen", "soba"] }) - assert.deepStrictEqual(s1.noodles, ["udon", "ramen", "soba"]) - assert.strictEqual(s1.noodles[0], "udon") - assert.strictEqual(s1.noodles[1], "ramen") - assert.strictEqual(s1.noodles[2], "soba") - assert.strictEqual(s1.noodles.length, 3) - }) - - it("should handle 
assignment of a list literal", () => { - s1 = Automerge.change( - s1, - doc => (doc.noodles = ["udon", "ramen", "soba"]) - ) - assert.deepStrictEqual(s1, { noodles: ["udon", "ramen", "soba"] }) - assert.deepStrictEqual(s1.noodles, ["udon", "ramen", "soba"]) - assert.strictEqual(s1.noodles[0], "udon") - assert.strictEqual(s1.noodles[1], "ramen") - assert.strictEqual(s1.noodles[2], "soba") - assert.strictEqual(s1.noodles[3], undefined) - assert.strictEqual(s1.noodles.length, 3) - }) - - it("should only allow numeric indexes", () => { - s1 = Automerge.change( - s1, - doc => (doc.noodles = ["udon", "ramen", "soba"]) - ) - s1 = Automerge.change(s1, doc => (doc.noodles[1] = "Ramen!")) - assert.strictEqual(s1.noodles[1], "Ramen!") - s1 = Automerge.change(s1, doc => (doc.noodles["1"] = "RAMEN!!!")) - assert.strictEqual(s1.noodles[1], "RAMEN!!!") - assert.throws(() => { - Automerge.change(s1, doc => (doc.noodles.favourite = "udon")) - }, /list index must be a number/) - assert.throws(() => { - Automerge.change(s1, doc => (doc.noodles[""] = "udon")) - }, /list index must be a number/) - assert.throws(() => { - Automerge.change(s1, doc => (doc.noodles["1e6"] = "udon")) - }, /list index must be a number/) - }) - - it("should handle deletion of list elements", () => { - s1 = Automerge.change( - s1, - doc => (doc.noodles = ["udon", "ramen", "soba"]) - ) - s1 = Automerge.change(s1, doc => delete doc.noodles[1]) - assert.deepStrictEqual(s1.noodles, ["udon", "soba"]) - s1 = Automerge.change(s1, doc => doc.noodles.deleteAt(1)) - assert.deepStrictEqual(s1.noodles, ["udon"]) - assert.strictEqual(s1.noodles[0], "udon") - assert.strictEqual(s1.noodles[1], undefined) - assert.strictEqual(s1.noodles[2], undefined) - assert.strictEqual(s1.noodles.length, 1) - }) - - it("should handle assignment of individual list indexes", () => { - s1 = Automerge.change( - s1, - doc => (doc.japaneseFood = ["udon", "ramen", "soba"]) - ) - s1 = Automerge.change(s1, doc => (doc.japaneseFood[1] = "sushi")) - 
assert.deepStrictEqual(s1.japaneseFood, ["udon", "sushi", "soba"]) - assert.strictEqual(s1.japaneseFood[0], "udon") - assert.strictEqual(s1.japaneseFood[1], "sushi") - assert.strictEqual(s1.japaneseFood[2], "soba") - assert.strictEqual(s1.japaneseFood[3], undefined) - assert.strictEqual(s1.japaneseFood.length, 3) - }) - - it("concurrent edits insert in reverse actorid order if counters equal", () => { - s1 = Automerge.init("aaaa") - s2 = Automerge.init("bbbb") - s1 = Automerge.change(s1, doc => (doc.list = [])) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.list.splice(0, 0, "2@aaaa")) - s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "2@bbbb")) - s2 = Automerge.merge(s2, s1) - assert.deepStrictEqual(Automerge.toJS(s2).list, ["2@bbbb", "2@aaaa"]) - }) - - it("concurrent edits insert in reverse counter order if different", () => { - s1 = Automerge.init("aaaa") - s2 = Automerge.init("bbbb") - s1 = Automerge.change(s1, doc => (doc.list = [])) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.list.splice(0, 0, "2@aaaa")) - s2 = Automerge.change(s2, doc => (doc.foo = "2@bbbb")) - s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "3@bbbb")) - s2 = Automerge.merge(s2, s1) - assert.deepStrictEqual(s2.list, ["3@bbbb", "2@aaaa"]) - }) - - it("should treat out-by-one assignment as insertion", () => { - s1 = Automerge.change(s1, doc => (doc.japaneseFood = ["udon"])) - s1 = Automerge.change(s1, doc => (doc.japaneseFood[1] = "sushi")) - assert.deepStrictEqual(s1.japaneseFood, ["udon", "sushi"]) - assert.strictEqual(s1.japaneseFood[0], "udon") - assert.strictEqual(s1.japaneseFood[1], "sushi") - assert.strictEqual(s1.japaneseFood[2], undefined) - assert.strictEqual(s1.japaneseFood.length, 2) - }) - - it("should not allow out-of-range assignment", () => { - s1 = Automerge.change(s1, doc => (doc.japaneseFood = ["udon"])) - assert.throws(() => { - Automerge.change(s1, doc => (doc.japaneseFood[4] = "ramen")) - }, /is out of 
bounds/) - }) - - it("should allow bulk assignment of multiple list indexes", () => { - s1 = Automerge.change( - s1, - doc => (doc.noodles = ["udon", "ramen", "soba"]) - ) - s1 = Automerge.change(s1, doc => - Object.assign(doc.noodles, { 0: "うどん", 2: "そば" }) - ) - assert.deepStrictEqual(s1.noodles, ["うどん", "ramen", "そば"]) - assert.strictEqual(s1.noodles[0], "うどん") - assert.strictEqual(s1.noodles[1], "ramen") - assert.strictEqual(s1.noodles[2], "そば") - assert.strictEqual(s1.noodles.length, 3) - }) - - it("should handle nested objects", () => { - s1 = Automerge.change( - s1, - doc => - (doc.noodles = [{ type: "ramen", dishes: ["tonkotsu", "shoyu"] }]) - ) - s1 = Automerge.change(s1, doc => - doc.noodles.push({ type: "udon", dishes: ["tempura udon"] }) - ) - s1 = Automerge.change(s1, doc => doc.noodles[0].dishes.push("miso")) - assert.deepStrictEqual(s1, { - noodles: [ - { type: "ramen", dishes: ["tonkotsu", "shoyu", "miso"] }, - { type: "udon", dishes: ["tempura udon"] }, - ], - }) - assert.deepStrictEqual(s1.noodles[0], { - type: "ramen", - dishes: ["tonkotsu", "shoyu", "miso"], - }) - assert.deepStrictEqual(s1.noodles[1], { - type: "udon", - dishes: ["tempura udon"], - }) - }) - - it("should handle nested lists", () => { - s1 = Automerge.change( - s1, - doc => (doc.noodleMatrix = [["ramen", "tonkotsu", "shoyu"]]) - ) - s1 = Automerge.change(s1, doc => - doc.noodleMatrix.push(["udon", "tempura udon"]) - ) - s1 = Automerge.change(s1, doc => doc.noodleMatrix[0].push("miso")) - assert.deepStrictEqual(s1.noodleMatrix, [ - ["ramen", "tonkotsu", "shoyu", "miso"], - ["udon", "tempura udon"], - ]) - assert.deepStrictEqual(s1.noodleMatrix[0], [ - "ramen", - "tonkotsu", - "shoyu", - "miso", - ]) - assert.deepStrictEqual(s1.noodleMatrix[1], ["udon", "tempura udon"]) - }) - - it("should handle deep nesting", () => { - s1 = Automerge.change( - s1, - doc => - (doc.nesting = { - maps: { m1: { m2: { foo: "bar", baz: {} }, m2a: {} } }, - lists: [ - [1, 2, 3], - [[3, 4, 5, [6]], 7], 
- ], - mapsinlists: [{ foo: "bar" }, [{ bar: "baz" }]], - listsinmaps: { foo: [1, 2, 3], bar: [[{ baz: "123" }]] }, - }) - ) - s1 = Automerge.change(s1, doc => { - doc.nesting.maps.m1a = "123" - doc.nesting.maps.m1.m2.baz.xxx = "123" - delete doc.nesting.maps.m1.m2a - doc.nesting.lists.shift() - doc.nesting.lists[0][0].pop() - doc.nesting.lists[0][0].push(100) - doc.nesting.mapsinlists[0].foo = "baz" - doc.nesting.mapsinlists[1][0].foo = "bar" - delete doc.nesting.mapsinlists[1] - doc.nesting.listsinmaps.foo.push(4) - doc.nesting.listsinmaps.bar[0][0].baz = "456" - delete doc.nesting.listsinmaps.bar - }) - assert.deepStrictEqual(s1, { - nesting: { - maps: { - m1: { m2: { foo: "bar", baz: { xxx: "123" } } }, - m1a: "123", - }, - lists: [[[3, 4, 5, 100], 7]], - mapsinlists: [{ foo: "baz" }], - listsinmaps: { foo: [1, 2, 3, 4] }, - }, - }) - }) - - it("should handle replacement of the entire list", () => { - s1 = Automerge.change( - s1, - doc => (doc.noodles = ["udon", "soba", "ramen"]) - ) - s1 = Automerge.change( - s1, - doc => (doc.japaneseNoodles = doc.noodles.slice()) - ) - s1 = Automerge.change(s1, doc => (doc.noodles = ["wonton", "pho"])) - assert.deepStrictEqual(s1, { - noodles: ["wonton", "pho"], - japaneseNoodles: ["udon", "soba", "ramen"], - }) - assert.deepStrictEqual(s1.noodles, ["wonton", "pho"]) - assert.strictEqual(s1.noodles[0], "wonton") - assert.strictEqual(s1.noodles[1], "pho") - assert.strictEqual(s1.noodles[2], undefined) - assert.strictEqual(s1.noodles.length, 2) - }) - - it("should allow assignment to change the type of a list element", () => { - s1 = Automerge.change( - s1, - doc => (doc.noodles = ["udon", "soba", "ramen"]) - ) - assert.deepStrictEqual(s1.noodles, ["udon", "soba", "ramen"]) - s1 = Automerge.change( - s1, - doc => (doc.noodles[1] = { type: "soba", options: ["hot", "cold"] }) - ) - assert.deepStrictEqual(s1.noodles, [ - "udon", - { type: "soba", options: ["hot", "cold"] }, - "ramen", - ]) - s1 = Automerge.change( - s1, - doc => 
(doc.noodles[1] = ["hot soba", "cold soba"]) - ) - assert.deepStrictEqual(s1.noodles, [ - "udon", - ["hot soba", "cold soba"], - "ramen", - ]) - s1 = Automerge.change(s1, doc => (doc.noodles[1] = "soba is the best")) - assert.deepStrictEqual(s1.noodles, [ - "udon", - "soba is the best", - "ramen", - ]) - }) - - it("should allow list creation and assignment in the same change callback", () => { - s1 = Automerge.change(Automerge.init(), doc => { - doc.letters = ["a", "b", "c"] - doc.letters[1] = "d" - }) - assert.strictEqual(s1.letters[1], "d") - }) - - it("should allow adding and removing list elements in the same change callback", () => { - let s1 = Automerge.change( - Automerge.init<{ noodles: Array }>(), - // @ts-ignore - doc => (doc.noodles = []) - ) - s1 = Automerge.change(s1, doc => { - doc.noodles.push("udon") - // @ts-ignore - doc.noodles.deleteAt(0) - }) - assert.deepStrictEqual(s1, { noodles: [] }) - // do the add-remove cycle twice, test for #151 (https://github.com/automerge/automerge/issues/151) - s1 = Automerge.change(s1, doc => { - // @ts-ignore - doc.noodles.push("soba") - // @ts-ignore - doc.noodles.deleteAt(0) - }) - assert.deepStrictEqual(s1, { noodles: [] }) - }) - - it("should handle arbitrary-depth nesting", () => { - s1 = Automerge.change( - s1, - doc => (doc.maze = [[[[[[[["noodles", ["here"]]]]]]]]]) - ) - s1 = Automerge.change(s1, doc => - doc.maze[0][0][0][0][0][0][0][1].unshift("found") - ) - assert.deepStrictEqual(s1.maze, [ - [[[[[[["noodles", ["found", "here"]]]]]]]], - ]) - assert.deepStrictEqual(s1.maze[0][0][0][0][0][0][0][1][1], "here") - s2 = Automerge.load(Automerge.save(s1)) - assert.deepStrictEqual(s1, s2) - }) - - it("should not allow several references to the same list object", () => { - s1 = Automerge.change(s1, doc => (doc.list = [])) - assert.throws(() => { - Automerge.change(s1, doc => { - doc.x = doc.list - }) - }, /Cannot create a reference to an existing document object/) - assert.throws(() => { - Automerge.change(s1, 
doc => { - doc.x = s1.list - }) - }, /Cannot create a reference to an existing document object/) - assert.throws(() => { - Automerge.change(s1, doc => { - doc.x = [] - doc.y = doc.x - }) - }, /Cannot create a reference to an existing document object/) - }) - }) - - describe("counters", () => { - // counter - it("should allow deleting counters from maps", () => { - const s1 = Automerge.change( - Automerge.init(), - doc => (doc.birds = { wrens: new Automerge.Counter(1) }) - ) - const s2 = Automerge.change(s1, doc => doc.birds.wrens.increment(2)) - const s3 = Automerge.change(s2, doc => delete doc.birds.wrens) - assert.deepStrictEqual(s2, { - birds: { wrens: new Automerge.Counter(3) }, - }) - assert.deepStrictEqual(s3, { birds: {} }) - }) - - // counter - /* - it('should not allow deleting counters from lists', () => { - const s1 = Automerge.change(Automerge.init(), doc => doc.recordings = [new Automerge.Counter(1)]) - const s2 = Automerge.change(s1, doc => doc.recordings[0].increment(2)) - assert.deepStrictEqual(s2, {recordings: [new Automerge.Counter(3)]}) - assert.throws(() => { Automerge.change(s2, doc => doc.recordings.deleteAt(0)) }, /Unsupported operation/) - }) - */ - }) - }) - - describe("concurrent use", () => { - let s1: Automerge.Doc, - s2: Automerge.Doc, - s3: Automerge.Doc, - s4: Automerge.Doc - beforeEach(() => { - s1 = Automerge.init() - s2 = Automerge.init() - s3 = Automerge.init() - s4 = Automerge.init() - }) - - it("should merge concurrent updates of different properties", () => { - s1 = Automerge.change(s1, doc => (doc.foo = "bar")) - s2 = Automerge.change(s2, doc => (doc.hello = "world")) - s3 = Automerge.merge(s1, s2) - assert.strictEqual(s3.foo, "bar") - assert.strictEqual(s3.hello, "world") - assert.deepStrictEqual(s3, { foo: "bar", hello: "world" }) - assert.strictEqual(Automerge.getConflicts(s3, "foo"), undefined) - assert.strictEqual(Automerge.getConflicts(s3, "hello"), undefined) - s4 = Automerge.load(Automerge.save(s3)) - 
assert.deepEqual(s3, s4) - }) - - it("should add concurrent increments of the same property", () => { - s1 = Automerge.change(s1, doc => (doc.counter = new Automerge.Counter())) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.counter.increment()) - s2 = Automerge.change(s2, doc => doc.counter.increment(2)) - s3 = Automerge.merge(s1, s2) - assert.strictEqual(s1.counter.value, 1) - assert.strictEqual(s2.counter.value, 2) - assert.strictEqual(s3.counter.value, 3) - assert.strictEqual(Automerge.getConflicts(s3, "counter"), undefined) - s4 = Automerge.load(Automerge.save(s3)) - assert.deepEqual(s3, s4) - }) - - it("should add increments only to the values they precede", () => { - s1 = Automerge.change(s1, doc => (doc.counter = new Automerge.Counter(0))) - s1 = Automerge.change(s1, doc => doc.counter.increment()) - s2 = Automerge.change( - s2, - doc => (doc.counter = new Automerge.Counter(100)) - ) - s2 = Automerge.change(s2, doc => doc.counter.increment(3)) - s3 = Automerge.merge(s1, s2) - if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) { - assert.deepStrictEqual(s3, { counter: new Automerge.Counter(1) }) - } else { - assert.deepStrictEqual(s3, { counter: new Automerge.Counter(103) }) - } - assert.deepStrictEqual(Automerge.getConflicts(s3, "counter"), { - [`1@${Automerge.getActorId(s1)}`]: new Automerge.Counter(1), - [`1@${Automerge.getActorId(s2)}`]: new Automerge.Counter(103), - }) - s4 = Automerge.load(Automerge.save(s3)) - assert.deepEqual(s3, s4) - }) - - it("should detect concurrent updates of the same field", () => { - s1 = Automerge.change(s1, doc => (doc.field = "one")) - s2 = Automerge.change(s2, doc => (doc.field = "two")) - s3 = Automerge.merge(s1, s2) - if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) { - assert.deepStrictEqual(s3, { field: "one" }) - } else { - assert.deepStrictEqual(s3, { field: "two" }) - } - assert.deepStrictEqual(Automerge.getConflicts(s3, "field"), { - [`1@${Automerge.getActorId(s1)}`]: "one", - 
[`1@${Automerge.getActorId(s2)}`]: "two", - }) - }) - - it("should detect concurrent updates of the same list element", () => { - s1 = Automerge.change(s1, doc => (doc.birds = ["finch"])) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => (doc.birds[0] = "greenfinch")) - s2 = Automerge.change(s2, doc => (doc.birds[0] = "goldfinch_")) - s3 = Automerge.merge(s1, s2) - if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) { - assert.deepStrictEqual(s3.birds, ["greenfinch"]) - } else { - assert.deepStrictEqual(s3.birds, ["goldfinch_"]) - } - assert.deepStrictEqual(Automerge.getConflicts(s3.birds, 0), { - [`8@${Automerge.getActorId(s1)}`]: "greenfinch", - [`8@${Automerge.getActorId(s2)}`]: "goldfinch_", - }) - }) - - it("should handle assignment conflicts of different types", () => { - s1 = Automerge.change(s1, doc => (doc.field = "string")) - s2 = Automerge.change(s2, doc => (doc.field = ["list"])) - s3 = Automerge.change(s3, doc => (doc.field = { thing: "map" })) - s1 = Automerge.merge(Automerge.merge(s1, s2), s3) - assertEqualsOneOf(s1.field, "string", ["list"], { thing: "map" }) - assert.deepStrictEqual(Automerge.getConflicts(s1, "field"), { - [`1@${Automerge.getActorId(s1)}`]: "string", - [`1@${Automerge.getActorId(s2)}`]: ["list"], - [`1@${Automerge.getActorId(s3)}`]: { thing: "map" }, - }) - }) - - it("should handle changes within a conflicting map field", () => { - s1 = Automerge.change(s1, doc => (doc.field = "string")) - s2 = Automerge.change(s2, doc => (doc.field = {})) - s2 = Automerge.change(s2, doc => (doc.field.innerKey = 42)) - s3 = Automerge.merge(s1, s2) - assertEqualsOneOf(s3.field, "string", { innerKey: 42 }) - assert.deepStrictEqual(Automerge.getConflicts(s3, "field"), { - [`1@${Automerge.getActorId(s1)}`]: "string", - [`1@${Automerge.getActorId(s2)}`]: { innerKey: 42 }, - }) - }) - - it("should handle changes within a conflicting list element", () => { - s1 = Automerge.change(s1, doc => (doc.list = ["hello"])) - s2 = 
Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => (doc.list[0] = { map1: true })) - s1 = Automerge.change(s1, doc => (doc.list[0].key = 1)) - s2 = Automerge.change(s2, doc => (doc.list[0] = { map2: true })) - s2 = Automerge.change(s2, doc => (doc.list[0].key = 2)) - s3 = Automerge.merge(s1, s2) - if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) { - assert.deepStrictEqual(s3.list, [{ map1: true, key: 1 }]) - } else { - assert.deepStrictEqual(s3.list, [{ map2: true, key: 2 }]) - } - assert.deepStrictEqual(Automerge.getConflicts(s3.list, 0), { - [`8@${Automerge.getActorId(s1)}`]: { map1: true, key: 1 }, - [`8@${Automerge.getActorId(s2)}`]: { map2: true, key: 2 }, - }) - }) - - it("should not merge concurrently assigned nested maps", () => { - s1 = Automerge.change(s1, doc => (doc.config = { background: "blue" })) - s2 = Automerge.change(s2, doc => (doc.config = { logo_url: "logo.png" })) - s3 = Automerge.merge(s1, s2) - assertEqualsOneOf( - s3.config, - { background: "blue" }, - { logo_url: "logo.png" } - ) - assert.deepStrictEqual(Automerge.getConflicts(s3, "config"), { - [`1@${Automerge.getActorId(s1)}`]: { background: "blue" }, - [`1@${Automerge.getActorId(s2)}`]: { logo_url: "logo.png" }, - }) - }) - - it("should clear conflicts after assigning a new value", () => { - s1 = Automerge.change(s1, doc => (doc.field = "one")) - s2 = Automerge.change(s2, doc => (doc.field = "two")) - s3 = Automerge.merge(s1, s2) - s3 = Automerge.change(s3, doc => (doc.field = "three")) - assert.deepStrictEqual(s3, { field: "three" }) - assert.strictEqual(Automerge.getConflicts(s3, "field"), undefined) - s2 = Automerge.merge(s2, s3) - assert.deepStrictEqual(s2, { field: "three" }) - assert.strictEqual(Automerge.getConflicts(s2, "field"), undefined) - }) - - it("should handle concurrent insertions at different list positions", () => { - s1 = Automerge.change(s1, doc => (doc.list = ["one", "three"])) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => 
doc.list.splice(1, 0, "two")) - s2 = Automerge.change(s2, doc => doc.list.push("four")) - s3 = Automerge.merge(s1, s2) - assert.deepStrictEqual(s3, { list: ["one", "two", "three", "four"] }) - assert.strictEqual(Automerge.getConflicts(s3, "list"), undefined) - }) - - it("should handle concurrent insertions at the same list position", () => { - s1 = Automerge.change(s1, doc => (doc.birds = ["parakeet"])) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.birds.push("starling")) - s2 = Automerge.change(s2, doc => doc.birds.push("chaffinch")) - s3 = Automerge.merge(s1, s2) - assertEqualsOneOf( - s3.birds, - ["parakeet", "starling", "chaffinch"], - ["parakeet", "chaffinch", "starling"] - ) - s2 = Automerge.merge(s2, s3) - assert.deepStrictEqual(s2, s3) - }) - - it("should handle concurrent assignment and deletion of a map entry", () => { - // Add-wins semantics - s1 = Automerge.change(s1, doc => (doc.bestBird = "robin")) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => delete doc.bestBird) - s2 = Automerge.change(s2, doc => (doc.bestBird = "magpie")) - s3 = Automerge.merge(s1, s2) - assert.deepStrictEqual(s1, {}) - assert.deepStrictEqual(s2, { bestBird: "magpie" }) - assert.deepStrictEqual(s3, { bestBird: "magpie" }) - assert.strictEqual(Automerge.getConflicts(s3, "bestBird"), undefined) - }) - - it("should handle concurrent assignment and deletion of a list element", () => { - // Concurrent assignment ressurects a deleted list element. 
Perhaps a little - // surprising, but consistent with add-wins semantics of maps (see test above) - s1 = Automerge.change( - s1, - doc => (doc.birds = ["blackbird", "thrush", "goldfinch"]) - ) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => (doc.birds[1] = "starling")) - s2 = Automerge.change(s2, doc => doc.birds.splice(1, 1)) - s3 = Automerge.merge(s1, s2) - assert.deepStrictEqual(s1.birds, ["blackbird", "starling", "goldfinch"]) - assert.deepStrictEqual(s2.birds, ["blackbird", "goldfinch"]) - assert.deepStrictEqual(s3.birds, ["blackbird", "starling", "goldfinch"]) - s4 = Automerge.load(Automerge.save(s3)) - assert.deepStrictEqual(s3, s4) - }) - - it("should handle insertion after a deleted list element", () => { - s1 = Automerge.change( - s1, - doc => (doc.birds = ["blackbird", "thrush", "goldfinch"]) - ) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.birds.splice(1, 2)) - s2 = Automerge.change(s2, doc => doc.birds.splice(2, 0, "starling")) - s3 = Automerge.merge(s1, s2) - assert.deepStrictEqual(s3, { birds: ["blackbird", "starling"] }) - assert.deepStrictEqual(Automerge.merge(s2, s3), { - birds: ["blackbird", "starling"], - }) - }) - - it("should handle concurrent deletion of the same element", () => { - s1 = Automerge.change( - s1, - doc => (doc.birds = ["albatross", "buzzard", "cormorant"]) - ) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.birds.deleteAt(1)) // buzzard - s2 = Automerge.change(s2, doc => doc.birds.deleteAt(1)) // buzzard - s3 = Automerge.merge(s1, s2) - assert.deepStrictEqual(s3.birds, ["albatross", "cormorant"]) - }) - - it("should handle concurrent deletion of different elements", () => { - s1 = Automerge.change( - s1, - doc => (doc.birds = ["albatross", "buzzard", "cormorant"]) - ) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.birds.deleteAt(0)) // albatross - s2 = Automerge.change(s2, doc => doc.birds.deleteAt(1)) // buzzard - s3 = Automerge.merge(s1, 
s2) - assert.deepStrictEqual(s3.birds, ["cormorant"]) - }) - - it("should handle concurrent updates at different levels of the tree", () => { - // A delete higher up in the tree overrides an update in a subtree - s1 = Automerge.change( - s1, - doc => - (doc.animals = { - birds: { pink: "flamingo", black: "starling" }, - mammals: ["badger"], - }) - ) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => (doc.animals.birds.brown = "sparrow")) - s2 = Automerge.change(s2, doc => delete doc.animals.birds) - s3 = Automerge.merge(s1, s2) - assert.deepStrictEqual(s1.animals, { - birds: { - pink: "flamingo", - brown: "sparrow", - black: "starling", - }, - mammals: ["badger"], - }) - assert.deepStrictEqual(s2.animals, { mammals: ["badger"] }) - assert.deepStrictEqual(s3.animals, { mammals: ["badger"] }) - }) - - it("should handle updates of concurrently deleted objects", () => { - s1 = Automerge.change( - s1, - doc => (doc.birds = { blackbird: { feathers: "black" } }) - ) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => delete doc.birds.blackbird) - s2 = Automerge.change(s2, doc => (doc.birds.blackbird.beak = "orange")) - s3 = Automerge.merge(s1, s2) - assert.deepStrictEqual(s1, { birds: {} }) - }) - - it("should not interleave sequence insertions at the same position", () => { - s1 = Automerge.change(s1, doc => (doc.wisdom = [])) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => - doc.wisdom.push("to", "be", "is", "to", "do") - ) - s2 = Automerge.change(s2, doc => - doc.wisdom.push("to", "do", "is", "to", "be") - ) - s3 = Automerge.merge(s1, s2) - assertEqualsOneOf( - s3.wisdom, - ["to", "be", "is", "to", "do", "to", "do", "is", "to", "be"], - ["to", "do", "is", "to", "be", "to", "be", "is", "to", "do"] - ) - // In case you're wondering: http://quoteinvestigator.com/2013/09/16/do-be-do/ - }) - - describe("multiple insertions at the same list position", () => { - it("should handle insertion by greater actor ID", () => { - s1 = 
Automerge.init("aaaa") - s2 = Automerge.init("bbbb") - s1 = Automerge.change(s1, doc => (doc.list = ["two"])) - s2 = Automerge.merge(s2, s1) - s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "one")) - assert.deepStrictEqual(s2.list, ["one", "two"]) - }) - - it("should handle insertion by lesser actor ID", () => { - s1 = Automerge.init("bbbb") - s2 = Automerge.init("aaaa") - s1 = Automerge.change(s1, doc => (doc.list = ["two"])) - s2 = Automerge.merge(s2, s1) - s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "one")) - assert.deepStrictEqual(s2.list, ["one", "two"]) - }) - - it("should handle insertion regardless of actor ID", () => { - s1 = Automerge.change(s1, doc => (doc.list = ["two"])) - s2 = Automerge.merge(s2, s1) - s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "one")) - assert.deepStrictEqual(s2.list, ["one", "two"]) - }) - - it("should make insertion order consistent with causality", () => { - s1 = Automerge.change(s1, doc => (doc.list = ["four"])) - s2 = Automerge.merge(s2, s1) - s2 = Automerge.change(s2, doc => doc.list.unshift("three")) - s1 = Automerge.merge(s1, s2) - s1 = Automerge.change(s1, doc => doc.list.unshift("two")) - s2 = Automerge.merge(s2, s1) - s2 = Automerge.change(s2, doc => doc.list.unshift("one")) - assert.deepStrictEqual(s2.list, ["one", "two", "three", "four"]) - }) - }) - }) - - describe("saving and loading", () => { - it("should save and restore an empty document", () => { - let s = Automerge.load(Automerge.save(Automerge.init())) - assert.deepStrictEqual(s, {}) - }) - - it("should generate a new random actor ID", () => { - let s1 = Automerge.init() - let s2 = Automerge.load(Automerge.save(s1)) - assert.strictEqual( - UUID_PATTERN.test(Automerge.getActorId(s1).toString()), - true - ) - assert.strictEqual( - UUID_PATTERN.test(Automerge.getActorId(s2).toString()), - true - ) - assert.notEqual(Automerge.getActorId(s1), Automerge.getActorId(s2)) - }) - - it("should allow a custom actor ID to be set", () => { - let 
s = Automerge.load(Automerge.save(Automerge.init()), "333333") - assert.strictEqual(Automerge.getActorId(s), "333333") - }) - - it("should reconstitute complex datatypes", () => { - let s1 = Automerge.change( - Automerge.init(), - doc => (doc.todos = [{ title: "water plants", done: false }]) - ) - let s2 = Automerge.load(Automerge.save(s1)) - assert.deepStrictEqual(s2, { - todos: [{ title: "water plants", done: false }], - }) - }) - - it("should save and load maps with @ symbols in the keys", () => { - let s1 = Automerge.change( - Automerge.init(), - doc => (doc["123@4567"] = "hello") - ) - let s2 = Automerge.load(Automerge.save(s1)) - assert.deepStrictEqual(s2, { "123@4567": "hello" }) - }) - - it("should reconstitute conflicts", () => { - let s1 = Automerge.change( - Automerge.init("111111"), - doc => (doc.x = 3) - ) - let s2 = Automerge.change( - Automerge.init("222222"), - doc => (doc.x = 5) - ) - s1 = Automerge.merge(s1, s2) - let s3 = Automerge.load(Automerge.save(s1)) - assert.strictEqual(s1.x, 5) - assert.strictEqual(s3.x, 5) - assert.deepStrictEqual(Automerge.getConflicts(s1, "x"), { - "1@111111": 3, - "1@222222": 5, - }) - assert.deepStrictEqual(Automerge.getConflicts(s3, "x"), { - "1@111111": 3, - "1@222222": 5, - }) - }) - - it("should reconstitute element ID counters", () => { - const s1 = Automerge.init("01234567") - const s2 = Automerge.change(s1, doc => (doc.list = ["a"])) - const listId = Automerge.getObjectId(s2.list) - const changes12 = Automerge.getAllChanges(s2).map(Automerge.decodeChange) - assert.deepStrictEqual(changes12, [ - { - hash: changes12[0].hash, - actor: "01234567", - seq: 1, - startOp: 1, - time: changes12[0].time, - message: null, - deps: [], - ops: [ - { obj: "_root", action: "makeList", key: "list", pred: [] }, - { - obj: listId, - action: "makeText", - elemId: "_head", - insert: true, - pred: [], - }, - { - obj: "2@01234567", - action: "set", - elemId: "_head", - insert: true, - value: "a", - pred: [], - }, - ], - }, - ]) - 
const s3 = Automerge.change(s2, doc => doc.list.deleteAt(0)) - const s4 = Automerge.load(Automerge.save(s3), "01234567") - const s5 = Automerge.change(s4, doc => doc.list.push("b")) - const changes45 = Automerge.getAllChanges(s5).map(Automerge.decodeChange) - assert.deepStrictEqual(s5, { list: ["b"] }) - assert.deepStrictEqual(changes45[2], { - hash: changes45[2].hash, - actor: "01234567", - seq: 3, - startOp: 5, - time: changes45[2].time, - message: null, - deps: [changes45[1].hash], - ops: [ - { - obj: listId, - action: "makeText", - elemId: "_head", - insert: true, - pred: [], - }, - { - obj: "5@01234567", - action: "set", - elemId: "_head", - insert: true, - value: "b", - pred: [], - }, - ], - }) - }) - - it("should allow a reloaded list to be mutated", () => { - let doc = Automerge.change(Automerge.init(), doc => (doc.foo = [])) - doc = Automerge.load(Automerge.save(doc)) - doc = Automerge.change(doc, "add", doc => doc.foo.push(1)) - doc = Automerge.load(Automerge.save(doc)) - assert.deepStrictEqual(doc.foo, [1]) - }) - - it("should reload a document containing deflated columns", () => { - // In this test, the keyCtr column is long enough for deflate compression to kick in, but the - // keyStr column is short. Thus, the deflate bit gets set for keyCtr but not for keyStr. - // When checking whether the columns appear in ascending order, we must ignore the deflate bit. 
- let doc = Automerge.change(Automerge.init(), doc => { - doc.list = [] - for (let i = 0; i < 200; i++) - doc.list.insertAt(Math.floor(Math.random() * i), "a") - }) - Automerge.load(Automerge.save(doc)) - let expected: Array = [] - for (let i = 0; i < 200; i++) expected.push("a") - assert.deepStrictEqual(doc, { list: expected }) - }) - - it.skip("should call patchCallback if supplied to load", () => { - const s1 = Automerge.change( - Automerge.init(), - doc => (doc.birds = ["Goldfinch"]) - ) - const s2 = Automerge.change(s1, doc => doc.birds.push("Chaffinch")) - const callbacks: Array = [], - actor = Automerge.getActorId(s1) - const reloaded = Automerge.load(Automerge.save(s2), { - patchCallback(patch, before, after) { - callbacks.push({ patch, before, after }) - }, - }) - assert.strictEqual(callbacks.length, 1) - assert.deepStrictEqual(callbacks[0].patch, { - maxOp: 3, - deps: [decodeChange(Automerge.getAllChanges(s2)[1]).hash], - clock: { [actor]: 2 }, - pendingChanges: 0, - diffs: { - objectId: "_root", - type: "map", - props: { - birds: { - [`1@${actor}`]: { - objectId: `1@${actor}`, - type: "list", - edits: [ - { - action: "multi-insert", - index: 0, - elemId: `2@${actor}`, - values: ["Goldfinch", "Chaffinch"], - }, - ], - }, - }, - }, - }, - }) - assert.deepStrictEqual(callbacks[0].before, {}) - assert.strictEqual(callbacks[0].after, reloaded) - assert.strictEqual(callbacks[0].local, false) - }) - }) - - describe("history API", () => { - it("should return an empty history for an empty document", () => { - assert.deepStrictEqual(Automerge.getHistory(Automerge.init()), []) - }) - - it("should make past document states accessible", () => { - let s = Automerge.init() - s = Automerge.change(s, doc => (doc.config = { background: "blue" })) - s = Automerge.change(s, doc => (doc.birds = ["mallard"])) - s = Automerge.change(s, doc => doc.birds.unshift("oystercatcher")) - assert.deepStrictEqual( - Automerge.getHistory(s).map(state => state.snapshot), - [ - { config: { 
background: "blue" } }, - { config: { background: "blue" }, birds: ["mallard"] }, - { - config: { background: "blue" }, - birds: ["oystercatcher", "mallard"], - }, - ] - ) - }) - - it("should make change messages accessible", () => { - let s = Automerge.init() - s = Automerge.change(s, "Empty Bookshelf", doc => (doc.books = [])) - s = Automerge.change(s, "Add Orwell", doc => - doc.books.push("Nineteen Eighty-Four") - ) - s = Automerge.change(s, "Add Huxley", doc => - doc.books.push("Brave New World") - ) - assert.deepStrictEqual(s.books, [ - "Nineteen Eighty-Four", - "Brave New World", - ]) - assert.deepStrictEqual( - Automerge.getHistory(s).map(state => state.change.message), - ["Empty Bookshelf", "Add Orwell", "Add Huxley"] - ) - }) - }) - - describe("changes API", () => { - it("should return an empty list on an empty document", () => { - let changes = Automerge.getAllChanges(Automerge.init()) - assert.deepStrictEqual(changes, []) - }) - - it("should return an empty list when nothing changed", () => { - let s1 = Automerge.change( - Automerge.init(), - doc => (doc.birds = ["Chaffinch"]) - ) - assert.deepStrictEqual(Automerge.getChanges(s1, s1), []) - }) - - it("should do nothing when applying an empty list of changes", () => { - let s1 = Automerge.change( - Automerge.init(), - doc => (doc.birds = ["Chaffinch"]) - ) - assert.deepStrictEqual(Automerge.applyChanges(s1, [])[0], s1) - }) - - it("should return all changes when compared to an empty document", () => { - let s1 = Automerge.change( - Automerge.init(), - "Add Chaffinch", - doc => (doc.birds = ["Chaffinch"]) - ) - let s2 = Automerge.change(s1, "Add Bullfinch", doc => - doc.birds.push("Bullfinch") - ) - let changes = Automerge.getChanges(Automerge.init(), s2) - assert.strictEqual(changes.length, 2) - }) - - it("should allow a document copy to be reconstructed from scratch", () => { - let s1 = Automerge.change( - Automerge.init(), - "Add Chaffinch", - doc => (doc.birds = ["Chaffinch"]) - ) - let s2 = 
Automerge.change(s1, "Add Bullfinch", doc => - doc.birds.push("Bullfinch") - ) - let changes = Automerge.getAllChanges(s2) - let [s3] = Automerge.applyChanges(Automerge.init(), changes) - assert.deepStrictEqual(s3.birds, ["Chaffinch", "Bullfinch"]) - }) - - it("should return changes since the last given version", () => { - let s1 = Automerge.change( - Automerge.init(), - "Add Chaffinch", - doc => (doc.birds = ["Chaffinch"]) - ) - let changes1 = Automerge.getAllChanges(s1) - let s2 = Automerge.change(s1, "Add Bullfinch", doc => - doc.birds.push("Bullfinch") - ) - let changes2 = Automerge.getChanges(s1, s2) - assert.strictEqual(changes1.length, 1) // Add Chaffinch - assert.strictEqual(changes2.length, 1) // Add Bullfinch - }) - - it("should incrementally apply changes since the last given version", () => { - let s1 = Automerge.change( - Automerge.init(), - "Add Chaffinch", - doc => (doc.birds = ["Chaffinch"]) - ) - let changes1 = Automerge.getAllChanges(s1) - let s2 = Automerge.change(s1, "Add Bullfinch", doc => - doc.birds.push("Bullfinch") - ) - let changes2 = Automerge.getChanges(s1, s2) - let [s3] = Automerge.applyChanges(Automerge.init(), changes1) - let [s4] = Automerge.applyChanges(s3, changes2) - assert.deepStrictEqual(s3.birds, ["Chaffinch"]) - assert.deepStrictEqual(s4.birds, ["Chaffinch", "Bullfinch"]) - }) - - it("should handle updates to a list element", () => { - let s1 = Automerge.change( - Automerge.init(), - doc => (doc.birds = ["Chaffinch", "Bullfinch"]) - ) - let s2 = Automerge.change(s1, doc => (doc.birds[0] = "Goldfinch")) - let [s3] = Automerge.applyChanges( - Automerge.init(), - Automerge.getAllChanges(s2) - ) - assert.deepStrictEqual(s3.birds, ["Goldfinch", "Bullfinch"]) - assert.strictEqual(Automerge.getConflicts(s3.birds, 0), undefined) - }) - - // TEXT - it("should handle updates to a text object", () => { - let s1 = Automerge.change(Automerge.init(), doc => (doc.text = "ab")) - let s2 = Automerge.change(s1, doc => - Automerge.splice(doc, 
"text", 0, 1, "A") - ) - let [s3] = Automerge.applyChanges( - Automerge.init(), - Automerge.getAllChanges(s2) - ) - assert.deepStrictEqual([...s3.text], ["A", "b"]) - }) - - /* - it.skip('should report missing dependencies', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch']) - let s2 = Automerge.merge(Automerge.init(), s1) - s2 = Automerge.change(s2, doc => doc.birds.push('Bullfinch')) - let changes = Automerge.getAllChanges(s2) - let [s3, patch] = Automerge.applyChanges(Automerge.init(), [changes[1]]) - assert.deepStrictEqual(s3, {}) - assert.deepStrictEqual(Automerge.Backend.getMissingDeps(Automerge.Frontend.getBackendState(s3)), - decodeChange(changes[1]).deps) - assert.strictEqual(patch.pendingChanges, 1) - ;[s3, patch] = Automerge.applyChanges(s3, [changes[0]]) - assert.deepStrictEqual(s3.birds, ['Chaffinch', 'Bullfinch']) - assert.deepStrictEqual(Automerge.Backend.getMissingDeps(Automerge.Frontend.getBackendState(s3)), []) - assert.strictEqual(patch.pendingChanges, 0) - }) - */ - - it("should report missing dependencies with out-of-order applyChanges", () => { - let s0 = Automerge.init() - let s1 = Automerge.change(s0, doc => (doc.test = ["a"])) - let changes01 = Automerge.getAllChanges(s1) - let s2 = Automerge.change(s1, doc => (doc.test = ["b"])) - let changes12 = Automerge.getChanges(s1, s2) - let s3 = Automerge.change(s2, doc => (doc.test = ["c"])) - let changes23 = Automerge.getChanges(s2, s3) - let s4 = Automerge.init() - let [s5] = Automerge.applyChanges(s4, changes23) - let [s6] = Automerge.applyChanges(s5, changes12) - assert.deepStrictEqual(Automerge.getMissingDeps(s6, []), [ - decodeChange(changes01[0]).hash, - ]) - }) - - it("should call patchCallback if supplied when applying changes", () => { - const s1 = Automerge.change( - Automerge.init(), - doc => (doc.birds = ["Goldfinch"]) - ) - const callbacks: Array = [] - const before = Automerge.init() - const [after] = Automerge.applyChanges( - before, - 
Automerge.getAllChanges(s1), - { - patchCallback(patch, before, after) { - callbacks.push({ patch, before, after }) - }, - } - ) - assert.strictEqual(callbacks.length, 1) - assert.deepStrictEqual(callbacks[0].patch[0], { - action: "put", - path: ["birds"], - value: [], - }) - assert.deepStrictEqual(callbacks[0].patch[1], { - action: "insert", - path: ["birds", 0], - values: [""], - }) - assert.deepStrictEqual(callbacks[0].patch[2], { - action: "splice", - path: ["birds", 0, 0], - value: "Goldfinch", - }) - assert.strictEqual(callbacks[0].before, before) - assert.strictEqual(callbacks[0].after, after) - }) - - it("should merge multiple applied changes into one patch", () => { - const s1 = Automerge.change( - Automerge.init(), - doc => (doc.birds = ["Goldfinch"]) - ) - const s2 = Automerge.change(s1, doc => doc.birds.push("Chaffinch")) - const patches: Array = [] - Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2), { - patchCallback: p => patches.push(...p), - }) - assert.deepStrictEqual(patches, [ - { action: "put", path: ["birds"], value: [] }, - { action: "insert", path: ["birds", 0], values: ["", ""] }, - { action: "splice", path: ["birds", 0, 0], value: "Goldfinch" }, - { action: "splice", path: ["birds", 1, 0], value: "Chaffinch" }, - ]) - }) - - it("should call a patchCallback registered on doc initialisation", () => { - const s1 = Automerge.change( - Automerge.init(), - doc => (doc.bird = "Goldfinch") - ) - const patches: Array = [] - const before = Automerge.init({ - patchCallback: p => patches.push(...p), - }) - Automerge.applyChanges(before, Automerge.getAllChanges(s1)) - assert.deepStrictEqual(patches, [ - { action: "put", path: ["bird"], value: "" }, - { action: "splice", path: ["bird", 0], value: "Goldfinch" }, - ]) - }) - }) -}) diff --git a/javascript/test/stable_unstable_interop.ts b/javascript/test/stable_unstable_interop.ts deleted file mode 100644 index dc57f338..00000000 --- a/javascript/test/stable_unstable_interop.ts +++ 
/dev/null @@ -1,99 +0,0 @@ -import * as assert from "assert" -import * as stable from "../src" -import { unstable } from "../src" - -describe("stable/unstable interop", () => { - it("should allow reading Text from stable as strings in unstable", () => { - let stableDoc = stable.from({ - text: new stable.Text("abc"), - }) - let unstableDoc = unstable.init() - unstableDoc = unstable.merge(unstableDoc, stableDoc) - assert.deepStrictEqual(unstableDoc.text, "abc") - }) - - it("should allow string from stable as Text in unstable", () => { - let unstableDoc = unstable.from({ - text: "abc", - }) - let stableDoc = stable.init() - stableDoc = unstable.merge(stableDoc, unstableDoc) - assert.deepStrictEqual(stableDoc.text, new stable.Text("abc")) - }) - - it("should allow reading strings from stable as RawString in unstable", () => { - let stableDoc = stable.from({ - text: "abc", - }) - let unstableDoc = unstable.init() - unstableDoc = unstable.merge(unstableDoc, stableDoc) - assert.deepStrictEqual(unstableDoc.text, new unstable.RawString("abc")) - }) - - it("should allow reading RawString from unstable as string in stable", () => { - let unstableDoc = unstable.from({ - text: new unstable.RawString("abc"), - }) - let stableDoc = stable.init() - stableDoc = unstable.merge(stableDoc, unstableDoc) - assert.deepStrictEqual(stableDoc.text, "abc") - }) - - it("should show conflicts on text objects", () => { - let doc1 = stable.from({ text: new stable.Text("abc") }, "bb") - let doc2 = stable.from({ text: new stable.Text("def") }, "aa") - doc1 = stable.merge(doc1, doc2) - let conflicts = stable.getConflicts(doc1, "text")! - assert.equal(conflicts["1@bb"]!.toString(), "abc") - assert.equal(conflicts["1@aa"]!.toString(), "def") - - let unstableDoc = unstable.init() - unstableDoc = unstable.merge(unstableDoc, doc1) - let conflicts2 = unstable.getConflicts(unstableDoc, "text")! 
- assert.equal(conflicts2["1@bb"]!.toString(), "abc") - assert.equal(conflicts2["1@aa"]!.toString(), "def") - }) - - it("should allow filling a list with text in stable", () => { - let doc = stable.from<{ list: Array }>({ - list: [null, null, null], - }) - doc = stable.change(doc, doc => { - doc.list.fill(new stable.Text("abc"), 0, 3) - }) - assert.deepStrictEqual(doc.list, [ - new stable.Text("abc"), - new stable.Text("abc"), - new stable.Text("abc"), - ]) - }) - - it("should allow filling a list with text in unstable", () => { - let doc = unstable.from<{ list: Array }>({ - list: [null, null, null], - }) - doc = stable.change(doc, doc => { - doc.list.fill("abc", 0, 3) - }) - assert.deepStrictEqual(doc.list, ["abc", "abc", "abc"]) - }) - - it("should allow splicing text into a list on stable", () => { - let doc = stable.from<{ list: Array }>({ list: [] }) - doc = stable.change(doc, doc => { - doc.list.splice(0, 0, new stable.Text("abc"), new stable.Text("def")) - }) - assert.deepStrictEqual(doc.list, [ - new stable.Text("abc"), - new stable.Text("def"), - ]) - }) - - it("should allow splicing text into a list on unstable", () => { - let doc = unstable.from<{ list: Array }>({ list: [] }) - doc = unstable.change(doc, doc => { - doc.list.splice(0, 0, "abc", "def") - }) - assert.deepStrictEqual(doc.list, ["abc", "def"]) - }) -}) diff --git a/javascript/test/text_test.ts b/javascript/test/text_test.ts deleted file mode 100644 index 518c7d2b..00000000 --- a/javascript/test/text_test.ts +++ /dev/null @@ -1,111 +0,0 @@ -import * as assert from "assert" -import { unstable as Automerge } from "../src" -import { assertEqualsOneOf } from "./helpers" - -type DocType = { - text: string - [key: string]: any -} - -describe("Automerge.Text", () => { - let s1: Automerge.Doc, s2: Automerge.Doc - beforeEach(() => { - s1 = Automerge.change(Automerge.init(), doc => (doc.text = "")) - s2 = Automerge.merge(Automerge.init(), s1) - }) - - it("should support insertion", () => { - s1 = 
Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, "a")) - assert.strictEqual(s1.text.length, 1) - assert.strictEqual(s1.text[0], "a") - assert.strictEqual(s1.text, "a") - //assert.strictEqual(s1.text.getElemId(0), `2@${Automerge.getActorId(s1)}`) - }) - - it("should support deletion", () => { - s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, "abc")) - s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 1, 1)) - assert.strictEqual(s1.text.length, 2) - assert.strictEqual(s1.text[0], "a") - assert.strictEqual(s1.text[1], "c") - assert.strictEqual(s1.text, "ac") - }) - - it("should support implicit and explicit deletion", () => { - s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, "abc")) - s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 1, 1)) - s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 1, 0)) - assert.strictEqual(s1.text.length, 2) - assert.strictEqual(s1.text[0], "a") - assert.strictEqual(s1.text[1], "c") - assert.strictEqual(s1.text, "ac") - }) - - it("should handle concurrent insertion", () => { - s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, "abc")) - s2 = Automerge.change(s2, doc => Automerge.splice(doc, "text", 0, 0, "xyz")) - s1 = Automerge.merge(s1, s2) - assert.strictEqual(s1.text.length, 6) - assertEqualsOneOf(s1.text, "abcxyz", "xyzabc") - }) - - it("should handle text and other ops in the same change", () => { - s1 = Automerge.change(s1, doc => { - doc.foo = "bar" - Automerge.splice(doc, "text", 0, 0, "a") - }) - assert.strictEqual(s1.foo, "bar") - assert.strictEqual(s1.text, "a") - assert.strictEqual(s1.text, "a") - }) - - it("should serialize to JSON as a simple string", () => { - s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, 'a"b')) - assert.strictEqual(JSON.stringify(s1), '{"text":"a\\"b"}') - }) - - it("should allow modification after an object is assigned to a document", () => { - s1 = 
Automerge.change(Automerge.init(), doc => { - doc.text = "" - Automerge.splice(doc, "text", 0, 0, "abcd") - Automerge.splice(doc, "text", 2, 1) - assert.strictEqual(doc.text, "abd") - }) - assert.strictEqual(s1.text, "abd") - }) - - it("should not allow modification outside of a change callback", () => { - assert.throws( - () => Automerge.splice(s1, "text", 0, 0, "a"), - /object cannot be modified outside of a change block/ - ) - }) - - describe("with initial value", () => { - it("should initialize text in Automerge.from()", () => { - let s1 = Automerge.from({ text: "init" }) - assert.strictEqual(s1.text.length, 4) - assert.strictEqual(s1.text[0], "i") - assert.strictEqual(s1.text[1], "n") - assert.strictEqual(s1.text[2], "i") - assert.strictEqual(s1.text[3], "t") - assert.strictEqual(s1.text, "init") - }) - - it("should encode the initial value as a change", () => { - const s1 = Automerge.from({ text: "init" }) - const changes = Automerge.getAllChanges(s1) - assert.strictEqual(changes.length, 1) - const [s2] = Automerge.applyChanges(Automerge.init(), changes) - assert.strictEqual(s2.text, "init") - assert.strictEqual(s2.text, "init") - }) - }) - - it("should support unicode when creating text", () => { - s1 = Automerge.from({ - text: "🐦", - }) - assert.strictEqual(s1.text, "🐦") - }) -}) diff --git a/javascript/test/text_v1.ts b/javascript/test/text_v1.ts deleted file mode 100644 index b111530f..00000000 --- a/javascript/test/text_v1.ts +++ /dev/null @@ -1,281 +0,0 @@ -import * as assert from "assert" -import * as Automerge from "../src" -import { assertEqualsOneOf } from "./helpers" - -type DocType = { text: Automerge.Text; [key: string]: any } - -describe("Automerge.Text", () => { - let s1: Automerge.Doc, s2: Automerge.Doc - beforeEach(() => { - s1 = Automerge.change( - Automerge.init(), - doc => (doc.text = new Automerge.Text()) - ) - s2 = Automerge.merge(Automerge.init(), s1) - }) - - it("should support insertion", () => { - s1 = Automerge.change(s1, doc => 
doc.text.insertAt(0, "a")) - assert.strictEqual(s1.text.length, 1) - assert.strictEqual(s1.text.get(0), "a") - assert.strictEqual(s1.text.toString(), "a") - //assert.strictEqual(s1.text.getElemId(0), `2@${Automerge.getActorId(s1)}`) - }) - - it("should support deletion", () => { - s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a", "b", "c")) - s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 1)) - assert.strictEqual(s1.text.length, 2) - assert.strictEqual(s1.text.get(0), "a") - assert.strictEqual(s1.text.get(1), "c") - assert.strictEqual(s1.text.toString(), "ac") - }) - - it("should support implicit and explicit deletion", () => { - s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a", "b", "c")) - s1 = Automerge.change(s1, doc => doc.text.deleteAt(1)) - s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 0)) - assert.strictEqual(s1.text.length, 2) - assert.strictEqual(s1.text.get(0), "a") - assert.strictEqual(s1.text.get(1), "c") - assert.strictEqual(s1.text.toString(), "ac") - }) - - it("should handle concurrent insertion", () => { - s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a", "b", "c")) - s2 = Automerge.change(s2, doc => doc.text.insertAt(0, "x", "y", "z")) - s1 = Automerge.merge(s1, s2) - assert.strictEqual(s1.text.length, 6) - assertEqualsOneOf(s1.text.toString(), "abcxyz", "xyzabc") - assertEqualsOneOf(s1.text.join(""), "abcxyz", "xyzabc") - }) - - it("should handle text and other ops in the same change", () => { - s1 = Automerge.change(s1, doc => { - doc.foo = "bar" - doc.text.insertAt(0, "a") - }) - assert.strictEqual(s1.foo, "bar") - assert.strictEqual(s1.text.toString(), "a") - assert.strictEqual(s1.text.join(""), "a") - }) - - it("should serialize to JSON as a simple string", () => { - s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a", '"', "b")) - assert.strictEqual(JSON.stringify(s1), '{"text":"a\\"b"}') - }) - - it("should allow modification before an object is assigned to a document", () => { - s1 = 
Automerge.change(Automerge.init(), doc => { - const text = new Automerge.Text() - text.insertAt(0, "a", "b", "c", "d") - text.deleteAt(2) - doc.text = text - assert.strictEqual(doc.text.toString(), "abd") - assert.strictEqual(doc.text.join(""), "abd") - }) - assert.strictEqual(s1.text.toString(), "abd") - assert.strictEqual(s1.text.join(""), "abd") - }) - - it("should allow modification after an object is assigned to a document", () => { - s1 = Automerge.change(Automerge.init(), doc => { - const text = new Automerge.Text() - doc.text = text - doc.text.insertAt(0, "a", "b", "c", "d") - doc.text.deleteAt(2) - assert.strictEqual(doc.text.toString(), "abd") - assert.strictEqual(doc.text.join(""), "abd") - }) - assert.strictEqual(s1.text.join(""), "abd") - }) - - it("should not allow modification outside of a change callback", () => { - assert.throws( - () => s1.text.insertAt(0, "a"), - /object cannot be modified outside of a change block/ - ) - }) - - describe("with initial value", () => { - it("should accept a string as initial value", () => { - let s1 = Automerge.change( - Automerge.init(), - doc => (doc.text = new Automerge.Text("init")) - ) - assert.strictEqual(s1.text.length, 4) - assert.strictEqual(s1.text.get(0), "i") - assert.strictEqual(s1.text.get(1), "n") - assert.strictEqual(s1.text.get(2), "i") - assert.strictEqual(s1.text.get(3), "t") - assert.strictEqual(s1.text.toString(), "init") - }) - - it("should accept an array as initial value", () => { - let s1 = Automerge.change( - Automerge.init(), - doc => (doc.text = new Automerge.Text(["i", "n", "i", "t"])) - ) - assert.strictEqual(s1.text.length, 4) - assert.strictEqual(s1.text.get(0), "i") - assert.strictEqual(s1.text.get(1), "n") - assert.strictEqual(s1.text.get(2), "i") - assert.strictEqual(s1.text.get(3), "t") - assert.strictEqual(s1.text.toString(), "init") - }) - - it("should initialize text in Automerge.from()", () => { - let s1 = Automerge.from({ text: new Automerge.Text("init") }) - 
assert.strictEqual(s1.text.length, 4) - assert.strictEqual(s1.text.get(0), "i") - assert.strictEqual(s1.text.get(1), "n") - assert.strictEqual(s1.text.get(2), "i") - assert.strictEqual(s1.text.get(3), "t") - assert.strictEqual(s1.text.toString(), "init") - }) - - it("should encode the initial value as a change", () => { - const s1 = Automerge.from({ text: new Automerge.Text("init") }) - const changes = Automerge.getAllChanges(s1) - assert.strictEqual(changes.length, 1) - const [s2] = Automerge.applyChanges(Automerge.init(), changes) - assert.strictEqual(s2.text instanceof Automerge.Text, true) - assert.strictEqual(s2.text.toString(), "init") - assert.strictEqual(s2.text.join(""), "init") - }) - - it("should allow immediate access to the value", () => { - Automerge.change(Automerge.init(), doc => { - const text = new Automerge.Text("init") - assert.strictEqual(text.length, 4) - assert.strictEqual(text.get(0), "i") - assert.strictEqual(text.toString(), "init") - doc.text = text - assert.strictEqual(doc.text.length, 4) - assert.strictEqual(doc.text.get(0), "i") - assert.strictEqual(doc.text.toString(), "init") - }) - }) - - it("should allow pre-assignment modification of the initial value", () => { - let s1 = Automerge.change(Automerge.init(), doc => { - const text = new Automerge.Text("init") - text.deleteAt(3) - assert.strictEqual(text.join(""), "ini") - doc.text = text - assert.strictEqual(doc.text.join(""), "ini") - assert.strictEqual(doc.text.toString(), "ini") - }) - assert.strictEqual(s1.text.toString(), "ini") - assert.strictEqual(s1.text.join(""), "ini") - }) - - it("should allow post-assignment modification of the initial value", () => { - let s1 = Automerge.change(Automerge.init(), doc => { - const text = new Automerge.Text("init") - doc.text = text - doc.text.deleteAt(0) - doc.text.insertAt(0, "I") - assert.strictEqual(doc.text.join(""), "Init") - assert.strictEqual(doc.text.toString(), "Init") - }) - assert.strictEqual(s1.text.join(""), "Init") - 
assert.strictEqual(s1.text.toString(), "Init") - }) - }) - - describe("non-textual control characters", () => { - let s1: Automerge.Doc - beforeEach(() => { - s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text() - doc.text.insertAt(0, "a") - doc.text.insertAt(1, { attribute: "bold" }) - }) - }) - - it("should allow fetching non-textual characters", () => { - assert.deepEqual(s1.text.get(1), { attribute: "bold" }) - //assert.strictEqual(s1.text.getElemId(1), `3@${Automerge.getActorId(s1)}`) - }) - - it("should include control characters in string length", () => { - assert.strictEqual(s1.text.length, 2) - assert.strictEqual(s1.text.get(0), "a") - }) - - it("should replace control characters from toString()", () => { - assert.strictEqual(s1.text.toString(), "a\uFFFC") - }) - - it("should allow control characters to be updated", () => { - const s2 = Automerge.change( - s1, - doc => (doc.text.get(1)!.attribute = "italic") - ) - const s3 = Automerge.load(Automerge.save(s2)) - assert.strictEqual(s1.text.get(1).attribute, "bold") - assert.strictEqual(s2.text.get(1).attribute, "italic") - assert.strictEqual(s3.text.get(1).attribute, "italic") - }) - - describe("spans interface to Text", () => { - it("should return a simple string as a single span", () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text("hello world") - }) - assert.deepEqual(s1.text.toSpans(), ["hello world"]) - }) - it("should return an empty string as an empty array", () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text() - }) - assert.deepEqual(s1.text.toSpans(), []) - }) - it("should split a span at a control character", () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text("hello world") - doc.text.insertAt(5, { attributes: { bold: true } }) - }) - assert.deepEqual(s1.text.toSpans(), [ - "hello", - { attributes: { bold: true } }, - " world", - ]) - }) - 
it("should allow consecutive control characters", () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text("hello world") - doc.text.insertAt(5, { attributes: { bold: true } }) - doc.text.insertAt(6, { attributes: { italic: true } }) - }) - assert.deepEqual(s1.text.toSpans(), [ - "hello", - { attributes: { bold: true } }, - { attributes: { italic: true } }, - " world", - ]) - }) - it("should allow non-consecutive control characters", () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text("hello world") - doc.text.insertAt(5, { attributes: { bold: true } }) - doc.text.insertAt(12, { attributes: { italic: true } }) - }) - assert.deepEqual(s1.text.toSpans(), [ - "hello", - { attributes: { bold: true } }, - " world", - { attributes: { italic: true } }, - ]) - }) - }) - }) - - it("should support unicode when creating text", () => { - s1 = Automerge.from({ - text: new Automerge.Text("🐦"), - }) - assert.strictEqual(s1.text.get(0), "🐦") - }) -}) diff --git a/javascript/test/uuid_test.ts b/javascript/test/uuid_test.ts deleted file mode 100644 index f6a0bde4..00000000 --- a/javascript/test/uuid_test.ts +++ /dev/null @@ -1,32 +0,0 @@ -import * as assert from "assert" -import * as Automerge from "../src" - -const uuid = Automerge.uuid - -describe("uuid", () => { - afterEach(() => { - uuid.reset() - }) - - describe("default implementation", () => { - it("generates unique values", () => { - assert.notEqual(uuid(), uuid()) - }) - }) - - describe("custom implementation", () => { - let counter - - function customUuid() { - return `custom-uuid-${counter++}` - } - - before(() => uuid.setFactory(customUuid)) - beforeEach(() => (counter = 0)) - - it("invokes the custom factory", () => { - assert.equal(uuid(), "custom-uuid-0") - assert.equal(uuid(), "custom-uuid-1") - }) - }) -}) diff --git a/javascript/tsconfig.json b/javascript/tsconfig.json deleted file mode 100644 index 628aea8e..00000000 --- 
a/javascript/tsconfig.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "compilerOptions": { - "target": "es2016", - "sourceMap": false, - "declaration": true, - "resolveJsonModule": true, - "module": "commonjs", - "moduleResolution": "node", - "noImplicitAny": false, - "allowSyntheticDefaultImports": true, - "forceConsistentCasingInFileNames": true, - "strict": true, - "noFallthroughCasesInSwitch": true, - "skipLibCheck": true, - "outDir": "./dist" - }, - "include": ["src/**/*", "test/**/*"], - "exclude": ["./dist/**/*", "./node_modules", "./src/**/*.deno.ts"] -} diff --git a/javascript/tslint.json b/javascript/tslint.json deleted file mode 100644 index f7bb7a71..00000000 --- a/javascript/tslint.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "tslint:recommended" -} diff --git a/rust/.gitignore b/rust/.gitignore deleted file mode 100644 index f859e0a3..00000000 --- a/rust/.gitignore +++ /dev/null @@ -1,6 +0,0 @@ -/target -/.direnv -perf.* -/Cargo.lock -build/ -.vim/* diff --git a/rust/automerge-c/.clang-format b/rust/automerge-c/.clang-format deleted file mode 100644 index dbf16c21..00000000 --- a/rust/automerge-c/.clang-format +++ /dev/null @@ -1,250 +0,0 @@ ---- -Language: Cpp -# BasedOnStyle: Chromium -AccessModifierOffset: -1 -AlignAfterOpenBracket: Align -AlignArrayOfStructures: None -AlignConsecutiveAssignments: - Enabled: false - AcrossEmptyLines: false - AcrossComments: false - AlignCompound: false - PadOperators: true -AlignConsecutiveBitFields: - Enabled: false - AcrossEmptyLines: false - AcrossComments: false - AlignCompound: false - PadOperators: false -AlignConsecutiveDeclarations: - Enabled: false - AcrossEmptyLines: false - AcrossComments: false - AlignCompound: false - PadOperators: false -AlignConsecutiveMacros: - Enabled: false - AcrossEmptyLines: false - AcrossComments: false - AlignCompound: false - PadOperators: false -AlignEscapedNewlines: Left -AlignOperands: Align -AlignTrailingComments: true -AllowAllArgumentsOnNextLine: true 
-AllowAllParametersOfDeclarationOnNextLine: false -AllowShortEnumsOnASingleLine: true -AllowShortBlocksOnASingleLine: Never -AllowShortCaseLabelsOnASingleLine: false -AllowShortFunctionsOnASingleLine: Inline -AllowShortLambdasOnASingleLine: All -AllowShortIfStatementsOnASingleLine: Never -AllowShortLoopsOnASingleLine: false -AlwaysBreakAfterDefinitionReturnType: None -AlwaysBreakAfterReturnType: None -AlwaysBreakBeforeMultilineStrings: true -AlwaysBreakTemplateDeclarations: Yes -AttributeMacros: - - __capability -BinPackArguments: true -BinPackParameters: false -BraceWrapping: - AfterCaseLabel: false - AfterClass: false - AfterControlStatement: Never - AfterEnum: false - AfterFunction: false - AfterNamespace: false - AfterObjCDeclaration: false - AfterStruct: false - AfterUnion: false - AfterExternBlock: false - BeforeCatch: false - BeforeElse: false - BeforeLambdaBody: false - BeforeWhile: false - IndentBraces: false - SplitEmptyFunction: true - SplitEmptyRecord: true - SplitEmptyNamespace: true -BreakBeforeBinaryOperators: None -BreakBeforeConceptDeclarations: Always -BreakBeforeBraces: Attach -BreakBeforeInheritanceComma: false -BreakInheritanceList: BeforeColon -BreakBeforeTernaryOperators: true -BreakConstructorInitializersBeforeComma: false -BreakConstructorInitializers: BeforeColon -BreakAfterJavaFieldAnnotations: false -BreakStringLiterals: true -ColumnLimit: 120 -CommentPragmas: '^ IWYU pragma:' -QualifierAlignment: Leave -CompactNamespaces: false -ConstructorInitializerIndentWidth: 4 -ContinuationIndentWidth: 4 -Cpp11BracedListStyle: true -DeriveLineEnding: true -DerivePointerAlignment: false -DisableFormat: false -EmptyLineAfterAccessModifier: Never -EmptyLineBeforeAccessModifier: LogicalBlock -ExperimentalAutoDetectBinPacking: false -PackConstructorInitializers: NextLine -BasedOnStyle: '' -ConstructorInitializerAllOnOneLineOrOnePerLine: false -AllowAllConstructorInitializersOnNextLine: true -FixNamespaceComments: true -ForEachMacros: - - foreach - - 
Q_FOREACH - - BOOST_FOREACH -IfMacros: - - KJ_IF_MAYBE -IncludeBlocks: Preserve -IncludeCategories: - - Regex: '^' - Priority: 2 - SortPriority: 0 - CaseSensitive: false - - Regex: '^<.*\.h>' - Priority: 1 - SortPriority: 0 - CaseSensitive: false - - Regex: '^<.*' - Priority: 2 - SortPriority: 0 - CaseSensitive: false - - Regex: '.*' - Priority: 3 - SortPriority: 0 - CaseSensitive: false -IncludeIsMainRegex: '([-_](test|unittest))?$' -IncludeIsMainSourceRegex: '' -IndentAccessModifiers: false -IndentCaseLabels: true -IndentCaseBlocks: false -IndentGotoLabels: true -IndentPPDirectives: None -IndentExternBlock: AfterExternBlock -IndentRequiresClause: true -IndentWidth: 4 -IndentWrappedFunctionNames: false -InsertBraces: false -InsertTrailingCommas: None -JavaScriptQuotes: Leave -JavaScriptWrapImports: true -KeepEmptyLinesAtTheStartOfBlocks: false -LambdaBodyIndentation: Signature -MacroBlockBegin: '' -MacroBlockEnd: '' -MaxEmptyLinesToKeep: 1 -NamespaceIndentation: None -ObjCBinPackProtocolList: Never -ObjCBlockIndentWidth: 2 -ObjCBreakBeforeNestedBlockParam: true -ObjCSpaceAfterProperty: false -ObjCSpaceBeforeProtocolList: true -PenaltyBreakAssignment: 2 -PenaltyBreakBeforeFirstCallParameter: 1 -PenaltyBreakComment: 300 -PenaltyBreakFirstLessLess: 120 -PenaltyBreakOpenParenthesis: 0 -PenaltyBreakString: 1000 -PenaltyBreakTemplateDeclaration: 10 -PenaltyExcessCharacter: 1000000 -PenaltyReturnTypeOnItsOwnLine: 200 -PenaltyIndentedWhitespace: 0 -PointerAlignment: Left -PPIndentWidth: -1 -RawStringFormats: - - Language: Cpp - Delimiters: - - cc - - CC - - cpp - - Cpp - - CPP - - 'c++' - - 'C++' - CanonicalDelimiter: '' - BasedOnStyle: google - - Language: TextProto - Delimiters: - - pb - - PB - - proto - - PROTO - EnclosingFunctions: - - EqualsProto - - EquivToProto - - PARSE_PARTIAL_TEXT_PROTO - - PARSE_TEST_PROTO - - PARSE_TEXT_PROTO - - ParseTextOrDie - - ParseTextProtoOrDie - - ParseTestProto - - ParsePartialTestProto - CanonicalDelimiter: pb - BasedOnStyle: google 
-ReferenceAlignment: Pointer -ReflowComments: true -RemoveBracesLLVM: false -RequiresClausePosition: OwnLine -SeparateDefinitionBlocks: Leave -ShortNamespaceLines: 1 -SortIncludes: CaseSensitive -SortJavaStaticImport: Before -SortUsingDeclarations: true -SpaceAfterCStyleCast: false -SpaceAfterLogicalNot: false -SpaceAfterTemplateKeyword: true -SpaceBeforeAssignmentOperators: true -SpaceBeforeCaseColon: false -SpaceBeforeCpp11BracedList: false -SpaceBeforeCtorInitializerColon: true -SpaceBeforeInheritanceColon: true -SpaceBeforeParens: ControlStatements -SpaceBeforeParensOptions: - AfterControlStatements: true - AfterForeachMacros: true - AfterFunctionDefinitionName: false - AfterFunctionDeclarationName: false - AfterIfMacros: true - AfterOverloadedOperator: false - AfterRequiresInClause: false - AfterRequiresInExpression: false - BeforeNonEmptyParentheses: false -SpaceAroundPointerQualifiers: Default -SpaceBeforeRangeBasedForLoopColon: true -SpaceInEmptyBlock: false -SpaceInEmptyParentheses: false -SpacesBeforeTrailingComments: 2 -SpacesInAngles: Never -SpacesInConditionalStatement: false -SpacesInContainerLiterals: true -SpacesInCStyleCastParentheses: false -SpacesInLineCommentPrefix: - Minimum: 1 - Maximum: -1 -SpacesInParentheses: false -SpacesInSquareBrackets: false -SpaceBeforeSquareBrackets: false -BitFieldColonSpacing: Both -Standard: Auto -StatementAttributeLikeMacros: - - Q_EMIT -StatementMacros: - - Q_UNUSED - - QT_REQUIRE_VERSION -TabWidth: 8 -UseCRLF: false -UseTab: Never -WhitespaceSensitiveMacros: - - STRINGIZE - - PP_STRINGIZE - - BOOST_PP_STRINGIZE - - NS_SWIFT_NAME - - CF_SWIFT_NAME -... 
- diff --git a/rust/automerge-c/.gitignore b/rust/automerge-c/.gitignore deleted file mode 100644 index 14d74973..00000000 --- a/rust/automerge-c/.gitignore +++ /dev/null @@ -1,10 +0,0 @@ -automerge -automerge.h -automerge.o -build/ -CMakeCache.txt -CMakeFiles -CMakePresets.json -Makefile -DartConfiguration.tcl -out/ diff --git a/rust/automerge-c/CMakeLists.txt b/rust/automerge-c/CMakeLists.txt deleted file mode 100644 index 0c35eebd..00000000 --- a/rust/automerge-c/CMakeLists.txt +++ /dev/null @@ -1,305 +0,0 @@ -cmake_minimum_required(VERSION 3.23 FATAL_ERROR) - -project(automerge-c VERSION 0.1.0 - LANGUAGES C - DESCRIPTION "C bindings for the Automerge Rust library.") - -set(LIBRARY_NAME "automerge") - -set(CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS ON) - -option(BUILD_SHARED_LIBS "Enable the choice of a shared or static library.") - -include(CTest) - -include(CMakePackageConfigHelpers) - -include(GNUInstallDirs) - -set(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake") - -string(MAKE_C_IDENTIFIER ${PROJECT_NAME} SYMBOL_PREFIX) - -string(TOUPPER ${SYMBOL_PREFIX} SYMBOL_PREFIX) - -set(CARGO_TARGET_DIR "${CMAKE_BINARY_DIR}/Cargo/target") - -set(CBINDGEN_INCLUDEDIR "${CMAKE_BINARY_DIR}/${CMAKE_INSTALL_INCLUDEDIR}") - -set(CBINDGEN_TARGET_DIR "${CBINDGEN_INCLUDEDIR}/${PROJECT_NAME}") - -find_program ( - CARGO_CMD - "cargo" - PATHS "$ENV{CARGO_HOME}/bin" - DOC "The Cargo command" -) - -if(NOT CARGO_CMD) - message(FATAL_ERROR "Cargo (Rust package manager) not found! " - "Please install it and/or set the CARGO_HOME " - "environment variable to its path.") -endif() - -string(TOLOWER "${CMAKE_BUILD_TYPE}" BUILD_TYPE_LOWER) - -# In order to build with -Z build-std, we need to pass target explicitly. 
-# https://doc.rust-lang.org/cargo/reference/unstable.html#build-std -execute_process ( - COMMAND rustc -vV - OUTPUT_VARIABLE RUSTC_VERSION - OUTPUT_STRIP_TRAILING_WHITESPACE -) -string(REGEX REPLACE ".*host: ([^ \n]*).*" "\\1" - CARGO_TARGET - ${RUSTC_VERSION} -) - -if(BUILD_TYPE_LOWER STREQUAL debug) - set(CARGO_BUILD_TYPE "debug") - - set(CARGO_FLAG --target=${CARGO_TARGET}) -else() - set(CARGO_BUILD_TYPE "release") - - if (NOT RUSTC_VERSION MATCHES "nightly") - set(RUSTUP_TOOLCHAIN nightly) - endif() - - set(RUSTFLAGS -C\ panic=abort) - - set(CARGO_FLAG -Z build-std=std,panic_abort --release --target=${CARGO_TARGET}) -endif() - -set(CARGO_FEATURES "") - -set(CARGO_BINARY_DIR "${CARGO_TARGET_DIR}/${CARGO_TARGET}/${CARGO_BUILD_TYPE}") - -set(BINDINGS_NAME "${LIBRARY_NAME}_core") - -configure_file( - ${CMAKE_MODULE_PATH}/Cargo.toml.in - ${CMAKE_SOURCE_DIR}/Cargo.toml - @ONLY - NEWLINE_STYLE LF -) - -set(INCLUDE_GUARD_PREFIX "${SYMBOL_PREFIX}") - -configure_file( - ${CMAKE_MODULE_PATH}/cbindgen.toml.in - ${CMAKE_SOURCE_DIR}/cbindgen.toml - @ONLY - NEWLINE_STYLE LF -) - -set(CARGO_OUTPUT - ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h - ${CARGO_BINARY_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}${BINDINGS_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX} -) - -# \note cbindgen's naming behavior isn't fully configurable and it ignores -# `const fn` calls (https://github.com/eqrion/cbindgen/issues/252). -add_custom_command( - OUTPUT - ${CARGO_OUTPUT} - COMMAND - # \note cbindgen won't regenerate its output header file after it's been removed but it will after its - # configuration file has been updated. 
- ${CMAKE_COMMAND} -DCONDITION=NOT_EXISTS -P ${CMAKE_SOURCE_DIR}/cmake/file-touch.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h ${CMAKE_SOURCE_DIR}/cbindgen.toml - COMMAND - ${CMAKE_COMMAND} -E env CARGO_TARGET_DIR=${CARGO_TARGET_DIR} CBINDGEN_TARGET_DIR=${CBINDGEN_TARGET_DIR} RUSTUP_TOOLCHAIN=${RUSTUP_TOOLCHAIN} RUSTFLAGS=${RUSTFLAGS} ${CARGO_CMD} build ${CARGO_FLAG} ${CARGO_FEATURES} - COMMAND - # Compensate for cbindgen's translation of consecutive uppercase letters to "ScreamingSnakeCase". - ${CMAKE_COMMAND} -DMATCH_REGEX=A_M\([^_]+\)_ -DREPLACE_EXPR=AM_\\1_ -P ${CMAKE_SOURCE_DIR}/cmake/file-regex-replace.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h - COMMAND - # Compensate for cbindgen ignoring `std:mem::size_of()` calls. - ${CMAKE_COMMAND} -DMATCH_REGEX=USIZE_ -DREPLACE_EXPR=\+${CMAKE_SIZEOF_VOID_P} -P ${CMAKE_SOURCE_DIR}/cmake/file-regex-replace.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h - MAIN_DEPENDENCY - src/lib.rs - DEPENDS - src/actor_id.rs - src/byte_span.rs - src/change.rs - src/doc.rs - src/doc/list.rs - src/doc/map.rs - src/doc/utils.rs - src/index.rs - src/item.rs - src/items.rs - src/obj.rs - src/result.rs - src/sync.rs - src/sync/have.rs - src/sync/message.rs - src/sync/state.rs - ${CMAKE_SOURCE_DIR}/build.rs - ${CMAKE_MODULE_PATH}/Cargo.toml.in - ${CMAKE_MODULE_PATH}/cbindgen.toml.in - WORKING_DIRECTORY - ${CMAKE_SOURCE_DIR} - COMMENT - "Producing the bindings' artifacts with Cargo..." - VERBATIM -) - -add_custom_target(${BINDINGS_NAME}_artifacts ALL - DEPENDS ${CARGO_OUTPUT} -) - -add_library(${BINDINGS_NAME} STATIC IMPORTED GLOBAL) - -target_include_directories(${BINDINGS_NAME} INTERFACE "${CBINDGEN_INCLUDEDIR}") - -set_target_properties( - ${BINDINGS_NAME} - PROPERTIES - # \note Cargo writes a debug build into a nested directory instead of - # decorating its name. 
- DEBUG_POSTFIX "" - DEFINE_SYMBOL "" - IMPORTED_IMPLIB "" - IMPORTED_LOCATION "${CARGO_BINARY_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}${BINDINGS_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX}" - IMPORTED_NO_SONAME "TRUE" - IMPORTED_SONAME "" - LINKER_LANGUAGE C - PUBLIC_HEADER "${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h" - SOVERSION "${PROJECT_VERSION_MAJOR}" - VERSION "${PROJECT_VERSION}" - # \note Cargo exports all of the symbols automatically. - WINDOWS_EXPORT_ALL_SYMBOLS "TRUE" -) - -target_compile_definitions(${BINDINGS_NAME} INTERFACE $) - -set(UTILS_SUBDIR "utils") - -add_custom_command( - OUTPUT - ${CBINDGEN_TARGET_DIR}/${UTILS_SUBDIR}/enum_string.h - ${CMAKE_BINARY_DIR}/src/${UTILS_SUBDIR}/enum_string.c - COMMAND - ${CMAKE_COMMAND} -DPROJECT_NAME=${PROJECT_NAME} -DLIBRARY_NAME=${LIBRARY_NAME} -DSUBDIR=${UTILS_SUBDIR} -P ${CMAKE_SOURCE_DIR}/cmake/enum-string-functions-gen.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h ${CBINDGEN_TARGET_DIR}/${UTILS_SUBDIR}/enum_string.h ${CMAKE_BINARY_DIR}/src/${UTILS_SUBDIR}/enum_string.c - MAIN_DEPENDENCY - ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h - DEPENDS - ${CMAKE_SOURCE_DIR}/cmake/enum-string-functions-gen.cmake - WORKING_DIRECTORY - ${CMAKE_SOURCE_DIR} - COMMENT - "Generating the enum string functions with CMake..." 
- VERBATIM -) - -add_custom_target(${LIBRARY_NAME}_utilities - DEPENDS ${CBINDGEN_TARGET_DIR}/${UTILS_SUBDIR}/enum_string.h - ${CMAKE_BINARY_DIR}/src/${UTILS_SUBDIR}/enum_string.c -) - -add_library(${LIBRARY_NAME}) - -target_compile_features(${LIBRARY_NAME} PRIVATE c_std_99) - -set(CMAKE_THREAD_PREFER_PTHREAD TRUE) - -set(THREADS_PREFER_PTHREAD_FLAG TRUE) - -find_package(Threads REQUIRED) - -set(LIBRARY_DEPENDENCIES Threads::Threads ${CMAKE_DL_LIBS}) - -if(WIN32) - list(APPEND LIBRARY_DEPENDENCIES Bcrypt userenv ws2_32) -else() - list(APPEND LIBRARY_DEPENDENCIES m) -endif() - -target_link_libraries(${LIBRARY_NAME} - PUBLIC ${BINDINGS_NAME} - ${LIBRARY_DEPENDENCIES} -) - -# \note An imported library's INTERFACE_INCLUDE_DIRECTORIES property can't -# contain a non-existent path so its build-time include directory -# must be specified for all of its dependent targets instead. -target_include_directories(${LIBRARY_NAME} - PUBLIC "$" - "$" -) - -add_dependencies(${LIBRARY_NAME} ${BINDINGS_NAME}_artifacts) - -# Generate the configuration header. 
-math(EXPR INTEGER_PROJECT_VERSION_MAJOR "${PROJECT_VERSION_MAJOR} * 100000") - -math(EXPR INTEGER_PROJECT_VERSION_MINOR "${PROJECT_VERSION_MINOR} * 100") - -math(EXPR INTEGER_PROJECT_VERSION_PATCH "${PROJECT_VERSION_PATCH}") - -math(EXPR INTEGER_PROJECT_VERSION "${INTEGER_PROJECT_VERSION_MAJOR} + \ - ${INTEGER_PROJECT_VERSION_MINOR} + \ - ${INTEGER_PROJECT_VERSION_PATCH}") - -configure_file( - ${CMAKE_MODULE_PATH}/config.h.in - ${CBINDGEN_TARGET_DIR}/config.h - @ONLY - NEWLINE_STYLE LF -) - -target_sources(${LIBRARY_NAME} - PRIVATE - src/${UTILS_SUBDIR}/result.c - src/${UTILS_SUBDIR}/stack_callback_data.c - src/${UTILS_SUBDIR}/stack.c - src/${UTILS_SUBDIR}/string.c - ${CMAKE_BINARY_DIR}/src/${UTILS_SUBDIR}/enum_string.c - PUBLIC - FILE_SET api TYPE HEADERS - BASE_DIRS - ${CBINDGEN_INCLUDEDIR} - ${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR} - FILES - ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h - ${CBINDGEN_TARGET_DIR}/${UTILS_SUBDIR}/enum_string.h - ${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/result.h - ${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/stack_callback_data.h - ${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/stack.h - ${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/string.h - INTERFACE - FILE_SET config TYPE HEADERS - BASE_DIRS - ${CBINDGEN_INCLUDEDIR} - FILES - ${CBINDGEN_TARGET_DIR}/config.h -) - -install( - TARGETS ${LIBRARY_NAME} - EXPORT ${PROJECT_NAME}-config - FILE_SET api - FILE_SET config -) - -# \note Install the Cargo-built core bindings to enable direct linkage. 
-install( - FILES $ - DESTINATION ${CMAKE_INSTALL_LIBDIR} -) - -install(EXPORT ${PROJECT_NAME}-config - FILE ${PROJECT_NAME}-config.cmake - NAMESPACE "${PROJECT_NAME}::" - DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/${LIB} -) - -if(BUILD_TESTING) - add_subdirectory(test EXCLUDE_FROM_ALL) - - enable_testing() -endif() - -add_subdirectory(docs) - -add_subdirectory(examples EXCLUDE_FROM_ALL) diff --git a/rust/automerge-c/Cargo.toml b/rust/automerge-c/Cargo.toml deleted file mode 100644 index 95a3a29c..00000000 --- a/rust/automerge-c/Cargo.toml +++ /dev/null @@ -1,22 +0,0 @@ -[package] -name = "automerge-c" -version = "0.1.0" -authors = ["Orion Henry ", "Jason Kankiewicz "] -edition = "2021" -license = "MIT" -rust-version = "1.57.0" - -[lib] -name = "automerge_core" -crate-type = ["staticlib"] -bench = false -doc = false - -[dependencies] -automerge = { path = "../automerge" } -hex = "^0.4.3" -libc = "^0.2" -smol_str = "^0.1.21" - -[build-dependencies] -cbindgen = "^0.24" diff --git a/rust/automerge-c/README.md b/rust/automerge-c/README.md deleted file mode 100644 index 1fbca3df..00000000 --- a/rust/automerge-c/README.md +++ /dev/null @@ -1,207 +0,0 @@ -# Overview - -automerge-c exposes a C API that can either be used directly or as the basis -for other language bindings that have good support for calling C functions. - -# Installing - -See the main README for instructions on getting your environment set up and then -you can build the automerge-c library and install its constituent files within -a root directory of your choosing (e.g. 
"/usr/local") like so: -```shell -cmake -E make_directory automerge-c/build -cmake -S automerge-c -B automerge-c/build -cmake --build automerge-c/build -cmake --install automerge-c/build --prefix "/usr/local" -``` -Installation is important because the name, location and structure of CMake's -out-of-source build subdirectory is subject to change based on the platform and -the release version; generated headers like `automerge-c/config.h` and -`automerge-c/utils/enum_string.h` are only sure to be found within their -installed locations. - -It's not obvious because they are versioned but the `Cargo.toml` and -`cbindgen.toml` configuration files are also generated in order to ensure that -the project name, project version and library name that they contain match those -specified within the top-level `CMakeLists.txt` file. - -If you'd like to cross compile the library for different platforms you can do so -using [cross](https://github.com/cross-rs/cross). For example: - -- `cross build --manifest-path rust/automerge-c/Cargo.toml -r --target aarch64-unknown-linux-gnu` - -This will output a shared library in the directory `rust/target/aarch64-unknown-linux-gnu/release/`. - -You can replace `aarch64-unknown-linux-gnu` with any -[cross supported targets](https://github.com/cross-rs/cross#supported-targets). -The targets below are known to work, though other targets are expected to work -too: - -- `x86_64-apple-darwin` -- `aarch64-apple-darwin` -- `x86_64-unknown-linux-gnu` -- `aarch64-unknown-linux-gnu` - -As a caveat, CMake generates the `automerge.h` header file in terms of the -processor architecture of the computer on which it was built so, for example, -don't use a header generated for a 64-bit processor if your target is a 32-bit -processor. 
- -# Usage - -You can build and view the C API's HTML reference documentation like so: -```shell -cmake -E make_directory automerge-c/build -cmake -S automerge-c -B automerge-c/build -cmake --build automerge-c/build --target automerge_docs -firefox automerge-c/build/src/html/index.html -``` - -To get started quickly, look at the -[examples](https://github.com/automerge/automerge-rs/tree/main/rust/automerge-c/examples). - -Almost all operations in automerge-c act on an Automerge document -(`AMdoc` struct) which is structurally similar to a JSON document. - -You can get a document by calling either `AMcreate()` or `AMload()`. Operations -on a given document are not thread-safe so you must use a mutex or similar to -avoid calling more than one function on the same one concurrently. - -A C API function that could succeed or fail returns a result (`AMresult` struct) -containing a status code (`AMstatus` enum) and either a sequence of at least one -item (`AMitem` struct) or a read-only view onto a UTF-8 error message string -(`AMbyteSpan` struct). -An item contains up to three components: an index within its parent object -(`AMbyteSpan` struct or `size_t`), a unique identifier (`AMobjId` struct) and a -value. -The result of a successful function call that doesn't produce any values will -contain a single item that is void (`AM_VAL_TYPE_VOID`). -A returned result **must** be passed to `AMresultFree()` once the item(s) or -error message it contains is no longer needed in order to avoid a memory leak. -``` -#include -#include -#include -#include - -int main(int argc, char** argv) { - AMresult *docResult = AMcreate(NULL); - - if (AMresultStatus(docResult) != AM_STATUS_OK) { - char* const err_msg = AMstrdup(AMresultError(docResult), NULL); - printf("failed to create doc: %s", err_msg); - free(err_msg); - goto cleanup; - } - - AMdoc *doc; - AMitemToDoc(AMresultItem(docResult), &doc); - - // useful code goes here! 
- -cleanup: - AMresultFree(docResult); -} -``` - -If you are writing an application in C, the `AMstackItem()`, `AMstackItems()` -and `AMstackResult()` functions enable the lifetimes of anonymous results to be -centrally managed and allow the same validation logic to be reused without -relying upon the `goto` statement (see examples/quickstart.c). - -If you are wrapping automerge-c in another language, particularly one that has a -garbage collector, you can call the `AMresultFree()` function within a finalizer -to ensure that memory is reclaimed when it is no longer needed. - -Automerge documents consist of a mutable root which is always a map from string -keys to values. A value can be one of the following types: - -- A number of type double / int64_t / uint64_t -- An explicit true / false / null -- An immutable UTF-8 string (`AMbyteSpan`). -- An immutable array of arbitrary bytes (`AMbyteSpan`). -- A mutable map from string keys to values. -- A mutable list of values. -- A mutable UTF-8 string. - -If you read from a location in the document with no value, an item with type -`AM_VAL_TYPE_VOID` will be returned, but you cannot write such a value -explicitly. - -Under the hood, automerge references a mutable object by its object identifier -where `AM_ROOT` signifies a document's root map object. - -There are functions to put each type of value into either a map or a list, and -functions to read the current or a historical value from a map or a list. As (in general) collaborators -may edit the document at any time, you cannot guarantee that the type of the -value at a given part of the document will stay the same. As a result, reading -from the document will return an `AMitem` struct that you can inspect to -determine the type of value that it contains. - -Strings in automerge-c are represented using an `AMbyteSpan` which contains a -pointer and a length. Strings must be valid UTF-8 and may contain NUL (`0`) -characters. 
-For your convenience, you can call `AMstr()` to get the `AMbyteSpan` struct -equivalent of a null-terminated byte string or `AMstrdup()` to get the -representation of an `AMbyteSpan` struct as a null-terminated byte string -wherein its NUL characters have been removed/replaced as you choose. - -Putting all of that together, to read and write from the root of the document -you can do this: - -``` -#include -#include -#include -#include - -int main(int argc, char** argv) { - // ...previous example... - AMdoc *doc; - AMitemToDoc(AMresultItem(docResult), &doc); - - AMresult *putResult = AMmapPutStr(doc, AM_ROOT, AMstr("key"), AMstr("value")); - if (AMresultStatus(putResult) != AM_STATUS_OK) { - char* const err_msg = AMstrdup(AMresultError(putResult), NULL); - printf("failed to put: %s", err_msg); - free(err_msg); - goto cleanup; - } - - AMresult *getResult = AMmapGet(doc, AM_ROOT, AMstr("key"), NULL); - if (AMresultStatus(getResult) != AM_STATUS_OK) { - char* const err_msg = AMstrdup(AMresultError(putResult), NULL); - printf("failed to get: %s", err_msg); - free(err_msg); - goto cleanup; - } - - AMbyteSpan got; - if (AMitemToStr(AMresultItem(getResult), &got)) { - char* const c_str = AMstrdup(got, NULL); - printf("Got %zu-character string \"%s\"", got.count, c_str); - free(c_str); - } else { - printf("expected to read a string!"); - goto cleanup; - } - - -cleanup: - AMresultFree(getResult); - AMresultFree(putResult); - AMresultFree(docResult); -} -``` - -Functions that do not return an `AMresult` (for example `AMitemKey()`) do -not allocate memory but rather reference memory that was previously -allocated. It's therefore important to keep the original `AMresult` alive (in -this case the one returned by `AMmapRange()`) until after you are finished with -the items that it contains. However, the memory for an individual `AMitem` can -be shared with a new `AMresult` by calling `AMitemResult()` on it. 
In other -words, a select group of items can be filtered out of a collection and only each -one's corresponding `AMresult` must be kept alive from that point forward; the -originating collection's `AMresult` can be safely freed. - -Beyond that, good luck! diff --git a/rust/automerge-c/build.rs b/rust/automerge-c/build.rs deleted file mode 100644 index bf12a105..00000000 --- a/rust/automerge-c/build.rs +++ /dev/null @@ -1,21 +0,0 @@ -extern crate cbindgen; - -use std::{env, path::PathBuf}; - -fn main() { - let crate_dir = PathBuf::from( - env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR env var is not defined"), - ); - - let config = cbindgen::Config::from_file("cbindgen.toml") - .expect("Unable to find cbindgen.toml configuration file"); - - if let Ok(writer) = cbindgen::generate_with_config(crate_dir, config) { - // \note CMake sets this environment variable before invoking Cargo so - // that it can direct the generated header file into its - // out-of-source build directory for post-processing. - if let Ok(target_dir) = env::var("CBINDGEN_TARGET_DIR") { - writer.write_to_file(PathBuf::from(target_dir).join("automerge.h")); - } - } -} diff --git a/rust/automerge-c/cbindgen.toml b/rust/automerge-c/cbindgen.toml deleted file mode 100644 index 21eaaadd..00000000 --- a/rust/automerge-c/cbindgen.toml +++ /dev/null @@ -1,48 +0,0 @@ -after_includes = """\n -/** - * \\defgroup enumerations Public Enumerations - * Symbolic names for integer constants. - */ - -/** - * \\memberof AMdoc - * \\def AM_ROOT - * \\brief The root object of a document. - */ -#define AM_ROOT NULL - -/** - * \\memberof AMdoc - * \\def AM_CHANGE_HASH_SIZE - * \\brief The count of bytes in a change hash. - */ -#define AM_CHANGE_HASH_SIZE 32 -""" -autogen_warning = """ -/** - * \\file - * \\brief All constants, functions and types in the core Automerge C API. - * - * \\warning This file is auto-generated by cbindgen. 
- */ -""" -documentation = true -documentation_style = "doxy" -include_guard = "AUTOMERGE_C_H" -includes = [] -language = "C" -line_length = 140 -no_includes = true -style = "both" -sys_includes = ["stdbool.h", "stddef.h", "stdint.h", "time.h"] -usize_is_size_t = true - -[enum] -derive_const_casts = true -enum_class = true -must_use = "MUST_USE_ENUM" -prefix_with_name = true -rename_variants = "ScreamingSnakeCase" - -[export] -item_types = ["constants", "enums", "functions", "opaque", "structs", "typedefs"] diff --git a/rust/automerge-c/cmake/Cargo.toml.in b/rust/automerge-c/cmake/Cargo.toml.in deleted file mode 100644 index 781e2fef..00000000 --- a/rust/automerge-c/cmake/Cargo.toml.in +++ /dev/null @@ -1,22 +0,0 @@ -[package] -name = "@PROJECT_NAME@" -version = "@PROJECT_VERSION@" -authors = ["Orion Henry ", "Jason Kankiewicz "] -edition = "2021" -license = "MIT" -rust-version = "1.57.0" - -[lib] -name = "@BINDINGS_NAME@" -crate-type = ["staticlib"] -bench = false -doc = false - -[dependencies] -@LIBRARY_NAME@ = { path = "../@LIBRARY_NAME@" } -hex = "^0.4.3" -libc = "^0.2" -smol_str = "^0.1.21" - -[build-dependencies] -cbindgen = "^0.24" diff --git a/rust/automerge-c/cmake/automerge-c-config.cmake.in b/rust/automerge-c/cmake/automerge-c-config.cmake.in deleted file mode 100644 index fd39aee6..00000000 --- a/rust/automerge-c/cmake/automerge-c-config.cmake.in +++ /dev/null @@ -1,99 +0,0 @@ -@PACKAGE_INIT@ - -include(CMakeFindDependencyMacro) - -set(CMAKE_THREAD_PREFER_PTHREAD TRUE) - -set(THREADS_PREFER_PTHREAD_FLAG TRUE) - -find_dependency(Threads) - -find_library(@SYMBOL_PREFIX@_IMPLIB_DEBUG @LIBRARY_NAME@${CMAKE_DEBUG_POSTFIX} PATHS "${PACKAGE_PREFIX_DIR}/debug/${CMAKE_INSTALL_LIBDIR}" "${PACKAGE_PREFIX_DIR}/${CMAKE_INSTALL_LIBDIR}" NO_DEFAULT_PATH) - -find_library(@SYMBOL_PREFIX@_IMPLIB_RELEASE @LIBRARY_NAME@${CMAKE_RELEASE_POSTFIX} PATHS "${PACKAGE_PREFIX_DIR}/${CMAKE_INSTALL_LIBDIR}" NO_DEFAULT_PATH) - -find_file(@SYMBOL_PREFIX@_LOCATION_DEBUG 
"${CMAKE_SHARED_LIBRARY_PREFIX}@LIBRARY_NAME@${CMAKE_DEBUG_POSTFIX}${CMAKE_SHARED_LIBRARY_SUFFIX}" PATHS "${PACKAGE_PREFIX_DIR}/debug/${CMAKE_INSTALL_BINDIR}" "${PACKAGE_PREFIX_DIR}/${CMAKE_INSTALL_LIBDIR}" NO_DEFAULT_PATH) - -find_file(@SYMBOL_PREFIX@_LOCATION_RELEASE "${CMAKE_SHARED_LIBRARY_PREFIX}@LIBRARY_NAME@${CMAKE_RELEASE_POSTFIX}${CMAKE_SHARED_LIBRARY_SUFFIX}" PATHS "${PACKAGE_PREFIX_DIR}/${CMAKE_INSTALL_BINDIR}" NO_DEFAULT_PATH) - -if(@BUILD_SHARED_LIBS@) - set(@SYMBOL_PREFIX@_DEFINE_SYMBOL "@SYMBOL_PREFIX@_EXPORTS") - - if(WIN32) - set(@SYMBOL_PREFIX@_NO_SONAME_DEBUG "TRUE") - - set(@SYMBOL_PREFIX@_NO_SONAME_RELEASE "TRUE") - - set(@SYMBOL_PREFIX@_SONAME_DEBUG "") - - set(@SYMBOL_PREFIX@_SONAME_RELEASE "") - else() - set(@SYMBOL_PREFIX@_NO_SONAME_DEBUG "FALSE") - - set(@SYMBOL_PREFIX@_NO_SONAME_RELEASE "FALSE") - - get_filename_component(@SYMBOL_PREFIX@_SONAME_DEBUG "${@SYMBOL_PREFIX@_LOCATION_DEBUG}" NAME) - - get_filename_component(@SYMBOL_PREFIX@_SONAME_RELEASE "${@SYMBOL_PREFIX@_LOCATION_RELEASE}" NAME) - endif() - - set(@SYMBOL_PREFIX@_TYPE "SHARED") -else() - set(@SYMBOL_PREFIX@_DEFINE_SYMBOL "") - - set(@SYMBOL_PREFIX@_LOCATION_DEBUG "${@SYMBOL_PREFIX@_IMPLIB_DEBUG}") - - set(@SYMBOL_PREFIX@_IMPLIB_DEBUG "") - - set(@SYMBOL_PREFIX@_LOCATION_RELEASE "${@SYMBOL_PREFIX@_IMPLIB_RELEASE}") - - set(@SYMBOL_PREFIX@_IMPLIB_RELEASE "") - - set(@SYMBOL_PREFIX@_NO_SONAME_DEBUG "TRUE") - - set(@SYMBOL_PREFIX@_NO_SONAME_RELEASE "TRUE") - - set(@SYMBOL_PREFIX@_SONAME_DEBUG "") - - set(@SYMBOL_PREFIX@_SONAME_RELEASE "") - - set(@SYMBOL_PREFIX@_TYPE "STATIC") -endif() - -add_library(@NAMESPACE@@PROJECT_NAME@ ${@SYMBOL_PREFIX@_TYPE} IMPORTED) - -set_target_properties( - @NAMESPACE@@PROJECT_NAME@ - PROPERTIES - # \note Cargo writes a debug build into a nested directory instead of - # decorating its name. 
- DEBUG_POSTFIX "" - DEFINE_SYMBOL "${@SYMBOL_PREFIX@_DEFINE_SYMBOL}" - IMPORTED_CONFIGURATIONS "RELEASE;DEBUG" - IMPORTED_IMPLIB_DEBUG "${@SYMBOL_PREFIX@_IMPLIB_DEBUG}" - IMPORTED_IMPLIB_RELEASE "${@SYMBOL_PREFIX@_IMPLIB_RELEASE}" - IMPORTED_LOCATION_DEBUG "${@SYMBOL_PREFIX@_LOCATION_DEBUG}" - IMPORTED_LOCATION_RELEASE "${@SYMBOL_PREFIX@_LOCATION_RELEASE}" - IMPORTED_NO_SONAME_DEBUG "${@SYMBOL_PREFIX@_NO_SONAME_DEBUG}" - IMPORTED_NO_SONAME_RELEASE "${@SYMBOL_PREFIX@_NO_SONAME_RELEASE}" - IMPORTED_SONAME_DEBUG "${@SYMBOL_PREFIX@_SONAME_DEBUG}" - IMPORTED_SONAME_RELEASE "${@SYMBOL_PREFIX@_SONAME_RELEASE}" - INTERFACE_INCLUDE_DIRECTORIES "${PACKAGE_PREFIX_DIR}/${CMAKE_INSTALL_INCLUDEDIR}" - LINKER_LANGUAGE C - PUBLIC_HEADER "${PACKAGE_PREFIX_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/@PROJECT_NAME@/@LIBRARY_NAME@.h" - SOVERSION "@PROJECT_VERSION_MAJOR@" - VERSION "@PROJECT_VERSION@" - # \note Cargo exports all of the symbols automatically. - WINDOWS_EXPORT_ALL_SYMBOLS "TRUE" -) - -# Remove the variables that the find_* command calls cached. -unset(@SYMBOL_PREFIX@_IMPLIB_DEBUG CACHE) - -unset(@SYMBOL_PREFIX@_IMPLIB_RELEASE CACHE) - -unset(@SYMBOL_PREFIX@_LOCATION_DEBUG CACHE) - -unset(@SYMBOL_PREFIX@_LOCATION_RELEASE CACHE) - -check_required_components(@PROJECT_NAME@) diff --git a/rust/automerge-c/cmake/cbindgen.toml.in b/rust/automerge-c/cmake/cbindgen.toml.in deleted file mode 100644 index 5122b75c..00000000 --- a/rust/automerge-c/cmake/cbindgen.toml.in +++ /dev/null @@ -1,48 +0,0 @@ -after_includes = """\n -/** - * \\defgroup enumerations Public Enumerations - * Symbolic names for integer constants. - */ - -/** - * \\memberof AMdoc - * \\def AM_ROOT - * \\brief The root object of a document. - */ -#define AM_ROOT NULL - -/** - * \\memberof AMdoc - * \\def AM_CHANGE_HASH_SIZE - * \\brief The count of bytes in a change hash. 
- */ -#define AM_CHANGE_HASH_SIZE 32 -""" -autogen_warning = """ -/** - * \\file - * \\brief All constants, functions and types in the core Automerge C API. - * - * \\warning This file is auto-generated by cbindgen. - */ -""" -documentation = true -documentation_style = "doxy" -include_guard = "@INCLUDE_GUARD_PREFIX@_H" -includes = [] -language = "C" -line_length = 140 -no_includes = true -style = "both" -sys_includes = ["stdbool.h", "stddef.h", "stdint.h", "time.h"] -usize_is_size_t = true - -[enum] -derive_const_casts = true -enum_class = true -must_use = "MUST_USE_ENUM" -prefix_with_name = true -rename_variants = "ScreamingSnakeCase" - -[export] -item_types = ["constants", "enums", "functions", "opaque", "structs", "typedefs"] diff --git a/rust/automerge-c/cmake/config.h.in b/rust/automerge-c/cmake/config.h.in deleted file mode 100644 index 40482cb9..00000000 --- a/rust/automerge-c/cmake/config.h.in +++ /dev/null @@ -1,35 +0,0 @@ -#ifndef @INCLUDE_GUARD_PREFIX@_CONFIG_H -#define @INCLUDE_GUARD_PREFIX@_CONFIG_H -/** - * \file - * \brief Configuration pararameters defined by the build system. - * - * \warning This file is auto-generated by CMake. - */ - -/** - * \def @SYMBOL_PREFIX@_VERSION - * \brief Denotes a semantic version of the form {MAJOR}{MINOR}{PATCH} as three, - * two-digit decimal numbers without leading zeros (e.g. 100 is 0.1.0). - */ -#define @SYMBOL_PREFIX@_VERSION @INTEGER_PROJECT_VERSION@ - -/** - * \def @SYMBOL_PREFIX@_MAJOR_VERSION - * \brief Denotes a semantic major version as a decimal number. - */ -#define @SYMBOL_PREFIX@_MAJOR_VERSION (@SYMBOL_PREFIX@_VERSION / 100000) - -/** - * \def @SYMBOL_PREFIX@_MINOR_VERSION - * \brief Denotes a semantic minor version as a decimal number. - */ -#define @SYMBOL_PREFIX@_MINOR_VERSION ((@SYMBOL_PREFIX@_VERSION / 100) % 1000) - -/** - * \def @SYMBOL_PREFIX@_PATCH_VERSION - * \brief Denotes a semantic patch version as a decimal number. 
- */ -#define @SYMBOL_PREFIX@_PATCH_VERSION (@SYMBOL_PREFIX@_VERSION % 100) - -#endif /* @INCLUDE_GUARD_PREFIX@_CONFIG_H */ diff --git a/rust/automerge-c/cmake/enum-string-functions-gen.cmake b/rust/automerge-c/cmake/enum-string-functions-gen.cmake deleted file mode 100644 index 77080e8d..00000000 --- a/rust/automerge-c/cmake/enum-string-functions-gen.cmake +++ /dev/null @@ -1,183 +0,0 @@ -# This CMake script is used to generate a header and a source file for utility -# functions that convert the tags of generated enum types into strings and -# strings into the tags of generated enum types. -cmake_minimum_required(VERSION 3.23 FATAL_ERROR) - -# Seeks the starting line of the source enum's declaration. -macro(seek_enum_mode) - if (line MATCHES "^(typedef[ \t]+)?enum ") - string(REGEX REPLACE "^enum ([0-9a-zA-Z_]+).*$" "\\1" enum_name "${line}") - set(mode "read_tags") - endif() -endmacro() - -# Scans the input for the current enum's tags. -macro(read_tags_mode) - if(line MATCHES "^}") - set(mode "generate") - elseif(line MATCHES "^[A-Z0-9_]+.*$") - string(REGEX REPLACE "^([A-Za-z0-9_]+).*$" "\\1" tmp "${line}") - list(APPEND enum_tags "${tmp}") - endif() -endmacro() - -macro(write_header_file) - # Generate a to-string function declaration. - list(APPEND header_body - "/**\n" - " * \\ingroup enumerations\n" - " * \\brief Gets the string representation of an `${enum_name}` enum tag.\n" - " *\n" - " * \\param[in] tag An `${enum_name}` enum tag.\n" - " * \\return A null-terminated byte string.\n" - " */\n" - "char const* ${enum_name}ToString(${enum_name} const tag)\;\n" - "\n") - # Generate a from-string function declaration. 
- list(APPEND header_body - "/**\n" - " * \\ingroup enumerations\n" - " * \\brief Gets an `${enum_name}` enum tag from its string representation.\n" - " *\n" - " * \\param[out] dest An `${enum_name}` enum tag pointer.\n" - " * \\param[in] src A null-terminated byte string.\n" - " * \\return `true` if \\p src matches the string representation of an\n" - " * `${enum_name}` enum tag, `false` otherwise.\n" - " */\n" - "bool ${enum_name}FromString(${enum_name}* dest, char const* const src)\;\n" - "\n") -endmacro() - -macro(write_source_file) - # Generate a to-string function implementation. - list(APPEND source_body - "char const* ${enum_name}ToString(${enum_name} const tag) {\n" - " switch (tag) {\n" - " default:\n" - " return \"???\"\;\n") - foreach(label IN LISTS enum_tags) - list(APPEND source_body - " case ${label}:\n" - " return \"${label}\"\;\n") - endforeach() - list(APPEND source_body - " }\n" - "}\n" - "\n") - # Generate a from-string function implementation. - list(APPEND source_body - "bool ${enum_name}FromString(${enum_name}* dest, char const* const src) {\n") - foreach(label IN LISTS enum_tags) - list(APPEND source_body - " if (!strcmp(src, \"${label}\")) {\n" - " *dest = ${label}\;\n" - " return true\;\n" - " }\n") - endforeach() - list(APPEND source_body - " return false\;\n" - "}\n" - "\n") -endmacro() - -function(main) - set(header_body "") - # File header and includes. - list(APPEND header_body - "#ifndef ${include_guard}\n" - "#define ${include_guard}\n" - "/**\n" - " * \\file\n" - " * \\brief Utility functions for converting enum tags into null-terminated\n" - " * byte strings and vice versa.\n" - " *\n" - " * \\warning This file is auto-generated by CMake.\n" - " */\n" - "\n" - "#include \n" - "\n" - "#include <${library_include}>\n" - "\n") - set(source_body "") - # File includes. - list(APPEND source_body - "/** \\warning This file is auto-generated by CMake. 
*/\n" - "\n" - "#include \"stdio.h\"\n" - "#include \"string.h\"\n" - "\n" - "#include <${header_include}>\n" - "\n") - set(enum_name "") - set(enum_tags "") - set(mode "seek_enum") - file(STRINGS "${input_path}" lines) - foreach(line IN LISTS lines) - string(REGEX REPLACE "^(.+)(//.*)?" "\\1" line "${line}") - string(STRIP "${line}" line) - if(mode STREQUAL "seek_enum") - seek_enum_mode() - elseif(mode STREQUAL "read_tags") - read_tags_mode() - else() - # The end of the enum declaration was reached. - if(NOT enum_name) - # The end of the file was reached. - return() - endif() - if(NOT enum_tags) - message(FATAL_ERROR "No tags found for `${enum_name}`.") - endif() - string(TOLOWER "${enum_name}" output_stem_prefix) - string(CONCAT output_stem "${output_stem_prefix}" "_string") - cmake_path(REPLACE_EXTENSION output_stem "h" OUTPUT_VARIABLE output_header_basename) - write_header_file() - write_source_file() - set(enum_name "") - set(enum_tags "") - set(mode "seek_enum") - endif() - endforeach() - # File footer. 
- list(APPEND header_body - "#endif /* ${include_guard} */\n") - message(STATUS "Generating header file \"${output_header_path}\"...") - file(WRITE "${output_header_path}" ${header_body}) - message(STATUS "Generating source file \"${output_source_path}\"...") - file(WRITE "${output_source_path}" ${source_body}) -endfunction() - -if(NOT DEFINED PROJECT_NAME) - message(FATAL_ERROR "Variable PROJECT_NAME is not defined.") -elseif(NOT DEFINED LIBRARY_NAME) - message(FATAL_ERROR "Variable LIBRARY_NAME is not defined.") -elseif(NOT DEFINED SUBDIR) - message(FATAL_ERROR "Variable SUBDIR is not defined.") -elseif(${CMAKE_ARGC} LESS 9) - message(FATAL_ERROR "Too few arguments.") -elseif(${CMAKE_ARGC} GREATER 10) - message(FATAL_ERROR "Too many arguments.") -elseif(NOT EXISTS ${CMAKE_ARGV5}) - message(FATAL_ERROR "Input header \"${CMAKE_ARGV7}\" not found.") -endif() -cmake_path(CONVERT "${CMAKE_ARGV7}" TO_CMAKE_PATH_LIST input_path NORMALIZE) -cmake_path(CONVERT "${CMAKE_ARGV8}" TO_CMAKE_PATH_LIST output_header_path NORMALIZE) -cmake_path(CONVERT "${CMAKE_ARGV9}" TO_CMAKE_PATH_LIST output_source_path NORMALIZE) -string(TOLOWER "${PROJECT_NAME}" project_root) -cmake_path(CONVERT "${SUBDIR}" TO_CMAKE_PATH_LIST project_subdir NORMALIZE) -string(TOLOWER "${project_subdir}" project_subdir) -string(TOLOWER "${LIBRARY_NAME}" library_stem) -cmake_path(REPLACE_EXTENSION library_stem "h" OUTPUT_VARIABLE library_basename) -string(JOIN "/" library_include "${project_root}" "${library_basename}") -string(TOUPPER "${PROJECT_NAME}" project_name_upper) -string(TOUPPER "${project_subdir}" include_guard_infix) -string(REGEX REPLACE "/" "_" include_guard_infix "${include_guard_infix}") -string(REGEX REPLACE "-" "_" include_guard_prefix "${project_name_upper}") -string(JOIN "_" include_guard_prefix "${include_guard_prefix}" "${include_guard_infix}") -string(JOIN "/" output_header_prefix "${project_root}" "${project_subdir}") -cmake_path(GET output_header_path STEM output_header_stem) 
-string(TOUPPER "${output_header_stem}" include_guard_stem) -string(JOIN "_" include_guard "${include_guard_prefix}" "${include_guard_stem}" "H") -cmake_path(GET output_header_path FILENAME output_header_basename) -string(JOIN "/" header_include "${output_header_prefix}" "${output_header_basename}") -main() diff --git a/rust/automerge-c/cmake/file-regex-replace.cmake b/rust/automerge-c/cmake/file-regex-replace.cmake deleted file mode 100644 index 09005bc2..00000000 --- a/rust/automerge-c/cmake/file-regex-replace.cmake +++ /dev/null @@ -1,33 +0,0 @@ -# This CMake script is used to perform string substitutions within a generated -# file. -cmake_minimum_required(VERSION 3.23 FATAL_ERROR) - -if(NOT DEFINED MATCH_REGEX) - message(FATAL_ERROR "Variable \"MATCH_REGEX\" is not defined.") -elseif(NOT DEFINED REPLACE_EXPR) - message(FATAL_ERROR "Variable \"REPLACE_EXPR\" is not defined.") -elseif(${CMAKE_ARGC} LESS 7) - message(FATAL_ERROR "Too few arguments.") -elseif(${CMAKE_ARGC} GREATER 8) - message(FATAL_ERROR "Too many arguments.") -elseif(NOT EXISTS ${CMAKE_ARGV6}) - message(FATAL_ERROR "Input file \"${CMAKE_ARGV6}\" not found.") -endif() - -message(STATUS "Replacing \"${MATCH_REGEX}\" with \"${REPLACE_EXPR}\" in \"${CMAKE_ARGV6}\"...") - -file(READ ${CMAKE_ARGV6} INPUT_STRING) - -string(REGEX REPLACE "${MATCH_REGEX}" "${REPLACE_EXPR}" OUTPUT_STRING "${INPUT_STRING}") - -if(DEFINED CMAKE_ARGV7) - set(OUTPUT_FILE "${CMAKE_ARGV7}") -else() - set(OUTPUT_FILE "${CMAKE_ARGV6}") -endif() - -if(NOT "${OUTPUT_STRING}" STREQUAL "${INPUT_STRING}") - file(WRITE ${OUTPUT_FILE} "${OUTPUT_STRING}") - - message(STATUS "Created/updated \"${OUTPUT_FILE}\".") -endif() diff --git a/rust/automerge-c/cmake/file-touch.cmake b/rust/automerge-c/cmake/file-touch.cmake deleted file mode 100644 index 2c196755..00000000 --- a/rust/automerge-c/cmake/file-touch.cmake +++ /dev/null @@ -1,35 +0,0 @@ -# This CMake script is used to force Cargo to regenerate the header file for the -# core bindings 
after the out-of-source build directory has been cleaned. -cmake_minimum_required(VERSION 3.23 FATAL_ERROR) - -if(NOT DEFINED CONDITION) - message(FATAL_ERROR "Variable \"CONDITION\" is not defined.") -elseif(${CMAKE_ARGC} LESS 7) - message(FATAL_ERROR "Too few arguments.") -elseif(${CMAKE_ARGC} GREATER 7) - message(FATAL_ERROR "Too many arguments.") -elseif(NOT EXISTS ${CMAKE_ARGV6}) - message(FATAL_ERROR "File \"${CMAKE_ARGV6}\" not found.") -elseif(IS_DIRECTORY "${CMAKE_ARG6}") - message(FATAL_ERROR "Directory \"${CMAKE_ARG6}\" can't be touched.") -endif() - -message(STATUS "Touching \"${CMAKE_ARGV6}\" if ${CONDITION} \"${CMAKE_ARGV5}\"...") - -if(CONDITION STREQUAL "EXISTS") - if(EXISTS "${CMAKE_ARGV5}") - set(DO_IT TRUE) - endif() -elseif((CONDITION STREQUAL "NOT_EXISTS") OR (CONDITION STREQUAL "!EXISTS")) - if(NOT EXISTS "${CMAKE_ARGV5}") - set(DO_IT TRUE) - endif() -else() - message(FATAL_ERROR "Unexpected condition \"${CONDITION}\".") -endif() - -if(DO_IT) - file(TOUCH_NOCREATE "${CMAKE_ARGV6}") - - message(STATUS "Touched \"${CMAKE_ARGV6}\".") -endif() diff --git a/rust/automerge-c/docs/CMakeLists.txt b/rust/automerge-c/docs/CMakeLists.txt deleted file mode 100644 index 1d94c872..00000000 --- a/rust/automerge-c/docs/CMakeLists.txt +++ /dev/null @@ -1,35 +0,0 @@ -find_package(Doxygen OPTIONAL_COMPONENTS dot) - -if(DOXYGEN_FOUND) - set(DOXYGEN_ALIASES "installed_headerfile=\\headerfile ${LIBRARY_NAME}.h <${PROJECT_NAME}/${LIBRARY_NAME}.h>") - - set(DOXYGEN_GENERATE_LATEX YES) - - set(DOXYGEN_PDF_HYPERLINKS YES) - - set(DOXYGEN_PROJECT_LOGO "${CMAKE_CURRENT_SOURCE_DIR}/img/brandmark.png") - - set(DOXYGEN_SORT_BRIEF_DOCS YES) - - set(DOXYGEN_USE_MDFILE_AS_MAINPAGE "${CMAKE_SOURCE_DIR}/README.md") - - doxygen_add_docs( - ${LIBRARY_NAME}_docs - "${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h" - "${CBINDGEN_TARGET_DIR}/config.h" - "${CBINDGEN_TARGET_DIR}/${UTILS_SUBDIR}/enum_string.h" - 
"${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/result.h" - "${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/stack_callback_data.h" - "${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/stack.h" - "${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/string.h" - "${CMAKE_SOURCE_DIR}/README.md" - WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} - COMMENT "Producing documentation with Doxygen..." - ) - - # \note A Doxygen input file isn't a file-level dependency so the Doxygen - # command must instead depend upon a target that either outputs the - # file or depends upon it also or it will just output an error message - # when it can't be found. - add_dependencies(${LIBRARY_NAME}_docs ${BINDINGS_NAME}_artifacts ${LIBRARY_NAME}_utilities) -endif() diff --git a/rust/automerge-c/docs/img/brandmark.png b/rust/automerge-c/docs/img/brandmark.png deleted file mode 100644 index 56e1c82d..00000000 Binary files a/rust/automerge-c/docs/img/brandmark.png and /dev/null differ diff --git a/rust/automerge-c/examples/CMakeLists.txt b/rust/automerge-c/examples/CMakeLists.txt deleted file mode 100644 index f080237b..00000000 --- a/rust/automerge-c/examples/CMakeLists.txt +++ /dev/null @@ -1,40 +0,0 @@ -add_executable( - ${LIBRARY_NAME}_quickstart - quickstart.c -) - -set_target_properties(${LIBRARY_NAME}_quickstart PROPERTIES LINKER_LANGUAGE C) - -# \note An imported library's INTERFACE_INCLUDE_DIRECTORIES property can't -# contain a non-existent path so its build-time include directory -# must be specified for all of its dependent targets instead. 
-target_include_directories( - ${LIBRARY_NAME}_quickstart - PRIVATE "$" -) - -target_link_libraries(${LIBRARY_NAME}_quickstart PRIVATE ${LIBRARY_NAME}) - -add_dependencies(${LIBRARY_NAME}_quickstart ${BINDINGS_NAME}_artifacts) - -if(BUILD_SHARED_LIBS AND WIN32) - add_custom_command( - TARGET ${LIBRARY_NAME}_quickstart - POST_BUILD - COMMAND ${CMAKE_COMMAND} -E copy_if_different - ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_${CMAKE_BUILD_TYPE}_POSTFIX}${CMAKE_SHARED_LIBRARY_SUFFIX} - ${CMAKE_BINARY_DIR} - COMMENT "Copying the DLL built by Cargo into the examples directory..." - VERBATIM - ) -endif() - -add_custom_command( - TARGET ${LIBRARY_NAME}_quickstart - POST_BUILD - COMMAND - ${LIBRARY_NAME}_quickstart - COMMENT - "Running the example quickstart..." - VERBATIM -) diff --git a/rust/automerge-c/examples/README.md b/rust/automerge-c/examples/README.md deleted file mode 100644 index 17e69412..00000000 --- a/rust/automerge-c/examples/README.md +++ /dev/null @@ -1,9 +0,0 @@ -# Automerge C examples - -## Quickstart - -```shell -cmake -E make_directory automerge-c/build -cmake -S automerge-c -B automerge-c/build -cmake --build automerge-c/build --target automerge_quickstart -``` diff --git a/rust/automerge-c/examples/quickstart.c b/rust/automerge-c/examples/quickstart.c deleted file mode 100644 index ab6769ef..00000000 --- a/rust/automerge-c/examples/quickstart.c +++ /dev/null @@ -1,129 +0,0 @@ -#include -#include -#include - -#include -#include -#include -#include -#include - -static bool abort_cb(AMstack**, void*); - -/** - * \brief Based on https://automerge.github.io/docs/quickstart - */ -int main(int argc, char** argv) { - AMstack* stack = NULL; - AMdoc* doc1; - AMitemToDoc(AMstackItem(&stack, AMcreate(NULL), abort_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1); - AMobjId const* const cards = - AMitemObjId(AMstackItem(&stack, AMmapPutObject(doc1, AM_ROOT, AMstr("cards"), AM_OBJ_TYPE_LIST), abort_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); 
- AMobjId const* const card1 = - AMitemObjId(AMstackItem(&stack, AMlistPutObject(doc1, cards, SIZE_MAX, true, AM_OBJ_TYPE_MAP), abort_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - AMstackItem(NULL, AMmapPutStr(doc1, card1, AMstr("title"), AMstr("Rewrite everything in Clojure")), abort_cb, - AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutBool(doc1, card1, AMstr("done"), false), abort_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMobjId const* const card2 = - AMitemObjId(AMstackItem(&stack, AMlistPutObject(doc1, cards, SIZE_MAX, true, AM_OBJ_TYPE_MAP), abort_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - AMstackItem(NULL, AMmapPutStr(doc1, card2, AMstr("title"), AMstr("Rewrite everything in Haskell")), abort_cb, - AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutBool(doc1, card2, AMstr("done"), false), abort_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(doc1, AMstr("Add card"), NULL), abort_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - - AMdoc* doc2; - AMitemToDoc(AMstackItem(&stack, AMcreate(NULL), abort_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2); - AMstackItem(NULL, AMmerge(doc2, doc1), abort_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - - AMbyteSpan binary; - AMitemToBytes(AMstackItem(&stack, AMsave(doc1), abort_cb, AMexpect(AM_VAL_TYPE_BYTES)), &binary); - AMitemToDoc(AMstackItem(&stack, AMload(binary.src, binary.count), abort_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2); - - AMstackItem(NULL, AMmapPutBool(doc1, card1, AMstr("done"), true), abort_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(doc1, AMstr("Mark card as done"), NULL), abort_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - - AMstackItem(NULL, AMlistDelete(doc2, cards, 0), abort_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(doc2, AMstr("Delete card"), NULL), abort_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - - AMstackItem(NULL, AMmerge(doc1, doc2), abort_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - - AMitems changes = AMstackItems(&stack, AMgetChanges(doc1, NULL), abort_cb, 
AMexpect(AM_VAL_TYPE_CHANGE)); - AMitem* item = NULL; - while ((item = AMitemsNext(&changes, 1)) != NULL) { - AMchange const* change; - AMitemToChange(item, &change); - AMitems const heads = AMstackItems(&stack, AMitemFromChangeHash(AMchangeHash(change)), abort_cb, - AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - char* const c_msg = AMstrdup(AMchangeMessage(change), NULL); - printf("%s %zu\n", c_msg, AMobjSize(doc1, cards, &heads)); - free(c_msg); - } - AMstackFree(&stack); -} - -/** - * \brief Examines the result at the top of the given stack and, if it's - * invalid, prints an error message to `stderr`, deallocates all results - * in the stack and exits. - * - * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. - * \param[in] data A pointer to an owned `AMstackCallbackData` struct or `NULL`. - * \return `true` if the top `AMresult` in \p stack is valid, `false` otherwise. - * \pre \p stack `!= NULL`. - */ -static bool abort_cb(AMstack** stack, void* data) { - static char buffer[512] = {0}; - - char const* suffix = NULL; - if (!stack) { - suffix = "Stack*"; - } else if (!*stack) { - suffix = "Stack"; - } else if (!(*stack)->result) { - suffix = ""; - } - if (suffix) { - fprintf(stderr, "Null `AMresult%s*`.\n", suffix); - AMstackFree(stack); - exit(EXIT_FAILURE); - return false; - } - AMstatus const status = AMresultStatus((*stack)->result); - switch (status) { - case AM_STATUS_ERROR: - strcpy(buffer, "Error"); - break; - case AM_STATUS_INVALID_RESULT: - strcpy(buffer, "Invalid result"); - break; - case AM_STATUS_OK: - break; - default: - sprintf(buffer, "Unknown `AMstatus` tag %d", status); - } - if (buffer[0]) { - char* const c_msg = AMstrdup(AMresultError((*stack)->result), NULL); - fprintf(stderr, "%s; %s.\n", buffer, c_msg); - free(c_msg); - AMstackFree(stack); - exit(EXIT_FAILURE); - return false; - } - if (data) { - AMstackCallbackData* sc_data = (AMstackCallbackData*)data; - AMvalType const tag = AMitemValType(AMresultItem((*stack)->result)); - if 
(tag != sc_data->bitmask) { - fprintf(stderr, "Unexpected tag `%s` (%d) instead of `%s` at %s:%d.\n", AMvalTypeToString(tag), tag, - AMvalTypeToString(sc_data->bitmask), sc_data->file, sc_data->line); - free(sc_data); - AMstackFree(stack); - exit(EXIT_FAILURE); - return false; - } - } - free(data); - return true; -} diff --git a/rust/automerge-c/include/automerge-c/utils/result.h b/rust/automerge-c/include/automerge-c/utils/result.h deleted file mode 100644 index ab8a2f93..00000000 --- a/rust/automerge-c/include/automerge-c/utils/result.h +++ /dev/null @@ -1,30 +0,0 @@ -#ifndef AUTOMERGE_C_UTILS_RESULT_H -#define AUTOMERGE_C_UTILS_RESULT_H -/** - * \file - * \brief Utility functions for use with `AMresult` structs. - */ - -#include - -#include - -/** - * \brief Transfers the items within an arbitrary list of results into a - * new result in their order of specification. - * \param[in] count The count of subsequent arguments. - * \param[in] ... A \p count list of arguments, each of which is a pointer to - * an `AMresult` struct whose items will be transferred out of it - * and which is subsequently freed. - * \return A pointer to an `AMresult` struct or `NULL`. - * \pre `∀𝑥 ∈` \p ... `, AMresultStatus(𝑥) == AM_STATUS_OK` - * \post `(∃𝑥 ∈` \p ... `, AMresultStatus(𝑥) != AM_STATUS_OK) -> NULL` - * \attention All `AMresult` struct pointer arguments are passed to - * `AMresultFree()` regardless of success; use `AMresultCat()` - * instead if you wish to pass them to `AMresultFree()` yourself. - * \warning The returned `AMresult` struct pointer must be passed to - * `AMresultFree()` in order to avoid a memory leak. 
- */ -AMresult* AMresultFrom(int count, ...); - -#endif /* AUTOMERGE_C_UTILS_RESULT_H */ diff --git a/rust/automerge-c/include/automerge-c/utils/stack.h b/rust/automerge-c/include/automerge-c/utils/stack.h deleted file mode 100644 index a8e9fd08..00000000 --- a/rust/automerge-c/include/automerge-c/utils/stack.h +++ /dev/null @@ -1,130 +0,0 @@ -#ifndef AUTOMERGE_C_UTILS_STACK_H -#define AUTOMERGE_C_UTILS_STACK_H -/** - * \file - * \brief Utility data structures and functions for hiding `AMresult` structs, - * managing their lifetimes, and automatically applying custom - * validation logic to the `AMitem` structs that they contain. - * - * \note The `AMstack` struct and its related functions drastically reduce the - * need for boilerplate code and/or `goto` statement usage within a C - * application but a higher-level programming language offers even better - * ways to do the same things. - */ - -#include - -/** - * \struct AMstack - * \brief A node in a singly-linked list of result pointers. - */ -typedef struct AMstack { - /** A result to be deallocated. */ - AMresult* result; - /** The previous node in the singly-linked list or `NULL`. */ - struct AMstack* prev; -} AMstack; - -/** - * \memberof AMstack - * \brief The prototype of a function that examines the result at the top of - * the given stack in terms of some arbitrary data. - * - * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. - * \param[in] data A pointer to arbitrary data or `NULL`. - * \return `true` if the top `AMresult` struct in \p stack is valid, `false` - * otherwise. - * \pre \p stack `!= NULL`. - */ -typedef bool (*AMstackCallback)(AMstack** stack, void* data); - -/** - * \memberof AMstack - * \brief Deallocates the storage for a stack of results. - * - * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. 
- * \pre \p stack `!= NULL` - * \post `*stack == NULL` - */ -void AMstackFree(AMstack** stack); - -/** - * \memberof AMstack - * \brief Gets a result from the stack after removing it. - * - * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. - * \param[in] result A pointer to the `AMresult` to be popped or `NULL` to - * select the top result in \p stack. - * \return A pointer to an `AMresult` struct or `NULL`. - * \pre \p stack `!= NULL` - * \warning The returned `AMresult` struct pointer must be passed to - * `AMresultFree()` in order to avoid a memory leak. - */ -AMresult* AMstackPop(AMstack** stack, AMresult const* result); - -/** - * \memberof AMstack - * \brief Pushes the given result onto the given stack, calls the given - * callback with the given data to validate it and then either gets the - * result if it's valid or gets `NULL` instead. - * - * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. - * \param[in] result A pointer to an `AMresult` struct. - * \param[in] callback A pointer to a function with the same signature as - * `AMstackCallback()` or `NULL`. - * \param[in] data A pointer to arbitrary data or `NULL` which is passed to - * \p callback. - * \return \p result or `NULL`. - * \warning If \p stack `== NULL` then \p result is deallocated in order to - * avoid a memory leak. - */ -AMresult* AMstackResult(AMstack** stack, AMresult* result, AMstackCallback callback, void* data); - -/** - * \memberof AMstack - * \brief Pushes the given result onto the given stack, calls the given - * callback with the given data to validate it and then either gets the - * first item in the sequence of items within that result if it's valid - * or gets `NULL` instead. - * - * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. - * \param[in] result A pointer to an `AMresult` struct. - * \param[in] callback A pointer to a function with the same signature as - * `AMstackCallback()` or `NULL`. 
- * \param[in] data A pointer to arbitrary data or `NULL` which is passed to - * \p callback. - * \return A pointer to an `AMitem` struct or `NULL`. - * \warning If \p stack `== NULL` then \p result is deallocated in order to - * avoid a memory leak. - */ -AMitem* AMstackItem(AMstack** stack, AMresult* result, AMstackCallback callback, void* data); - -/** - * \memberof AMstack - * \brief Pushes the given result onto the given stack, calls the given - * callback with the given data to validate it and then either gets an - * `AMitems` struct over the sequence of items within that result if it's - * valid or gets an empty `AMitems` instead. - * - * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. - * \param[in] result A pointer to an `AMresult` struct. - * \param[in] callback A pointer to a function with the same signature as - * `AMstackCallback()` or `NULL`. - * \param[in] data A pointer to arbitrary data or `NULL` which is passed to - * \p callback. - * \return An `AMitems` struct. - * \warning If \p stack `== NULL` then \p result is deallocated immediately - * in order to avoid a memory leak. - */ -AMitems AMstackItems(AMstack** stack, AMresult* result, AMstackCallback callback, void* data); - -/** - * \memberof AMstack - * \brief Gets the count of results that have been pushed onto the stack. - * - * \param[in,out] stack A pointer to an `AMstack` struct. - * \return A 64-bit unsigned integer. 
- */ -size_t AMstackSize(AMstack const* const stack); - -#endif /* AUTOMERGE_C_UTILS_STACK_H */ diff --git a/rust/automerge-c/include/automerge-c/utils/stack_callback_data.h b/rust/automerge-c/include/automerge-c/utils/stack_callback_data.h deleted file mode 100644 index 6f9f1edb..00000000 --- a/rust/automerge-c/include/automerge-c/utils/stack_callback_data.h +++ /dev/null @@ -1,53 +0,0 @@ -#ifndef AUTOMERGE_C_UTILS_PUSH_CALLBACK_DATA_H -#define AUTOMERGE_C_UTILS_PUSH_CALLBACK_DATA_H -/** - * \file - * \brief Utility data structures, functions and macros for supplying - * parameters to the custom validation logic applied to `AMitem` - * structs. - */ - -#include - -/** - * \struct AMstackCallbackData - * \brief A data structure for passing the parameters of an item value test - * to an implementation of the `AMstackCallback` function prototype. - */ -typedef struct { - /** A bitmask of `AMvalType` tags. */ - AMvalType bitmask; - /** A null-terminated file path string. */ - char const* file; - /** The ordinal number of a line within a file. */ - int line; -} AMstackCallbackData; - -/** - * \memberof AMstackCallbackData - * \brief Allocates a new `AMstackCallbackData` struct and initializes its - * members from their corresponding arguments. - * - * \param[in] bitmask A bitmask of `AMvalType` tags. - * \param[in] file A null-terminated file path string. - * \param[in] line The ordinal number of a line within a file. - * \return A pointer to a disowned `AMstackCallbackData` struct. - * \warning The returned pointer must be passed to `free()` to avoid a memory - * leak. - */ -AMstackCallbackData* AMstackCallbackDataInit(AMvalType const bitmask, char const* const file, int const line); - -/** - * \memberof AMstackCallbackData - * \def AMexpect - * \brief Allocates a new `AMstackCallbackData` struct and initializes it from - * an `AMvalueType` bitmask. - * - * \param[in] bitmask A bitmask of `AMvalType` tags. 
- * \return A pointer to a disowned `AMstackCallbackData` struct. - * \warning The returned pointer must be passed to `free()` to avoid a memory - * leak. - */ -#define AMexpect(bitmask) AMstackCallbackDataInit(bitmask, __FILE__, __LINE__) - -#endif /* AUTOMERGE_C_UTILS_PUSH_CALLBACK_DATA_H */ diff --git a/rust/automerge-c/include/automerge-c/utils/string.h b/rust/automerge-c/include/automerge-c/utils/string.h deleted file mode 100644 index 4d61c2e9..00000000 --- a/rust/automerge-c/include/automerge-c/utils/string.h +++ /dev/null @@ -1,29 +0,0 @@ -#ifndef AUTOMERGE_C_UTILS_STRING_H -#define AUTOMERGE_C_UTILS_STRING_H -/** - * \file - * \brief Utility functions for use with `AMbyteSpan` structs that provide - * UTF-8 string views. - */ - -#include - -/** - * \memberof AMbyteSpan - * \brief Returns a pointer to a null-terminated byte string which is a - * duplicate of the given UTF-8 string view except for the substitution - * of its NUL (0) characters with the specified null-terminated byte - * string. - * - * \param[in] str A UTF-8 string view as an `AMbyteSpan` struct. - * \param[in] nul A null-terminated byte string to substitute for NUL characters - * or `NULL` to substitute `"\\0"` for NUL characters. - * \return A disowned null-terminated byte string. - * \pre \p str.src `!= NULL` - * \pre \p str.count `<= sizeof(`\p str.src `)` - * \warning The returned pointer must be passed to `free()` to avoid a memory - * leak. - */ -char* AMstrdup(AMbyteSpan const str, char const* nul); - -#endif /* AUTOMERGE_C_UTILS_STRING_H */ diff --git a/rust/automerge-c/src/actor_id.rs b/rust/automerge-c/src/actor_id.rs deleted file mode 100644 index 5a28959e..00000000 --- a/rust/automerge-c/src/actor_id.rs +++ /dev/null @@ -1,193 +0,0 @@ -use automerge as am; -use libc::c_int; -use std::cell::RefCell; -use std::cmp::Ordering; -use std::str::FromStr; - -use crate::byte_span::AMbyteSpan; -use crate::result::{to_result, AMresult}; - -macro_rules! 
to_actor_id { - ($handle:expr) => {{ - let handle = $handle.as_ref(); - match handle { - Some(b) => b, - None => return AMresult::error("Invalid `AMactorId*`").into(), - } - }}; -} - -pub(crate) use to_actor_id; - -/// \struct AMactorId -/// \installed_headerfile -/// \brief An actor's unique identifier. -#[derive(Eq, PartialEq)] -pub struct AMactorId { - body: *const am::ActorId, - hex_str: RefCell>>, -} - -impl AMactorId { - pub fn new(actor_id: &am::ActorId) -> Self { - Self { - body: actor_id, - hex_str: Default::default(), - } - } - - pub fn as_hex_str(&self) -> AMbyteSpan { - let mut hex_str = self.hex_str.borrow_mut(); - match hex_str.as_mut() { - None => { - let hex_string = unsafe { (*self.body).to_hex_string() }; - hex_str - .insert(hex_string.into_boxed_str()) - .as_bytes() - .into() - } - Some(hex_str) => hex_str.as_bytes().into(), - } - } -} - -impl AsRef for AMactorId { - fn as_ref(&self) -> &am::ActorId { - unsafe { &*self.body } - } -} - -/// \memberof AMactorId -/// \brief Gets the value of an actor identifier as an array of bytes. -/// -/// \param[in] actor_id A pointer to an `AMactorId` struct. -/// \return An `AMbyteSpan` struct for an array of bytes. -/// \pre \p actor_id `!= NULL` -/// \internal -/// -/// # Safety -/// actor_id must be a valid pointer to an AMactorId -#[no_mangle] -pub unsafe extern "C" fn AMactorIdBytes(actor_id: *const AMactorId) -> AMbyteSpan { - match actor_id.as_ref() { - Some(actor_id) => actor_id.as_ref().into(), - None => Default::default(), - } -} - -/// \memberof AMactorId -/// \brief Compares two actor identifiers. -/// -/// \param[in] actor_id1 A pointer to an `AMactorId` struct. -/// \param[in] actor_id2 A pointer to an `AMactorId` struct. -/// \return `-1` if \p actor_id1 `<` \p actor_id2, `0` if -/// \p actor_id1 `==` \p actor_id2 and `1` if -/// \p actor_id1 `>` \p actor_id2. 
-/// \pre \p actor_id1 `!= NULL` -/// \pre \p actor_id2 `!= NULL` -/// \internal -/// -/// #Safety -/// actor_id1 must be a valid pointer to an AMactorId -/// actor_id2 must be a valid pointer to an AMactorId -#[no_mangle] -pub unsafe extern "C" fn AMactorIdCmp( - actor_id1: *const AMactorId, - actor_id2: *const AMactorId, -) -> c_int { - match (actor_id1.as_ref(), actor_id2.as_ref()) { - (Some(actor_id1), Some(actor_id2)) => match actor_id1.as_ref().cmp(actor_id2.as_ref()) { - Ordering::Less => -1, - Ordering::Equal => 0, - Ordering::Greater => 1, - }, - (None, Some(_)) => -1, - (None, None) => 0, - (Some(_), None) => 1, - } -} - -/// \memberof AMactorId -/// \brief Allocates a new actor identifier and initializes it from a random -/// UUID value. -/// -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_ACTOR_ID` item. -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -#[no_mangle] -pub unsafe extern "C" fn AMactorIdInit() -> *mut AMresult { - to_result(Ok::(am::ActorId::random())) -} - -/// \memberof AMactorId -/// \brief Allocates a new actor identifier and initializes it from an array of -/// bytes value. -/// -/// \param[in] src A pointer to an array of bytes. -/// \param[in] count The count of bytes to copy from the array pointed to by -/// \p src. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_ACTOR_ID` item. -/// \pre \p src `!= NULL` -/// \pre `sizeof(`\p src `) > 0` -/// \pre \p count `<= sizeof(`\p src `)` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// src must be a byte array of length `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMactorIdFromBytes(src: *const u8, count: usize) -> *mut AMresult { - if !src.is_null() { - let value = std::slice::from_raw_parts(src, count); - to_result(Ok::(am::ActorId::from( - value, - ))) - } else { - AMresult::error("Invalid uint8_t*").into() - } -} - -/// \memberof AMactorId -/// \brief Allocates a new actor identifier and initializes it from a -/// hexadecimal UTF-8 string view value. -/// -/// \param[in] value A UTF-8 string view as an `AMbyteSpan` struct. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_ACTOR_ID` item. -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// hex_str must be a valid pointer to an AMbyteSpan -#[no_mangle] -pub unsafe extern "C" fn AMactorIdFromStr(value: AMbyteSpan) -> *mut AMresult { - use am::AutomergeError::InvalidActorId; - - to_result(match (&value).try_into() { - Ok(s) => match am::ActorId::from_str(s) { - Ok(actor_id) => Ok(actor_id), - Err(_) => Err(InvalidActorId(String::from(s))), - }, - Err(e) => Err(e), - }) -} - -/// \memberof AMactorId -/// \brief Gets the value of an actor identifier as a UTF-8 hexadecimal string -/// view. -/// -/// \param[in] actor_id A pointer to an `AMactorId` struct. -/// \return A UTF-8 string view as an `AMbyteSpan` struct. 
-/// \pre \p actor_id `!= NULL` -/// \internal -/// -/// # Safety -/// actor_id must be a valid pointer to an AMactorId -#[no_mangle] -pub unsafe extern "C" fn AMactorIdStr(actor_id: *const AMactorId) -> AMbyteSpan { - match actor_id.as_ref() { - Some(actor_id) => actor_id.as_hex_str(), - None => Default::default(), - } -} diff --git a/rust/automerge-c/src/byte_span.rs b/rust/automerge-c/src/byte_span.rs deleted file mode 100644 index 5855cfc7..00000000 --- a/rust/automerge-c/src/byte_span.rs +++ /dev/null @@ -1,223 +0,0 @@ -use automerge as am; -use std::cmp::Ordering; -use std::convert::TryFrom; -use std::os::raw::c_char; - -use libc::{c_int, strlen}; -use smol_str::SmolStr; - -macro_rules! to_str { - ($byte_span:expr) => {{ - let result: Result<&str, am::AutomergeError> = (&$byte_span).try_into(); - match result { - Ok(s) => s, - Err(e) => return AMresult::error(&e.to_string()).into(), - } - }}; -} - -pub(crate) use to_str; - -/// \struct AMbyteSpan -/// \installed_headerfile -/// \brief A view onto an array of bytes. -#[repr(C)] -pub struct AMbyteSpan { - /// A pointer to the first byte of an array of bytes. - /// \warning \p src is only valid until the array of bytes to which it - /// points is freed. - /// \note If the `AMbyteSpan` came from within an `AMitem` struct then - /// \p src will be freed when the pointer to the `AMresult` struct - /// containing the `AMitem` struct is passed to `AMresultFree()`. - pub src: *const u8, - /// The count of bytes in the array. 
- pub count: usize, -} - -impl AMbyteSpan { - pub fn is_null(&self) -> bool { - self.src.is_null() - } -} - -impl Default for AMbyteSpan { - fn default() -> Self { - Self { - src: std::ptr::null(), - count: 0, - } - } -} - -impl PartialEq for AMbyteSpan { - fn eq(&self, other: &Self) -> bool { - if self.count != other.count { - return false; - } else if self.src == other.src { - return true; - } - <&[u8]>::from(self) == <&[u8]>::from(other) - } -} - -impl Eq for AMbyteSpan {} - -impl From<&am::ActorId> for AMbyteSpan { - fn from(actor: &am::ActorId) -> Self { - let slice = actor.to_bytes(); - Self { - src: slice.as_ptr(), - count: slice.len(), - } - } -} - -impl From<&mut am::ActorId> for AMbyteSpan { - fn from(actor: &mut am::ActorId) -> Self { - actor.as_ref().into() - } -} - -impl From<&am::ChangeHash> for AMbyteSpan { - fn from(change_hash: &am::ChangeHash) -> Self { - Self { - src: change_hash.0.as_ptr(), - count: change_hash.0.len(), - } - } -} - -impl From<*const c_char> for AMbyteSpan { - fn from(cs: *const c_char) -> Self { - if !cs.is_null() { - Self { - src: cs as *const u8, - count: unsafe { strlen(cs) }, - } - } else { - Self::default() - } - } -} - -impl From<&SmolStr> for AMbyteSpan { - fn from(smol_str: &SmolStr) -> Self { - smol_str.as_bytes().into() - } -} - -impl From<&[u8]> for AMbyteSpan { - fn from(slice: &[u8]) -> Self { - Self { - src: slice.as_ptr(), - count: slice.len(), - } - } -} - -impl From<&AMbyteSpan> for &[u8] { - fn from(byte_span: &AMbyteSpan) -> Self { - unsafe { std::slice::from_raw_parts(byte_span.src, byte_span.count) } - } -} - -impl From<&AMbyteSpan> for Vec { - fn from(byte_span: &AMbyteSpan) -> Self { - <&[u8]>::from(byte_span).to_vec() - } -} - -impl TryFrom<&AMbyteSpan> for am::ChangeHash { - type Error = am::AutomergeError; - - fn try_from(byte_span: &AMbyteSpan) -> Result { - use am::AutomergeError::InvalidChangeHashBytes; - - let slice: &[u8] = byte_span.into(); - match slice.try_into() { - Ok(change_hash) => 
Ok(change_hash), - Err(e) => Err(InvalidChangeHashBytes(e)), - } - } -} - -impl TryFrom<&AMbyteSpan> for &str { - type Error = am::AutomergeError; - - fn try_from(byte_span: &AMbyteSpan) -> Result { - use am::AutomergeError::InvalidCharacter; - - let slice = byte_span.into(); - match std::str::from_utf8(slice) { - Ok(str_) => Ok(str_), - Err(e) => Err(InvalidCharacter(e.valid_up_to())), - } - } -} - -/// \memberof AMbyteSpan -/// \brief Creates a view onto an array of bytes. -/// -/// \param[in] src A pointer to an array of bytes or `NULL`. -/// \param[in] count The count of bytes to view from the array pointed to by -/// \p src. -/// \return An `AMbyteSpan` struct. -/// \pre \p count `<= sizeof(`\p src `)` -/// \post `(`\p src `== NULL) -> (AMbyteSpan){NULL, 0}` -/// \internal -/// -/// #Safety -/// src must be a byte array of length `>= count` or `std::ptr::null()` -#[no_mangle] -pub unsafe extern "C" fn AMbytes(src: *const u8, count: usize) -> AMbyteSpan { - AMbyteSpan { - src, - count: if src.is_null() { 0 } else { count }, - } -} - -/// \memberof AMbyteSpan -/// \brief Creates a view onto a C string. -/// -/// \param[in] c_str A null-terminated byte string or `NULL`. -/// \return An `AMbyteSpan` struct. -/// \pre Each byte in \p c_str encodes one UTF-8 character. -/// \internal -/// -/// #Safety -/// c_str must be a null-terminated array of `std::os::raw::c_char` or `std::ptr::null()`. -#[no_mangle] -pub unsafe extern "C" fn AMstr(c_str: *const c_char) -> AMbyteSpan { - c_str.into() -} - -/// \memberof AMbyteSpan -/// \brief Compares two UTF-8 string views lexicographically. -/// -/// \param[in] lhs A UTF-8 string view as an `AMbyteSpan` struct. -/// \param[in] rhs A UTF-8 string view as an `AMbyteSpan` struct. -/// \return Negative value if \p lhs appears before \p rhs in lexicographical order. -/// Zero if \p lhs and \p rhs compare equal. -/// Positive value if \p lhs appears after \p rhs in lexicographical order. 
-/// \pre \p lhs.src `!= NULL` -/// \pre \p lhs.count `<= sizeof(`\p lhs.src `)` -/// \pre \p rhs.src `!= NULL` -/// \pre \p rhs.count `<= sizeof(`\p rhs.src `)` -/// \internal -/// -/// #Safety -/// lhs.src must be a byte array of length >= lhs.count -/// rhs.src must be a a byte array of length >= rhs.count -#[no_mangle] -pub unsafe extern "C" fn AMstrCmp(lhs: AMbyteSpan, rhs: AMbyteSpan) -> c_int { - match (<&str>::try_from(&lhs), <&str>::try_from(&rhs)) { - (Ok(lhs), Ok(rhs)) => match lhs.cmp(rhs) { - Ordering::Less => -1, - Ordering::Equal => 0, - Ordering::Greater => 1, - }, - (Err(_), Ok(_)) => -1, - (Err(_), Err(_)) => 0, - (Ok(_), Err(_)) => 1, - } -} diff --git a/rust/automerge-c/src/change.rs b/rust/automerge-c/src/change.rs deleted file mode 100644 index 8529ed94..00000000 --- a/rust/automerge-c/src/change.rs +++ /dev/null @@ -1,356 +0,0 @@ -use automerge as am; -use std::cell::RefCell; - -use crate::byte_span::AMbyteSpan; -use crate::result::{to_result, AMresult}; - -macro_rules! to_change { - ($handle:expr) => {{ - let handle = $handle.as_ref(); - match handle { - Some(b) => b, - None => return AMresult::error("Invalid `AMchange*`").into(), - } - }}; -} - -/// \struct AMchange -/// \installed_headerfile -/// \brief A group of operations performed by an actor. 
-#[derive(Eq, PartialEq)] -pub struct AMchange { - body: *mut am::Change, - change_hash: RefCell>, -} - -impl AMchange { - pub fn new(change: &mut am::Change) -> Self { - Self { - body: change, - change_hash: Default::default(), - } - } - - pub fn message(&self) -> AMbyteSpan { - if let Some(message) = unsafe { (*self.body).message() } { - return message.as_str().as_bytes().into(); - } - Default::default() - } - - pub fn hash(&self) -> AMbyteSpan { - let mut change_hash = self.change_hash.borrow_mut(); - if let Some(change_hash) = change_hash.as_ref() { - change_hash.into() - } else { - let hash = unsafe { (*self.body).hash() }; - let ptr = change_hash.insert(hash); - AMbyteSpan { - src: ptr.0.as_ptr(), - count: hash.as_ref().len(), - } - } - } -} - -impl AsMut for AMchange { - fn as_mut(&mut self) -> &mut am::Change { - unsafe { &mut *self.body } - } -} - -impl AsRef for AMchange { - fn as_ref(&self) -> &am::Change { - unsafe { &*self.body } - } -} - -/// \memberof AMchange -/// \brief Gets the first referenced actor identifier in a change. -/// -/// \param[in] change A pointer to an `AMchange` struct. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_ACTOR_ID` item. -/// \pre \p change `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// change must be a valid pointer to an AMchange -#[no_mangle] -pub unsafe extern "C" fn AMchangeActorId(change: *const AMchange) -> *mut AMresult { - let change = to_change!(change); - to_result(Ok::( - change.as_ref().actor_id().clone(), - )) -} - -/// \memberof AMchange -/// \brief Compresses the raw bytes of a change. -/// -/// \param[in] change A pointer to an `AMchange` struct. 
-/// \pre \p change `!= NULL` -/// \internal -/// -/// # Safety -/// change must be a valid pointer to an AMchange -#[no_mangle] -pub unsafe extern "C" fn AMchangeCompress(change: *mut AMchange) { - if let Some(change) = change.as_mut() { - let _ = change.as_mut().bytes(); - }; -} - -/// \memberof AMchange -/// \brief Gets the dependencies of a change. -/// -/// \param[in] change A pointer to an `AMchange` struct. -/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. -/// \pre \p change `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// change must be a valid pointer to an AMchange -#[no_mangle] -pub unsafe extern "C" fn AMchangeDeps(change: *const AMchange) -> *mut AMresult { - to_result(match change.as_ref() { - Some(change) => change.as_ref().deps(), - None => Default::default(), - }) -} - -/// \memberof AMchange -/// \brief Gets the extra bytes of a change. -/// -/// \param[in] change A pointer to an `AMchange` struct. -/// \return An `AMbyteSpan` struct. -/// \pre \p change `!= NULL` -/// \internal -/// -/// # Safety -/// change must be a valid pointer to an AMchange -#[no_mangle] -pub unsafe extern "C" fn AMchangeExtraBytes(change: *const AMchange) -> AMbyteSpan { - if let Some(change) = change.as_ref() { - change.as_ref().extra_bytes().into() - } else { - Default::default() - } -} - -/// \memberof AMchange -/// \brief Allocates a new change and initializes it from an array of bytes value. -/// -/// \param[in] src A pointer to an array of bytes. -/// \param[in] count The count of bytes to load from the array pointed to by -/// \p src. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_CHANGE` item. 
-/// \pre \p src `!= NULL` -/// \pre `sizeof(`\p src `) > 0` -/// \pre \p count `<= sizeof(`\p src `)` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// src must be a byte array of length `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMchangeFromBytes(src: *const u8, count: usize) -> *mut AMresult { - let data = std::slice::from_raw_parts(src, count); - to_result(am::Change::from_bytes(data.to_vec())) -} - -/// \memberof AMchange -/// \brief Gets the hash of a change. -/// -/// \param[in] change A pointer to an `AMchange` struct. -/// \return An `AMbyteSpan` struct for a change hash. -/// \pre \p change `!= NULL` -/// \internal -/// -/// # Safety -/// change must be a valid pointer to an AMchange -#[no_mangle] -pub unsafe extern "C" fn AMchangeHash(change: *const AMchange) -> AMbyteSpan { - match change.as_ref() { - Some(change) => change.hash(), - None => Default::default(), - } -} - -/// \memberof AMchange -/// \brief Tests the emptiness of a change. -/// -/// \param[in] change A pointer to an `AMchange` struct. -/// \return `true` if \p change is empty, `false` otherwise. -/// \pre \p change `!= NULL` -/// \internal -/// -/// # Safety -/// change must be a valid pointer to an AMchange -#[no_mangle] -pub unsafe extern "C" fn AMchangeIsEmpty(change: *const AMchange) -> bool { - if let Some(change) = change.as_ref() { - change.as_ref().is_empty() - } else { - true - } -} - -/// \memberof AMchange -/// \brief Loads a document into a sequence of changes. -/// -/// \param[in] src A pointer to an array of bytes. -/// \param[in] count The count of bytes to load from the array pointed to by -/// \p src. -/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE` items. 
-/// \pre \p src `!= NULL` -/// \pre `sizeof(`\p src `) > 0` -/// \pre \p count `<= sizeof(`\p src `)` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// src must be a byte array of length `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMchangeLoadDocument(src: *const u8, count: usize) -> *mut AMresult { - let data = std::slice::from_raw_parts(src, count); - to_result::, _>>( - am::Automerge::load(data) - .and_then(|d| d.get_changes(&[]).map(|c| c.into_iter().cloned().collect())), - ) -} - -/// \memberof AMchange -/// \brief Gets the maximum operation index of a change. -/// -/// \param[in] change A pointer to an `AMchange` struct. -/// \return A 64-bit unsigned integer. -/// \pre \p change `!= NULL` -/// \internal -/// -/// # Safety -/// change must be a valid pointer to an AMchange -#[no_mangle] -pub unsafe extern "C" fn AMchangeMaxOp(change: *const AMchange) -> u64 { - if let Some(change) = change.as_ref() { - change.as_ref().max_op() - } else { - u64::MAX - } -} - -/// \memberof AMchange -/// \brief Gets the message of a change. -/// -/// \param[in] change A pointer to an `AMchange` struct. -/// \return An `AMbyteSpan` struct for a UTF-8 string. -/// \pre \p change `!= NULL` -/// \internal -/// -/// # Safety -/// change must be a valid pointer to an AMchange -#[no_mangle] -pub unsafe extern "C" fn AMchangeMessage(change: *const AMchange) -> AMbyteSpan { - if let Some(change) = change.as_ref() { - return change.message(); - }; - Default::default() -} - -/// \memberof AMchange -/// \brief Gets the index of a change in the changes from an actor. -/// -/// \param[in] change A pointer to an `AMchange` struct. -/// \return A 64-bit unsigned integer. 
-/// \pre \p change `!= NULL` -/// \internal -/// -/// # Safety -/// change must be a valid pointer to an AMchange -#[no_mangle] -pub unsafe extern "C" fn AMchangeSeq(change: *const AMchange) -> u64 { - if let Some(change) = change.as_ref() { - change.as_ref().seq() - } else { - u64::MAX - } -} - -/// \memberof AMchange -/// \brief Gets the size of a change. -/// -/// \param[in] change A pointer to an `AMchange` struct. -/// \return A 64-bit unsigned integer. -/// \pre \p change `!= NULL` -/// \internal -/// -/// # Safety -/// change must be a valid pointer to an AMchange -#[no_mangle] -pub unsafe extern "C" fn AMchangeSize(change: *const AMchange) -> usize { - if let Some(change) = change.as_ref() { - return change.as_ref().len(); - } - 0 -} - -/// \memberof AMchange -/// \brief Gets the start operation index of a change. -/// -/// \param[in] change A pointer to an `AMchange` struct. -/// \return A 64-bit unsigned integer. -/// \pre \p change `!= NULL` -/// \internal -/// -/// # Safety -/// change must be a valid pointer to an AMchange -#[no_mangle] -pub unsafe extern "C" fn AMchangeStartOp(change: *const AMchange) -> u64 { - if let Some(change) = change.as_ref() { - u64::from(change.as_ref().start_op()) - } else { - u64::MAX - } -} - -/// \memberof AMchange -/// \brief Gets the commit time of a change. -/// -/// \param[in] change A pointer to an `AMchange` struct. -/// \return A 64-bit signed integer. -/// \pre \p change `!= NULL` -/// \internal -/// -/// # Safety -/// change must be a valid pointer to an AMchange -#[no_mangle] -pub unsafe extern "C" fn AMchangeTime(change: *const AMchange) -> i64 { - if let Some(change) = change.as_ref() { - change.as_ref().timestamp() - } else { - i64::MAX - } -} - -/// \memberof AMchange -/// \brief Gets the raw bytes of a change. -/// -/// \param[in] change A pointer to an `AMchange` struct. -/// \return An `AMbyteSpan` struct for an array of bytes. 
-/// \pre \p change `!= NULL` -/// \internal -/// -/// # Safety -/// change must be a valid pointer to an AMchange -#[no_mangle] -pub unsafe extern "C" fn AMchangeRawBytes(change: *const AMchange) -> AMbyteSpan { - if let Some(change) = change.as_ref() { - change.as_ref().raw_bytes().into() - } else { - Default::default() - } -} diff --git a/rust/automerge-c/src/doc.rs b/rust/automerge-c/src/doc.rs deleted file mode 100644 index 82f52bf7..00000000 --- a/rust/automerge-c/src/doc.rs +++ /dev/null @@ -1,915 +0,0 @@ -use automerge as am; -use automerge::sync::SyncDoc; -use automerge::transaction::{CommitOptions, Transactable}; -use automerge::ReadDoc; -use std::ops::{Deref, DerefMut}; - -use crate::actor_id::{to_actor_id, AMactorId}; -use crate::byte_span::{to_str, AMbyteSpan}; -use crate::items::AMitems; -use crate::obj::{to_obj_id, AMobjId, AMobjType}; -use crate::result::{to_result, AMresult}; -use crate::sync::{to_sync_message, AMsyncMessage, AMsyncState}; - -pub mod list; -pub mod map; -pub mod utils; - -use crate::doc::utils::{clamp, to_doc, to_doc_mut, to_items}; - -macro_rules! to_sync_state_mut { - ($handle:expr) => {{ - let handle = $handle.as_mut(); - match handle { - Some(b) => b, - None => return AMresult::error("Invalid `AMsyncState*`").into(), - } - }}; -} - -/// \struct AMdoc -/// \installed_headerfile -/// \brief A JSON-like CRDT. 
-#[derive(Clone)] -pub struct AMdoc(am::AutoCommit); - -impl AMdoc { - pub fn new(auto_commit: am::AutoCommit) -> Self { - Self(auto_commit) - } - - pub fn is_equal_to(&mut self, other: &mut Self) -> bool { - self.document().get_heads() == other.document().get_heads() - } -} - -impl AsRef for AMdoc { - fn as_ref(&self) -> &am::AutoCommit { - &self.0 - } -} - -impl Deref for AMdoc { - type Target = am::AutoCommit; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl DerefMut for AMdoc { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} - -/// \memberof AMdoc -/// \brief Applies a sequence of changes to a document. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] items A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE` -/// items. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre \p items `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// items must be a valid pointer to an AMitems. -#[no_mangle] -pub unsafe extern "C" fn AMapplyChanges(doc: *mut AMdoc, items: *const AMitems) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let items = to_items!(items); - match Vec::::try_from(items) { - Ok(changes) => to_result(doc.apply_changes(changes)), - Err(e) => AMresult::error(&e.to_string()).into(), - } -} - -/// \memberof AMdoc -/// \brief Allocates storage for a document and initializes it by duplicating -/// the given document. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_DOC` item. -/// \pre \p doc `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMclone(doc: *const AMdoc) -> *mut AMresult { - let doc = to_doc!(doc); - to_result(doc.as_ref().clone()) -} - -/// \memberof AMdoc -/// \brief Allocates a new document and initializes it with defaults. -/// -/// \param[in] actor_id A pointer to an `AMactorId` struct or `NULL` for a -/// random one. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_DOC` item. -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// actor_id must be a valid pointer to an AMactorId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMcreate(actor_id: *const AMactorId) -> *mut AMresult { - to_result(match actor_id.as_ref() { - Some(actor_id) => am::AutoCommit::new().with_actor(actor_id.as_ref().clone()), - None => am::AutoCommit::new(), - }) -} - -/// \memberof AMdoc -/// \brief Commits the current operations on a document with an optional -/// message and/or *nix timestamp (milliseconds). -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] message A UTF-8 string view as an `AMbyteSpan` struct. -/// \param[in] timestamp A pointer to a 64-bit integer or `NULL`. -/// \return A pointer to an `AMresult` struct with one `AM_VAL_TYPE_CHANGE_HASH` -/// item if there were operations to commit or an `AM_VAL_TYPE_VOID` item -/// if there were no operations to commit. -/// \pre \p doc `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMcommit( - doc: *mut AMdoc, - message: AMbyteSpan, - timestamp: *const i64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let mut options = CommitOptions::default(); - if !message.is_null() { - options.set_message(to_str!(message)); - } - if let Some(timestamp) = timestamp.as_ref() { - options.set_time(*timestamp); - } - to_result(doc.commit_with(options)) -} - -/// \memberof AMdoc -/// \brief Creates an empty change with an optional message and/or *nix -/// timestamp (milliseconds). -/// -/// \details This is useful if you wish to create a "merge commit" which has as -/// its dependents the current heads of the document but you don't have -/// any operations to add to the document. -/// -/// \note If there are outstanding uncommitted changes to the document -/// then two changes will be created: one for creating the outstanding -/// changes and one for the empty change. The empty change will always be -/// the latest change in the document after this call and the returned -/// hash will be the hash of that empty change. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] message A UTF-8 string view as an `AMbyteSpan` struct. -/// \param[in] timestamp A pointer to a 64-bit integer or `NULL`. -/// \return A pointer to an `AMresult` struct with one `AM_VAL_TYPE_CHANGE_HASH` -/// item. -/// \pre \p doc `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMemptyChange( - doc: *mut AMdoc, - message: AMbyteSpan, - timestamp: *const i64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let mut options = CommitOptions::default(); - if !message.is_null() { - options.set_message(to_str!(message)); - } - if let Some(timestamp) = timestamp.as_ref() { - options.set_time(*timestamp); - } - to_result(doc.empty_change(options)) -} - -/// \memberof AMdoc -/// \brief Tests the equality of two documents after closing their respective -/// transactions. -/// -/// \param[in] doc1 A pointer to an `AMdoc` struct. -/// \param[in] doc2 A pointer to an `AMdoc` struct. -/// \return `true` if \p doc1 `==` \p doc2 and `false` otherwise. -/// \pre \p doc1 `!= NULL` -/// \pre \p doc2 `!= NULL` -/// \internal -/// -/// #Safety -/// doc1 must be a valid pointer to an AMdoc -/// doc2 must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMequal(doc1: *mut AMdoc, doc2: *mut AMdoc) -> bool { - match (doc1.as_mut(), doc2.as_mut()) { - (Some(doc1), Some(doc2)) => doc1.is_equal_to(doc2), - (None, None) | (None, Some(_)) | (Some(_), None) => false, - } -} - -/// \memberof AMdoc -/// \brief Forks this document at its current or a historical point for use by -/// a different actor. -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` -/// items to select a historical point or `NULL` to select its -/// current point. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// heads must be a valid pointer to an AMitems or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMfork(doc: *mut AMdoc, heads: *const AMitems) -> *mut AMresult { - let doc = to_doc_mut!(doc); - match heads.as_ref() { - None => to_result(doc.fork()), - Some(heads) => match >::try_from(heads) { - Ok(heads) => to_result(doc.fork_at(&heads)), - Err(e) => AMresult::error(&e.to_string()).into(), - }, - } -} - -/// \memberof AMdoc -/// \brief Generates a synchronization message for a peer based upon the given -/// synchronization state. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] sync_state A pointer to an `AMsyncState` struct. -/// \return A pointer to an `AMresult` struct with either an -/// `AM_VAL_TYPE_SYNC_MESSAGE` or `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre \p sync_state `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// sync_state must be a valid pointer to an AMsyncState -#[no_mangle] -pub unsafe extern "C" fn AMgenerateSyncMessage( - doc: *mut AMdoc, - sync_state: *mut AMsyncState, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let sync_state = to_sync_state_mut!(sync_state); - to_result(doc.sync().generate_sync_message(sync_state.as_mut())) -} - -/// \memberof AMdoc -/// \brief Gets a document's actor identifier. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_ACTOR_ID` item. -/// \pre \p doc `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMgetActorId(doc: *const AMdoc) -> *mut AMresult { - let doc = to_doc!(doc); - to_result(Ok::( - doc.get_actor().clone(), - )) -} - -/// \memberof AMdoc -/// \brief Gets the change added to a document by its respective hash. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] src A pointer to an array of bytes. -/// \param[in] count The count of bytes to copy from the array pointed to by -/// \p src. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_CHANGE` item. -/// \pre \p doc `!= NULL` -/// \pre \p src `!= NULL` -/// \pre `sizeof(`\p src') >= AM_CHANGE_HASH_SIZE` -/// \pre \p count `<= sizeof(`\p src `)` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// src must be a byte array of length `>= automerge::types::HASH_SIZE` -#[no_mangle] -pub unsafe extern "C" fn AMgetChangeByHash( - doc: *mut AMdoc, - src: *const u8, - count: usize, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let slice = std::slice::from_raw_parts(src, count); - match slice.try_into() { - Ok(change_hash) => to_result(doc.get_change_by_hash(&change_hash)), - Err(e) => AMresult::error(&e.to_string()).into(), - } -} - -/// \memberof AMdoc -/// \brief Gets the changes added to a document by their respective hashes. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] have_deps A pointer to an `AMitems` struct with -/// `AM_VAL_TYPE_CHANGE_HASH` items or `NULL`. -/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE` items. -/// \pre \p doc `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMgetChanges(doc: *mut AMdoc, have_deps: *const AMitems) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let have_deps = match have_deps.as_ref() { - Some(have_deps) => match Vec::::try_from(have_deps) { - Ok(change_hashes) => change_hashes, - Err(e) => return AMresult::error(&e.to_string()).into(), - }, - None => Vec::::new(), - }; - to_result(doc.get_changes(&have_deps)) -} - -/// \memberof AMdoc -/// \brief Gets the changes added to a second document that weren't added to -/// a first document. -/// -/// \param[in] doc1 A pointer to an `AMdoc` struct. -/// \param[in] doc2 A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE` items. -/// \pre \p doc1 `!= NULL` -/// \pre \p doc2 `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// doc1 must be a valid pointer to an AMdoc -/// doc2 must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMgetChangesAdded(doc1: *mut AMdoc, doc2: *mut AMdoc) -> *mut AMresult { - let doc1 = to_doc_mut!(doc1); - let doc2 = to_doc_mut!(doc2); - to_result(doc1.get_changes_added(doc2)) -} - -/// \memberof AMdoc -/// \brief Gets the current heads of a document. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. -/// \pre \p doc `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMgetHeads(doc: *mut AMdoc) -> *mut AMresult { - let doc = to_doc_mut!(doc); - to_result(Ok::, am::AutomergeError>( - doc.get_heads(), - )) -} - -/// \memberof AMdoc -/// \brief Gets the hashes of the changes in a document that aren't transitive -/// dependencies of the given hashes of changes. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` -/// items or `NULL`. -/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. -/// \pre \p doc `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// heads must be a valid pointer to an AMitems or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMgetMissingDeps(doc: *mut AMdoc, heads: *const AMitems) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let heads = match heads.as_ref() { - None => Vec::::new(), - Some(heads) => match >::try_from(heads) { - Ok(heads) => heads, - Err(e) => { - return AMresult::error(&e.to_string()).into(); - } - }, - }; - to_result(doc.get_missing_deps(heads.as_slice())) -} - -/// \memberof AMdoc -/// \brief Gets the last change made to a document. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing either an -/// `AM_VAL_TYPE_CHANGE` or `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMgetLastLocalChange(doc: *mut AMdoc) -> *mut AMresult { - let doc = to_doc_mut!(doc); - to_result(doc.get_last_local_change()) -} - -/// \memberof AMdoc -/// \brief Gets the current or historical keys of a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` -/// items to select historical keys or `NULL` to select current -/// keys. -/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_STR` items. -/// \pre \p doc `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMitems or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMkeys( - doc: *const AMdoc, - obj_id: *const AMobjId, - heads: *const AMitems, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - match heads.as_ref() { - None => to_result(doc.keys(obj_id)), - Some(heads) => match >::try_from(heads) { - Ok(heads) => to_result(doc.keys_at(obj_id, &heads)), - Err(e) => AMresult::error(&e.to_string()).into(), - }, - } -} - -/// \memberof AMdoc -/// \brief Allocates storage for a document and initializes it with the compact -/// form of an incremental save. -/// -/// \param[in] src A pointer to an array of bytes. -/// \param[in] count The count of bytes to load from the array pointed to by -/// \p src. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_DOC` item. 
-/// \pre \p src `!= NULL` -/// \pre `sizeof(`\p src `) > 0` -/// \pre \p count `<= sizeof(`\p src `)` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// src must be a byte array of length `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMload(src: *const u8, count: usize) -> *mut AMresult { - let data = std::slice::from_raw_parts(src, count); - to_result(am::AutoCommit::load(data)) -} - -/// \memberof AMdoc -/// \brief Loads the compact form of an incremental save into a document. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] src A pointer to an array of bytes. -/// \param[in] count The count of bytes to load from the array pointed to by -/// \p src. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_UINT` item. -/// \pre \p doc `!= NULL` -/// \pre \p src `!= NULL` -/// \pre `sizeof(`\p src `) > 0` -/// \pre \p count `<= sizeof(`\p src `)` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// src must be a byte array of length `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMloadIncremental( - doc: *mut AMdoc, - src: *const u8, - count: usize, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let data = std::slice::from_raw_parts(src, count); - to_result(doc.load_incremental(data)) -} - -/// \memberof AMdoc -/// \brief Applies all of the changes in \p src which are not in \p dest to -/// \p dest. -/// -/// \param[in] dest A pointer to an `AMdoc` struct. -/// \param[in] src A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. 
-/// \pre \p dest `!= NULL` -/// \pre \p src `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// dest must be a valid pointer to an AMdoc -/// src must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMmerge(dest: *mut AMdoc, src: *mut AMdoc) -> *mut AMresult { - let dest = to_doc_mut!(dest); - to_result(dest.merge(to_doc_mut!(src))) -} - -/// \memberof AMdoc -/// \brief Gets the current or historical size of an object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` -/// items to select a historical size or `NULL` to select its -/// current size. -/// \return The count of items in the object identified by \p obj_id. -/// \pre \p doc `!= NULL` -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMitems or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMobjSize( - doc: *const AMdoc, - obj_id: *const AMobjId, - heads: *const AMitems, -) -> usize { - if let Some(doc) = doc.as_ref() { - let obj_id = to_obj_id!(obj_id); - match heads.as_ref() { - None => { - return doc.length(obj_id); - } - Some(heads) => { - if let Ok(heads) = >::try_from(heads) { - return doc.length_at(obj_id, &heads); - } - } - } - } - 0 -} - -/// \memberof AMdoc -/// \brief Gets the type of an object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \return An `AMobjType` tag or `0`. 
-/// \pre \p doc `!= NULL` -/// \pre \p obj_id `!= NULL` -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMobjObjType(doc: *const AMdoc, obj_id: *const AMobjId) -> AMobjType { - if let Some(doc) = doc.as_ref() { - let obj_id = to_obj_id!(obj_id); - if let Ok(obj_type) = doc.object_type(obj_id) { - return (&obj_type).into(); - } - } - Default::default() -} - -/// \memberof AMdoc -/// \brief Gets the current or historical items of an entire object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` -/// items to select its historical items or `NULL` to select -/// its current items. -/// \return A pointer to an `AMresult` struct with an `AMitems` struct. -/// \pre \p doc `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMitems or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMobjItems( - doc: *const AMdoc, - obj_id: *const AMobjId, - heads: *const AMitems, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - match heads.as_ref() { - None => to_result(doc.values(obj_id)), - Some(heads) => match >::try_from(heads) { - Ok(heads) => to_result(doc.values_at(obj_id, &heads)), - Err(e) => AMresult::error(&e.to_string()).into(), - }, - } -} - -/// \memberof AMdoc -/// \brief Gets the number of pending operations added during a document's -/// current transaction. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. 
-/// \return The count of pending operations for \p doc. -/// \pre \p doc `!= NULL` -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMpendingOps(doc: *const AMdoc) -> usize { - if let Some(doc) = doc.as_ref() { - return doc.pending_ops(); - } - 0 -} - -/// \memberof AMdoc -/// \brief Receives a synchronization message from a peer based upon a given -/// synchronization state. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] sync_state A pointer to an `AMsyncState` struct. -/// \param[in] sync_message A pointer to an `AMsyncMessage` struct. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre \p sync_state `!= NULL` -/// \pre \p sync_message `!= NULL` -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// sync_state must be a valid pointer to an AMsyncState -/// sync_message must be a valid pointer to an AMsyncMessage -#[no_mangle] -pub unsafe extern "C" fn AMreceiveSyncMessage( - doc: *mut AMdoc, - sync_state: *mut AMsyncState, - sync_message: *const AMsyncMessage, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let sync_state = to_sync_state_mut!(sync_state); - let sync_message = to_sync_message!(sync_message); - to_result( - doc.sync() - .receive_sync_message(sync_state.as_mut(), sync_message.as_ref().clone()), - ) -} - -/// \memberof AMdoc -/// \brief Cancels the pending operations added during a document's current -/// transaction and gets the number of cancellations. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \return The count of pending operations for \p doc that were cancelled. 
-/// \pre \p doc `!= NULL` -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMrollback(doc: *mut AMdoc) -> usize { - if let Some(doc) = doc.as_mut() { - return doc.rollback(); - } - 0 -} - -/// \memberof AMdoc -/// \brief Saves the entirety of a document into a compact form. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_BYTES` item. -/// \pre \p doc `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMsave(doc: *mut AMdoc) -> *mut AMresult { - let doc = to_doc_mut!(doc); - to_result(Ok(doc.save())) -} - -/// \memberof AMdoc -/// \brief Saves the changes to a document since its last save into a compact -/// form. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_BYTES` item. -/// \pre \p doc `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMsaveIncremental(doc: *mut AMdoc) -> *mut AMresult { - let doc = to_doc_mut!(doc); - to_result(Ok(doc.save_incremental())) -} - -/// \memberof AMdoc -/// \brief Puts the actor identifier of a document. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] actor_id A pointer to an `AMactorId` struct. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre \p actor_id `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// actor_id must be a valid pointer to an AMactorId -#[no_mangle] -pub unsafe extern "C" fn AMsetActorId( - doc: *mut AMdoc, - actor_id: *const AMactorId, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let actor_id = to_actor_id!(actor_id); - doc.set_actor(actor_id.as_ref().clone()); - to_result(Ok(())) -} - -/// \memberof AMdoc -/// \brief Splices values into and/or removes values from the identified object -/// at a given position within it. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] pos A position in the object identified by \p obj_id or -/// `SIZE_MAX` to indicate one past its end. -/// \param[in] del The number of values to delete or `SIZE_MAX` to indicate -/// all of them. -/// \param[in] values A copy of an `AMitems` struct from which values will be -/// spliced starting at its current position; call -/// `AMitemsRewound()` on a used `AMitems` first to ensure -/// that all of its values are spliced in. Pass `(AMitems){0}` -/// when zero values should be spliced in. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` -/// \pre `0 <=` \p del `<= AMobjSize(`\p obj_id `)` or \p del `== SIZE_MAX` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// values must be a valid pointer to an AMitems or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMsplice( - doc: *mut AMdoc, - obj_id: *const AMobjId, - pos: usize, - del: usize, - values: AMitems, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let len = doc.length(obj_id); - let pos = clamp!(pos, len, "pos"); - let del = clamp!(del, len, "del"); - match Vec::::try_from(&values) { - Ok(vals) => to_result(doc.splice(obj_id, pos, del, vals)), - Err(e) => AMresult::error(&e.to_string()).into(), - } -} - -/// \memberof AMdoc -/// \brief Splices characters into and/or removes characters from the -/// identified object at a given position within it. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] pos A position in the text object identified by \p obj_id or -/// `SIZE_MAX` to indicate one past its end. -/// \param[in] del The number of characters to delete or `SIZE_MAX` to indicate -/// all of them. -/// \param[in] text A UTF-8 string view as an `AMbyteSpan` struct. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` -/// \pre `0 <=` \p del `<= AMobjSize(`\p obj_id `)` or \p del `== SIZE_MAX` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMspliceText( - doc: *mut AMdoc, - obj_id: *const AMobjId, - pos: usize, - del: usize, - text: AMbyteSpan, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let len = doc.length(obj_id); - let pos = clamp!(pos, len, "pos"); - let del = clamp!(del, len, "del"); - to_result(doc.splice_text(obj_id, pos, del, to_str!(text))) -} - -/// \memberof AMdoc -/// \brief Gets the current or historical string represented by a text object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] heads A pointer to an `AMitems` struct containing -/// `AM_VAL_TYPE_CHANGE_HASH` items to select a historical string -/// or `NULL` to select the current string. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_STR` item. -/// \pre \p doc `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMitems or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMtext( - doc: *const AMdoc, - obj_id: *const AMobjId, - heads: *const AMitems, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - match heads.as_ref() { - None => to_result(doc.text(obj_id)), - Some(heads) => match >::try_from(heads) { - Ok(heads) => to_result(doc.text_at(obj_id, &heads)), - Err(e) => AMresult::error(&e.to_string()).into(), - }, - } -} diff --git a/rust/automerge-c/src/doc/list.rs b/rust/automerge-c/src/doc/list.rs deleted file mode 100644 index c4503322..00000000 --- a/rust/automerge-c/src/doc/list.rs +++ /dev/null @@ -1,636 +0,0 @@ -use automerge as am; -use automerge::transaction::Transactable; -use automerge::ReadDoc; - -use crate::byte_span::{to_str, AMbyteSpan}; -use crate::doc::{to_doc, to_doc_mut, AMdoc}; -use crate::items::AMitems; -use crate::obj::{to_obj_id, to_obj_type, AMobjId, AMobjType}; -use crate::result::{to_result, AMresult}; - -macro_rules! adjust { - ($pos:expr, $insert:expr, $len:expr) => {{ - // An empty object can only be inserted into. - let insert = $insert || $len == 0; - let end = if insert { $len } else { $len - 1 }; - if $pos > end && $pos != usize::MAX { - return AMresult::error(&format!("Invalid pos {}", $pos)).into(); - } - (std::cmp::min($pos, end), insert) - }}; -} - -macro_rules! to_range { - ($begin:expr, $end:expr) => {{ - if $begin > $end { - return AMresult::error(&format!("Invalid range [{}-{})", $begin, $end)).into(); - }; - ($begin..$end) - }}; -} - -/// \memberof AMdoc -/// \brief Deletes an item from a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. 
-/// \param[in] pos The position of an item within the list object identified by -/// \p obj_id or `SIZE_MAX` to indicate its last item. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistDelete( - doc: *mut AMdoc, - obj_id: *const AMobjId, - pos: usize, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (pos, _) = adjust!(pos, false, doc.length(obj_id)); - to_result(doc.delete(obj_id, pos)) -} - -/// \memberof AMdoc -/// \brief Gets a current or historical item within a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] pos The position of an item within the list object identified by -/// \p obj_id or `SIZE_MAX` to indicate its last item. -/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` -/// items to select a historical item at \p pos or `NULL` -/// to select the current item at \p pos. -/// \return A pointer to an `AMresult` struct with an `AMitem` struct. -/// \pre \p doc `!= NULL` -/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMitems or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistGet( - doc: *const AMdoc, - obj_id: *const AMobjId, - pos: usize, - heads: *const AMitems, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - let (pos, _) = adjust!(pos, false, doc.length(obj_id)); - match heads.as_ref() { - None => to_result(doc.get(obj_id, pos)), - Some(heads) => match >::try_from(heads) { - Ok(heads) => to_result(doc.get_at(obj_id, pos, &heads)), - Err(e) => AMresult::error(&e.to_string()).into(), - }, - } -} - -/// \memberof AMdoc -/// \brief Gets all of the historical items at a position within a list object -/// until its current one or a specific one. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] pos The position of an item within the list object identified by -/// \p obj_id or `SIZE_MAX` to indicate its last item. -/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` -/// items to select a historical last item or `NULL` to select -/// the current last item. -/// \return A pointer to an `AMresult` struct with an `AMitems` struct. -/// \pre \p doc `!= NULL` -/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMitems or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistGetAll( - doc: *const AMdoc, - obj_id: *const AMobjId, - pos: usize, - heads: *const AMitems, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - let (pos, _) = adjust!(pos, false, doc.length(obj_id)); - match heads.as_ref() { - None => to_result(doc.get_all(obj_id, pos)), - Some(heads) => match >::try_from(heads) { - Ok(heads) => to_result(doc.get_all_at(obj_id, pos, &heads)), - Err(e) => AMresult::error(&e.to_string()).into(), - }, - } -} - -/// \memberof AMdoc -/// \brief Increments a counter value in an item within a list object by the -/// given value. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] pos The position of an item within the list object identified by -/// \p obj_id or `SIZE_MAX` to indicate its last item. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistIncrement( - doc: *mut AMdoc, - obj_id: *const AMobjId, - pos: usize, - value: i64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (pos, _) = adjust!(pos, false, doc.length(obj_id)); - to_result(doc.increment(obj_id, pos, value)) -} - -/// \memberof AMdoc -/// \brief Puts a boolean value into an item within a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] pos The position of an item within the list object identified by -/// \p obj_id or `SIZE_MAX` to indicate its last item if -/// \p insert `== false` or one past its last item if -/// \p insert `== true`. -/// \param[in] insert A flag for inserting a new item for \p value before -/// \p pos instead of putting \p value into the item at -/// \p pos. -/// \param[in] value A boolean. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistPutBool( - doc: *mut AMdoc, - obj_id: *const AMobjId, - pos: usize, - insert: bool, - value: bool, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); - let value = am::ScalarValue::Boolean(value); - to_result(if insert { - doc.insert(obj_id, pos, value) - } else { - doc.put(obj_id, pos, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts an array of bytes value at a position within a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] pos The position of an item within the list object identified by -/// \p obj_id or `SIZE_MAX` to indicate its last item if -/// \p insert `== false` or one past its last item if -/// \p insert `== true`. -/// \param[in] insert A flag for inserting a new item for \p value before -/// \p pos instead of putting \p value into the item at -/// \p pos. -/// \param[in] value A view onto the array of bytes to copy from as an -/// `AMbyteSpan` struct. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` -/// \pre \p value.src `!= NULL` -/// \pre `0 <` \p value.count `<= sizeof(`\p value.src `)` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// value.src must be a byte array of length >= value.count -#[no_mangle] -pub unsafe extern "C" fn AMlistPutBytes( - doc: *mut AMdoc, - obj_id: *const AMobjId, - pos: usize, - insert: bool, - value: AMbyteSpan, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); - let value: Vec = (&value).into(); - to_result(if insert { - doc.insert(obj_id, pos, value) - } else { - doc.put(obj_id, pos, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts a CRDT counter value into an item within a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] pos The position of an item within the list object identified by -/// \p obj_id or `SIZE_MAX` to indicate its last item if -/// \p insert `== false` or one past its last item if -/// \p insert `== true`. -/// \param[in] insert A flag for inserting a new item for \p value before -/// \p pos instead of putting \p value into the item at -/// \p pos. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistPutCounter( - doc: *mut AMdoc, - obj_id: *const AMobjId, - pos: usize, - insert: bool, - value: i64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); - let value = am::ScalarValue::Counter(value.into()); - to_result(if insert { - doc.insert(obj_id, pos, value) - } else { - doc.put(obj_id, pos, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts a float value into an item within a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] pos The position of an item within the list object identified by -/// \p obj_id or `SIZE_MAX` to indicate its last item if -/// \p insert `== false` or one past its last item if -/// \p insert `== true`. -/// \param[in] insert A flag for inserting a new item for \p value before -/// \p pos instead of putting \p value into the item at -/// \p pos. -/// \param[in] value A 64-bit float. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistPutF64( - doc: *mut AMdoc, - obj_id: *const AMobjId, - pos: usize, - insert: bool, - value: f64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); - to_result(if insert { - doc.insert(obj_id, pos, value) - } else { - doc.put(obj_id, pos, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts a signed integer value into an item within a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] pos The position of an item within the list object identified by -/// \p obj_id or `SIZE_MAX` to indicate its last item if -/// \p insert `== false` or one past its last item if -/// \p insert `== true`. -/// \param[in] insert A flag for inserting a new item for \p value before -/// \p pos instead of putting \p value into the item at -/// \p pos. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistPutInt( - doc: *mut AMdoc, - obj_id: *const AMobjId, - pos: usize, - insert: bool, - value: i64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); - to_result(if insert { - doc.insert(obj_id, pos, value) - } else { - doc.put(obj_id, pos, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts a null value into an item within a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] pos The position of an item within the list object identified by -/// \p obj_id or `SIZE_MAX` to indicate its last item if -/// \p insert `== false` or one past its last item if -/// \p insert `== true`. -/// \param[in] insert A flag for inserting a new item for \p value before -/// \p pos instead of putting \p value into the item at -/// \p pos. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistPutNull( - doc: *mut AMdoc, - obj_id: *const AMobjId, - pos: usize, - insert: bool, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); - to_result(if insert { - doc.insert(obj_id, pos, ()) - } else { - doc.put(obj_id, pos, ()) - }) -} - -/// \memberof AMdoc -/// \brief Puts an empty object value into an item within a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] pos The position of an item within the list object identified by -/// \p obj_id or `SIZE_MAX` to indicate its last item if -/// \p insert `== false` or one past its last item if -/// \p insert `== true`. -/// \param[in] insert A flag for inserting a new item for \p value before -/// \p pos instead of putting \p value into the item at -/// \p pos. -/// \param[in] obj_type An `AMobjIdType` enum tag. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_OBJ_TYPE` item. -/// \pre \p doc `!= NULL` -/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistPutObject( - doc: *mut AMdoc, - obj_id: *const AMobjId, - pos: usize, - insert: bool, - obj_type: AMobjType, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); - let obj_type = to_obj_type!(obj_type); - to_result(if insert { - (doc.insert_object(obj_id, pos, obj_type), obj_type) - } else { - (doc.put_object(obj_id, pos, obj_type), obj_type) - }) -} - -/// \memberof AMdoc -/// \brief Puts a UTF-8 string value into an item within a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] pos The position of an item within the list object identified by -/// \p obj_id or `SIZE_MAX` to indicate its last item if -/// \p insert `== false` or one past its last item if -/// \p insert `== true`. -/// \param[in] insert A flag for inserting a new item for \p value before -/// \p pos instead of putting \p value into the item at -/// \p pos. -/// \param[in] value A UTF-8 string view as an `AMbyteSpan` struct. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` -/// \pre \p value.src `!= NULL` -/// \pre `0 <` \p value.count `<= sizeof(`\p value.src `)` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// value.src must be a byte array of length >= value.count -#[no_mangle] -pub unsafe extern "C" fn AMlistPutStr( - doc: *mut AMdoc, - obj_id: *const AMobjId, - pos: usize, - insert: bool, - value: AMbyteSpan, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); - let value = to_str!(value); - to_result(if insert { - doc.insert(obj_id, pos, value) - } else { - doc.put(obj_id, pos, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts a *nix timestamp (milliseconds) value into an item within a -/// list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] pos The position of an item within the list object identified by -/// \p obj_id or `SIZE_MAX` to indicate its last item if -/// \p insert `== false` or one past its last item if -/// \p insert `== true`. -/// \param[in] insert A flag for inserting a new item for \p value before -/// \p pos instead of putting \p value into the item at -/// \p pos. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistPutTimestamp( - doc: *mut AMdoc, - obj_id: *const AMobjId, - pos: usize, - insert: bool, - value: i64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); - let value = am::ScalarValue::Timestamp(value); - to_result(if insert { - doc.insert(obj_id, pos, value) - } else { - doc.put(obj_id, pos, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts an unsigned integer value into an item within a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] pos The position of an item within the list object identified by -/// \p obj_id or `SIZE_MAX` to indicate its last item if -/// \p insert `== false` or one past its last item if -/// \p insert `== true`. -/// \param[in] insert A flag for inserting a new item for \p value before -/// \p pos instead of putting \p value into the item at -/// \p pos. -/// \param[in] value A 64-bit unsigned integer. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistPutUint( - doc: *mut AMdoc, - obj_id: *const AMobjId, - pos: usize, - insert: bool, - value: u64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); - to_result(if insert { - doc.insert(obj_id, pos, value) - } else { - doc.put(obj_id, pos, value) - }) -} - -/// \memberof AMdoc -/// \brief Gets the current or historical items in the list object within the -/// given range. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] begin The first pos in a range of indices. -/// \param[in] end At least one past the last pos in a range of indices. -/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` -/// items to select historical items or `NULL` to select -/// current items. -/// \return A pointer to an `AMresult` struct with an `AMitems` struct. -/// \pre \p doc `!= NULL` -/// \pre \p begin `<=` \p end `<= SIZE_MAX` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMitems or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistRange( - doc: *const AMdoc, - obj_id: *const AMobjId, - begin: usize, - end: usize, - heads: *const AMitems, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - let range = to_range!(begin, end); - match heads.as_ref() { - None => to_result(doc.list_range(obj_id, range)), - Some(heads) => match >::try_from(heads) { - Ok(heads) => to_result(doc.list_range_at(obj_id, range, &heads)), - Err(e) => AMresult::error(&e.to_string()).into(), - }, - } -} diff --git a/rust/automerge-c/src/doc/map.rs b/rust/automerge-c/src/doc/map.rs deleted file mode 100644 index b2f7db02..00000000 --- a/rust/automerge-c/src/doc/map.rs +++ /dev/null @@ -1,552 +0,0 @@ -use automerge as am; -use automerge::transaction::Transactable; -use automerge::ReadDoc; - -use crate::byte_span::{to_str, AMbyteSpan}; -use crate::doc::{to_doc, to_doc_mut, AMdoc}; -use crate::items::AMitems; -use crate::obj::{to_obj_id, to_obj_type, AMobjId, AMobjType}; -use crate::result::{to_result, AMresult}; - -/// \memberof AMdoc -/// \brief Deletes an item from a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key The UTF-8 string view key of an item within the map object -/// identified by \p obj_id as an `AMbyteSpan` struct. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre \p key.src `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key.src must be a byte array of length >= key.count -#[no_mangle] -pub unsafe extern "C" fn AMmapDelete( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: AMbyteSpan, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let key = to_str!(key); - to_result(doc.delete(to_obj_id!(obj_id), key)) -} - -/// \memberof AMdoc -/// \brief Gets a current or historical item within a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key The UTF-8 string view key of an item within the map object -/// identified by \p obj_id as an `AMbyteSpan` struct. -/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` -/// items to select a historical item at \p key or `NULL` -/// to select the current item at \p key. -/// \return A pointer to an `AMresult` struct with an `AMitem` struct. -/// \pre \p doc `!= NULL` -/// \pre \p key.src `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key.src must be a byte array of length >= key.count -/// heads must be a valid pointer to an AMitems or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMmapGet( - doc: *const AMdoc, - obj_id: *const AMobjId, - key: AMbyteSpan, - heads: *const AMitems, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - let key = to_str!(key); - match heads.as_ref() { - None => to_result(doc.get(obj_id, key)), - Some(heads) => match >::try_from(heads) { - Ok(heads) => to_result(doc.get_at(obj_id, key, &heads)), - Err(e) => AMresult::error(&e.to_string()).into(), - }, - } -} - -/// \memberof AMdoc -/// \brief Gets all of the historical items at a key within a map object until -/// its current one or a specific one. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key The UTF-8 string view key of an item within the map object -/// identified by \p obj_id as an `AMbyteSpan` struct. -/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` -/// items to select a historical last item or `NULL` to -/// select the current last item. -/// \return A pointer to an `AMresult` struct with an `AMItems` struct. -/// \pre \p doc `!= NULL` -/// \pre \p key.src `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key.src must be a byte array of length >= key.count -/// heads must be a valid pointer to an AMitems or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMmapGetAll( - doc: *const AMdoc, - obj_id: *const AMobjId, - key: AMbyteSpan, - heads: *const AMitems, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - let key = to_str!(key); - match heads.as_ref() { - None => to_result(doc.get_all(obj_id, key)), - Some(heads) => match >::try_from(heads) { - Ok(heads) => to_result(doc.get_all_at(obj_id, key, &heads)), - Err(e) => AMresult::error(&e.to_string()).into(), - }, - } -} - -/// \memberof AMdoc -/// \brief Increments a counter at a key in a map object by the given value. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key The UTF-8 string view key of an item within the map object -/// identified by \p obj_id as an `AMbyteSpan` struct. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre \p key.src `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key.src must be a byte array of length >= key.count -#[no_mangle] -pub unsafe extern "C" fn AMmapIncrement( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: AMbyteSpan, - value: i64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let key = to_str!(key); - to_result(doc.increment(to_obj_id!(obj_id), key, value)) -} - -/// \memberof AMdoc -/// \brief Puts a boolean as the value of a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key The UTF-8 string view key of an item within the map object -/// identified by \p obj_id as an `AMbyteSpan` struct. -/// \param[in] value A boolean. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre \p key.src `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key.src must be a byte array of length >= key.count -#[no_mangle] -pub unsafe extern "C" fn AMmapPutBool( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: AMbyteSpan, - value: bool, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let key = to_str!(key); - to_result(doc.put(to_obj_id!(obj_id), key, value)) -} - -/// \memberof AMdoc -/// \brief Puts an array of bytes value at a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key The UTF-8 string view key of an item within the map object -/// identified by \p obj_id as an `AMbyteSpan` struct. 
-/// \param[in] value A view onto an array of bytes as an `AMbyteSpan` struct. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre \p key.src `!= NULL` -/// \pre \p value.src `!= NULL` -/// \pre `0 <` \p value.count `<= sizeof(`\p value.src `)` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key.src must be a byte array of length >= key.count -/// value.src must be a byte array of length >= value.count -#[no_mangle] -pub unsafe extern "C" fn AMmapPutBytes( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: AMbyteSpan, - value: AMbyteSpan, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let key = to_str!(key); - to_result(doc.put(to_obj_id!(obj_id), key, Vec::::from(&value))) -} - -/// \memberof AMdoc -/// \brief Puts a CRDT counter as the value of a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string view key for the map object identified by -/// \p obj_id as an `AMbyteSpan` struct. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre \p key.src `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key.src must be a byte array of length >= key.count -#[no_mangle] -pub unsafe extern "C" fn AMmapPutCounter( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: AMbyteSpan, - value: i64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let key = to_str!(key); - to_result(doc.put( - to_obj_id!(obj_id), - key, - am::ScalarValue::Counter(value.into()), - )) -} - -/// \memberof AMdoc -/// \brief Puts null as the value of a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string view key for the map object identified by -/// \p obj_id as an `AMbyteSpan` struct. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre \p key.src `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key.src must be a byte array of length >= key.count -#[no_mangle] -pub unsafe extern "C" fn AMmapPutNull( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: AMbyteSpan, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let key = to_str!(key); - to_result(doc.put(to_obj_id!(obj_id), key, ())) -} - -/// \memberof AMdoc -/// \brief Puts an empty object as the value of a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string view key for the map object identified by -/// \p obj_id as an `AMbyteSpan` struct. -/// \param[in] obj_type An `AMobjIdType` enum tag. 
-/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_OBJ_TYPE` item. -/// \pre \p doc `!= NULL` -/// \pre \p key.src `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key.src must be a byte array of length >= key.count -#[no_mangle] -pub unsafe extern "C" fn AMmapPutObject( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: AMbyteSpan, - obj_type: AMobjType, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let key = to_str!(key); - let obj_type = to_obj_type!(obj_type); - to_result((doc.put_object(to_obj_id!(obj_id), key, obj_type), obj_type)) -} - -/// \memberof AMdoc -/// \brief Puts a float as the value of a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string view key for the map object identified by -/// \p obj_id as an `AMbyteSpan` struct. -/// \param[in] value A 64-bit float. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre \p key.src `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key.src must be a byte array of length >= key.count -#[no_mangle] -pub unsafe extern "C" fn AMmapPutF64( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: AMbyteSpan, - value: f64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let key = to_str!(key); - to_result(doc.put(to_obj_id!(obj_id), key, value)) -} - -/// \memberof AMdoc -/// \brief Puts a signed integer as the value of a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string view key for the map object identified by -/// \p obj_id as an `AMbyteSpan` struct. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre \p key.src `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key.src must be a byte array of length >= key.count -#[no_mangle] -pub unsafe extern "C" fn AMmapPutInt( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: AMbyteSpan, - value: i64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let key = to_str!(key); - to_result(doc.put(to_obj_id!(obj_id), key, value)) -} - -/// \memberof AMdoc -/// \brief Puts a UTF-8 string as the value of a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string view key for the map object identified by -/// \p obj_id as an `AMbyteSpan` struct. 
-/// \param[in] value A UTF-8 string view as an `AMbyteSpan` struct. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre \p key.src `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key.src must be a byte array of length >= key.count -#[no_mangle] -pub unsafe extern "C" fn AMmapPutStr( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: AMbyteSpan, - value: AMbyteSpan, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - to_result(doc.put(to_obj_id!(obj_id), to_str!(key), to_str!(value))) -} - -/// \memberof AMdoc -/// \brief Puts a *nix timestamp (milliseconds) as the value of a key in a map -/// object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string view key for the map object identified by -/// \p obj_id as an `AMbyteSpan` struct. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre \p key.src `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key.src must be a byte array of length >= key.count -#[no_mangle] -pub unsafe extern "C" fn AMmapPutTimestamp( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: AMbyteSpan, - value: i64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let key = to_str!(key); - to_result(doc.put(to_obj_id!(obj_id), key, am::ScalarValue::Timestamp(value))) -} - -/// \memberof AMdoc -/// \brief Puts an unsigned integer as the value of a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string view key for the map object identified by -/// \p obj_id as an `AMbyteSpan` struct. -/// \param[in] value A 64-bit unsigned integer. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. -/// \pre \p doc `!= NULL` -/// \pre \p key.src `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key.src must be a byte array of length >= key.count -#[no_mangle] -pub unsafe extern "C" fn AMmapPutUint( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: AMbyteSpan, - value: u64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let key = to_str!(key); - to_result(doc.put(to_obj_id!(obj_id), key, value)) -} - -/// \memberof AMdoc -/// \brief Gets the current or historical items of the map object within the -/// given range. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] begin The first key in a subrange or `AMstr(NULL)` to indicate the -/// absolute first key. 
-/// \param[in] end The key one past the last key in a subrange or `AMstr(NULL)` -/// to indicate one past the absolute last key. -/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` -/// items to select historical items or `NULL` to select -/// current items. -/// \return A pointer to an `AMresult` struct with an `AMitems` struct. -/// \pre \p doc `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// begin.src must be a byte array of length >= begin.count or std::ptr::null() -/// end.src must be a byte array of length >= end.count or std::ptr::null() -/// heads must be a valid pointer to an AMitems or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMmapRange( - doc: *const AMdoc, - obj_id: *const AMobjId, - begin: AMbyteSpan, - end: AMbyteSpan, - heads: *const AMitems, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - let heads = match heads.as_ref() { - None => None, - Some(heads) => match >::try_from(heads) { - Ok(heads) => Some(heads), - Err(e) => { - return AMresult::error(&e.to_string()).into(); - } - }, - }; - match (begin.is_null(), end.is_null()) { - (false, false) => { - let (begin, end) = (to_str!(begin).to_string(), to_str!(end).to_string()); - if begin > end { - return AMresult::error(&format!("Invalid range [{}-{})", begin, end)).into(); - }; - let bounds = begin..end; - if let Some(heads) = heads { - to_result(doc.map_range_at(obj_id, bounds, &heads)) - } else { - to_result(doc.map_range(obj_id, bounds)) - } - } - (false, true) => { - let bounds = to_str!(begin).to_string()..; - if let Some(heads) = heads { - to_result(doc.map_range_at(obj_id, bounds, &heads)) - } else { - to_result(doc.map_range(obj_id, bounds)) - } - } - (true, false) => { 
- let bounds = ..to_str!(end).to_string(); - if let Some(heads) = heads { - to_result(doc.map_range_at(obj_id, bounds, &heads)) - } else { - to_result(doc.map_range(obj_id, bounds)) - } - } - (true, true) => { - let bounds = ..; - if let Some(heads) = heads { - to_result(doc.map_range_at(obj_id, bounds, &heads)) - } else { - to_result(doc.map_range(obj_id, bounds)) - } - } - } -} diff --git a/rust/automerge-c/src/doc/utils.rs b/rust/automerge-c/src/doc/utils.rs deleted file mode 100644 index ce465b84..00000000 --- a/rust/automerge-c/src/doc/utils.rs +++ /dev/null @@ -1,46 +0,0 @@ -macro_rules! clamp { - ($index:expr, $len:expr, $param_name:expr) => {{ - if $index > $len && $index != usize::MAX { - return AMresult::error(&format!("Invalid {} {}", $param_name, $index)).into(); - } - std::cmp::min($index, $len) - }}; -} - -pub(crate) use clamp; - -macro_rules! to_doc { - ($handle:expr) => {{ - let handle = $handle.as_ref(); - match handle { - Some(b) => b, - None => return AMresult::error("Invalid `AMdoc*`").into(), - } - }}; -} - -pub(crate) use to_doc; - -macro_rules! to_doc_mut { - ($handle:expr) => {{ - let handle = $handle.as_mut(); - match handle { - Some(b) => b, - None => return AMresult::error("Invalid `AMdoc*`").into(), - } - }}; -} - -pub(crate) use to_doc_mut; - -macro_rules! to_items { - ($handle:expr) => {{ - let handle = $handle.as_ref(); - match handle { - Some(b) => b, - None => return AMresult::error("Invalid `AMitems*`").into(), - } - }}; -} - -pub(crate) use to_items; diff --git a/rust/automerge-c/src/index.rs b/rust/automerge-c/src/index.rs deleted file mode 100644 index f1ea153b..00000000 --- a/rust/automerge-c/src/index.rs +++ /dev/null @@ -1,84 +0,0 @@ -use automerge as am; - -use std::any::type_name; - -use smol_str::SmolStr; - -use crate::byte_span::AMbyteSpan; - -/// \struct AMindex -/// \installed_headerfile -/// \brief An item index. -#[derive(PartialEq)] -pub enum AMindex { - /// A UTF-8 string key variant. 
- Key(SmolStr), - /// A 64-bit unsigned integer position variant. - Pos(usize), -} - -impl TryFrom<&AMindex> for AMbyteSpan { - type Error = am::AutomergeError; - - fn try_from(item: &AMindex) -> Result { - use am::AutomergeError::InvalidValueType; - use AMindex::*; - - if let Key(key) = item { - return Ok(key.into()); - } - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }) - } -} - -impl TryFrom<&AMindex> for usize { - type Error = am::AutomergeError; - - fn try_from(item: &AMindex) -> Result { - use am::AutomergeError::InvalidValueType; - use AMindex::*; - - if let Pos(pos) = item { - return Ok(*pos); - } - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }) - } -} - -/// \ingroup enumerations -/// \enum AMidxType -/// \installed_headerfile -/// \brief The type of an item's index. -#[derive(PartialEq, Eq)] -#[repr(u8)] -pub enum AMidxType { - /// The default tag, not a type signifier. - Default = 0, - /// A UTF-8 string view key. - Key, - /// A 64-bit unsigned integer position. 
- Pos, -} - -impl Default for AMidxType { - fn default() -> Self { - Self::Default - } -} - -impl From<&AMindex> for AMidxType { - fn from(index: &AMindex) -> Self { - use AMindex::*; - - match index { - Key(_) => Self::Key, - Pos(_) => Self::Pos, - } - } -} diff --git a/rust/automerge-c/src/item.rs b/rust/automerge-c/src/item.rs deleted file mode 100644 index 94735464..00000000 --- a/rust/automerge-c/src/item.rs +++ /dev/null @@ -1,1963 +0,0 @@ -use automerge as am; - -use std::any::type_name; -use std::borrow::Cow; -use std::cell::{RefCell, UnsafeCell}; -use std::rc::Rc; - -use crate::actor_id::AMactorId; -use crate::byte_span::{to_str, AMbyteSpan}; -use crate::change::AMchange; -use crate::doc::AMdoc; -use crate::index::{AMidxType, AMindex}; -use crate::obj::AMobjId; -use crate::result::{to_result, AMresult}; -use crate::sync::{AMsyncHave, AMsyncMessage, AMsyncState}; - -/// \struct AMunknownValue -/// \installed_headerfile -/// \brief A value (typically for a `set` operation) whose type is unknown. -#[derive(Default, Eq, PartialEq)] -#[repr(C)] -pub struct AMunknownValue { - /// The value's raw bytes. - bytes: AMbyteSpan, - /// The value's encoded type identifier. 
- type_code: u8, -} - -pub enum Value { - ActorId(am::ActorId, UnsafeCell>), - Change(Box, UnsafeCell>), - ChangeHash(am::ChangeHash), - Doc(RefCell), - SyncHave(AMsyncHave), - SyncMessage(AMsyncMessage), - SyncState(RefCell), - Value(am::Value<'static>), -} - -impl Value { - pub fn try_into_bytes(&self) -> Result { - use am::AutomergeError::InvalidValueType; - use am::ScalarValue::*; - use am::Value::*; - - if let Self::Value(Scalar(scalar)) = &self { - if let Bytes(vector) = scalar.as_ref() { - return Ok(vector.as_slice().into()); - } - } - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }) - } - - pub fn try_into_change_hash(&self) -> Result { - use am::AutomergeError::InvalidValueType; - - if let Self::ChangeHash(change_hash) = &self { - return Ok(change_hash.into()); - } - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }) - } - - pub fn try_into_counter(&self) -> Result { - use am::AutomergeError::InvalidValueType; - use am::ScalarValue::*; - use am::Value::*; - - if let Self::Value(Scalar(scalar)) = &self { - if let Counter(counter) = scalar.as_ref() { - return Ok(counter.into()); - } - } - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }) - } - - pub fn try_into_int(&self) -> Result { - use am::AutomergeError::InvalidValueType; - use am::ScalarValue::*; - use am::Value::*; - - if let Self::Value(Scalar(scalar)) = &self { - if let Int(int) = scalar.as_ref() { - return Ok(*int); - } - } - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }) - } - - pub fn try_into_str(&self) -> Result { - use am::AutomergeError::InvalidValueType; - use am::ScalarValue::*; - use am::Value::*; - - if let Self::Value(Scalar(scalar)) = &self { - if let Str(smol_str) = scalar.as_ref() { - return Ok(smol_str.into()); - } - } - Err(InvalidValueType { - 
expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }) - } - - pub fn try_into_timestamp(&self) -> Result { - use am::AutomergeError::InvalidValueType; - use am::ScalarValue::*; - use am::Value::*; - - if let Self::Value(Scalar(scalar)) = &self { - if let Timestamp(timestamp) = scalar.as_ref() { - return Ok(*timestamp); - } - } - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }) - } -} - -impl From for Value { - fn from(actor_id: am::ActorId) -> Self { - Self::ActorId(actor_id, Default::default()) - } -} - -impl From for Value { - fn from(auto_commit: am::AutoCommit) -> Self { - Self::Doc(RefCell::new(AMdoc::new(auto_commit))) - } -} - -impl From for Value { - fn from(change: am::Change) -> Self { - Self::Change(Box::new(change), Default::default()) - } -} - -impl From for Value { - fn from(change_hash: am::ChangeHash) -> Self { - Self::ChangeHash(change_hash) - } -} - -impl From for Value { - fn from(have: am::sync::Have) -> Self { - Self::SyncHave(AMsyncHave::new(have)) - } -} - -impl From for Value { - fn from(message: am::sync::Message) -> Self { - Self::SyncMessage(AMsyncMessage::new(message)) - } -} - -impl From for Value { - fn from(state: am::sync::State) -> Self { - Self::SyncState(RefCell::new(AMsyncState::new(state))) - } -} - -impl From> for Value { - fn from(value: am::Value<'static>) -> Self { - Self::Value(value) - } -} - -impl From for Value { - fn from(string: String) -> Self { - Self::Value(am::Value::Scalar(Cow::Owned(am::ScalarValue::Str( - string.into(), - )))) - } -} - -impl<'a> TryFrom<&'a Value> for &'a am::Change { - type Error = am::AutomergeError; - - fn try_from(value: &'a Value) -> Result { - use self::Value::*; - use am::AutomergeError::InvalidValueType; - - match value { - Change(change, _) => Ok(change), - _ => Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }), - } - } -} - -impl<'a> 
TryFrom<&'a Value> for &'a am::ChangeHash { - type Error = am::AutomergeError; - - fn try_from(value: &'a Value) -> Result { - use self::Value::*; - use am::AutomergeError::InvalidValueType; - - match value { - ChangeHash(change_hash) => Ok(change_hash), - _ => Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }), - } - } -} - -impl<'a> TryFrom<&'a Value> for &'a am::ScalarValue { - type Error = am::AutomergeError; - - fn try_from(value: &'a Value) -> Result { - use self::Value::*; - use am::AutomergeError::InvalidValueType; - use am::Value::*; - - if let Value(Scalar(scalar)) = value { - return Ok(scalar.as_ref()); - } - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }) - } -} - -impl<'a> TryFrom<&'a Value> for &'a AMactorId { - type Error = am::AutomergeError; - - fn try_from(value: &'a Value) -> Result { - use self::Value::*; - use am::AutomergeError::InvalidValueType; - - match value { - ActorId(actor_id, c_actor_id) => unsafe { - Ok((*c_actor_id.get()).get_or_insert(AMactorId::new(actor_id))) - }, - _ => Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }), - } - } -} - -impl<'a> TryFrom<&'a mut Value> for &'a mut AMchange { - type Error = am::AutomergeError; - - fn try_from(value: &'a mut Value) -> Result { - use self::Value::*; - use am::AutomergeError::InvalidValueType; - - match value { - Change(change, c_change) => unsafe { - Ok((*c_change.get()).get_or_insert(AMchange::new(change))) - }, - _ => Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }), - } - } -} - -impl<'a> TryFrom<&'a mut Value> for &'a mut AMdoc { - type Error = am::AutomergeError; - - fn try_from(value: &'a mut Value) -> Result { - use self::Value::*; - use am::AutomergeError::InvalidValueType; - - match value { - Doc(doc) => Ok(doc.get_mut()), - _ => 
Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }), - } - } -} - -impl<'a> TryFrom<&'a Value> for &'a AMsyncHave { - type Error = am::AutomergeError; - - fn try_from(value: &'a Value) -> Result { - use self::Value::*; - use am::AutomergeError::InvalidValueType; - - match value { - SyncHave(sync_have) => Ok(sync_have), - _ => Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }), - } - } -} - -impl<'a> TryFrom<&'a Value> for &'a AMsyncMessage { - type Error = am::AutomergeError; - - fn try_from(value: &'a Value) -> Result { - use self::Value::*; - use am::AutomergeError::InvalidValueType; - - match value { - SyncMessage(sync_message) => Ok(sync_message), - _ => Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }), - } - } -} - -impl<'a> TryFrom<&'a mut Value> for &'a mut AMsyncState { - type Error = am::AutomergeError; - - fn try_from(value: &'a mut Value) -> Result { - use self::Value::*; - use am::AutomergeError::InvalidValueType; - - match value { - SyncState(sync_state) => Ok(sync_state.get_mut()), - _ => Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }), - } - } -} - -impl TryFrom<&Value> for bool { - type Error = am::AutomergeError; - - fn try_from(value: &Value) -> Result { - use self::Value::*; - use am::AutomergeError::InvalidValueType; - use am::ScalarValue::*; - use am::Value::*; - - if let Value(Scalar(scalar)) = value { - if let Boolean(boolean) = scalar.as_ref() { - return Ok(*boolean); - } - } - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }) - } -} - -impl TryFrom<&Value> for f64 { - type Error = am::AutomergeError; - - fn try_from(value: &Value) -> Result { - use self::Value::*; - use am::AutomergeError::InvalidValueType; - use am::ScalarValue::*; - use am::Value::*; - - 
if let Value(Scalar(scalar)) = value { - if let F64(float) = scalar.as_ref() { - return Ok(*float); - } - } - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }) - } -} - -impl TryFrom<&Value> for u64 { - type Error = am::AutomergeError; - - fn try_from(value: &Value) -> Result { - use self::Value::*; - use am::AutomergeError::InvalidValueType; - use am::ScalarValue::*; - use am::Value::*; - - if let Value(Scalar(scalar)) = value { - if let Uint(uint) = scalar.as_ref() { - return Ok(*uint); - } - } - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }) - } -} - -impl TryFrom<&Value> for AMunknownValue { - type Error = am::AutomergeError; - - fn try_from(value: &Value) -> Result { - use self::Value::*; - use am::AutomergeError::InvalidValueType; - use am::ScalarValue::*; - use am::Value::*; - - if let Value(Scalar(scalar)) = value { - if let Unknown { bytes, type_code } = scalar.as_ref() { - return Ok(Self { - bytes: bytes.as_slice().into(), - type_code: *type_code, - }); - } - } - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }) - } -} - -impl PartialEq for Value { - fn eq(&self, other: &Self) -> bool { - use self::Value::*; - - match (self, other) { - (ActorId(lhs, _), ActorId(rhs, _)) => *lhs == *rhs, - (Change(lhs, _), Change(rhs, _)) => lhs == rhs, - (ChangeHash(lhs), ChangeHash(rhs)) => lhs == rhs, - (Doc(lhs), Doc(rhs)) => lhs.as_ptr() == rhs.as_ptr(), - (SyncMessage(lhs), SyncMessage(rhs)) => *lhs == *rhs, - (SyncState(lhs), SyncState(rhs)) => *lhs == *rhs, - (Value(lhs), Value(rhs)) => lhs == rhs, - _ => false, - } - } -} - -#[derive(Default)] -pub struct Item { - /// The item's index. - index: Option, - /// The item's identifier. - obj_id: Option, - /// The item's value. 
- value: Option, -} - -impl Item { - pub fn try_into_bytes(&self) -> Result { - use am::AutomergeError::InvalidValueType; - - if let Some(value) = &self.value { - return value.try_into_bytes(); - } - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::>().to_string(), - }) - } - - pub fn try_into_change_hash(&self) -> Result { - use am::AutomergeError::InvalidValueType; - - if let Some(value) = &self.value { - return value.try_into_change_hash(); - } - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::>().to_string(), - }) - } - - pub fn try_into_counter(&self) -> Result { - use am::AutomergeError::InvalidValueType; - - if let Some(value) = &self.value { - return value.try_into_counter(); - } - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::>().to_string(), - }) - } - - pub fn try_into_int(&self) -> Result { - use am::AutomergeError::InvalidValueType; - - if let Some(value) = &self.value { - return value.try_into_int(); - } - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::>().to_string(), - }) - } - - pub fn try_into_str(&self) -> Result { - use am::AutomergeError::InvalidValueType; - - if let Some(value) = &self.value { - return value.try_into_str(); - } - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::>().to_string(), - }) - } - - pub fn try_into_timestamp(&self) -> Result { - use am::AutomergeError::InvalidValueType; - - if let Some(value) = &self.value { - return value.try_into_timestamp(); - } - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::>().to_string(), - }) - } -} - -impl From for Item { - fn from(actor_id: am::ActorId) -> Self { - Value::from(actor_id).into() - } -} - -impl From for Item { - fn from(auto_commit: am::AutoCommit) -> Self { - Value::from(auto_commit).into() - } -} - -impl From for Item { - fn from(change: 
am::Change) -> Self { - Value::from(change).into() - } -} - -impl From for Item { - fn from(change_hash: am::ChangeHash) -> Self { - Value::from(change_hash).into() - } -} - -impl From<(am::ObjId, am::ObjType)> for Item { - fn from((obj_id, obj_type): (am::ObjId, am::ObjType)) -> Self { - Self { - index: None, - obj_id: Some(AMobjId::new(obj_id)), - value: Some(am::Value::Object(obj_type).into()), - } - } -} - -impl From for Item { - fn from(have: am::sync::Have) -> Self { - Value::from(have).into() - } -} - -impl From for Item { - fn from(message: am::sync::Message) -> Self { - Value::from(message).into() - } -} - -impl From for Item { - fn from(state: am::sync::State) -> Self { - Value::from(state).into() - } -} - -impl From> for Item { - fn from(value: am::Value<'static>) -> Self { - Value::from(value).into() - } -} - -impl From for Item { - fn from(string: String) -> Self { - Value::from(string).into() - } -} - -impl From for Item { - fn from(value: Value) -> Self { - Self { - index: None, - obj_id: None, - value: Some(value), - } - } -} - -impl PartialEq for Item { - fn eq(&self, other: &Self) -> bool { - self.index == other.index && self.obj_id == other.obj_id && self.value == other.value - } -} - -impl<'a> TryFrom<&'a Item> for &'a am::Change { - type Error = am::AutomergeError; - - fn try_from(item: &'a Item) -> Result { - use am::AutomergeError::InvalidValueType; - - if let Some(value) = &item.value { - value.try_into() - } else { - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::>().to_string(), - }) - } - } -} - -impl<'a> TryFrom<&'a Item> for &'a am::ChangeHash { - type Error = am::AutomergeError; - - fn try_from(item: &'a Item) -> Result { - use am::AutomergeError::InvalidValueType; - - if let Some(value) = &item.value { - value.try_into() - } else { - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::>().to_string(), - }) - } - } -} - -impl<'a> TryFrom<&'a Item> for &'a 
am::ScalarValue { - type Error = am::AutomergeError; - - fn try_from(item: &'a Item) -> Result { - use am::AutomergeError::InvalidValueType; - - if let Some(value) = &item.value { - value.try_into() - } else { - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::>().to_string(), - }) - } - } -} - -impl<'a> TryFrom<&'a Item> for &'a AMactorId { - type Error = am::AutomergeError; - - fn try_from(item: &'a Item) -> Result { - use am::AutomergeError::InvalidValueType; - - if let Some(value) = &item.value { - value.try_into() - } else { - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::>().to_string(), - }) - } - } -} - -impl<'a> TryFrom<&'a mut Item> for &'a mut AMchange { - type Error = am::AutomergeError; - - fn try_from(item: &'a mut Item) -> Result { - use am::AutomergeError::InvalidValueType; - - if let Some(value) = &mut item.value { - value.try_into() - } else { - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::>().to_string(), - }) - } - } -} - -impl<'a> TryFrom<&'a mut Item> for &'a mut AMdoc { - type Error = am::AutomergeError; - - fn try_from(item: &'a mut Item) -> Result { - use am::AutomergeError::InvalidValueType; - - if let Some(value) = &mut item.value { - value.try_into() - } else { - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::>().to_string(), - }) - } - } -} - -impl From<&Item> for AMidxType { - fn from(item: &Item) -> Self { - if let Some(index) = &item.index { - return index.into(); - } - Default::default() - } -} - -impl<'a> TryFrom<&'a Item> for &'a AMsyncHave { - type Error = am::AutomergeError; - - fn try_from(item: &'a Item) -> Result { - use am::AutomergeError::InvalidValueType; - - if let Some(value) = &item.value { - value.try_into() - } else { - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::>().to_string(), - }) - } - } -} - -impl<'a> 
TryFrom<&'a Item> for &'a AMsyncMessage { - type Error = am::AutomergeError; - - fn try_from(item: &'a Item) -> Result { - use am::AutomergeError::InvalidValueType; - - if let Some(value) = &item.value { - value.try_into() - } else { - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::>().to_string(), - }) - } - } -} - -impl<'a> TryFrom<&'a mut Item> for &'a mut AMsyncState { - type Error = am::AutomergeError; - - fn try_from(item: &'a mut Item) -> Result { - use am::AutomergeError::InvalidValueType; - - if let Some(value) = &mut item.value { - value.try_into() - } else { - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::>().to_string(), - }) - } - } -} - -impl TryFrom<&Item> for bool { - type Error = am::AutomergeError; - - fn try_from(item: &Item) -> Result { - use am::AutomergeError::InvalidValueType; - - if let Some(value) = &item.value { - value.try_into() - } else { - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::>().to_string(), - }) - } - } -} - -impl TryFrom<&Item> for f64 { - type Error = am::AutomergeError; - - fn try_from(item: &Item) -> Result { - use am::AutomergeError::InvalidValueType; - - if let Some(value) = &item.value { - value.try_into() - } else { - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::>().to_string(), - }) - } - } -} - -impl TryFrom<&Item> for u64 { - type Error = am::AutomergeError; - - fn try_from(item: &Item) -> Result { - use am::AutomergeError::InvalidValueType; - - if let Some(value) = &item.value { - value.try_into() - } else { - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::>().to_string(), - }) - } - } -} - -impl TryFrom<&Item> for AMunknownValue { - type Error = am::AutomergeError; - - fn try_from(item: &Item) -> Result { - use am::AutomergeError::InvalidValueType; - - if let Some(value) = &item.value { - value.try_into() - } 
else { - Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::>().to_string(), - }) - } - } -} - -impl TryFrom<&Item> for (am::Value<'static>, am::ObjId) { - type Error = am::AutomergeError; - - fn try_from(item: &Item) -> Result { - use self::Value::*; - use am::AutomergeError::InvalidObjId; - use am::AutomergeError::InvalidValueType; - - let expected = type_name::().to_string(); - match (&item.obj_id, &item.value) { - (None, None) | (None, Some(_)) => Err(InvalidObjId("".to_string())), - (Some(_), None) => Err(InvalidValueType { - expected, - unexpected: type_name::>().to_string(), - }), - (Some(obj_id), Some(value)) => match value { - ActorId(_, _) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - ChangeHash(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - Change(_, _) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - Doc(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - SyncHave(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - SyncMessage(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - SyncState(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - Value(v) => Ok((v.clone(), obj_id.as_ref().clone())), - }, - } - } -} - -/// \struct AMitem -/// \installed_headerfile -/// \brief An item within a result. 
-#[derive(Clone)] -pub struct AMitem(Rc); - -impl AMitem { - pub fn exact(obj_id: am::ObjId, value: Value) -> Self { - Self(Rc::new(Item { - index: None, - obj_id: Some(AMobjId::new(obj_id)), - value: Some(value), - })) - } - - pub fn indexed(index: AMindex, obj_id: am::ObjId, value: Value) -> Self { - Self(Rc::new(Item { - index: Some(index), - obj_id: Some(AMobjId::new(obj_id)), - value: Some(value), - })) - } -} - -impl AsRef for AMitem { - fn as_ref(&self) -> &Item { - self.0.as_ref() - } -} - -impl Default for AMitem { - fn default() -> Self { - Self(Rc::new(Item { - index: None, - obj_id: None, - value: None, - })) - } -} - -impl From for AMitem { - fn from(actor_id: am::ActorId) -> Self { - Value::from(actor_id).into() - } -} - -impl From for AMitem { - fn from(auto_commit: am::AutoCommit) -> Self { - Value::from(auto_commit).into() - } -} - -impl From for AMitem { - fn from(change: am::Change) -> Self { - Value::from(change).into() - } -} - -impl From for AMitem { - fn from(change_hash: am::ChangeHash) -> Self { - Value::from(change_hash).into() - } -} - -impl From<(am::ObjId, am::ObjType)> for AMitem { - fn from((obj_id, obj_type): (am::ObjId, am::ObjType)) -> Self { - Self(Rc::new(Item::from((obj_id, obj_type)))) - } -} - -impl From for AMitem { - fn from(have: am::sync::Have) -> Self { - Value::from(have).into() - } -} - -impl From for AMitem { - fn from(message: am::sync::Message) -> Self { - Value::from(message).into() - } -} - -impl From for AMitem { - fn from(state: am::sync::State) -> Self { - Value::from(state).into() - } -} - -impl From> for AMitem { - fn from(value: am::Value<'static>) -> Self { - Value::from(value).into() - } -} - -impl From for AMitem { - fn from(string: String) -> Self { - Value::from(string).into() - } -} - -impl From for AMitem { - fn from(value: Value) -> Self { - Self(Rc::new(Item::from(value))) - } -} - -impl PartialEq for AMitem { - fn eq(&self, other: &Self) -> bool { - self.as_ref() == other.as_ref() - } -} - -impl<'a> 
TryFrom<&'a AMitem> for &'a am::Change { - type Error = am::AutomergeError; - - fn try_from(item: &'a AMitem) -> Result { - item.as_ref().try_into() - } -} - -impl<'a> TryFrom<&'a AMitem> for &'a am::ChangeHash { - type Error = am::AutomergeError; - - fn try_from(item: &'a AMitem) -> Result { - item.as_ref().try_into() - } -} - -impl<'a> TryFrom<&'a AMitem> for &'a am::ScalarValue { - type Error = am::AutomergeError; - - fn try_from(item: &'a AMitem) -> Result { - item.as_ref().try_into() - } -} - -impl<'a> TryFrom<&'a AMitem> for &'a AMactorId { - type Error = am::AutomergeError; - - fn try_from(item: &'a AMitem) -> Result { - item.as_ref().try_into() - } -} - -impl<'a> TryFrom<&'a mut AMitem> for &'a mut AMchange { - type Error = am::AutomergeError; - - fn try_from(item: &'a mut AMitem) -> Result { - if let Some(item) = Rc::get_mut(&mut item.0) { - item.try_into() - } else { - Err(Self::Error::Fail) - } - } -} - -impl<'a> TryFrom<&'a mut AMitem> for &'a mut AMdoc { - type Error = am::AutomergeError; - - fn try_from(item: &'a mut AMitem) -> Result { - if let Some(item) = Rc::get_mut(&mut item.0) { - item.try_into() - } else { - Err(Self::Error::Fail) - } - } -} - -impl<'a> TryFrom<&'a AMitem> for &'a AMsyncHave { - type Error = am::AutomergeError; - - fn try_from(item: &'a AMitem) -> Result { - item.as_ref().try_into() - } -} - -impl<'a> TryFrom<&'a AMitem> for &'a AMsyncMessage { - type Error = am::AutomergeError; - - fn try_from(item: &'a AMitem) -> Result { - item.as_ref().try_into() - } -} - -impl<'a> TryFrom<&'a mut AMitem> for &'a mut AMsyncState { - type Error = am::AutomergeError; - - fn try_from(item: &'a mut AMitem) -> Result { - if let Some(item) = Rc::get_mut(&mut item.0) { - item.try_into() - } else { - Err(Self::Error::Fail) - } - } -} - -impl TryFrom<&AMitem> for bool { - type Error = am::AutomergeError; - - fn try_from(item: &AMitem) -> Result { - item.as_ref().try_into() - } -} - -impl TryFrom<&AMitem> for f64 { - type Error = am::AutomergeError; 
- - fn try_from(item: &AMitem) -> Result { - item.as_ref().try_into() - } -} - -impl TryFrom<&AMitem> for u64 { - type Error = am::AutomergeError; - - fn try_from(item: &AMitem) -> Result { - item.as_ref().try_into() - } -} - -impl TryFrom<&AMitem> for AMunknownValue { - type Error = am::AutomergeError; - - fn try_from(item: &AMitem) -> Result { - item.as_ref().try_into() - } -} - -impl TryFrom<&AMitem> for (am::Value<'static>, am::ObjId) { - type Error = am::AutomergeError; - - fn try_from(item: &AMitem) -> Result { - item.as_ref().try_into() - } -} - -/// \ingroup enumerations -/// \enum AMvalType -/// \installed_headerfile -/// \brief The type of an item's value. -#[derive(PartialEq, Eq)] -#[repr(u32)] -pub enum AMvalType { - /// An actor identifier value. - ActorId = 1 << 1, - /// A boolean value. - Bool = 1 << 2, - /// A view onto an array of bytes value. - Bytes = 1 << 3, - /// A change value. - Change = 1 << 4, - /// A change hash value. - ChangeHash = 1 << 5, - /// A CRDT counter value. - Counter = 1 << 6, - /// The default tag, not a type signifier. - Default = 0, - /// A document value. - Doc = 1 << 7, - /// A 64-bit float value. - F64 = 1 << 8, - /// A 64-bit signed integer value. - Int = 1 << 9, - /// A null value. - Null = 1 << 10, - /// An object type value. - ObjType = 1 << 11, - /// A UTF-8 string view value. - Str = 1 << 12, - /// A synchronization have value. - SyncHave = 1 << 13, - /// A synchronization message value. - SyncMessage = 1 << 14, - /// A synchronization state value. - SyncState = 1 << 15, - /// A *nix timestamp (milliseconds) value. - Timestamp = 1 << 16, - /// A 64-bit unsigned integer value. - Uint = 1 << 17, - /// An unknown type of value. - Unknown = 1 << 18, - /// A void. 
- Void = 1 << 0, -} - -impl Default for AMvalType { - fn default() -> Self { - Self::Default - } -} - -impl From<&am::Value<'static>> for AMvalType { - fn from(value: &am::Value<'static>) -> Self { - use am::ScalarValue::*; - use am::Value::*; - - match value { - Object(_) => Self::ObjType, - Scalar(scalar) => match scalar.as_ref() { - Boolean(_) => Self::Bool, - Bytes(_) => Self::Bytes, - Counter(_) => Self::Counter, - F64(_) => Self::F64, - Int(_) => Self::Int, - Null => Self::Null, - Str(_) => Self::Str, - Timestamp(_) => Self::Timestamp, - Uint(_) => Self::Uint, - Unknown { .. } => Self::Unknown, - }, - } - } -} - -impl From<&Value> for AMvalType { - fn from(value: &Value) -> Self { - use self::Value::*; - - match value { - ActorId(_, _) => Self::ActorId, - Change(_, _) => Self::Change, - ChangeHash(_) => Self::ChangeHash, - Doc(_) => Self::Doc, - SyncHave(_) => Self::SyncHave, - SyncMessage(_) => Self::SyncMessage, - SyncState(_) => Self::SyncState, - Value(v) => v.into(), - } - } -} - -impl From<&Item> for AMvalType { - fn from(item: &Item) -> Self { - if let Some(value) = &item.value { - return value.into(); - } - Self::Void - } -} - -/// \memberof AMitem -/// \brief Tests the equality of two items. -/// -/// \param[in] item1 A pointer to an `AMitem` struct. -/// \param[in] item2 A pointer to an `AMitem` struct. -/// \return `true` if \p item1 `==` \p item2 and `false` otherwise. 
-/// \pre \p item1 `!= NULL` -/// \pre \p item2 `!= NULL` -/// \post `!(`\p item1 `&&` \p item2 `) -> false` -/// \internal -/// -/// #Safety -/// item1 must be a valid AMitem pointer -/// item2 must be a valid AMitem pointer -#[no_mangle] -pub unsafe extern "C" fn AMitemEqual(item1: *const AMitem, item2: *const AMitem) -> bool { - match (item1.as_ref(), item2.as_ref()) { - (Some(item1), Some(item2)) => *item1 == *item2, - (None, None) | (None, Some(_)) | (Some(_), None) => false, - } -} - -/// \memberof AMitem -/// \brief Allocates a new item and initializes it from a boolean value. -/// -/// \param[in] value A boolean. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_BOOL` item. -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -#[no_mangle] -pub unsafe extern "C" fn AMitemFromBool(value: bool) -> *mut AMresult { - AMresult::item(am::Value::from(value).into()).into() -} - -/// \memberof AMitem -/// \brief Allocates a new item and initializes it from an array of bytes value. -/// -/// \param[in] src A pointer to an array of bytes. -/// \param[in] count The count of bytes to copy from the array pointed to by -/// \p src. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_BYTES` item. -/// \pre \p src `!= NULL` -/// \pre `sizeof(`\p src `) > 0` -/// \pre \p count `<= sizeof(`\p src `)` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// value.src must be a byte array of length >= value.count -#[no_mangle] -pub unsafe extern "C" fn AMitemFromBytes(src: *const u8, count: usize) -> *mut AMresult { - let value = std::slice::from_raw_parts(src, count); - AMresult::item(am::Value::bytes(value.to_vec()).into()).into() -} - -/// \memberof AMitem -/// \brief Allocates a new item and initializes it from a change hash value. 
-/// -/// \param[in] value A change hash as an `AMbyteSpan` struct. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_CHANGE_HASH` item. -/// \pre \p value.src `!= NULL` -/// \pre `0 <` \p value.count `<= sizeof(`\p value.src `)` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// value.src must be a byte array of length >= value.count -#[no_mangle] -pub unsafe extern "C" fn AMitemFromChangeHash(value: AMbyteSpan) -> *mut AMresult { - to_result(am::ChangeHash::try_from(&value)) -} - -/// \memberof AMitem -/// \brief Allocates a new item and initializes it from a CRDT counter value. -/// -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_COUNTER` item. -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -#[no_mangle] -pub unsafe extern "C" fn AMitemFromCounter(value: i64) -> *mut AMresult { - AMresult::item(am::Value::counter(value).into()).into() -} - -/// \memberof AMitem -/// \brief Allocates a new item and initializes it from a float value. -/// -/// \param[in] value A 64-bit float. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_F64` item. -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -#[no_mangle] -pub unsafe extern "C" fn AMitemFromF64(value: f64) -> *mut AMresult { - AMresult::item(am::Value::f64(value).into()).into() -} - -/// \memberof AMitem -/// \brief Allocates a new item and initializes it from a signed integer value. -/// -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_INT` item. -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-#[no_mangle] -pub unsafe extern "C" fn AMitemFromInt(value: i64) -> *mut AMresult { - AMresult::item(am::Value::int(value).into()).into() -} - -/// \memberof AMitem -/// \brief Allocates a new item and initializes it from a null value. -/// -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_NULL` item. -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -#[no_mangle] -pub unsafe extern "C" fn AMitemFromNull() -> *mut AMresult { - AMresult::item(am::Value::from(()).into()).into() -} - -/// \memberof AMitem -/// \brief Allocates a new item and initializes it from a UTF-8 string value. -/// -/// \param[in] value A UTF-8 string view as an `AMbyteSpan` struct. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_STR` item. -/// \pre \p value.src `!= NULL` -/// \pre `0 <` \p value.count `<= sizeof(`\p value.src `)` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// value.src must be a byte array of length >= value.count -#[no_mangle] -pub unsafe extern "C" fn AMitemFromStr(value: AMbyteSpan) -> *mut AMresult { - AMresult::item(am::Value::str(to_str!(value)).into()).into() -} - -/// \memberof AMitem -/// \brief Allocates a new item and initializes it from a *nix timestamp -/// (milliseconds) value. -/// -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_TIMESTAMP` item. -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -#[no_mangle] -pub unsafe extern "C" fn AMitemFromTimestamp(value: i64) -> *mut AMresult { - AMresult::item(am::Value::timestamp(value).into()).into() -} - -/// \memberof AMitem -/// \brief Allocates a new item and initializes it from an unsigned integer value. 
-/// -/// \param[in] value A 64-bit unsigned integer. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_UINT` item. -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -#[no_mangle] -pub unsafe extern "C" fn AMitemFromUint(value: u64) -> *mut AMresult { - AMresult::item(am::Value::uint(value).into()).into() -} - -/// \memberof AMitem -/// \brief Gets the type of an item's index. -/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \return An `AMidxType` enum tag. -/// \pre \p item `!= NULL` -/// \post `(`\p item `== NULL) -> 0` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemIdxType(item: *const AMitem) -> AMidxType { - if let Some(item) = item.as_ref() { - return item.0.as_ref().into(); - } - Default::default() -} - -/// \memberof AMitem -/// \brief Gets the object identifier of an item. -/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \return A pointer to an `AMobjId` struct. -/// \pre \p item `!= NULL` -/// \post `(`\p item `== NULL) -> NULL` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemObjId(item: *const AMitem) -> *const AMobjId { - if let Some(item) = item.as_ref() { - if let Some(obj_id) = &item.as_ref().obj_id { - return obj_id; - } - } - std::ptr::null() -} - -/// \memberof AMitem -/// \brief Gets the UTF-8 string view key index of an item. -/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \param[out] value A pointer to a UTF-8 string view as an `AMbyteSpan` struct. -/// \return `true` if `AMitemIdxType(`\p item `) == AM_IDX_TYPE_KEY` and -/// \p *value has been reassigned, `false` otherwise. 
-/// \pre \p item `!= NULL` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemKey(item: *const AMitem, value: *mut AMbyteSpan) -> bool { - if let Some(item) = item.as_ref() { - if let Some(index) = &item.as_ref().index { - if let Ok(key) = index.try_into() { - if !value.is_null() { - *value = key; - return true; - } - } - } - } - false -} - -/// \memberof AMitem -/// \brief Gets the unsigned integer position index of an item. -/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \param[out] value A pointer to a `size_t`. -/// \return `true` if `AMitemIdxType(`\p item `) == AM_IDX_TYPE_POS` and -/// \p *value has been reassigned, `false` otherwise. -/// \pre \p item `!= NULL` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemPos(item: *const AMitem, value: *mut usize) -> bool { - if let Some(item) = item.as_ref() { - if let Some(index) = &item.as_ref().index { - if let Ok(pos) = index.try_into() { - if !value.is_null() { - *value = pos; - return true; - } - } - } - } - false -} - -/// \memberof AMitem -/// \brief Gets the reference count of an item. -/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \return A 64-bit unsigned integer. -/// \pre \p item `!= NULL` -/// \post `(`\p item `== NULL) -> 0` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemRefCount(item: *const AMitem) -> usize { - if let Some(item) = item.as_ref() { - return Rc::strong_count(&item.0); - } - 0 -} - -/// \memberof AMitem -/// \brief Gets a new result for an item. -/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \return A pointer to an `AMresult` struct. 
-/// \pre \p item `!= NULL` -/// \post `(`\p item `== NULL) -> NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemResult(item: *const AMitem) -> *mut AMresult { - if let Some(item) = item.as_ref() { - return AMresult::item(item.clone()).into(); - } - std::ptr::null_mut() -} - -/// \memberof AMitem -/// \brief Gets the actor identifier value of an item. -/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \param[out] value A pointer to an `AMactorId` struct pointer. -/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_ACTOR_ID` and -/// \p *value has been reassigned, `false` otherwise. -/// \pre \p item `!= NULL` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemToActorId( - item: *const AMitem, - value: *mut *const AMactorId, -) -> bool { - if let Some(item) = item.as_ref() { - if let Ok(actor_id) = <&AMactorId>::try_from(item) { - if !value.is_null() { - *value = actor_id; - return true; - } - } - } - false -} - -/// \memberof AMitem -/// \brief Gets the boolean value of an item. -/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \param[out] value A pointer to a boolean. -/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_BOOL` and -/// \p *value has been reassigned, `false` otherwise. -/// \pre \p item `!= NULL` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemToBool(item: *const AMitem, value: *mut bool) -> bool { - if let Some(item) = item.as_ref() { - if let Ok(boolean) = item.try_into() { - if !value.is_null() { - *value = boolean; - return true; - } - } - } - false -} - -/// \memberof AMitem -/// \brief Gets the array of bytes value of an item. 
-/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \param[out] value A pointer to an `AMbyteSpan` struct. -/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_BYTES` and -/// \p *value has been reassigned, `false` otherwise. -/// \pre \p item `!= NULL` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemToBytes(item: *const AMitem, value: *mut AMbyteSpan) -> bool { - if let Some(item) = item.as_ref() { - if let Ok(bytes) = item.as_ref().try_into_bytes() { - if !value.is_null() { - *value = bytes; - return true; - } - } - } - false -} - -/// \memberof AMitem -/// \brief Gets the change value of an item. -/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \param[out] value A pointer to an `AMchange` struct pointer. -/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_CHANGE` and -/// \p *value has been reassigned, `false` otherwise. -/// \pre \p item `!= NULL` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemToChange(item: *mut AMitem, value: *mut *mut AMchange) -> bool { - if let Some(item) = item.as_mut() { - if let Ok(change) = <&mut AMchange>::try_from(item) { - if !value.is_null() { - *value = change; - return true; - } - } - } - false -} - -/// \memberof AMitem -/// \brief Gets the change hash value of an item. -/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \param[out] value A pointer to an `AMbyteSpan` struct. -/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_CHANGE_HASH` and -/// \p *value has been reassigned, `false` otherwise. 
-/// \pre \p item `!= NULL` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemToChangeHash(item: *const AMitem, value: *mut AMbyteSpan) -> bool { - if let Some(item) = item.as_ref() { - if let Ok(change_hash) = item.as_ref().try_into_change_hash() { - if !value.is_null() { - *value = change_hash; - return true; - } - } - } - false -} - -/// \memberof AMitem -/// \brief Gets the CRDT counter value of an item. -/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \param[out] value A pointer to a signed 64-bit integer. -/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_COUNTER` and -/// \p *value has been reassigned, `false` otherwise. -/// \pre \p item `!= NULL` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemToCounter(item: *const AMitem, value: *mut i64) -> bool { - if let Some(item) = item.as_ref() { - if let Ok(counter) = item.as_ref().try_into_counter() { - if !value.is_null() { - *value = counter; - return true; - } - } - } - false -} - -/// \memberof AMitem -/// \brief Gets the document value of an item. -/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \param[out] value A pointer to an `AMdoc` struct pointer. -/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_DOC` and -/// \p *value has been reassigned, `false` otherwise. -/// \pre \p item `!= NULL` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemToDoc(item: *mut AMitem, value: *mut *const AMdoc) -> bool { - if let Some(item) = item.as_mut() { - if let Ok(doc) = <&mut AMdoc>::try_from(item) { - if !value.is_null() { - *value = doc; - return true; - } - } - } - false -} - -/// \memberof AMitem -/// \brief Gets the float value of an item. -/// -/// \param[in] item A pointer to an `AMitem` struct. 
-/// \param[out] value A pointer to a 64-bit float. -/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_F64` and -/// \p *value has been reassigned, `false` otherwise. -/// \pre \p item `!= NULL` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemToF64(item: *const AMitem, value: *mut f64) -> bool { - if let Some(item) = item.as_ref() { - if let Ok(float) = item.try_into() { - if !value.is_null() { - *value = float; - return true; - } - } - } - false -} - -/// \memberof AMitem -/// \brief Gets the integer value of an item. -/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \param[out] value A pointer to a signed 64-bit integer. -/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_INT` and -/// \p *value has been reassigned, `false` otherwise. -/// \pre \p item `!= NULL` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemToInt(item: *const AMitem, value: *mut i64) -> bool { - if let Some(item) = item.as_ref() { - if let Ok(int) = item.as_ref().try_into_int() { - if !value.is_null() { - *value = int; - return true; - } - } - } - false -} - -/// \memberof AMitem -/// \brief Gets the UTF-8 string view value of an item. -/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \param[out] value A pointer to a UTF-8 string view as an `AMbyteSpan` struct. -/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_STR` and -/// \p *value has been reassigned, `false` otherwise. 
-/// \pre \p item `!= NULL` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemToStr(item: *const AMitem, value: *mut AMbyteSpan) -> bool { - if let Some(item) = item.as_ref() { - if let Ok(str) = item.as_ref().try_into_str() { - if !value.is_null() { - *value = str; - return true; - } - } - } - false -} - -/// \memberof AMitem -/// \brief Gets the synchronization have value of an item. -/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \param[out] value A pointer to an `AMsyncHave` struct pointer. -/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_SYNC_HAVE` and -/// \p *value has been reassigned, `false` otherwise. -/// \pre \p item `!= NULL` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemToSyncHave( - item: *const AMitem, - value: *mut *const AMsyncHave, -) -> bool { - if let Some(item) = item.as_ref() { - if let Ok(sync_have) = <&AMsyncHave>::try_from(item) { - if !value.is_null() { - *value = sync_have; - return true; - } - } - } - false -} - -/// \memberof AMitem -/// \brief Gets the synchronization message value of an item. -/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \param[out] value A pointer to an `AMsyncMessage` struct pointer. -/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_SYNC_MESSAGE` and -/// \p *value has been reassigned, `false` otherwise. 
-/// \pre \p item `!= NULL` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemToSyncMessage( - item: *const AMitem, - value: *mut *const AMsyncMessage, -) -> bool { - if let Some(item) = item.as_ref() { - if let Ok(sync_message) = <&AMsyncMessage>::try_from(item) { - if !value.is_null() { - *value = sync_message; - return true; - } - } - } - false -} - -/// \memberof AMitem -/// \brief Gets the synchronization state value of an item. -/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \param[out] value A pointer to an `AMsyncState` struct pointer. -/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_SYNC_STATE` and -/// \p *value has been reassigned, `false` otherwise. -/// \pre \p item `!= NULL` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemToSyncState( - item: *mut AMitem, - value: *mut *mut AMsyncState, -) -> bool { - if let Some(item) = item.as_mut() { - if let Ok(sync_state) = <&mut AMsyncState>::try_from(item) { - if !value.is_null() { - *value = sync_state; - return true; - } - } - } - false -} - -/// \memberof AMitem -/// \brief Gets the *nix timestamp (milliseconds) value of an item. -/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \param[out] value A pointer to a signed 64-bit integer. -/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_TIMESTAMP` and -/// \p *value has been reassigned, `false` otherwise. 
-/// \pre \p item `!= NULL` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemToTimestamp(item: *const AMitem, value: *mut i64) -> bool { - if let Some(item) = item.as_ref() { - if let Ok(timestamp) = item.as_ref().try_into_timestamp() { - if !value.is_null() { - *value = timestamp; - return true; - } - } - } - false -} - -/// \memberof AMitem -/// \brief Gets the unsigned integer value of an item. -/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \param[out] value A pointer to a unsigned 64-bit integer. -/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_UINT` and -/// \p *value has been reassigned, `false` otherwise. -/// \pre \p item `!= NULL` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemToUint(item: *const AMitem, value: *mut u64) -> bool { - if let Some(item) = item.as_ref() { - if let Ok(uint) = item.try_into() { - if !value.is_null() { - *value = uint; - return true; - } - } - } - false -} - -/// \memberof AMitem -/// \brief Gets the unknown type of value of an item. -/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \param[out] value A pointer to an `AMunknownValue` struct. -/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_UNKNOWN` and -/// \p *value has been reassigned, `false` otherwise. -/// \pre \p item `!= NULL` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemToUnknown(item: *const AMitem, value: *mut AMunknownValue) -> bool { - if let Some(item) = item.as_ref() { - if let Ok(unknown) = item.try_into() { - if !value.is_null() { - *value = unknown; - return true; - } - } - } - false -} - -/// \memberof AMitem -/// \brief Gets the type of an item's value. -/// -/// \param[in] item A pointer to an `AMitem` struct. -/// \return An `AMvalType` enum tag. 
-/// \pre \p item `!= NULL` -/// \post `(`\p item `== NULL) -> 0` -/// \internal -/// -/// # Safety -/// item must be a valid pointer to an AMitem -#[no_mangle] -pub unsafe extern "C" fn AMitemValType(item: *const AMitem) -> AMvalType { - if let Some(item) = item.as_ref() { - return item.0.as_ref().into(); - } - Default::default() -} diff --git a/rust/automerge-c/src/items.rs b/rust/automerge-c/src/items.rs deleted file mode 100644 index 361078b3..00000000 --- a/rust/automerge-c/src/items.rs +++ /dev/null @@ -1,401 +0,0 @@ -use automerge as am; - -use std::ffi::c_void; -use std::marker::PhantomData; -use std::mem::size_of; - -use crate::item::AMitem; -use crate::result::AMresult; - -#[repr(C)] -struct Detail { - len: usize, - offset: isize, - ptr: *const c_void, -} - -/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call -/// (https://github.com/eqrion/cbindgen/issues/252) but it will -/// propagate the name of a constant initialized from it so if the -/// constant's name is a symbolic representation of the value it can be -/// converted into a number by post-processing the header it generated. -pub const USIZE_USIZE_USIZE_: usize = size_of::(); - -impl Detail { - fn new(items: &[AMitem], offset: isize) -> Self { - Self { - len: items.len(), - offset, - ptr: items.as_ptr() as *mut c_void, - } - } - - pub fn advance(&mut self, n: isize) { - if n == 0 { - return; - } - let len = self.len as isize; - self.offset = if self.offset < 0 { - // It's reversed. - let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); - if unclipped >= 0 { - // Clip it to the forward stop. - len - } else { - std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) - } - } else { - let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); - if unclipped < 0 { - // Clip it to the reverse stop. 
- -(len + 1) - } else { - std::cmp::max(0, std::cmp::min(unclipped, len)) - } - } - } - - pub fn get_index(&self) -> usize { - (self.offset - + if self.offset < 0 { - self.len as isize - } else { - 0 - }) as usize - } - - pub fn next(&mut self, n: isize) -> Option<&mut AMitem> { - if self.is_stopped() { - return None; - } - let slice: &mut [AMitem] = - unsafe { std::slice::from_raw_parts_mut(self.ptr as *mut AMitem, self.len) }; - let value = &mut slice[self.get_index()]; - self.advance(n); - Some(value) - } - - pub fn is_stopped(&self) -> bool { - let len = self.len as isize; - self.offset < -len || self.offset == len - } - - pub fn prev(&mut self, n: isize) -> Option<&mut AMitem> { - self.advance(-n); - if self.is_stopped() { - return None; - } - let slice: &mut [AMitem] = - unsafe { std::slice::from_raw_parts_mut(self.ptr as *mut AMitem, self.len) }; - Some(&mut slice[self.get_index()]) - } - - pub fn reversed(&self) -> Self { - Self { - len: self.len, - offset: -(self.offset + 1), - ptr: self.ptr, - } - } - - pub fn rewound(&self) -> Self { - Self { - len: self.len, - offset: if self.offset < 0 { -1 } else { 0 }, - ptr: self.ptr, - } - } -} - -impl From for [u8; USIZE_USIZE_USIZE_] { - fn from(detail: Detail) -> Self { - unsafe { - std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) - .try_into() - .unwrap() - } - } -} - -/// \struct AMitems -/// \installed_headerfile -/// \brief A random-access iterator over a sequence of `AMitem` structs. -#[repr(C)] -#[derive(Eq, PartialEq)] -pub struct AMitems<'a> { - /// An implementation detail that is intentionally opaque. - /// \warning Modifying \p detail will cause undefined behavior. - /// \note The actual size of \p detail will vary by platform, this is just - /// the one for the platform this documentation was built on. 
- detail: [u8; USIZE_USIZE_USIZE_], - phantom: PhantomData<&'a mut AMresult>, -} - -impl<'a> AMitems<'a> { - pub fn new(items: &[AMitem]) -> Self { - Self { - detail: Detail::new(items, 0).into(), - phantom: PhantomData, - } - } - - pub fn advance(&mut self, n: isize) { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.advance(n); - } - - pub fn len(&self) -> usize { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - detail.len - } - - pub fn next(&mut self, n: isize) -> Option<&mut AMitem> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.next(n) - } - - pub fn prev(&mut self, n: isize) -> Option<&mut AMitem> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.prev(n) - } - - pub fn reversed(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.reversed().into(), - phantom: PhantomData, - } - } - - pub fn rewound(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.rewound().into(), - phantom: PhantomData, - } - } -} - -impl<'a> AsRef<[AMitem]> for AMitems<'a> { - fn as_ref(&self) -> &[AMitem] { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - unsafe { std::slice::from_raw_parts(detail.ptr as *const AMitem, detail.len) } - } -} - -impl<'a> Default for AMitems<'a> { - fn default() -> Self { - Self { - detail: [0; USIZE_USIZE_USIZE_], - phantom: PhantomData, - } - } -} - -impl TryFrom<&AMitems<'_>> for Vec { - type Error = am::AutomergeError; - - fn try_from(items: &AMitems<'_>) -> Result { - let mut changes = Vec::::with_capacity(items.len()); - for item in items.as_ref().iter() { - match <&am::Change>::try_from(item.as_ref()) { - Ok(change) => { - changes.push(change.clone()); - } - Err(e) => { - return Err(e); - } - } - } - Ok(changes) - } -} - -impl TryFrom<&AMitems<'_>> for Vec { - type Error = 
am::AutomergeError; - - fn try_from(items: &AMitems<'_>) -> Result { - let mut change_hashes = Vec::::with_capacity(items.len()); - for item in items.as_ref().iter() { - match <&am::ChangeHash>::try_from(item.as_ref()) { - Ok(change_hash) => { - change_hashes.push(*change_hash); - } - Err(e) => { - return Err(e); - } - } - } - Ok(change_hashes) - } -} - -impl TryFrom<&AMitems<'_>> for Vec { - type Error = am::AutomergeError; - - fn try_from(items: &AMitems<'_>) -> Result { - let mut scalars = Vec::::with_capacity(items.len()); - for item in items.as_ref().iter() { - match <&am::ScalarValue>::try_from(item.as_ref()) { - Ok(scalar) => { - scalars.push(scalar.clone()); - } - Err(e) => { - return Err(e); - } - } - } - Ok(scalars) - } -} - -/// \memberof AMitems -/// \brief Advances an iterator over a sequence of object items by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction. -/// -/// \param[in] items A pointer to an `AMitems` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \pre \p items `!= NULL` -/// \internal -/// -/// #Safety -/// items must be a valid pointer to an AMitems -#[no_mangle] -pub unsafe extern "C" fn AMitemsAdvance(items: *mut AMitems, n: isize) { - if let Some(items) = items.as_mut() { - items.advance(n); - }; -} - -/// \memberof AMitems -/// \brief Tests the equality of two sequences of object items underlying a -/// pair of iterators. -/// -/// \param[in] items1 A pointer to an `AMitems` struct. -/// \param[in] items2 A pointer to an `AMitems` struct. -/// \return `true` if \p items1 `==` \p items2 and `false` otherwise. 
-/// \pre \p items1 `!= NULL` -/// \pre \p items1 `!= NULL` -/// \post `!(`\p items1 `&&` \p items2 `) -> false` -/// \internal -/// -/// #Safety -/// items1 must be a valid pointer to an AMitems -/// items2 must be a valid pointer to an AMitems -#[no_mangle] -pub unsafe extern "C" fn AMitemsEqual(items1: *const AMitems, items2: *const AMitems) -> bool { - match (items1.as_ref(), items2.as_ref()) { - (Some(items1), Some(items2)) => items1.as_ref() == items2.as_ref(), - (None, None) | (None, Some(_)) | (Some(_), None) => false, - } -} - -/// \memberof AMitems -/// \brief Gets the object item at the current position of an iterator over a -/// sequence of object items and then advances it by at most \p |n| -/// positions where the sign of \p n is relative to the iterator's -/// direction. -/// -/// \param[in] items A pointer to an `AMitems` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A pointer to an `AMitem` struct that's `NULL` when \p items -/// was previously advanced past its forward/reverse limit. -/// \pre \p items `!= NULL` -/// \internal -/// -/// #Safety -/// items must be a valid pointer to an AMitems -#[no_mangle] -pub unsafe extern "C" fn AMitemsNext(items: *mut AMitems, n: isize) -> *mut AMitem { - if let Some(items) = items.as_mut() { - if let Some(item) = items.next(n) { - return item; - } - } - std::ptr::null_mut() -} - -/// \memberof AMitems -/// \brief Advances an iterator over a sequence of object items by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction and then gets the object item at its new -/// position. -/// -/// \param[in] items A pointer to an `AMitems` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A pointer to an `AMitem` struct that's `NULL` when \p items -/// is presently advanced past its forward/reverse limit. 
-/// \pre \p items `!= NULL` -/// \internal -/// -/// #Safety -/// items must be a valid pointer to an AMitems -#[no_mangle] -pub unsafe extern "C" fn AMitemsPrev(items: *mut AMitems, n: isize) -> *mut AMitem { - if let Some(items) = items.as_mut() { - if let Some(obj_item) = items.prev(n) { - return obj_item; - } - } - std::ptr::null_mut() -} - -/// \memberof AMitems -/// \brief Gets the size of the sequence underlying an iterator. -/// -/// \param[in] items A pointer to an `AMitems` struct. -/// \return The count of items in \p items. -/// \pre \p items `!= NULL` -/// \internal -/// -/// #Safety -/// items must be a valid pointer to an AMitems -#[no_mangle] -pub unsafe extern "C" fn AMitemsSize(items: *const AMitems) -> usize { - if let Some(items) = items.as_ref() { - return items.len(); - } - 0 -} - -/// \memberof AMitems -/// \brief Creates an iterator over the same sequence of items as the -/// given one but with the opposite position and direction. -/// -/// \param[in] items A pointer to an `AMitems` struct. -/// \return An `AMitems` struct -/// \pre \p items `!= NULL` -/// \internal -/// -/// #Safety -/// items must be a valid pointer to an AMitems -#[no_mangle] -pub unsafe extern "C" fn AMitemsReversed(items: *const AMitems) -> AMitems { - if let Some(items) = items.as_ref() { - return items.reversed(); - } - Default::default() -} - -/// \memberof AMitems -/// \brief Creates an iterator at the starting position over the same sequence -/// of items as the given one. -/// -/// \param[in] items A pointer to an `AMitems` struct. 
-/// \return An `AMitems` struct -/// \pre \p items `!= NULL` -/// \internal -/// -/// #Safety -/// items must be a valid pointer to an AMitems -#[no_mangle] -pub unsafe extern "C" fn AMitemsRewound(items: *const AMitems) -> AMitems { - if let Some(items) = items.as_ref() { - return items.rewound(); - } - Default::default() -} diff --git a/rust/automerge-c/src/lib.rs b/rust/automerge-c/src/lib.rs deleted file mode 100644 index 1ee1a85d..00000000 --- a/rust/automerge-c/src/lib.rs +++ /dev/null @@ -1,12 +0,0 @@ -mod actor_id; -mod byte_span; -mod change; -mod doc; -mod index; -mod item; -mod items; -mod obj; -mod result; -mod sync; - -// include!(concat!(env!("OUT_DIR"), "/enum_string_functions.rs")); diff --git a/rust/automerge-c/src/obj.rs b/rust/automerge-c/src/obj.rs deleted file mode 100644 index 3d52286c..00000000 --- a/rust/automerge-c/src/obj.rs +++ /dev/null @@ -1,216 +0,0 @@ -use automerge as am; -use std::any::type_name; -use std::cell::RefCell; -use std::ops::Deref; - -use crate::actor_id::AMactorId; - -macro_rules! to_obj_id { - ($handle:expr) => {{ - match $handle.as_ref() { - Some(obj_id) => obj_id, - None => &automerge::ROOT, - } - }}; -} - -pub(crate) use to_obj_id; - -macro_rules! to_obj_type { - ($c_obj_type:expr) => {{ - let result: Result = (&$c_obj_type).try_into(); - match result { - Ok(obj_type) => obj_type, - Err(e) => return AMresult::error(&e.to_string()).into(), - } - }}; -} - -pub(crate) use to_obj_type; - -/// \struct AMobjId -/// \installed_headerfile -/// \brief An object's unique identifier. 
-#[derive(Eq, PartialEq)] -pub struct AMobjId { - body: am::ObjId, - c_actor_id: RefCell>, -} - -impl AMobjId { - pub fn new(obj_id: am::ObjId) -> Self { - Self { - body: obj_id, - c_actor_id: Default::default(), - } - } - - pub fn actor_id(&self) -> *const AMactorId { - let mut c_actor_id = self.c_actor_id.borrow_mut(); - match c_actor_id.as_mut() { - None => { - if let am::ObjId::Id(_, actor_id, _) = &self.body { - return c_actor_id.insert(AMactorId::new(actor_id)); - } - } - Some(value) => { - return value; - } - } - std::ptr::null() - } -} - -impl AsRef for AMobjId { - fn as_ref(&self) -> &am::ObjId { - &self.body - } -} - -impl Deref for AMobjId { - type Target = am::ObjId; - - fn deref(&self) -> &Self::Target { - &self.body - } -} - -/// \memberof AMobjId -/// \brief Gets the actor identifier component of an object identifier. -/// -/// \param[in] obj_id A pointer to an `AMobjId` struct. -/// \return A pointer to an `AMactorId` struct or `NULL`. -/// \pre \p obj_id `!= NULL` -/// \internal -/// -/// # Safety -/// obj_id must be a valid pointer to an AMobjId -#[no_mangle] -pub unsafe extern "C" fn AMobjIdActorId(obj_id: *const AMobjId) -> *const AMactorId { - if let Some(obj_id) = obj_id.as_ref() { - return obj_id.actor_id(); - }; - std::ptr::null() -} - -/// \memberof AMobjId -/// \brief Gets the counter component of an object identifier. -/// -/// \param[in] obj_id A pointer to an `AMobjId` struct. -/// \return A 64-bit unsigned integer. -/// \pre \p obj_id `!= NULL` -/// \internal -/// -/// # Safety -/// obj_id must be a valid pointer to an AMobjId -#[no_mangle] -pub unsafe extern "C" fn AMobjIdCounter(obj_id: *const AMobjId) -> u64 { - if let Some(obj_id) = obj_id.as_ref() { - match obj_id.as_ref() { - am::ObjId::Id(counter, _, _) => *counter, - am::ObjId::Root => 0, - } - } else { - u64::MAX - } -} - -/// \memberof AMobjId -/// \brief Tests the equality of two object identifiers. -/// -/// \param[in] obj_id1 A pointer to an `AMobjId` struct. 
-/// \param[in] obj_id2 A pointer to an `AMobjId` struct. -/// \return `true` if \p obj_id1 `==` \p obj_id2 and `false` otherwise. -/// \pre \p obj_id1 `!= NULL` -/// \pre \p obj_id1 `!= NULL` -/// \post `!(`\p obj_id1 `&&` \p obj_id2 `) -> false` -/// \internal -/// -/// #Safety -/// obj_id1 must be a valid AMobjId pointer -/// obj_id2 must be a valid AMobjId pointer -#[no_mangle] -pub unsafe extern "C" fn AMobjIdEqual(obj_id1: *const AMobjId, obj_id2: *const AMobjId) -> bool { - match (obj_id1.as_ref(), obj_id2.as_ref()) { - (Some(obj_id1), Some(obj_id2)) => obj_id1 == obj_id2, - (None, None) | (None, Some(_)) | (Some(_), None) => false, - } -} - -/// \memberof AMobjId -/// \brief Gets the index component of an object identifier. -/// -/// \param[in] obj_id A pointer to an `AMobjId` struct. -/// \return A 64-bit unsigned integer. -/// \pre \p obj_id `!= NULL` -/// \internal -/// -/// # Safety -/// obj_id must be a valid pointer to an AMobjId -#[no_mangle] -pub unsafe extern "C" fn AMobjIdIndex(obj_id: *const AMobjId) -> usize { - use am::ObjId::*; - - if let Some(obj_id) = obj_id.as_ref() { - match obj_id.as_ref() { - Id(_, _, index) => *index, - Root => 0, - } - } else { - usize::MAX - } -} - -/// \ingroup enumerations -/// \enum AMobjType -/// \installed_headerfile -/// \brief The type of an object value. -#[derive(PartialEq, Eq)] -#[repr(u8)] -pub enum AMobjType { - /// The default tag, not a type signifier. - Default = 0, - /// A list. - List = 1, - /// A key-value map. - Map, - /// A list of Unicode graphemes. 
- Text, -} - -impl Default for AMobjType { - fn default() -> Self { - Self::Default - } -} - -impl From<&am::ObjType> for AMobjType { - fn from(o: &am::ObjType) -> Self { - use am::ObjType::*; - - match o { - List => Self::List, - Map | Table => Self::Map, - Text => Self::Text, - } - } -} - -impl TryFrom<&AMobjType> for am::ObjType { - type Error = am::AutomergeError; - - fn try_from(c_obj_type: &AMobjType) -> Result { - use am::AutomergeError::InvalidValueType; - use AMobjType::*; - - match c_obj_type { - List => Ok(Self::List), - Map => Ok(Self::Map), - Text => Ok(Self::Text), - _ => Err(InvalidValueType { - expected: type_name::().to_string(), - unexpected: type_name::().to_string(), - }), - } - } -} diff --git a/rust/automerge-c/src/result.rs b/rust/automerge-c/src/result.rs deleted file mode 100644 index 2975f38b..00000000 --- a/rust/automerge-c/src/result.rs +++ /dev/null @@ -1,660 +0,0 @@ -use automerge as am; - -use std::ops::{Range, RangeFrom, RangeFull, RangeTo}; - -use crate::byte_span::AMbyteSpan; -use crate::index::AMindex; -use crate::item::AMitem; -use crate::items::AMitems; - -/// \struct AMresult -/// \installed_headerfile -/// \brief A discriminated union of result variants. 
-pub enum AMresult { - Items(Vec), - Error(String), -} - -impl AMresult { - pub(crate) fn error(s: &str) -> Self { - Self::Error(s.to_string()) - } - - pub(crate) fn item(item: AMitem) -> Self { - Self::Items(vec![item]) - } - - pub(crate) fn items(items: Vec) -> Self { - Self::Items(items) - } -} - -impl Default for AMresult { - fn default() -> Self { - Self::Items(vec![]) - } -} - -impl From for AMresult { - fn from(auto_commit: am::AutoCommit) -> Self { - Self::item(AMitem::exact(am::ROOT, auto_commit.into())) - } -} - -impl From for AMresult { - fn from(change: am::Change) -> Self { - Self::item(change.into()) - } -} - -impl From for AMresult { - fn from(change_hash: am::ChangeHash) -> Self { - Self::item(change_hash.into()) - } -} - -impl From> for AMresult { - fn from(maybe: Option) -> Self { - match maybe { - Some(change_hash) => change_hash.into(), - None => Self::item(Default::default()), - } - } -} - -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - Ok(change_hash) => change_hash.into(), - Err(e) => Self::error(&e.to_string()), - } - } -} - -impl From for AMresult { - fn from(state: am::sync::State) -> Self { - Self::item(state.into()) - } -} - -impl From> for AMresult { - fn from(pairs: am::Values<'static>) -> Self { - Self::items(pairs.map(|(v, o)| AMitem::exact(o, v.into())).collect()) - } -} - -impl From for *mut AMresult { - fn from(b: AMresult) -> Self { - Box::into_raw(Box::new(b)) - } -} - -impl From> for AMresult { - fn from(keys: am::Keys<'_, '_>) -> Self { - Self::items(keys.map(|s| s.into()).collect()) - } -} - -impl From> for AMresult { - fn from(keys: am::KeysAt<'_, '_>) -> Self { - Self::items(keys.map(|s| s.into()).collect()) - } -} - -impl From>> for AMresult { - fn from(list_range: am::ListRange<'static, Range>) -> Self { - Self::items( - list_range - .map(|(i, v, o)| AMitem::indexed(AMindex::Pos(i), o, v.into())) - .collect(), - ) - } -} - -impl From>> for AMresult { - fn from(list_range: 
am::ListRangeAt<'static, Range>) -> Self { - Self::items( - list_range - .map(|(i, v, o)| AMitem::indexed(AMindex::Pos(i), o, v.into())) - .collect(), - ) - } -} - -impl From>> for AMresult { - fn from(map_range: am::MapRange<'static, Range>) -> Self { - Self::items( - map_range - .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) - .collect(), - ) - } -} - -impl From>> for AMresult { - fn from(map_range: am::MapRangeAt<'static, Range>) -> Self { - Self::items( - map_range - .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) - .collect(), - ) - } -} - -impl From>> for AMresult { - fn from(map_range: am::MapRange<'static, RangeFrom>) -> Self { - Self::items( - map_range - .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) - .collect(), - ) - } -} - -impl From>> for AMresult { - fn from(map_range: am::MapRangeAt<'static, RangeFrom>) -> Self { - Self::items( - map_range - .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) - .collect(), - ) - } -} - -impl From> for AMresult { - fn from(map_range: am::MapRange<'static, RangeFull>) -> Self { - Self::items( - map_range - .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) - .collect(), - ) - } -} - -impl From> for AMresult { - fn from(map_range: am::MapRangeAt<'static, RangeFull>) -> Self { - Self::items( - map_range - .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) - .collect(), - ) - } -} - -impl From>> for AMresult { - fn from(map_range: am::MapRange<'static, RangeTo>) -> Self { - Self::items( - map_range - .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) - .collect(), - ) - } -} - -impl From>> for AMresult { - fn from(map_range: am::MapRangeAt<'static, RangeTo>) -> Self { - Self::items( - map_range - .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) - .collect(), - ) - } -} - -impl From> for AMresult { - fn from(maybe: Option<&am::Change>) -> Self { - 
Self::item(match maybe { - Some(change) => change.clone().into(), - None => Default::default(), - }) - } -} - -impl From> for AMresult { - fn from(maybe: Option) -> Self { - Self::item(match maybe { - Some(message) => message.into(), - None => Default::default(), - }) - } -} - -impl From> for AMresult { - fn from(maybe: Result<(), am::AutomergeError>) -> Self { - match maybe { - Ok(()) => Self::item(Default::default()), - Err(e) => Self::error(&e.to_string()), - } - } -} - -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - Ok(actor_id) => Self::item(actor_id.into()), - Err(e) => Self::error(&e.to_string()), - } - } -} - -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - Ok(actor_id) => Self::item(actor_id.into()), - Err(e) => Self::error(&e.to_string()), - } - } -} - -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - Ok(auto_commit) => Self::item(auto_commit.into()), - Err(e) => Self::error(&e.to_string()), - } - } -} - -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - Ok(change) => Self::item(change.into()), - Err(e) => Self::error(&e.to_string()), - } - } -} - -impl From<(Result, am::ObjType)> for AMresult { - fn from(tuple: (Result, am::ObjType)) -> Self { - match tuple { - (Ok(obj_id), obj_type) => Self::item((obj_id, obj_type).into()), - (Err(e), _) => Self::error(&e.to_string()), - } - } -} - -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - Ok(message) => Self::item(message.into()), - Err(e) => Self::error(&e.to_string()), - } - } -} - -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - Ok(state) => Self::item(state.into()), - Err(e) => Self::error(&e.to_string()), - } - } -} - -impl From, am::AutomergeError>> for AMresult { - fn from(maybe: Result, am::AutomergeError>) -> Self { - match maybe { - Ok(value) => Self::item(value.into()), - Err(e) => Self::error(&e.to_string()), - } - } -} 
- -impl From, am::ObjId)>, am::AutomergeError>> for AMresult { - fn from(maybe: Result, am::ObjId)>, am::AutomergeError>) -> Self { - match maybe { - Ok(Some((value, obj_id))) => Self::item(AMitem::exact(obj_id, value.into())), - Ok(None) => Self::item(Default::default()), - Err(e) => Self::error(&e.to_string()), - } - } -} - -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - Ok(string) => Self::item(string.into()), - Err(e) => Self::error(&e.to_string()), - } - } -} - -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - Ok(size) => Self::item(am::Value::uint(size as u64).into()), - Err(e) => Self::error(&e.to_string()), - } - } -} - -impl From, am::AutomergeError>> for AMresult { - fn from(maybe: Result, am::AutomergeError>) -> Self { - match maybe { - Ok(changes) => Self::items(changes.into_iter().map(|change| change.into()).collect()), - Err(e) => Self::error(&e.to_string()), - } - } -} - -impl From, am::AutomergeError>> for AMresult { - fn from(maybe: Result, am::AutomergeError>) -> Self { - match maybe { - Ok(changes) => Self::items( - changes - .into_iter() - .map(|change| change.clone().into()) - .collect(), - ), - Err(e) => Self::error(&e.to_string()), - } - } -} - -impl From, am::LoadChangeError>> for AMresult { - fn from(maybe: Result, am::LoadChangeError>) -> Self { - match maybe { - Ok(changes) => Self::items(changes.into_iter().map(|change| change.into()).collect()), - Err(e) => Self::error(&e.to_string()), - } - } -} - -impl From, am::AutomergeError>> for AMresult { - fn from(maybe: Result, am::AutomergeError>) -> Self { - match maybe { - Ok(change_hashes) => Self::items( - change_hashes - .into_iter() - .map(|change_hash| change_hash.into()) - .collect(), - ), - Err(e) => Self::error(&e.to_string()), - } - } -} - -impl From, am::InvalidChangeHashSlice>> for AMresult { - fn from(maybe: Result, am::InvalidChangeHashSlice>) -> Self { - match maybe { - Ok(change_hashes) => Self::items( - 
change_hashes - .into_iter() - .map(|change_hash| change_hash.into()) - .collect(), - ), - Err(e) => Self::error(&e.to_string()), - } - } -} - -impl From, am::ObjId)>, am::AutomergeError>> for AMresult { - fn from(maybe: Result, am::ObjId)>, am::AutomergeError>) -> Self { - match maybe { - Ok(pairs) => Self::items( - pairs - .into_iter() - .map(|(v, o)| AMitem::exact(o, v.into())) - .collect(), - ), - Err(e) => Self::error(&e.to_string()), - } - } -} - -impl From, am::AutomergeError>> for AMresult { - fn from(maybe: Result, am::AutomergeError>) -> Self { - match maybe { - Ok(bytes) => Self::item(am::Value::bytes(bytes).into()), - Err(e) => Self::error(&e.to_string()), - } - } -} - -impl From<&[am::Change]> for AMresult { - fn from(changes: &[am::Change]) -> Self { - Self::items(changes.iter().map(|change| change.clone().into()).collect()) - } -} - -impl From> for AMresult { - fn from(changes: Vec<&am::Change>) -> Self { - Self::items( - changes - .into_iter() - .map(|change| change.clone().into()) - .collect(), - ) - } -} - -impl From<&[am::ChangeHash]> for AMresult { - fn from(change_hashes: &[am::ChangeHash]) -> Self { - Self::items( - change_hashes - .iter() - .map(|change_hash| (*change_hash).into()) - .collect(), - ) - } -} - -impl From<&[am::sync::Have]> for AMresult { - fn from(haves: &[am::sync::Have]) -> Self { - Self::items(haves.iter().map(|have| have.clone().into()).collect()) - } -} - -impl From> for AMresult { - fn from(change_hashes: Vec) -> Self { - Self::items( - change_hashes - .into_iter() - .map(|change_hash| change_hash.into()) - .collect(), - ) - } -} - -impl From> for AMresult { - fn from(haves: Vec) -> Self { - Self::items(haves.into_iter().map(|have| have.into()).collect()) - } -} - -impl From> for AMresult { - fn from(bytes: Vec) -> Self { - Self::item(am::Value::bytes(bytes).into()) - } -} - -pub fn to_result>(r: R) -> *mut AMresult { - (r.into()).into() -} - -/// \ingroup enumerations -/// \enum AMstatus -/// \installed_headerfile -/// 
\brief The status of an API call. -#[derive(PartialEq, Eq)] -#[repr(u8)] -pub enum AMstatus { - /// Success. - /// \note This tag is unalphabetized so that `0` indicates success. - Ok, - /// Failure due to an error. - Error, - /// Failure due to an invalid result. - InvalidResult, -} - -/// \memberof AMresult -/// \brief Concatenates the items from two results. -/// -/// \param[in] dest A pointer to an `AMresult` struct. -/// \param[in] src A pointer to an `AMresult` struct. -/// \return A pointer to an `AMresult` struct with the items from \p dest in -/// their original order followed by the items from \p src in their -/// original order. -/// \pre \p dest `!= NULL` -/// \pre \p src `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// dest must be a valid pointer to an AMresult -/// src must be a valid pointer to an AMresult -#[no_mangle] -pub unsafe extern "C" fn AMresultCat(dest: *const AMresult, src: *const AMresult) -> *mut AMresult { - use AMresult::*; - - match (dest.as_ref(), src.as_ref()) { - (Some(dest), Some(src)) => match (dest, src) { - (Items(dest_items), Items(src_items)) => { - return AMresult::items( - dest_items - .iter() - .cloned() - .chain(src_items.iter().cloned()) - .collect(), - ) - .into(); - } - (Error(_), Error(_)) | (Error(_), Items(_)) | (Items(_), Error(_)) => { - AMresult::error("Invalid `AMresult`").into() - } - }, - (None, None) | (None, Some(_)) | (Some(_), None) => { - AMresult::error("Invalid `AMresult*`").into() - } - } -} - -/// \memberof AMresult -/// \brief Gets a result's error message string. -/// -/// \param[in] result A pointer to an `AMresult` struct. -/// \return A UTF-8 string view as an `AMbyteSpan` struct. 
-/// \pre \p result `!= NULL` -/// \internal -/// -/// # Safety -/// result must be a valid pointer to an AMresult -#[no_mangle] -pub unsafe extern "C" fn AMresultError(result: *const AMresult) -> AMbyteSpan { - use AMresult::*; - - if let Some(Error(message)) = result.as_ref() { - return message.as_bytes().into(); - } - Default::default() -} - -/// \memberof AMresult -/// \brief Deallocates the storage for a result. -/// -/// \param[in] result A pointer to an `AMresult` struct. -/// \pre \p result `!= NULL` -/// \internal -/// -/// # Safety -/// result must be a valid pointer to an AMresult -#[no_mangle] -pub unsafe extern "C" fn AMresultFree(result: *mut AMresult) { - if !result.is_null() { - let result: AMresult = *Box::from_raw(result); - drop(result) - } -} - -/// \memberof AMresult -/// \brief Gets a result's first item. -/// -/// \param[in] result A pointer to an `AMresult` struct. -/// \return A pointer to an `AMitem` struct. -/// \pre \p result `!= NULL` -/// \internal -/// -/// # Safety -/// result must be a valid pointer to an AMresult -#[no_mangle] -pub unsafe extern "C" fn AMresultItem(result: *mut AMresult) -> *mut AMitem { - use AMresult::*; - - if let Some(Items(items)) = result.as_mut() { - if !items.is_empty() { - return &mut items[0]; - } - } - std::ptr::null_mut() -} - -/// \memberof AMresult -/// \brief Gets a result's items. -/// -/// \param[in] result A pointer to an `AMresult` struct. -/// \return An `AMitems` struct. -/// \pre \p result `!= NULL` -/// \internal -/// -/// # Safety -/// result must be a valid pointer to an AMresult -#[no_mangle] -pub unsafe extern "C" fn AMresultItems<'a>(result: *mut AMresult) -> AMitems<'a> { - use AMresult::*; - - if let Some(Items(items)) = result.as_mut() { - if !items.is_empty() { - return AMitems::new(items); - } - } - Default::default() -} - -/// \memberof AMresult -/// \brief Gets the size of a result. -/// -/// \param[in] result A pointer to an `AMresult` struct. 
-/// \return The count of items within \p result. -/// \pre \p result `!= NULL` -/// \internal -/// -/// # Safety -/// result must be a valid pointer to an AMresult -#[no_mangle] -pub unsafe extern "C" fn AMresultSize(result: *const AMresult) -> usize { - use self::AMresult::*; - - if let Some(Items(items)) = result.as_ref() { - return items.len(); - } - 0 -} - -/// \memberof AMresult -/// \brief Gets the status code of a result. -/// -/// \param[in] result A pointer to an `AMresult` struct. -/// \return An `AMstatus` enum tag. -/// \pre \p result `!= NULL` -/// \internal -/// -/// # Safety -/// result must be a valid pointer to an AMresult -#[no_mangle] -pub unsafe extern "C" fn AMresultStatus(result: *const AMresult) -> AMstatus { - use AMresult::*; - - if let Some(result) = result.as_ref() { - match result { - Error(_) => { - return AMstatus::Error; - } - _ => { - return AMstatus::Ok; - } - } - } - AMstatus::InvalidResult -} diff --git a/rust/automerge-c/src/sync.rs b/rust/automerge-c/src/sync.rs deleted file mode 100644 index fe0332a1..00000000 --- a/rust/automerge-c/src/sync.rs +++ /dev/null @@ -1,7 +0,0 @@ -mod have; -mod message; -mod state; - -pub(crate) use have::AMsyncHave; -pub(crate) use message::{to_sync_message, AMsyncMessage}; -pub(crate) use state::AMsyncState; diff --git a/rust/automerge-c/src/sync/have.rs b/rust/automerge-c/src/sync/have.rs deleted file mode 100644 index 37d2031f..00000000 --- a/rust/automerge-c/src/sync/have.rs +++ /dev/null @@ -1,42 +0,0 @@ -use automerge as am; - -use crate::result::{to_result, AMresult}; - -/// \struct AMsyncHave -/// \installed_headerfile -/// \brief A summary of the changes that the sender of a synchronization -/// message already has. 
-#[derive(Clone, Eq, PartialEq)] -pub struct AMsyncHave(am::sync::Have); - -impl AMsyncHave { - pub fn new(have: am::sync::Have) -> Self { - Self(have) - } -} - -impl AsRef for AMsyncHave { - fn as_ref(&self) -> &am::sync::Have { - &self.0 - } -} - -/// \memberof AMsyncHave -/// \brief Gets the heads of the sender. -/// -/// \param[in] sync_have A pointer to an `AMsyncHave` struct. -/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. -/// \pre \p sync_have `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// sync_have must be a valid pointer to an AMsyncHave -#[no_mangle] -pub unsafe extern "C" fn AMsyncHaveLastSync(sync_have: *const AMsyncHave) -> *mut AMresult { - to_result(match sync_have.as_ref() { - Some(sync_have) => sync_have.as_ref().last_sync.as_slice(), - None => Default::default(), - }) -} diff --git a/rust/automerge-c/src/sync/message.rs b/rust/automerge-c/src/sync/message.rs deleted file mode 100644 index bdb1db34..00000000 --- a/rust/automerge-c/src/sync/message.rs +++ /dev/null @@ -1,166 +0,0 @@ -use automerge as am; -use std::cell::RefCell; -use std::collections::BTreeMap; - -use crate::change::AMchange; -use crate::result::{to_result, AMresult}; -use crate::sync::have::AMsyncHave; - -macro_rules! to_sync_message { - ($handle:expr) => {{ - let handle = $handle.as_ref(); - match handle { - Some(b) => b, - None => return AMresult::error("Invalid `AMsyncMessage*`").into(), - } - }}; -} - -pub(crate) use to_sync_message; - -/// \struct AMsyncMessage -/// \installed_headerfile -/// \brief A synchronization message for a peer. 
-#[derive(PartialEq)] -pub struct AMsyncMessage { - body: am::sync::Message, - changes_storage: RefCell>, - haves_storage: RefCell>, -} - -impl AMsyncMessage { - pub fn new(message: am::sync::Message) -> Self { - Self { - body: message, - changes_storage: RefCell::new(BTreeMap::new()), - haves_storage: RefCell::new(BTreeMap::new()), - } - } -} - -impl AsRef for AMsyncMessage { - fn as_ref(&self) -> &am::sync::Message { - &self.body - } -} - -/// \memberof AMsyncMessage -/// \brief Gets the changes for the recipient to apply. -/// -/// \param[in] sync_message A pointer to an `AMsyncMessage` struct. -/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE` items. -/// \pre \p sync_message `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// sync_message must be a valid pointer to an AMsyncMessage -#[no_mangle] -pub unsafe extern "C" fn AMsyncMessageChanges(sync_message: *const AMsyncMessage) -> *mut AMresult { - to_result(match sync_message.as_ref() { - Some(sync_message) => sync_message.body.changes.as_slice(), - None => Default::default(), - }) -} - -/// \memberof AMsyncMessage -/// \brief Decodes an array of bytes into a synchronization message. -/// -/// \param[in] src A pointer to an array of bytes. -/// \param[in] count The count of bytes to decode from the array pointed to by -/// \p src. -/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_SYNC_MESSAGE` item. -/// \pre \p src `!= NULL` -/// \pre `sizeof(`\p src `) > 0` -/// \pre \p count `<= sizeof(`\p src `)` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-/// \internal -/// -/// # Safety -/// src must be a byte array of length `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMsyncMessageDecode(src: *const u8, count: usize) -> *mut AMresult { - let data = std::slice::from_raw_parts(src, count); - to_result(am::sync::Message::decode(data)) -} - -/// \memberof AMsyncMessage -/// \brief Encodes a synchronization message as an array of bytes. -/// -/// \param[in] sync_message A pointer to an `AMsyncMessage` struct. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_BYTES` item. -/// \pre \p sync_message `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// sync_message must be a valid pointer to an AMsyncMessage -#[no_mangle] -pub unsafe extern "C" fn AMsyncMessageEncode(sync_message: *const AMsyncMessage) -> *mut AMresult { - let sync_message = to_sync_message!(sync_message); - to_result(sync_message.as_ref().clone().encode()) -} - -/// \memberof AMsyncMessage -/// \brief Gets a summary of the changes that the sender already has. -/// -/// \param[in] sync_message A pointer to an `AMsyncMessage` struct. -/// \return A pointer to an `AMresult` struct with `AM_SYNC_HAVE` items. -/// \pre \p sync_message `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// sync_message must be a valid pointer to an AMsyncMessage -#[no_mangle] -pub unsafe extern "C" fn AMsyncMessageHaves(sync_message: *const AMsyncMessage) -> *mut AMresult { - to_result(match sync_message.as_ref() { - Some(sync_message) => sync_message.as_ref().have.as_slice(), - None => Default::default(), - }) -} - -/// \memberof AMsyncMessage -/// \brief Gets the heads of the sender. -/// -/// \param[in] sync_message A pointer to an `AMsyncMessage` struct. 
-/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. -/// \pre \p sync_message `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// sync_message must be a valid pointer to an AMsyncMessage -#[no_mangle] -pub unsafe extern "C" fn AMsyncMessageHeads(sync_message: *const AMsyncMessage) -> *mut AMresult { - to_result(match sync_message.as_ref() { - Some(sync_message) => sync_message.as_ref().heads.as_slice(), - None => Default::default(), - }) -} - -/// \memberof AMsyncMessage -/// \brief Gets the hashes of any changes that are being explicitly requested -/// by the recipient. -/// -/// \param[in] sync_message A pointer to an `AMsyncMessage` struct. -/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. -/// \pre \p sync_message `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// sync_message must be a valid pointer to an AMsyncMessage -#[no_mangle] -pub unsafe extern "C" fn AMsyncMessageNeeds(sync_message: *const AMsyncMessage) -> *mut AMresult { - to_result(match sync_message.as_ref() { - Some(sync_message) => sync_message.as_ref().need.as_slice(), - None => Default::default(), - }) -} diff --git a/rust/automerge-c/src/sync/state.rs b/rust/automerge-c/src/sync/state.rs deleted file mode 100644 index 1d85ed98..00000000 --- a/rust/automerge-c/src/sync/state.rs +++ /dev/null @@ -1,262 +0,0 @@ -use automerge as am; -use std::cell::RefCell; -use std::collections::BTreeMap; - -use crate::result::{to_result, AMresult}; -use crate::sync::have::AMsyncHave; - -macro_rules! 
to_sync_state { - ($handle:expr) => {{ - let handle = $handle.as_ref(); - match handle { - Some(b) => b, - None => return AMresult::error("Invalid `AMsyncState*`").into(), - } - }}; -} - -pub(crate) use to_sync_state; - -/// \struct AMsyncState -/// \installed_headerfile -/// \brief The state of synchronization with a peer. -#[derive(Eq, PartialEq)] -pub struct AMsyncState { - body: am::sync::State, - their_haves_storage: RefCell>, -} - -impl AMsyncState { - pub fn new(state: am::sync::State) -> Self { - Self { - body: state, - their_haves_storage: RefCell::new(BTreeMap::new()), - } - } -} - -impl AsMut for AMsyncState { - fn as_mut(&mut self) -> &mut am::sync::State { - &mut self.body - } -} - -impl AsRef for AMsyncState { - fn as_ref(&self) -> &am::sync::State { - &self.body - } -} - -impl From for *mut AMsyncState { - fn from(b: AMsyncState) -> Self { - Box::into_raw(Box::new(b)) - } -} - -/// \memberof AMsyncState -/// \brief Decodes an array of bytes into a synchronization state. -/// -/// \param[in] src A pointer to an array of bytes. -/// \param[in] count The count of bytes to decode from the array pointed to by -/// \p src. -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_SYNC_STATE` item. -/// \pre \p src `!= NULL` -/// \pre `sizeof(`\p src `) > 0` -/// \pre \p count `<= sizeof(`\p src `)` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// src must be a byte array of length `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMsyncStateDecode(src: *const u8, count: usize) -> *mut AMresult { - let data = std::slice::from_raw_parts(src, count); - to_result(am::sync::State::decode(data)) -} - -/// \memberof AMsyncState -/// \brief Encodes a synchronization state as an array of bytes. -/// -/// \param[in] sync_state A pointer to an `AMsyncState` struct. 
-/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_BYTE_SPAN` item. -/// \pre \p sync_state `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// sync_state must be a valid pointer to an AMsyncState -#[no_mangle] -pub unsafe extern "C" fn AMsyncStateEncode(sync_state: *const AMsyncState) -> *mut AMresult { - let sync_state = to_sync_state!(sync_state); - to_result(sync_state.as_ref().encode()) -} - -/// \memberof AMsyncState -/// \brief Tests the equality of two synchronization states. -/// -/// \param[in] sync_state1 A pointer to an `AMsyncState` struct. -/// \param[in] sync_state2 A pointer to an `AMsyncState` struct. -/// \return `true` if \p sync_state1 `==` \p sync_state2 and `false` otherwise. -/// \pre \p sync_state1 `!= NULL` -/// \pre \p sync_state2 `!= NULL` -/// \post `!(`\p sync_state1 `&&` \p sync_state2 `) -> false` -/// \internal -/// -/// #Safety -/// sync_state1 must be a valid pointer to an AMsyncState -/// sync_state2 must be a valid pointer to an AMsyncState -#[no_mangle] -pub unsafe extern "C" fn AMsyncStateEqual( - sync_state1: *const AMsyncState, - sync_state2: *const AMsyncState, -) -> bool { - match (sync_state1.as_ref(), sync_state2.as_ref()) { - (Some(sync_state1), Some(sync_state2)) => sync_state1.as_ref() == sync_state2.as_ref(), - (None, None) | (None, Some(_)) | (Some(_), None) => false, - } -} - -/// \memberof AMsyncState -/// \brief Allocates a new synchronization state and initializes it from -/// default values. -/// -/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_SYNC_STATE` item. -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. 
-#[no_mangle] -pub extern "C" fn AMsyncStateInit() -> *mut AMresult { - to_result(am::sync::State::new()) -} - -/// \memberof AMsyncState -/// \brief Gets the heads that are shared by both peers. -/// -/// \param[in] sync_state A pointer to an `AMsyncState` struct. -/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. -/// \pre \p sync_state `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// sync_state must be a valid pointer to an AMsyncState -#[no_mangle] -pub unsafe extern "C" fn AMsyncStateSharedHeads(sync_state: *const AMsyncState) -> *mut AMresult { - let sync_state = to_sync_state!(sync_state); - to_result(sync_state.as_ref().shared_heads.as_slice()) -} - -/// \memberof AMsyncState -/// \brief Gets the heads that were last sent by this peer. -/// -/// \param[in] sync_state A pointer to an `AMsyncState` struct. -/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. -/// \pre \p sync_state `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// sync_state must be a valid pointer to an AMsyncState -#[no_mangle] -pub unsafe extern "C" fn AMsyncStateLastSentHeads(sync_state: *const AMsyncState) -> *mut AMresult { - let sync_state = to_sync_state!(sync_state); - to_result(sync_state.as_ref().last_sent_heads.as_slice()) -} - -/// \memberof AMsyncState -/// \brief Gets a summary of the changes that the other peer already has. -/// -/// \param[in] sync_state A pointer to an `AMsyncState` struct. -/// \param[out] has_value A pointer to a boolean flag that is set to `true` if -/// the returned `AMitems` struct is relevant, `false` otherwise. -/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_SYNC_HAVE` items. 
-/// \pre \p sync_state `!= NULL` -/// \pre \p has_value `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -//// \internal -/// -/// # Safety -/// sync_state must be a valid pointer to an AMsyncState -/// has_value must be a valid pointer to a bool. -#[no_mangle] -pub unsafe extern "C" fn AMsyncStateTheirHaves( - sync_state: *const AMsyncState, - has_value: *mut bool, -) -> *mut AMresult { - if let Some(sync_state) = sync_state.as_ref() { - if let Some(haves) = &sync_state.as_ref().their_have { - *has_value = true; - return to_result(haves.as_slice()); - } - }; - *has_value = false; - to_result(Vec::::new()) -} - -/// \memberof AMsyncState -/// \brief Gets the heads that were sent by the other peer. -/// -/// \param[in] sync_state A pointer to an `AMsyncState` struct. -/// \param[out] has_value A pointer to a boolean flag that is set to `true` if -/// the returned `AMitems` struct is relevant, `false` -/// otherwise. -/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. -/// \pre \p sync_state `!= NULL` -/// \pre \p has_value `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// sync_state must be a valid pointer to an AMsyncState -/// has_value must be a valid pointer to a bool -#[no_mangle] -pub unsafe extern "C" fn AMsyncStateTheirHeads( - sync_state: *const AMsyncState, - has_value: *mut bool, -) -> *mut AMresult { - if let Some(sync_state) = sync_state.as_ref() { - if let Some(change_hashes) = &sync_state.as_ref().their_heads { - *has_value = true; - return to_result(change_hashes.as_slice()); - } - }; - *has_value = false; - to_result(Vec::::new()) -} - -/// \memberof AMsyncState -/// \brief Gets the needs that were sent by the other peer. -/// -/// \param[in] sync_state A pointer to an `AMsyncState` struct. 
-/// \param[out] has_value A pointer to a boolean flag that is set to `true` if -/// the returned `AMitems` struct is relevant, `false` -/// otherwise. -/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. -/// \pre \p sync_state `!= NULL` -/// \pre \p has_value `!= NULL` -/// \warning The returned `AMresult` struct pointer must be passed to -/// `AMresultFree()` in order to avoid a memory leak. -/// \internal -/// -/// # Safety -/// sync_state must be a valid pointer to an AMsyncState -/// has_value must be a valid pointer to a bool -#[no_mangle] -pub unsafe extern "C" fn AMsyncStateTheirNeeds( - sync_state: *const AMsyncState, - has_value: *mut bool, -) -> *mut AMresult { - if let Some(sync_state) = sync_state.as_ref() { - if let Some(change_hashes) = &sync_state.as_ref().their_need { - *has_value = true; - return to_result(change_hashes.as_slice()); - } - }; - *has_value = false; - to_result(Vec::::new()) -} diff --git a/rust/automerge-c/src/utils/result.c b/rust/automerge-c/src/utils/result.c deleted file mode 100644 index f922ca31..00000000 --- a/rust/automerge-c/src/utils/result.c +++ /dev/null @@ -1,33 +0,0 @@ -#include - -#include - -AMresult* AMresultFrom(int count, ...) 
{ - AMresult* result = NULL; - bool is_ok = true; - va_list args; - va_start(args, count); - for (int i = 0; i != count; ++i) { - AMresult* src = va_arg(args, AMresult*); - AMresult* dest = result; - is_ok = (AMresultStatus(src) == AM_STATUS_OK); - if (is_ok) { - if (dest) { - result = AMresultCat(dest, src); - is_ok = (AMresultStatus(result) == AM_STATUS_OK); - AMresultFree(dest); - AMresultFree(src); - } else { - result = src; - } - } else { - AMresultFree(src); - } - } - va_end(args); - if (!is_ok) { - AMresultFree(result); - result = NULL; - } - return result; -} diff --git a/rust/automerge-c/src/utils/stack.c b/rust/automerge-c/src/utils/stack.c deleted file mode 100644 index 2cad7c5c..00000000 --- a/rust/automerge-c/src/utils/stack.c +++ /dev/null @@ -1,106 +0,0 @@ -#include -#include - -#include -#include - -void AMstackFree(AMstack** stack) { - if (stack) { - while (*stack) { - AMresultFree(AMstackPop(stack, NULL)); - } - } -} - -AMresult* AMstackPop(AMstack** stack, const AMresult* result) { - if (!stack) { - return NULL; - } - AMstack** prev = stack; - if (result) { - while (*prev && ((*prev)->result != result)) { - *prev = (*prev)->prev; - } - } - if (!*prev) { - return NULL; - } - AMstack* target = *prev; - *prev = target->prev; - AMresult* popped = target->result; - free(target); - return popped; -} - -AMresult* AMstackResult(AMstack** stack, AMresult* result, AMstackCallback callback, void* data) { - if (!stack) { - if (callback) { - /* Create a local stack so that the callback can still examine the - * result. */ - AMstack node = {.result = result, .prev = NULL}; - AMstack* stack = &node; - callback(&stack, data); - } else { - /* \note There is no reason to call this function when both the - * stack and the callback are null. */ - fprintf(stderr, "ERROR: NULL AMstackCallback!\n"); - } - /* \note Nothing can be returned without a stack regardless of - * whether or not the callback validated the result. 
*/ - AMresultFree(result); - return NULL; - } - /* Always push the result onto the stack, even if it's null, so that the - * callback can examine it. */ - AMstack* next = calloc(1, sizeof(AMstack)); - *next = (AMstack){.result = result, .prev = *stack}; - AMstack* top = next; - *stack = top; - if (callback) { - if (!callback(stack, data)) { - /* The result didn't pass the callback's examination. */ - return NULL; - } - } else { - /* Report an obvious error. */ - if (result) { - AMbyteSpan const err_msg = AMresultError(result); - if (err_msg.src && err_msg.count) { - /* \note The callback may be null because the result is supposed - * to be examined externally so return it despite an - * error. */ - char* const cstr = AMstrdup(err_msg, NULL); - fprintf(stderr, "WARNING: %s.\n", cstr); - free(cstr); - } - } else { - /* \note There's no reason to call this function when both the - * result and the callback are null. */ - fprintf(stderr, "ERROR: NULL AMresult*!\n"); - return NULL; - } - } - return result; -} - -AMitem* AMstackItem(AMstack** stack, AMresult* result, AMstackCallback callback, void* data) { - AMitems items = AMstackItems(stack, result, callback, data); - return AMitemsNext(&items, 1); -} - -AMitems AMstackItems(AMstack** stack, AMresult* result, AMstackCallback callback, void* data) { - return (AMstackResult(stack, result, callback, data)) ? 
AMresultItems(result) : (AMitems){0}; -} - -size_t AMstackSize(AMstack const* const stack) { - if (!stack) { - return 0; - } - size_t count = 0; - AMstack const* prev = stack; - while (prev) { - ++count; - prev = prev->prev; - } - return count; -} \ No newline at end of file diff --git a/rust/automerge-c/src/utils/stack_callback_data.c b/rust/automerge-c/src/utils/stack_callback_data.c deleted file mode 100644 index f1e988d8..00000000 --- a/rust/automerge-c/src/utils/stack_callback_data.c +++ /dev/null @@ -1,9 +0,0 @@ -#include - -#include - -AMstackCallbackData* AMstackCallbackDataInit(AMvalType const bitmask, char const* const file, int const line) { - AMstackCallbackData* data = malloc(sizeof(AMstackCallbackData)); - *data = (AMstackCallbackData){.bitmask = bitmask, .file = file, .line = line}; - return data; -} diff --git a/rust/automerge-c/src/utils/string.c b/rust/automerge-c/src/utils/string.c deleted file mode 100644 index a0d1ebe3..00000000 --- a/rust/automerge-c/src/utils/string.c +++ /dev/null @@ -1,46 +0,0 @@ -#include -#include - -#include - -char* AMstrdup(AMbyteSpan const str, char const* nul) { - if (!str.src) { - return NULL; - } else if (!str.count) { - return strdup(""); - } - nul = (nul) ? 
nul : "\\0"; - size_t const nul_len = strlen(nul); - char* dup = NULL; - size_t dup_len = 0; - char const* begin = str.src; - char const* end = begin; - for (size_t i = 0; i != str.count; ++i, ++end) { - if (!*end) { - size_t const len = end - begin; - size_t const alloc_len = dup_len + len + nul_len; - if (dup) { - dup = realloc(dup, alloc_len + 1); - } else { - dup = malloc(alloc_len + 1); - } - memcpy(dup + dup_len, begin, len); - memcpy(dup + dup_len + len, nul, nul_len); - dup[alloc_len] = '\0'; - begin = end + 1; - dup_len = alloc_len; - } - } - if (begin != end) { - size_t const len = end - begin; - size_t const alloc_len = dup_len + len; - if (dup) { - dup = realloc(dup, alloc_len + 1); - } else { - dup = malloc(alloc_len + 1); - } - memcpy(dup + dup_len, begin, len); - dup[alloc_len] = '\0'; - } - return dup; -} diff --git a/rust/automerge-c/test/CMakeLists.txt b/rust/automerge-c/test/CMakeLists.txt deleted file mode 100644 index 1759f140..00000000 --- a/rust/automerge-c/test/CMakeLists.txt +++ /dev/null @@ -1,55 +0,0 @@ -find_package(cmocka CONFIG REQUIRED) - -add_executable( - ${LIBRARY_NAME}_test - actor_id_tests.c - base_state.c - byte_span_tests.c - cmocka_utils.c - enum_string_tests.c - doc_state.c - doc_tests.c - item_tests.c - list_tests.c - macro_utils.c - main.c - map_tests.c - str_utils.c - ported_wasm/basic_tests.c - ported_wasm/suite.c - ported_wasm/sync_tests.c -) - -set_target_properties(${LIBRARY_NAME}_test PROPERTIES LINKER_LANGUAGE C) - -if(WIN32) - set(CMOCKA "cmocka::cmocka") -else() - set(CMOCKA "cmocka") -endif() - -target_link_libraries(${LIBRARY_NAME}_test PRIVATE ${CMOCKA} ${LIBRARY_NAME}) - -add_dependencies(${LIBRARY_NAME}_test ${BINDINGS_NAME}_artifacts) - -if(BUILD_SHARED_LIBS AND WIN32) - add_custom_command( - TARGET ${LIBRARY_NAME}_test - POST_BUILD - COMMAND ${CMAKE_COMMAND} -E copy_if_different $ $ - COMMENT "Copying the DLL into the tests directory..." 
- VERBATIM - ) -endif() - -add_test(NAME ${LIBRARY_NAME}_test COMMAND ${LIBRARY_NAME}_test) - -add_custom_command( - TARGET ${LIBRARY_NAME}_test - POST_BUILD - COMMAND - ${CMAKE_CTEST_COMMAND} --config $ --output-on-failure - COMMENT - "Running the test(s)..." - VERBATIM -) diff --git a/rust/automerge-c/test/actor_id_tests.c b/rust/automerge-c/test/actor_id_tests.c deleted file mode 100644 index 918d6213..00000000 --- a/rust/automerge-c/test/actor_id_tests.c +++ /dev/null @@ -1,140 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include - -/* third-party */ -#include - -/* local */ -#include -#include "cmocka_utils.h" -#include "str_utils.h" - -/** - * \brief State for a group of cmocka test cases. - */ -typedef struct { - /** An actor ID as an array of bytes. */ - uint8_t* src; - /** The count of bytes in \p src. */ - size_t count; - /** A stack of results. */ - AMstack* stack; - /** An actor ID as a hexadecimal string. */ - AMbyteSpan str; -} DocState; - -static int group_setup(void** state) { - DocState* doc_state = test_calloc(1, sizeof(DocState)); - doc_state->str = AMstr("000102030405060708090a0b0c0d0e0f"); - doc_state->count = doc_state->str.count / 2; - doc_state->src = test_calloc(doc_state->count, sizeof(uint8_t)); - hex_to_bytes(doc_state->str.src, doc_state->src, doc_state->count); - *state = doc_state; - return 0; -} - -static int group_teardown(void** state) { - DocState* doc_state = *state; - test_free(doc_state->src); - AMstackFree(&doc_state->stack); - test_free(doc_state); - return 0; -} - -static void test_AMactorIdFromBytes(void** state) { - DocState* doc_state = *state; - AMstack** stack_ptr = &doc_state->stack; - /* Non-empty string. 
*/ - AMresult* result = AMstackResult(stack_ptr, AMactorIdFromBytes(doc_state->src, doc_state->count), NULL, NULL); - if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg_view("%s", AMresultError(result)); - } - assert_int_equal(AMresultSize(result), 1); - AMitem* const item = AMresultItem(result); - assert_int_equal(AMitemValType(item), AM_VAL_TYPE_ACTOR_ID); - AMactorId const* actor_id; - assert_true(AMitemToActorId(item, &actor_id)); - AMbyteSpan const bytes = AMactorIdBytes(actor_id); - assert_int_equal(bytes.count, doc_state->count); - assert_memory_equal(bytes.src, doc_state->src, bytes.count); - /* Empty array. */ - /** \todo Find out if this is intentionally allowed. */ - result = AMstackResult(stack_ptr, AMactorIdFromBytes(doc_state->src, 0), NULL, NULL); - if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg_view("%s", AMresultError(result)); - } - /* NULL array. */ - result = AMstackResult(stack_ptr, AMactorIdFromBytes(NULL, doc_state->count), NULL, NULL); - if (AMresultStatus(result) == AM_STATUS_OK) { - fail_msg("AMactorId from NULL."); - } -} - -static void test_AMactorIdFromStr(void** state) { - DocState* doc_state = *state; - AMstack** stack_ptr = &doc_state->stack; - AMresult* result = AMstackResult(stack_ptr, AMactorIdFromStr(doc_state->str), NULL, NULL); - if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg_view("%s", AMresultError(result)); - } - assert_int_equal(AMresultSize(result), 1); - AMitem* const item = AMresultItem(result); - assert_int_equal(AMitemValType(item), AM_VAL_TYPE_ACTOR_ID); - /* The hexadecimal string should've been decoded as identical bytes. */ - AMactorId const* actor_id; - assert_true(AMitemToActorId(item, &actor_id)); - AMbyteSpan const bytes = AMactorIdBytes(actor_id); - assert_int_equal(bytes.count, doc_state->count); - assert_memory_equal(bytes.src, doc_state->src, bytes.count); - /* The bytes should've been encoded as an identical hexadecimal string. 
*/ - assert_true(AMitemToActorId(item, &actor_id)); - AMbyteSpan const str = AMactorIdStr(actor_id); - assert_int_equal(str.count, doc_state->str.count); - assert_memory_equal(str.src, doc_state->str.src, str.count); -} - -static void test_AMactorIdInit(void** state) { - DocState* doc_state = *state; - AMstack** stack_ptr = &doc_state->stack; - AMresult* prior_result = NULL; - AMbyteSpan prior_bytes = {NULL, 0}; - AMbyteSpan prior_str = {NULL, 0}; - for (size_t i = 0; i != 11; ++i) { - AMresult* result = AMstackResult(stack_ptr, AMactorIdInit(), NULL, NULL); - if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg_view("%s", AMresultError(result)); - } - assert_int_equal(AMresultSize(result), 1); - AMitem* const item = AMresultItem(result); - assert_int_equal(AMitemValType(item), AM_VAL_TYPE_ACTOR_ID); - AMactorId const* actor_id; - assert_true(AMitemToActorId(item, &actor_id)); - AMbyteSpan const bytes = AMactorIdBytes(actor_id); - assert_true(AMitemToActorId(item, &actor_id)); - AMbyteSpan const str = AMactorIdStr(actor_id); - if (prior_result) { - size_t const max_byte_count = fmax(bytes.count, prior_bytes.count); - assert_memory_not_equal(bytes.src, prior_bytes.src, max_byte_count); - size_t const max_char_count = fmax(str.count, prior_str.count); - assert_memory_not_equal(str.src, prior_str.src, max_char_count); - } - prior_result = result; - prior_bytes = bytes; - prior_str = str; - } -} - -int run_actor_id_tests(void) { - const struct CMUnitTest tests[] = { - cmocka_unit_test(test_AMactorIdFromBytes), - cmocka_unit_test(test_AMactorIdFromStr), - cmocka_unit_test(test_AMactorIdInit), - }; - - return cmocka_run_group_tests(tests, group_setup, group_teardown); -} diff --git a/rust/automerge-c/test/base_state.c b/rust/automerge-c/test/base_state.c deleted file mode 100644 index 53325a99..00000000 --- a/rust/automerge-c/test/base_state.c +++ /dev/null @@ -1,17 +0,0 @@ -#include - -/* local */ -#include "base_state.h" - -int setup_base(void** state) { - 
BaseState* base_state = calloc(1, sizeof(BaseState)); - *state = base_state; - return 0; -} - -int teardown_base(void** state) { - BaseState* base_state = *state; - AMstackFree(&base_state->stack); - free(base_state); - return 0; -} diff --git a/rust/automerge-c/test/base_state.h b/rust/automerge-c/test/base_state.h deleted file mode 100644 index 3c4ff01b..00000000 --- a/rust/automerge-c/test/base_state.h +++ /dev/null @@ -1,39 +0,0 @@ -#ifndef TESTS_BASE_STATE_H -#define TESTS_BASE_STATE_H - -#include - -/* local */ -#include -#include - -/** - * \struct BaseState - * \brief The shared state for one or more cmocka test cases. - */ -typedef struct { - /** A stack of results. */ - AMstack* stack; -} BaseState; - -/** - * \memberof BaseState - * \brief Sets up the shared state for one or more cmocka test cases. - * - * \param[in,out] state A pointer to a pointer to a `BaseState` struct. - * \pre \p state `!= NULL`. - * \warning The `BaseState` struct returned through \p state must be - * passed to `teardown_base()` in order to avoid a memory leak. - */ -int setup_base(void** state); - -/** - * \memberof BaseState - * \brief Tears down the shared state for one or more cmocka test cases. - * - * \param[in] state A pointer to a pointer to a `BaseState` struct. - * \pre \p state `!= NULL`. 
- */ -int teardown_base(void** state); - -#endif /* TESTS_BASE_STATE_H */ diff --git a/rust/automerge-c/test/byte_span_tests.c b/rust/automerge-c/test/byte_span_tests.c deleted file mode 100644 index 0b1c86a1..00000000 --- a/rust/automerge-c/test/byte_span_tests.c +++ /dev/null @@ -1,119 +0,0 @@ -#include -#include -#include -#include -#include -#include - -/* third-party */ -#include - -/* local */ -#include -#include - -static void test_AMbytes(void** state) { - static char const DATA[] = {0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf}; - - AMbyteSpan bytes = AMbytes(DATA, sizeof(DATA)); - assert_int_equal(bytes.count, sizeof(DATA)); - assert_memory_equal(bytes.src, DATA, bytes.count); - assert_ptr_equal(bytes.src, DATA); - /* Empty view */ - bytes = AMbytes(DATA, 0); - assert_int_equal(bytes.count, 0); - assert_ptr_equal(bytes.src, DATA); - /* Invalid array */ - bytes = AMbytes(NULL, SIZE_MAX); - assert_int_not_equal(bytes.count, SIZE_MAX); - assert_int_equal(bytes.count, 0); - assert_ptr_equal(bytes.src, NULL); -} - -static void test_AMstr(void** state) { - AMbyteSpan str = AMstr("abcdefghijkl"); - assert_int_equal(str.count, strlen("abcdefghijkl")); - assert_memory_equal(str.src, "abcdefghijkl", str.count); - /* Empty string */ - static char const* const EMPTY = ""; - - str = AMstr(EMPTY); - assert_int_equal(str.count, 0); - assert_ptr_equal(str.src, EMPTY); - /* Invalid string */ - str = AMstr(NULL); - assert_int_equal(str.count, 0); - assert_ptr_equal(str.src, NULL); -} - -static void test_AMstrCmp(void** state) { - /* Length ordering */ - assert_int_equal(AMstrCmp(AMstr("abcdef"), AMstr("abcdefghijkl")), -1); - assert_int_equal(AMstrCmp(AMstr("abcdefghijkl"), AMstr("abcdefghijkl")), 0); - assert_int_equal(AMstrCmp(AMstr("abcdefghijkl"), AMstr("abcdef")), 1); - /* Lexicographical ordering */ - assert_int_equal(AMstrCmp(AMstr("abcdef"), AMstr("ghijkl")), -1); - assert_int_equal(AMstrCmp(AMstr("ghijkl"), AMstr("abcdef")), 1); - 
/* Case ordering */ - assert_int_equal(AMstrCmp(AMstr("ABCDEFGHIJKL"), AMstr("abcdefghijkl")), -1); - assert_int_equal(AMstrCmp(AMstr("ABCDEFGHIJKL"), AMstr("ABCDEFGHIJKL")), 0); - assert_int_equal(AMstrCmp(AMstr("abcdefghijkl"), AMstr("ABCDEFGHIJKL")), 1); - assert_int_equal(AMstrCmp(AMstr("ABCDEFGHIJKL"), AMstr("abcdef")), -1); - assert_int_equal(AMstrCmp(AMstr("abcdef"), AMstr("ABCDEFGHIJKL")), 1); - assert_int_equal(AMstrCmp(AMstr("GHIJKL"), AMstr("abcdef")), -1); - assert_int_equal(AMstrCmp(AMstr("abcdef"), AMstr("GHIJKL")), 1); - /* NUL character inclusion */ - static char const SRC[] = {'a', 'b', 'c', 'd', 'e', 'f', '\0', 'g', 'h', 'i', 'j', 'k', 'l'}; - static AMbyteSpan const NUL_STR = {.src = SRC, .count = 13}; - - assert_int_equal(AMstrCmp(AMstr("abcdef"), NUL_STR), -1); - assert_int_equal(AMstrCmp(NUL_STR, NUL_STR), 0); - assert_int_equal(AMstrCmp(NUL_STR, AMstr("abcdef")), 1); - /* Empty string */ - assert_int_equal(AMstrCmp(AMstr(""), AMstr("abcdefghijkl")), -1); - assert_int_equal(AMstrCmp(AMstr(""), AMstr("")), 0); - assert_int_equal(AMstrCmp(AMstr("abcdefghijkl"), AMstr("")), 1); - /* Invalid string */ - assert_int_equal(AMstrCmp(AMstr(NULL), AMstr("abcdefghijkl")), -1); - assert_int_equal(AMstrCmp(AMstr(NULL), AMstr(NULL)), 0); - assert_int_equal(AMstrCmp(AMstr("abcdefghijkl"), AMstr(NULL)), 1); -} - -static void test_AMstrdup(void** state) { - static char const SRC[] = {'a', 'b', 'c', '\0', 'd', 'e', 'f', '\0', 'g', 'h', 'i', '\0', 'j', 'k', 'l'}; - static AMbyteSpan const NUL_STR = {.src = SRC, .count = 15}; - - /* Default substitution ("\\0") for NUL */ - char* dup = AMstrdup(NUL_STR, NULL); - assert_int_equal(strlen(dup), 18); - assert_string_equal(dup, "abc\\0def\\0ghi\\0jkl"); - free(dup); - /* Arbitrary substitution for NUL */ - dup = AMstrdup(NUL_STR, ":-O"); - assert_int_equal(strlen(dup), 21); - assert_string_equal(dup, "abc:-Odef:-Oghi:-Ojkl"); - free(dup); - /* Empty substitution for NUL */ - dup = AMstrdup(NUL_STR, ""); - 
assert_int_equal(strlen(dup), 12); - assert_string_equal(dup, "abcdefghijkl"); - free(dup); - /* Empty string */ - dup = AMstrdup(AMstr(""), NULL); - assert_int_equal(strlen(dup), 0); - assert_string_equal(dup, ""); - free(dup); - /* Invalid string */ - assert_null(AMstrdup(AMstr(NULL), NULL)); -} - -int run_byte_span_tests(void) { - const struct CMUnitTest tests[] = { - cmocka_unit_test(test_AMbytes), - cmocka_unit_test(test_AMstr), - cmocka_unit_test(test_AMstrCmp), - cmocka_unit_test(test_AMstrdup), - }; - - return cmocka_run_group_tests(tests, NULL, NULL); -} diff --git a/rust/automerge-c/test/cmocka_utils.c b/rust/automerge-c/test/cmocka_utils.c deleted file mode 100644 index 37c57fb1..00000000 --- a/rust/automerge-c/test/cmocka_utils.c +++ /dev/null @@ -1,88 +0,0 @@ -#include -#include -#include -#include - -/* third-party */ -#include -#include -#include -#include - -/* local */ -#include "cmocka_utils.h" - -/** - * \brief Assert that the given expression is true and report failure in terms - * of a line number within a file. - * - * \param[in] c An expression. - * \param[in] file A file's full path string. - * \param[in] line A line number. - */ -#define assert_true_where(c, file, line) _assert_true(cast_ptr_to_largest_integral_type(c), #c, file, line) - -/** - * \brief Assert that the given pointer is non-NULL and report failure in terms - * of a line number within a file. - * - * \param[in] c An expression. - * \param[in] file A file's full path string. - * \param[in] line A line number. - */ -#define assert_non_null_where(c, file, line) assert_true_where(c, file, line) - -/** - * \brief Forces the test to fail immediately and quit, printing the reason in - * terms of a line number within a file. - * - * \param[in] msg A message string into which \p str is interpolated. - * \param[in] str An owned string. - * \param[in] file A file's full path string. - * \param[in] line A line number. 
- */ -#define fail_msg_where(msg, str, file, line) \ - do { \ - print_error("ERROR: " msg "\n", str); \ - _fail(file, line); \ - } while (0) - -/** - * \brief Forces the test to fail immediately and quit, printing the reason in - * terms of a line number within a file. - * - * \param[in] msg A message string into which \p view.src is interpolated. - * \param[in] view A UTF-8 string view as an `AMbyteSpan` struct. - * \param[in] file A file's full path string. - * \param[in] line A line number. - */ -#define fail_msg_view_where(msg, view, file, line) \ - do { \ - char* const str = AMstrdup(view, NULL); \ - print_error("ERROR: " msg "\n", str); \ - free(str); \ - _fail(file, line); \ - } while (0) - -bool cmocka_cb(AMstack** stack, void* data) { - assert_non_null(data); - AMstackCallbackData* const sc_data = (AMstackCallbackData*)data; - assert_non_null_where(stack, sc_data->file, sc_data->line); - assert_non_null_where(*stack, sc_data->file, sc_data->line); - assert_non_null_where((*stack)->result, sc_data->file, sc_data->line); - if (AMresultStatus((*stack)->result) != AM_STATUS_OK) { - fail_msg_view_where("%s", AMresultError((*stack)->result), sc_data->file, sc_data->line); - return false; - } - /* Test that the types of all item values are members of the mask. 
*/ - AMitems items = AMresultItems((*stack)->result); - AMitem* item = NULL; - while ((item = AMitemsNext(&items, 1)) != NULL) { - AMvalType const tag = AMitemValType(item); - if (!(tag & sc_data->bitmask)) { - fail_msg_where("Unexpected value type `%s`.", AMvalTypeToString(tag), sc_data->file, sc_data->line); - return false; - } - } - return true; -} diff --git a/rust/automerge-c/test/cmocka_utils.h b/rust/automerge-c/test/cmocka_utils.h deleted file mode 100644 index b6611bcc..00000000 --- a/rust/automerge-c/test/cmocka_utils.h +++ /dev/null @@ -1,42 +0,0 @@ -#ifndef TESTS_CMOCKA_UTILS_H -#define TESTS_CMOCKA_UTILS_H - -#include -#include - -/* third-party */ -#include -#include - -/* local */ -#include "base_state.h" - -/** - * \brief Forces the test to fail immediately and quit, printing the reason. - * - * \param[in] msg A message string into which \p view.src is interpolated. - * \param[in] view A UTF-8 string view as an `AMbyteSpan` struct. - */ -#define fail_msg_view(msg, view) \ - do { \ - char* const c_str = AMstrdup(view, NULL); \ - print_error("ERROR: " msg "\n", c_str); \ - free(c_str); \ - fail(); \ - } while (0) - -/** - * \brief Validates the top result in a stack based upon the parameters - * specified within the given data structure and reports violations - * using cmocka assertions. - * - * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. - * \param[in] data A pointer to an owned `AMpushData` struct. - * \return `true` if the top `AMresult` struct in \p stack is valid, `false` - * otherwise. - * \pre \p stack `!= NULL`. - * \pre \p data `!= NULL`. 
- */ -bool cmocka_cb(AMstack** stack, void* data); - -#endif /* TESTS_CMOCKA_UTILS_H */ diff --git a/rust/automerge-c/test/doc_state.c b/rust/automerge-c/test/doc_state.c deleted file mode 100644 index 3cbece50..00000000 --- a/rust/automerge-c/test/doc_state.c +++ /dev/null @@ -1,27 +0,0 @@ -#include -#include -#include - -/* third-party */ -#include - -/* local */ -#include -#include "cmocka_utils.h" -#include "doc_state.h" - -int setup_doc(void** state) { - DocState* doc_state = test_calloc(1, sizeof(DocState)); - setup_base((void**)&doc_state->base_state); - AMitemToDoc(AMstackItem(&doc_state->base_state->stack, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), - &doc_state->doc); - *state = doc_state; - return 0; -} - -int teardown_doc(void** state) { - DocState* doc_state = *state; - teardown_base((void**)&doc_state->base_state); - test_free(doc_state); - return 0; -} diff --git a/rust/automerge-c/test/doc_state.h b/rust/automerge-c/test/doc_state.h deleted file mode 100644 index 525a49fa..00000000 --- a/rust/automerge-c/test/doc_state.h +++ /dev/null @@ -1,17 +0,0 @@ -#ifndef TESTS_DOC_STATE_H -#define TESTS_DOC_STATE_H - -/* local */ -#include -#include "base_state.h" - -typedef struct { - BaseState* base_state; - AMdoc* doc; -} DocState; - -int setup_doc(void** state); - -int teardown_doc(void** state); - -#endif /* TESTS_DOC_STATE_H */ diff --git a/rust/automerge-c/test/doc_tests.c b/rust/automerge-c/test/doc_tests.c deleted file mode 100644 index c1d21928..00000000 --- a/rust/automerge-c/test/doc_tests.c +++ /dev/null @@ -1,231 +0,0 @@ -#include -#include -#include -#include -#include - -/* third-party */ -#include - -/* local */ -#include -#include -#include "base_state.h" -#include "cmocka_utils.h" -#include "doc_state.h" -#include "str_utils.h" - -typedef struct { - DocState* doc_state; - AMbyteSpan actor_id_str; - uint8_t* actor_id_bytes; - size_t actor_id_size; -} TestState; - -static int setup(void** state) { - TestState* test_state = 
test_calloc(1, sizeof(TestState)); - setup_doc((void**)&test_state->doc_state); - test_state->actor_id_str.src = "000102030405060708090a0b0c0d0e0f"; - test_state->actor_id_str.count = strlen(test_state->actor_id_str.src); - test_state->actor_id_size = test_state->actor_id_str.count / 2; - test_state->actor_id_bytes = test_malloc(test_state->actor_id_size); - hex_to_bytes(test_state->actor_id_str.src, test_state->actor_id_bytes, test_state->actor_id_size); - *state = test_state; - return 0; -} - -static int teardown(void** state) { - TestState* test_state = *state; - teardown_doc((void**)&test_state->doc_state); - test_free(test_state->actor_id_bytes); - test_free(test_state); - return 0; -} - -static void test_AMkeys_empty(void** state) { - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->doc_state->base_state->stack; - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - AMitems forward = AMstackItems(stack_ptr, AMkeys(doc, AM_ROOT, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - assert_int_equal(AMitemsSize(&forward), 0); - AMitems reverse = AMitemsReversed(&forward); - assert_int_equal(AMitemsSize(&reverse), 0); - assert_null(AMitemsNext(&forward, 1)); - assert_null(AMitemsPrev(&forward, 1)); - assert_null(AMitemsNext(&reverse, 1)); - assert_null(AMitemsPrev(&reverse, 1)); -} - -static void test_AMkeys_list(void** state) { - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->doc_state->base_state->stack; - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - AMobjId const* const list = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - AMstackItem(NULL, AMlistPutInt(doc, list, 0, true, 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMlistPutInt(doc, list, 1, true, 0), 
cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMlistPutInt(doc, list, 2, true, 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMitems forward = AMstackItems(stack_ptr, AMkeys(doc, list, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - assert_int_equal(AMitemsSize(&forward), 3); - AMitems reverse = AMitemsReversed(&forward); - assert_int_equal(AMitemsSize(&reverse), 3); - /* Forward iterator forward. */ - AMbyteSpan str; - assert_true(AMitemToStr(AMitemsNext(&forward, 1), &str)); - assert_ptr_equal(strstr(str.src, "2@"), str.src); - assert_true(AMitemToStr(AMitemsNext(&forward, 1), &str)); - assert_ptr_equal(strstr(str.src, "3@"), str.src); - assert_true(AMitemToStr(AMitemsNext(&forward, 1), &str)); - assert_ptr_equal(strstr(str.src, "4@"), str.src); - assert_null(AMitemsNext(&forward, 1)); - // /* Forward iterator reverse. */ - assert_true(AMitemToStr(AMitemsPrev(&forward, 1), &str)); - assert_ptr_equal(strstr(str.src, "4@"), str.src); - assert_true(AMitemToStr(AMitemsPrev(&forward, 1), &str)); - assert_ptr_equal(strstr(str.src, "3@"), str.src); - assert_true(AMitemToStr(AMitemsPrev(&forward, 1), &str)); - assert_ptr_equal(strstr(str.src, "2@"), str.src); - assert_null(AMitemsPrev(&forward, 1)); - /* Reverse iterator forward. */ - assert_true(AMitemToStr(AMitemsNext(&reverse, 1), &str)); - assert_ptr_equal(strstr(str.src, "4@"), str.src); - assert_true(AMitemToStr(AMitemsNext(&reverse, 1), &str)); - assert_ptr_equal(strstr(str.src, "3@"), str.src); - assert_true(AMitemToStr(AMitemsNext(&reverse, 1), &str)); - assert_ptr_equal(strstr(str.src, "2@"), str.src); - assert_null(AMitemsNext(&reverse, 1)); - /* Reverse iterator reverse. 
*/ - assert_true(AMitemToStr(AMitemsPrev(&reverse, 1), &str)); - assert_ptr_equal(strstr(str.src, "2@"), str.src); - assert_true(AMitemToStr(AMitemsPrev(&reverse, 1), &str)); - assert_ptr_equal(strstr(str.src, "3@"), str.src); - assert_true(AMitemToStr(AMitemsPrev(&reverse, 1), &str)); - assert_ptr_equal(strstr(str.src, "4@"), str.src); - assert_null(AMitemsPrev(&reverse, 1)); -} - -static void test_AMkeys_map(void** state) { - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->doc_state->base_state->stack; - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("one"), 1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("two"), 2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("three"), 3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMitems forward = AMstackItems(stack_ptr, AMkeys(doc, AM_ROOT, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - assert_int_equal(AMitemsSize(&forward), 3); - AMitems reverse = AMitemsReversed(&forward); - assert_int_equal(AMitemsSize(&reverse), 3); - /* Forward iterator forward. */ - AMbyteSpan str; - assert_true(AMitemToStr(AMitemsNext(&forward, 1), &str)); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "one", str.count); - assert_true(AMitemToStr(AMitemsNext(&forward, 1), &str)); - assert_int_equal(str.count, 5); - assert_memory_equal(str.src, "three", str.count); - assert_true(AMitemToStr(AMitemsNext(&forward, 1), &str)); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "two", str.count); - assert_null(AMitemsNext(&forward, 1)); - /* Forward iterator reverse. 
*/ - assert_true(AMitemToStr(AMitemsPrev(&forward, 1), &str)); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "two", str.count); - assert_true(AMitemToStr(AMitemsPrev(&forward, 1), &str)); - assert_int_equal(str.count, 5); - assert_memory_equal(str.src, "three", str.count); - assert_true(AMitemToStr(AMitemsPrev(&forward, 1), &str)); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "one", str.count); - assert_null(AMitemsPrev(&forward, 1)); - /* Reverse iterator forward. */ - assert_true(AMitemToStr(AMitemsNext(&reverse, 1), &str)); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "two", str.count); - assert_true(AMitemToStr(AMitemsNext(&reverse, 1), &str)); - assert_int_equal(str.count, 5); - assert_memory_equal(str.src, "three", str.count); - assert_true(AMitemToStr(AMitemsNext(&reverse, 1), &str)); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "one", str.count); - assert_null(AMitemsNext(&reverse, 1)); - /* Reverse iterator reverse. 
*/ - assert_true(AMitemToStr(AMitemsPrev(&reverse, 1), &str)); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "one", str.count); - assert_true(AMitemToStr(AMitemsPrev(&reverse, 1), &str)); - assert_int_equal(str.count, 5); - assert_memory_equal(str.src, "three", str.count); - assert_true(AMitemToStr(AMitemsPrev(&reverse, 1), &str)); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "two", str.count); - assert_null(AMitemsPrev(&reverse, 1)); -} - -static void test_AMputActor_bytes(void** state) { - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->doc_state->base_state->stack; - AMactorId const* actor_id; - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromBytes(test_state->actor_id_bytes, test_state->actor_id_size), cmocka_cb, - AMexpect(AM_VAL_TYPE_ACTOR_ID)), - &actor_id)); - AMstackItem(NULL, AMsetActorId(test_state->doc_state->doc, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMgetActorId(test_state->doc_state->doc), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), - &actor_id)); - AMbyteSpan const bytes = AMactorIdBytes(actor_id); - assert_int_equal(bytes.count, test_state->actor_id_size); - assert_memory_equal(bytes.src, test_state->actor_id_bytes, bytes.count); -} - -static void test_AMputActor_str(void** state) { - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->doc_state->base_state->stack; - AMactorId const* actor_id; - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(test_state->actor_id_str), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), - &actor_id)); - AMstackItem(NULL, AMsetActorId(test_state->doc_state->doc, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMgetActorId(test_state->doc_state->doc), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), - &actor_id)); - AMbyteSpan const str = AMactorIdStr(actor_id); - 
assert_int_equal(str.count, test_state->actor_id_str.count); - assert_memory_equal(str.src, test_state->actor_id_str.src, str.count); -} - -static void test_AMspliceText(void** state) { - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->doc_state->base_state->stack; - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - AMobjId const* const text = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - AMstackItem(NULL, AMspliceText(doc, text, 0, 0, AMstr("one + ")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMspliceText(doc, text, 4, 2, AMstr("two = ")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMspliceText(doc, text, 8, 2, AMstr("three")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMbyteSpan str; - assert_true( - AMitemToStr(AMstackItem(stack_ptr, AMtext(doc, text, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); - assert_int_equal(str.count, strlen("one two three")); - assert_memory_equal(str.src, "one two three", str.count); -} - -int run_doc_tests(void) { - const struct CMUnitTest tests[] = { - cmocka_unit_test_setup_teardown(test_AMkeys_empty, setup, teardown), - cmocka_unit_test_setup_teardown(test_AMkeys_list, setup, teardown), - cmocka_unit_test_setup_teardown(test_AMkeys_map, setup, teardown), - cmocka_unit_test_setup_teardown(test_AMputActor_bytes, setup, teardown), - cmocka_unit_test_setup_teardown(test_AMputActor_str, setup, teardown), - cmocka_unit_test_setup_teardown(test_AMspliceText, setup, teardown), - }; - - return cmocka_run_group_tests(tests, NULL, NULL); -} diff --git a/rust/automerge-c/test/enum_string_tests.c b/rust/automerge-c/test/enum_string_tests.c deleted file mode 100644 index 11131e43..00000000 --- a/rust/automerge-c/test/enum_string_tests.c +++ /dev/null @@ -1,148 +0,0 @@ -#include -#include -#include 
-#include -#include - -/* third-party */ -#include - -/* local */ -#include -#include - -#define assert_to_string(function, tag) assert_string_equal(function(tag), #tag) - -#define assert_from_string(function, type, tag) \ - do { \ - type out; \ - assert_true(function(&out, #tag)); \ - assert_int_equal(out, tag); \ - } while (0) - -static void test_AMidxTypeToString(void** state) { - assert_to_string(AMidxTypeToString, AM_IDX_TYPE_DEFAULT); - assert_to_string(AMidxTypeToString, AM_IDX_TYPE_KEY); - assert_to_string(AMidxTypeToString, AM_IDX_TYPE_POS); - /* Zero tag */ - assert_string_equal(AMidxTypeToString(0), "AM_IDX_TYPE_DEFAULT"); - /* Invalid tag */ - assert_string_equal(AMidxTypeToString(-1), "???"); -} - -static void test_AMidxTypeFromString(void** state) { - assert_from_string(AMidxTypeFromString, AMidxType, AM_IDX_TYPE_DEFAULT); - assert_from_string(AMidxTypeFromString, AMidxType, AM_IDX_TYPE_KEY); - assert_from_string(AMidxTypeFromString, AMidxType, AM_IDX_TYPE_POS); - /* Invalid tag */ - AMidxType out = -1; - assert_false(AMidxTypeFromString(&out, "???")); - assert_int_equal(out, (AMidxType)-1); -} - -static void test_AMobjTypeToString(void** state) { - assert_to_string(AMobjTypeToString, AM_OBJ_TYPE_DEFAULT); - assert_to_string(AMobjTypeToString, AM_OBJ_TYPE_LIST); - assert_to_string(AMobjTypeToString, AM_OBJ_TYPE_MAP); - assert_to_string(AMobjTypeToString, AM_OBJ_TYPE_TEXT); - /* Zero tag */ - assert_string_equal(AMobjTypeToString(0), "AM_OBJ_TYPE_DEFAULT"); - /* Invalid tag */ - assert_string_equal(AMobjTypeToString(-1), "???"); -} - -static void test_AMobjTypeFromString(void** state) { - assert_from_string(AMobjTypeFromString, AMobjType, AM_OBJ_TYPE_DEFAULT); - assert_from_string(AMobjTypeFromString, AMobjType, AM_OBJ_TYPE_LIST); - assert_from_string(AMobjTypeFromString, AMobjType, AM_OBJ_TYPE_MAP); - assert_from_string(AMobjTypeFromString, AMobjType, AM_OBJ_TYPE_TEXT); - /* Invalid tag */ - AMobjType out = -1; - assert_false(AMobjTypeFromString(&out, 
"???")); - assert_int_equal(out, (AMobjType)-1); -} - -static void test_AMstatusToString(void** state) { - assert_to_string(AMstatusToString, AM_STATUS_ERROR); - assert_to_string(AMstatusToString, AM_STATUS_INVALID_RESULT); - assert_to_string(AMstatusToString, AM_STATUS_OK); - /* Zero tag */ - assert_string_equal(AMstatusToString(0), "AM_STATUS_OK"); - /* Invalid tag */ - assert_string_equal(AMstatusToString(-1), "???"); -} - -static void test_AMstatusFromString(void** state) { - assert_from_string(AMstatusFromString, AMstatus, AM_STATUS_ERROR); - assert_from_string(AMstatusFromString, AMstatus, AM_STATUS_INVALID_RESULT); - assert_from_string(AMstatusFromString, AMstatus, AM_STATUS_OK); - /* Invalid tag */ - AMstatus out = -1; - assert_false(AMstatusFromString(&out, "???")); - assert_int_equal(out, (AMstatus)-1); -} - -static void test_AMvalTypeToString(void** state) { - assert_to_string(AMvalTypeToString, AM_VAL_TYPE_ACTOR_ID); - assert_to_string(AMvalTypeToString, AM_VAL_TYPE_BOOL); - assert_to_string(AMvalTypeToString, AM_VAL_TYPE_BYTES); - assert_to_string(AMvalTypeToString, AM_VAL_TYPE_CHANGE); - assert_to_string(AMvalTypeToString, AM_VAL_TYPE_CHANGE_HASH); - assert_to_string(AMvalTypeToString, AM_VAL_TYPE_COUNTER); - assert_to_string(AMvalTypeToString, AM_VAL_TYPE_DEFAULT); - assert_to_string(AMvalTypeToString, AM_VAL_TYPE_DOC); - assert_to_string(AMvalTypeToString, AM_VAL_TYPE_F64); - assert_to_string(AMvalTypeToString, AM_VAL_TYPE_INT); - assert_to_string(AMvalTypeToString, AM_VAL_TYPE_NULL); - assert_to_string(AMvalTypeToString, AM_VAL_TYPE_OBJ_TYPE); - assert_to_string(AMvalTypeToString, AM_VAL_TYPE_STR); - assert_to_string(AMvalTypeToString, AM_VAL_TYPE_SYNC_HAVE); - assert_to_string(AMvalTypeToString, AM_VAL_TYPE_SYNC_MESSAGE); - assert_to_string(AMvalTypeToString, AM_VAL_TYPE_SYNC_STATE); - assert_to_string(AMvalTypeToString, AM_VAL_TYPE_TIMESTAMP); - assert_to_string(AMvalTypeToString, AM_VAL_TYPE_UINT); - assert_to_string(AMvalTypeToString, 
AM_VAL_TYPE_UNKNOWN); - assert_to_string(AMvalTypeToString, AM_VAL_TYPE_VOID); - /* Zero tag */ - assert_string_equal(AMvalTypeToString(0), "AM_VAL_TYPE_DEFAULT"); - /* Invalid tag */ - assert_string_equal(AMvalTypeToString(-1), "???"); -} - -static void test_AMvalTypeFromString(void** state) { - assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_ACTOR_ID); - assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_BOOL); - assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_BYTES); - assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_CHANGE); - assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_CHANGE_HASH); - assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_COUNTER); - assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_DEFAULT); - assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_DOC); - assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_F64); - assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_INT); - assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_NULL); - assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_OBJ_TYPE); - assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_STR); - assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_SYNC_HAVE); - assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_SYNC_MESSAGE); - assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_SYNC_STATE); - assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_TIMESTAMP); - assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_UINT); - assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_UNKNOWN); - assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_VOID); - /* Invalid tag */ - AMvalType out = -1; - assert_false(AMvalTypeFromString(&out, "???")); - assert_int_equal(out, (AMvalType)-1); -} - -int run_enum_string_tests(void) { - const struct CMUnitTest tests[] = { - 
cmocka_unit_test(test_AMidxTypeToString), cmocka_unit_test(test_AMidxTypeFromString), - cmocka_unit_test(test_AMobjTypeToString), cmocka_unit_test(test_AMobjTypeFromString), - cmocka_unit_test(test_AMstatusToString), cmocka_unit_test(test_AMstatusFromString), - cmocka_unit_test(test_AMvalTypeToString), cmocka_unit_test(test_AMvalTypeFromString), - }; - - return cmocka_run_group_tests(tests, NULL, NULL); -} diff --git a/rust/automerge-c/test/item_tests.c b/rust/automerge-c/test/item_tests.c deleted file mode 100644 index a30b0556..00000000 --- a/rust/automerge-c/test/item_tests.c +++ /dev/null @@ -1,94 +0,0 @@ -#include -#include -#include -#include -#include - -/* third-party */ -#include - -/* local */ -#include -#include -#include "cmocka_utils.h" -#include "doc_state.h" - -static void test_AMitemResult(void** state) { - enum { ITEM_COUNT = 1000 }; - - DocState* doc_state = *state; - AMstack** stack_ptr = &doc_state->base_state->stack; - /* Append the strings to a list so that they'll be in numerical order. */ - AMobjId const* const list = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), - cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - for (size_t pos = 0; pos != ITEM_COUNT; ++pos) { - size_t const count = snprintf(NULL, 0, "%zu", pos); - char* const src = test_calloc(count + 1, sizeof(char)); - assert_int_equal(sprintf(src, "%zu", pos), count); - AMstackItem(NULL, AMlistPutStr(doc_state->doc, list, pos, true, AMbytes(src, count)), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - test_free(src); - } - /* Get an item iterator. */ - AMitems items = AMstackItems(stack_ptr, AMlistRange(doc_state->doc, list, 0, SIZE_MAX, NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_STR)); - /* Get the item iterator's result so that it can be freed later. */ - AMresult const* const items_result = (*stack_ptr)->result; - /* Iterate over all of the items and copy their pointers into an array. 
*/ - AMitem* item_ptrs[ITEM_COUNT] = {NULL}; - AMitem* item = NULL; - for (size_t pos = 0; (item = AMitemsNext(&items, 1)) != NULL; ++pos) { - /* The item's reference count should be 1. */ - assert_int_equal(AMitemRefCount(item), 1); - if (pos & 1) { - /* Create a redundant result for an odd item. */ - AMitem* const new_item = AMstackItem(stack_ptr, AMitemResult(item), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - /* The item's old and new pointers will never match. */ - assert_ptr_not_equal(new_item, item); - /* The item's reference count will have been incremented. */ - assert_int_equal(AMitemRefCount(item), 2); - assert_int_equal(AMitemRefCount(new_item), 2); - /* The item's old and new indices should match. */ - assert_int_equal(AMitemIdxType(item), AMitemIdxType(new_item)); - assert_int_equal(AMitemIdxType(item), AM_IDX_TYPE_POS); - size_t pos, new_pos; - assert_true(AMitemPos(item, &pos)); - assert_true(AMitemPos(new_item, &new_pos)); - assert_int_equal(pos, new_pos); - /* The item's old and new object IDs should match. */ - AMobjId const* const obj_id = AMitemObjId(item); - AMobjId const* const new_obj_id = AMitemObjId(new_item); - assert_true(AMobjIdEqual(obj_id, new_obj_id)); - /* The item's old and new value types should match. */ - assert_int_equal(AMitemValType(item), AMitemValType(new_item)); - /* The item's old and new string values should match. */ - AMbyteSpan str; - assert_true(AMitemToStr(item, &str)); - AMbyteSpan new_str; - assert_true(AMitemToStr(new_item, &new_str)); - assert_int_equal(str.count, new_str.count); - assert_memory_equal(str.src, new_str.src, new_str.count); - /* The item's old and new object IDs are one and the same. */ - assert_ptr_equal(obj_id, new_obj_id); - /* The item's old and new string values are one and the same. */ - assert_ptr_equal(str.src, new_str.src); - /* Save the item's new pointer. */ - item_ptrs[pos] = new_item; - } - } - /* Free the item iterator's result. 
*/ - AMresultFree(AMstackPop(stack_ptr, items_result)); - /* An odd item's reference count should be 1 again. */ - for (size_t pos = 1; pos < ITEM_COUNT; pos += 2) { - assert_int_equal(AMitemRefCount(item_ptrs[pos]), 1); - } -} - -int run_item_tests(void) { - const struct CMUnitTest tests[] = { - cmocka_unit_test(test_AMitemResult), - }; - - return cmocka_run_group_tests(tests, setup_doc, teardown_doc); -} diff --git a/rust/automerge-c/test/list_tests.c b/rust/automerge-c/test/list_tests.c deleted file mode 100644 index 723dd038..00000000 --- a/rust/automerge-c/test/list_tests.c +++ /dev/null @@ -1,515 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include - -/* third-party */ -#include - -/* local */ -#include -#include -#include "base_state.h" -#include "cmocka_utils.h" -#include "doc_state.h" -#include "macro_utils.h" - -static void test_AMlistIncrement(void** state) { - DocState* doc_state = *state; - AMstack** stack_ptr = &doc_state->base_state->stack; - AMobjId const* const list = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), - cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - AMstackItem(NULL, AMlistPutCounter(doc_state->doc, list, 0, true, 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - int64_t counter; - assert_true(AMitemToCounter( - AMstackItem(stack_ptr, AMlistGet(doc_state->doc, list, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_COUNTER)), - &counter)); - assert_int_equal(counter, 0); - AMresultFree(AMstackPop(stack_ptr, NULL)); - AMstackItem(NULL, AMlistIncrement(doc_state->doc, list, 0, 3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - assert_true(AMitemToCounter( - AMstackItem(stack_ptr, AMlistGet(doc_state->doc, list, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_COUNTER)), - &counter)); - assert_int_equal(counter, 3); - AMresultFree(AMstackPop(stack_ptr, NULL)); -} - -#define test_AMlistPut(suffix, mode) test_AMlistPut##suffix##_##mode - -#define 
static_void_test_AMlistPut(suffix, mode, type, scalar_value) \ - static void test_AMlistPut##suffix##_##mode(void** state) { \ - DocState* doc_state = *state; \ - AMstack** stack_ptr = &doc_state->base_state->stack; \ - AMobjId const* const list = AMitemObjId( \ - AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), \ - cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); \ - AMstackItem(NULL, AMlistPut##suffix(doc_state->doc, list, 0, !strcmp(#mode, "insert"), scalar_value), \ - cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); \ - type value; \ - assert_true(AMitemTo##suffix(AMstackItem(stack_ptr, AMlistGet(doc_state->doc, list, 0, NULL), cmocka_cb, \ - AMexpect(suffix_to_val_type(#suffix))), \ - &value)); \ - assert_true(value == scalar_value); \ - AMresultFree(AMstackPop(stack_ptr, NULL)); \ - } - -#define test_AMlistPutBytes(mode) test_AMlistPutBytes##_##mode - -#define static_void_test_AMlistPutBytes(mode, bytes_value) \ - static void test_AMlistPutBytes_##mode(void** state) { \ - static size_t const BYTES_SIZE = sizeof(bytes_value) / sizeof(uint8_t); \ - \ - DocState* doc_state = *state; \ - AMstack** stack_ptr = &doc_state->base_state->stack; \ - AMobjId const* const list = AMitemObjId( \ - AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), \ - cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); \ - AMstackItem( \ - NULL, AMlistPutBytes(doc_state->doc, list, 0, !strcmp(#mode, "insert"), AMbytes(bytes_value, BYTES_SIZE)), \ - cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); \ - AMbyteSpan bytes; \ - assert_true(AMitemToBytes( \ - AMstackItem(stack_ptr, AMlistGet(doc_state->doc, list, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), \ - &bytes)); \ - assert_int_equal(bytes.count, BYTES_SIZE); \ - assert_memory_equal(bytes.src, bytes_value, BYTES_SIZE); \ - AMresultFree(AMstackPop(stack_ptr, NULL)); \ - } - -#define test_AMlistPutNull(mode) test_AMlistPutNull_##mode - -#define 
static_void_test_AMlistPutNull(mode) \ - static void test_AMlistPutNull_##mode(void** state) { \ - DocState* doc_state = *state; \ - AMstack** stack_ptr = &doc_state->base_state->stack; \ - AMobjId const* const list = AMitemObjId( \ - AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), \ - cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); \ - AMstackItem(NULL, AMlistPutNull(doc_state->doc, list, 0, !strcmp(#mode, "insert")), cmocka_cb, \ - AMexpect(AM_VAL_TYPE_VOID)); \ - AMresult* result = AMstackResult(stack_ptr, AMlistGet(doc_state->doc, list, 0, NULL), NULL, NULL); \ - if (AMresultStatus(result) != AM_STATUS_OK) { \ - fail_msg_view("%s", AMresultError(result)); \ - } \ - assert_int_equal(AMresultSize(result), 1); \ - assert_int_equal(AMitemValType(AMresultItem(result)), AM_VAL_TYPE_NULL); \ - AMresultFree(AMstackPop(stack_ptr, NULL)); \ - } - -#define test_AMlistPutObject(label, mode) test_AMlistPutObject_##label##_##mode - -#define static_void_test_AMlistPutObject(label, mode) \ - static void test_AMlistPutObject_##label##_##mode(void** state) { \ - DocState* doc_state = *state; \ - AMstack** stack_ptr = &doc_state->base_state->stack; \ - AMobjId const* const list = AMitemObjId( \ - AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), \ - cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); \ - AMobjType const obj_type = suffix_to_obj_type(#label); \ - AMobjId const* const obj_id = AMitemObjId( \ - AMstackItem(stack_ptr, AMlistPutObject(doc_state->doc, list, 0, !strcmp(#mode, "insert"), obj_type), \ - cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); \ - assert_non_null(obj_id); \ - assert_int_equal(AMobjObjType(doc_state->doc, obj_id), obj_type); \ - assert_int_equal(AMobjSize(doc_state->doc, obj_id, NULL), 0); \ - AMresultFree(AMstackPop(stack_ptr, NULL)); \ - } - -#define test_AMlistPutStr(mode) test_AMlistPutStr##_##mode - -#define static_void_test_AMlistPutStr(mode, str_value) \ - 
static void test_AMlistPutStr_##mode(void** state) { \ - DocState* doc_state = *state; \ - AMstack** stack_ptr = &doc_state->base_state->stack; \ - AMobjId const* const list = AMitemObjId( \ - AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), \ - cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); \ - AMstackItem(NULL, AMlistPutStr(doc_state->doc, list, 0, !strcmp(#mode, "insert"), AMstr(str_value)), \ - cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); \ - AMbyteSpan str; \ - assert_true(AMitemToStr( \ - AMstackItem(stack_ptr, AMlistGet(doc_state->doc, list, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), \ - &str)); \ - assert_int_equal(str.count, strlen(str_value)); \ - assert_memory_equal(str.src, str_value, str.count); \ - AMresultFree(AMstackPop(stack_ptr, NULL)); \ - } - -static_void_test_AMlistPut(Bool, insert, bool, true); - -static_void_test_AMlistPut(Bool, update, bool, true); - -static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX}; - -static_void_test_AMlistPutBytes(insert, BYTES_VALUE); - -static_void_test_AMlistPutBytes(update, BYTES_VALUE); - -static_void_test_AMlistPut(Counter, insert, int64_t, INT64_MAX); - -static_void_test_AMlistPut(Counter, update, int64_t, INT64_MAX); - -static_void_test_AMlistPut(F64, insert, double, DBL_MAX); - -static_void_test_AMlistPut(F64, update, double, DBL_MAX); - -static_void_test_AMlistPut(Int, insert, int64_t, INT64_MAX); - -static_void_test_AMlistPut(Int, update, int64_t, INT64_MAX); - -static_void_test_AMlistPutNull(insert); - -static_void_test_AMlistPutNull(update); - -static_void_test_AMlistPutObject(List, insert); - -static_void_test_AMlistPutObject(List, update); - -static_void_test_AMlistPutObject(Map, insert); - -static_void_test_AMlistPutObject(Map, update); - -static_void_test_AMlistPutObject(Text, insert); - -static_void_test_AMlistPutObject(Text, update); - -static_void_test_AMlistPutStr(insert, - "Hello, " - "world!"); - 
-static_void_test_AMlistPutStr(update, - "Hello," - " world" - "!"); - -static_void_test_AMlistPut(Timestamp, insert, int64_t, INT64_MAX); - -static_void_test_AMlistPut(Timestamp, update, int64_t, INT64_MAX); - -static_void_test_AMlistPut(Uint, insert, uint64_t, UINT64_MAX); - -static_void_test_AMlistPut(Uint, update, uint64_t, UINT64_MAX); - -static void test_get_range_values(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - AMdoc* doc1; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); - AMobjId const* const list = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc1, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - - /* Insert elements. */ - AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("First")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("Second")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("Third")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("Fourth")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("Fifth")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("Sixth")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("Seventh")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("Eighth")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(doc1, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - - AMitems const v1 = AMstackItems(stack_ptr, AMgetHeads(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMdoc* doc2; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMfork(doc1, NULL), 
cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); - - AMstackItem(NULL, AMlistPutStr(doc1, list, 2, false, AMstr("Third V2")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(doc1, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - - AMstackItem(NULL, AMlistPutStr(doc2, list, 2, false, AMstr("Third V3")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(doc2, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - - AMstackItem(NULL, AMmerge(doc1, doc2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - - /* Forward vs. reverse: complete current list range. */ - AMitems range = - AMstackItems(stack_ptr, AMlistRange(doc1, list, 0, SIZE_MAX, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - size_t size = AMitemsSize(&range); - assert_int_equal(size, 8); - AMitems range_back = AMitemsReversed(&range); - assert_int_equal(AMitemsSize(&range_back), size); - size_t pos; - assert_true(AMitemPos(AMitemsNext(&range, 1), &pos)); - assert_int_equal(pos, 0); - assert_true(AMitemPos(AMitemsNext(&range_back, 1), &pos)); - assert_int_equal(pos, 7); - - AMitem *item1, *item_back1; - size_t count, middle = size / 2; - range = AMitemsRewound(&range); - range_back = AMitemsRewound(&range_back); - for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; - item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { - size_t pos1, pos_back1; - assert_true(AMitemPos(item1, &pos1)); - assert_true(AMitemPos(item_back1, &pos_back1)); - if ((count == middle) && (middle & 1)) { - /* The iterators are crossing in the middle. 
*/ - assert_int_equal(pos1, pos_back1); - assert_true(AMitemEqual(item1, item_back1)); - assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); - } else { - assert_int_not_equal(pos1, pos_back1); - } - AMitem* item2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos1, NULL), NULL, NULL); - AMitem* item_back2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos_back1, NULL), NULL, NULL); - /** \note An item returned from an `AM...Get()` call doesn't include the - index used to retrieve it. */ - assert_false(AMitemIdxType(item2)); - assert_false(AMitemIdxType(item_back2)); - assert_true(AMitemEqual(item1, item2)); - assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); - assert_true(AMitemEqual(item_back1, item_back2)); - assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); - AMresultFree(AMstackPop(stack_ptr, NULL)); - } - - /* Forward vs. reverse: partial current list range. */ - range = AMstackItems(stack_ptr, AMlistRange(doc1, list, 1, 6, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - size = AMitemsSize(&range); - assert_int_equal(size, 5); - range_back = AMitemsReversed(&range); - assert_int_equal(AMitemsSize(&range_back), size); - assert_true(AMitemPos(AMitemsNext(&range, 1), &pos)); - assert_int_equal(pos, 1); - assert_true(AMitemPos(AMitemsNext(&range_back, 1), &pos)); - assert_int_equal(pos, 5); - - middle = size / 2; - range = AMitemsRewound(&range); - range_back = AMitemsRewound(&range_back); - for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; - item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { - size_t pos1, pos_back1; - assert_true(AMitemPos(item1, &pos1)); - assert_true(AMitemPos(item_back1, &pos_back1)); - if ((count == middle) && (middle & 1)) { - /* The iterators are crossing in the middle. 
*/ - assert_int_equal(pos1, pos_back1); - assert_true(AMitemEqual(item1, item_back1)); - assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); - } else { - assert_int_not_equal(pos1, pos_back1); - } - AMitem* item2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos1, NULL), NULL, NULL); - AMitem* item_back2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos_back1, NULL), NULL, NULL); - /** \note An item returned from an `AMlistGet()` call doesn't include - the index used to retrieve it. */ - assert_int_equal(AMitemIdxType(item2), 0); - assert_int_equal(AMitemIdxType(item_back2), 0); - assert_true(AMitemEqual(item1, item2)); - assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); - assert_true(AMitemEqual(item_back1, item_back2)); - assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); - AMresultFree(AMstackPop(stack_ptr, NULL)); - } - - /* Forward vs. reverse: complete historical map range. */ - range = AMstackItems(stack_ptr, AMlistRange(doc1, list, 0, SIZE_MAX, &v1), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - size = AMitemsSize(&range); - assert_int_equal(size, 8); - range_back = AMitemsReversed(&range); - assert_int_equal(AMitemsSize(&range_back), size); - assert_true(AMitemPos(AMitemsNext(&range, 1), &pos)); - assert_int_equal(pos, 0); - assert_true(AMitemPos(AMitemsNext(&range_back, 1), &pos)); - assert_int_equal(pos, 7); - - middle = size / 2; - range = AMitemsRewound(&range); - range_back = AMitemsRewound(&range_back); - for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; - item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { - size_t pos1, pos_back1; - assert_true(AMitemPos(item1, &pos1)); - assert_true(AMitemPos(item_back1, &pos_back1)); - if ((count == middle) && (middle & 1)) { - /* The iterators are crossing in the middle. 
*/ - assert_int_equal(pos1, pos_back1); - assert_true(AMitemEqual(item1, item_back1)); - assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); - } else { - assert_int_not_equal(pos1, pos_back1); - } - AMitem* item2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos1, &v1), NULL, NULL); - AMitem* item_back2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos_back1, &v1), NULL, NULL); - /** \note An item returned from an `AM...Get()` call doesn't include the - index used to retrieve it. */ - assert_false(AMitemIdxType(item2)); - assert_false(AMitemIdxType(item_back2)); - assert_true(AMitemEqual(item1, item2)); - assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); - assert_true(AMitemEqual(item_back1, item_back2)); - assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); - AMresultFree(AMstackPop(stack_ptr, NULL)); - } - - /* Forward vs. reverse: partial historical map range. */ - range = AMstackItems(stack_ptr, AMlistRange(doc1, list, 2, 7, &v1), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - size = AMitemsSize(&range); - assert_int_equal(size, 5); - range_back = AMitemsReversed(&range); - assert_int_equal(AMitemsSize(&range_back), size); - assert_true(AMitemPos(AMitemsNext(&range, 1), &pos)); - assert_int_equal(pos, 2); - assert_true(AMitemPos(AMitemsNext(&range_back, 1), &pos)); - assert_int_equal(pos, 6); - - middle = size / 2; - range = AMitemsRewound(&range); - range_back = AMitemsRewound(&range_back); - for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; - item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { - size_t pos1, pos_back1; - assert_true(AMitemPos(item1, &pos1)); - assert_true(AMitemPos(item_back1, &pos_back1)); - if ((count == middle) && (middle & 1)) { - /* The iterators are crossing in the middle. 
*/ - assert_int_equal(pos1, pos_back1); - assert_true(AMitemEqual(item1, item_back1)); - assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); - } else { - assert_int_not_equal(pos1, pos_back1); - } - AMitem* item2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos1, &v1), NULL, NULL); - AMitem* item_back2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos_back1, &v1), NULL, NULL); - /** \note An item returned from an `AM...Get()` call doesn't include the - index used to retrieve it. */ - assert_false(AMitemIdxType(item2)); - assert_false(AMitemIdxType(item_back2)); - assert_true(AMitemEqual(item1, item2)); - assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); - assert_true(AMitemEqual(item_back1, item_back2)); - assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); - AMresultFree(AMstackPop(stack_ptr, NULL)); - } - - /* List range vs. object range: complete current. */ - range = AMstackItems(stack_ptr, AMlistRange(doc1, list, 0, SIZE_MAX, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - AMitems obj_items = AMstackItems(stack_ptr, AMobjItems(doc1, list, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - assert_int_equal(AMitemsSize(&range), AMitemsSize(&obj_items)); - - AMitem *item, *obj_item; - for (item = NULL, obj_item = NULL; item && obj_item; - item = AMitemsNext(&range, 1), obj_item = AMitemsNext(&obj_items, 1)) { - /** \note Object iteration doesn't yield any item indices. */ - assert_true(AMitemIdxType(item)); - assert_false(AMitemIdxType(obj_item)); - assert_true(AMitemEqual(item, obj_item)); - assert_true(AMobjIdEqual(AMitemObjId(item), AMitemObjId(obj_item))); - } - - /* List range vs. object range: complete historical. 
*/ - range = AMstackItems(stack_ptr, AMlistRange(doc1, list, 0, SIZE_MAX, &v1), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - obj_items = AMstackItems(stack_ptr, AMobjItems(doc1, list, &v1), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - assert_int_equal(AMitemsSize(&range), AMitemsSize(&obj_items)); - - for (item = NULL, obj_item = NULL; item && obj_item; - item = AMitemsNext(&range, 1), obj_item = AMitemsNext(&obj_items, 1)) { - /** \note Object iteration doesn't yield any item indices. */ - assert_true(AMitemIdxType(item)); - assert_false(AMitemIdxType(obj_item)); - assert_true(AMitemEqual(item, obj_item)); - assert_true(AMobjIdEqual(AMitemObjId(item), AMitemObjId(obj_item))); - } -} - -/** - * \brief A JavaScript application can introduce NUL (`\0`) characters into a - * list object's string value which will truncate it in a C application. - */ -static void test_get_NUL_string_value(void** state) { - /* - import * as Automerge from "@automerge/automerge"; - let doc = Automerge.init(); - doc = Automerge.change(doc, doc => { - doc[0] = 'o\0ps'; - }); - const bytes = Automerge.save(doc); - console.log("static uint8_t const SAVED_DOC[] = {" + Array.apply([], - bytes).join(", ") + "};"); - */ - static uint8_t const OOPS_VALUE[] = {'o', '\0', 'p', 's'}; - static size_t const OOPS_SIZE = sizeof(OOPS_VALUE) / sizeof(uint8_t); - - static uint8_t const SAVED_DOC[] = { - 133, 111, 74, 131, 224, 28, 197, 17, 0, 113, 1, 16, 246, 137, 63, 193, 255, 181, 76, 79, 129, - 213, 133, 29, 214, 158, 164, 15, 1, 207, 184, 14, 57, 1, 194, 79, 247, 82, 160, 134, 227, 144, - 5, 241, 136, 205, 238, 250, 251, 54, 34, 250, 210, 96, 204, 132, 153, 203, 110, 109, 6, 6, 1, - 2, 3, 2, 19, 2, 35, 2, 64, 2, 86, 2, 8, 21, 3, 33, 2, 35, 2, 52, 1, 66, - 2, 86, 2, 87, 4, 128, 1, 2, 127, 0, 127, 1, 127, 1, 127, 0, 127, 0, 127, 7, 127, - 1, 48, 127, 0, 127, 1, 1, 127, 1, 127, 70, 111, 0, 112, 115, 127, 0, 0}; - static size_t const SAVED_DOC_SIZE = sizeof(SAVED_DOC) / sizeof(uint8_t); - - BaseState* base_state = 
*state; - AMstack** stack_ptr = &base_state->stack; - AMdoc* doc; - assert_true(AMitemToDoc( - AMstackItem(stack_ptr, AMload(SAVED_DOC, SAVED_DOC_SIZE), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - AMbyteSpan str; - assert_true(AMitemToStr( - AMstackItem(stack_ptr, AMlistGet(doc, AM_ROOT, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); - assert_int_not_equal(str.count, strlen(OOPS_VALUE)); - assert_int_equal(str.count, OOPS_SIZE); - assert_memory_equal(str.src, OOPS_VALUE, str.count); -} - -static void test_insert_at_index(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - AMobjId const* const list = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - /* Insert both at the same index. */ - AMstackItem(NULL, AMlistPutUint(doc, list, 0, true, 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMlistPutUint(doc, list, 0, true, 1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - - assert_int_equal(AMobjSize(doc, list, NULL), 2); - AMitems const keys = AMstackItems(stack_ptr, AMkeys(doc, list, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - assert_int_equal(AMitemsSize(&keys), 2); - AMitems const range = - AMstackItems(stack_ptr, AMlistRange(doc, list, 0, SIZE_MAX, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_UINT)); - assert_int_equal(AMitemsSize(&range), 2); -} - -int run_list_tests(void) { - const struct CMUnitTest tests[] = { - cmocka_unit_test(test_AMlistIncrement), - cmocka_unit_test(test_AMlistPut(Bool, insert)), - cmocka_unit_test(test_AMlistPut(Bool, update)), - cmocka_unit_test(test_AMlistPutBytes(insert)), - cmocka_unit_test(test_AMlistPutBytes(update)), - cmocka_unit_test(test_AMlistPut(Counter, insert)), - cmocka_unit_test(test_AMlistPut(Counter, update)), - 
cmocka_unit_test(test_AMlistPut(F64, insert)), - cmocka_unit_test(test_AMlistPut(F64, update)), - cmocka_unit_test(test_AMlistPut(Int, insert)), - cmocka_unit_test(test_AMlistPut(Int, update)), - cmocka_unit_test(test_AMlistPutNull(insert)), - cmocka_unit_test(test_AMlistPutNull(update)), - cmocka_unit_test(test_AMlistPutObject(List, insert)), - cmocka_unit_test(test_AMlistPutObject(List, update)), - cmocka_unit_test(test_AMlistPutObject(Map, insert)), - cmocka_unit_test(test_AMlistPutObject(Map, update)), - cmocka_unit_test(test_AMlistPutObject(Text, insert)), - cmocka_unit_test(test_AMlistPutObject(Text, update)), - cmocka_unit_test(test_AMlistPutStr(insert)), - cmocka_unit_test(test_AMlistPutStr(update)), - cmocka_unit_test(test_AMlistPut(Timestamp, insert)), - cmocka_unit_test(test_AMlistPut(Timestamp, update)), - cmocka_unit_test(test_AMlistPut(Uint, insert)), - cmocka_unit_test(test_AMlistPut(Uint, update)), - cmocka_unit_test_setup_teardown(test_get_range_values, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_get_NUL_string_value, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_insert_at_index, setup_base, teardown_base), - }; - - return cmocka_run_group_tests(tests, setup_doc, teardown_doc); -} diff --git a/rust/automerge-c/test/macro_utils.c b/rust/automerge-c/test/macro_utils.c deleted file mode 100644 index 3a546eb5..00000000 --- a/rust/automerge-c/test/macro_utils.c +++ /dev/null @@ -1,38 +0,0 @@ -#include - -/* local */ -#include "macro_utils.h" - -AMobjType suffix_to_obj_type(char const* obj_type_label) { - if (!strcmp(obj_type_label, "List")) - return AM_OBJ_TYPE_LIST; - else if (!strcmp(obj_type_label, "Map")) - return AM_OBJ_TYPE_MAP; - else if (!strcmp(obj_type_label, "Text")) - return AM_OBJ_TYPE_TEXT; - else - return AM_OBJ_TYPE_DEFAULT; -} - -AMvalType suffix_to_val_type(char const* suffix) { - if (!strcmp(suffix, "Bool")) - return AM_VAL_TYPE_BOOL; - else if (!strcmp(suffix, "Bytes")) - return 
AM_VAL_TYPE_BYTES; - else if (!strcmp(suffix, "Counter")) - return AM_VAL_TYPE_COUNTER; - else if (!strcmp(suffix, "F64")) - return AM_VAL_TYPE_F64; - else if (!strcmp(suffix, "Int")) - return AM_VAL_TYPE_INT; - else if (!strcmp(suffix, "Null")) - return AM_VAL_TYPE_NULL; - else if (!strcmp(suffix, "Str")) - return AM_VAL_TYPE_STR; - else if (!strcmp(suffix, "Timestamp")) - return AM_VAL_TYPE_TIMESTAMP; - else if (!strcmp(suffix, "Uint")) - return AM_VAL_TYPE_UINT; - else - return AM_VAL_TYPE_DEFAULT; -} diff --git a/rust/automerge-c/test/macro_utils.h b/rust/automerge-c/test/macro_utils.h deleted file mode 100644 index e4c2c5b9..00000000 --- a/rust/automerge-c/test/macro_utils.h +++ /dev/null @@ -1,23 +0,0 @@ -#ifndef TESTS_MACRO_UTILS_H -#define TESTS_MACRO_UTILS_H - -/* local */ -#include - -/** - * \brief Gets the object type tag corresponding to an object type suffix. - * - * \param[in] suffix An object type suffix string. - * \return An `AMobjType` enum tag. - */ -AMobjType suffix_to_obj_type(char const* suffix); - -/** - * \brief Gets the value type tag corresponding to a value type suffix. - * - * \param[in] suffix A value type suffix string. - * \return An `AMvalType` enum tag. 
- */ -AMvalType suffix_to_val_type(char const* suffix); - -#endif /* TESTS_MACRO_UTILS_H */ diff --git a/rust/automerge-c/test/main.c b/rust/automerge-c/test/main.c deleted file mode 100644 index 2996c9b3..00000000 --- a/rust/automerge-c/test/main.c +++ /dev/null @@ -1,28 +0,0 @@ -#include -#include -#include -#include - -/* third-party */ -#include - -extern int run_actor_id_tests(void); - -extern int run_byte_span_tests(void); - -extern int run_doc_tests(void); - -extern int run_enum_string_tests(void); - -extern int run_item_tests(void); - -extern int run_list_tests(void); - -extern int run_map_tests(void); - -extern int run_ported_wasm_suite(void); - -int main(void) { - return (run_actor_id_tests() + run_byte_span_tests() + run_doc_tests() + run_enum_string_tests() + - run_item_tests() + run_list_tests() + run_map_tests() + run_ported_wasm_suite()); -} diff --git a/rust/automerge-c/test/map_tests.c b/rust/automerge-c/test/map_tests.c deleted file mode 100644 index 2ee2e69a..00000000 --- a/rust/automerge-c/test/map_tests.c +++ /dev/null @@ -1,1582 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include - -/* third-party */ -#include - -/* local */ -#include -#include -#include -#include "base_state.h" -#include "cmocka_utils.h" -#include "doc_state.h" -#include "macro_utils.h" - -static void test_AMmapIncrement(void** state) { - DocState* doc_state = *state; - AMstack** stack_ptr = &doc_state->base_state->stack; - AMstackItem(NULL, AMmapPutCounter(doc_state->doc, AM_ROOT, AMstr("Counter"), 0), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - int64_t counter; - assert_true(AMitemToCounter(AMstackItem(stack_ptr, AMmapGet(doc_state->doc, AM_ROOT, AMstr("Counter"), NULL), - cmocka_cb, AMexpect(AM_VAL_TYPE_COUNTER)), - &counter)); - assert_int_equal(counter, 0); - AMresultFree(AMstackPop(stack_ptr, NULL)); - AMstackItem(NULL, AMmapIncrement(doc_state->doc, AM_ROOT, AMstr("Counter"), 3), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - 
assert_true(AMitemToCounter(AMstackItem(stack_ptr, AMmapGet(doc_state->doc, AM_ROOT, AMstr("Counter"), NULL), - cmocka_cb, AMexpect(AM_VAL_TYPE_COUNTER)), - &counter)); - assert_int_equal(counter, 3); - AMresultFree(AMstackPop(stack_ptr, NULL)); -} - -#define test_AMmapPut(suffix) test_AMmapPut##suffix - -#define static_void_test_AMmapPut(suffix, type, scalar_value) \ - static void test_AMmapPut##suffix(void** state) { \ - DocState* doc_state = *state; \ - AMstack** stack_ptr = &doc_state->base_state->stack; \ - AMstackItem(NULL, AMmapPut##suffix(doc_state->doc, AM_ROOT, AMstr(#suffix), scalar_value), cmocka_cb, \ - AMexpect(AM_VAL_TYPE_VOID)); \ - type value; \ - assert_true(AMitemTo##suffix(AMstackItem(stack_ptr, AMmapGet(doc_state->doc, AM_ROOT, AMstr(#suffix), NULL), \ - cmocka_cb, AMexpect(suffix_to_val_type(#suffix))), \ - &value)); \ - assert_true(value == scalar_value); \ - AMresultFree(AMstackPop(stack_ptr, NULL)); \ - } - -static void test_AMmapPutBytes(void** state) { - static AMbyteSpan const KEY = {"Bytes", 5}; - static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX}; - static size_t const BYTES_SIZE = sizeof(BYTES_VALUE) / sizeof(uint8_t); - - DocState* doc_state = *state; - AMstack** stack_ptr = &doc_state->base_state->stack; - AMstackItem(NULL, AMmapPutBytes(doc_state->doc, AM_ROOT, KEY, AMbytes(BYTES_VALUE, BYTES_SIZE)), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - AMbyteSpan bytes; - assert_true(AMitemToBytes( - AMstackItem(stack_ptr, AMmapGet(doc_state->doc, AM_ROOT, KEY, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), - &bytes)); - assert_int_equal(bytes.count, BYTES_SIZE); - assert_memory_equal(bytes.src, BYTES_VALUE, BYTES_SIZE); - AMresultFree(AMstackPop(stack_ptr, NULL)); -} - -static void test_AMmapPutNull(void** state) { - static AMbyteSpan const KEY = {"Null", 4}; - - DocState* doc_state = *state; - AMstack** stack_ptr = &doc_state->base_state->stack; - AMstackItem(NULL, AMmapPutNull(doc_state->doc, AM_ROOT, KEY), 
cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMresult* result = AMstackResult(stack_ptr, AMmapGet(doc_state->doc, AM_ROOT, KEY, NULL), NULL, NULL); - if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg_view("%s", AMresultError(result)); - } - assert_int_equal(AMresultSize(result), 1); - AMitem* item = AMresultItem(result); - assert_int_equal(AMitemValType(item), AM_VAL_TYPE_NULL); -} - -#define test_AMmapPutObject(label) test_AMmapPutObject_##label - -#define static_void_test_AMmapPutObject(label) \ - static void test_AMmapPutObject_##label(void** state) { \ - DocState* doc_state = *state; \ - AMstack** stack_ptr = &doc_state->base_state->stack; \ - AMobjType const obj_type = suffix_to_obj_type(#label); \ - AMobjId const* const obj_id = \ - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr(#label), obj_type), \ - cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); \ - assert_non_null(obj_id); \ - assert_int_equal(AMobjObjType(doc_state->doc, obj_id), obj_type); \ - assert_int_equal(AMobjSize(doc_state->doc, obj_id, NULL), 0); \ - AMresultFree(AMstackPop(stack_ptr, NULL)); \ - } - -static void test_AMmapPutStr(void** state) { - DocState* doc_state = *state; - AMstack** stack_ptr = &doc_state->base_state->stack; - AMstackItem(NULL, AMmapPutStr(doc_state->doc, AM_ROOT, AMstr("Str"), AMstr("Hello, world!")), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - AMbyteSpan str; - assert_true(AMitemToStr(AMstackItem(stack_ptr, AMmapGet(doc_state->doc, AM_ROOT, AMstr("Str"), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_STR)), - &str)); - assert_int_equal(str.count, strlen("Hello, world!")); - assert_memory_equal(str.src, "Hello, world!", str.count); - AMresultFree(AMstackPop(stack_ptr, NULL)); -} - -static_void_test_AMmapPut(Bool, bool, true); - -static_void_test_AMmapPut(Counter, int64_t, INT64_MAX); - -static_void_test_AMmapPut(F64, double, DBL_MAX); - -static_void_test_AMmapPut(Int, int64_t, INT64_MAX); - -static_void_test_AMmapPutObject(List); - 
-static_void_test_AMmapPutObject(Map); - -static_void_test_AMmapPutObject(Text); - -static_void_test_AMmapPut(Timestamp, int64_t, INT64_MAX); - -static_void_test_AMmapPut(Uint, int64_t, UINT64_MAX); - -/** - * \brief A JavaScript application can introduce NUL (`\0`) characters into - * a map object's key which will truncate it in a C application. - */ -static void test_get_NUL_key(void** state) { - /* - import * as Automerge from "@automerge/automerge"; - let doc = Automerge.init(); - doc = Automerge.change(doc, doc => { - doc['o\0ps'] = 'oops'; - }); - const bytes = Automerge.save(doc); - console.log("static uint8_t const SAVED_DOC[] = {" + Array.apply([], - bytes).join(", ") + "};"); - */ - static uint8_t const OOPS_SRC[] = {'o', '\0', 'p', 's'}; - static AMbyteSpan const OOPS_KEY = {.src = OOPS_SRC, .count = sizeof(OOPS_SRC) / sizeof(uint8_t)}; - - static uint8_t const SAVED_DOC[] = { - 133, 111, 74, 131, 233, 150, 60, 244, 0, 116, 1, 16, 223, 253, 146, 193, 58, 122, 66, 134, 151, - 225, 210, 51, 58, 86, 247, 8, 1, 49, 118, 234, 228, 42, 116, 171, 13, 164, 99, 244, 27, 19, - 150, 44, 201, 136, 222, 219, 90, 246, 226, 123, 77, 120, 157, 155, 55, 182, 2, 178, 64, 6, 1, - 2, 3, 2, 19, 2, 35, 2, 64, 2, 86, 2, 8, 21, 6, 33, 2, 35, 2, 52, 1, 66, - 2, 86, 2, 87, 4, 128, 1, 2, 127, 0, 127, 1, 127, 1, 127, 0, 127, 0, 127, 7, 127, - 4, 111, 0, 112, 115, 127, 0, 127, 1, 1, 127, 1, 127, 70, 111, 111, 112, 115, 127, 0, 0}; - static size_t const SAVED_DOC_SIZE = sizeof(SAVED_DOC) / sizeof(uint8_t); - - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - AMdoc* doc; - assert_true(AMitemToDoc( - AMstackItem(stack_ptr, AMload(SAVED_DOC, SAVED_DOC_SIZE), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - AMbyteSpan str; - assert_true(AMitemToStr( - AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, OOPS_KEY, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); - assert_int_not_equal(OOPS_KEY.count, strlen(OOPS_KEY.src)); - assert_int_equal(str.count, 
strlen("oops")); - assert_memory_equal(str.src, "oops", str.count); -} - -/** - * \brief A JavaScript application can introduce NUL (`\0`) characters into a - * map object's string value which will truncate it in a C application. - */ -static void test_get_NUL_string_value(void** state) { - /* - import * as Automerge from "@automerge/automerge"; - let doc = Automerge.init(); - doc = Automerge.change(doc, doc => { - doc.oops = 'o\0ps'; - }); - const bytes = Automerge.save(doc); - console.log("static uint8_t const SAVED_DOC[] = {" + Array.apply([], - bytes).join(", ") + "};"); - */ - static uint8_t const OOPS_VALUE[] = {'o', '\0', 'p', 's'}; - static size_t const OOPS_SIZE = sizeof(OOPS_VALUE) / sizeof(uint8_t); - - static uint8_t const SAVED_DOC[] = { - 133, 111, 74, 131, 63, 94, 151, 29, 0, 116, 1, 16, 156, 159, 189, 12, 125, 55, 71, 154, 136, - 104, 237, 186, 45, 224, 32, 22, 1, 36, 163, 164, 222, 81, 42, 1, 247, 231, 156, 54, 222, 76, - 6, 109, 18, 172, 75, 36, 118, 120, 68, 73, 87, 186, 230, 127, 68, 19, 81, 149, 185, 6, 1, - 2, 3, 2, 19, 2, 35, 2, 64, 2, 86, 2, 8, 21, 6, 33, 2, 35, 2, 52, 1, 66, - 2, 86, 2, 87, 4, 128, 1, 2, 127, 0, 127, 1, 127, 1, 127, 0, 127, 0, 127, 7, 127, - 4, 111, 111, 112, 115, 127, 0, 127, 1, 1, 127, 1, 127, 70, 111, 0, 112, 115, 127, 0, 0}; - static size_t const SAVED_DOC_SIZE = sizeof(SAVED_DOC) / sizeof(uint8_t); - - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - AMdoc* doc; - assert_true(AMitemToDoc( - AMstackItem(stack_ptr, AMload(SAVED_DOC, SAVED_DOC_SIZE), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - AMbyteSpan str; - assert_true(AMitemToStr( - AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("oops"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), - &str)); - assert_int_not_equal(str.count, strlen(OOPS_VALUE)); - assert_int_equal(str.count, OOPS_SIZE); - assert_memory_equal(str.src, OOPS_VALUE, str.count); -} - -static void test_range_iter_map(void** state) { - BaseState* base_state = *state; 
- AMstack** stack_ptr = &base_state->stack; - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("a"), 3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("b"), 4), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("c"), 5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("d"), 6), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("a"), 7), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("a"), 8), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("d"), 9), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMactorId const* actor_id; - assert_true(AMitemToActorId(AMstackItem(stack_ptr, AMgetActorId(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), - &actor_id)); - AMitems map_items = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_UINT)); - assert_int_equal(AMitemsSize(&map_items), 4); - - /* ["b"-"d") */ - AMitems range = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr("b"), AMstr("d"), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_UINT)); - /* First */ - AMitem* next = AMitemsNext(&range, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - AMbyteSpan key; - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "b", key.count); - 
assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); - uint64_t uint; - assert_true(AMitemToUint(next, &uint)); - assert_int_equal(uint, 4); - AMobjId const* next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Second */ - next = AMitemsNext(&range, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "c", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); - assert_true(AMitemToUint(next, &uint)); - assert_int_equal(uint, 5); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Third */ - assert_null(AMitemsNext(&range, 1)); - - /* ["b"-) */ - range = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr("b"), AMstr(NULL), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_UINT)); - /* First */ - next = AMitemsNext(&range, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "b", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); - assert_true(AMitemToUint(next, &uint)); - assert_int_equal(uint, 4); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Second */ - next = AMitemsNext(&range, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, 
"c", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); - assert_true(AMitemToUint(next, &uint)); - assert_int_equal(uint, 5); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Third */ - next = AMitemsNext(&range, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "d", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); - assert_true(AMitemToUint(next, &uint)); - assert_int_equal(uint, 9); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 7); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Fourth */ - assert_null(AMitemsNext(&range, 1)); - - /* [-"d") */ - range = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr("d"), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_UINT)); - /* First */ - next = AMitemsNext(&range, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "a", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); - assert_true(AMitemToUint(next, &uint)); - assert_int_equal(uint, 8); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 6); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Second */ - next = AMitemsNext(&range, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "b", 
key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); - assert_true(AMitemToUint(next, &uint)); - assert_int_equal(uint, 4); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Third */ - next = AMitemsNext(&range, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "c", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); - assert_true(AMitemToUint(next, &uint)); - assert_int_equal(uint, 5); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Fourth */ - assert_null(AMitemsNext(&range, 1)); - - /* ["a"-) */ - range = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr("a"), AMstr(NULL), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_UINT)); - /* First */ - next = AMitemsNext(&range, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "a", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); - assert_true(AMitemToUint(next, &uint)); - assert_int_equal(uint, 8); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 6); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Second */ - next = AMitemsNext(&range, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "b", key.count); - 
assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); - assert_true(AMitemToUint(next, &uint)); - assert_int_equal(uint, 4); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Third */ - next = AMitemsNext(&range, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "c", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); - assert_true(AMitemToUint(next, &uint)); - assert_int_equal(uint, 5); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Fourth */ - next = AMitemsNext(&range, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "d", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); - assert_true(AMitemToUint(next, &uint)); - assert_int_equal(uint, 9); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 7); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Fifth */ - assert_null(AMitemsNext(&range, 1)); -} - -static void test_map_range_back_and_forth_single(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - AMactorId const* actor_id; - assert_true(AMitemToActorId(AMstackItem(stack_ptr, AMgetActorId(doc), cmocka_cb, 
AMexpect(AM_VAL_TYPE_ACTOR_ID)), - &actor_id)); - - AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("1"), AMstr("a")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("2"), AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("3"), AMstr("c")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - - /* Forward, back, back. */ - AMitems range_all = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_STR)); - /* First */ - AMitem* next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - AMbyteSpan key; - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "1", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - AMbyteSpan str; - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "a", str.count); - AMobjId const* next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Third */ - AMitems range_back_all = AMitemsReversed(&range_all); - range_back_all = AMitemsRewound(&range_back_all); - AMitem* next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "3", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - AMbyteSpan str_back; - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 1); - assert_memory_equal(str_back.src, "c", str_back.count); - AMobjId const* next_back_obj_id = AMitemObjId(next_back); - 
assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - /* Second */ - next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "2", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 1); - assert_memory_equal(str_back.src, "b", str_back.count); - next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - - /* Forward, back, forward. */ - range_all = AMitemsRewound(&range_all); - range_back_all = AMitemsRewound(&range_back_all); - /* First */ - next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "1", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "a", str.count); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Third */ - next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "3", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - 
assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 1); - assert_memory_equal(str_back.src, "c", str_back.count); - next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - /* Second */ - next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "2", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "b", str.count); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - - /* Forward, forward, forward. 
*/ - range_all = AMitemsRewound(&range_all); - /* First */ - next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "1", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "a", str.count); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Second */ - next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "2", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "b", str.count); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Third */ - next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "3", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "c", str.count); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Forward 
stop */ - assert_null(AMitemsNext(&range_all, 1)); - - /* Back, back, back. */ - range_back_all = AMitemsRewound(&range_back_all); - /* Third */ - next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "3", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 1); - assert_memory_equal(str_back.src, "c", str_back.count); - next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - /* Second */ - next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "2", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 1); - assert_memory_equal(str_back.src, "b", str_back.count); - next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - /* First */ - next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "1", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next_back, &str_back)); - 
assert_int_equal(str_back.count, 1); - assert_memory_equal(str_back.src, "a", str_back.count); - next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - /* Back stop */ - assert_null(AMitemsNext(&range_back_all, 1)); -} - -static void test_map_range_back_and_forth_double(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - AMdoc* doc1; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); - AMactorId const* actor_id1; - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromBytes("\0", 1), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id1)); - AMstackItem(NULL, AMsetActorId(doc1, actor_id1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - - AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("1"), AMstr("a")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("2"), AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("3"), AMstr("c")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - - /* The second actor should win all conflicts here. 
*/ - AMdoc* doc2; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); - AMactorId const* actor_id2; - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromBytes("\1", 1), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id2)); - AMstackItem(NULL, AMsetActorId(doc2, actor_id2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("1"), AMstr("aa")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("2"), AMstr("bb")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("3"), AMstr("cc")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - - AMstackItem(NULL, AMmerge(doc1, doc2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - - /* Forward, back, back. */ - AMitems range_all = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_STR)); - /* First */ - AMitem* next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - AMbyteSpan key; - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "1", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - AMbyteSpan str; - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 2); - assert_memory_equal(str.src, "aa", str.count); - AMobjId const* next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - /* Third */ - AMitems range_back_all = AMitemsReversed(&range_all); - range_back_all = AMitemsRewound(&range_back_all); - AMitem* next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - 
assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "3", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - AMbyteSpan str_back; - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 2); - assert_memory_equal(str_back.src, "cc", str_back.count); - AMobjId const* next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - /* Second */ - next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "2", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 2); - assert_memory_equal(str_back.src, "bb", str_back.count); - next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - - /* Forward, back, forward. 
*/ - range_all = AMitemsRewound(&range_all); - range_back_all = AMitemsRewound(&range_back_all); - /* First */ - next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "1", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 2); - assert_memory_equal(str.src, "aa", str.count); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - /* Third */ - next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "3", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 2); - assert_memory_equal(str_back.src, "cc", str_back.count); - next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - /* Second */ - next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "2", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 2); - assert_memory_equal(str.src, "bb", str.count); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - 
assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - - /* Forward, forward, forward. */ - range_all = AMitemsRewound(&range_all); - /* First */ - next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "1", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 2); - assert_memory_equal(str.src, "aa", str.count); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - /* Second */ - next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "2", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 2); - assert_memory_equal(str.src, "bb", str.count); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - /* Third */ - next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "3", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 2); - assert_memory_equal(str.src, "cc", str.count); - next_obj_id = AMitemObjId(next); - 
assert_int_equal(AMobjIdCounter(next_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - /* Forward stop */ - assert_null(AMitemsNext(&range_all, 1)); - - /* Back, back, back. */ - range_back_all = AMitemsRewound(&range_back_all); - /* Third */ - next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "3", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 2); - assert_memory_equal(str_back.src, "cc", str_back.count); - next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - /* Second */ - next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "2", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 2); - assert_memory_equal(str_back.src, "bb", str_back.count); - next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - /* First */ - next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - 
assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "1", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 2); - assert_memory_equal(str_back.src, "aa", str_back.count); - next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - /* Back stop */ - assert_null(AMitemsNext(&range_back_all, 1)); -} - -static void test_map_range_at_back_and_forth_single(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - AMactorId const* actor_id; - assert_true(AMitemToActorId(AMstackItem(stack_ptr, AMgetActorId(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), - &actor_id)); - - AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("1"), AMstr("a")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("2"), AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("3"), AMstr("c")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - - AMitems const heads = AMstackItems(stack_ptr, AMgetHeads(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - - /* Forward, back, back. 
*/ - AMitems range_all = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads), cmocka_cb, - AMexpect(AM_VAL_TYPE_STR)); - /* First */ - AMitem* next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - AMbyteSpan key; - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "1", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - AMbyteSpan str; - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "a", str.count); - AMobjId const* next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Third */ - AMitems range_back_all = AMitemsReversed(&range_all); - range_back_all = AMitemsRewound(&range_back_all); - AMitem* next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "3", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - AMbyteSpan str_back; - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 1); - assert_memory_equal(str_back.src, "c", str_back.count); - AMobjId const* next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - /* Second */ - next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, 
"2", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 1); - assert_memory_equal(str_back.src, "b", str_back.count); - next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - - /* Forward, back, forward. */ - range_all = AMitemsRewound(&range_all); - range_back_all = AMitemsRewound(&range_back_all); - /* First */ - next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "1", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "a", str.count); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Third */ - next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "3", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 1); - assert_memory_equal(str_back.src, "c", str_back.count); - next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - /* Second */ - next = 
AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "2", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "b", str.count); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - - /* Forward, forward, forward. */ - range_all = AMitemsRewound(&range_all); - /* First */ - next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "1", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "a", str.count); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Second */ - next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "2", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "b", str.count); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - 
/* Third */ - next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "3", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "c", str.count); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Forward stop */ - assert_null(AMitemsNext(&range_all, 1)); - - /* Back, back, back. */ - range_back_all = AMitemsRewound(&range_back_all); - /* Third */ - next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "3", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 1); - assert_memory_equal(str_back.src, "c", str_back.count); - next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - /* Second */ - next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "2", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 1); - assert_memory_equal(str_back.src, "b", 
str_back.count); - next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - /* First */ - next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "1", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 1); - assert_memory_equal(str_back.src, "a", str_back.count); - next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - /* Back stop */ - assert_null(AMitemsNext(&range_back_all, 1)); -} - -static void test_map_range_at_back_and_forth_double(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - AMdoc* doc1; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); - AMactorId const* actor_id1; - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromBytes("\0", 1), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id1)); - AMstackItem(NULL, AMsetActorId(doc1, actor_id1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - - AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("1"), AMstr("a")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("2"), AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("3"), AMstr("c")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - - /* The second actor should win all conflicts here. 
*/ - AMdoc* doc2; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); - AMactorId const* actor_id2; - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromBytes("\1", 1), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id2)); - AMstackItem(NULL, AMsetActorId(doc2, actor_id2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("1"), AMstr("aa")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("2"), AMstr("bb")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("3"), AMstr("cc")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - - AMstackItem(NULL, AMmerge(doc1, doc2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMitems const heads = AMstackItems(stack_ptr, AMgetHeads(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - - /* Forward, back, back. */ - AMitems range_all = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads), cmocka_cb, - AMexpect(AM_VAL_TYPE_STR)); - /* First */ - AMitem* next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - AMbyteSpan key; - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "1", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - AMbyteSpan str; - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 2); - assert_memory_equal(str.src, "aa", str.count); - AMobjId const* next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - /* Third */ - AMitems range_back_all = AMitemsReversed(&range_all); - range_back_all = AMitemsRewound(&range_back_all); - AMitem* next_back = AMitemsNext(&range_back_all, 
1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "3", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - AMbyteSpan str_back; - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 2); - assert_memory_equal(str_back.src, "cc", str_back.count); - AMobjId const* next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - /* Second */ - next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "2", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 2); - assert_memory_equal(str_back.src, "bb", str_back.count); - next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - - /* Forward, back, forward. 
*/ - range_all = AMitemsRewound(&range_all); - range_back_all = AMitemsRewound(&range_back_all); - /* First */ - next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "1", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 2); - assert_memory_equal(str.src, "aa", str.count); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - /* Third */ - next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "3", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 2); - assert_memory_equal(str_back.src, "cc", str_back.count); - next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - /* Second */ - next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "2", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 2); - assert_memory_equal(str.src, "bb", str.count); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - 
assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - - /* Forward, forward, forward. */ - range_all = AMitemsRewound(&range_all); - /* First */ - next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "1", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 2); - assert_memory_equal(str.src, "aa", str.count); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - /* Second */ - next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "2", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 2); - assert_memory_equal(str.src, "bb", str.count); - next_obj_id = AMitemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - /* Third */ - next = AMitemsNext(&range_all, 1); - assert_non_null(next); - assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "3", key.count); - assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next, &str)); - assert_int_equal(str.count, 2); - assert_memory_equal(str.src, "cc", str.count); - next_obj_id = AMitemObjId(next); - 
assert_int_equal(AMobjIdCounter(next_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - /* Forward stop */ - assert_null(AMitemsNext(&range_all, 1)); - - /* Back, back, back. */ - range_back_all = AMitemsRewound(&range_back_all); - /* Third */ - next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "3", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 2); - assert_memory_equal(str_back.src, "cc", str_back.count); - next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - /* Second */ - next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "2", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 2); - assert_memory_equal(str_back.src, "bb", str_back.count); - next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - /* First */ - next_back = AMitemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(next_back, &key)); - 
assert_int_equal(key.count, 1); - assert_memory_equal(key.src, "1", key.count); - assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); - assert_true(AMitemToStr(next_back, &str_back)); - assert_int_equal(str_back.count, 2); - assert_memory_equal(str_back.src, "aa", str_back.count); - next_back_obj_id = AMitemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - /* Back stop */ - assert_null(AMitemsNext(&range_back_all, 1)); -} - -static void test_get_range_values(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - AMdoc* doc1; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); - AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("aa"), AMstr("aaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("bb"), AMstr("bbb")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("cc"), AMstr("ccc")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("dd"), AMstr("ddd")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(doc1, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - - AMitems const v1 = AMstackItems(stack_ptr, AMgetHeads(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMdoc* doc2; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMfork(doc1, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); - - AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("cc"), AMstr("ccc V2")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(doc1, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - - AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("cc"), AMstr("ccc V3")), cmocka_cb, 
AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(doc2, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - - AMstackItem(NULL, AMmerge(doc1, doc2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - - /* Forward vs. reverse: complete current map range. */ - AMitems range = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_STR)); - size_t size = AMitemsSize(&range); - assert_int_equal(size, 4); - AMitems range_back = AMitemsReversed(&range); - assert_int_equal(AMitemsSize(&range_back), size); - AMbyteSpan key; - assert_true(AMitemKey(AMitemsNext(&range, 1), &key)); - assert_memory_equal(key.src, "aa", key.count); - assert_true(AMitemKey(AMitemsNext(&range_back, 1), &key)); - assert_memory_equal(key.src, "dd", key.count); - - AMitem *item1, *item_back1; - size_t count, middle = size / 2; - range = AMitemsRewound(&range); - range_back = AMitemsRewound(&range_back); - for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; - item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { - AMbyteSpan key1, key_back1; - assert_true(AMitemKey(item1, &key1)); - assert_true(AMitemKey(item_back1, &key_back1)); - if ((count == middle) && (middle & 1)) { - /* The iterators are crossing in the middle. */ - assert_int_equal(AMstrCmp(key1, key_back1), 0); - assert_true(AMitemEqual(item1, item_back1)); - assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); - } else { - assert_int_not_equal(AMstrCmp(key1, key_back1), 0); - } - AMitem* item2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key1, NULL), NULL, NULL); - AMitem* item_back2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key_back1, NULL), NULL, NULL); - /** \note An item returned from an `AM...Get()` call doesn't include the - index used to retrieve it. 
*/ - assert_false(AMitemIdxType(item2)); - assert_false(AMitemIdxType(item_back2)); - assert_true(AMitemEqual(item1, item2)); - assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); - assert_true(AMitemEqual(item1, item2)); - assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); - AMresultFree(AMstackPop(stack_ptr, NULL)); - } - - /* Forward vs. reverse: partial current map range. */ - range = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr("aa"), AMstr("dd"), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_STR)); - size = AMitemsSize(&range); - assert_int_equal(size, 3); - range_back = AMitemsReversed(&range); - assert_int_equal(AMitemsSize(&range_back), size); - assert_true(AMitemKey(AMitemsNext(&range, 1), &key)); - assert_memory_equal(key.src, "aa", key.count); - assert_true(AMitemKey(AMitemsNext(&range_back, 1), &key)); - assert_memory_equal(key.src, "cc", key.count); - - middle = size / 2; - range = AMitemsRewound(&range); - range_back = AMitemsRewound(&range_back); - for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; - item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { - AMbyteSpan key1, key_back1; - assert_true(AMitemKey(item1, &key1)); - assert_true(AMitemKey(item_back1, &key_back1)); - if ((count == middle) && (middle & 1)) { - /* The iterators are crossing in the middle. */ - assert_int_equal(AMstrCmp(key1, key_back1), 0); - assert_true(AMitemEqual(item1, item_back1)); - assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); - } else { - assert_int_not_equal(AMstrCmp(key1, key_back1), 0); - } - AMitem* item2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key1, NULL), NULL, NULL); - AMitem* item_back2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key_back1, NULL), NULL, NULL); - /** \note An item returned from an `AM...Get()` call doesn't include the - index used to retrieve it. 
*/ - assert_false(AMitemIdxType(item2)); - assert_false(AMitemIdxType(item_back2)); - assert_true(AMitemEqual(item1, item2)); - assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); - assert_true(AMitemEqual(item_back1, item_back2)); - assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); - AMresultFree(AMstackPop(stack_ptr, NULL)); - } - - /* Forward vs. reverse: complete historical map range. */ - range = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), &v1), cmocka_cb, - AMexpect(AM_VAL_TYPE_STR)); - size = AMitemsSize(&range); - assert_int_equal(size, 4); - range_back = AMitemsReversed(&range); - assert_int_equal(AMitemsSize(&range_back), size); - assert_true(AMitemKey(AMitemsNext(&range, 1), &key)); - assert_memory_equal(key.src, "aa", key.count); - assert_true(AMitemKey(AMitemsNext(&range_back, 1), &key)); - assert_memory_equal(key.src, "dd", key.count); - - middle = size / 2; - range = AMitemsRewound(&range); - range_back = AMitemsRewound(&range_back); - for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; - item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { - AMbyteSpan key1, key_back1; - assert_true(AMitemKey(item1, &key1)); - assert_true(AMitemKey(item_back1, &key_back1)); - if ((count == middle) && (middle & 1)) { - /* The iterators are crossing in the middle. */ - assert_int_equal(AMstrCmp(key1, key_back1), 0); - assert_true(AMitemEqual(item1, item_back1)); - assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); - } else { - assert_int_not_equal(AMstrCmp(key1, key_back1), 0); - } - AMitem* item2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key1, &v1), NULL, NULL); - AMitem* item_back2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key_back1, &v1), NULL, NULL); - /** \note An item returned from an `AM...Get()` call doesn't include the - index used to retrieve it. 
*/ - assert_false(AMitemIdxType(item2)); - assert_false(AMitemIdxType(item_back2)); - assert_true(AMitemEqual(item1, item2)); - assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); - assert_true(AMitemEqual(item_back1, item_back2)); - assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); - AMresultFree(AMstackPop(stack_ptr, NULL)); - } - - /* Forward vs. reverse: partial historical map range. */ - range = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr("bb"), AMstr(NULL), &v1), cmocka_cb, - AMexpect(AM_VAL_TYPE_STR)); - size = AMitemsSize(&range); - assert_int_equal(size, 3); - range_back = AMitemsReversed(&range); - assert_int_equal(AMitemsSize(&range_back), size); - assert_true(AMitemKey(AMitemsNext(&range, 1), &key)); - assert_memory_equal(key.src, "bb", key.count); - assert_true(AMitemKey(AMitemsNext(&range_back, 1), &key)); - assert_memory_equal(key.src, "dd", key.count); - - middle = size / 2; - range = AMitemsRewound(&range); - range_back = AMitemsRewound(&range_back); - for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; - item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { - AMbyteSpan key1, key_back1; - assert_true(AMitemKey(item1, &key1)); - assert_true(AMitemKey(item_back1, &key_back1)); - if ((count == middle) && (middle & 1)) { - /* The iterators are crossing in the middle. */ - assert_int_equal(AMstrCmp(key1, key_back1), 0); - assert_true(AMitemEqual(item1, item_back1)); - assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); - } else { - assert_int_not_equal(AMstrCmp(key1, key_back1), 0); - } - AMitem* item2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key1, &v1), NULL, NULL); - AMitem* item_back2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key_back1, &v1), NULL, NULL); - /** \note An item returned from an `AM...Get()` call doesn't include the - index used to retrieve it. 
*/ - assert_false(AMitemIdxType(item2)); - assert_false(AMitemIdxType(item_back2)); - assert_true(AMitemEqual(item1, item2)); - assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); - assert_true(AMitemEqual(item_back1, item_back2)); - assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); - AMresultFree(AMstackPop(stack_ptr, NULL)); - } - - /* Map range vs. object range: complete current. */ - range = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_STR)); - AMitems obj_items = AMstackItems(stack_ptr, AMobjItems(doc1, AM_ROOT, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - assert_int_equal(AMitemsSize(&range), AMitemsSize(&obj_items)); - - AMitem *item, *obj_item; - for (item = NULL, obj_item = NULL; item && obj_item; - item = AMitemsNext(&range, 1), obj_item = AMitemsNext(&obj_items, 1)) { - /** \note Object iteration doesn't yield any item indices. */ - assert_true(AMitemIdxType(item)); - assert_false(AMitemIdxType(obj_item)); - assert_true(AMitemEqual(item, obj_item)); - assert_true(AMobjIdEqual(AMitemObjId(item), AMitemObjId(obj_item))); - } - - /* Map range vs. object range: complete historical. */ - range = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), &v1), cmocka_cb, - AMexpect(AM_VAL_TYPE_STR)); - obj_items = AMstackItems(stack_ptr, AMobjItems(doc1, AM_ROOT, &v1), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - assert_int_equal(AMitemsSize(&range), AMitemsSize(&obj_items)); - - for (item = NULL, obj_item = NULL; item && obj_item; - item = AMitemsNext(&range, 1), obj_item = AMitemsNext(&obj_items, 1)) { - /** \note Object iteration doesn't yield any item indices. 
*/ - assert_true(AMitemIdxType(item)); - assert_false(AMitemIdxType(obj_item)); - assert_true(AMitemEqual(item, obj_item)); - assert_true(AMobjIdEqual(AMitemObjId(item), AMitemObjId(obj_item))); - } -} - -int run_map_tests(void) { - const struct CMUnitTest tests[] = { - cmocka_unit_test(test_AMmapIncrement), - cmocka_unit_test(test_AMmapPut(Bool)), - cmocka_unit_test(test_AMmapPutBytes), - cmocka_unit_test(test_AMmapPut(Counter)), - cmocka_unit_test(test_AMmapPut(F64)), - cmocka_unit_test(test_AMmapPut(Int)), - cmocka_unit_test(test_AMmapPutNull), - cmocka_unit_test(test_AMmapPutObject(List)), - cmocka_unit_test(test_AMmapPutObject(Map)), - cmocka_unit_test(test_AMmapPutObject(Text)), - cmocka_unit_test(test_AMmapPutStr), - cmocka_unit_test(test_AMmapPut(Timestamp)), - cmocka_unit_test(test_AMmapPut(Uint)), - cmocka_unit_test_setup_teardown(test_get_NUL_key, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_get_NUL_string_value, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_range_iter_map, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_map_range_back_and_forth_single, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_map_range_back_and_forth_double, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_map_range_at_back_and_forth_single, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_map_range_at_back_and_forth_double, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_get_range_values, setup_base, teardown_base), - }; - - return cmocka_run_group_tests(tests, setup_doc, teardown_doc); -} diff --git a/rust/automerge-c/test/ported_wasm/basic_tests.c b/rust/automerge-c/test/ported_wasm/basic_tests.c deleted file mode 100644 index b83ff132..00000000 --- a/rust/automerge-c/test/ported_wasm/basic_tests.c +++ /dev/null @@ -1,1642 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include - -/* third-party */ -#include - -/* local 
*/ -#include -#include -#include -#include "../base_state.h" -#include "../cmocka_utils.h" - -/** - * \brief default import init() should return a promise - */ -static void test_default_import_init_should_return_a_promise(void** state); - -/** - * \brief should create, clone and free - */ -static void test_create_clone_and_free(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const doc1 = create() */ - AMdoc* doc1; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); - /* const doc2 = doc1.clone() */ - AMdoc* doc2; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMclone(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); -} - -/** - * \brief should be able to start and commit - */ -static void test_start_and_commit(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const doc = create() */ - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - /* doc.commit() */ - AMstackItems(stack_ptr, AMemptyChange(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); -} - -/** - * \brief getting a nonexistent prop does not throw an error - */ -static void test_getting_a_nonexistent_prop_does_not_throw_an_error(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const doc = create() */ - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - /* const root = "_root" */ - /* const result = doc.getWithType(root, "hello") */ - /* assert.deepEqual(result, undefined) */ - AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("hello"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); -} - -/** - * \brief should be able to set and get a simple value - */ -static void test_should_be_able_to_set_and_get_a_simple_value(void** 
state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const doc: Automerge = create("aabbcc") */ - AMactorId const* actor_id; - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aabbcc")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), - &actor_id)); - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - /* const root = "_root" */ - /* let result */ - /* */ - /* doc.put(root, "hello", "world") */ - AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("hello"), AMstr("world")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc.put(root, "number1", 5, "uint") */ - AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("number1"), 5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc.put(root, "number2", 5) */ - AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("number2"), 5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc.put(root, "number3", 5.5) */ - AMstackItem(NULL, AMmapPutF64(doc, AM_ROOT, AMstr("number3"), 5.5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc.put(root, "number4", 5.5, "f64") */ - AMstackItem(NULL, AMmapPutF64(doc, AM_ROOT, AMstr("number4"), 5.5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc.put(root, "number5", 5.5, "int") */ - AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("number5"), 5.5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc.put(root, "bool", true) */ - AMstackItem(NULL, AMmapPutBool(doc, AM_ROOT, AMstr("bool"), true), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc.put(root, "time1", 1000, "timestamp") */ - AMstackItem(NULL, AMmapPutTimestamp(doc, AM_ROOT, AMstr("time1"), 1000), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc.put(root, "time2", new Date(1001)) */ - AMstackItem(NULL, AMmapPutTimestamp(doc, AM_ROOT, AMstr("time2"), 1001), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc.putObject(root, "list", []); */ - AMstackItem(NULL, AMmapPutObject(doc, AM_ROOT, AMstr("list"), 
AM_OBJ_TYPE_LIST), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE)); - /* doc.put(root, "null", null) */ - AMstackItem(NULL, AMmapPutNull(doc, AM_ROOT, AMstr("null")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* */ - /* result = doc.getWithType(root, "hello") */ - /* assert.deepEqual(result, ["str", "world"]) */ - /* assert.deepEqual(doc.get("/", "hello"), "world") */ - AMbyteSpan str; - assert_true(AMitemToStr( - AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("hello"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), - &str)); - assert_int_equal(str.count, strlen("world")); - assert_memory_equal(str.src, "world", str.count); - /* assert.deepEqual(doc.get("/", "hello"), "world") */ - /* */ - /* result = doc.getWithType(root, "number1") */ - /* assert.deepEqual(result, ["uint", 5]) */ - uint64_t uint; - assert_true(AMitemToUint( - AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("number1"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_UINT)), - &uint)); - assert_int_equal(uint, 5); - /* assert.deepEqual(doc.get("/", "number1"), 5) */ - /* */ - /* result = doc.getWithType(root, "number2") */ - /* assert.deepEqual(result, ["int", 5]) */ - int64_t int_; - assert_true(AMitemToInt( - AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("number2"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_INT)), - &int_)); - assert_int_equal(int_, 5); - /* */ - /* result = doc.getWithType(root, "number3") */ - /* assert.deepEqual(result, ["f64", 5.5]) */ - double f64; - assert_true(AMitemToF64( - AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("number3"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_F64)), - &f64)); - assert_float_equal(f64, 5.5, DBL_EPSILON); - /* */ - /* result = doc.getWithType(root, "number4") */ - /* assert.deepEqual(result, ["f64", 5.5]) */ - assert_true(AMitemToF64( - AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("number4"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_F64)), - &f64)); - assert_float_equal(f64, 5.5, DBL_EPSILON); - /* */ - /* result = doc.getWithType(root, 
"number5") */ - /* assert.deepEqual(result, ["int", 5]) */ - assert_true(AMitemToInt( - AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("number5"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_INT)), - &int_)); - assert_int_equal(int_, 5); - /* */ - /* result = doc.getWithType(root, "bool") */ - /* assert.deepEqual(result, ["boolean", true]) */ - bool boolean; - assert_true(AMitemToBool( - AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("bool"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_BOOL)), - &boolean)); - assert_true(boolean); - /* */ - /* doc.put(root, "bool", false, "boolean") */ - AMstackItem(NULL, AMmapPutBool(doc, AM_ROOT, AMstr("bool"), false), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* */ - /* result = doc.getWithType(root, "bool") */ - /* assert.deepEqual(result, ["boolean", false]) */ - assert_true(AMitemToBool( - AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("bool"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_BOOL)), - &boolean)); - assert_false(boolean); - /* */ - /* result = doc.getWithType(root, "time1") */ - /* assert.deepEqual(result, ["timestamp", new Date(1000)]) */ - int64_t timestamp; - assert_true(AMitemToTimestamp(AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("time1"), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_TIMESTAMP)), - ×tamp)); - assert_int_equal(timestamp, 1000); - /* */ - /* result = doc.getWithType(root, "time2") */ - /* assert.deepEqual(result, ["timestamp", new Date(1001)]) */ - assert_true(AMitemToTimestamp(AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("time2"), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_TIMESTAMP)), - ×tamp)); - assert_int_equal(timestamp, 1001); - /* */ - /* result = doc.getWithType(root, "list") */ - /* assert.deepEqual(result, ["list", "10@aabbcc"]); */ - AMobjId const* const list = AMitemObjId( - AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("list"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - assert_int_equal(AMobjIdCounter(list), 10); - str = 
AMactorIdStr(AMobjIdActorId(list)); - assert_int_equal(str.count, strlen("aabbcc")); - assert_memory_equal(str.src, "aabbcc", str.count); - /* */ - /* result = doc.getWithType(root, "null") */ - /* assert.deepEqual(result, ["null", null]); */ - AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("null"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_NULL)); -} - -/** - * \brief should be able to use bytes - */ -static void test_should_be_able_to_use_bytes(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const doc = create() */ - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - /* doc.put("_root", "data1", new Uint8Array([10, 11, 12])); */ - static uint8_t const DATA1[] = {10, 11, 12}; - AMstackItem(NULL, AMmapPutBytes(doc, AM_ROOT, AMstr("data1"), AMbytes(DATA1, sizeof(DATA1))), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - /* doc.put("_root", "data2", new Uint8Array([13, 14, 15]), "bytes"); */ - static uint8_t const DATA2[] = {13, 14, 15}; - AMstackItem(NULL, AMmapPutBytes(doc, AM_ROOT, AMstr("data2"), AMbytes(DATA2, sizeof(DATA2))), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - /* const value1 = doc.getWithType("_root", "data1") */ - AMbyteSpan value1; - assert_true(AMitemToBytes( - AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("data1"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), - &value1)); - /* assert.deepEqual(value1, ["bytes", new Uint8Array([10, 11, 12])]); */ - assert_int_equal(value1.count, sizeof(DATA1)); - assert_memory_equal(value1.src, DATA1, sizeof(DATA1)); - /* const value2 = doc.getWithType("_root", "data2") */ - AMbyteSpan value2; - assert_true(AMitemToBytes( - AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("data2"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), - &value2)); - /* assert.deepEqual(value2, ["bytes", new Uint8Array([13, 14, 15])]); */ - assert_int_equal(value2.count, sizeof(DATA2)); - 
assert_memory_equal(value2.src, DATA2, sizeof(DATA2)); -} - -/** - * \brief should be able to make subobjects - */ -static void test_should_be_able_to_make_subobjects(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const doc = create() */ - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - /* const root = "_root" */ - /* let result */ - /* */ - /* const submap = doc.putObject(root, "submap", {}) */ - AMobjId const* const submap = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("submap"), AM_OBJ_TYPE_MAP), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - /* doc.put(submap, "number", 6, "uint") */ - AMstackItem(NULL, AMmapPutUint(doc, submap, AMstr("number"), 6), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* assert.strictEqual(doc.pendingOps(), 2) */ - assert_int_equal(AMpendingOps(doc), 2); - /* */ - /* result = doc.getWithType(root, "submap") */ - /* assert.deepEqual(result, ["map", submap]) */ - assert_true(AMobjIdEqual(AMitemObjId(AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("submap"), NULL), - cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))), - submap)); - /* */ - /* result = doc.getWithType(submap, "number") */ - /* assert.deepEqual(result, ["uint", 6]) */ - uint64_t uint; - assert_true(AMitemToUint( - AMstackItem(stack_ptr, AMmapGet(doc, submap, AMstr("number"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_UINT)), - &uint)); - assert_int_equal(uint, 6); -} - -/** - * \brief should be able to make lists - */ -static void test_should_be_able_to_make_lists(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const doc = create() */ - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - /* const root = "_root" */ - /* */ - /* const sublist = doc.putObject(root, "numbers", []) */ - AMobjId const* const 
sublist = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("numbers"), AM_OBJ_TYPE_LIST), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - /* doc.insert(sublist, 0, "a"); */ - AMstackItem(NULL, AMlistPutStr(doc, sublist, 0, true, AMstr("a")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc.insert(sublist, 1, "b"); */ - AMstackItem(NULL, AMlistPutStr(doc, sublist, 1, true, AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc.insert(sublist, 2, "c"); */ - AMstackItem(NULL, AMlistPutStr(doc, sublist, 2, true, AMstr("c")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc.insert(sublist, 0, "z"); */ - AMstackItem(NULL, AMlistPutStr(doc, sublist, 0, true, AMstr("z")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* */ - /* assert.deepEqual(doc.getWithType(sublist, 0), ["str", "z"]) */ - AMbyteSpan str; - assert_true(AMitemToStr( - AMstackItem(stack_ptr, AMlistGet(doc, sublist, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "z", str.count); - /* assert.deepEqual(doc.getWithType(sublist, 1), ["str", "a"]) */ - assert_true(AMitemToStr( - AMstackItem(stack_ptr, AMlistGet(doc, sublist, 1, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "a", str.count); - /* assert.deepEqual(doc.getWithType(sublist, 2), ["str", "b"]) */ - assert_true(AMitemToStr( - AMstackItem(stack_ptr, AMlistGet(doc, sublist, 2, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "b", str.count); - /* assert.deepEqual(doc.getWithType(sublist, 3), ["str", "c"]) */ - assert_true(AMitemToStr( - AMstackItem(stack_ptr, AMlistGet(doc, sublist, 3, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "c", str.count); - /* assert.deepEqual(doc.length(sublist), 4) */ - assert_int_equal(AMobjSize(doc, sublist, 
NULL), 4); - /* */ - /* doc.put(sublist, 2, "b v2"); */ - AMstackItem(NULL, AMlistPutStr(doc, sublist, 2, false, AMstr("b v2")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* */ - /* assert.deepEqual(doc.getWithType(sublist, 2), ["str", "b v2"]) */ - assert_true(AMitemToStr( - AMstackItem(stack_ptr, AMlistGet(doc, sublist, 2, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); - assert_int_equal(str.count, 4); - assert_memory_equal(str.src, "b v2", str.count); - /* assert.deepEqual(doc.length(sublist), 4) */ - assert_int_equal(AMobjSize(doc, sublist, NULL), 4); -} - -/** - * \brief lists have insert, set, splice, and push ops - */ -static void test_lists_have_insert_set_splice_and_push_ops(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const doc = create() */ - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - /* const root = "_root" */ - /* */ - /* const sublist = doc.putObject(root, "letters", []) */ - AMobjId const* const sublist = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("letters"), AM_OBJ_TYPE_LIST), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - /* doc.insert(sublist, 0, "a"); */ - AMstackItem(NULL, AMlistPutStr(doc, sublist, 0, true, AMstr("a")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc.insert(sublist, 0, "b"); */ - AMstackItem(NULL, AMlistPutStr(doc, sublist, 0, true, AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* assert.deepEqual(doc.materialize(), { letters: ["b", "a"] }) */ - AMitem* doc_item = AMstackItem(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE)); - assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); - AMbyteSpan key; - assert_true(AMitemKey(doc_item, &key)); - assert_int_equal(key.count, strlen("letters")); - assert_memory_equal(key.src, "letters", key.count); - { - AMitems list_items = 
AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(doc_item), 0, SIZE_MAX, NULL), - cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - assert_int_equal(AMitemsSize(&list_items), 2); - AMbyteSpan str; - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "b", str.count); - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "a", str.count); - assert_null(AMitemsNext(&list_items, 1)); - } - /* doc.push(sublist, "c"); */ - AMstackItem(NULL, AMlistPutStr(doc, sublist, SIZE_MAX, true, AMstr("c")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* const heads = doc.getHeads() */ - AMitems const heads = AMstackItems(stack_ptr, AMgetHeads(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c"] }) */ - doc_item = AMstackItem(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE)); - assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(doc_item, &key)); - assert_int_equal(key.count, strlen("letters")); - assert_memory_equal(key.src, "letters", key.count); - { - AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(doc_item), 0, SIZE_MAX, NULL), - cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - assert_int_equal(AMitemsSize(&list_items), 3); - AMbyteSpan str; - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "b", str.count); - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "a", str.count); - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "c", str.count); - assert_null(AMitemsNext(&list_items, 1)); - } - /* doc.push(sublist, 3, "timestamp"); */ - 
AMstackItem(NULL, AMlistPutTimestamp(doc, sublist, SIZE_MAX, true, 3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c", new - * Date(3)] } */ - doc_item = AMstackItem(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE)); - assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(doc_item, &key)); - assert_int_equal(key.count, strlen("letters")); - assert_memory_equal(key.src, "letters", key.count); - { - AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(doc_item), 0, SIZE_MAX, NULL), - cmocka_cb, AMexpect(AM_VAL_TYPE_STR | AM_VAL_TYPE_TIMESTAMP)); - assert_int_equal(AMitemsSize(&list_items), 4); - AMbyteSpan str; - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "b", str.count); - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "a", str.count); - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "c", str.count); - int64_t timestamp; - assert_true(AMitemToTimestamp(AMitemsNext(&list_items, 1), ×tamp)); - assert_int_equal(timestamp, 3); - assert_null(AMitemsNext(&list_items, 1)); - } - /* doc.splice(sublist, 1, 1, ["d", "e", "f"]); */ - AMresult* data = AMstackResult( - stack_ptr, AMresultFrom(3, AMitemFromStr(AMstr("d")), AMitemFromStr(AMstr("e")), AMitemFromStr(AMstr("f"))), - NULL, NULL); - AMstackItem(NULL, AMsplice(doc, sublist, 1, 1, AMresultItems(data)), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* assert.deepEqual(doc.materialize(), { letters: ["b", "d", "e", "f", "c", - * new Date(3)] } */ - doc_item = AMstackItem(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE)); - 
assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(doc_item, &key)); - assert_int_equal(key.count, strlen("letters")); - assert_memory_equal(key.src, "letters", key.count); - { - AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(doc_item), 0, SIZE_MAX, NULL), - cmocka_cb, AMexpect(AM_VAL_TYPE_STR | AM_VAL_TYPE_TIMESTAMP)); - AMbyteSpan str; - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "b", str.count); - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "d", str.count); - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "e", str.count); - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "f", str.count); - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "c", str.count); - int64_t timestamp; - assert_true(AMitemToTimestamp(AMitemsNext(&list_items, 1), ×tamp)); - assert_int_equal(timestamp, 3); - assert_null(AMitemsNext(&list_items, 1)); - } - /* doc.put(sublist, 0, "z"); */ - AMstackItem(NULL, AMlistPutStr(doc, sublist, 0, false, AMstr("z")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* assert.deepEqual(doc.materialize(), { letters: ["z", "d", "e", "f", "c", - * new Date(3)] } */ - doc_item = AMstackItem(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE)); - assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(doc_item, &key)); - assert_int_equal(key.count, strlen("letters")); - assert_memory_equal(key.src, "letters", key.count); - { - AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(doc_item), 0, SIZE_MAX, 
NULL), - cmocka_cb, AMexpect(AM_VAL_TYPE_STR | AM_VAL_TYPE_TIMESTAMP)); - AMbyteSpan str; - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "z", str.count); - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "d", str.count); - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "e", str.count); - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "f", str.count); - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "c", str.count); - int64_t timestamp; - assert_true(AMitemToTimestamp(AMitemsNext(&list_items, 1), ×tamp)); - assert_int_equal(timestamp, 3); - assert_null(AMitemsNext(&list_items, 1)); - } - /* assert.deepEqual(doc.materialize(sublist), ["z", "d", "e", "f", "c", new - * Date(3)] */ - AMitems sublist_items = AMstackItems(stack_ptr, AMlistRange(doc, sublist, 0, SIZE_MAX, NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_STR | AM_VAL_TYPE_TIMESTAMP)); - AMbyteSpan str; - assert_true(AMitemToStr(AMitemsNext(&sublist_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "z", str.count); - assert_true(AMitemToStr(AMitemsNext(&sublist_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "d", str.count); - assert_true(AMitemToStr(AMitemsNext(&sublist_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "e", str.count); - assert_true(AMitemToStr(AMitemsNext(&sublist_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "f", str.count); - assert_true(AMitemToStr(AMitemsNext(&sublist_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "c", 
str.count); - int64_t timestamp; - assert_true(AMitemToTimestamp(AMitemsNext(&sublist_items, 1), ×tamp)); - assert_int_equal(timestamp, 3); - assert_null(AMitemsNext(&sublist_items, 1)); - /* assert.deepEqual(doc.length(sublist), 6) */ - assert_int_equal(AMobjSize(doc, sublist, NULL), 6); - /* assert.deepEqual(doc.materialize("/", heads), { letters: ["b", "a", "c"] - * } */ - doc_item = AMstackItem(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE)); - assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(doc_item, &key)); - assert_int_equal(key.count, strlen("letters")); - assert_memory_equal(key.src, "letters", key.count); - { - AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(doc_item), 0, SIZE_MAX, &heads), - cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - AMbyteSpan str; - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "b", str.count); - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "a", str.count); - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "c", str.count); - assert_null(AMitemsNext(&list_items, 1)); - } -} - -/** - * \brief should be able to delete non-existent props - */ -static void test_should_be_able_to_delete_non_existent_props(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const doc = create() */ - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - /* */ - /* doc.put("_root", "foo", "bar") */ - AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("foo"), AMstr("bar")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc.put("_root", "bip", "bap") */ - AMstackItem(NULL, 
AMmapPutStr(doc, AM_ROOT, AMstr("bip"), AMstr("bap")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* const hash1 = doc.commit() */ - AMitems const hash1 = - AMstackItems(stack_ptr, AMcommit(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* */ - /* assert.deepEqual(doc.keys("_root"), ["bip", "foo"]) */ - AMitems keys = AMstackItems(stack_ptr, AMkeys(doc, AM_ROOT, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - AMbyteSpan str; - assert_true(AMitemToStr(AMitemsNext(&keys, 1), &str)); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "bip", str.count); - assert_true(AMitemToStr(AMitemsNext(&keys, 1), &str)); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "foo", str.count); - /* */ - /* doc.delete("_root", "foo") */ - AMstackItem(NULL, AMmapDelete(doc, AM_ROOT, AMstr("foo")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc.delete("_root", "baz") */ - AMstackItem(NULL, AMmapDelete(doc, AM_ROOT, AMstr("baz")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* const hash2 = doc.commit() */ - AMitems const hash2 = - AMstackItems(stack_ptr, AMcommit(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* */ - /* assert.deepEqual(doc.keys("_root"), ["bip"]) */ - keys = AMstackItems(stack_ptr, AMkeys(doc, AM_ROOT, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - assert_true(AMitemToStr(AMitemsNext(&keys, 1), &str)); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "bip", str.count); - /* assert.deepEqual(doc.keys("_root", [hash1]), ["bip", "foo"]) */ - keys = AMstackItems(stack_ptr, AMkeys(doc, AM_ROOT, &hash1), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - assert_true(AMitemToStr(AMitemsNext(&keys, 1), &str)); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "bip", str.count); - assert_true(AMitemToStr(AMitemsNext(&keys, 1), &str)); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "foo", str.count); - /* assert.deepEqual(doc.keys("_root", [hash2]), ["bip"]) */ 
- keys = AMstackItems(stack_ptr, AMkeys(doc, AM_ROOT, &hash2), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - assert_true(AMitemToStr(AMitemsNext(&keys, 1), &str)); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "bip", str.count); -} - -/** - * \brief should be able to del - */ -static void test_should_be_able_to_del(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const doc = create() */ - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - /* const root = "_root" */ - /* */ - /* doc.put(root, "xxx", "xxx"); */ - AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("xxx"), AMstr("xxx")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* assert.deepEqual(doc.getWithType(root, "xxx"), ["str", "xxx"]) */ - AMbyteSpan str; - assert_true(AMitemToStr( - AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("xxx"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), - &str)); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "xxx", str.count); - /* doc.delete(root, "xxx"); */ - AMstackItem(NULL, AMmapDelete(doc, AM_ROOT, AMstr("xxx")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* assert.deepEqual(doc.getWithType(root, "xxx"), undefined) */ - AMstackItem(NULL, AMmapGet(doc, AM_ROOT, AMstr("xxx"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); -} - -/** - * \brief should be able to use counters - */ -static void test_should_be_able_to_use_counters(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const doc = create() */ - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - /* const root = "_root" */ - /* */ - /* doc.put(root, "counter", 10, "counter"); */ - AMstackItem(NULL, AMmapPutCounter(doc, AM_ROOT, AMstr("counter"), 10), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* assert.deepEqual(doc.getWithType(root, 
"counter"), ["counter", 10]) */ - int64_t counter; - assert_true(AMitemToCounter(AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("counter"), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_COUNTER)), - &counter)); - assert_int_equal(counter, 10); - /* doc.increment(root, "counter", 10); */ - AMstackItem(NULL, AMmapIncrement(doc, AM_ROOT, AMstr("counter"), 10), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 20]) */ - assert_true(AMitemToCounter(AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("counter"), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_COUNTER)), - &counter)); - assert_int_equal(counter, 20); - /* doc.increment(root, "counter", -5); */ - AMstackItem(NULL, AMmapIncrement(doc, AM_ROOT, AMstr("counter"), -5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 15]) */ - assert_true(AMitemToCounter(AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("counter"), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_COUNTER)), - &counter)); - assert_int_equal(counter, 15); -} - -/** - * \brief should be able to splice text - */ -static void test_should_be_able_to_splice_text(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const doc = create() */ - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - /* const root = "_root"; */ - /* */ - /* const text = doc.putObject(root, "text", ""); */ - AMobjId const* const text = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - /* doc.splice(text, 0, 0, "hello ") */ - AMstackItem(NULL, AMspliceText(doc, text, 0, 0, AMstr("hello ")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc.splice(text, 6, 0, "world") */ - AMstackItem(NULL, AMspliceText(doc, text, 6, 0, AMstr("world")), cmocka_cb, 
AMexpect(AM_VAL_TYPE_VOID)); - /* doc.splice(text, 11, 0, "!?") */ - AMstackItem(NULL, AMspliceText(doc, text, 11, 0, AMstr("!?")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* assert.deepEqual(doc.getWithType(text, 0), ["str", "h"]) */ - AMbyteSpan str; - assert_true( - AMitemToStr(AMstackItem(stack_ptr, AMlistGet(doc, text, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "h", str.count); - /* assert.deepEqual(doc.getWithType(text, 1), ["str", "e"]) */ - assert_true( - AMitemToStr(AMstackItem(stack_ptr, AMlistGet(doc, text, 1, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "e", str.count); - /* assert.deepEqual(doc.getWithType(text, 9), ["str", "l"]) */ - assert_true( - AMitemToStr(AMstackItem(stack_ptr, AMlistGet(doc, text, 9, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "l", str.count); - /* assert.deepEqual(doc.getWithType(text, 10), ["str", "d"]) */ - assert_true(AMitemToStr( - AMstackItem(stack_ptr, AMlistGet(doc, text, 10, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "d", str.count); - /* assert.deepEqual(doc.getWithType(text, 11), ["str", "!"]) */ - assert_true(AMitemToStr( - AMstackItem(stack_ptr, AMlistGet(doc, text, 11, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "!", str.count); - /* assert.deepEqual(doc.getWithType(text, 12), ["str", "?"]) */ - assert_true(AMitemToStr( - AMstackItem(stack_ptr, AMlistGet(doc, text, 12, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "?", str.count); -} - -/** - * \brief should be able to save all or incrementally - */ -static void test_should_be_able_to_save_all_or_incrementally(void** 
state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const doc = create() */ - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - /* */ - /* doc.put("_root", "foo", 1) */ - AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("foo"), 1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* */ - /* const save1 = doc.save() */ - AMbyteSpan save1; - assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &save1)); - /* */ - /* doc.put("_root", "bar", 2) */ - AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("bar"), 2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* */ - /* const saveMidway = doc.clone().save(); */ - AMdoc* doc_clone; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMclone(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc_clone)); - AMbyteSpan saveMidway; - assert_true( - AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc_clone), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &saveMidway)); - /* */ - /* const save2 = doc.saveIncremental(); */ - AMbyteSpan save2; - assert_true( - AMitemToBytes(AMstackItem(stack_ptr, AMsaveIncremental(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &save2)); - /* */ - /* doc.put("_root", "baz", 3); */ - AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("baz"), 3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* */ - /* const save3 = doc.saveIncremental(); */ - AMbyteSpan save3; - assert_true( - AMitemToBytes(AMstackItem(stack_ptr, AMsaveIncremental(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &save3)); - /* */ - /* const saveA = doc.save(); */ - AMbyteSpan saveA; - assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &saveA)); - /* const saveB = new Uint8Array([...save1, ...save2, ...save3]); */ - size_t const saveB_count = save1.count + save2.count + save3.count; - uint8_t* const saveB_src = test_malloc(saveB_count); - 
memcpy(saveB_src, save1.src, save1.count); - memcpy(saveB_src + save1.count, save2.src, save2.count); - memcpy(saveB_src + save1.count + save2.count, save3.src, save3.count); - /* */ - /* assert.notDeepEqual(saveA, saveB); */ - assert_memory_not_equal(saveA.src, saveB_src, saveA.count); - /* */ - /* const docA = load(saveA); */ - AMdoc* docA; - assert_true(AMitemToDoc( - AMstackItem(stack_ptr, AMload(saveA.src, saveA.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &docA)); - /* const docB = load(saveB); */ - AMdoc* docB; - assert_true(AMitemToDoc( - AMstackItem(stack_ptr, AMload(saveB_src, saveB_count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &docB)); - test_free(saveB_src); - /* const docC = load(saveMidway) */ - AMdoc* docC; - assert_true(AMitemToDoc( - AMstackItem(stack_ptr, AMload(saveMidway.src, saveMidway.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &docC)); - /* docC.loadIncremental(save3) */ - AMstackItem(NULL, AMloadIncremental(docC, save3.src, save3.count), cmocka_cb, AMexpect(AM_VAL_TYPE_UINT)); - /* */ - /* assert.deepEqual(docA.keys("_root"), docB.keys("_root")); */ - AMitems const keysA = AMstackItems(stack_ptr, AMkeys(docA, AM_ROOT, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - AMitems const keysB = AMstackItems(stack_ptr, AMkeys(docB, AM_ROOT, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - assert_true(AMitemsEqual(&keysA, &keysB)); - /* assert.deepEqual(docA.save(), docB.save()); */ - AMbyteSpan docA_save; - assert_true( - AMitemToBytes(AMstackItem(stack_ptr, AMsave(docA), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &docA_save)); - AMbyteSpan docB_save; - assert_true( - AMitemToBytes(AMstackItem(stack_ptr, AMsave(docB), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &docB_save)); - assert_int_equal(docA_save.count, docB_save.count); - assert_memory_equal(docA_save.src, docB_save.src, docA_save.count); - /* assert.deepEqual(docA.save(), docC.save()); */ - AMbyteSpan docC_save; - assert_true( - AMitemToBytes(AMstackItem(stack_ptr, AMsave(docC), cmocka_cb, 
AMexpect(AM_VAL_TYPE_BYTES)), &docC_save)); - assert_int_equal(docA_save.count, docC_save.count); - assert_memory_equal(docA_save.src, docC_save.src, docA_save.count); -} - -/** - * \brief should be able to splice text #2 - */ -static void test_should_be_able_to_splice_text_2(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const doc = create() */ - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - /* const text = doc.putObject("_root", "text", ""); */ - AMobjId const* const text = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - /* doc.splice(text, 0, 0, "hello world"); */ - AMstackItem(NULL, AMspliceText(doc, text, 0, 0, AMstr("hello world")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* const hash1 = doc.commit(); */ - AMitems const hash1 = - AMstackItems(stack_ptr, AMcommit(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* doc.splice(text, 6, 0, "big bad "); */ - AMstackItem(NULL, AMspliceText(doc, text, 6, 0, AMstr("big bad ")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* const hash2 = doc.commit(); */ - AMitems const hash2 = - AMstackItems(stack_ptr, AMcommit(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* assert.strictEqual(doc.text(text), "hello big bad world") */ - AMbyteSpan str; - assert_true( - AMitemToStr(AMstackItem(stack_ptr, AMtext(doc, text, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); - assert_int_equal(str.count, strlen("hello big bad world")); - assert_memory_equal(str.src, "hello big bad world", str.count); - /* assert.strictEqual(doc.length(text), 19) */ - assert_int_equal(AMobjSize(doc, text, NULL), 19); - /* assert.strictEqual(doc.text(text, [hash1]), "hello world") */ - assert_true( - AMitemToStr(AMstackItem(stack_ptr, AMtext(doc, text, &hash1), 
cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); - assert_int_equal(str.count, strlen("hello world")); - assert_memory_equal(str.src, "hello world", str.count); - /* assert.strictEqual(doc.length(text, [hash1]), 11) */ - assert_int_equal(AMobjSize(doc, text, &hash1), 11); - /* assert.strictEqual(doc.text(text, [hash2]), "hello big bad world") */ - assert_true( - AMitemToStr(AMstackItem(stack_ptr, AMtext(doc, text, &hash2), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); - assert_int_equal(str.count, strlen("hello big bad world")); - assert_memory_equal(str.src, "hello big bad world", str.count); - /* assert.strictEqual(doc.length(text, [hash2]), 19) */ - assert_int_equal(AMobjSize(doc, text, &hash2), 19); -} - -/** - * \brief local inc increments all visible counters in a map - */ -static void test_local_inc_increments_all_visible_counters_in_a_map(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const doc1 = create("aaaa") */ - AMactorId const* actor_id; - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aaaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); - AMdoc* doc1; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); - /* doc1.put("_root", "hello", "world") */ - AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("hello"), AMstr("world")), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - /* const doc2 = load(doc1.save(), "bbbb"); */ - AMbyteSpan save; - assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &save)); - AMdoc* doc2; - assert_true( - AMitemToDoc(AMstackItem(stack_ptr, AMload(save.src, save.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("bbbb")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); - AMstackItem(NULL, AMsetActorId(doc2, actor_id), cmocka_cb, 
AMexpect(AM_VAL_TYPE_VOID)); - /* const doc3 = load(doc1.save(), "cccc"); */ - AMdoc* doc3; - assert_true( - AMitemToDoc(AMstackItem(stack_ptr, AMload(save.src, save.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc3)); - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("cccc")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); - AMstackItem(NULL, AMsetActorId(doc3, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* let heads = doc1.getHeads() */ - AMitems const heads1 = AMstackItems(stack_ptr, AMgetHeads(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* doc1.put("_root", "cnt", 20) */ - AMstackItem(NULL, AMmapPutInt(doc1, AM_ROOT, AMstr("cnt"), 20), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc2.put("_root", "cnt", 0, "counter") */ - AMstackItem(NULL, AMmapPutCounter(doc2, AM_ROOT, AMstr("cnt"), 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc3.put("_root", "cnt", 10, "counter") */ - AMstackItem(NULL, AMmapPutCounter(doc3, AM_ROOT, AMstr("cnt"), 10), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc1.applyChanges(doc2.getChanges(heads)) */ - AMitems const changes2 = - AMstackItems(stack_ptr, AMgetChanges(doc2, &heads1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - AMstackItem(NULL, AMapplyChanges(doc1, &changes2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc1.applyChanges(doc3.getChanges(heads)) */ - AMitems const changes3 = - AMstackItems(stack_ptr, AMgetChanges(doc3, &heads1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - AMstackItem(NULL, AMapplyChanges(doc1, &changes3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* let result = doc1.getAll("_root", "cnt") */ - AMitems result = AMstackItems(stack_ptr, AMmapGetAll(doc1, AM_ROOT, AMstr("cnt"), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_COUNTER | AM_VAL_TYPE_INT | AM_VAL_TYPE_STR)); - /* assert.deepEqual(result, [ - ['int', 20, '2@aaaa'], - ['counter', 0, '2@bbbb'], - ['counter', 10, '2@cccc'], - ]) */ - AMitem* result_item = AMitemsNext(&result, 1); - 
int64_t int_; - assert_true(AMitemToInt(result_item, &int_)); - assert_int_equal(int_, 20); - assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 2); - AMbyteSpan str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); - assert_int_equal(str.count, 4); - assert_memory_equal(str.src, "aaaa", str.count); - result_item = AMitemsNext(&result, 1); - int64_t counter; - assert_true(AMitemToCounter(result_item, &counter)); - assert_int_equal(counter, 0); - assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 2); - str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); - assert_int_equal(str.count, 4); - assert_memory_equal(str.src, "bbbb", str.count); - result_item = AMitemsNext(&result, 1); - assert_true(AMitemToCounter(result_item, &counter)); - assert_int_equal(counter, 10); - assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 2); - str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); - assert_int_equal(str.count, 4); - assert_memory_equal(str.src, "cccc", str.count); - /* doc1.increment("_root", "cnt", 5) */ - AMstackItem(NULL, AMmapIncrement(doc1, AM_ROOT, AMstr("cnt"), 5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* result = doc1.getAll("_root", "cnt") */ - result = AMstackItems(stack_ptr, AMmapGetAll(doc1, AM_ROOT, AMstr("cnt"), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_COUNTER)); - /* assert.deepEqual(result, [ - ['counter', 5, '2@bbbb'], - ['counter', 15, '2@cccc'], - ]) */ - result_item = AMitemsNext(&result, 1); - assert_true(AMitemToCounter(result_item, &counter)); - assert_int_equal(counter, 5); - assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 2); - str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); - assert_int_equal(str.count, 4); - assert_memory_equal(str.src, "bbbb", str.count); - result_item = AMitemsNext(&result, 1); - assert_true(AMitemToCounter(result_item, &counter)); - assert_int_equal(counter, 15); - assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 2); - str = 
AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); - assert_int_equal(str.count, 4); - assert_memory_equal(str.src, "cccc", str.count); - /* */ - /* const save1 = doc1.save() */ - AMbyteSpan save1; - assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &save1)); - /* const doc4 = load(save1) */ - AMdoc* doc4; - assert_true(AMitemToDoc( - AMstackItem(stack_ptr, AMload(save1.src, save1.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc4)); - /* assert.deepEqual(doc4.save(), save1); */ - AMbyteSpan doc4_save; - assert_true( - AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc4), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &doc4_save)); - assert_int_equal(doc4_save.count, save1.count); - assert_memory_equal(doc4_save.src, save1.src, doc4_save.count); -} - -/** - * \brief local inc increments all visible counters in a sequence - */ -static void test_local_inc_increments_all_visible_counters_in_a_sequence(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const doc1 = create("aaaa") */ - AMactorId const* actor_id; - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aaaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); - AMdoc* doc1; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); - /* const seq = doc1.putObject("_root", "seq", []) */ - AMobjId const* const seq = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc1, AM_ROOT, AMstr("seq"), AM_OBJ_TYPE_LIST), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - /* doc1.insert(seq, 0, "hello") */ - AMstackItem(NULL, AMlistPutStr(doc1, seq, 0, true, AMstr("hello")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* const doc2 = load(doc1.save(), "bbbb"); */ - AMbyteSpan save1; - assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &save1)); - AMdoc* doc2; - 
assert_true(AMitemToDoc( - AMstackItem(stack_ptr, AMload(save1.src, save1.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("bbbb")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); - AMstackItem(NULL, AMsetActorId(doc2, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* const doc3 = load(doc1.save(), "cccc"); */ - AMdoc* doc3; - assert_true(AMitemToDoc( - AMstackItem(stack_ptr, AMload(save1.src, save1.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc3)); - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("cccc")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); - AMstackItem(NULL, AMsetActorId(doc3, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* let heads = doc1.getHeads() */ - AMitems const heads1 = AMstackItems(stack_ptr, AMgetHeads(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* doc1.put(seq, 0, 20) */ - AMstackItem(NULL, AMlistPutInt(doc1, seq, 0, false, 20), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc2.put(seq, 0, 0, "counter") */ - AMstackItem(NULL, AMlistPutCounter(doc2, seq, 0, false, 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc3.put(seq, 0, 10, "counter") */ - AMstackItem(NULL, AMlistPutCounter(doc3, seq, 0, false, 10), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc1.applyChanges(doc2.getChanges(heads)) */ - AMitems const changes2 = - AMstackItems(stack_ptr, AMgetChanges(doc2, &heads1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - AMstackItem(NULL, AMapplyChanges(doc1, &changes2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc1.applyChanges(doc3.getChanges(heads)) */ - AMitems const changes3 = - AMstackItems(stack_ptr, AMgetChanges(doc3, &heads1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - AMstackItem(NULL, AMapplyChanges(doc1, &changes3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* let result = doc1.getAll(seq, 0) */ - AMitems result = AMstackItems(stack_ptr, AMlistGetAll(doc1, 
seq, 0, NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_COUNTER | AM_VAL_TYPE_INT)); - /* assert.deepEqual(result, [ - ['int', 20, '3@aaaa'], - ['counter', 0, '3@bbbb'], - ['counter', 10, '3@cccc'], - ]) */ - AMitem* result_item = AMitemsNext(&result, 1); - int64_t int_; - assert_true(AMitemToInt(result_item, &int_)); - assert_int_equal(int_, 20); - assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 3); - AMbyteSpan str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); - assert_int_equal(str.count, 4); - assert_memory_equal(str.src, "aaaa", str.count); - result_item = AMitemsNext(&result, 1); - int64_t counter; - assert_true(AMitemToCounter(result_item, &counter)); - assert_int_equal(counter, 0); - assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 3); - str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); - assert_memory_equal(str.src, "bbbb", str.count); - result_item = AMitemsNext(&result, 1); - assert_true(AMitemToCounter(result_item, &counter)); - assert_int_equal(counter, 10); - assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 3); - str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); - assert_int_equal(str.count, 4); - assert_memory_equal(str.src, "cccc", str.count); - /* doc1.increment(seq, 0, 5) */ - AMstackItem(NULL, AMlistIncrement(doc1, seq, 0, 5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* result = doc1.getAll(seq, 0) */ - result = AMstackItems(stack_ptr, AMlistGetAll(doc1, seq, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_COUNTER)); - /* assert.deepEqual(result, [ - ['counter', 5, '3@bbbb'], - ['counter', 15, '3@cccc'], - ]) */ - result_item = AMitemsNext(&result, 1); - assert_true(AMitemToCounter(result_item, &counter)); - assert_int_equal(counter, 5); - assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 3); - str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); - assert_int_equal(str.count, 4); - assert_memory_equal(str.src, "bbbb", str.count); - result_item = AMitemsNext(&result, 1); - 
assert_true(AMitemToCounter(result_item, &counter)); - assert_int_equal(counter, 15); - assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 3); - str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); - assert_memory_equal(str.src, "cccc", str.count); - /* */ - /* const save = doc1.save() */ - AMbyteSpan save; - assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &save)); - /* const doc4 = load(save) */ - AMdoc* doc4; - assert_true( - AMitemToDoc(AMstackItem(stack_ptr, AMload(save.src, save.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc4)); - /* assert.deepEqual(doc4.save(), save); */ - AMbyteSpan doc4_save; - assert_true( - AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc4), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &doc4_save)); - assert_int_equal(doc4_save.count, save.count); - assert_memory_equal(doc4_save.src, save.src, doc4_save.count); -} - -/** - * \brief paths can be used instead of objids - */ -static void test_paths_can_be_used_instead_of_objids(void** state); - -/** - * \brief should be able to fetch changes by hash - */ -static void test_should_be_able_to_fetch_changes_by_hash(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const doc1 = create("aaaa") */ - AMactorId const* actor_id; - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aaaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); - AMdoc* doc1; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); - /* const doc2 = create("bbbb") */ - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("bbbb")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); - AMdoc* doc2; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); - /* doc1.put("/", "a", "b") */ - AMstackItem(NULL, AMmapPutStr(doc1, 
AM_ROOT, AMstr("a"), AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc2.put("/", "b", "c") */ - AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("b"), AMstr("c")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* const head1 = doc1.getHeads() */ - AMitems head1 = AMstackItems(stack_ptr, AMgetHeads(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* const head2 = doc2.getHeads() */ - AMitems head2 = AMstackItems(stack_ptr, AMgetHeads(doc2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* const change1 = doc1.getChangeByHash(head1[0]) - if (change1 === null) { throw new RangeError("change1 should not be - null") */ - AMbyteSpan change_hash1; - assert_true(AMitemToChangeHash(AMitemsNext(&head1, 1), &change_hash1)); - AMchange const* change1; - assert_true(AMitemToChange(AMstackItem(stack_ptr, AMgetChangeByHash(doc1, change_hash1.src, change_hash1.count), - cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)), - &change1)); - /* const change2 = doc1.getChangeByHash(head2[0]) - assert.deepEqual(change2, null) */ - AMbyteSpan change_hash2; - assert_true(AMitemToChangeHash(AMitemsNext(&head2, 1), &change_hash2)); - AMstackItem(NULL, AMgetChangeByHash(doc1, change_hash2.src, change_hash2.count), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - /* assert.deepEqual(decodeChange(change1).hash, head1[0]) */ - assert_memory_equal(AMchangeHash(change1).src, change_hash1.src, change_hash1.count); -} - -/** - * \brief recursive sets are possible - */ -static void test_recursive_sets_are_possible(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const doc = create("aaaa") */ - AMactorId const* actor_id; - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aaaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - /* const l1 = doc.putObject("_root", "list", [{ foo: 
"bar" }, [1, 2, 3]] */ - AMobjId const* const l1 = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - { - AMobjId const* const map = AMitemObjId(AMstackItem( - stack_ptr, AMlistPutObject(doc, l1, 0, true, AM_OBJ_TYPE_MAP), cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - AMstackItem(NULL, AMmapPutStr(doc, map, AMstr("foo"), AMstr("bar")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMobjId const* const list = - AMitemObjId(AMstackItem(stack_ptr, AMlistPutObject(doc, l1, SIZE_MAX, true, AM_OBJ_TYPE_LIST), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - for (int value = 1; value != 4; ++value) { - AMstackItem(NULL, AMlistPutInt(doc, list, SIZE_MAX, true, value), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - } - } - /* const l2 = doc.insertObject(l1, 0, { zip: ["a", "b"] }) */ - AMobjId const* const l2 = AMitemObjId(AMstackItem(stack_ptr, AMlistPutObject(doc, l1, 0, true, AM_OBJ_TYPE_MAP), - cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - { - AMobjId const* const list = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, l2, AMstr("zip"), AM_OBJ_TYPE_LIST), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - AMstackItem(NULL, AMlistPutStr(doc, list, SIZE_MAX, true, AMstr("a")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMlistPutStr(doc, list, SIZE_MAX, true, AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - } - /* const l3 = doc.putObject("_root", "info1", "hello world") // 'text' - * object */ - AMobjId const* const l3 = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("info1"), AM_OBJ_TYPE_TEXT), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - AMstackItem(NULL, AMspliceText(doc, l3, 0, 0, AMstr("hello world")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* doc.put("_root", "info2", "hello world") // 'str' */ - AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("info2"), AMstr("hello world")), cmocka_cb, - 
AMexpect(AM_VAL_TYPE_VOID)); - /* const l4 = doc.putObject("_root", "info3", "hello world") */ - AMobjId const* const l4 = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("info3"), AM_OBJ_TYPE_TEXT), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - AMstackItem(NULL, AMspliceText(doc, l4, 0, 0, AMstr("hello world")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* assert.deepEqual(doc.materialize(), { - "list": [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]], - "info1": "hello world", - "info2": "hello world", - "info3": "hello world", - }) */ - AMitems doc_items = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE | AM_VAL_TYPE_STR)); - AMitem* doc_item = AMitemsNext(&doc_items, 1); - assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); - AMbyteSpan key; - assert_true(AMitemKey(doc_item, &key)); - assert_int_equal(key.count, strlen("info1")); - assert_memory_equal(key.src, "info1", key.count); - AMbyteSpan str; - assert_true(AMitemToStr( - AMstackItem(stack_ptr, AMtext(doc, AMitemObjId(doc_item), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); - assert_int_equal(str.count, strlen("hello world")); - assert_memory_equal(str.src, "hello world", str.count); - doc_item = AMitemsNext(&doc_items, 1); - assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(doc_item, &key)); - assert_int_equal(key.count, strlen("info2")); - assert_memory_equal(key.src, "info2", key.count); - assert_true(AMitemToStr(doc_item, &str)); - assert_int_equal(str.count, strlen("hello world")); - assert_memory_equal(str.src, "hello world", str.count); - doc_item = AMitemsNext(&doc_items, 1); - assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(doc_item, &key)); - assert_int_equal(key.count, strlen("info3")); - assert_memory_equal(key.src, "info3", key.count); - assert_true(AMitemToStr( - AMstackItem(stack_ptr, AMtext(doc, 
AMitemObjId(doc_item), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); - assert_int_equal(str.count, strlen("hello world")); - assert_memory_equal(str.src, "hello world", str.count); - doc_item = AMitemsNext(&doc_items, 1); - assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(doc_item, &key)); - assert_int_equal(key.count, strlen("list")); - assert_memory_equal(key.src, "list", key.count); - { - AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(doc_item), 0, SIZE_MAX, NULL), - cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE)); - AMitem const* list_item = AMitemsNext(&list_items, 1); - { - AMitems map_items = - AMstackItems(stack_ptr, AMmapRange(doc, AMitemObjId(list_item), AMstr(NULL), AMstr(NULL), NULL), - cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE)); - AMitem const* map_item = AMitemsNext(&map_items, 1); - assert_int_equal(AMitemIdxType(map_item), AM_IDX_TYPE_KEY); - AMbyteSpan key; - assert_true(AMitemKey(map_item, &key)); - assert_int_equal(key.count, strlen("zip")); - assert_memory_equal(key.src, "zip", key.count); - { - AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(map_item), 0, SIZE_MAX, NULL), - cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE | AM_VAL_TYPE_STR)); - AMbyteSpan str; - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "a", str.count); - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "b", str.count); - } - } - list_item = AMitemsNext(&list_items, 1); - { - AMitems map_items = - AMstackItems(stack_ptr, AMmapRange(doc, AMitemObjId(list_item), AMstr(NULL), AMstr(NULL), NULL), - cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE | AM_VAL_TYPE_STR)); - AMitem* map_item = AMitemsNext(&map_items, 1); - assert_int_equal(AMitemIdxType(map_item), AM_IDX_TYPE_KEY); - AMbyteSpan key; - assert_true(AMitemKey(map_item, &key)); - 
assert_int_equal(key.count, strlen("foo")); - assert_memory_equal(key.src, "foo", key.count); - AMbyteSpan str; - assert_true(AMitemToStr(map_item, &str)); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "bar", str.count); - } - list_item = AMitemsNext(&list_items, 1); - { - AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(list_item), 0, SIZE_MAX, NULL), - cmocka_cb, AMexpect(AM_VAL_TYPE_INT)); - int64_t int_; - assert_true(AMitemToInt(AMitemsNext(&list_items, 1), &int_)); - assert_int_equal(int_, 1); - assert_true(AMitemToInt(AMitemsNext(&list_items, 1), &int_)); - assert_int_equal(int_, 2); - assert_true(AMitemToInt(AMitemsNext(&list_items, 1), &int_)); - assert_int_equal(int_, 3); - } - } - /* assert.deepEqual(doc.materialize(l2), { zip: ["a", "b"] }) */ - AMitems map_items = AMstackItems(stack_ptr, AMmapRange(doc, l2, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE)); - AMitem const* map_item = AMitemsNext(&map_items, 1); - assert_int_equal(AMitemIdxType(map_item), AM_IDX_TYPE_KEY); - assert_true(AMitemKey(map_item, &key)); - assert_int_equal(key.count, strlen("zip")); - assert_memory_equal(key.src, "zip", key.count); - { - AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(map_item), 0, SIZE_MAX, NULL), - cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - AMbyteSpan str; - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "a", str.count); - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "b", str.count); - } - /* assert.deepEqual(doc.materialize(l1), [{ zip: ["a", "b"] }, { foo: "bar" - * }, [1, 2, 3]] */ - AMitems list_items = - AMstackItems(stack_ptr, AMlistRange(doc, l1, 0, SIZE_MAX, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE)); - AMitem const* list_item = AMitemsNext(&list_items, 1); - { - AMitems map_items = - 
AMstackItems(stack_ptr, AMmapRange(doc, AMitemObjId(list_item), AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE)); - AMitem const* map_item = AMitemsNext(&map_items, 1); - assert_int_equal(AMitemIdxType(map_item), AM_IDX_TYPE_KEY); - AMbyteSpan key; - assert_true(AMitemKey(map_item, &key)); - assert_int_equal(key.count, strlen("zip")); - assert_memory_equal(key.src, "zip", key.count); - { - AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(map_item), 0, SIZE_MAX, NULL), - cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - AMbyteSpan str; - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "a", str.count); - assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "b", str.count); - } - } - list_item = AMitemsNext(&list_items, 1); - { - AMitems map_items = - AMstackItems(stack_ptr, AMmapRange(doc, AMitemObjId(list_item), AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_STR)); - AMitem* map_item = AMitemsNext(&map_items, 1); - assert_int_equal(AMitemIdxType(map_item), AM_IDX_TYPE_KEY); - AMbyteSpan key; - assert_true(AMitemKey(map_item, &key)); - assert_int_equal(key.count, strlen("foo")); - assert_memory_equal(key.src, "foo", key.count); - AMbyteSpan str; - assert_true(AMitemToStr(map_item, &str)); - assert_int_equal(str.count, 3); - assert_memory_equal(str.src, "bar", str.count); - } - list_item = AMitemsNext(&list_items, 1); - { - AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(list_item), 0, SIZE_MAX, NULL), - cmocka_cb, AMexpect(AM_VAL_TYPE_INT)); - int64_t int_; - assert_true(AMitemToInt(AMitemsNext(&list_items, 1), &int_)); - assert_int_equal(int_, 1); - assert_true(AMitemToInt(AMitemsNext(&list_items, 1), &int_)); - assert_int_equal(int_, 2); - assert_true(AMitemToInt(AMitemsNext(&list_items, 1), &int_)); - assert_int_equal(int_, 
3); - } - /* assert.deepEqual(doc.materialize(l4), "hello world") */ - assert_true(AMitemToStr(AMstackItem(stack_ptr, AMtext(doc, l4, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); - assert_int_equal(str.count, strlen("hello world")); - assert_memory_equal(str.src, "hello world", str.count); -} - -/** - * \brief only returns an object id when objects are created - */ -static void test_only_returns_an_object_id_when_objects_are_created(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const doc = create("aaaa") */ - AMactorId const* actor_id; - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aaaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); - AMdoc* doc; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); - /* const r1 = doc.put("_root", "foo", "bar") - assert.deepEqual(r1, null); */ - AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("foo"), AMstr("bar")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* const r2 = doc.putObject("_root", "list", []) */ - AMobjId const* const r2 = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - /* const r3 = doc.put("_root", "counter", 10, "counter") - assert.deepEqual(r3, null); */ - AMstackItem(NULL, AMmapPutCounter(doc, AM_ROOT, AMstr("counter"), 10), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* const r4 = doc.increment("_root", "counter", 1) - assert.deepEqual(r4, null); */ - AMstackItem(NULL, AMmapIncrement(doc, AM_ROOT, AMstr("counter"), 1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* const r5 = doc.delete("_root", "counter") - assert.deepEqual(r5, null); */ - AMstackItem(NULL, AMmapDelete(doc, AM_ROOT, AMstr("counter")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* const r6 = doc.insert(r2, 0, 10); - assert.deepEqual(r6, null); */ - AMstackItem(NULL, 
AMlistPutInt(doc, r2, 0, true, 10), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* const r7 = doc.insertObject(r2, 0, {}); */ - AMobjId const* const r7 = AMitemObjId(AMstackItem(stack_ptr, AMlistPutObject(doc, r2, 0, true, AM_OBJ_TYPE_LIST), - cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - /* const r8 = doc.splice(r2, 1, 0, ["a", "b", "c"]); */ - AMresult* data = AMstackResult( - stack_ptr, AMresultFrom(3, AMitemFromStr(AMstr("a")), AMitemFromStr(AMstr("b")), AMitemFromStr(AMstr("c"))), - NULL, NULL); - AMstackItem(NULL, AMsplice(doc, r2, 1, 0, AMresultItems(data)), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* assert.deepEqual(r2, "2@aaaa"); */ - assert_int_equal(AMobjIdCounter(r2), 2); - AMbyteSpan str = AMactorIdStr(AMobjIdActorId(r2)); - assert_int_equal(str.count, 4); - assert_memory_equal(str.src, "aaaa", str.count); - /* assert.deepEqual(r7, "7@aaaa"); */ - assert_int_equal(AMobjIdCounter(r7), 7); - str = AMactorIdStr(AMobjIdActorId(r7)); - assert_memory_equal(str.src, "aaaa", str.count); -} - -/** - * \brief objects without properties are preserved - */ -static void test_objects_without_properties_are_preserved(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const doc1 = create("aaaa") */ - AMactorId const* actor_id; - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aaaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); - AMdoc* doc1; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); - /* const a = doc1.putObject("_root", "a", {}); */ - AMobjId const* const a = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc1, AM_ROOT, AMstr("a"), AM_OBJ_TYPE_MAP), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - /* const b = doc1.putObject("_root", "b", {}); */ - AMobjId const* const b = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc1, AM_ROOT, AMstr("b"), AM_OBJ_TYPE_MAP), cmocka_cb, - 
AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - /* const c = doc1.putObject("_root", "c", {}); */ - AMobjId const* const c = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc1, AM_ROOT, AMstr("c"), AM_OBJ_TYPE_MAP), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - /* const d = doc1.put(c, "d", "dd"); */ - AMstackItem(NULL, AMmapPutStr(doc1, c, AMstr("d"), AMstr("dd")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* const saved = doc1.save(); */ - AMbyteSpan saved; - assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &saved)); - /* const doc2 = load(saved); */ - AMdoc* doc2; - assert_true(AMitemToDoc( - AMstackItem(stack_ptr, AMload(saved.src, saved.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); - /* assert.deepEqual(doc2.getWithType("_root", "a"), ["map", a]) */ - AMitems doc_items = AMstackItems(stack_ptr, AMmapRange(doc2, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE)); - assert_true(AMobjIdEqual(AMitemObjId(AMitemsNext(&doc_items, 1)), a)); - /* assert.deepEqual(doc2.keys(a), []) */ - AMitems keys = AMstackItems(stack_ptr, AMkeys(doc1, a, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - assert_int_equal(AMitemsSize(&keys), 0); - /* assert.deepEqual(doc2.getWithType("_root", "b"), ["map", b]) */ - assert_true(AMobjIdEqual(AMitemObjId(AMitemsNext(&doc_items, 1)), b)); - /* assert.deepEqual(doc2.keys(b), []) */ - keys = AMstackItems(stack_ptr, AMkeys(doc1, b, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - assert_int_equal(AMitemsSize(&keys), 0); - /* assert.deepEqual(doc2.getWithType("_root", "c"), ["map", c]) */ - assert_true(AMobjIdEqual(AMitemObjId(AMitemsNext(&doc_items, 1)), c)); - /* assert.deepEqual(doc2.keys(c), ["d"]) */ - keys = AMstackItems(stack_ptr, AMkeys(doc1, c, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - AMbyteSpan str; - assert_true(AMitemToStr(AMitemsNext(&keys, 1), &str)); - assert_int_equal(str.count, 1); - assert_memory_equal(str.src, "d", 
str.count); - /* assert.deepEqual(doc2.getWithType(c, "d"), ["str", "dd"]) */ - AMitems obj_items = AMstackItems(stack_ptr, AMobjItems(doc1, c, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - assert_true(AMitemToStr(AMitemsNext(&obj_items, 1), &str)); - assert_int_equal(str.count, 2); - assert_memory_equal(str.src, "dd", str.count); -} - -/** - * \brief should allow you to forkAt a heads - */ -static void test_should_allow_you_to_forkAt_a_heads(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const A = create("aaaaaa") */ - AMactorId const* actor_id; - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aaaaaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), - &actor_id)); - AMdoc* A; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &A)); - /* A.put("/", "key1", "val1"); */ - AMstackItem(NULL, AMmapPutStr(A, AM_ROOT, AMstr("key1"), AMstr("val1")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* A.put("/", "key2", "val2"); */ - AMstackItem(NULL, AMmapPutStr(A, AM_ROOT, AMstr("key2"), AMstr("val2")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* const heads1 = A.getHeads(); */ - AMitems const heads1 = AMstackItems(stack_ptr, AMgetHeads(A), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* const B = A.fork("bbbbbb") */ - AMdoc* B; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMfork(A, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &B)); - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("bbbbbb")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), - &actor_id)); - AMstackItem(NULL, AMsetActorId(B, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* A.put("/", "key3", "val3"); */ - AMstackItem(NULL, AMmapPutStr(A, AM_ROOT, AMstr("key3"), AMstr("val3")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* B.put("/", "key4", "val4"); */ - AMstackItem(NULL, AMmapPutStr(B, AM_ROOT, AMstr("key4"), AMstr("val4")), 
cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* A.merge(B) */ - AMstackItem(NULL, AMmerge(A, B), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* const heads2 = A.getHeads(); */ - AMitems const heads2 = AMstackItems(stack_ptr, AMgetHeads(A), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* A.put("/", "key5", "val5"); */ - AMstackItem(NULL, AMmapPutStr(A, AM_ROOT, AMstr("key5"), AMstr("val5")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* assert.deepEqual(A.forkAt(heads1).materialize("/"), A.materialize("/", - * heads1) */ - AMdoc* A_forkAt1; - assert_true( - AMitemToDoc(AMstackItem(stack_ptr, AMfork(A, &heads1), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &A_forkAt1)); - AMitems AforkAt1_items = AMstackItems(stack_ptr, AMmapRange(A_forkAt1, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), - cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - AMitems A1_items = AMstackItems(stack_ptr, AMmapRange(A, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads1), cmocka_cb, - AMexpect(AM_VAL_TYPE_STR)); - assert_true(AMitemsEqual(&AforkAt1_items, &A1_items)); - /* assert.deepEqual(A.forkAt(heads2).materialize("/"), A.materialize("/", - * heads2) */ - AMdoc* A_forkAt2; - assert_true( - AMitemToDoc(AMstackItem(stack_ptr, AMfork(A, &heads2), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &A_forkAt2)); - AMitems AforkAt2_items = AMstackItems(stack_ptr, AMmapRange(A_forkAt2, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), - cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); - AMitems A2_items = AMstackItems(stack_ptr, AMmapRange(A, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads2), cmocka_cb, - AMexpect(AM_VAL_TYPE_STR)); - assert_true(AMitemsEqual(&AforkAt2_items, &A2_items)); -} - -/** - * \brief should handle merging text conflicts then saving & loading - */ -static void test_should_handle_merging_text_conflicts_then_saving_and_loading(void** state) { - BaseState* base_state = *state; - AMstack** stack_ptr = &base_state->stack; - /* const A = create("aabbcc") */ - AMactorId const* actor_id; - assert_true(AMitemToActorId( - 
AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aabbcc")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), - &actor_id)); - AMdoc* A; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &A)); - /* const At = A.putObject('_root', 'text', "") */ - AMobjId const* const At = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(A, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), cmocka_cb, - AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - /* A.splice(At, 0, 0, 'hello') */ - AMstackItem(NULL, AMspliceText(A, At, 0, 0, AMstr("hello")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* */ - /* const B = A.fork() */ - AMdoc* B; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMfork(A, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &B)); - /* */ - /* assert.deepEqual(B.getWithType("_root", "text"), ["text", At]) */ - AMbyteSpan str; - assert_true( - AMitemToStr(AMstackItem(stack_ptr, - AMtext(B, - AMitemObjId(AMstackItem(stack_ptr, AMmapGet(B, AM_ROOT, AMstr("text"), NULL), - cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))), - NULL), - cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), - &str)); - AMbyteSpan str2; - assert_true(AMitemToStr(AMstackItem(stack_ptr, AMtext(A, At, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str2)); - assert_int_equal(str.count, str2.count); - assert_memory_equal(str.src, str2.src, str.count); - /* */ - /* B.splice(At, 4, 1) */ - AMstackItem(NULL, AMspliceText(B, At, 4, 1, AMstr(NULL)), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* B.splice(At, 4, 0, '!') */ - AMstackItem(NULL, AMspliceText(B, At, 4, 0, AMstr("!")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* B.splice(At, 5, 0, ' ') */ - AMstackItem(NULL, AMspliceText(B, At, 5, 0, AMstr(" ")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* B.splice(At, 6, 0, 'world') */ - AMstackItem(NULL, AMspliceText(B, At, 6, 0, AMstr("world")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* */ - /* A.merge(B) */ - AMstackItem(NULL, AMmerge(A, B), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* */ 
- /* const binary = A.save() */ - AMbyteSpan binary; - assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(A), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &binary)); - /* */ - /* const C = load(binary) */ - AMdoc* C; - assert_true(AMitemToDoc( - AMstackItem(stack_ptr, AMload(binary.src, binary.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &C)); - /* */ - /* assert.deepEqual(C.getWithType('_root', 'text'), ['text', '1@aabbcc'] */ - AMobjId const* const C_text = AMitemObjId( - AMstackItem(stack_ptr, AMmapGet(C, AM_ROOT, AMstr("text"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - assert_int_equal(AMobjIdCounter(C_text), 1); - str = AMactorIdStr(AMobjIdActorId(C_text)); - assert_int_equal(str.count, strlen("aabbcc")); - assert_memory_equal(str.src, "aabbcc", str.count); - /* assert.deepEqual(C.text(At), 'hell! world') */ - assert_true(AMitemToStr(AMstackItem(stack_ptr, AMtext(C, At, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); - assert_int_equal(str.count, strlen("hell! world")); - assert_memory_equal(str.src, "hell! 
world", str.count); -} - -int run_ported_wasm_basic_tests(void) { - const struct CMUnitTest tests[] = { - cmocka_unit_test_setup_teardown(test_create_clone_and_free, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_start_and_commit, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_getting_a_nonexistent_prop_does_not_throw_an_error, setup_base, - teardown_base), - cmocka_unit_test_setup_teardown(test_should_be_able_to_set_and_get_a_simple_value, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_should_be_able_to_use_bytes, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_should_be_able_to_make_subobjects, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_should_be_able_to_make_lists, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_lists_have_insert_set_splice_and_push_ops, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_should_be_able_to_delete_non_existent_props, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_should_be_able_to_del, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_should_be_able_to_use_counters, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_should_be_able_to_splice_text, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_should_be_able_to_save_all_or_incrementally, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_should_be_able_to_splice_text_2, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_local_inc_increments_all_visible_counters_in_a_map, setup_base, - teardown_base), - cmocka_unit_test_setup_teardown(test_local_inc_increments_all_visible_counters_in_a_sequence, setup_base, - teardown_base), - cmocka_unit_test_setup_teardown(test_should_be_able_to_fetch_changes_by_hash, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_recursive_sets_are_possible, setup_base, teardown_base), - 
cmocka_unit_test_setup_teardown(test_only_returns_an_object_id_when_objects_are_created, setup_base, - teardown_base), - cmocka_unit_test_setup_teardown(test_objects_without_properties_are_preserved, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_should_allow_you_to_forkAt_a_heads, setup_base, teardown_base), - cmocka_unit_test_setup_teardown(test_should_handle_merging_text_conflicts_then_saving_and_loading, setup_base, - teardown_base)}; - - return cmocka_run_group_tests(tests, NULL, NULL); -} diff --git a/rust/automerge-c/test/ported_wasm/suite.c b/rust/automerge-c/test/ported_wasm/suite.c deleted file mode 100644 index 440ed899..00000000 --- a/rust/automerge-c/test/ported_wasm/suite.c +++ /dev/null @@ -1,15 +0,0 @@ -#include -#include -#include -#include - -/* third-party */ -#include - -extern int run_ported_wasm_basic_tests(void); - -extern int run_ported_wasm_sync_tests(void); - -int run_ported_wasm_suite(void) { - return (run_ported_wasm_basic_tests() + run_ported_wasm_sync_tests()); -} diff --git a/rust/automerge-c/test/ported_wasm/sync_tests.c b/rust/automerge-c/test/ported_wasm/sync_tests.c deleted file mode 100644 index 099f8dbf..00000000 --- a/rust/automerge-c/test/ported_wasm/sync_tests.c +++ /dev/null @@ -1,1282 +0,0 @@ -#include -#include -#include -#include -#include - -/* third-party */ -#include - -/* local */ -#include -#include -#include "../base_state.h" -#include "../cmocka_utils.h" - -typedef struct { - BaseState* base_state; - AMdoc* n1; - AMdoc* n2; - AMsyncState* s1; - AMsyncState* s2; -} TestState; - -static int setup(void** state) { - TestState* test_state = test_calloc(1, sizeof(TestState)); - setup_base((void**)&test_state->base_state); - AMstack** stack_ptr = &test_state->base_state->stack; - AMactorId const* actor_id; - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("01234567")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), - &actor_id)); - assert_true( - 
AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &test_state->n1)); - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("89abcdef")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), - &actor_id)); - assert_true( - AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &test_state->n2)); - assert_true(AMitemToSyncState( - AMstackItem(stack_ptr, AMsyncStateInit(), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_STATE)), &test_state->s1)); - assert_true(AMitemToSyncState( - AMstackItem(stack_ptr, AMsyncStateInit(), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_STATE)), &test_state->s2)); - *state = test_state; - return 0; -} - -static int teardown(void** state) { - TestState* test_state = *state; - teardown_base((void**)&test_state->base_state); - test_free(test_state); - return 0; -} - -static void sync(AMdoc* a, AMdoc* b, AMsyncState* a_sync_state, AMsyncState* b_sync_state) { - static size_t const MAX_ITER = 10; - - AMsyncMessage const* a2b_msg = NULL; - AMsyncMessage const* b2a_msg = NULL; - size_t iter = 0; - do { - AMresult* a2b_msg_result = AMgenerateSyncMessage(a, a_sync_state); - AMresult* b2a_msg_result = AMgenerateSyncMessage(b, b_sync_state); - AMitem* item = AMresultItem(a2b_msg_result); - switch (AMitemValType(item)) { - case AM_VAL_TYPE_SYNC_MESSAGE: { - AMitemToSyncMessage(item, &a2b_msg); - AMstackResult(NULL, AMreceiveSyncMessage(b, b_sync_state, a2b_msg), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - } break; - case AM_VAL_TYPE_VOID: - a2b_msg = NULL; - break; - } - item = AMresultItem(b2a_msg_result); - switch (AMitemValType(item)) { - case AM_VAL_TYPE_SYNC_MESSAGE: { - AMitemToSyncMessage(item, &b2a_msg); - AMstackResult(NULL, AMreceiveSyncMessage(a, a_sync_state, b2a_msg), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - } break; - case AM_VAL_TYPE_VOID: - b2a_msg = NULL; - break; - } - if (++iter > MAX_ITER) { - fail_msg( - "Did not synchronize within %d iterations. 
" - "Do you have a bug causing an infinite loop?", - MAX_ITER); - } - } while (a2b_msg || b2a_msg); -} - -static time_t const TIME_0 = 0; - -/** - * \brief should send a sync message implying no local data - */ -static void test_should_send_a_sync_message_implying_no_local_data(void** state) { - /* const doc = create() - const s1 = initSyncState() */ - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->base_state->stack; - /* const m1 = doc.generateSyncMessage(s1) - if (m1 === null) { throw new RangeError("message should not be null") } - const message: DecodedSyncMessage = decodeSyncMessage(m1) */ - AMsyncMessage const* m1; - assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), - cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), - &m1)); - /* assert.deepStrictEqual(message.heads, []) */ - AMitems heads = AMstackItems(stack_ptr, AMsyncMessageHeads(m1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - assert_int_equal(AMitemsSize(&heads), 0); - /* assert.deepStrictEqual(message.need, []) */ - AMitems needs = AMstackItems(stack_ptr, AMsyncMessageNeeds(m1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - assert_int_equal(AMitemsSize(&needs), 0); - /* assert.deepStrictEqual(message.have.length, 1) */ - AMitems haves = AMstackItems(stack_ptr, AMsyncMessageHaves(m1), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_HAVE)); - assert_int_equal(AMitemsSize(&haves), 1); - /* assert.deepStrictEqual(message.have[0].lastSync, []) */ - AMsyncHave const* have0; - assert_true(AMitemToSyncHave(AMitemsNext(&haves, 1), &have0)); - AMitems last_sync = - AMstackItems(stack_ptr, AMsyncHaveLastSync(have0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - assert_int_equal(AMitemsSize(&last_sync), 0); - /* assert.deepStrictEqual(message.have[0].bloom.byteLength, 0) - assert.deepStrictEqual(message.changes, []) */ - AMitems changes = AMstackItems(stack_ptr, AMsyncMessageChanges(m1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - 
assert_int_equal(AMitemsSize(&changes), 0); -} - -/** - * \brief should not reply if we have no data as well - */ -static void test_should_not_reply_if_we_have_no_data_as_well(void** state) { - /* const n1 = create(), n2 = create() - const s1 = initSyncState(), s2 = initSyncState() */ - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->base_state->stack; - /* const m1 = n1.generateSyncMessage(s1) - if (m1 === null) { throw new RangeError("message should not be null") */ - AMsyncMessage const* m1; - assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), - cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), - &m1)); - /* n2.receiveSyncMessage(s2, m1) */ - AMstackItem(NULL, AMreceiveSyncMessage(test_state->n2, test_state->s2, m1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* const m2 = n2.generateSyncMessage(s2) - assert.deepStrictEqual(m2, null) */ - AMstackItem(NULL, AMgenerateSyncMessage(test_state->n2, test_state->s2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); -} - -/** - * \brief repos with equal heads do not need a reply message - */ -static void test_repos_with_equal_heads_do_not_need_a_reply_message(void** state) { - /* const n1 = create(), n2 = create() - const s1 = initSyncState(), s2 = initSyncState() */ - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->base_state->stack; - /* */ - /* make two nodes with the same changes */ - /* const list = n1.putObject("_root", "n", []) */ - AMobjId const* const list = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(test_state->n1, AM_ROOT, AMstr("n"), AM_OBJ_TYPE_LIST), - cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* for (let i = 0; i < 10; i++) { */ - for (size_t i = 0; i != 10; ++i) { - /* n1.insert(list, i, i) */ - AMstackItem(NULL, AMlistPutUint(test_state->n1, list, i, true, i), 
cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* n2.applyChanges(n1.getChanges([])) */ - AMitems const items = - AMstackItems(stack_ptr, AMgetChanges(test_state->n1, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - AMstackItem(NULL, AMapplyChanges(test_state->n2, &items), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ - assert_true(AMequal(test_state->n1, test_state->n2)); - /* */ - /* generate a naive sync message */ - /* const m1 = n1.generateSyncMessage(s1) - if (m1 === null) { throw new RangeError("message should not be null") */ - AMsyncMessage const* m1; - assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), - cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), - &m1)); - /* assert.deepStrictEqual(s1.lastSentHeads, n1.getHeads()) */ - AMitems const last_sent_heads = - AMstackItems(stack_ptr, AMsyncStateLastSentHeads(test_state->s1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMitems const heads = - AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - assert_true(AMitemsEqual(&last_sent_heads, &heads)); - /* */ - /* heads are equal so this message should be null */ - /* n2.receiveSyncMessage(s2, m1) */ - AMstackItem(NULL, AMreceiveSyncMessage(test_state->n2, test_state->s2, m1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* const m2 = n2.generateSyncMessage(s2) - assert.strictEqual(m2, null) */ - AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n2, test_state->s2), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); -} - -/** - * \brief n1 should offer all changes to n2 when starting from nothing - */ -static void test_n1_should_offer_all_changes_to_n2_when_starting_from_nothing(void** state) { - /* const n1 = create(), n2 = create() */ - TestState* 
test_state = *state; - AMstack** stack_ptr = &test_state->base_state->stack; - /* make changes for n1 that n2 should request */ - /* const list = n1.putObject("_root", "n", []) */ - AMobjId const* const list = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(test_state->n1, AM_ROOT, AMstr("n"), AM_OBJ_TYPE_LIST), - cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* for (let i = 0; i < 10; i++) { */ - for (size_t i = 0; i != 10; ++i) { - /* n1.insert(list, i, i) */ - AMstackItem(NULL, AMlistPutUint(test_state->n1, list, i, true, i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* */ - /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ - assert_false(AMequal(test_state->n1, test_state->n2)); - /* sync(n1, n2) */ - sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); - /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ - assert_true(AMequal(test_state->n1, test_state->n2)); -} - -/** - * \brief should sync peers where one has commits the other does not - */ -static void test_should_sync_peers_where_one_has_commits_the_other_does_not(void** state) { - /* const n1 = create(), n2 = create() */ - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->base_state->stack; - /* make changes for n1 that n2 should request */ - /* const list = n1.putObject("_root", "n", []) */ - AMobjId const* const list = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(test_state->n1, AM_ROOT, AMstr("n"), AM_OBJ_TYPE_LIST), - cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* for (let i = 0; i < 10; i++) { */ - 
for (size_t i = 0; i != 10; ++i) { - /* n1.insert(list, i, i) */ - AMstackItem(NULL, AMlistPutUint(test_state->n1, list, i, true, i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* */ - /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ - assert_false(AMequal(test_state->n1, test_state->n2)); - /* sync(n1, n2) */ - sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); - /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ - assert_true(AMequal(test_state->n1, test_state->n2)); -} - -/** - * \brief should work with prior sync state - */ -static void test_should_work_with_prior_sync_state(void** state) { - /* create & synchronize two nodes */ - /* const n1 = create(), n2 = create() - const s1 = initSyncState(), s2 = initSyncState() */ - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->base_state->stack; - /* */ - /* for (let i = 0; i < 5; i++) { */ - for (size_t i = 0; i != 5; ++i) { - /* n1.put("_root", "x", i) */ - AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* */ - sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); - /* */ - /* modify the first node further */ - /* for (let i = 5; i < 10; i++) { */ - for (size_t i = 5; i != 10; ++i) { - /* n1.put("_root", "x", i) */ - AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* */ - /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ - 
assert_false(AMequal(test_state->n1, test_state->n2)); - /* sync(n1, n2, s1, s2) */ - sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); - /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ - assert_true(AMequal(test_state->n1, test_state->n2)); -} - -/** - * \brief should not generate messages once synced - */ -static void test_should_not_generate_messages_once_synced(void** state) { - /* create & synchronize two nodes */ - /* const n1 = create('abc123'), n2 = create('def456') - const s1 = initSyncState(), s2 = initSyncState() */ - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->base_state->stack; - AMactorId const* actor_id; - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("abc123")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), - &actor_id)); - AMstackItem(NULL, AMsetActorId(test_state->n1, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("def456")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), - &actor_id)); - AMstackItem(NULL, AMsetActorId(test_state->n2, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* */ - /* let message, patch - for (let i = 0; i < 5; i++) { */ - for (size_t i = 0; i != 5; ++i) { - /* n1.put("_root", "x", i) */ - AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* for (let i = 0; i < 5; i++) { */ - for (size_t i = 0; i != 5; ++i) { - /* n2.put("_root", "y", i) */ - AMstackItem(NULL, AMmapPutUint(test_state->n2, AM_ROOT, AMstr("y"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n2.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n2, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* */ - /* n1 reports what it has 
*/ - /* message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be - null") */ - AMsyncMessage const* message; - assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), - cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), - &message)); - /* */ - /* n2 receives that message and sends changes along with what it has */ - /* n2.receiveSyncMessage(s2, message) */ - AMstackItem(NULL, AMreceiveSyncMessage(test_state->n2, test_state->s2, message), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - /* message = n2.generateSyncMessage(s2) - if (message === null) { throw new RangeError("message should not be - null") */ - assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n2, test_state->s2), - cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), - &message)); - AMitems message_changes = - AMstackItems(stack_ptr, AMsyncMessageChanges(message), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - assert_int_equal(AMitemsSize(&message_changes), 5); - /* */ - /* n1 receives the changes and replies with the changes it now knows that - * n2 needs */ - /* n1.receiveSyncMessage(s1, message) */ - AMstackItem(NULL, AMreceiveSyncMessage(test_state->n1, test_state->s1, message), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - /* message = n2.generateSyncMessage(s2) - if (message === null) { throw new RangeError("message should not be - null") */ - assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), - cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), - &message)); - message_changes = AMstackItems(stack_ptr, AMsyncMessageChanges(message), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - assert_int_equal(AMitemsSize(&message_changes), 5); - /* */ - /* n2 applies the changes and sends confirmation ending the exchange */ - /* n2.receiveSyncMessage(s2, message) */ - AMstackItem(NULL, AMreceiveSyncMessage(test_state->n2, 
test_state->s2, message), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - /* message = n2.generateSyncMessage(s2) - if (message === null) { throw new RangeError("message should not be - null") */ - assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n2, test_state->s2), - cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), - &message)); - /* */ - /* n1 receives the message and has nothing more to say */ - /* n1.receiveSyncMessage(s1, message) */ - AMstackItem(NULL, AMreceiveSyncMessage(test_state->n1, test_state->s1, message), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - /* message = n1.generateSyncMessage(s1) - assert.deepStrictEqual(message, null) */ - AMstackItem(NULL, AMgenerateSyncMessage(test_state->n1, test_state->s1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* //assert.deepStrictEqual(patch, null) // no changes arrived */ - /* */ - /* n2 also has nothing left to say */ - /* message = n2.generateSyncMessage(s2) - assert.deepStrictEqual(message, null) */ - AMstackItem(NULL, AMgenerateSyncMessage(test_state->n2, test_state->s2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); -} - -/** - * \brief should allow simultaneous messages during synchronization - */ -static void test_should_allow_simultaneous_messages_during_synchronization(void** state) { - /* create & synchronize two nodes */ - /* const n1 = create('abc123'), n2 = create('def456') - const s1 = initSyncState(), s2 = initSyncState() */ - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->base_state->stack; - AMactorId const* actor_id; - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("abc123")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), - &actor_id)); - AMstackItem(NULL, AMsetActorId(test_state->n1, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("def456")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), - &actor_id)); - AMstackItem(NULL, 
AMsetActorId(test_state->n2, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* */ - /* for (let i = 0; i < 5; i++) { */ - for (size_t i = 0; i != 5; ++i) { - /* n1.put("_root", "x", i) */ - AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* for (let i = 0; i < 5; i++) { */ - for (size_t i = 0; i != 5; ++i) { - /* n2.put("_root", "y", i) */ - AMstackItem(NULL, AMmapPutUint(test_state->n2, AM_ROOT, AMstr("y"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n2.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n2, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* const head1 = n1.getHeads()[0], head2 = n2.getHeads()[0] */ - AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMbyteSpan head1; - assert_true(AMitemToChangeHash(AMitemsNext(&heads1, 1), &head1)); - AMitems heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMbyteSpan head2; - assert_true(AMitemToChangeHash(AMitemsNext(&heads2, 1), &head2)); - /* */ - /* both sides report what they have but have no shared peer state */ - /* let msg1to2, msg2to1 - msg1to2 = n1.generateSyncMessage(s1) - if (msg1to2 === null) { throw new RangeError("message should not be - null") */ - AMsyncMessage const* msg1to2; - assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), - cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), - &msg1to2)); - /* msg2to1 = n2.generateSyncMessage(s2) - if (msg2to1 === null) { throw new RangeError("message should not be - null") */ - AMsyncMessage const* msg2to1; - assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n2, 
test_state->s2), - cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), - &msg2to1)); - /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) */ - AMitems msg1to2_changes = - AMstackItems(stack_ptr, AMsyncMessageChanges(msg1to2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - assert_int_equal(AMitemsSize(&msg1to2_changes), 0); - /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync.length, - * 0 */ - AMitems msg1to2_haves = - AMstackItems(stack_ptr, AMsyncMessageHaves(msg1to2), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_HAVE)); - AMsyncHave const* msg1to2_have; - assert_true(AMitemToSyncHave(AMitemsNext(&msg1to2_haves, 1), &msg1to2_have)); - AMitems msg1to2_last_sync = - AMstackItems(stack_ptr, AMsyncHaveLastSync(msg1to2_have), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - assert_int_equal(AMitemsSize(&msg1to2_last_sync), 0); - /* assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) */ - AMitems msg2to1_changes = - AMstackItems(stack_ptr, AMsyncMessageChanges(msg2to1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - assert_int_equal(AMitemsSize(&msg2to1_changes), 0); - /* assert.deepStrictEqual(decodeSyncMessage(msg2to1).have[0].lastSync.length, - * 0 */ - AMitems msg2to1_haves = - AMstackItems(stack_ptr, AMsyncMessageHaves(msg2to1), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_HAVE)); - AMsyncHave const* msg2to1_have; - assert_true(AMitemToSyncHave(AMitemsNext(&msg2to1_haves, 1), &msg2to1_have)); - AMitems msg2to1_last_sync = - AMstackItems(stack_ptr, AMsyncHaveLastSync(msg2to1_have), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - assert_int_equal(AMitemsSize(&msg2to1_last_sync), 0); - /* */ - /* n1 and n2 receive that message and update sync state but make no patc */ - /* n1.receiveSyncMessage(s1, msg2to1) */ - AMstackItem(NULL, AMreceiveSyncMessage(test_state->n1, test_state->s1, msg2to1), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - /* n2.receiveSyncMessage(s2, msg1to2) */ - AMstackItem(NULL, AMreceiveSyncMessage(test_state->n2, 
test_state->s2, msg1to2), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - /* */ - /* now both reply with their local changes that the other lacks - * (standard warning that 1% of the time this will result in a "needs" - * message) */ - /* msg1to2 = n1.generateSyncMessage(s1) - if (msg1to2 === null) { throw new RangeError("message should not be - null") */ - assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), - cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), - &msg1to2)); - /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 5) */ - msg1to2_changes = AMstackItems(stack_ptr, AMsyncMessageChanges(msg1to2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - assert_int_equal(AMitemsSize(&msg1to2_changes), 5); - /* msg2to1 = n2.generateSyncMessage(s2) - if (msg2to1 === null) { throw new RangeError("message should not be - null") */ - assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n2, test_state->s2), - cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), - &msg2to1)); - /* assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 5) */ - msg2to1_changes = AMstackItems(stack_ptr, AMsyncMessageChanges(msg2to1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - assert_int_equal(AMitemsSize(&msg2to1_changes), 5); - /* */ - /* both should now apply the changes and update the frontend */ - /* n1.receiveSyncMessage(s1, msg2to1) */ - AMstackItem(NULL, AMreceiveSyncMessage(test_state->n1, test_state->s1, msg2to1), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - /* assert.deepStrictEqual(n1.getMissingDeps(), []) */ - AMitems missing_deps = - AMstackItems(stack_ptr, AMgetMissingDeps(test_state->n1, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - assert_int_equal(AMitemsSize(&missing_deps), 0); - /* //assert.notDeepStrictEqual(patch1, null) - assert.deepStrictEqual(n1.materialize(), { x: 4, y: 4 }) */ - uint64_t uint; - assert_true(AMitemToUint(AMstackItem(stack_ptr, 
AMmapGet(test_state->n1, AM_ROOT, AMstr("x"), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_UINT)), - &uint)); - assert_int_equal(uint, 4); - assert_true(AMitemToUint(AMstackItem(stack_ptr, AMmapGet(test_state->n1, AM_ROOT, AMstr("y"), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_UINT)), - &uint)); - assert_int_equal(uint, 4); - /* */ - /* n2.receiveSyncMessage(s2, msg1to2) */ - AMstackItem(NULL, AMreceiveSyncMessage(test_state->n2, test_state->s2, msg1to2), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - /* assert.deepStrictEqual(n2.getMissingDeps(), []) */ - missing_deps = - AMstackItems(stack_ptr, AMgetMissingDeps(test_state->n2, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - assert_int_equal(AMitemsSize(&missing_deps), 0); - /* //assert.notDeepStrictEqual(patch2, null) - assert.deepStrictEqual(n2.materialize(), { x: 4, y: 4 }) */ - assert_true(AMitemToUint(AMstackItem(stack_ptr, AMmapGet(test_state->n2, AM_ROOT, AMstr("x"), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_UINT)), - &uint)); - assert_int_equal(uint, 4); - assert_true(AMitemToUint(AMstackItem(stack_ptr, AMmapGet(test_state->n2, AM_ROOT, AMstr("y"), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_UINT)), - &uint)); - assert_int_equal(uint, 4); - /* */ - /* The response acknowledges the changes received and sends no further - * changes */ - /* msg1to2 = n1.generateSyncMessage(s1) - if (msg1to2 === null) { throw new RangeError("message should not be - null") */ - assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), - cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), - &msg1to2)); - /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) */ - msg1to2_changes = AMstackItems(stack_ptr, AMsyncMessageChanges(msg1to2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - assert_int_equal(AMitemsSize(&msg1to2_changes), 0); - /* msg2to1 = n2.generateSyncMessage(s2) - if (msg2to1 === null) { throw new RangeError("message should not be - null") */ - 
assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n2, test_state->s2), - cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), - &msg2to1)); - /* assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) */ - msg2to1_changes = AMstackItems(stack_ptr, AMsyncMessageChanges(msg2to1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - assert_int_equal(AMitemsSize(&msg2to1_changes), 0); - /* */ - /* After receiving acknowledgements, their shared heads should be equal */ - /* n1.receiveSyncMessage(s1, msg2to1) */ - AMstackItem(NULL, AMreceiveSyncMessage(test_state->n1, test_state->s1, msg2to1), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - /* n2.receiveSyncMessage(s2, msg1to2) */ - AMstackItem(NULL, AMreceiveSyncMessage(test_state->n2, test_state->s2, msg1to2), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - /* assert.deepStrictEqual(s1.sharedHeads, [head1, head2].sort()) */ - AMitems s1_shared_heads = - AMstackItems(stack_ptr, AMsyncStateSharedHeads(test_state->s1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMbyteSpan s1_shared_change_hash; - assert_true(AMitemToChangeHash(AMitemsNext(&s1_shared_heads, 1), &s1_shared_change_hash)); - assert_memory_equal(s1_shared_change_hash.src, head1.src, head1.count); - assert_true(AMitemToChangeHash(AMitemsNext(&s1_shared_heads, 1), &s1_shared_change_hash)); - assert_memory_equal(s1_shared_change_hash.src, head2.src, head2.count); - /* assert.deepStrictEqual(s2.sharedHeads, [head1, head2].sort()) */ - AMitems s2_shared_heads = - AMstackItems(stack_ptr, AMsyncStateSharedHeads(test_state->s2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMbyteSpan s2_shared_change_hash; - assert_true(AMitemToChangeHash(AMitemsNext(&s2_shared_heads, 1), &s2_shared_change_hash)); - assert_memory_equal(s2_shared_change_hash.src, head1.src, head1.count); - assert_true(AMitemToChangeHash(AMitemsNext(&s2_shared_heads, 1), &s2_shared_change_hash)); - assert_memory_equal(s2_shared_change_hash.src, head2.src, 
head2.count); - /* //assert.deepStrictEqual(patch1, null) - //assert.deepStrictEqual(patch2, null) */ - /* */ - /* We're in sync, no more messages required */ - /* msg1to2 = n1.generateSyncMessage(s1) - assert.deepStrictEqual(msg1to2, null) */ - AMstackItem(NULL, AMgenerateSyncMessage(test_state->n1, test_state->s1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* msg2to1 = n2.generateSyncMessage(s2) - assert.deepStrictEqual(msg2to1, null) */ - AMstackItem(NULL, AMgenerateSyncMessage(test_state->n2, test_state->s2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* */ - /* If we make one more change and start another sync then its lastSync - * should be updated */ - /* n1.put("_root", "x", 5) */ - AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* msg1to2 = n1.generateSyncMessage(s1) - if (msg1to2 === null) { throw new RangeError("message should not be - null") */ - assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), - cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), - &msg1to2)); - /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync, - * [head1, head2].sort( */ - msg1to2_haves = AMstackItems(stack_ptr, AMsyncMessageHaves(msg1to2), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_HAVE)); - assert_true(AMitemToSyncHave(AMitemsNext(&msg1to2_haves, 1), &msg1to2_have)); - msg1to2_last_sync = - AMstackItems(stack_ptr, AMsyncHaveLastSync(msg1to2_have), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMbyteSpan msg1to2_last_sync_next; - assert_true(AMitemToChangeHash(AMitemsNext(&msg1to2_last_sync, 1), &msg1to2_last_sync_next)); - assert_int_equal(msg1to2_last_sync_next.count, head1.count); - assert_memory_equal(msg1to2_last_sync_next.src, head1.src, head1.count); - assert_true(AMitemToChangeHash(AMitemsNext(&msg1to2_last_sync, 1), &msg1to2_last_sync_next)); - assert_int_equal(msg1to2_last_sync_next.count, head2.count); - 
assert_memory_equal(msg1to2_last_sync_next.src, head2.src, head2.count); -} - -/** - * \brief should assume sent changes were received until we hear otherwise - */ -static void test_should_assume_sent_changes_were_received_until_we_hear_otherwise(void** state) { - /* const n1 = create('01234567'), n2 = create('89abcdef') - const s1 = initSyncState(), s2 = initSyncState() */ - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->base_state->stack; - /* let message = null */ - /* */ - /* const items = n1.putObject("_root", "items", []) */ - AMobjId const* const items = - AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(test_state->n1, AM_ROOT, AMstr("items"), AM_OBJ_TYPE_LIST), - cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* */ - /* sync(n1, n2, s1, s2) */ - sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); - /* */ - /* n1.push(items, "x") */ - AMstackItem(NULL, AMlistPutStr(test_state->n1, items, SIZE_MAX, true, AMstr("x")), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be null") - */ - AMsyncMessage const* message; - assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), - cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), - &message)); - /* assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) */ - AMitems message_changes = - AMstackItems(stack_ptr, AMsyncMessageChanges(message), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - assert_int_equal(AMitemsSize(&message_changes), 1); - /* */ - /* n1.push(items, "y") */ - AMstackItem(NULL, AMlistPutStr(test_state->n1, items, SIZE_MAX, true, 
AMstr("y")), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be - null") */ - assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), - cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), - &message)); - /* assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) */ - message_changes = AMstackItems(stack_ptr, AMsyncMessageChanges(message), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - assert_int_equal(AMitemsSize(&message_changes), 1); - /* */ - /* n1.push(items, "z") */ - AMstackItem(NULL, AMlistPutStr(test_state->n1, items, SIZE_MAX, true, AMstr("z")), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* */ - /* message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be - null") */ - assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), - cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), - &message)); - /* assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) */ - message_changes = AMstackItems(stack_ptr, AMsyncMessageChanges(message), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - assert_int_equal(AMitemsSize(&message_changes), 1); -} - -/** - * \brief should work regardless of who initiates the exchange - */ -static void test_should_work_regardless_of_who_initiates_the_exchange(void** state) { - /* create & synchronize two nodes */ - /* const n1 = create(), n2 = create() - const s1 = initSyncState(), s2 = initSyncState() */ - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->base_state->stack; - /* */ - /* 
for (let i = 0; i < 5; i++) { */ - for (size_t i = 0; i != 5; ++i) { - /* n1.put("_root", "x", i) */ - AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* */ - /* sync(n1, n2, s1, s2) */ - sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); - /* */ - /* modify the first node further */ - /* for (let i = 5; i < 10; i++) { */ - for (size_t i = 5; i != 10; ++i) { - /* n1.put("_root", "x", i) */ - AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* */ - /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ - assert_false(AMequal(test_state->n1, test_state->n2)); - /* sync(n1, n2, s1, s2) */ - sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); - /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ - assert_true(AMequal(test_state->n1, test_state->n2)); -} - -/** - * \brief should work without prior sync state - */ -static void test_should_work_without_prior_sync_state(void** state) { - /* Scenario: ,-- - * c10 <-- c11 <-- c12 <-- c13 <-- c14 c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 - * <-- c6 <-- c7 <-- c8 <-- c9 <-+ - * `-- - * c15 <-- c16 <-- c17 lastSync is undefined. 
*/ - /* */ - /* create two peers both with divergent commits */ - /* const n1 = create('01234567'), n2 = create('89abcdef') - const s1 = initSyncState(), s2 = initSyncState() */ - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->base_state->stack; - /* */ - /* for (let i = 0; i < 10; i++) { */ - for (size_t i = 0; i != 10; ++i) { - /* n1.put("_root", "x", i) */ - AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* */ - /* sync(n1, n2) */ - sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); - /* */ - /* for (let i = 10; i < 15; i++) { */ - for (size_t i = 10; i != 15; ++i) { - /* n1.put("_root", "x", i) */ - AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* */ - /* for (let i = 15; i < 18; i++) { */ - for (size_t i = 15; i != 18; ++i) { - /* n2.put("_root", "x", i) */ - AMstackItem(NULL, AMmapPutUint(test_state->n2, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n2.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n2, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* */ - /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ - assert_false(AMequal(test_state->n1, test_state->n2)); - /* sync(n1, n2) */ - sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); - /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ - AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMitems heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), 
cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - assert_true(AMitemsEqual(&heads1, &heads2)); - /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ - assert_true(AMequal(test_state->n1, test_state->n2)); -} - -/** - * \brief should work with prior sync state - */ -static void test_should_work_with_prior_sync_state_2(void** state) { - /* Scenario: - * ,-- - * c10 <-- c11 <-- c12 <-- c13 <-- c14 c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 - * <-- c6 <-- c7 <-- c8 <-- c9 <-+ - * `-- - * c15 <-- c16 <-- c17 lastSync is c9. */ - /* */ - /* create two peers both with divergent commits */ - /* const n1 = create('01234567'), n2 = create('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() */ - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->base_state->stack; - /* */ - /* for (let i = 0; i < 10; i++) { */ - for (size_t i = 0; i != 10; ++i) { - /* n1.put("_root", "x", i) */ - AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* */ - /* sync(n1, n2, s1, s2) */ - sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); - /* */ - /* for (let i = 10; i < 15; i++) { */ - for (size_t i = 10; i != 15; ++i) { - /* n1.put("_root", "x", i) */ - AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* for (let i = 15; i < 18; i++) { */ - for (size_t i = 15; i != 18; ++i) { - /* n2.put("_root", "x", i) */ - AMstackItem(NULL, AMmapPutUint(test_state->n2, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n2.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n2, AMstr(""), &TIME_0), cmocka_cb, 
AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* */ - /* s1 = decodeSyncState(encodeSyncState(s1)) */ - AMbyteSpan encoded; - assert_true(AMitemToBytes( - AMstackItem(stack_ptr, AMsyncStateEncode(test_state->s1), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &encoded)); - AMsyncState* s1; - assert_true(AMitemToSyncState(AMstackItem(stack_ptr, AMsyncStateDecode(encoded.src, encoded.count), cmocka_cb, - AMexpect(AM_VAL_TYPE_SYNC_STATE)), - &s1)); - /* s2 = decodeSyncState(encodeSyncState(s2)) */ - assert_true(AMitemToBytes( - AMstackItem(stack_ptr, AMsyncStateEncode(test_state->s2), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &encoded)); - AMsyncState* s2; - assert_true(AMitemToSyncState(AMstackItem(stack_ptr, AMsyncStateDecode(encoded.src, encoded.count), cmocka_cb, - AMexpect(AM_VAL_TYPE_SYNC_STATE)), - &s2)); - /* */ - /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ - assert_false(AMequal(test_state->n1, test_state->n2)); - /* sync(n1, n2, s1, s2) */ - sync(test_state->n1, test_state->n2, s1, s2); - /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ - AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMitems heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - assert_true(AMitemsEqual(&heads1, &heads2)); - /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ - assert_true(AMequal(test_state->n1, test_state->n2)); -} - -/** - * \brief should ensure non-empty state after sync - */ -static void test_should_ensure_non_empty_state_after_sync(void** state) { - /* const n1 = create('01234567'), n2 = create('89abcdef') - const s1 = initSyncState(), s2 = initSyncState() */ - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->base_state->stack; - /* */ - /* for (let i = 0; i < 3; i++) { */ - for (size_t i = 0; i != 3; ++i) { - /* n1.put("_root", "x", i) */ - AMstackItem(NULL, 
AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* */ - /* sync(n1, n2, s1, s2) */ - sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); - /* */ - /* assert.deepStrictEqual(s1.sharedHeads, n1.getHeads()) */ - AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMitems shared_heads1 = - AMstackItems(stack_ptr, AMsyncStateSharedHeads(test_state->s1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - assert_true(AMitemsEqual(&shared_heads1, &heads1)); - /* assert.deepStrictEqual(s2.sharedHeads, n1.getHeads()) */ - AMitems shared_heads2 = - AMstackItems(stack_ptr, AMsyncStateSharedHeads(test_state->s2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - assert_true(AMitemsEqual(&shared_heads2, &heads1)); -} - -/** - * \brief should re-sync after one node crashed with data loss - */ -static void test_should_resync_after_one_node_crashed_with_data_loss(void** state) { - /* Scenario: (r) (n2) (n1) - * c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 - * n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync - * is c2 - * we want to successfully sync (n1) with (r), even though (n1) believes - * it's talking to (n2) */ - /* const n1 = create('01234567'), n2 = create('89abcdef') - let s1 = initSyncState() - const s2 = initSyncState() */ - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->base_state->stack; - /* */ - /* n1 makes three changes, which we sync to n2 */ - /* for (let i = 0; i < 3; i++) { */ - for (size_t i = 0; i != 3; ++i) { - /* n1.put("_root", "x", i) */ - AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, 
AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* */ - sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); - /* */ - /* save a copy of n2 as "r" to simulate recovering from a crash */ - /* let r - let rSyncState - ;[r, rSyncState] = [n2.clone(), s2.clone()] */ - AMdoc* r; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMclone(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &r)); - AMbyteSpan encoded_s2; - assert_true( - AMitemToBytes(AMstackItem(stack_ptr, AMsyncStateEncode(test_state->s2), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), - &encoded_s2)); - AMsyncState* sync_state_r; - assert_true(AMitemToSyncState(AMstackItem(stack_ptr, AMsyncStateDecode(encoded_s2.src, encoded_s2.count), cmocka_cb, - AMexpect(AM_VAL_TYPE_SYNC_STATE)), - &sync_state_r)); - /* */ - /* sync another few commits */ - /* for (let i = 3; i < 6; i++) { */ - for (size_t i = 3; i != 6; ++i) { - /* n1.put("_root", "x", i) */ - AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* */ - /* sync(n1, n2, s1, s2) */ - sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); - /* */ - /* everyone should be on the same page here */ - /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ - AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMitems heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - assert_true(AMitemsEqual(&heads1, &heads2)); - /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ - assert_true(AMequal(test_state->n1, test_state->n2)); - /* */ - /* now make a few more changes and then attempt to sync the fully - * up-to-date n1 with with the confused r */ - /* for 
(let i = 6; i < 9; i++) { */ - for (size_t i = 6; i != 9; ++i) { - /* n1.put("_root", "x", i) */ - AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* */ - /* s1 = decodeSyncState(encodeSyncState(s1)) */ - AMbyteSpan encoded_s1; - assert_true( - AMitemToBytes(AMstackItem(stack_ptr, AMsyncStateEncode(test_state->s1), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), - &encoded_s1)); - AMsyncState* s1; - assert_true(AMitemToSyncState(AMstackItem(stack_ptr, AMsyncStateDecode(encoded_s1.src, encoded_s1.count), cmocka_cb, - AMexpect(AM_VAL_TYPE_SYNC_STATE)), - &s1)); - /* rSyncState = decodeSyncState(encodeSyncState(rSyncState)) */ - AMbyteSpan encoded_r; - assert_true(AMitemToBytes( - AMstackItem(stack_ptr, AMsyncStateEncode(sync_state_r), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &encoded_r)); - assert_true(AMitemToSyncState(AMstackItem(stack_ptr, AMsyncStateDecode(encoded_r.src, encoded_r.count), cmocka_cb, - AMexpect(AM_VAL_TYPE_SYNC_STATE)), - &sync_state_r)); - /* */ - /* assert.notDeepStrictEqual(n1.getHeads(), r.getHeads()) */ - heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMitems heads_r = AMstackItems(stack_ptr, AMgetHeads(r), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - assert_false(AMitemsEqual(&heads1, &heads_r)); - /* assert.notDeepStrictEqual(n1.materialize(), r.materialize()) */ - assert_false(AMequal(test_state->n1, r)); - /* assert.deepStrictEqual(n1.materialize(), { x: 8 }) */ - uint64_t uint; - assert_true(AMitemToUint(AMstackItem(stack_ptr, AMmapGet(test_state->n1, AM_ROOT, AMstr("x"), NULL), cmocka_cb, - AMexpect(AM_VAL_TYPE_UINT)), - &uint)); - assert_int_equal(uint, 8); - /* assert.deepStrictEqual(r.materialize(), { x: 2 }) */ - assert_true(AMitemToUint( - 
AMstackItem(stack_ptr, AMmapGet(r, AM_ROOT, AMstr("x"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_UINT)), &uint)); - assert_int_equal(uint, 2); - /* sync(n1, r, s1, rSyncState) */ - sync(test_state->n1, r, test_state->s1, sync_state_r); - /* assert.deepStrictEqual(n1.getHeads(), r.getHeads()) */ - heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - heads_r = AMstackItems(stack_ptr, AMgetHeads(r), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - assert_true(AMitemsEqual(&heads1, &heads_r)); - /* assert.deepStrictEqual(n1.materialize(), r.materialize()) */ - assert_true(AMequal(test_state->n1, r)); -} - -/** - * \brief should re-sync after one node experiences data loss without - * disconnecting - */ -static void test_should_resync_after_one_node_experiences_data_loss_without_disconnecting(void** state) { - /* const n1 = create('01234567'), n2 = create('89abcdef') - const s1 = initSyncState(), s2 = initSyncState() */ - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->base_state->stack; - /* */ - /* n1 makes three changes which we sync to n2 */ - /* for (let i = 0; i < 3; i++) { */ - for (size_t i = 0; i != 3; ++i) { - /* n1.put("_root", "x", i) */ - AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n1.commit("", 0) */ - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* { */ - } - /* */ - /* sync(n1, n2, s1, s2) */ - sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); - /* */ - /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ - AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMitems heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - assert_true(AMitemsEqual(&heads1, &heads2)); - /* 
assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ - assert_true(AMequal(test_state->n1, test_state->n2)); - /* */ - /* const n2AfterDataLoss = create('89abcdef') */ - AMactorId const* actor_id; - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("89abcdef")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), - &actor_id)); - AMdoc* n2_after_data_loss; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), - &n2_after_data_loss)); - /* */ - /* "n2" now has no data, but n1 still thinks it does. Note we don't do - * decodeSyncState(encodeSyncState(s1)) in order to simulate data loss - * without disconnecting */ - /* sync(n1, n2AfterDataLoss, s1, initSyncState()) */ - AMsyncState* s2_after_data_loss; - assert_true(AMitemToSyncState( - AMstackItem(stack_ptr, AMsyncStateInit(), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_STATE)), &s2_after_data_loss)); - sync(test_state->n1, n2_after_data_loss, test_state->s1, s2_after_data_loss); - /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ - heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - assert_true(AMitemsEqual(&heads1, &heads2)); - /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ - assert_true(AMequal(test_state->n1, test_state->n2)); -} - -/** - * \brief should handle changes concurrent to the last sync heads - */ -static void test_should_handle_changes_concurrrent_to_the_last_sync_heads(void** state) { - /* const n1 = create('01234567'), n2 = create('89abcdef'), n3 = - * create('fedcba98' */ - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->base_state->stack; - AMactorId const* actor_id; - assert_true(AMitemToActorId( - AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("fedcba98")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), - 
&actor_id)); - AMdoc* n3; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &n3)); - /* const s12 = initSyncState(), s21 = initSyncState(), s23 = - * initSyncState(), s32 = initSyncState( */ - AMsyncState* s12 = test_state->s1; - AMsyncState* s21 = test_state->s2; - AMsyncState* s23; - assert_true(AMitemToSyncState( - AMstackItem(stack_ptr, AMsyncStateInit(), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_STATE)), &s23)); - AMsyncState* s32; - assert_true(AMitemToSyncState( - AMstackItem(stack_ptr, AMsyncStateInit(), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_STATE)), &s32)); - /* */ - /* Change 1 is known to all three nodes */ - /* //n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 1) */ - /* n1.put("_root", "x", 1); n1.commit("", 0) */ - AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* */ - /* sync(n1, n2, s12, s21) */ - sync(test_state->n1, test_state->n2, s12, s21); - /* sync(n2, n3, s23, s32) */ - sync(test_state->n2, n3, s23, s32); - /* */ - /* Change 2 is known to n1 and n2 */ - /* n1.put("_root", "x", 2); n1.commit("", 0) */ - AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* */ - /* sync(n1, n2, s12, s21) */ - sync(test_state->n1, test_state->n2, s12, s21); - /* */ - /* Each of the three nodes makes one change (changes 3, 4, 5) */ - /* n1.put("_root", "x", 3); n1.commit("", 0) */ - AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* n2.put("_root", "x", 4); n2.commit("", 0) */ - 
AMstackItem(NULL, AMmapPutUint(test_state->n2, AM_ROOT, AMstr("x"), 4), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(test_state->n2, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* n3.put("_root", "x", 5); n3.commit("", 0) */ - AMstackItem(NULL, AMmapPutUint(n3, AM_ROOT, AMstr("x"), 5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(n3, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* */ - /* Apply n3's latest change to n2. */ - /* let change = n3.getLastLocalChange() - if (change === null) throw new RangeError("no local change") */ - AMitems changes = AMstackItems(stack_ptr, AMgetLastLocalChange(n3), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - /* n2.applyChanges([change]) */ - AMstackItem(NULL, AMapplyChanges(test_state->n2, &changes), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* */ - /* Now sync n1 and n2. n3's change is concurrent to n1 and n2's last sync - * heads */ - /* sync(n1, n2, s12, s21) */ - sync(test_state->n1, test_state->n2, s12, s21); - /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ - AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMitems heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - assert_true(AMitemsEqual(&heads1, &heads2)); - /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ - assert_true(AMequal(test_state->n1, test_state->n2)); -} - -/** - * \brief should handle histories with lots of branching and merging - */ -static void test_should_handle_histories_with_lots_of_branching_and_merging(void** state) { - /* const n1 = create('01234567'), n2 = create('89abcdef'), n3 = - create('fedcba98') const s1 = initSyncState(), s2 = initSyncState() */ - TestState* test_state = *state; - AMstack** stack_ptr = &test_state->base_state->stack; - AMactorId const* actor_id; - assert_true(AMitemToActorId( - 
AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("fedcba98")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), - &actor_id)); - AMdoc* n3; - assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &n3)); - /* n1.put("_root", "x", 0); n1.commit("", 0) */ - AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* let change1 = n1.getLastLocalChange() - if (change1 === null) throw new RangeError("no local change") */ - AMitems change1 = - AMstackItems(stack_ptr, AMgetLastLocalChange(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - /* n2.applyChanges([change1]) */ - AMstackItem(NULL, AMapplyChanges(test_state->n2, &change1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* let change2 = n1.getLastLocalChange() - if (change2 === null) throw new RangeError("no local change") */ - AMitems change2 = - AMstackItems(stack_ptr, AMgetLastLocalChange(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - /* n3.applyChanges([change2]) */ - AMstackItem(NULL, AMapplyChanges(n3, &change2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n3.put("_root", "x", 1); n3.commit("", 0) */ - AMstackItem(NULL, AMmapPutUint(n3, AM_ROOT, AMstr("x"), 1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(n3, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* */ - /* - n1c1 <------ n1c2 <------ n1c3 <-- etc. <-- n1c20 <------ n1c21 - * / \/ \/ \/ - * / /\ /\ /\ - * c0 <---- n2c1 <------ n2c2 <------ n2c3 <-- etc. 
<-- n2c20 <------ n2c21 - * \ / - * ---------------------------------------------- n3c1 <----- - */ - /* for (let i = 1; i < 20; i++) { */ - for (size_t i = 1; i != 20; ++i) { - /* n1.put("_root", "n1", i); n1.commit("", 0) */ - AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("n1"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* n2.put("_root", "n2", i); n2.commit("", 0) */ - AMstackItem(NULL, AMmapPutUint(test_state->n2, AM_ROOT, AMstr("n2"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(test_state->n2, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* const change1 = n1.getLastLocalChange() - if (change1 === null) throw new RangeError("no local change") */ - AMitems change1 = - AMstackItems(stack_ptr, AMgetLastLocalChange(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - /* const change2 = n2.getLastLocalChange() - if (change2 === null) throw new RangeError("no local change") */ - AMitems change2 = - AMstackItems(stack_ptr, AMgetLastLocalChange(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - /* n1.applyChanges([change2]) */ - AMstackItem(NULL, AMapplyChanges(test_state->n1, &change2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n2.applyChanges([change1]) */ - AMstackItem(NULL, AMapplyChanges(test_state->n2, &change1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* { */ - } - /* */ - /* sync(n1, n2, s1, s2) */ - sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); - /* */ - /* Having n3's last change concurrent to the last sync heads forces us into - * the slower code path */ - /* const change3 = n2.getLastLocalChange() - if (change3 === null) throw new RangeError("no local change") */ - AMitems change3 = AMstackItems(stack_ptr, AMgetLastLocalChange(n3), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); - /* n2.applyChanges([change3]) */ - AMstackItem(NULL, 
AMapplyChanges(test_state->n2, &change3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); - /* n1.put("_root", "n1", "final"); n1.commit("", 0) */ - AMstackItem(NULL, AMmapPutStr(test_state->n1, AM_ROOT, AMstr("n1"), AMstr("final")), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* n2.put("_root", "n2", "final"); n2.commit("", 0) */ - AMstackItem(NULL, AMmapPutStr(test_state->n2, AM_ROOT, AMstr("n2"), AMstr("final")), cmocka_cb, - AMexpect(AM_VAL_TYPE_VOID)); - AMstackItem(NULL, AMcommit(test_state->n2, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - /* */ - /* sync(n1, n2, s1, s2) */ - sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); - /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ - AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - AMitems heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); - assert_true(AMitemsEqual(&heads1, &heads2)); - /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ - assert_true(AMequal(test_state->n1, test_state->n2)); -} - -int run_ported_wasm_sync_tests(void) { - const struct CMUnitTest tests[] = { - cmocka_unit_test_setup_teardown(test_should_send_a_sync_message_implying_no_local_data, setup, teardown), - cmocka_unit_test_setup_teardown(test_should_not_reply_if_we_have_no_data_as_well, setup, teardown), - cmocka_unit_test_setup_teardown(test_repos_with_equal_heads_do_not_need_a_reply_message, setup, teardown), - cmocka_unit_test_setup_teardown(test_n1_should_offer_all_changes_to_n2_when_starting_from_nothing, setup, - teardown), - cmocka_unit_test_setup_teardown(test_should_sync_peers_where_one_has_commits_the_other_does_not, setup, - teardown), - cmocka_unit_test_setup_teardown(test_should_work_with_prior_sync_state, setup, teardown), - 
cmocka_unit_test_setup_teardown(test_should_not_generate_messages_once_synced, setup, teardown), - cmocka_unit_test_setup_teardown(test_should_allow_simultaneous_messages_during_synchronization, setup, - teardown), - cmocka_unit_test_setup_teardown(test_should_assume_sent_changes_were_received_until_we_hear_otherwise, setup, - teardown), - cmocka_unit_test_setup_teardown(test_should_work_regardless_of_who_initiates_the_exchange, setup, teardown), - cmocka_unit_test_setup_teardown(test_should_work_without_prior_sync_state, setup, teardown), - cmocka_unit_test_setup_teardown(test_should_work_with_prior_sync_state_2, setup, teardown), - cmocka_unit_test_setup_teardown(test_should_ensure_non_empty_state_after_sync, setup, teardown), - cmocka_unit_test_setup_teardown(test_should_resync_after_one_node_crashed_with_data_loss, setup, teardown), - cmocka_unit_test_setup_teardown(test_should_resync_after_one_node_experiences_data_loss_without_disconnecting, - setup, teardown), - cmocka_unit_test_setup_teardown(test_should_handle_changes_concurrrent_to_the_last_sync_heads, setup, teardown), - cmocka_unit_test_setup_teardown(test_should_handle_histories_with_lots_of_branching_and_merging, setup, - teardown), - }; - - return cmocka_run_group_tests(tests, NULL, NULL); -} diff --git a/rust/automerge-c/test/str_utils.c b/rust/automerge-c/test/str_utils.c deleted file mode 100644 index 2937217a..00000000 --- a/rust/automerge-c/test/str_utils.c +++ /dev/null @@ -1,15 +0,0 @@ -#include -#include - -/* local */ -#include "str_utils.h" - -void hex_to_bytes(char const* hex_str, uint8_t* src, size_t const count) { - unsigned int byte; - char const* next = hex_str; - for (size_t index = 0; *next && index != count; next += 2, ++index) { - if (sscanf(next, "%02x", &byte) == 1) { - src[index] = (uint8_t)byte; - } - } -} diff --git a/rust/automerge-c/test/str_utils.h b/rust/automerge-c/test/str_utils.h deleted file mode 100644 index 14a4af73..00000000 --- a/rust/automerge-c/test/str_utils.h 
+++ /dev/null @@ -1,17 +0,0 @@ -#ifndef TESTS_STR_UTILS_H -#define TESTS_STR_UTILS_H - -/** - * \brief Converts a hexadecimal string into an array of bytes. - * - * \param[in] hex_str A hexadecimal string. - * \param[in] src A pointer to an array of bytes. - * \param[in] count The count of bytes to copy into the array pointed to by - * \p src. - * \pre \p src `!= NULL` - * \pre `sizeof(`\p src `) > 0` - * \pre \p count `<= sizeof(`\p src `)` - */ -void hex_to_bytes(char const* hex_str, uint8_t* src, size_t const count); - -#endif /* TESTS_STR_UTILS_H */ diff --git a/rust/automerge-cli/src/color_json.rs b/rust/automerge-cli/src/color_json.rs deleted file mode 100644 index 9514da22..00000000 --- a/rust/automerge-cli/src/color_json.rs +++ /dev/null @@ -1,370 +0,0 @@ -use std::io::Write; - -use serde::Serialize; -use serde_json::ser::Formatter; -use termcolor::{Buffer, BufferWriter, Color, ColorSpec, WriteColor}; - -struct Style { - /// style of object brackets - object_brackets: ColorSpec, - /// style of array brackets - array_brackets: ColorSpec, - /// style of object - key: ColorSpec, - /// style of string values - string_value: ColorSpec, - /// style of integer values - integer_value: ColorSpec, - /// style of float values - float_value: ColorSpec, - /// style of bool values - bool_value: ColorSpec, - /// style of the `nil` value - nil_value: ColorSpec, - /// should the quotation get the style of the inner string/key? 
- string_include_quotation: bool, -} - -impl Default for Style { - fn default() -> Self { - Self { - object_brackets: ColorSpec::new().set_bold(true).clone(), - array_brackets: ColorSpec::new().set_bold(true).clone(), - key: ColorSpec::new() - .set_fg(Some(Color::Blue)) - .set_bold(true) - .clone(), - string_value: ColorSpec::new().set_fg(Some(Color::Green)).clone(), - integer_value: ColorSpec::new(), - float_value: ColorSpec::new(), - bool_value: ColorSpec::new(), - nil_value: ColorSpec::new(), - string_include_quotation: true, - } - } -} - -/// Write pretty printed, colored json to stdout -pub(crate) fn print_colored_json(value: &serde_json::Value) -> std::io::Result<()> { - let formatter = ColoredFormatter { - formatter: serde_json::ser::PrettyFormatter::new(), - style: Style::default(), - in_object_key: false, - }; - let mut ignored_writer = Vec::new(); - let mut ser = serde_json::Serializer::with_formatter(&mut ignored_writer, formatter); - value - .serialize(&mut ser) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string())) -} - -struct ColoredFormatter { - formatter: F, - style: Style, - in_object_key: bool, -} - -fn write_colored(color: ColorSpec, handler: H) -> std::io::Result<()> -where - H: FnOnce(&mut Buffer) -> std::io::Result<()>, -{ - let buf = BufferWriter::stdout(termcolor::ColorChoice::Auto); - let mut buffer = buf.buffer(); - buffer.set_color(&color)?; - handler(&mut buffer)?; - buffer.reset()?; - buf.print(&buffer)?; - Ok(()) -} - -impl Formatter for ColoredFormatter { - fn write_null(&mut self, _writer: &mut W) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(self.style.nil_value.clone(), |w| { - self.formatter.write_null(w) - }) - } - - fn write_bool(&mut self, _writer: &mut W, value: bool) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(self.style.bool_value.clone(), |w| { - self.formatter.write_bool(w, value) - }) - } - - fn write_i8(&mut self, _writer: 
&mut W, value: i8) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(self.style.integer_value.clone(), |w| { - self.formatter.write_i8(w, value) - }) - } - - fn write_i16(&mut self, _writer: &mut W, value: i16) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(self.style.integer_value.clone(), |w| { - self.formatter.write_i16(w, value) - }) - } - - fn write_i32(&mut self, _writer: &mut W, value: i32) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(self.style.integer_value.clone(), |w| { - self.formatter.write_i32(w, value) - }) - } - - fn write_i64(&mut self, _writer: &mut W, value: i64) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(self.style.integer_value.clone(), |w| { - self.formatter.write_i64(w, value) - }) - } - - fn write_i128(&mut self, _writer: &mut W, value: i128) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(self.style.integer_value.clone(), |w| { - self.formatter.write_i128(w, value) - }) - } - - fn write_u8(&mut self, _writer: &mut W, value: u8) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(self.style.integer_value.clone(), |w| { - self.formatter.write_u8(w, value) - }) - } - - fn write_u16(&mut self, _writer: &mut W, value: u16) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(self.style.integer_value.clone(), |w| { - self.formatter.write_u16(w, value) - }) - } - - fn write_u32(&mut self, _writer: &mut W, value: u32) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(self.style.integer_value.clone(), |w| { - self.formatter.write_u32(w, value) - }) - } - - fn write_u64(&mut self, _writer: &mut W, value: u64) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(self.style.integer_value.clone(), |w| { - self.formatter.write_u64(w, value) - }) - } - - fn write_u128(&mut 
self, _writer: &mut W, value: u128) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(self.style.integer_value.clone(), |w| { - self.formatter.write_u128(w, value) - }) - } - - fn write_f32(&mut self, _writer: &mut W, value: f32) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(self.style.float_value.clone(), |w| { - self.formatter.write_f32(w, value) - }) - } - - fn write_f64(&mut self, _writer: &mut W, value: f64) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(self.style.float_value.clone(), |w| { - self.formatter.write_f64(w, value) - }) - } - - fn write_number_str(&mut self, _writer: &mut W, value: &str) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(self.style.integer_value.clone(), |w| { - self.formatter.write_number_str(w, value) - }) - } - - fn begin_string(&mut self, _writer: &mut W) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - let style = if self.style.string_include_quotation { - if self.in_object_key { - self.style.key.clone() - } else { - self.style.string_value.clone() - } - } else { - ColorSpec::new() - }; - write_colored(style, |w| self.formatter.begin_string(w)) - } - - fn end_string(&mut self, _writer: &mut W) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - let style = if self.style.string_include_quotation { - if self.in_object_key { - self.style.key.clone() - } else { - self.style.string_value.clone() - } - } else { - ColorSpec::new() - }; - write_colored(style, |w| self.formatter.end_string(w)) - } - - fn write_string_fragment(&mut self, _writer: &mut W, fragment: &str) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - let style = if self.in_object_key { - self.style.key.clone() - } else { - self.style.string_value.clone() - }; - write_colored(style, |w| w.write_all(fragment.as_bytes())) - } - - fn write_char_escape( - &mut self, - _writer: &mut W, - 
char_escape: serde_json::ser::CharEscape, - ) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - let style = if self.in_object_key { - self.style.key.clone() - } else { - self.style.string_value.clone() - }; - write_colored(style, |w| self.formatter.write_char_escape(w, char_escape)) - } - - fn begin_array(&mut self, _writer: &mut W) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(self.style.array_brackets.clone(), |w| { - self.formatter.begin_array(w) - }) - } - - fn end_array(&mut self, _writer: &mut W) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(self.style.array_brackets.clone(), |w| { - self.formatter.end_array(w) - }) - } - - fn begin_array_value(&mut self, _writer: &mut W, first: bool) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(ColorSpec::new(), |w| { - self.formatter.begin_array_value(w, first) - }) - } - - fn end_array_value(&mut self, _writer: &mut W) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(ColorSpec::new(), |w| self.formatter.end_array_value(w)) - } - - fn begin_object(&mut self, _writer: &mut W) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(self.style.object_brackets.clone(), |w| { - self.formatter.begin_object(w) - }) - } - - fn end_object(&mut self, _writer: &mut W) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(self.style.object_brackets.clone(), |w| { - self.formatter.end_object(w) - }) - } - - fn begin_object_key(&mut self, _writer: &mut W, first: bool) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - self.in_object_key = true; - write_colored(ColorSpec::new(), |w| { - self.formatter.begin_object_key(w, first) - }) - } - - fn end_object_key(&mut self, _writer: &mut W) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - self.in_object_key = false; - write_colored(ColorSpec::new(), |w| 
self.formatter.end_object_key(w)) - } - - fn begin_object_value(&mut self, _writer: &mut W) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - self.in_object_key = false; - write_colored(ColorSpec::new(), |w| self.formatter.begin_object_value(w)) - } - - fn end_object_value(&mut self, _writer: &mut W) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - self.in_object_key = false; - write_colored(ColorSpec::new(), |w| self.formatter.end_object_value(w)) - } - - fn write_raw_fragment(&mut self, _writer: &mut W, fragment: &str) -> std::io::Result<()> - where - W: ?Sized + std::io::Write, - { - write_colored(ColorSpec::new(), |w| { - self.formatter.write_raw_fragment(w, fragment) - }) - } -} diff --git a/rust/automerge-cli/src/examine_sync.rs b/rust/automerge-cli/src/examine_sync.rs deleted file mode 100644 index c0d5df97..00000000 --- a/rust/automerge-cli/src/examine_sync.rs +++ /dev/null @@ -1,38 +0,0 @@ -use automerge::sync::ReadMessageError; - -use crate::color_json::print_colored_json; - -#[derive(Debug, thiserror::Error)] -pub enum ExamineSyncError { - #[error("Error reading message: {0}")] - ReadMessage(#[source] std::io::Error), - - #[error("error writing message: {0}")] - WriteMessage(#[source] std::io::Error), - - #[error("error writing json to output: {0}")] - WriteJson(#[source] serde_json::Error), - - #[error("Error parsing message: {0}")] - ParseMessage(#[from] ReadMessageError), -} - -pub(crate) fn examine_sync( - mut input: Box, - output: W, - is_tty: bool, -) -> Result<(), ExamineSyncError> { - let mut buf: Vec = Vec::new(); - input - .read_to_end(&mut buf) - .map_err(ExamineSyncError::ReadMessage)?; - - let message = automerge::sync::Message::decode(&buf)?; - let json = serde_json::to_value(message).unwrap(); - if is_tty { - print_colored_json(&json).map_err(ExamineSyncError::WriteMessage)?; - } else { - serde_json::to_writer(output, &json).map_err(ExamineSyncError::WriteJson)?; - } - Ok(()) -} diff --git 
a/rust/automerge-test/Cargo.toml b/rust/automerge-test/Cargo.toml deleted file mode 100644 index 9290d7ac..00000000 --- a/rust/automerge-test/Cargo.toml +++ /dev/null @@ -1,17 +0,0 @@ -[package] -name = "automerge-test" -version = "0.2.0" -edition = "2021" -license = "MIT" -repository = "https://github.com/automerge/automerge-rs" -rust-version = "1.57.0" -description = "Utilities for testing automerge libraries" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -automerge = { version = "^0.3", path = "../automerge" } -smol_str = { version = "^0.1.21", features=["serde"] } -serde = { version = "^1.0", features=["derive"] } -decorum = "0.3.1" -serde_json = { version = "^1.0.73", features=["float_roundtrip"], default-features=true } diff --git a/rust/automerge-test/README.md b/rust/automerge-test/README.md deleted file mode 100644 index 2cadabbb..00000000 --- a/rust/automerge-test/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# `automerge-test` - -Utilities for making assertions about automerge documents diff --git a/rust/automerge-wasm/.eslintignore b/rust/automerge-wasm/.eslintignore deleted file mode 100644 index 7cd573e3..00000000 --- a/rust/automerge-wasm/.eslintignore +++ /dev/null @@ -1,3 +0,0 @@ -web -nodejs -examples diff --git a/rust/automerge-wasm/.eslintrc.cjs b/rust/automerge-wasm/.eslintrc.cjs deleted file mode 100644 index 80e08d55..00000000 --- a/rust/automerge-wasm/.eslintrc.cjs +++ /dev/null @@ -1,11 +0,0 @@ -module.exports = { - root: true, - parser: '@typescript-eslint/parser', - plugins: [ - '@typescript-eslint', - ], - extends: [ - 'eslint:recommended', - 'plugin:@typescript-eslint/recommended', - ], -}; diff --git a/rust/automerge-wasm/LICENSE b/rust/automerge-wasm/LICENSE deleted file mode 100644 index 63b21502..00000000 --- a/rust/automerge-wasm/LICENSE +++ /dev/null @@ -1,10 +0,0 @@ -MIT License - -Copyright 2022, Ink & Switch LLC - -Permission is hereby granted, free of charge, 
to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - diff --git a/rust/automerge-wasm/README.md b/rust/automerge-wasm/README.md deleted file mode 100644 index 20256313..00000000 --- a/rust/automerge-wasm/README.md +++ /dev/null @@ -1,469 +0,0 @@ -## Automerge WASM Low Level Interface - -This package is a low level interface to the [automerge rust](https://github.com/automerge/automerge-rs/tree/experiment) CRDT. The api is intended to be as "close to the metal" as possible with only a few ease of use accommodations. This library is used as the underpinnings for the [Automerge JS wrapper](https://github.com/automerge/automerge-rs/tree/experiment/automerge-js) and can be used as is or as a basis for another higher level expression of a CRDT. - -All example code can be found in `test/readme.ts` - -### Why CRDT? - -CRDT stands for Conflict Free Replicated Data Type. 
It is a data structure that offers eventual consistency where multiple actors can write to the document independently and then these edits can be automatically merged together into a coherent document that, as much as possible, preserves the intent of the different writers. This allows for novel masterless application design where different components need not have a central coordinating server when altering application state. - -### Terminology - -The term Actor, Object Id and Heads are used through this documentation. Detailed explanations are in the glossary at the end of this readme. But the most basic definition would be... - -An Actor is a unique id that distinguishes a single writer to a document. It can be any hex string. - -An Object id uniquely identifies a Map, List or Text object within a document. It can be treated as an opaque string and can be used across documents. This id comes as a string in the form of `{number}@{actor}` - so `"10@aabbcc"` for example. The string `"_root"` or `"/"` can also be used to refer to the document root. These strings are durable and can be used on any descendant or copy of the document that generated them. - -Heads refers to a set of hashes that uniquely identifies a point in time in a document's history. Heads are useful for comparing documents state or retrieving past states from the document. - -### Automerge Scalar Types - -Automerge has many scalar types. Methods like `put()` and `insert()` take an optional data type parameter. Normally the type can be inferred but in some cases, such as telling the difference between int, uint and a counter, it cannot. 
- -These are puts without a data type - -```javascript - import { create } from "@automerge/automerge-wasm" - - let doc = create() - doc.put("/", "prop1", 100) // int - doc.put("/", "prop2", 3.14) // f64 - doc.put("/", "prop3", "hello world") - doc.put("/", "prop4", new Date()) - doc.put("/", "prop5", new Uint8Array([1,2,3])) - doc.put("/", "prop6", true) - doc.put("/", "prop7", null) -``` - -Put's with a data type and examples of all the supported data types. - -While int vs uint vs f64 matters little in javascript, Automerge is a cross platform library where these distinctions matter. - -```javascript - import { create } from "@automerge/automerge-wasm" - - let doc = create() - doc.put("/", "prop1", 100, "int") - doc.put("/", "prop2", 100, "uint") - doc.put("/", "prop3", 100.5, "f64") - doc.put("/", "prop4", 100, "counter") - doc.put("/", "prop5", 1647531707301, "timestamp") - doc.put("/", "prop6", new Date(), "timestamp") - doc.put("/", "prop7", "hello world", "str") - doc.put("/", "prop8", new Uint8Array([1,2,3]), "bytes") - doc.put("/", "prop9", true, "boolean") - doc.put("/", "prop10", null, "null") -``` - -### Automerge Object Types - -Automerge WASM supports 3 object types. Maps, lists, and text. Maps are key value stores where the values can be any scalar type or any object type. Lists are numerically indexed sets of data that can hold any scalar or any object type. 
- -```javascript - import { create } from "@automerge/automerge-wasm" - - let doc = create() - - // you can create an object by passing in the inital state - if blank pass in `{}` - // the return value is the Object Id - // these functions all return an object id - - let config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) - let token = doc.putObject("/", "tokens", {}) - - // lists can be made with javascript arrays - - let birds = doc.putObject("/", "birds", ["bluejay", "penguin", "puffin"]) - let bots = doc.putObject("/", "bots", []) - - // text is initialized with a string - - let notes = doc.putObject("/", "notes", "Hello world!") -``` - -You can access objects by passing the object id as the first parameter for a call. - -```javascript - import { create } from "@automerge/automerge-wasm" - - let doc = create() - - let config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) - - doc.put(config, "align", "right") - - // Anywhere Object Ids are being used a path can also be used. - // The following two statements are equivalent: - - // get the id then use it - - // get returns a single simple javascript value or undefined - // getWithType returns an Array of the datatype plus basic type or null - - let id = doc.getWithType("/", "config") - if (id && id[0] === 'map') { - doc.put(id[1], "align", "right") - } - - // use a path instead - - doc.put("/config", "align", "right") -``` - -Using the id directly is always faster (as it prevents the path to id conversion internally) so it is preferred for performance critical code. - -### Maps - -Maps are key/value stores. The root object is always a map. The keys are always strings. The values can be any scalar type or any object. 
- -```javascript - let doc = create() - let mymap = doc.putObject("_root", "mymap", { foo: "bar"}) - // make a new map with the foo key - - doc.put(mymap, "bytes", new Uint8Array([1,2,3])) - // assign a byte array to key `bytes` of the mymap object - - let submap = doc.putObject(mymap, "sub", {}) - // make a new empty object and assign it to the key `sub` of mymap - - doc.keys(mymap) // returns ["bytes","foo","sub"] - doc.materialize("_root") // returns { mymap: { bytes: new Uint8Array([1,2,3]), foo: "bar", sub: {}}} -``` - -### Lists - -Lists are index addressable sets of values. These values can be any scalar or object type. You can manipulate lists with `insert()`, `put()`, `insertObject()`, `putObject()`, `push()`, `pushObject()`, `splice()`, and `delete()`. - -```javascript - let doc = create() - let items = doc.putObject("_root", "items", [10,"box"]) - // init a new list with two elements - doc.push(items, true) // push `true` to the end of the list - doc.putObject(items, 0, { hello: "world" }) // overwrite the value 10 with an object with a key and value - doc.delete(items, 1) // delete "box" - doc.splice(items, 2, 0, ["bag", "brick"]) // splice in "bag" and "brick" at position 2 - doc.insert(items, 0, "bat") // insert "bat" to the beginning of the list - doc.insertObject(items, 1, [1,2]) // insert a list with 2 values at pos 1 - - doc.materialize(items) // returns [ "bat", [1,2], { hello : "world" }, true, "bag", "brick"] - doc.length(items) // returns 6 -``` - -### Text - -Text is a specialized list type intended for modifying a text document. The primary way to interact with a text document is via the `splice()` method. Spliced strings will be indexable by character (important to note for platforms that index by graphmeme cluster). 
- -```javascript - let doc = create("aaaaaa") - let notes = doc.putObject("_root", "notes", "Hello world") - doc.splice(notes, 6, 5, "everyone") - - doc.text(notes) // returns "Hello everyone" -``` - -### Tables - -Automerge's Table type is currently not implemented. - -### Querying Data - -When querying maps use the `get()` method with the object in question and the property to query. This method returns a tuple with the data type and the data. The `keys()` method will return all the keys on the object. If you are interested in conflicted values from a merge use `getAll()` instead which returns an array of values instead of just the winner. - -```javascript - let doc1 = create("aabbcc") - doc1.put("_root", "key1", "val1") - let key2 = doc1.putObject("_root", "key2", []) - - doc1.get("_root", "key1") // returns "val1" - doc1.getWithType("_root", "key2") // returns ["list", "2@aabbcc"] - doc1.keys("_root") // returns ["key1", "key2"] - - let doc2 = doc1.fork("ffaaff") - - // put a value concurrently - doc1.put("_root","key3","doc1val") - doc2.put("_root","key3","doc2val") - - doc1.merge(doc2) - - doc1.get("_root","key3") // returns "doc2val" - doc1.getAll("_root","key3") // returns [[ "str", "doc1val"], ["str", "doc2val"]] -``` - -### Counters - -Counters are 64 bit ints that support the increment operation. Frequently different actors will want to increment or decrement a number and have all these coalesse into a merged value. - -```javascript - let doc1 = create("aaaaaa") - doc1.put("_root", "number", 0) - doc1.put("_root", "total", 0, "counter") - - let doc2 = doc1.fork("bbbbbb") - doc2.put("_root", "number", 10) - doc2.increment("_root", "total", 11) - - doc1.put("_root", "number", 20) - doc1.increment("_root", "total", 22) - - doc1.merge(doc2) - - doc1.materialize("_root") // returns { number: 10, total: 33 } -``` - -### Transactions - -Generally speaking you don't need to think about transactions when using Automerge. 
Normal edits queue up into an in-progress transaction. You can query the number of ops in the current transaction with `pendingOps()`. The transaction will commit automatically on certains calls such as `save()`, `saveIncremental()`, `fork()`, `merge()`, `getHeads()`, `applyChanges()`, `generateSyncMessage()`, and `receiveSyncMessage()`. When the transaction commits the heads of the document change. If you want to roll back all the in progress ops you can call `doc.rollback()`. If you want to manually commit a transaction in progress you can call `doc.commit()` with an optional commit message and timestamp. - -```javascript - let doc = create() - - doc.put("_root", "key", "val1") - - doc.get("_root", "key") // returns "val1" - doc.pendingOps() // returns 1 - - doc.rollback() - - doc.get("_root", "key") // returns null - doc.pendingOps() // returns 0 - - doc.put("_root", "key", "val2") - - doc.pendingOps() // returns 1 - - doc.commit("test commit 1") - - doc.get("_root", "key") // returns "val2" - doc.pendingOps() // returns 0 -``` - -### Viewing Old Versions of the Document - -All query functions can take an optional argument of `heads` which allow you to query a prior document state. Heads are a set of change hashes that uniquely identify a point in the document history. The `getHeads()` method can retrieve these at any point. - -```javascript - let doc = create() - - doc.put("_root", "key", "val1") - let heads1 = doc.getHeads() - - doc.put("_root", "key", "val2") - let heads2 = doc.getHeads() - - doc.put("_root", "key", "val3") - - doc.get("_root","key") // returns "val3" - doc.get("_root","key",heads2) // returns "val2" - doc.get("_root","key",heads1) // returns "val1" - doc.get("_root","key",[]) // returns undefined -``` - -This works for `get()`, `getAll()`, `keys()`, `length()`, `text()`, and `materialize()` - -Queries of old document states are not indexed internally and will be slower than normal access. 
If you need a fast indexed version of a document at a previous point in time you can create one with `doc.forkAt(heads, actor?)` - -### Forking and Merging - -You can `fork()` a document which makes an exact copy of it. This assigns a new actor so changes made to the fork can be merged back in with the original. The `forkAt()` takes a Heads, allowing you to fork off a document from a previous point in its history. These documents allocate new memory in WASM and need to be freed. - -The `merge()` command applies all changes in the argument doc into the calling doc. Therefore if doc a has 1000 changes that doc b lacks and doc b has only 10 changes that doc a lacks, `a.merge(b)` will be much faster than `b.merge(a)`. - -```javascript - let doc1 = create() - doc1.put("_root", "key1", "val1") - - let doc2 = doc1.fork() - - doc1.put("_root", "key2", "val2") - doc2.put("_root", "key3", "val3") - - doc1.merge(doc2) - - doc1.materialize("_root") // returns { key1: "val1", key2: "val2", key3: "val3" } - doc2.materialize("_root") // returns { key1: "val1", key3: "val3" } -``` - -Note that calling `a.merge(a)` will produce an unrecoverable error from the wasm-bindgen layer which (as of this writing) there is no workaround for. - -### Saving and Loading - -Calling `save()` converts the document to a compressed `Uint8Array()` that can be saved to durable storage. This format uses a columnar storage format that compresses away most of the Automerge metadata needed to manage the CRDT state, but does include all of the change history. - -If you wish to incrementally update a saved Automerge doc you can call `saveIncremental()` to get a `Uint8Array()` of bytes that can be appended to the file with all the new changes(). Note that the `saveIncremental()` bytes are not as compressed as the whole document save as each chunk has metadata information needed to parse it. It may make sense to periodically perform a new `save()` to get the smallest possible file footprint. 
- -The `load()` function takes a `Uint8Array()` of bytes produced in this way and constitutes a new document. The `loadIncremental()` method is available if you wish to consume the result of a `saveIncremental()` with an already instanciated document. - -```javascript - import { create, load } from "@automerge/automerge-wasm" - - let doc1 = create() - - doc1.put("_root", "key1", "value1") - - let save1 = doc1.save() - - let doc2 = load(save1) - - doc2.materialize("_root") // returns { key1: "value1" } - - doc1.put("_root", "key2", "value2") - - let saveIncremental = doc1.saveIncremental() - - let save2 = doc1.save() - - let save3 = new Uint8Array([... save1, ... saveIncremental]) - - // save2 has fewer bytes than save3 but contains the same ops - - doc2.loadIncremental(saveIncremental) - - let doc3 = load(save2) - - let doc4 = load(save3) - - doc1.materialize("_root") // returns { key1: "value1", key2: "value2" } - doc2.materialize("_root") // returns { key1: "value1", key2: "value2" } - doc3.materialize("_root") // returns { key1: "value1", key2: "value2" } - doc4.materialize("_root") // returns { key1: "value1", key2: "value2" } -``` - -One interesting feature of automerge binary saves is that they can be concatenated together in any order and can still be loaded into a coherent merged document. - -```javascript -import { load } from "@automerge/automerge-wasm" -import * as fs from "fs" - -let file1 = fs.readFileSync("automerge_save_1"); -let file2 = fs.readFileSync("automerge_save_2"); - -let docA = load(file1).merge(load(file2)) -let docB = load(Buffer.concat([ file1, file2 ])) - -assert.deepEqual(docA.materialize("/"), docB.materialize("/")) -assert.equal(docA.save(), docB.save()) -``` - -### Syncing - -When syncing a document the `generateSyncMessage()` and `receiveSyncMessage()` methods will produce and consume sync messages. 
A sync state object will need to be managed for the duration of the connection (created by the function `initSyncState()` and can be serialized to a Uint8Array() to preserve sync state with the `encodeSyncState()` and `decodeSyncState()` functions. - -A very simple sync implementation might look like this. - -```javascript - import { encodeSyncState, decodeSyncState, initSyncState } from "@automerge/automerge-wasm" - - let states = {} - - function receiveMessageFromPeer(doc, peer_id, message) { - let syncState = states[peer_id] - doc.receiveMessage(syncState, message) - let reply = doc.generateSyncMessage(syncState) - if (reply) { - sendMessage(peer_id, reply) - } - } - - function notifyPeerAboutUpdates(doc, peer_id) { - let syncState = states[peer_id] - let message = doc.generateSyncMessage(syncState) - if (message) { - sendMessage(peer_id, message) - } - } - - function onDisconnect(peer_id) { - let state = states[peer_id] - if (state) { - saveSyncToStorage(peer_id, encodeSyncState(state)) - } - delete states[peer_id] - } - - function onConnect(peer_id) { - let state = loadSyncFromStorage(peer_id) - if (state) { - states[peer_id] = decodeSyncState(state) - } else { - states[peer_id] = initSyncState() - } - } -``` - -### Glossary: Actors - -Some basic concepts you will need to know to better understand the api are Actors and Object Ids. - -Actors are ids that need to be unique to each process writing to a document. This is normally one actor per device. Or for a web app one actor per tab per browser would be needed. It can be a uuid, or a public key, or a certificate, as your application demands. All that matters is that its bytes are unique. Actors are always expressed in this api as a hex string. - -Methods that create new documents will generate random actors automatically - if you wish to supply your own it is always taken as an optional argument. This is true for the following functions. 
- -```javascript - import { create, load } from "@automerge/automerge-wasm" - - let doc1 = create() // random actorid - let doc2 = create("aabbccdd") - let doc3 = doc1.fork() // random actorid - let doc4 = doc2.fork("ccdd0011") - let doc5 = load(doc3.save()) // random actorid - let doc6 = load(doc4.save(), "00aabb11") - - let actor = doc1.getActor() -``` - -### Glossary: Object Id's - -Object Ids uniquely identify an object within a document. They are represented as strings in the format of `{counter}@{actor}`. The root object is a special case and can be referred to as `_root`. The counter is an ever increasing integer, starting at 1, that is always one higher than the highest counter seen in the document thus far. Object Id's do not change when the object is modified but they do if it is overwritten with a new object. - -```javascript - let doc = create("aabbcc") - let o1 = doc.putObject("_root", "o1", {}) - let o2 = doc.putObject("_root", "o2", {}) - doc.put(o1, "hello", "world") - - assert.deepEqual(doc.materialize("_root"), { "o1": { hello: "world" }, "o2": {} }) - assert.equal(o1, "1@aabbcc") - assert.equal(o2, "2@aabbcc") - - let o1v2 = doc.putObject("_root", "o1", {}) - - doc.put(o1, "a", "b") // modifying an overwritten object - does nothing - doc.put(o1v2, "x", "y") // modifying the new "o1" object - - assert.deepEqual(doc.materialize("_root"), { "o1": { x: "y" }, "o2": {} }) -``` - -### Appendix: Building - - The following steps should allow you to build the package - - ``` - $ rustup target add wasm32-unknown-unknown - $ cargo install wasm-bindgen-cli - $ cargo install wasm-opt - $ yarn - $ yarn release - $ yarn pack - ``` - -### Appendix: WASM and Memory Allocation - -Allocated memory in rust will be freed automatically on platforms that support `FinalizationRegistry`. - -This is currently supported in [all major browsers and nodejs](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/FinalizationRegistry). 
- -On unsupported platforms you can free memory explicitly. - -```javascript - import { create, initSyncState } from "@automerge/automerge-wasm" - - let doc = create() - let sync = initSyncState() - - doc.free() - sync.free() -``` diff --git a/rust/automerge-wasm/deno-tests/deno.ts b/rust/automerge-wasm/deno-tests/deno.ts deleted file mode 100644 index b346435a..00000000 --- a/rust/automerge-wasm/deno-tests/deno.ts +++ /dev/null @@ -1,8 +0,0 @@ -// @deno-types="../index.d.ts" -import { create } from '../deno/automerge_wasm.js' - -Deno.test("It should create, clone and free", () => { - const doc1 = create(false) - const doc2 = doc1.clone() - doc2.free() -}); diff --git a/rust/automerge-wasm/examples/cra/public/favicon.ico b/rust/automerge-wasm/examples/cra/public/favicon.ico deleted file mode 100644 index a11777cc..00000000 Binary files a/rust/automerge-wasm/examples/cra/public/favicon.ico and /dev/null differ diff --git a/rust/automerge-wasm/examples/cra/public/index.html b/rust/automerge-wasm/examples/cra/public/index.html deleted file mode 100644 index aa069f27..00000000 --- a/rust/automerge-wasm/examples/cra/public/index.html +++ /dev/null @@ -1,43 +0,0 @@ - - - - - - - - - - - - - React App - - - -
- - - diff --git a/rust/automerge-wasm/examples/cra/public/logo192.png b/rust/automerge-wasm/examples/cra/public/logo192.png deleted file mode 100644 index fc44b0a3..00000000 Binary files a/rust/automerge-wasm/examples/cra/public/logo192.png and /dev/null differ diff --git a/rust/automerge-wasm/examples/cra/public/logo512.png b/rust/automerge-wasm/examples/cra/public/logo512.png deleted file mode 100644 index a4e47a65..00000000 Binary files a/rust/automerge-wasm/examples/cra/public/logo512.png and /dev/null differ diff --git a/rust/automerge-wasm/examples/cra/public/manifest.json b/rust/automerge-wasm/examples/cra/public/manifest.json deleted file mode 100644 index 080d6c77..00000000 --- a/rust/automerge-wasm/examples/cra/public/manifest.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "short_name": "React App", - "name": "Create React App Sample", - "icons": [ - { - "src": "favicon.ico", - "sizes": "64x64 32x32 24x24 16x16", - "type": "image/x-icon" - }, - { - "src": "logo192.png", - "type": "image/png", - "sizes": "192x192" - }, - { - "src": "logo512.png", - "type": "image/png", - "sizes": "512x512" - } - ], - "start_url": ".", - "display": "standalone", - "theme_color": "#000000", - "background_color": "#ffffff" -} diff --git a/rust/automerge-wasm/examples/cra/public/robots.txt b/rust/automerge-wasm/examples/cra/public/robots.txt deleted file mode 100644 index e9e57dc4..00000000 --- a/rust/automerge-wasm/examples/cra/public/robots.txt +++ /dev/null @@ -1,3 +0,0 @@ -# https://www.robotstxt.org/robotstxt.html -User-agent: * -Disallow: diff --git a/rust/automerge-wasm/examples/cra/src/App.css b/rust/automerge-wasm/examples/cra/src/App.css deleted file mode 100644 index 74b5e053..00000000 --- a/rust/automerge-wasm/examples/cra/src/App.css +++ /dev/null @@ -1,38 +0,0 @@ -.App { - text-align: center; -} - -.App-logo { - height: 40vmin; - pointer-events: none; -} - -@media (prefers-reduced-motion: no-preference) { - .App-logo { - animation: App-logo-spin infinite 20s linear; - 
} -} - -.App-header { - background-color: #282c34; - min-height: 100vh; - display: flex; - flex-direction: column; - align-items: center; - justify-content: center; - font-size: calc(10px + 2vmin); - color: white; -} - -.App-link { - color: #61dafb; -} - -@keyframes App-logo-spin { - from { - transform: rotate(0deg); - } - to { - transform: rotate(360deg); - } -} diff --git a/rust/automerge-wasm/examples/cra/src/logo.svg b/rust/automerge-wasm/examples/cra/src/logo.svg deleted file mode 100644 index 9dfc1c05..00000000 --- a/rust/automerge-wasm/examples/cra/src/logo.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/rust/automerge-wasm/examples/webpack/.gitignore b/rust/automerge-wasm/examples/webpack/.gitignore deleted file mode 100644 index da9d3ff5..00000000 --- a/rust/automerge-wasm/examples/webpack/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -yarn.lock -node_modules -public/*.wasm -public/main.js -dist diff --git a/rust/automerge-wasm/examples/webpack/package.json b/rust/automerge-wasm/examples/webpack/package.json deleted file mode 100644 index 4abcd1c6..00000000 --- a/rust/automerge-wasm/examples/webpack/package.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "name": "webpack-automerge-example", - "version": "0.1.0", - "description": "", - "private": true, - "scripts": { - "build": "webpack", - "start": "serve public", - "test": "node dist/node.js" - }, - "author": "", - "dependencies": { - "automerge-wasm": "file:automerge-wasm-0.1.4.tgz" - }, - "devDependencies": { - "serve": "^13.0.2", - "webpack": "^5.72.1", - "webpack-cli": "^4.9.2", - "webpack-node-externals": "^3.0.0" - } -} diff --git a/rust/automerge-wasm/examples/webpack/public/index.html b/rust/automerge-wasm/examples/webpack/public/index.html deleted file mode 100644 index 5003393a..00000000 --- a/rust/automerge-wasm/examples/webpack/public/index.html +++ /dev/null @@ -1,10 +0,0 @@ - - - - - Simple Webpack for automerge-wasm - - - - - diff --git 
a/rust/automerge-wasm/examples/webpack/src/index.js b/rust/automerge-wasm/examples/webpack/src/index.js deleted file mode 100644 index bab417f5..00000000 --- a/rust/automerge-wasm/examples/webpack/src/index.js +++ /dev/null @@ -1,23 +0,0 @@ -import init, { create } from "automerge-wasm" - -// hello world code that will run correctly on web or node - -init().then((Automerge) => { - console.log("Automerge=", Automerge) - console.log("create=", create) - const doc = Automerge.create() - doc.put("/", "hello", "world") - const result = doc.materialize("/") - //const result = xxx - - if (typeof document !== 'undefined') { - // browser - const element = document.createElement('div'); - element.innerHTML = JSON.stringify(result) - document.body.appendChild(element); - } else { - // server - console.log("node:", result) - } -}) - diff --git a/rust/automerge-wasm/examples/webpack/webpack.config.js b/rust/automerge-wasm/examples/webpack/webpack.config.js deleted file mode 100644 index 3ab0e798..00000000 --- a/rust/automerge-wasm/examples/webpack/webpack.config.js +++ /dev/null @@ -1,35 +0,0 @@ -const path = require('path'); -const nodeExternals = require('webpack-node-externals'); - -// the most basic webpack config for node or web targets for automerge-wasm - -const serverConfig = { - // basic setup for bundling a node package - target: 'node', - externals: [nodeExternals()], - externalsPresets: { node: true }, - - entry: './src/index.js', - output: { - filename: 'node.js', - path: path.resolve(__dirname, 'dist'), - }, - mode: "development", // or production -}; - -const clientConfig = { - target: 'web', - entry: './src/index.js', - output: { - filename: 'main.js', - path: path.resolve(__dirname, 'public'), - }, - mode: "development", // or production - performance: { // we dont want the wasm blob to generate warnings - hints: false, - maxEntrypointSize: 512000, - maxAssetSize: 512000 - } -}; - -module.exports = [serverConfig, clientConfig]; diff --git 
a/rust/automerge-wasm/index.d.ts b/rust/automerge-wasm/index.d.ts deleted file mode 100644 index be12e4c1..00000000 --- a/rust/automerge-wasm/index.d.ts +++ /dev/null @@ -1,238 +0,0 @@ -export type Actor = string; -export type ObjID = string; -export type Change = Uint8Array; -export type SyncMessage = Uint8Array; -export type Prop = string | number; -export type Hash = string; -export type Heads = Hash[]; -export type Value = string | number | boolean | null | Date | Uint8Array -export type MaterializeValue = { [key:string]: MaterializeValue } | Array | Value -export type ObjType = string | Array | { [key: string]: ObjType | Value } -export type FullValue = - ["str", string] | - ["int", number] | - ["uint", number] | - ["f64", number] | - ["boolean", boolean] | - ["timestamp", Date] | - ["counter", number] | - ["bytes", Uint8Array] | - ["null", null] | - ["map", ObjID] | - ["list", ObjID] | - ["text", ObjID] | - ["table", ObjID] - -export type FullValueWithId = - ["str", string, ObjID ] | - ["int", number, ObjID ] | - ["uint", number, ObjID ] | - ["f64", number, ObjID ] | - ["boolean", boolean, ObjID ] | - ["timestamp", Date, ObjID ] | - ["counter", number, ObjID ] | - ["bytes", Uint8Array, ObjID ] | - ["null", null, ObjID ] | - ["map", ObjID ] | - ["list", ObjID] | - ["text", ObjID] | - ["table", ObjID] - -export enum ObjTypeName { - list = "list", - map = "map", - table = "table", - text = "text", -} - -export type Datatype = - "boolean" | - "str" | - "int" | - "uint" | - "f64" | - "null" | - "timestamp" | - "counter" | - "bytes" | - "map" | - "text" | - "list"; - -export type SyncHave = { - lastSync: Heads, - bloom: Uint8Array, -} - -export type DecodedSyncMessage = { - heads: Heads, - need: Heads, - have: SyncHave[] - changes: Change[] -} - -export type DecodedChange = { - actor: Actor, - seq: number - startOp: number, - time: number, - message: string | null, - deps: Heads, - hash: Hash, - ops: Op[] -} - -type PartialBy = Omit & Partial> -export type 
ChangeToEncode = PartialBy - -export type Op = { - action: string, - obj: ObjID, - key: string, - value?: string | number | boolean, - datatype?: string, - pred: string[], -} - -export type Patch = PutPatch | DelPatch | SpliceTextPatch | IncPatch | InsertPatch; - -export type PutPatch = { - action: 'put' - path: Prop[], - value: Value - conflict: boolean -} - -export type IncPatch = { - action: 'inc' - path: Prop[], - value: number -} - -export type DelPatch = { - action: 'del' - path: Prop[], - length?: number, -} - -export type SpliceTextPatch = { - action: 'splice' - path: Prop[], - value: string, -} - -export type InsertPatch = { - action: 'insert' - path: Prop[], - values: Value[], -} - -export function encodeChange(change: ChangeToEncode): Change; -export function create(text_v2: boolean, actor?: Actor): Automerge; -export function load(data: Uint8Array, text_v2: boolean, actor?: Actor): Automerge; -export function decodeChange(change: Change): DecodedChange; -export function initSyncState(): SyncState; -export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; -export function decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage; -export function encodeSyncState(state: SyncState): Uint8Array; -export function decodeSyncState(data: Uint8Array): SyncState; -export function exportSyncState(state: SyncState): JsSyncState; -export function importSyncState(state: JsSyncState): SyncState; - -export interface API { - create(text_v2: boolean, actor?: Actor): Automerge; - load(data: Uint8Array, text_v2: boolean, actor?: Actor): Automerge; - encodeChange(change: ChangeToEncode): Change; - decodeChange(change: Change): DecodedChange; - initSyncState(): SyncState; - encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; - decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage; - encodeSyncState(state: SyncState): Uint8Array; - decodeSyncState(data: Uint8Array): SyncState; - exportSyncState(state: SyncState): JsSyncState; - 
importSyncState(state: JsSyncState): SyncState; -} - -export class Automerge { - // change state - put(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): void; - putObject(obj: ObjID, prop: Prop, value: ObjType): ObjID; - insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): void; - insertObject(obj: ObjID, index: number, value: ObjType): ObjID; - push(obj: ObjID, value: Value, datatype?: Datatype): void; - pushObject(obj: ObjID, value: ObjType): ObjID; - splice(obj: ObjID, start: number, delete_count: number, text?: string | Array): ObjID[] | undefined; - increment(obj: ObjID, prop: Prop, value: number): void; - delete(obj: ObjID, prop: Prop): void; - - // returns a single value - if there is a conflict return the winner - get(obj: ObjID, prop: Prop, heads?: Heads): Value | undefined; - getWithType(obj: ObjID, prop: Prop, heads?: Heads): FullValue | null; - // return all values in case of a conflict - getAll(obj: ObjID, arg: Prop, heads?: Heads): FullValueWithId[]; - keys(obj: ObjID, heads?: Heads): string[]; - text(obj: ObjID, heads?: Heads): string; - length(obj: ObjID, heads?: Heads): number; - materialize(obj?: ObjID, heads?: Heads, metadata?: unknown): MaterializeValue; - toJS(): MaterializeValue; - - // transactions - commit(message?: string, time?: number): Hash | null; - emptyChange(message?: string, time?: number): Hash; - merge(other: Automerge): Heads; - getActorId(): Actor; - pendingOps(): number; - rollback(): number; - - // patches - enablePatches(enable: boolean): boolean; - enableFreeze(enable: boolean): boolean; - registerDatatype(datatype: string, callback: Function): void; - popPatches(): Patch[]; - - // save and load to local store - save(): Uint8Array; - saveIncremental(): Uint8Array; - loadIncremental(data: Uint8Array): number; - - // sync over network - receiveSyncMessage(state: SyncState, message: SyncMessage): void; - generateSyncMessage(state: SyncState): SyncMessage | null; - - // low level change functions - 
applyChanges(changes: Change[]): void; - getChanges(have_deps: Heads): Change[]; - getChangeByHash(hash: Hash): Change | null; - getChangesAdded(other: Automerge): Change[]; - getHeads(): Heads; - getLastLocalChange(): Change | null; - getMissingDeps(heads?: Heads): Heads; - - // memory management - free(): void; // only needed if weak-refs are unsupported - clone(actor?: string): Automerge; // TODO - remove, this is dangerous - fork(actor?: string, heads?: Heads): Automerge; - - // dump internal state to console.log - for debugging - dump(): void; - - // experimental api can go here - applyPatches(obj: Doc, meta?: unknown, callback?: (patch: Array, before: Doc, after: Doc) => void): Doc; -} - -export interface JsSyncState { - sharedHeads: Heads; - lastSentHeads: Heads; - theirHeads: Heads | undefined; - theirHeed: Heads | undefined; - theirHave: SyncHave[] | undefined; - sentHashes: Heads; -} - -export class SyncState { - free(): void; - clone(): SyncState; - lastSentHeads: Heads; - sentHashes: Heads; - readonly sharedHeads: Heads; -} diff --git a/rust/automerge-wasm/package.json b/rust/automerge-wasm/package.json deleted file mode 100644 index 80b39fd4..00000000 --- a/rust/automerge-wasm/package.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "collaborators": [ - "Orion Henry ", - "Alex Good ", - "Martin Kleppmann" - ], - "name": "@automerge/automerge-wasm", - "description": "wasm-bindgen bindings to the automerge rust implementation", - "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", - "repository": "github:automerge/automerge-rs", - "version": "0.1.25", - "license": "MIT", - "files": [ - "README.md", - "LICENSE", - "package.json", - "index.d.ts", - "nodejs/automerge_wasm.js", - "nodejs/automerge_wasm_bg.wasm", - "deno/automerge_wasm.js", - "deno/automerge_wasm_bg.wasm", - "bundler/automerge_wasm.js", - "bundler/automerge_wasm_bg.js", - "bundler/automerge_wasm_bg.wasm" - ], - "private": false, - "types": "index.d.ts", - "module": 
"./bundler/automerge_wasm.js", - "main": "./nodejs/automerge_wasm.js", - "scripts": { - "lint": "eslint test/*.ts index.d.ts", - "debug": "cross-env PROFILE=dev TARGET_DIR=debug yarn buildall", - "build": "cross-env PROFILE=dev TARGET_DIR=debug FEATURES='' yarn buildall", - "release": "cross-env PROFILE=release TARGET_DIR=release yarn buildall", - "buildall": "cross-env TARGET=nodejs yarn target && cross-env TARGET=bundler yarn target && cross-env TARGET=deno yarn target", - "target": "rimraf ./$TARGET && yarn compile && yarn bindgen && yarn opt", - "compile": "cargo build --target wasm32-unknown-unknown --profile $PROFILE", - "bindgen": "wasm-bindgen --no-typescript --weak-refs --target $TARGET --out-dir $TARGET ../target/wasm32-unknown-unknown/$TARGET_DIR/automerge_wasm.wasm", - "opt": "wasm-opt -O4 $TARGET/automerge_wasm_bg.wasm -o $TARGET/automerge_wasm_bg.wasm", - "test": "ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" - }, - "devDependencies": { - "@types/mocha": "^10.0.1", - "@types/node": "^18.11.13", - "@typescript-eslint/eslint-plugin": "^5.46.0", - "@typescript-eslint/parser": "^5.46.0", - "cross-env": "^7.0.3", - "eslint": "^8.29.0", - "fast-sha256": "^1.3.0", - "mocha": "^10.2.0", - "pako": "^2.1.0", - "rimraf": "^3.0.2", - "ts-mocha": "^10.0.0", - "typescript": "^4.9.4" - }, - "exports": { - "browser": "./bundler/automerge_wasm.js", - "require": "./nodejs/automerge_wasm.js" - } -} diff --git a/rust/automerge-wasm/src/interop.rs b/rust/automerge-wasm/src/interop.rs deleted file mode 100644 index 1546ff10..00000000 --- a/rust/automerge-wasm/src/interop.rs +++ /dev/null @@ -1,1478 +0,0 @@ -use crate::error::InsertObject; -use crate::value::Datatype; -use crate::{Automerge, TextRepresentation}; -use automerge as am; -use automerge::ReadDoc; -use automerge::ROOT; -use automerge::{Change, ChangeHash, ObjType, Prop}; -use js_sys::{Array, Function, JsString, Object, Reflect, Symbol, Uint8Array}; -use std::borrow::Cow; -use 
std::collections::{BTreeSet, HashSet}; -use std::fmt::Display; -use wasm_bindgen::prelude::*; -use wasm_bindgen::JsCast; - -use crate::{observer::Patch, ObjId, Value}; - -const RAW_DATA_SYMBOL: &str = "_am_raw_value_"; -const DATATYPE_SYMBOL: &str = "_am_datatype_"; -const RAW_OBJECT_SYMBOL: &str = "_am_objectId"; -const META_SYMBOL: &str = "_am_meta"; - -pub(crate) struct JS(pub(crate) JsValue); -pub(crate) struct AR(pub(crate) Array); - -impl From for JsValue { - fn from(ar: AR) -> Self { - ar.0.into() - } -} - -impl From for JsValue { - fn from(js: JS) -> Self { - js.0 - } -} - -impl From for JS { - fn from(state: am::sync::State) -> Self { - let shared_heads: JS = state.shared_heads.into(); - let last_sent_heads: JS = state.last_sent_heads.into(); - let their_heads: JS = state.their_heads.into(); - let their_need: JS = state.their_need.into(); - let sent_hashes: JS = state.sent_hashes.into(); - let their_have = if let Some(have) = &state.their_have { - JsValue::from(AR::from(have.as_slice()).0) - } else { - JsValue::null() - }; - let result: JsValue = Object::new().into(); - // we can unwrap here b/c we made the object and know its not frozen - Reflect::set(&result, &"sharedHeads".into(), &shared_heads.0).unwrap(); - Reflect::set(&result, &"lastSentHeads".into(), &last_sent_heads.0).unwrap(); - Reflect::set(&result, &"theirHeads".into(), &their_heads.0).unwrap(); - Reflect::set(&result, &"theirNeed".into(), &their_need.0).unwrap(); - Reflect::set(&result, &"theirHave".into(), &their_have).unwrap(); - Reflect::set(&result, &"sentHashes".into(), &sent_hashes.0).unwrap(); - Reflect::set(&result, &"inFlight".into(), &state.in_flight.into()).unwrap(); - JS(result) - } -} - -impl From> for JS { - fn from(heads: Vec) -> Self { - JS(heads - .iter() - .map(|h| JsValue::from_str(&h.to_string())) - .collect::() - .into()) - } -} - -impl From> for JS { - fn from(heads: HashSet) -> Self { - let result: JsValue = Object::new().into(); - for key in &heads { - 
Reflect::set(&result, &key.to_string().into(), &true.into()).unwrap(); - } - JS(result) - } -} - -impl From> for JS { - fn from(heads: BTreeSet) -> Self { - let result: JsValue = Object::new().into(); - for key in &heads { - Reflect::set(&result, &key.to_string().into(), &true.into()).unwrap(); - } - JS(result) - } -} - -impl From>> for JS { - fn from(heads: Option>) -> Self { - if let Some(v) = heads { - let v: Array = v - .iter() - .map(|h| JsValue::from_str(&h.to_string())) - .collect(); - JS(v.into()) - } else { - JS(JsValue::null()) - } - } -} - -impl TryFrom for HashSet { - type Error = error::BadChangeHashSet; - - fn try_from(value: JS) -> Result { - let result = HashSet::new(); - fold_hash_set(result, &value.0, |mut set, hash| { - set.insert(hash); - set - }) - } -} - -impl TryFrom for BTreeSet { - type Error = error::BadChangeHashSet; - - fn try_from(value: JS) -> Result { - let result = BTreeSet::new(); - fold_hash_set(result, &value.0, |mut set, hash| { - set.insert(hash); - set - }) - } -} - -fn fold_hash_set(init: O, val: &JsValue, f: F) -> Result -where - F: Fn(O, ChangeHash) -> O, -{ - let mut result = init; - for key in Reflect::own_keys(val) - .map_err(|_| error::BadChangeHashSet::ListProp)? - .iter() - { - if let Some(true) = js_get(val, &key)?.0.as_bool() { - let hash = ChangeHash::try_from(JS(key.clone())) - .map_err(|e| error::BadChangeHashSet::BadHash(key, e))?; - result = f(result, hash); - } - } - Ok(result) -} - -impl TryFrom for ChangeHash { - type Error = error::BadChangeHash; - - fn try_from(value: JS) -> Result { - if let Some(s) = value.0.as_string() { - Ok(s.parse()?) 
- } else { - Err(error::BadChangeHash::NotString) - } - } -} - -impl TryFrom for Option> { - type Error = error::BadChangeHashes; - - fn try_from(value: JS) -> Result { - if value.0.is_null() { - Ok(None) - } else { - Vec::::try_from(value).map(Some) - } - } -} - -impl TryFrom for Vec { - type Error = error::BadChangeHashes; - - fn try_from(value: JS) -> Result { - let value = value - .0 - .dyn_into::() - .map_err(|_| error::BadChangeHashes::NotArray)?; - let value = value - .iter() - .enumerate() - .map(|(i, v)| { - ChangeHash::try_from(JS(v)).map_err(|e| error::BadChangeHashes::BadElem(i, e)) - }) - .collect::, _>>()?; - Ok(value) - } -} - -impl TryFrom for Vec { - type Error = error::BadJSChanges; - - fn try_from(value: JS) -> Result { - let value = value - .0 - .dyn_into::() - .map_err(|_| error::BadJSChanges::ChangesNotArray)?; - let changes = value - .iter() - .enumerate() - .map(|(i, j)| { - j.dyn_into().map_err::(|_| { - error::BadJSChanges::ElemNotUint8Array(i) - }) - }) - .collect::, _>>()?; - let changes = changes - .iter() - .enumerate() - .map(|(i, arr)| { - automerge::Change::try_from(arr.to_vec().as_slice()) - .map_err(|e| error::BadJSChanges::BadChange(i, e)) - }) - .collect::, _>>()?; - Ok(changes) - } -} - -impl TryFrom for am::sync::State { - type Error = error::BadSyncState; - - fn try_from(value: JS) -> Result { - let value = value.0; - let shared_heads = js_get(&value, "sharedHeads")? - .try_into() - .map_err(error::BadSyncState::BadSharedHeads)?; - let last_sent_heads = js_get(&value, "lastSentHeads")? - .try_into() - .map_err(error::BadSyncState::BadLastSentHeads)?; - let their_heads = js_get(&value, "theirHeads")? - .try_into() - .map_err(error::BadSyncState::BadTheirHeads)?; - let their_need = js_get(&value, "theirNeed")? - .try_into() - .map_err(error::BadSyncState::BadTheirNeed)?; - let their_have = js_get(&value, "theirHave")? - .try_into() - .map_err(error::BadSyncState::BadTheirHave)?; - let sent_hashes = js_get(&value, "sentHashes")? 
- .try_into() - .map_err(error::BadSyncState::BadSentHashes)?; - let in_flight = js_get(&value, "inFlight")? - .0 - .as_bool() - .ok_or(error::BadSyncState::InFlightNotBoolean)?; - Ok(am::sync::State { - shared_heads, - last_sent_heads, - their_heads, - their_need, - their_have, - sent_hashes, - in_flight, - }) - } -} - -impl TryFrom for am::sync::Have { - type Error = error::BadHave; - - fn try_from(value: JS) -> Result { - let last_sync = js_get(&value.0, "lastSync")? - .try_into() - .map_err(error::BadHave::BadLastSync)?; - let bloom = js_get(&value.0, "bloom")? - .try_into() - .map_err(error::BadHave::BadBloom)?; - Ok(am::sync::Have { last_sync, bloom }) - } -} - -impl TryFrom for Option> { - type Error = error::BadHaves; - - fn try_from(value: JS) -> Result { - if value.0.is_null() { - Ok(None) - } else { - Ok(Some(value.try_into()?)) - } - } -} - -impl TryFrom for Vec { - type Error = error::BadHaves; - - fn try_from(value: JS) -> Result { - let value = value - .0 - .dyn_into::() - .map_err(|_| error::BadHaves::NotArray)?; - let have = value - .iter() - .enumerate() - .map(|(i, s)| JS(s).try_into().map_err(|e| error::BadHaves::BadElem(i, e))) - .collect::, _>>()?; - Ok(have) - } -} - -impl TryFrom for am::sync::BloomFilter { - type Error = error::BadBloom; - - fn try_from(value: JS) -> Result { - let value: Uint8Array = value - .0 - .dyn_into() - .map_err(|_| error::BadBloom::NotU8Array)?; - let value = value.to_vec(); - let value = value.as_slice().try_into()?; - Ok(value) - } -} - -impl TryFrom for am::sync::Message { - type Error = error::BadSyncMessage; - - fn try_from(value: JS) -> Result { - let heads = js_get(&value.0, "heads")? - .try_into() - .map_err(error::BadSyncMessage::BadHeads)?; - let need = js_get(&value.0, "need")? 
- .try_into() - .map_err(error::BadSyncMessage::BadNeed)?; - let changes = js_get(&value.0, "changes")?.try_into()?; - let have = js_get(&value.0, "have")?.try_into()?; - Ok(am::sync::Message { - heads, - need, - have, - changes, - }) - } -} - -impl From<&[ChangeHash]> for AR { - fn from(value: &[ChangeHash]) -> Self { - AR(value - .iter() - .map(|h| JsValue::from_str(&hex::encode(h.0))) - .collect()) - } -} - -impl From<&[Change]> for AR { - fn from(value: &[Change]) -> Self { - let changes: Array = value - .iter() - .map(|c| Uint8Array::from(c.raw_bytes())) - .collect(); - AR(changes) - } -} - -impl From<&[am::sync::Have]> for AR { - fn from(value: &[am::sync::Have]) -> Self { - AR(value - .iter() - .map(|have| { - let last_sync: Array = have - .last_sync - .iter() - .map(|h| JsValue::from_str(&hex::encode(h.0))) - .collect(); - // FIXME - the clone and the unwrap here shouldnt be needed - look at into_bytes() - let bloom = Uint8Array::from(have.bloom.to_bytes().as_slice()); - let obj: JsValue = Object::new().into(); - // we can unwrap here b/c we created the object and know its not frozen - Reflect::set(&obj, &"lastSync".into(), &last_sync.into()).unwrap(); - Reflect::set(&obj, &"bloom".into(), &bloom.into()).unwrap(); - obj - }) - .collect()) - } -} - -pub(crate) fn to_js_err(err: T) -> JsValue { - js_sys::Error::new(&std::format!("{}", err)).into() -} - -pub(crate) fn js_get, S: std::fmt::Debug + Into>( - obj: J, - prop: S, -) -> Result { - let prop = prop.into(); - Ok(JS(Reflect::get(&obj.into(), &prop).map_err(|e| { - error::GetProp { - property: format!("{:?}", prop), - error: e, - } - })?)) -} - -pub(crate) fn js_set, S: std::fmt::Debug + Into>( - obj: &JsValue, - prop: S, - val: V, -) -> Result { - let prop = prop.into(); - Reflect::set(obj, &prop, &val.into()).map_err(|e| error::SetProp { - property: prop, - error: e, - }) -} - -pub(crate) fn js_get_symbol>(obj: J, prop: &Symbol) -> Result { - Ok(JS(Reflect::get(&obj.into(), &prop.into()).map_err( - |e| 
error::GetProp { - property: format!("{}", prop.to_string()), - error: e, - }, - )?)) -} - -pub(crate) fn to_prop(p: JsValue) -> Result { - if let Some(s) = p.as_string() { - Ok(Prop::Map(s)) - } else if let Some(n) = p.as_f64() { - Ok(Prop::Seq(n as usize)) - } else { - Err(error::InvalidProp) - } -} - -pub(crate) enum JsObjType { - Text(String), - Map(Vec<(Prop, JsValue)>), - List(Vec<(Prop, JsValue)>), -} - -impl JsObjType { - pub(crate) fn objtype(&self) -> ObjType { - match self { - Self::Text(_) => ObjType::Text, - Self::Map(_) => ObjType::Map, - Self::List(_) => ObjType::List, - } - } - - pub(crate) fn text(&self) -> Option<&str> { - match self { - Self::Text(s) => Some(s.as_ref()), - Self::Map(_) => None, - Self::List(_) => None, - } - } - - pub(crate) fn subvals(&self) -> impl Iterator, JsValue)> + '_ + Clone { - match self { - Self::Text(s) => SubValIter::Str(s.chars().enumerate()), - Self::Map(sub) => SubValIter::Slice(sub.as_slice().iter()), - Self::List(sub) => SubValIter::Slice(sub.as_slice().iter()), - } - } -} - -#[derive(Debug, Clone)] -pub(crate) enum SubValIter<'a> { - Slice(std::slice::Iter<'a, (Prop, JsValue)>), - Str(std::iter::Enumerate>), -} - -impl<'a> Iterator for SubValIter<'a> { - type Item = (std::borrow::Cow<'a, Prop>, JsValue); - - fn next(&mut self) -> Option { - match self { - Self::Slice(i) => i - .next() - .map(|(p, v)| (std::borrow::Cow::Borrowed(p), v.clone())), - Self::Str(i) => i - .next() - .map(|(n, c)| (std::borrow::Cow::Owned(Prop::Seq(n)), c.to_string().into())), - } - } -} - -pub(crate) fn import_obj( - value: &JsValue, - datatype: &Option, -) -> Result { - match datatype.as_deref() { - Some("map") => { - let map = value - .clone() - .dyn_into::() - .map_err(|_| InsertObject::ValueNotObject)?; - let map = js_sys::Object::keys(&map) - .iter() - .zip(js_sys::Object::values(&map).iter()) - .map(|(key, val)| (key.as_string().unwrap().into(), val)) - .collect(); - Ok(JsObjType::Map(map)) - } - Some("list") => { - let list = 
value - .clone() - .dyn_into::() - .map_err(|_| InsertObject::ValueNotObject)?; - let list = list - .iter() - .enumerate() - .map(|(i, e)| (i.into(), e)) - .collect(); - Ok(JsObjType::List(list)) - } - Some("text") => { - let text = value.as_string().ok_or(InsertObject::ValueNotObject)?; - Ok(JsObjType::Text(text)) - } - Some(_) => Err(InsertObject::ValueNotObject), - None => { - if let Ok(list) = value.clone().dyn_into::() { - let list = list - .iter() - .enumerate() - .map(|(i, e)| (i.into(), e)) - .collect(); - Ok(JsObjType::List(list)) - } else if let Ok(map) = value.clone().dyn_into::() { - let map = js_sys::Object::keys(&map) - .iter() - .zip(js_sys::Object::values(&map).iter()) - .map(|(key, val)| (key.as_string().unwrap().into(), val)) - .collect(); - Ok(JsObjType::Map(map)) - } else if let Some(s) = value.as_string() { - Ok(JsObjType::Text(s)) - } else { - Err(InsertObject::ValueNotObject) - } - } - } -} - -pub(crate) fn get_heads( - heads: Option, -) -> Result>, error::BadChangeHashes> { - heads - .map(|h| { - h.iter() - .enumerate() - .map(|(i, v)| { - ChangeHash::try_from(JS(v)).map_err(|e| error::BadChangeHashes::BadElem(i, e)) - }) - .collect() - }) - .transpose() -} - -impl Automerge { - pub(crate) fn export_object( - &self, - obj: &ObjId, - datatype: Datatype, - heads: Option<&Vec>, - meta: &JsValue, - ) -> Result { - let result = match datatype { - Datatype::Text => match self.text_rep { - TextRepresentation::String => { - if let Some(heads) = heads { - self.doc.text_at(obj, heads)?.into() - } else { - self.doc.text(obj)?.into() - } - } - TextRepresentation::Array => self - .wrap_object(self.export_list(obj, heads, meta)?, datatype, obj, meta)? - .into(), - }, - Datatype::List => self - .wrap_object(self.export_list(obj, heads, meta)?, datatype, obj, meta)? - .into(), - _ => self - .wrap_object(self.export_map(obj, heads, meta)?, datatype, obj, meta)? 
- .into(), - }; - Ok(result) - } - - pub(crate) fn export_map( - &self, - obj: &ObjId, - heads: Option<&Vec>, - meta: &JsValue, - ) -> Result { - let keys = self.doc.keys(obj); - let map = Object::new(); - for k in keys { - let val_and_id = if let Some(heads) = heads { - self.doc.get_at(obj, &k, heads) - } else { - self.doc.get(obj, &k) - }; - if let Ok(Some((val, id))) = val_and_id { - let subval = match val { - Value::Object(o) => self.export_object(&id, o.into(), heads, meta)?, - Value::Scalar(_) => self.export_value(alloc(&val, self.text_rep))?, - }; - js_set(&map, &k, &subval)?; - }; - } - - Ok(map) - } - - pub(crate) fn export_list( - &self, - obj: &ObjId, - heads: Option<&Vec>, - meta: &JsValue, - ) -> Result { - let len = self.doc.length(obj); - let array = Array::new(); - for i in 0..len { - let val_and_id = if let Some(heads) = heads { - self.doc.get_at(obj, i, heads) - } else { - self.doc.get(obj, i) - }; - if let Ok(Some((val, id))) = val_and_id { - let subval = match val { - Value::Object(o) => self.export_object(&id, o.into(), heads, meta)?, - Value::Scalar(_) => self.export_value(alloc(&val, self.text_rep))?, - }; - array.push(&subval); - }; - } - - Ok(array.into()) - } - - pub(crate) fn export_value( - &self, - (datatype, raw_value): (Datatype, JsValue), - ) -> Result { - if let Some(function) = self.external_types.get(&datatype) { - let wrapped_value = function - .call1(&JsValue::undefined(), &raw_value) - .map_err(|e| error::Export::CallDataHandler(datatype.to_string(), e))?; - if let Ok(o) = wrapped_value.dyn_into::() { - let key = Symbol::for_(RAW_DATA_SYMBOL); - set_hidden_value(&o, &key, &raw_value)?; - let key = Symbol::for_(DATATYPE_SYMBOL); - set_hidden_value(&o, &key, datatype)?; - Ok(o.into()) - } else { - Err(error::Export::InvalidDataHandler(datatype.to_string())) - } - } else { - Ok(raw_value) - } - } - - pub(crate) fn unwrap_object( - &self, - ext_val: &Object, - ) -> Result<(Object, Datatype, ObjId), error::Export> { - let inner = 
js_get_symbol(ext_val, &Symbol::for_(RAW_DATA_SYMBOL))?.0; - - let datatype = js_get_symbol(ext_val, &Symbol::for_(DATATYPE_SYMBOL))? - .0 - .try_into(); - - let id_val = js_get_symbol(ext_val, &Symbol::for_(RAW_OBJECT_SYMBOL))?.0; - let id = if id_val.is_undefined() { - am::ROOT - } else { - self.doc.import(&id_val.as_string().unwrap_or_default())?.0 - }; - - let inner = inner - .dyn_into::() - .unwrap_or_else(|_| ext_val.clone()); - let datatype = datatype.unwrap_or_else(|_| { - if Array::is_array(&inner) { - Datatype::List - } else { - Datatype::Map - } - }); - Ok((inner, datatype, id)) - } - - pub(crate) fn unwrap_scalar(&self, ext_val: JsValue) -> Result { - let inner = js_get_symbol(&ext_val, &Symbol::for_(RAW_DATA_SYMBOL))?.0; - if !inner.is_undefined() { - Ok(inner) - } else { - Ok(ext_val) - } - } - - fn maybe_wrap_object( - &self, - (datatype, raw_value): (Datatype, JsValue), - id: &ObjId, - meta: &JsValue, - ) -> Result { - if let Ok(obj) = raw_value.clone().dyn_into::() { - let result = self.wrap_object(obj, datatype, id, meta)?; - Ok(result.into()) - } else { - self.export_value((datatype, raw_value)) - } - } - - pub(crate) fn wrap_object( - &self, - value: Object, - datatype: Datatype, - id: &ObjId, - meta: &JsValue, - ) -> Result { - let value = if let Some(function) = self.external_types.get(&datatype) { - let wrapped_value = function - .call1(&JsValue::undefined(), &value) - .map_err(|e| error::Export::CallDataHandler(datatype.to_string(), e))?; - let wrapped_object = wrapped_value - .dyn_into::() - .map_err(|_| error::Export::InvalidDataHandler(datatype.to_string()))?; - set_hidden_value(&wrapped_object, &Symbol::for_(RAW_DATA_SYMBOL), value)?; - wrapped_object - } else { - value - }; - if matches!(datatype, Datatype::Map | Datatype::List) - || (datatype == Datatype::Text && self.text_rep == TextRepresentation::Array) - { - set_hidden_value( - &value, - &Symbol::for_(RAW_OBJECT_SYMBOL), - &JsValue::from(&id.to_string()), - )?; - } - 
set_hidden_value(&value, &Symbol::for_(DATATYPE_SYMBOL), datatype)?; - set_hidden_value(&value, &Symbol::for_(META_SYMBOL), meta)?; - if self.freeze { - Object::freeze(&value); - } - Ok(value) - } - - pub(crate) fn apply_patch_to_array( - &self, - array: &Object, - patch: &Patch, - meta: &JsValue, - exposed: &mut HashSet, - ) -> Result { - let result = Array::from(array); // shallow copy - match patch { - Patch::PutSeq { - index, - value, - expose, - .. - } => { - if *expose && value.0.is_object() { - exposed.insert(value.1.clone()); - js_set(&result, *index as f64, &JsValue::null())?; - } else { - let sub_val = - self.maybe_wrap_object(alloc(&value.0, self.text_rep), &value.1, meta)?; - js_set(&result, *index as f64, &sub_val)?; - } - Ok(result.into()) - } - Patch::DeleteSeq { index, length, .. } => { - Ok(self.sub_splice(result, *index, *length, vec![], meta)?) - } - Patch::Insert { index, values, .. } => { - Ok(self.sub_splice(result, *index, 0, values, meta)?) - } - Patch::Increment { prop, value, .. } => { - if let Prop::Seq(index) = prop { - let index = *index as f64; - let old_val = js_get(&result, index)?.0; - let old_val = self.unwrap_scalar(old_val)?; - if let Some(old) = old_val.as_f64() { - let new_value: Value<'_> = - am::ScalarValue::counter(old as i64 + *value).into(); - js_set( - &result, - index, - &self.export_value(alloc(&new_value, self.text_rep))?, - )?; - Ok(result.into()) - } else { - Err(error::ApplyPatch::IncrementNonNumeric) - } - } else { - Err(error::ApplyPatch::IncrementKeyInSeq) - } - } - Patch::DeleteMap { .. } => Err(error::ApplyPatch::DeleteKeyFromSeq), - Patch::PutMap { .. } => Err(error::ApplyPatch::PutKeyInSeq), - Patch::SpliceText { index, value, .. 
} => { - match self.text_rep { - TextRepresentation::String => Err(error::ApplyPatch::SpliceTextInSeq), - TextRepresentation::Array => { - let bytes: Vec = value.iter().cloned().collect(); - let val = String::from_utf16_lossy(bytes.as_slice()); - let elems = val - .chars() - .map(|c| { - ( - Value::Scalar(std::borrow::Cow::Owned(am::ScalarValue::Str( - c.to_string().into(), - ))), - ObjId::Root, // Using ROOT is okay because this ID is never used as - // we're producing ScalarValue::Str - ) - }) - .collect::>(); - Ok(self.sub_splice(result, *index, 0, &elems, meta)?) - } - } - } - } - } - - pub(crate) fn apply_patch_to_map( - &self, - map: &Object, - patch: &Patch, - meta: &JsValue, - exposed: &mut HashSet, - ) -> Result { - let result = Object::assign(&Object::new(), map); // shallow copy - match patch { - Patch::PutMap { - key, value, expose, .. - } => { - if *expose && value.0.is_object() { - exposed.insert(value.1.clone()); - js_set(&result, key, &JsValue::null())?; - } else { - let sub_val = - self.maybe_wrap_object(alloc(&value.0, self.text_rep), &value.1, meta)?; - js_set(&result, key, &sub_val)?; - } - Ok(result) - } - Patch::DeleteMap { key, .. } => { - Reflect::delete_property(&result, &key.into()).map_err(|e| { - error::Export::Delete { - prop: key.to_string(), - err: e, - } - })?; - Ok(result) - } - Patch::Increment { prop, value, .. } => { - if let Prop::Map(key) = prop { - let old_val = js_get(&result, key)?.0; - let old_val = self.unwrap_scalar(old_val)?; - if let Some(old) = old_val.as_f64() { - let new_value: Value<'_> = - am::ScalarValue::counter(old as i64 + *value).into(); - js_set( - &result, - key, - &self.export_value(alloc(&new_value, self.text_rep))?, - )?; - Ok(result) - } else { - Err(error::ApplyPatch::IncrementNonNumeric) - } - } else { - Err(error::ApplyPatch::IncrementIndexInMap) - } - } - Patch::Insert { .. } => Err(error::ApplyPatch::InsertInMap), - Patch::DeleteSeq { .. 
} => Err(error::ApplyPatch::SpliceInMap), - //Patch::SpliceText { .. } => Err(to_js_err("cannot Splice into map")), - Patch::SpliceText { .. } => Err(error::ApplyPatch::SpliceTextInMap), - Patch::PutSeq { .. } => Err(error::ApplyPatch::PutIdxInMap), - } - } - - pub(crate) fn apply_patch( - &self, - obj: Object, - patch: &Patch, - depth: usize, - meta: &JsValue, - exposed: &mut HashSet, - ) -> Result { - let (inner, datatype, id) = self.unwrap_object(&obj)?; - let prop = patch.path().get(depth).map(|p| prop_to_js(&p.1)); - let result = if let Some(prop) = prop { - let subval = js_get(&inner, &prop)?.0; - if subval.is_string() && patch.path().len() - 1 == depth { - if let Ok(s) = subval.dyn_into::() { - let new_value = self.apply_patch_to_text(&s, patch)?; - let result = shallow_copy(&inner); - js_set(&result, &prop, &new_value)?; - Ok(result) - } else { - // bad patch - short circuit - Ok(obj) - } - } else if let Ok(sub_obj) = js_get(&inner, &prop)?.0.dyn_into::() { - let new_value = self.apply_patch(sub_obj, patch, depth + 1, meta, exposed)?; - let result = shallow_copy(&inner); - js_set(&result, &prop, &new_value)?; - Ok(result) - } else { - // if a patch is trying to access a deleted object make no change - // short circuit the wrap process - return Ok(obj); - } - } else if Array::is_array(&inner) { - if &id == patch.obj() { - self.apply_patch_to_array(&inner, patch, meta, exposed) - } else { - Ok(Array::from(&inner).into()) - } - } else if &id == patch.obj() { - self.apply_patch_to_map(&inner, patch, meta, exposed) - } else { - Ok(Object::assign(&Object::new(), &inner)) - }?; - - self.wrap_object(result, datatype, &id, meta) - .map_err(|e| e.into()) - } - - fn apply_patch_to_text( - &self, - string: &JsString, - patch: &Patch, - ) -> Result { - match patch { - Patch::DeleteSeq { index, length, .. 
} => { - let index = *index as u32; - let before = string.slice(0, index); - let after = string.slice(index + *length as u32, string.length()); - let result = before.concat(&after); - Ok(result.into()) - } - Patch::SpliceText { index, value, .. } => { - let index = *index as u32; - let length = string.length(); - let before = string.slice(0, index); - let after = string.slice(index, length); - let bytes: Vec = value.iter().cloned().collect(); - let result = before - .concat(&String::from_utf16_lossy(bytes.as_slice()).into()) - .concat(&after); - Ok(result.into()) - } - _ => Ok(string.into()), - } - } - - fn sub_splice<'a, I: IntoIterator, ObjId)>>( - &self, - o: Array, - index: usize, - num_del: usize, - values: I, - meta: &JsValue, - ) -> Result { - let args: Array = values - .into_iter() - .map(|v| self.maybe_wrap_object(alloc(&v.0, self.text_rep), &v.1, meta)) - .collect::>()?; - args.unshift(&(num_del as u32).into()); - args.unshift(&(index as u32).into()); - let method = js_get(&o, "splice")? - .0 - .dyn_into::() - .map_err(error::Export::GetSplice)?; - Reflect::apply(&method, &o, &args).map_err(error::Export::CallSplice)?; - Ok(o.into()) - } - - pub(crate) fn import(&self, id: JsValue) -> Result<(ObjId, am::ObjType), error::ImportObj> { - if let Some(s) = id.as_string() { - // valid formats are - // 123@aabbcc - // 123@aabccc/prop1/prop2/prop3 - // /prop1/prop2/prop3 - let mut components = s.split('/'); - let obj = components.next(); - let (id, obj_type) = if obj == Some("") { - (ROOT, am::ObjType::Map) - } else { - self.doc - .import(obj.unwrap_or_default()) - .map_err(error::ImportObj::BadImport)? 
- }; - self.import_path(id, obj_type, components) - .map_err(|e| error::ImportObj::InvalidPath(s.to_string(), e)) - } else { - Err(error::ImportObj::NotString) - } - } - - fn import_path<'a, I: Iterator>( - &self, - mut obj: ObjId, - mut obj_type: am::ObjType, - components: I, - ) -> Result<(ObjId, am::ObjType), error::ImportPath> { - for (i, prop) in components.enumerate() { - if prop.is_empty() { - break; - } - let is_map = matches!(obj_type, am::ObjType::Map | am::ObjType::Table); - let val = if is_map { - self.doc.get(obj, prop)? - } else { - let idx = prop - .parse() - .map_err(|_| error::ImportPath::IndexNotInteger(i, prop.to_string()))?; - self.doc.get(obj, am::Prop::Seq(idx))? - }; - match val { - Some((am::Value::Object(am::ObjType::Map), id)) => { - obj_type = am::ObjType::Map; - obj = id; - } - Some((am::Value::Object(am::ObjType::Table), id)) => { - obj_type = am::ObjType::Table; - obj = id; - } - Some((am::Value::Object(am::ObjType::List), id)) => { - obj_type = am::ObjType::List; - obj = id; - } - Some((am::Value::Object(am::ObjType::Text), id)) => { - obj_type = am::ObjType::Text; - obj = id; - } - None => return Err(error::ImportPath::NonExistentObject(i, prop.to_string())), - _ => return Err(error::ImportPath::NotAnObject), - }; - } - Ok((obj, obj_type)) - } - - pub(crate) fn import_prop(&self, prop: JsValue) -> Result { - if let Some(s) = prop.as_string() { - Ok(s.into()) - } else if let Some(n) = prop.as_f64() { - Ok((n as usize).into()) - } else { - Err(error::InvalidProp) - } - } - - pub(crate) fn import_scalar( - &self, - value: &JsValue, - datatype: &Option, - ) -> Option { - match datatype.as_deref() { - Some("boolean") => value.as_bool().map(am::ScalarValue::Boolean), - Some("int") => value.as_f64().map(|v| am::ScalarValue::Int(v as i64)), - Some("uint") => value.as_f64().map(|v| am::ScalarValue::Uint(v as u64)), - Some("str") => value.as_string().map(|v| am::ScalarValue::Str(v.into())), - Some("f64") => 
value.as_f64().map(am::ScalarValue::F64), - Some("bytes") => Some(am::ScalarValue::Bytes( - value.clone().dyn_into::().unwrap().to_vec(), - )), - Some("counter") => value.as_f64().map(|v| am::ScalarValue::counter(v as i64)), - Some("timestamp") => { - if let Some(v) = value.as_f64() { - Some(am::ScalarValue::Timestamp(v as i64)) - } else if let Ok(d) = value.clone().dyn_into::() { - Some(am::ScalarValue::Timestamp(d.get_time() as i64)) - } else { - None - } - } - Some("null") => Some(am::ScalarValue::Null), - Some(_) => None, - None => { - if value.is_null() { - Some(am::ScalarValue::Null) - } else if let Some(b) = value.as_bool() { - Some(am::ScalarValue::Boolean(b)) - } else if let Some(s) = value.as_string() { - Some(am::ScalarValue::Str(s.into())) - } else if let Some(n) = value.as_f64() { - if (n.round() - n).abs() < f64::EPSILON { - Some(am::ScalarValue::Int(n as i64)) - } else { - Some(am::ScalarValue::F64(n)) - } - } else if let Ok(d) = value.clone().dyn_into::() { - Some(am::ScalarValue::Timestamp(d.get_time() as i64)) - } else if let Ok(o) = &value.clone().dyn_into::() { - Some(am::ScalarValue::Bytes(o.to_vec())) - } else { - None - } - } - } - } - - pub(crate) fn import_value( - &self, - value: &JsValue, - datatype: Option, - ) -> Result<(Value<'static>, Vec<(Prop, JsValue)>), error::InvalidValue> { - match self.import_scalar(value, &datatype) { - Some(val) => Ok((val.into(), vec![])), - None => { - if let Ok(js_obj) = import_obj(value, &datatype) { - Ok(( - js_obj.objtype().into(), - js_obj - .subvals() - .map(|(p, v)| (p.into_owned(), v)) - .collect::>(), - )) - } else { - web_sys::console::log_2(&"Invalid value".into(), value); - Err(error::InvalidValue) - } - } - } - } - - pub(crate) fn finalize_exposed( - &self, - object: &JsValue, - exposed: HashSet, - meta: &JsValue, - ) -> Result<(), error::ApplyPatch> { - for obj in exposed { - let mut pointer = object.clone(); - if let Ok(obj_type) = self.doc.object_type(&obj) { - // only valid obj's should 
make it to this point ... - let path: Vec<_> = self - .doc - .path_to_object(&obj)? - .iter() - .map(|p| prop_to_js(&p.1)) - .collect(); - let value = self.export_object(&obj, obj_type.into(), None, meta)?; - for (i, prop) in path.iter().enumerate() { - if i + 1 < path.len() { - pointer = js_get(&pointer, prop)?.0; - } else { - js_set(&pointer, prop, &value)?; - } - } - } - } - Ok(()) - } -} - -pub(crate) fn alloc(value: &Value<'_>, text_rep: TextRepresentation) -> (Datatype, JsValue) { - match value { - am::Value::Object(o) => match o { - ObjType::Map => (Datatype::Map, Object::new().into()), - ObjType::Table => (Datatype::Table, Object::new().into()), - ObjType::List => (Datatype::List, Array::new().into()), - ObjType::Text => match text_rep { - TextRepresentation::String => (Datatype::Text, "".into()), - TextRepresentation::Array => (Datatype::Text, Array::new().into()), - }, - }, - am::Value::Scalar(s) => match s.as_ref() { - am::ScalarValue::Bytes(v) => (Datatype::Bytes, Uint8Array::from(v.as_slice()).into()), - am::ScalarValue::Str(v) => (Datatype::Str, v.to_string().into()), - am::ScalarValue::Int(v) => (Datatype::Int, (*v as f64).into()), - am::ScalarValue::Uint(v) => (Datatype::Uint, (*v as f64).into()), - am::ScalarValue::F64(v) => (Datatype::F64, (*v).into()), - am::ScalarValue::Counter(v) => (Datatype::Counter, (f64::from(v)).into()), - am::ScalarValue::Timestamp(v) => ( - Datatype::Timestamp, - js_sys::Date::new(&(*v as f64).into()).into(), - ), - am::ScalarValue::Boolean(v) => (Datatype::Boolean, (*v).into()), - am::ScalarValue::Null => (Datatype::Null, JsValue::null()), - am::ScalarValue::Unknown { bytes, type_code } => ( - Datatype::Unknown(*type_code), - Uint8Array::from(bytes.as_slice()).into(), - ), - }, - } -} - -fn set_hidden_value>( - o: &Object, - key: &Symbol, - value: V, -) -> Result<(), error::Export> { - let definition = Object::new(); - js_set(&definition, "value", &value.into()).map_err(|_| error::Export::SetHidden("value"))?; - 
js_set(&definition, "writable", false).map_err(|_| error::Export::SetHidden("writable"))?; - js_set(&definition, "enumerable", false).map_err(|_| error::Export::SetHidden("enumerable"))?; - js_set(&definition, "configurable", false) - .map_err(|_| error::Export::SetHidden("configurable"))?; - Object::define_property(o, &key.into(), &definition); - Ok(()) -} - -fn shallow_copy(obj: &Object) -> Object { - if Array::is_array(obj) { - Array::from(obj).into() - } else { - Object::assign(&Object::new(), obj) - } -} - -fn prop_to_js(prop: &Prop) -> JsValue { - match prop { - Prop::Map(key) => key.into(), - Prop::Seq(index) => (*index as f64).into(), - } -} - -pub(crate) mod error { - use automerge::{AutomergeError, LoadChangeError}; - use wasm_bindgen::JsValue; - - #[derive(Debug, thiserror::Error)] - pub enum BadJSChanges { - #[error("the changes were not an array of Uint8Array")] - ChangesNotArray, - #[error("change {0} was not a Uint8Array")] - ElemNotUint8Array(usize), - #[error("error loading change {0}: {1}")] - BadChange(usize, LoadChangeError), - } - - #[derive(Debug, thiserror::Error)] - pub enum BadChangeHashes { - #[error("the change hashes were not an array of strings")] - NotArray, - #[error("could not decode hash {0}: {1}")] - BadElem(usize, BadChangeHash), - } - - impl From for JsValue { - fn from(e: BadChangeHashes) -> Self { - JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - pub enum BadChangeHashSet { - #[error("not an object")] - NotObject, - #[error(transparent)] - GetProp(#[from] GetProp), - #[error("unable to getOwnProperties")] - ListProp, - #[error("unable to parse hash from {0:?}: {1}")] - BadHash(wasm_bindgen::JsValue, BadChangeHash), - } - - #[derive(Debug, thiserror::Error)] - pub enum BadChangeHash { - #[error("change hash was not a string")] - NotString, - #[error(transparent)] - Parse(#[from] automerge::ParseChangeHashError), - } - - impl From for JsValue { - fn from(e: BadChangeHash) -> Self { - 
JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - pub enum BadSyncState { - #[error(transparent)] - GetProp(#[from] GetProp), - #[error("bad sharedHeads: {0}")] - BadSharedHeads(BadChangeHashes), - #[error("bad lastSentHeads: {0}")] - BadLastSentHeads(BadChangeHashes), - #[error("bad theirHeads: {0}")] - BadTheirHeads(BadChangeHashes), - #[error("bad theirNeed: {0}")] - BadTheirNeed(BadChangeHashes), - #[error("bad theirHave: {0}")] - BadTheirHave(BadHaves), - #[error("bad sentHashes: {0}")] - BadSentHashes(BadChangeHashSet), - #[error("inFlight not a boolean")] - InFlightNotBoolean, - } - - impl From for JsValue { - fn from(e: BadSyncState) -> Self { - JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - #[error("unable to get property {property}: {error:?}")] - pub struct GetProp { - pub(super) property: String, - pub(super) error: wasm_bindgen::JsValue, - } - - impl From for JsValue { - fn from(e: GetProp) -> Self { - JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - #[error("error setting property {property:?} on JS value: {error:?}")] - pub struct SetProp { - pub(super) property: JsValue, - pub(super) error: JsValue, - } - - impl From for JsValue { - fn from(e: SetProp) -> Self { - JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - pub enum BadHave { - #[error("bad lastSync: {0}")] - BadLastSync(BadChangeHashes), - #[error("bad bloom: {0}")] - BadBloom(BadBloom), - #[error(transparent)] - GetHaveProp(#[from] GetProp), - } - - #[derive(Debug, thiserror::Error)] - pub enum BadHaves { - #[error("value was not an array")] - NotArray, - #[error("error loading have at index {0}: {1}")] - BadElem(usize, BadHave), - } - - #[derive(Debug, thiserror::Error)] - pub enum BadBloom { - #[error("the value was not a Uint8Array")] - NotU8Array, - #[error("unable to decode: {0}")] - Decode(#[from] automerge::sync::DecodeBloomError), - } - - #[derive(Debug, 
thiserror::Error)] - pub enum Export { - #[error(transparent)] - Set(#[from] SetProp), - #[error("unable to delete prop {prop}: {err:?}")] - Delete { prop: String, err: JsValue }, - #[error("unable to set hidden property {0}")] - SetHidden(&'static str), - #[error("data handler for type {0} did not return a valid object")] - InvalidDataHandler(String), - #[error("error calling data handler for type {0}: {1:?}")] - CallDataHandler(String, JsValue), - #[error(transparent)] - GetProp(#[from] GetProp), - #[error(transparent)] - InvalidDatatype(#[from] crate::value::InvalidDatatype), - #[error("unable to get the splice function: {0:?}")] - GetSplice(JsValue), - #[error("error calling splice: {0:?}")] - CallSplice(JsValue), - #[error(transparent)] - Automerge(#[from] AutomergeError), - } - - impl From for JsValue { - fn from(e: Export) -> Self { - JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - pub enum ApplyPatch { - #[error(transparent)] - Export(#[from] Export), - #[error("cannot delete from a seq")] - DeleteKeyFromSeq, - #[error("cannot put key in seq")] - PutKeyInSeq, - #[error("cannot increment a non-numeric value")] - IncrementNonNumeric, - #[error("cannot increment a key in a seq")] - IncrementKeyInSeq, - #[error("cannot increment index in a map")] - IncrementIndexInMap, - #[error("cannot insert into a map")] - InsertInMap, - #[error("cannot splice into a map")] - SpliceInMap, - #[error("cannot splice text into a seq")] - SpliceTextInSeq, - #[error("cannot splice text into a map")] - SpliceTextInMap, - #[error("cannot put a seq index in a map")] - PutIdxInMap, - #[error(transparent)] - GetProp(#[from] GetProp), - #[error(transparent)] - SetProp(#[from] SetProp), - #[error(transparent)] - Automerge(#[from] AutomergeError), - } - - impl From for JsValue { - fn from(e: ApplyPatch) -> Self { - JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - pub enum BadSyncMessage { - #[error(transparent)] - GetProp(#[from] 
GetProp), - #[error("unable to read haves: {0}")] - BadHaves(#[from] BadHaves), - #[error("could not read changes: {0}")] - BadJSChanges(#[from] BadJSChanges), - #[error("could not read heads: {0}")] - BadHeads(BadChangeHashes), - #[error("could not read need: {0}")] - BadNeed(BadChangeHashes), - } - - impl From for JsValue { - fn from(e: BadSyncMessage) -> Self { - JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - pub enum ImportObj { - #[error("obj id was not a string")] - NotString, - #[error("invalid path {0}: {1}")] - InvalidPath(String, ImportPath), - #[error("unable to import object id: {0}")] - BadImport(AutomergeError), - } - - impl From for JsValue { - fn from(e: ImportObj) -> Self { - JsValue::from(format!("invalid object ID: {}", e)) - } - } - - #[derive(Debug, thiserror::Error)] - pub enum ImportPath { - #[error(transparent)] - Automerge(#[from] AutomergeError), - #[error("path component {0} ({1}) should be an integer to index a sequence")] - IndexNotInteger(usize, String), - #[error("path component {0} ({1}) referenced a nonexistent object")] - NonExistentObject(usize, String), - #[error("path did not refer to an object")] - NotAnObject, - } - - #[derive(Debug, thiserror::Error)] - #[error("given property was not a string or integer")] - pub struct InvalidProp; - - #[derive(Debug, thiserror::Error)] - #[error("given property was not a string or integer")] - pub struct InvalidValue; -} diff --git a/rust/automerge-wasm/src/lib.rs b/rust/automerge-wasm/src/lib.rs deleted file mode 100644 index 09072ca7..00000000 --- a/rust/automerge-wasm/src/lib.rs +++ /dev/null @@ -1,1155 +0,0 @@ -#![doc( - html_logo_url = "https://raw.githubusercontent.com/automerge/automerge-rs/main/img/brandmark.svg", - html_favicon_url = "https:///raw.githubusercontent.com/automerge/automerge-rs/main/img/favicon.ico" -)] -#![warn( - missing_debug_implementations, - // missing_docs, // TODO: add documentation! 
- rust_2021_compatibility, - rust_2018_idioms, - unreachable_pub, - bad_style, - dead_code, - improper_ctypes, - non_shorthand_field_patterns, - no_mangle_generic_items, - overflowing_literals, - path_statements, - patterns_in_fns_without_body, - private_in_public, - unconditional_recursion, - unused, - unused_allocation, - unused_comparisons, - unused_parens, - while_true -)] -#![allow(clippy::unused_unit)] -use am::transaction::CommitOptions; -use am::transaction::{Observed, Transactable, UnObserved}; -use am::ScalarValue; -use automerge as am; -use automerge::{sync::SyncDoc, Change, ObjId, Prop, ReadDoc, TextEncoding, Value, ROOT}; -use js_sys::{Array, Function, Object, Uint8Array}; -use serde::ser::Serialize; -use std::borrow::Cow; -use std::collections::HashMap; -use std::collections::HashSet; -use std::convert::TryInto; -use wasm_bindgen::prelude::*; -use wasm_bindgen::JsCast; - -mod interop; -mod observer; -mod sequence_tree; -mod sync; -mod value; - -use observer::Observer; - -use interop::{alloc, get_heads, import_obj, js_set, to_js_err, to_prop, AR, JS}; -use sync::SyncState; -use value::Datatype; - -use crate::interop::SubValIter; - -#[allow(unused_macros)] -macro_rules! 
log { - ( $( $t:tt )* ) => { - web_sys::console::log_1(&format!( $( $t )* ).into()); - }; -} - -type AutoCommit = am::AutoCommitWithObs>; - -#[cfg(feature = "wee_alloc")] -#[global_allocator] -static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; - -/// How text is represented in materialized objects on the JS side -#[derive(Debug, Eq, PartialEq, Clone, Copy)] -#[wasm_bindgen] -pub enum TextRepresentation { - /// As an array of characters and objects - Array, - /// As a single JS string - String, -} - -impl std::default::Default for TextRepresentation { - fn default() -> Self { - TextRepresentation::Array - } -} - -#[wasm_bindgen] -#[derive(Debug)] -pub struct Automerge { - doc: AutoCommit, - freeze: bool, - external_types: HashMap, - text_rep: TextRepresentation, -} - -#[wasm_bindgen] -impl Automerge { - pub fn new( - actor: Option, - text_rep: TextRepresentation, - ) -> Result { - let mut doc = AutoCommit::default().with_encoding(TextEncoding::Utf16); - if let Some(a) = actor { - let a = automerge::ActorId::from(hex::decode(a)?.to_vec()); - doc.set_actor(a); - } - Ok(Automerge { - doc, - freeze: false, - external_types: HashMap::default(), - text_rep, - }) - } - - #[allow(clippy::should_implement_trait)] - pub fn clone(&mut self, actor: Option) -> Result { - let mut automerge = Automerge { - doc: self.doc.clone(), - freeze: self.freeze, - external_types: self.external_types.clone(), - text_rep: self.text_rep, - }; - if let Some(s) = actor { - let actor = automerge::ActorId::from(hex::decode(s)?.to_vec()); - automerge.doc.set_actor(actor); - } - Ok(automerge) - } - - pub fn fork( - &mut self, - actor: Option, - heads: JsValue, - ) -> Result { - let heads: Result, _> = JS(heads).try_into(); - let doc = if let Ok(heads) = heads { - self.doc.fork_at(&heads)? 
- } else { - self.doc.fork() - }; - let mut automerge = Automerge { - doc, - freeze: self.freeze, - external_types: self.external_types.clone(), - text_rep: self.text_rep, - }; - if let Some(s) = actor { - let actor = - automerge::ActorId::from(hex::decode(s).map_err(error::BadActorId::from)?.to_vec()); - automerge.doc.set_actor(actor); - } - Ok(automerge) - } - - #[wasm_bindgen(js_name = pendingOps)] - pub fn pending_ops(&self) -> JsValue { - (self.doc.pending_ops() as u32).into() - } - - pub fn commit(&mut self, message: Option, time: Option) -> JsValue { - let mut commit_opts = CommitOptions::default(); - if let Some(message) = message { - commit_opts.set_message(message); - } - if let Some(time) = time { - commit_opts.set_time(time as i64); - } - let hash = self.doc.commit_with(commit_opts); - match hash { - Some(h) => JsValue::from_str(&hex::encode(h.0)), - None => JsValue::NULL, - } - } - - pub fn merge(&mut self, other: &mut Automerge) -> Result { - let heads = self.doc.merge(&mut other.doc)?; - let heads: Array = heads - .iter() - .map(|h| JsValue::from_str(&hex::encode(h.0))) - .collect(); - Ok(heads) - } - - pub fn rollback(&mut self) -> f64 { - self.doc.rollback() as f64 - } - - pub fn keys(&self, obj: JsValue, heads: Option) -> Result { - let (obj, _) = self.import(obj)?; - let result = if let Some(heads) = get_heads(heads)? { - self.doc - .keys_at(&obj, &heads) - .map(|s| JsValue::from_str(&s)) - .collect() - } else { - self.doc.keys(&obj).map(|s| JsValue::from_str(&s)).collect() - }; - Ok(result) - } - - pub fn text(&self, obj: JsValue, heads: Option) -> Result { - let (obj, _) = self.import(obj)?; - if let Some(heads) = get_heads(heads)? { - Ok(self.doc.text_at(&obj, &heads)?) - } else { - Ok(self.doc.text(&obj)?) 
- } - } - - pub fn splice( - &mut self, - obj: JsValue, - start: f64, - delete_count: f64, - text: JsValue, - ) -> Result<(), error::Splice> { - let (obj, obj_type) = self.import(obj)?; - let start = start as usize; - let delete_count = delete_count as usize; - let vals = if let Some(t) = text.as_string() { - if obj_type == am::ObjType::Text && self.text_rep == TextRepresentation::String { - self.doc.splice_text(&obj, start, delete_count, &t)?; - return Ok(()); - } else { - t.chars() - .map(|c| ScalarValue::Str(c.to_string().into())) - .collect::>() - } - } else { - let mut vals = vec![]; - if let Ok(array) = text.dyn_into::() { - for (index, i) in array.iter().enumerate() { - let value = self - .import_scalar(&i, &None) - .ok_or(error::Splice::ValueNotPrimitive(index))?; - vals.push(value); - } - } - vals - }; - if !vals.is_empty() { - self.doc.splice(&obj, start, delete_count, vals)?; - } else { - // no vals given but we still need to call the text vs splice - // bc utf16 - match obj_type { - am::ObjType::List => { - self.doc.splice(&obj, start, delete_count, vals)?; - } - am::ObjType::Text => match self.text_rep { - TextRepresentation::String => { - self.doc.splice_text(&obj, start, delete_count, "")?; - } - TextRepresentation::Array => { - self.doc.splice(&obj, start, delete_count, vals)?; - } - }, - _ => {} - } - } - Ok(()) - } - - pub fn push( - &mut self, - obj: JsValue, - value: JsValue, - datatype: JsValue, - ) -> Result<(), error::Insert> { - let (obj, _) = self.import(obj)?; - let value = self - .import_scalar(&value, &datatype.as_string()) - .ok_or(error::Insert::ValueNotPrimitive)?; - let index = self.doc.length(&obj); - self.doc.insert(&obj, index, value)?; - Ok(()) - } - - #[wasm_bindgen(js_name = pushObject)] - pub fn push_object( - &mut self, - obj: JsValue, - value: JsValue, - ) -> Result, error::InsertObject> { - let (obj, _) = self.import(obj)?; - let imported_obj = import_obj(&value, &None)?; - let index = self.doc.length(&obj); - let opid = 
self - .doc - .insert_object(&obj, index, imported_obj.objtype())?; - if let Some(s) = imported_obj.text() { - match self.text_rep { - TextRepresentation::String => { - self.doc.splice_text(&opid, 0, 0, s)?; - } - TextRepresentation::Array => { - self.subset::(&opid, imported_obj.subvals())?; - } - } - } else { - self.subset::(&opid, imported_obj.subvals())?; - } - Ok(opid.to_string().into()) - } - - pub fn insert( - &mut self, - obj: JsValue, - index: f64, - value: JsValue, - datatype: JsValue, - ) -> Result<(), error::Insert> { - let (obj, _) = self.import(obj)?; - let value = self - .import_scalar(&value, &datatype.as_string()) - .ok_or(error::Insert::ValueNotPrimitive)?; - self.doc.insert(&obj, index as usize, value)?; - Ok(()) - } - - #[wasm_bindgen(js_name = insertObject)] - pub fn insert_object( - &mut self, - obj: JsValue, - index: f64, - value: JsValue, - ) -> Result, error::InsertObject> { - let (obj, _) = self.import(obj)?; - let imported_obj = import_obj(&value, &None)?; - let opid = self - .doc - .insert_object(&obj, index as usize, imported_obj.objtype())?; - if let Some(s) = imported_obj.text() { - match self.text_rep { - TextRepresentation::String => { - self.doc.splice_text(&opid, 0, 0, s)?; - } - TextRepresentation::Array => { - self.subset::(&opid, imported_obj.subvals())?; - } - } - } else { - self.subset::(&opid, imported_obj.subvals())?; - } - Ok(opid.to_string().into()) - } - - pub fn put( - &mut self, - obj: JsValue, - prop: JsValue, - value: JsValue, - datatype: JsValue, - ) -> Result<(), error::Insert> { - let (obj, _) = self.import(obj)?; - let prop = self.import_prop(prop)?; - let value = self - .import_scalar(&value, &datatype.as_string()) - .ok_or(error::Insert::ValueNotPrimitive)?; - self.doc.put(&obj, prop, value)?; - Ok(()) - } - - #[wasm_bindgen(js_name = putObject)] - pub fn put_object( - &mut self, - obj: JsValue, - prop: JsValue, - value: JsValue, - ) -> Result { - let (obj, _) = self.import(obj)?; - let prop = 
self.import_prop(prop)?; - let imported_obj = import_obj(&value, &None)?; - let opid = self.doc.put_object(&obj, prop, imported_obj.objtype())?; - if let Some(s) = imported_obj.text() { - match self.text_rep { - TextRepresentation::String => { - self.doc.splice_text(&opid, 0, 0, s)?; - } - TextRepresentation::Array => { - self.subset::(&opid, imported_obj.subvals())?; - } - } - } else { - self.subset::(&opid, imported_obj.subvals())?; - } - Ok(opid.to_string().into()) - } - - fn subset<'a, E, I>(&mut self, obj: &am::ObjId, vals: I) -> Result<(), E> - where - I: IntoIterator, JsValue)>, - E: From - + From - + From, - { - for (p, v) in vals { - let (value, subvals) = self.import_value(v.as_ref(), None)?; - //let opid = self.0.set(id, p, value)?; - let opid = match (p.as_ref(), value) { - (Prop::Map(s), Value::Object(objtype)) => { - Some(self.doc.put_object(obj, s, objtype)?) - } - (Prop::Map(s), Value::Scalar(scalar)) => { - self.doc.put(obj, s, scalar.into_owned())?; - None - } - (Prop::Seq(i), Value::Object(objtype)) => { - Some(self.doc.insert_object(obj, *i, objtype)?) - } - (Prop::Seq(i), Value::Scalar(scalar)) => { - self.doc.insert(obj, *i, scalar.into_owned())?; - None - } - }; - if let Some(opid) = opid { - self.subset::(&opid, SubValIter::Slice(subvals.as_slice().iter()))?; - } - } - Ok(()) - } - - pub fn increment( - &mut self, - obj: JsValue, - prop: JsValue, - value: JsValue, - ) -> Result<(), error::Increment> { - let (obj, _) = self.import(obj)?; - let prop = self.import_prop(prop)?; - let value: f64 = value.as_f64().ok_or(error::Increment::ValueNotNumeric)?; - self.doc.increment(&obj, prop, value as i64)?; - Ok(()) - } - - #[wasm_bindgen(js_name = get)] - pub fn get( - &self, - obj: JsValue, - prop: JsValue, - heads: Option, - ) -> Result { - let (obj, _) = self.import(obj)?; - let prop = to_prop(prop); - let heads = get_heads(heads)?; - if let Ok(prop) = prop { - let value = if let Some(h) = heads { - self.doc.get_at(&obj, prop, &h)? 
- } else { - self.doc.get(&obj, prop)? - }; - if let Some((value, id)) = value { - match alloc(&value, self.text_rep) { - (datatype, js_value) if datatype.is_scalar() => Ok(js_value), - _ => Ok(id.to_string().into()), - } - } else { - Ok(JsValue::undefined()) - } - } else { - Ok(JsValue::undefined()) - } - } - - #[wasm_bindgen(js_name = getWithType)] - pub fn get_with_type( - &self, - obj: JsValue, - prop: JsValue, - heads: Option, - ) -> Result { - let (obj, _) = self.import(obj)?; - let prop = to_prop(prop); - let heads = get_heads(heads)?; - if let Ok(prop) = prop { - let value = if let Some(h) = heads { - self.doc.get_at(&obj, prop, &h)? - } else { - self.doc.get(&obj, prop)? - }; - if let Some(value) = value { - match &value { - (Value::Object(obj_type), obj_id) => { - let result = Array::new(); - result.push(&obj_type.to_string().into()); - result.push(&obj_id.to_string().into()); - Ok(result.into()) - } - (Value::Scalar(_), _) => { - let result = Array::new(); - let (datatype, value) = alloc(&value.0, self.text_rep); - result.push(&datatype.into()); - result.push(&value); - Ok(result.into()) - } - } - } else { - Ok(JsValue::null()) - } - } else { - Ok(JsValue::null()) - } - } - - #[wasm_bindgen(js_name = getAll)] - pub fn get_all( - &self, - obj: JsValue, - arg: JsValue, - heads: Option, - ) -> Result { - let (obj, _) = self.import(obj)?; - let result = Array::new(); - let prop = to_prop(arg); - if let Ok(prop) = prop { - let values = if let Some(heads) = get_heads(heads)? 
{ - self.doc.get_all_at(&obj, prop, &heads) - } else { - self.doc.get_all(&obj, prop) - }?; - for (value, id) in values { - let sub = Array::new(); - let (datatype, js_value) = alloc(&value, self.text_rep); - sub.push(&datatype.into()); - if value.is_scalar() { - sub.push(&js_value); - } - sub.push(&id.to_string().into()); - result.push(&JsValue::from(&sub)); - } - } - Ok(result) - } - - #[wasm_bindgen(js_name = enableFreeze)] - pub fn enable_freeze(&mut self, enable: JsValue) -> Result { - let enable = enable - .as_bool() - .ok_or_else(|| to_js_err("must pass a bool to enableFreeze"))?; - let old_freeze = self.freeze; - self.freeze = enable; - Ok(old_freeze.into()) - } - - #[wasm_bindgen(js_name = enablePatches)] - pub fn enable_patches(&mut self, enable: JsValue) -> Result { - let enable = enable - .as_bool() - .ok_or_else(|| to_js_err("must pass a bool to enablePatches"))?; - let old_enabled = self.doc.observer().enable(enable); - self.doc.observer().set_text_rep(self.text_rep); - Ok(old_enabled.into()) - } - - #[wasm_bindgen(js_name = registerDatatype)] - pub fn register_datatype( - &mut self, - datatype: JsValue, - function: JsValue, - ) -> Result<(), value::InvalidDatatype> { - let datatype = Datatype::try_from(datatype)?; - if let Ok(function) = function.dyn_into::() { - self.external_types.insert(datatype, function); - } else { - self.external_types.remove(&datatype); - } - Ok(()) - } - - #[wasm_bindgen(js_name = applyPatches)] - pub fn apply_patches( - &mut self, - object: JsValue, - meta: JsValue, - callback: JsValue, - ) -> Result { - let mut object = object - .dyn_into::() - .map_err(|_| error::ApplyPatch::NotObjectd)?; - let patches = self.doc.observer().take_patches(); - let callback = callback.dyn_into::().ok(); - - // even if there are no patches we may need to update the meta object - // which requires that we update the object too - if patches.is_empty() && !meta.is_undefined() { - let (obj, datatype, id) = self.unwrap_object(&object)?; - object = 
Object::assign(&Object::new(), &obj); - object = self.wrap_object(object, datatype, &id, &meta)?; - } - - let mut exposed = HashSet::default(); - - let before = object.clone(); - - for p in &patches { - object = self.apply_patch(object, p, 0, &meta, &mut exposed)?; - } - - if let Some(c) = &callback { - if !patches.is_empty() { - let patches: Array = patches - .into_iter() - .map(JsValue::try_from) - .collect::>()?; - c.call3(&JsValue::undefined(), &patches.into(), &before, &object) - .map_err(error::ApplyPatch::PatchCallback)?; - } - } - - self.finalize_exposed(&object, exposed, &meta)?; - - Ok(object.into()) - } - - #[wasm_bindgen(js_name = popPatches)] - pub fn pop_patches(&mut self) -> Result { - // transactions send out observer updates as they occur, not waiting for them to be - // committed. - // If we pop the patches then we won't be able to revert them. - - let patches = self.doc.observer().take_patches(); - let result = Array::new(); - for p in patches { - result.push(&p.try_into()?); - } - Ok(result) - } - - pub fn length(&self, obj: JsValue, heads: Option) -> Result { - let (obj, _) = self.import(obj)?; - if let Some(heads) = get_heads(heads)? 
{ - Ok(self.doc.length_at(&obj, &heads) as f64) - } else { - Ok(self.doc.length(&obj) as f64) - } - } - - pub fn delete(&mut self, obj: JsValue, prop: JsValue) -> Result<(), error::Get> { - let (obj, _) = self.import(obj)?; - let prop = to_prop(prop)?; - self.doc.delete(&obj, prop)?; - Ok(()) - } - - pub fn save(&mut self) -> Uint8Array { - Uint8Array::from(self.doc.save().as_slice()) - } - - #[wasm_bindgen(js_name = saveIncremental)] - pub fn save_incremental(&mut self) -> Uint8Array { - let bytes = self.doc.save_incremental(); - Uint8Array::from(bytes.as_slice()) - } - - #[wasm_bindgen(js_name = loadIncremental)] - pub fn load_incremental(&mut self, data: Uint8Array) -> Result { - let data = data.to_vec(); - let len = self.doc.load_incremental(&data)?; - Ok(len as f64) - } - - #[wasm_bindgen(js_name = applyChanges)] - pub fn apply_changes(&mut self, changes: JsValue) -> Result<(), error::ApplyChangesError> { - let changes: Vec<_> = JS(changes).try_into()?; - self.doc.apply_changes(changes)?; - Ok(()) - } - - #[wasm_bindgen(js_name = getChanges)] - pub fn get_changes(&mut self, have_deps: JsValue) -> Result { - let deps: Vec<_> = JS(have_deps).try_into()?; - let changes = self.doc.get_changes(&deps)?; - let changes: Array = changes - .iter() - .map(|c| Uint8Array::from(c.raw_bytes())) - .collect(); - Ok(changes) - } - - #[wasm_bindgen(js_name = getChangeByHash)] - pub fn get_change_by_hash( - &mut self, - hash: JsValue, - ) -> Result { - let hash = JS(hash).try_into()?; - let change = self.doc.get_change_by_hash(&hash); - if let Some(c) = change { - Ok(Uint8Array::from(c.raw_bytes()).into()) - } else { - Ok(JsValue::null()) - } - } - - #[wasm_bindgen(js_name = getChangesAdded)] - pub fn get_changes_added(&mut self, other: &mut Automerge) -> Array { - let changes = self.doc.get_changes_added(&mut other.doc); - let changes: Array = changes - .iter() - .map(|c| Uint8Array::from(c.raw_bytes())) - .collect(); - changes - } - - #[wasm_bindgen(js_name = getHeads)] - pub 
fn get_heads(&mut self) -> Array { - let heads = self.doc.get_heads(); - let heads: Array = heads - .iter() - .map(|h| JsValue::from_str(&hex::encode(h.0))) - .collect(); - heads - } - - #[wasm_bindgen(js_name = getActorId)] - pub fn get_actor_id(&self) -> String { - let actor = self.doc.get_actor(); - actor.to_string() - } - - #[wasm_bindgen(js_name = getLastLocalChange)] - pub fn get_last_local_change(&mut self) -> JsValue { - if let Some(change) = self.doc.get_last_local_change() { - Uint8Array::from(change.raw_bytes()).into() - } else { - JsValue::null() - } - } - - pub fn dump(&mut self) { - self.doc.dump() - } - - #[wasm_bindgen(js_name = getMissingDeps)] - pub fn get_missing_deps(&mut self, heads: Option) -> Result { - let heads = get_heads(heads)?.unwrap_or_default(); - let deps = self.doc.get_missing_deps(&heads); - let deps: Array = deps - .iter() - .map(|h| JsValue::from_str(&hex::encode(h.0))) - .collect(); - Ok(deps) - } - - #[wasm_bindgen(js_name = receiveSyncMessage)] - pub fn receive_sync_message( - &mut self, - state: &mut SyncState, - message: Uint8Array, - ) -> Result<(), error::ReceiveSyncMessage> { - let message = message.to_vec(); - let message = am::sync::Message::decode(message.as_slice())?; - self.doc - .sync() - .receive_sync_message(&mut state.0, message)?; - Ok(()) - } - - #[wasm_bindgen(js_name = generateSyncMessage)] - pub fn generate_sync_message(&mut self, state: &mut SyncState) -> JsValue { - if let Some(message) = self.doc.sync().generate_sync_message(&mut state.0) { - Uint8Array::from(message.encode().as_slice()).into() - } else { - JsValue::null() - } - } - - #[wasm_bindgen(js_name = toJS)] - pub fn to_js(&mut self, meta: JsValue) -> Result { - self.export_object(&ROOT, Datatype::Map, None, &meta) - } - - pub fn materialize( - &mut self, - obj: JsValue, - heads: Option, - meta: JsValue, - ) -> Result { - let (obj, obj_type) = self.import(obj).unwrap_or((ROOT, am::ObjType::Map)); - let heads = get_heads(heads)?; - let _patches = 
self.doc.observer().take_patches(); // throw away patches - Ok(self.export_object(&obj, obj_type.into(), heads.as_ref(), &meta)?) - } - - #[wasm_bindgen(js_name = emptyChange)] - pub fn empty_change(&mut self, message: Option, time: Option) -> JsValue { - let time = time.map(|f| f as i64); - let options = CommitOptions { message, time }; - let hash = self.doc.empty_change(options); - JsValue::from_str(&hex::encode(hash)) - } -} - -#[wasm_bindgen(js_name = create)] -pub fn init(text_v2: bool, actor: Option) -> Result { - console_error_panic_hook::set_once(); - let text_rep = if text_v2 { - TextRepresentation::String - } else { - TextRepresentation::Array - }; - Automerge::new(actor, text_rep) -} - -#[wasm_bindgen(js_name = load)] -pub fn load( - data: Uint8Array, - text_v2: bool, - actor: Option, -) -> Result { - let data = data.to_vec(); - let text_rep = if text_v2 { - TextRepresentation::String - } else { - TextRepresentation::Array - }; - let mut doc = am::AutoCommitWithObs::::load(&data)? - .with_observer(Observer::default().with_text_rep(text_rep)) - .with_encoding(TextEncoding::Utf16); - if let Some(s) = actor { - let actor = - automerge::ActorId::from(hex::decode(s).map_err(error::BadActorId::from)?.to_vec()); - doc.set_actor(actor); - } - Ok(Automerge { - doc, - freeze: false, - external_types: HashMap::default(), - text_rep, - }) -} - -#[wasm_bindgen(js_name = encodeChange)] -pub fn encode_change(change: JsValue) -> Result { - // Alex: Technically we should be using serde_wasm_bindgen::from_value instead of into_serde. - // Unfortunately serde_wasm_bindgen::from_value fails for some inscrutable reason, so instead - // we use into_serde (sorry to future me). 
- #[allow(deprecated)] - let change: am::ExpandedChange = change.into_serde()?; - let change: Change = change.into(); - Ok(Uint8Array::from(change.raw_bytes())) -} - -#[wasm_bindgen(js_name = decodeChange)] -pub fn decode_change(change: Uint8Array) -> Result { - let change = Change::from_bytes(change.to_vec())?; - let change: am::ExpandedChange = change.decode(); - let serializer = serde_wasm_bindgen::Serializer::json_compatible(); - Ok(change.serialize(&serializer)?) -} - -#[wasm_bindgen(js_name = initSyncState)] -pub fn init_sync_state() -> SyncState { - SyncState(am::sync::State::new()) -} - -// this is needed to be compatible with the automerge-js api -#[wasm_bindgen(js_name = importSyncState)] -pub fn import_sync_state(state: JsValue) -> Result { - Ok(SyncState(JS(state).try_into()?)) -} - -// this is needed to be compatible with the automerge-js api -#[wasm_bindgen(js_name = exportSyncState)] -pub fn export_sync_state(state: &SyncState) -> JsValue { - JS::from(state.0.clone()).into() -} - -#[wasm_bindgen(js_name = encodeSyncMessage)] -pub fn encode_sync_message(message: JsValue) -> Result { - let message: am::sync::Message = JS(message).try_into()?; - Ok(Uint8Array::from(message.encode().as_slice())) -} - -#[wasm_bindgen(js_name = decodeSyncMessage)] -pub fn decode_sync_message(msg: Uint8Array) -> Result { - let data = msg.to_vec(); - let msg = am::sync::Message::decode(&data)?; - let heads = AR::from(msg.heads.as_slice()); - let need = AR::from(msg.need.as_slice()); - let changes = AR::from(msg.changes.as_slice()); - let have = AR::from(msg.have.as_slice()); - let obj = Object::new().into(); - // SAFETY: we just created this object - js_set(&obj, "heads", heads).unwrap(); - js_set(&obj, "need", need).unwrap(); - js_set(&obj, "have", have).unwrap(); - js_set(&obj, "changes", changes).unwrap(); - Ok(obj) -} - -#[wasm_bindgen(js_name = encodeSyncState)] -pub fn encode_sync_state(state: &SyncState) -> Uint8Array { - Uint8Array::from(state.0.encode().as_slice()) 
-} - -#[wasm_bindgen(js_name = decodeSyncState)] -pub fn decode_sync_state(data: Uint8Array) -> Result { - SyncState::decode(data) -} - -pub mod error { - use automerge::AutomergeError; - use wasm_bindgen::JsValue; - - use crate::interop::{ - self, - error::{BadChangeHashes, BadJSChanges}, - }; - - #[derive(Debug, thiserror::Error)] - #[error("could not parse Actor ID as a hex string: {0}")] - pub struct BadActorId(#[from] hex::FromHexError); - - impl From for JsValue { - fn from(s: BadActorId) -> Self { - JsValue::from(s.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - pub enum ApplyChangesError { - #[error(transparent)] - DecodeChanges(#[from] BadJSChanges), - #[error("error applying changes: {0}")] - Apply(#[from] AutomergeError), - } - - impl From for JsValue { - fn from(e: ApplyChangesError) -> Self { - JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - pub enum Fork { - #[error(transparent)] - BadActor(#[from] BadActorId), - #[error(transparent)] - Automerge(#[from] AutomergeError), - #[error(transparent)] - BadChangeHashes(#[from] BadChangeHashes), - } - - impl From for JsValue { - fn from(f: Fork) -> Self { - JsValue::from(f.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - #[error(transparent)] - pub struct Merge(#[from] AutomergeError); - - impl From for JsValue { - fn from(e: Merge) -> Self { - JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - pub enum Get { - #[error("invalid object ID: {0}")] - ImportObj(#[from] interop::error::ImportObj), - #[error(transparent)] - Automerge(#[from] AutomergeError), - #[error("bad heads: {0}")] - BadHeads(#[from] interop::error::BadChangeHashes), - #[error(transparent)] - InvalidProp(#[from] interop::error::InvalidProp), - } - - impl From for JsValue { - fn from(e: Get) -> Self { - JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - pub enum Splice { - #[error("invalid object ID: {0}")] - ImportObj(#[from] 
interop::error::ImportObj), - #[error(transparent)] - Automerge(#[from] AutomergeError), - #[error("value at {0} in values to insert was not a primitive")] - ValueNotPrimitive(usize), - } - - impl From for JsValue { - fn from(e: Splice) -> Self { - JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - pub enum Insert { - #[error("invalid object id: {0}")] - ImportObj(#[from] interop::error::ImportObj), - #[error("the value to insert was not a primitive")] - ValueNotPrimitive, - #[error(transparent)] - Automerge(#[from] AutomergeError), - #[error(transparent)] - InvalidProp(#[from] interop::error::InvalidProp), - #[error(transparent)] - InvalidValue(#[from] interop::error::InvalidValue), - } - - impl From for JsValue { - fn from(e: Insert) -> Self { - JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - pub enum InsertObject { - #[error("invalid object id: {0}")] - ImportObj(#[from] interop::error::ImportObj), - #[error("the value to insert must be an object")] - ValueNotObject, - #[error(transparent)] - Automerge(#[from] AutomergeError), - #[error(transparent)] - InvalidProp(#[from] interop::error::InvalidProp), - #[error(transparent)] - InvalidValue(#[from] interop::error::InvalidValue), - } - - impl From for JsValue { - fn from(e: InsertObject) -> Self { - JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - pub enum Increment { - #[error("invalid object id: {0}")] - ImportObj(#[from] interop::error::ImportObj), - #[error(transparent)] - InvalidProp(#[from] interop::error::InvalidProp), - #[error("value was not numeric")] - ValueNotNumeric, - #[error(transparent)] - Automerge(#[from] AutomergeError), - } - - impl From for JsValue { - fn from(e: Increment) -> Self { - JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - pub enum BadSyncMessage { - #[error("could not decode sync message: {0}")] - ReadMessage(#[from] automerge::sync::ReadMessageError), - } - - impl 
From for JsValue { - fn from(e: BadSyncMessage) -> Self { - JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - pub enum ApplyPatch { - #[error(transparent)] - Interop(#[from] interop::error::ApplyPatch), - #[error(transparent)] - Export(#[from] interop::error::Export), - #[error("patch was not an object")] - NotObjectd, - #[error("error calling patch callback: {0:?}")] - PatchCallback(JsValue), - } - - impl From for JsValue { - fn from(e: ApplyPatch) -> Self { - JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - #[error("unable to build patches: {0}")] - pub struct PopPatches(#[from] interop::error::Export); - - impl From for JsValue { - fn from(e: PopPatches) -> Self { - JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - pub enum Materialize { - #[error(transparent)] - Export(#[from] interop::error::Export), - #[error("bad heads: {0}")] - Heads(#[from] interop::error::BadChangeHashes), - } - - impl From for JsValue { - fn from(e: Materialize) -> Self { - JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - pub enum ReceiveSyncMessage { - #[error(transparent)] - Decode(#[from] automerge::sync::ReadMessageError), - #[error(transparent)] - Automerge(#[from] AutomergeError), - } - - impl From for JsValue { - fn from(e: ReceiveSyncMessage) -> Self { - JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - pub enum Load { - #[error(transparent)] - Automerge(#[from] AutomergeError), - #[error(transparent)] - BadActor(#[from] BadActorId), - } - - impl From for JsValue { - fn from(e: Load) -> Self { - JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - #[error("Unable to read JS change: {0}")] - pub struct EncodeChange(#[from] serde_json::Error); - - impl From for JsValue { - fn from(e: EncodeChange) -> Self { - JsValue::from(e.to_string()) - } - } - - #[derive(Debug, thiserror::Error)] - pub enum DecodeChange { - 
#[error(transparent)] - Load(#[from] automerge::LoadChangeError), - #[error(transparent)] - Serialize(#[from] serde_wasm_bindgen::Error), - } - - impl From for JsValue { - fn from(e: DecodeChange) -> Self { - JsValue::from(e.to_string()) - } - } -} diff --git a/rust/automerge-wasm/src/observer.rs b/rust/automerge-wasm/src/observer.rs deleted file mode 100644 index 2351c762..00000000 --- a/rust/automerge-wasm/src/observer.rs +++ /dev/null @@ -1,518 +0,0 @@ -#![allow(dead_code)] - -use std::borrow::Cow; - -use crate::{ - interop::{self, alloc, js_set}, - TextRepresentation, -}; -use automerge::{ObjId, OpObserver, Prop, ReadDoc, ScalarValue, Value}; -use js_sys::{Array, Object}; -use wasm_bindgen::prelude::*; - -use crate::sequence_tree::SequenceTree; - -#[derive(Debug, Clone, Default)] -pub(crate) struct Observer { - enabled: bool, - patches: Vec, - text_rep: TextRepresentation, -} - -impl Observer { - pub(crate) fn take_patches(&mut self) -> Vec { - std::mem::take(&mut self.patches) - } - pub(crate) fn enable(&mut self, enable: bool) -> bool { - if self.enabled && !enable { - self.patches.truncate(0) - } - let old_enabled = self.enabled; - self.enabled = enable; - old_enabled - } - - fn get_path(&mut self, doc: &R, obj: &ObjId) -> Option> { - match doc.parents(obj) { - Ok(parents) => parents.visible_path(), - Err(e) => { - automerge::log!("error generating patch : {:?}", e); - None - } - } - } - - pub(crate) fn with_text_rep(mut self, text_rep: TextRepresentation) -> Self { - self.text_rep = text_rep; - self - } - - pub(crate) fn set_text_rep(&mut self, text_rep: TextRepresentation) { - self.text_rep = text_rep; - } -} - -#[derive(Debug, Clone)] -pub(crate) enum Patch { - PutMap { - obj: ObjId, - path: Vec<(ObjId, Prop)>, - key: String, - value: (Value<'static>, ObjId), - expose: bool, - }, - PutSeq { - obj: ObjId, - path: Vec<(ObjId, Prop)>, - index: usize, - value: (Value<'static>, ObjId), - expose: bool, - }, - Insert { - obj: ObjId, - path: Vec<(ObjId, Prop)>, - 
index: usize, - values: SequenceTree<(Value<'static>, ObjId)>, - }, - SpliceText { - obj: ObjId, - path: Vec<(ObjId, Prop)>, - index: usize, - value: SequenceTree, - }, - Increment { - obj: ObjId, - path: Vec<(ObjId, Prop)>, - prop: Prop, - value: i64, - }, - DeleteMap { - obj: ObjId, - path: Vec<(ObjId, Prop)>, - key: String, - }, - DeleteSeq { - obj: ObjId, - path: Vec<(ObjId, Prop)>, - index: usize, - length: usize, - }, -} - -impl OpObserver for Observer { - fn insert( - &mut self, - doc: &R, - obj: ObjId, - index: usize, - tagged_value: (Value<'_>, ObjId), - ) { - if self.enabled { - let value = (tagged_value.0.to_owned(), tagged_value.1); - if let Some(Patch::Insert { - obj: tail_obj, - index: tail_index, - values, - .. - }) = self.patches.last_mut() - { - let range = *tail_index..=*tail_index + values.len(); - if tail_obj == &obj && range.contains(&index) { - values.insert(index - *tail_index, value); - return; - } - } - if let Some(path) = self.get_path(doc, &obj) { - let mut values = SequenceTree::new(); - values.push(value); - let patch = Patch::Insert { - path, - obj, - index, - values, - }; - self.patches.push(patch); - } - } - } - - fn splice_text(&mut self, doc: &R, obj: ObjId, index: usize, value: &str) { - if self.enabled { - if self.text_rep == TextRepresentation::Array { - for (i, c) in value.chars().enumerate() { - self.insert( - doc, - obj.clone(), - index + i, - ( - Value::Scalar(Cow::Owned(ScalarValue::Str(c.to_string().into()))), - ObjId::Root, // We hope this is okay - ), - ); - } - return; - } - if let Some(Patch::SpliceText { - obj: tail_obj, - index: tail_index, - value: prev_value, - .. 
- }) = self.patches.last_mut() - { - let range = *tail_index..=*tail_index + prev_value.len(); - if tail_obj == &obj && range.contains(&index) { - let i = index - *tail_index; - for (n, ch) in value.encode_utf16().enumerate() { - prev_value.insert(i + n, ch) - } - return; - } - } - if let Some(path) = self.get_path(doc, &obj) { - let mut v = SequenceTree::new(); - for ch in value.encode_utf16() { - v.push(ch) - } - let patch = Patch::SpliceText { - path, - obj, - index, - value: v, - }; - self.patches.push(patch); - } - } - } - - fn delete_seq(&mut self, doc: &R, obj: ObjId, index: usize, length: usize) { - if self.enabled { - match self.patches.last_mut() { - Some(Patch::SpliceText { - obj: tail_obj, - index: tail_index, - value, - .. - }) => { - let range = *tail_index..*tail_index + value.len(); - if tail_obj == &obj - && range.contains(&index) - && range.contains(&(index + length - 1)) - { - for _ in 0..length { - value.remove(index - *tail_index); - } - return; - } - } - Some(Patch::Insert { - obj: tail_obj, - index: tail_index, - values, - .. - }) => { - let range = *tail_index..*tail_index + values.len(); - if tail_obj == &obj - && range.contains(&index) - && range.contains(&(index + length - 1)) - { - for _ in 0..length { - values.remove(index - *tail_index); - } - return; - } - } - Some(Patch::DeleteSeq { - obj: tail_obj, - index: tail_index, - length: tail_length, - .. 
- }) => { - if tail_obj == &obj && index == *tail_index { - *tail_length += length; - return; - } - } - _ => {} - } - if let Some(path) = self.get_path(doc, &obj) { - let patch = Patch::DeleteSeq { - path, - obj, - index, - length, - }; - self.patches.push(patch) - } - } - } - - fn delete_map(&mut self, doc: &R, obj: ObjId, key: &str) { - if self.enabled { - if let Some(path) = self.get_path(doc, &obj) { - let patch = Patch::DeleteMap { - path, - obj, - key: key.to_owned(), - }; - self.patches.push(patch) - } - } - } - - fn put( - &mut self, - doc: &R, - obj: ObjId, - prop: Prop, - tagged_value: (Value<'_>, ObjId), - _conflict: bool, - ) { - if self.enabled { - let expose = false; - if let Some(path) = self.get_path(doc, &obj) { - let value = (tagged_value.0.to_owned(), tagged_value.1); - let patch = match prop { - Prop::Map(key) => Patch::PutMap { - path, - obj, - key, - value, - expose, - }, - Prop::Seq(index) => Patch::PutSeq { - path, - obj, - index, - value, - expose, - }, - }; - self.patches.push(patch); - } - } - } - - fn expose( - &mut self, - doc: &R, - obj: ObjId, - prop: Prop, - tagged_value: (Value<'_>, ObjId), - _conflict: bool, - ) { - if self.enabled { - let expose = true; - if let Some(path) = self.get_path(doc, &obj) { - let value = (tagged_value.0.to_owned(), tagged_value.1); - let patch = match prop { - Prop::Map(key) => Patch::PutMap { - path, - obj, - key, - value, - expose, - }, - Prop::Seq(index) => Patch::PutSeq { - path, - obj, - index, - value, - expose, - }, - }; - self.patches.push(patch); - } - } - } - - fn increment( - &mut self, - doc: &R, - obj: ObjId, - prop: Prop, - tagged_value: (i64, ObjId), - ) { - if self.enabled { - if let Some(path) = self.get_path(doc, &obj) { - let value = tagged_value.0; - self.patches.push(Patch::Increment { - path, - obj, - prop, - value, - }) - } - } - } - - fn text_as_seq(&self) -> bool { - self.text_rep == TextRepresentation::Array - } -} - -impl automerge::op_observer::BranchableObserver for Observer 
{ - fn merge(&mut self, other: &Self) { - self.patches.extend_from_slice(other.patches.as_slice()) - } - - fn branch(&self) -> Self { - Observer { - patches: vec![], - enabled: self.enabled, - text_rep: self.text_rep, - } - } -} - -fn prop_to_js(p: &Prop) -> JsValue { - match p { - Prop::Map(key) => JsValue::from_str(key), - Prop::Seq(index) => JsValue::from_f64(*index as f64), - } -} - -fn export_path(path: &[(ObjId, Prop)], end: &Prop) -> Array { - let result = Array::new(); - for p in path { - result.push(&prop_to_js(&p.1)); - } - result.push(&prop_to_js(end)); - result -} - -impl Patch { - pub(crate) fn path(&self) -> &[(ObjId, Prop)] { - match &self { - Self::PutMap { path, .. } => path.as_slice(), - Self::PutSeq { path, .. } => path.as_slice(), - Self::Increment { path, .. } => path.as_slice(), - Self::Insert { path, .. } => path.as_slice(), - Self::SpliceText { path, .. } => path.as_slice(), - Self::DeleteMap { path, .. } => path.as_slice(), - Self::DeleteSeq { path, .. } => path.as_slice(), - } - } - - pub(crate) fn obj(&self) -> &ObjId { - match &self { - Self::PutMap { obj, .. } => obj, - Self::PutSeq { obj, .. } => obj, - Self::Increment { obj, .. } => obj, - Self::Insert { obj, .. } => obj, - Self::SpliceText { obj, .. } => obj, - Self::DeleteMap { obj, .. } => obj, - Self::DeleteSeq { obj, .. } => obj, - } - } -} - -impl TryFrom for JsValue { - type Error = interop::error::Export; - - fn try_from(p: Patch) -> Result { - let result = Object::new(); - match p { - Patch::PutMap { - path, key, value, .. - } => { - js_set(&result, "action", "put")?; - js_set( - &result, - "path", - export_path(path.as_slice(), &Prop::Map(key)), - )?; - js_set( - &result, - "value", - alloc(&value.0, TextRepresentation::String).1, - )?; - Ok(result.into()) - } - Patch::PutSeq { - path, index, value, .. 
- } => { - js_set(&result, "action", "put")?; - js_set( - &result, - "path", - export_path(path.as_slice(), &Prop::Seq(index)), - )?; - js_set( - &result, - "value", - alloc(&value.0, TextRepresentation::String).1, - )?; - Ok(result.into()) - } - Patch::Insert { - path, - index, - values, - .. - } => { - js_set(&result, "action", "insert")?; - js_set( - &result, - "path", - export_path(path.as_slice(), &Prop::Seq(index)), - )?; - js_set( - &result, - "values", - values - .iter() - .map(|v| alloc(&v.0, TextRepresentation::String).1) - .collect::(), - )?; - Ok(result.into()) - } - Patch::SpliceText { - path, index, value, .. - } => { - js_set(&result, "action", "splice")?; - js_set( - &result, - "path", - export_path(path.as_slice(), &Prop::Seq(index)), - )?; - let bytes: Vec = value.iter().cloned().collect(); - js_set(&result, "value", String::from_utf16_lossy(bytes.as_slice()))?; - Ok(result.into()) - } - Patch::Increment { - path, prop, value, .. - } => { - js_set(&result, "action", "inc")?; - js_set(&result, "path", export_path(path.as_slice(), &prop))?; - js_set(&result, "value", &JsValue::from_f64(value as f64))?; - Ok(result.into()) - } - Patch::DeleteMap { path, key, .. } => { - js_set(&result, "action", "del")?; - js_set( - &result, - "path", - export_path(path.as_slice(), &Prop::Map(key)), - )?; - Ok(result.into()) - } - Patch::DeleteSeq { - path, - index, - length, - .. 
- } => { - js_set(&result, "action", "del")?; - js_set( - &result, - "path", - export_path(path.as_slice(), &Prop::Seq(index)), - )?; - if length > 1 { - js_set(&result, "length", length)?; - } - Ok(result.into()) - } - } - } -} diff --git a/rust/automerge-wasm/src/value.rs b/rust/automerge-wasm/src/value.rs deleted file mode 100644 index 643e2881..00000000 --- a/rust/automerge-wasm/src/value.rs +++ /dev/null @@ -1,161 +0,0 @@ -use automerge::{ObjType, ScalarValue, Value}; -use wasm_bindgen::prelude::*; - -#[derive(Debug, Clone, Hash, Eq, PartialEq)] -pub(crate) enum Datatype { - Map, - Table, - List, - Text, - Bytes, - Str, - Int, - Uint, - F64, - Counter, - Timestamp, - Boolean, - Null, - Unknown(u8), -} - -impl Datatype { - pub(crate) fn is_scalar(&self) -> bool { - !matches!(self, Self::Map | Self::Table | Self::List | Self::Text) - } -} - -impl From<&ObjType> for Datatype { - fn from(o: &ObjType) -> Self { - (*o).into() - } -} - -impl From for Datatype { - fn from(o: ObjType) -> Self { - match o { - ObjType::Map => Self::Map, - ObjType::List => Self::List, - ObjType::Table => Self::Table, - ObjType::Text => Self::Text, - } - } -} - -impl std::fmt::Display for Datatype { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { - write!(f, "{}", String::from(self.clone())) - } -} - -impl From<&ScalarValue> for Datatype { - fn from(s: &ScalarValue) -> Self { - match s { - ScalarValue::Bytes(_) => Self::Bytes, - ScalarValue::Str(_) => Self::Str, - ScalarValue::Int(_) => Self::Int, - ScalarValue::Uint(_) => Self::Uint, - ScalarValue::F64(_) => Self::F64, - ScalarValue::Counter(_) => Self::Counter, - ScalarValue::Timestamp(_) => Self::Timestamp, - ScalarValue::Boolean(_) => Self::Boolean, - ScalarValue::Null => Self::Null, - ScalarValue::Unknown { type_code, .. 
} => Self::Unknown(*type_code), - } - } -} - -impl From<&Value<'_>> for Datatype { - fn from(v: &Value<'_>) -> Self { - match v { - Value::Object(o) => o.into(), - Value::Scalar(s) => s.as_ref().into(), - /* - ScalarValue::Bytes(_) => Self::Bytes, - ScalarValue::Str(_) => Self::Str, - ScalarValue::Int(_) => Self::Int, - ScalarValue::Uint(_) => Self::Uint, - ScalarValue::F64(_) => Self::F64, - ScalarValue::Counter(_) => Self::Counter, - ScalarValue::Timestamp(_) => Self::Timestamp, - ScalarValue::Boolean(_) => Self::Boolean, - ScalarValue::Null => Self::Null, - ScalarValue::Unknown { type_code, .. } => Self::Unknown(*type_code), - */ - } - } -} - -impl From for String { - fn from(d: Datatype) -> Self { - match d { - Datatype::Map => "map".into(), - Datatype::Table => "table".into(), - Datatype::List => "list".into(), - Datatype::Text => "text".into(), - Datatype::Bytes => "bytes".into(), - Datatype::Str => "str".into(), - Datatype::Int => "int".into(), - Datatype::Uint => "uint".into(), - Datatype::F64 => "f64".into(), - Datatype::Counter => "counter".into(), - Datatype::Timestamp => "timestamp".into(), - Datatype::Boolean => "boolean".into(), - Datatype::Null => "null".into(), - Datatype::Unknown(type_code) => format!("unknown{}", type_code), - } - } -} - -impl TryFrom for Datatype { - type Error = InvalidDatatype; - - fn try_from(datatype: JsValue) -> Result { - let datatype = datatype.as_string().ok_or(InvalidDatatype::NotString)?; - match datatype.as_str() { - "map" => Ok(Datatype::Map), - "table" => Ok(Datatype::Table), - "list" => Ok(Datatype::List), - "text" => Ok(Datatype::Text), - "bytes" => Ok(Datatype::Bytes), - "str" => Ok(Datatype::Str), - "int" => Ok(Datatype::Int), - "uint" => Ok(Datatype::Uint), - "f64" => Ok(Datatype::F64), - "counter" => Ok(Datatype::Counter), - "timestamp" => Ok(Datatype::Timestamp), - "boolean" => Ok(Datatype::Boolean), - "null" => Ok(Datatype::Null), - d => { - if d.starts_with("unknown") { - // TODO: handle "unknown{}", - 
Err(InvalidDatatype::UnknownNotImplemented) - } else { - Err(InvalidDatatype::Unknown(d.to_string())) - } - } - } - } -} - -impl From for JsValue { - fn from(d: Datatype) -> Self { - String::from(d).into() - } -} - -#[derive(Debug, thiserror::Error)] -pub enum InvalidDatatype { - #[error("unknown datatype")] - Unknown(String), - #[error("datatype is not a string")] - NotString, - #[error("cannot handle unknown datatype")] - UnknownNotImplemented, -} - -impl From for JsValue { - fn from(e: InvalidDatatype) -> Self { - JsValue::from(e.to_string()) - } -} diff --git a/rust/automerge-wasm/test/apply.ts b/rust/automerge-wasm/test/apply.ts deleted file mode 100644 index 453b4c26..00000000 --- a/rust/automerge-wasm/test/apply.ts +++ /dev/null @@ -1,229 +0,0 @@ - -import { describe, it } from 'mocha'; -import assert from 'assert' -import { create, Value } from '..' - -export const OBJECT_ID = Symbol.for('_am_objectId') // object containing metadata about current - -// @ts-ignore -function _obj(doc: any) : any { - if (typeof doc === 'object' && doc !== null) { - return doc[OBJECT_ID] - } -} - -// sample classes for testing -class Counter { - value: number; - constructor(n: number) { - this.value = n - } -} - -describe('Automerge', () => { - describe('Patch Apply', () => { - it('apply nested sets on maps', () => { - const start = { hello: { mellow: { yellow: "world", x: 1 }, y : 2 } } - const doc1 = create(true) - doc1.putObject("/", "hello", start.hello); - let mat = doc1.materialize("/") - const doc2 = create(true) - doc2.enablePatches(true) - doc2.merge(doc1) - - let base = doc2.applyPatches({}) - assert.deepEqual(mat, start) - assert.deepEqual(base, start) - - doc2.delete("/hello/mellow", "yellow"); - // @ts-ignore - delete start.hello.mellow.yellow; - base = doc2.applyPatches(base) - mat = doc2.materialize("/") - - assert.deepEqual(mat, start) - assert.deepEqual(base, start) - }) - - it('apply patches on lists', () => { - const start = { list: [1,2,3,4] } - const doc1 = 
create(true) - doc1.putObject("/", "list", start.list); - let mat = doc1.materialize("/") - const doc2 = create(true) - doc2.enablePatches(true) - doc2.merge(doc1) - mat = doc1.materialize("/") - let base = doc2.applyPatches({}) - assert.deepEqual(mat, start) - assert.deepEqual(base, start) - - doc2.delete("/list", 3); - start.list.splice(3,1) - base = doc2.applyPatches(base) - - assert.deepEqual(base, start) - }) - - it('apply patches on lists of lists of lists', () => { - const start = { list: - [ - [ - [ 1, 2, 3, 4, 5, 6], - [ 7, 8, 9,10,11,12], - ], - [ - [ 7, 8, 9,10,11,12], - [ 1, 2, 3, 4, 5, 6], - ] - ] - } - const doc1 = create(true) - doc1.enablePatches(true) - doc1.putObject("/", "list", start.list); - let base = doc1.applyPatches({}) - let mat = doc1.clone().materialize("/") - assert.deepEqual(mat, start) - assert.deepEqual(base, start) - - doc1.delete("/list/0/1", 3) - start.list[0][1].splice(3,1) - - doc1.delete("/list/0", 0) - start.list[0].splice(0,1) - - mat = doc1.clone().materialize("/") - base = doc1.applyPatches(base) - assert.deepEqual(mat, start) - assert.deepEqual(base, start) - }) - - it('large inserts should make one splice patch', () => { - const doc1 = create(true) - doc1.enablePatches(true) - doc1.putObject("/", "list", "abc"); - const patches = doc1.popPatches() - assert.deepEqual( patches, [ - { action: 'put', path: [ 'list' ], value: "" }, - { action: 'splice', path: [ 'list', 0 ], value: 'abc' }]) - }) - - it('it should allow registering type wrappers', () => { - const doc1 = create(true) - doc1.enablePatches(true) - doc1.registerDatatype("counter", (n: number) => new Counter(n)) - const doc2 = doc1.fork() - doc1.put("/", "n", 10, "counter") - doc1.put("/", "m", 10, "int") - - let mat = doc1.materialize("/") - assert.deepEqual( mat, { n: new Counter(10), m: 10 } ) - - doc2.merge(doc1) - let apply = doc2.applyPatches({}) - assert.deepEqual( apply, { n: new Counter(10), m: 10 } ) - - doc1.increment("/","n", 5) - mat = 
doc1.materialize("/") - assert.deepEqual( mat, { n: new Counter(15), m: 10 } ) - - doc2.merge(doc1) - apply = doc2.applyPatches(apply) - assert.deepEqual( apply, { n: new Counter(15), m: 10 } ) - }) - - it('text can be managed as an array or a string', () => { - const doc1 = create(true, "aaaa") - doc1.enablePatches(true) - - doc1.putObject("/", "notes", "hello world") - - let mat = doc1.materialize("/") - - assert.deepEqual( mat, { notes: "hello world" } ) - - const doc2 = create(true) - let apply : any = doc2.materialize("/") - doc2.enablePatches(true) - apply = doc2.applyPatches(apply) - - doc2.merge(doc1); - apply = doc2.applyPatches(apply) - assert.deepEqual(_obj(apply), "_root") - assert.deepEqual( apply, { notes: "hello world" } ) - - doc2.splice("/notes", 6, 5, "everyone"); - apply = doc2.applyPatches(apply) - assert.deepEqual( apply, { notes: "hello everyone" } ) - - mat = doc2.materialize("/") - assert.deepEqual(_obj(mat), "_root") - // @ts-ignore - assert.deepEqual( mat, { notes: "hello everyone" } ) - }) - - it('should set the OBJECT_ID property on lists, maps, and text objects and not on scalars', () => { - const doc1 = create(true, 'aaaa') - const mat: any = doc1.materialize("/") - doc1.enablePatches(true) - doc1.registerDatatype("counter", (n: number) => new Counter(n)) - doc1.put("/", "string", "string", "str") - doc1.put("/", "uint", 2, "uint") - doc1.put("/", "int", 2, "int") - doc1.put("/", "float", 2.3, "f64") - doc1.put("/", "bytes", new Uint8Array(), "bytes") - doc1.put("/", "counter", 1, "counter") - doc1.put("/", "date", new Date(), "timestamp") - doc1.putObject("/", "text", "text") - doc1.putObject("/", "list", []) - doc1.putObject("/", "map", {}) - const applied = doc1.applyPatches(mat) - - assert.equal(_obj(applied.string), null) - assert.equal(_obj(applied.uint), null) - assert.equal(_obj(applied.int), null) - assert.equal(_obj(applied.float), null) - assert.equal(_obj(applied.bytes), null) - assert.equal(_obj(applied.counter), null) - 
assert.equal(_obj(applied.date), null) - assert.equal(_obj(applied.text), null) - - assert.notEqual(_obj(applied.list), null) - assert.notEqual(_obj(applied.map), null) - }) - - it('should set the root OBJECT_ID to "_root"', () => { - const doc1 = create(true, 'aaaa') - const mat: any = doc1.materialize("/") - assert.equal(_obj(mat), "_root") - doc1.enablePatches(true) - doc1.put("/", "key", "value") - const applied = doc1.applyPatches(mat) - assert.equal(_obj(applied), "_root") - }) - - it.skip('it can patch quickly', () => { -/* - console.time("init") - let doc1 = create() - doc1.enablePatches(true) - doc1.putObject("/", "notes", ""); - let mat = doc1.materialize("/") - let doc2 = doc1.fork() - let testData = new Array( 100000 ).join("x") - console.timeEnd("init") - console.time("splice") - doc2.splice("/notes", 0, 0, testData); - console.timeEnd("splice") - console.time("merge") - doc1.merge(doc2) - console.timeEnd("merge") - console.time("patch") - mat = doc1.applyPatches(mat) - console.timeEnd("patch") -*/ - }) - }) -}) - -// TODO: squash puts & deletes diff --git a/rust/automerge-wasm/test/readme.ts b/rust/automerge-wasm/test/readme.ts deleted file mode 100644 index e5823556..00000000 --- a/rust/automerge-wasm/test/readme.ts +++ /dev/null @@ -1,244 +0,0 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ -import { describe, it } from 'mocha'; -import * as assert from 'assert' -import { create, load, initSyncState } from '..' 
- -describe('Automerge', () => { - describe('Readme Examples', () => { - it('Using the Library and Creating a Document', () => { - const doc = create(true) - const sync = initSyncState() - doc.free() - sync.free() - }) - it('Automerge Scalar Types (1)', () => { - const doc = create(true) - doc.put("/", "prop1", 100) // int - doc.put("/", "prop2", 3.14) // f64 - doc.put("/", "prop3", "hello world") - doc.put("/", "prop4", new Date(0)) - doc.put("/", "prop5", new Uint8Array([1,2,3])) - doc.put("/", "prop6", true) - doc.put("/", "prop7", null) - - assert.deepEqual(doc.materialize("/"), { - prop1: 100, - prop2: 3.14, - prop3: "hello world", - prop4: new Date(0), - prop5: new Uint8Array([1,2,3]), - prop6: true, - prop7: null - }) - }) - it('Automerge Scalar Types (2)', () => { - const doc = create(true) - doc.put("/", "prop1", 100, "int") - doc.put("/", "prop2", 100, "uint") - doc.put("/", "prop3", 100.5, "f64") - doc.put("/", "prop4", 100, "counter") - doc.put("/", "prop5", 1647531707301, "timestamp") - doc.put("/", "prop6", new Date(), "timestamp") - doc.put("/", "prop7", "hello world", "str") - doc.put("/", "prop8", new Uint8Array([1,2,3]), "bytes") - doc.put("/", "prop9", true, "boolean") - doc.put("/", "prop10", null, "null") - }) - it('Automerge Object Types (1)', () => { - const doc = create(true) - - // you can create an object by passing in the inital state - if blank pass in `{}` - // the return value is the Object Id - // these functions all return an object id - - const config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) - const token = doc.putObject("/", "tokens", {}) - - // lists can be made with javascript arrays - - const birds = doc.putObject("/", "birds", ["bluejay", "penguin", "puffin"]) - const bots = doc.putObject("/", "bots", []) - - // text is initialized with a string - - const notes = doc.putObject("/", "notes", "Hello world!") - }) - it('Automerge Object Types (2)', () => { - const doc = create(true) 
- - const config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) - - doc.put(config, "align", "right") - - // Anywhere Object Ids are being used a path can also be used. - // The following two statements are equivalent: - - const id = doc.getWithType("/", "config") - if (id && id[0] === 'map') { - doc.put(id[1], "align", "right") - } - - doc.put("/config", "align", "right") - - assert.deepEqual(doc.materialize("/"), { - config: { align: "right", archived: false, cycles: [ 10, 19, 21 ] } - }) - }) - it('Maps (1)', () => { - const doc = create(true) - const mymap = doc.putObject("_root", "mymap", { foo: "bar"}) - // make a new map with the foo key - - doc.put(mymap, "bytes", new Uint8Array([1,2,3])) - // assign a byte array to key `bytes` of the mymap object - - const submap = doc.putObject(mymap, "sub", {}) - // make a new empty object and assign it to the key `sub` of mymap - - assert.deepEqual(doc.keys(mymap),["bytes","foo","sub"]) - assert.deepEqual(doc.materialize("_root"), { mymap: { bytes: new Uint8Array([1,2,3]), foo: "bar", sub: {} }}) - }) - it('Lists (1)', () => { - const doc = create(true) - const items = doc.putObject("_root", "items", [10,"box"]) - // init a new list with two elements - doc.push(items, true) // push `true` to the end of the list - doc.putObject(items, 0, { hello: "world" }) // overwrite the value 10 with an object with a key and value - doc.delete(items, 1) // delete "box" - doc.splice(items, 2, 0, ["bag", "brick"]) // splice in "bag" and "brick" at position 2 - doc.insert(items, 0, "bat") // insert "bat" to the beginning of the list - doc.insertObject(items, 1, [ 1, 2 ]) // insert a list with 2 values at pos 1 - - assert.deepEqual(doc.materialize(items),[ "bat", [ 1 ,2 ], { hello : "world" }, true, "bag", "brick" ]) - assert.deepEqual(doc.length(items),6) - }) - it('Text (1)', () => { - const doc = create(true, "aaaaaa") - const notes = doc.putObject("_root", "notes", "Hello world") - 
doc.splice(notes, 6, 5, "everyone") - - assert.deepEqual(doc.text(notes), "Hello everyone") - }) - it('Querying Data (1)', () => { - const doc1 = create(true, "aabbcc") - doc1.put("_root", "key1", "val1") - const key2 = doc1.putObject("_root", "key2", []) - - assert.deepEqual(doc1.get("_root", "key1"), "val1") - assert.deepEqual(doc1.getWithType("_root", "key2"), ["list", "2@aabbcc"]) - assert.deepEqual(doc1.keys("_root"), ["key1", "key2"]) - - const doc2 = doc1.fork("ffaaff") - - // set a value concurrently - doc1.put("_root","key3","doc1val") - doc2.put("_root","key3","doc2val") - - doc1.merge(doc2) - - assert.deepEqual(doc1.get("_root","key3"), "doc2val") - assert.deepEqual(doc1.getAll("_root","key3"),[[ "str", "doc1val", "3@aabbcc"], ["str", "doc2val", "3@ffaaff"]]) - }) - it('Counters (1)', () => { - const doc1 = create(true, "aaaaaa") - doc1.put("_root", "number", 0) - doc1.put("_root", "total", 0, "counter") - - const doc2 = doc1.fork("bbbbbb") - doc2.put("_root", "number", 10) - doc2.increment("_root", "total", 11) - - doc1.put("_root", "number", 20) - doc1.increment("_root", "total", 22) - - doc1.merge(doc2) - - assert.deepEqual(doc1.materialize("_root"), { number: 10, total: 33 }) - }) - it('Transactions (1)', () => { - const doc = create(true) - - doc.put("_root", "key", "val1") - - assert.deepEqual(doc.get("_root", "key"),"val1") - assert.deepEqual(doc.pendingOps(),1) - - doc.rollback() - - assert.deepEqual(doc.get("_root", "key"),undefined) - assert.deepEqual(doc.pendingOps(),0) - - doc.put("_root", "key", "val2") - - assert.deepEqual(doc.pendingOps(),1) - - doc.commit("test commit 1") - - assert.deepEqual(doc.get("_root", "key"),"val2") - assert.deepEqual(doc.pendingOps(),0) - }) - it('Viewing Old Versions of the Document (1)', () => { - const doc = create(true) - - doc.put("_root", "key", "val1") - const heads1 = doc.getHeads() - - doc.put("_root", "key", "val2") - const heads2 = doc.getHeads() - - doc.put("_root", "key", "val3") - - 
assert.deepEqual(doc.get("_root","key"), "val3") - assert.deepEqual(doc.get("_root","key",heads2), "val2") - assert.deepEqual(doc.get("_root","key",heads1), "val1") - assert.deepEqual(doc.get("_root","key",[]), undefined) - }) - it('Forking And Merging (1)', () => { - const doc1 = create(true) - doc1.put("_root", "key1", "val1") - - const doc2 = doc1.fork() - - doc1.put("_root", "key2", "val2") - doc2.put("_root", "key3", "val3") - - doc1.merge(doc2) - - assert.deepEqual(doc1.materialize("_root"), { key1: "val1", key2: "val2", key3: "val3" }) - assert.deepEqual(doc2.materialize("_root"), { key1: "val1", key3: "val3" }) - }) - it('Saving And Loading (1)', () => { - const doc1 = create(true) - - doc1.put("_root", "key1", "value1") - - const save1 = doc1.save() - - const doc2 = load(save1, true) - - doc2.materialize("_root") // returns { key1: "value1" } - - doc1.put("_root", "key2", "value2") - - const saveIncremental = doc1.saveIncremental() - - const save2 = doc1.save() - - const save3 = new Uint8Array([... save1, ... 
saveIncremental]) - - // save2 has fewer bytes than save3 but contains the same ops - - doc2.loadIncremental(saveIncremental) - - const doc3 = load(save2, true) - - const doc4 = load(save3, true) - - assert.deepEqual(doc1.materialize("_root"), { key1: "value1", key2: "value2" }) - assert.deepEqual(doc2.materialize("_root"), { key1: "value1", key2: "value2" }) - assert.deepEqual(doc3.materialize("_root"), { key1: "value1", key2: "value2" }) - assert.deepEqual(doc4.materialize("_root"), { key1: "value1", key2: "value2" }) - }) - //it.skip('Syncing (1)', () => { }) - }) -}) diff --git a/rust/automerge-wasm/test/test.ts b/rust/automerge-wasm/test/test.ts deleted file mode 100644 index bb4f71e3..00000000 --- a/rust/automerge-wasm/test/test.ts +++ /dev/null @@ -1,2173 +0,0 @@ -import { describe, it } from 'mocha'; -import assert from 'assert' -// @ts-ignore -import { BloomFilter } from './helpers/sync' -import { create, load, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' 
-import { Value, DecodedSyncMessage, Hash } from '..'; -import {kill} from 'process'; - -function sync(a: Automerge, b: Automerge, aSyncState = initSyncState(), bSyncState = initSyncState()) { - const MAX_ITER = 10 - let aToBmsg = null, bToAmsg = null, i = 0 - do { - aToBmsg = a.generateSyncMessage(aSyncState) - bToAmsg = b.generateSyncMessage(bSyncState) - - if (aToBmsg) { - b.receiveSyncMessage(bSyncState, aToBmsg) - } - if (bToAmsg) { - a.receiveSyncMessage(aSyncState, bToAmsg) - } - - if (i++ > MAX_ITER) { - throw new Error(`Did not synchronize within ${MAX_ITER} iterations`) - } - } while (aToBmsg || bToAmsg) -} - -describe('Automerge', () => { - describe('basics', () => { - - it('should create, clone and free', () => { - const doc1 = create(true) - const doc2 = doc1.clone() - doc2.free() - }) - - it('should be able to start and commit', () => { - const doc = create(true) - doc.commit() - }) - - it('getting a nonexistent prop does not throw an error', () => { - const doc = create(true) - const root = "_root" - const result = doc.getWithType(root, "hello") - assert.deepEqual(result, undefined) - }) - - it('should be able to set and get a simple value', () => { - const doc: Automerge = create(true, "aabbcc") - const root = "_root" - let result - - doc.put(root, "hello", "world") - doc.put(root, "number1", 5, "uint") - doc.put(root, "number2", 5) - doc.put(root, "number3", 5.5) - doc.put(root, "number4", 5.5, "f64") - doc.put(root, "number5", 5.5, "int") - doc.put(root, "bool", true) - doc.put(root, "time1", 1000, "timestamp") - doc.put(root, "time2", new Date(1001)) - doc.putObject(root, "list", []); - doc.put(root, "null", null) - - result = doc.getWithType(root, "hello") - assert.deepEqual(result, ["str", "world"]) - assert.deepEqual(doc.get("/", "hello"), "world") - - result = doc.getWithType(root, "number1") - assert.deepEqual(result, ["uint", 5]) - assert.deepEqual(doc.get("/", "number1"), 5) - - result = doc.getWithType(root, "number2") - 
assert.deepEqual(result, ["int", 5]) - - result = doc.getWithType(root, "number3") - assert.deepEqual(result, ["f64", 5.5]) - - result = doc.getWithType(root, "number4") - assert.deepEqual(result, ["f64", 5.5]) - - result = doc.getWithType(root, "number5") - assert.deepEqual(result, ["int", 5]) - - result = doc.getWithType(root, "bool") - assert.deepEqual(result, ["boolean", true]) - - doc.put(root, "bool", false, "boolean") - - result = doc.getWithType(root, "bool") - assert.deepEqual(result, ["boolean", false]) - - result = doc.getWithType(root, "time1") - assert.deepEqual(result, ["timestamp", new Date(1000)]) - - result = doc.getWithType(root, "time2") - assert.deepEqual(result, ["timestamp", new Date(1001)]) - - result = doc.getWithType(root, "list") - assert.deepEqual(result, ["list", "10@aabbcc"]); - - result = doc.getWithType(root, "null") - assert.deepEqual(result, ["null", null]); - }) - - it('should be able to use bytes', () => { - const doc = create(true) - doc.put("_root", "data1", new Uint8Array([10, 11, 12])); - doc.put("_root", "data2", new Uint8Array([13, 14, 15]), "bytes"); - const value1 = doc.getWithType("_root", "data1") - assert.deepEqual(value1, ["bytes", new Uint8Array([10, 11, 12])]); - const value2 = doc.getWithType("_root", "data2") - assert.deepEqual(value2, ["bytes", new Uint8Array([13, 14, 15])]); - }) - - it('should be able to make subobjects', () => { - const doc = create(true) - const root = "_root" - let result - - const submap = doc.putObject(root, "submap", {}) - doc.put(submap, "number", 6, "uint") - assert.strictEqual(doc.pendingOps(), 2) - - result = doc.getWithType(root, "submap") - assert.deepEqual(result, ["map", submap]) - - result = doc.getWithType(submap, "number") - assert.deepEqual(result, ["uint", 6]) - }) - - it('should be able to make lists', () => { - const doc = create(true) - const root = "_root" - - const sublist = doc.putObject(root, "numbers", []) - doc.insert(sublist, 0, "a"); - doc.insert(sublist, 1, "b"); - 
doc.insert(sublist, 2, "c"); - doc.insert(sublist, 0, "z"); - - assert.deepEqual(doc.getWithType(sublist, 0), ["str", "z"]) - assert.deepEqual(doc.getWithType(sublist, 1), ["str", "a"]) - assert.deepEqual(doc.getWithType(sublist, 2), ["str", "b"]) - assert.deepEqual(doc.getWithType(sublist, 3), ["str", "c"]) - assert.deepEqual(doc.length(sublist), 4) - - doc.put(sublist, 2, "b v2"); - - assert.deepEqual(doc.getWithType(sublist, 2), ["str", "b v2"]) - assert.deepEqual(doc.length(sublist), 4) - }) - - it('lists have insert, set, splice, and push ops', () => { - const doc = create(true) - const root = "_root" - - const sublist = doc.putObject(root, "letters", []) - doc.insert(sublist, 0, "a"); - doc.insert(sublist, 0, "b"); - assert.deepEqual(doc.materialize(), { letters: ["b", "a"] }) - doc.push(sublist, "c"); - const heads = doc.getHeads() - assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c"] }) - doc.push(sublist, 3, "timestamp"); - assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c", new Date(3)] }) - doc.splice(sublist, 1, 1, ["d", "e", "f"]); - assert.deepEqual(doc.materialize(), { letters: ["b", "d", "e", "f", "c", new Date(3)] }) - doc.put(sublist, 0, "z"); - assert.deepEqual(doc.materialize(), { letters: ["z", "d", "e", "f", "c", new Date(3)] }) - assert.deepEqual(doc.materialize(sublist), ["z", "d", "e", "f", "c", new Date(3)]) - assert.deepEqual(doc.length(sublist), 6) - assert.deepEqual(doc.materialize("/", heads), { letters: ["b", "a", "c"] }) - }) - - it('should be able delete non-existent props', () => { - const doc = create(true) - - doc.put("_root", "foo", "bar") - doc.put("_root", "bip", "bap") - const hash1 = doc.commit() - - assert.deepEqual(doc.keys("_root"), ["bip", "foo"]) - - doc.delete("_root", "foo") - doc.delete("_root", "baz") - const hash2 = doc.commit() - - assert.deepEqual(doc.keys("_root"), ["bip"]) - assert.ok(hash1) - assert.deepEqual(doc.keys("_root", [hash1]), ["bip", "foo"]) - assert.ok(hash2) - 
assert.deepEqual(doc.keys("_root", [hash2]), ["bip"]) - }) - - it('should be able to del', () => { - const doc = create(true) - const root = "_root" - - doc.put(root, "xxx", "xxx"); - assert.deepEqual(doc.getWithType(root, "xxx"), ["str", "xxx"]) - doc.delete(root, "xxx"); - assert.deepEqual(doc.getWithType(root, "xxx"), undefined) - }) - - it('should be able to use counters', () => { - const doc = create(true) - const root = "_root" - - doc.put(root, "counter", 10, "counter"); - assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 10]) - doc.increment(root, "counter", 10); - assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 20]) - doc.increment(root, "counter", -5); - assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 15]) - }) - - it('should be able to splice text', () => { - const doc = create(true) - const root = "_root"; - - const text = doc.putObject(root, "text", ""); - doc.splice(text, 0, 0, "hello ") - doc.splice(text, 6, 0, "world") - doc.splice(text, 11, 0, "!?") - assert.deepEqual(doc.getWithType(text, 0), ["str", "h"]) - assert.deepEqual(doc.getWithType(text, 1), ["str", "e"]) - assert.deepEqual(doc.getWithType(text, 9), ["str", "l"]) - assert.deepEqual(doc.getWithType(text, 10), ["str", "d"]) - assert.deepEqual(doc.getWithType(text, 11), ["str", "!"]) - assert.deepEqual(doc.getWithType(text, 12), ["str", "?"]) - }) - - it.skip('should NOT be able to insert objects into text', () => { - const doc = create(true) - const text = doc.putObject("/", "text", "Hello world"); - assert.throws(() => { - doc.insertObject(text, 6, { hello: "world" }); - }) - }) - - it('should be able save all or incrementally', () => { - const doc = create(true) - - doc.put("_root", "foo", 1) - - const save1 = doc.save() - - doc.put("_root", "bar", 2) - - const saveMidway = doc.clone().save(); - - const save2 = doc.saveIncremental(); - - doc.put("_root", "baz", 3); - - const save3 = doc.saveIncremental(); - - const saveA = doc.save(); - const 
saveB = new Uint8Array([...save1, ...save2, ...save3]); - - assert.notDeepEqual(saveA, saveB); - - const docA = load(saveA, true); - const docB = load(saveB, true); - const docC = load(saveMidway, true) - docC.loadIncremental(save3) - - assert.deepEqual(docA.keys("_root"), docB.keys("_root")); - assert.deepEqual(docA.save(), docB.save()); - assert.deepEqual(docA.save(), docC.save()); - }) - - it('should be able to splice text', () => { - const doc = create(true) - const text = doc.putObject("_root", "text", ""); - doc.splice(text, 0, 0, "hello world"); - const hash1 = doc.commit(); - doc.splice(text, 6, 0, "big bad "); - const hash2 = doc.commit(); - assert.strictEqual(doc.text(text), "hello big bad world") - assert.strictEqual(doc.length(text), 19) - assert.ok(hash1) - assert.strictEqual(doc.text(text, [hash1]), "hello world") - assert.strictEqual(doc.length(text, [hash1]), 11) - assert.ok(hash2) - assert.strictEqual(doc.text(text, [hash2]), "hello big bad world") - assert.ok(hash2) - assert.strictEqual(doc.length(text, [hash2]), 19) - }) - - it('local inc increments all visible counters in a map', () => { - const doc1 = create(true, "aaaa") - doc1.put("_root", "hello", "world") - const doc2 = load(doc1.save(), true, "bbbb"); - const doc3 = load(doc1.save(), true, "cccc"); - const heads = doc1.getHeads() - doc1.put("_root", "cnt", 20) - doc2.put("_root", "cnt", 0, "counter") - doc3.put("_root", "cnt", 10, "counter") - doc1.applyChanges(doc2.getChanges(heads)) - doc1.applyChanges(doc3.getChanges(heads)) - let result = doc1.getAll("_root", "cnt") - assert.deepEqual(result, [ - ['int', 20, '2@aaaa'], - ['counter', 0, '2@bbbb'], - ['counter', 10, '2@cccc'], - ]) - doc1.increment("_root", "cnt", 5) - result = doc1.getAll("_root", "cnt") - assert.deepEqual(result, [ - ['counter', 5, '2@bbbb'], - ['counter', 15, '2@cccc'], - ]) - - const save1 = doc1.save() - const doc4 = load(save1, true) - assert.deepEqual(doc4.save(), save1); - }) - - it('local inc increments all 
visible counters in a sequence', () => { - const doc1 = create(true, "aaaa") - const seq = doc1.putObject("_root", "seq", []) - doc1.insert(seq, 0, "hello") - const doc2 = load(doc1.save(), true, "bbbb"); - const doc3 = load(doc1.save(), true, "cccc"); - const heads = doc1.getHeads() - doc1.put(seq, 0, 20) - doc2.put(seq, 0, 0, "counter") - doc3.put(seq, 0, 10, "counter") - doc1.applyChanges(doc2.getChanges(heads)) - doc1.applyChanges(doc3.getChanges(heads)) - let result = doc1.getAll(seq, 0) - assert.deepEqual(result, [ - ['int', 20, '3@aaaa'], - ['counter', 0, '3@bbbb'], - ['counter', 10, '3@cccc'], - ]) - doc1.increment(seq, 0, 5) - result = doc1.getAll(seq, 0) - assert.deepEqual(result, [ - ['counter', 5, '3@bbbb'], - ['counter', 15, '3@cccc'], - ]) - - const save = doc1.save() - const doc4 = load(save, true) - assert.deepEqual(doc4.save(), save); - }) - - it('paths can be used instead of objids', () => { - const doc = create(true, "aaaa") - doc.putObject("_root", "list", [{ foo: "bar" }, [1, 2, 3]]) - assert.deepEqual(doc.materialize("/"), { list: [{ foo: "bar" }, [1, 2, 3]] }) - assert.deepEqual(doc.materialize("/list"), [{ foo: "bar" }, [1, 2, 3]]) - assert.deepEqual(doc.materialize("/list/0"), { foo: "bar" }) - }) - - it('should be able to fetch changes by hash', () => { - const doc1 = create(true, "aaaa") - const doc2 = create(true, "bbbb") - doc1.put("/", "a", "b") - doc2.put("/", "b", "c") - const head1 = doc1.getHeads() - const head2 = doc2.getHeads() - const change1 = doc1.getChangeByHash(head1[0]) - const change2 = doc1.getChangeByHash(head2[0]) - assert.deepEqual(change2, null) - if (change1 === null) { throw new RangeError("change1 should not be null") } - assert.deepEqual(decodeChange(change1).hash, head1[0]) - }) - - it('recursive sets are possible', () => { - const doc = create(true, "aaaa") - const l1 = doc.putObject("_root", "list", [{ foo: "bar" }, [1, 2, 3]]) - const l2 = doc.insertObject(l1, 0, { zip: ["a", "b"] }) - doc.putObject("_root", 
"info1", "hello world") // 'text' object - doc.put("_root", "info2", "hello world") // 'str' - const l4 = doc.putObject("_root", "info3", "hello world") - assert.deepEqual(doc.materialize(), { - "list": [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]], - "info1": "hello world", - "info2": "hello world", - "info3": "hello world", - }) - assert.deepEqual(doc.materialize(l2), { zip: ["a", "b"] }) - assert.deepEqual(doc.materialize(l1), [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]]) - assert.deepEqual(doc.materialize(l4), "hello world") - }) - - it('only returns an object id when objects are created', () => { - const doc = create(true, "aaaa") - const r1 = doc.put("_root", "foo", "bar") - const r2 = doc.putObject("_root", "list", []) - const r3 = doc.put("_root", "counter", 10, "counter") - const r4 = doc.increment("_root", "counter", 1) - const r5 = doc.delete("_root", "counter") - const r6 = doc.insert(r2, 0, 10); - const r7 = doc.insertObject(r2, 0, {}); - const r8 = doc.splice(r2, 1, 0, ["a", "b", "c"]); - //let r9 = doc.splice(r2,1,0,["a",[],{},"d"]); - assert.deepEqual(r1, null); - assert.deepEqual(r2, "2@aaaa"); - assert.deepEqual(r3, null); - assert.deepEqual(r4, null); - assert.deepEqual(r5, null); - assert.deepEqual(r6, null); - assert.deepEqual(r7, "7@aaaa"); - assert.deepEqual(r8, null); - //assert.deepEqual(r9,["12@aaaa","13@aaaa"]); - }) - - it('objects without properties are preserved', () => { - const doc1 = create(true, "aaaa") - const a = doc1.putObject("_root", "a", {}); - const b = doc1.putObject("_root", "b", {}); - const c = doc1.putObject("_root", "c", {}); - doc1.put(c, "d", "dd"); - const saved = doc1.save(); - const doc2 = load(saved, true); - assert.deepEqual(doc2.getWithType("_root", "a"), ["map", a]) - assert.deepEqual(doc2.keys(a), []) - assert.deepEqual(doc2.getWithType("_root", "b"), ["map", b]) - assert.deepEqual(doc2.keys(b), []) - assert.deepEqual(doc2.getWithType("_root", "c"), ["map", c]) - assert.deepEqual(doc2.keys(c), ["d"]) - 
assert.deepEqual(doc2.getWithType(c, "d"), ["str", "dd"]) - }) - - it('should allow you to fork at a heads', () => { - const A = create(true, "aaaaaa") - A.put("/", "key1", "val1"); - A.put("/", "key2", "val2"); - const heads1 = A.getHeads(); - const B = A.fork("bbbbbb") - A.put("/", "key3", "val3"); - B.put("/", "key4", "val4"); - A.merge(B) - const heads2 = A.getHeads(); - A.put("/", "key5", "val5"); - assert.deepEqual(A.fork(undefined, heads1).materialize("/"), A.materialize("/", heads1)) - assert.deepEqual(A.fork(undefined, heads2).materialize("/"), A.materialize("/", heads2)) - }) - - it('should handle merging text conflicts then saving & loading', () => { - const A = create(true, "aabbcc") - const At = A.putObject('_root', 'text', "") - A.splice(At, 0, 0, 'hello') - - const B = A.fork() - - assert.deepEqual(B.getWithType("_root", "text"), ["text", At]) - - B.splice(At, 4, 1) - B.splice(At, 4, 0, '!') - B.splice(At, 5, 0, ' ') - B.splice(At, 6, 0, 'world') - - A.merge(B) - - const binary = A.save() - - const C = load(binary, true) - - assert.deepEqual(C.getWithType('_root', 'text'), ['text', '1@aabbcc']) - assert.deepEqual(C.text(At), 'hell! 
world') - }) - }) - - describe('patch generation', () => { - it('should include root object key updates', () => { - const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') - doc1.put('_root', 'hello', 'world') - doc2.enablePatches(true) - doc2.loadIncremental(doc1.saveIncremental()) - assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: ['hello'], value: 'world' } - ]) - }) - - it('should include nested object creation', () => { - const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') - doc1.putObject('_root', 'birds', { friday: { robins: 3 } }) - doc2.enablePatches(true) - doc2.loadIncremental(doc1.saveIncremental()) - assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: [ 'birds' ], value: {} }, - { action: 'put', path: [ 'birds', 'friday' ], value: {} }, - { action: 'put', path: [ 'birds', 'friday', 'robins' ], value: 3}, - ]) - }) - - it('should delete map keys', () => { - const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') - doc1.put('_root', 'favouriteBird', 'Robin') - doc2.enablePatches(true) - doc2.loadIncremental(doc1.saveIncremental()) - doc1.delete('_root', 'favouriteBird') - doc2.loadIncremental(doc1.saveIncremental()) - assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: [ 'favouriteBird' ], value: 'Robin' }, - { action: 'del', path: [ 'favouriteBird' ] } - ]) - }) - - it('should include list element insertion', () => { - const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') - doc1.putObject('_root', 'birds', ['Goldfinch', 'Chaffinch']) - doc2.enablePatches(true) - doc2.loadIncremental(doc1.saveIncremental()) - assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: [ 'birds' ], value: [] }, - { action: 'insert', path: [ 'birds', 0 ], values: ['Goldfinch', 'Chaffinch'] }, - ]) - }) - - it('should insert nested maps into a list', () => { - const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') - doc1.putObject('_root', 'birds', []) - 
doc2.loadIncremental(doc1.saveIncremental()) - doc1.insertObject('1@aaaa', 0, { species: 'Goldfinch', count: 3 }) - doc2.enablePatches(true) - doc2.loadIncremental(doc1.saveIncremental()) - assert.deepEqual(doc2.popPatches(), [ - { action: 'insert', path: [ 'birds', 0 ], values: [{}] }, - { action: 'put', path: [ 'birds', 0, 'species' ], value: 'Goldfinch' }, - { action: 'put', path: [ 'birds', 0, 'count', ], value: 3 } - ]) - }) - - it('should calculate list indexes based on visible elements', () => { - const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') - doc1.putObject('_root', 'birds', ['Goldfinch', 'Chaffinch']) - doc2.loadIncremental(doc1.saveIncremental()) - doc1.delete('1@aaaa', 0) - doc1.insert('1@aaaa', 1, 'Greenfinch') - doc2.enablePatches(true) - doc2.loadIncremental(doc1.saveIncremental()) - assert.deepEqual(doc1.getWithType('1@aaaa', 0), ['str', 'Chaffinch']) - assert.deepEqual(doc1.getWithType('1@aaaa', 1), ['str', 'Greenfinch']) - assert.deepEqual(doc2.popPatches(), [ - { action: 'del', path: ['birds', 0] }, - { action: 'insert', path: ['birds', 1], values: ['Greenfinch'] } - ]) - }) - - it('should handle concurrent insertions at the head of a list', () => { - const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc'), doc4 = create(true, 'dddd') - doc1.putObject('_root', 'values', []) - const change1 = doc1.saveIncremental() - doc2.loadIncremental(change1) - doc3.loadIncremental(change1) - doc4.loadIncremental(change1) - doc1.insert('1@aaaa', 0, 'c') - doc1.insert('1@aaaa', 1, 'd') - doc2.insert('1@aaaa', 0, 'a') - doc2.insert('1@aaaa', 1, 'b') - const change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() - doc3.enablePatches(true) - doc4.enablePatches(true) - doc3.loadIncremental(change2); doc3.loadIncremental(change3) - doc4.loadIncremental(change3); doc4.loadIncremental(change2) - assert.deepEqual([0, 1, 2, 3].map(i => (doc3.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) - 
assert.deepEqual([0, 1, 2, 3].map(i => (doc4.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) - assert.deepEqual(doc3.popPatches(), [ - { action: 'insert', path: ['values', 0], values:['a','b','c','d'] }, - ]) - assert.deepEqual(doc4.popPatches(), [ - { action: 'insert', path: ['values',0], values:['a','b','c','d'] }, - ]) - }) - - it('should handle concurrent insertions beyond the head', () => { - const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc'), doc4 = create(true, 'dddd') - doc1.putObject('_root', 'values', ['a', 'b']) - const change1 = doc1.saveIncremental() - doc2.loadIncremental(change1) - doc3.loadIncremental(change1) - doc4.loadIncremental(change1) - doc1.insert('1@aaaa', 2, 'e') - doc1.insert('1@aaaa', 3, 'f') - doc2.insert('1@aaaa', 2, 'c') - doc2.insert('1@aaaa', 3, 'd') - const change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() - doc3.enablePatches(true) - doc4.enablePatches(true) - doc3.loadIncremental(change2); doc3.loadIncremental(change3) - doc4.loadIncremental(change3); doc4.loadIncremental(change2) - assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc3.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f']) - assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc4.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f']) - assert.deepEqual(doc3.popPatches(), [ - { action: 'insert', path: ['values', 2], values: ['c','d','e','f'] }, - ]) - assert.deepEqual(doc4.popPatches(), [ - { action: 'insert', path: ['values', 2], values: ['c','d','e','f'] }, - ]) - }) - - it('should handle conflicts on root object keys', () => { - const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc'), doc4 = create(true, 'dddd') - doc1.put('_root', 'bird', 'Greenfinch') - doc2.put('_root', 'bird', 'Goldfinch') - const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() - doc3.enablePatches(true) - doc4.enablePatches(true) - 
doc3.loadIncremental(change1); doc3.loadIncremental(change2) - doc4.loadIncremental(change2); doc4.loadIncremental(change1) - assert.deepEqual(doc3.getWithType('_root', 'bird'), ['str', 'Goldfinch']) - assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Greenfinch', '1@aaaa'], ['str', 'Goldfinch', '1@bbbb']]) - assert.deepEqual(doc4.getWithType('_root', 'bird'), ['str', 'Goldfinch']) - assert.deepEqual(doc4.getAll('_root', 'bird'), [['str', 'Greenfinch', '1@aaaa'], ['str', 'Goldfinch', '1@bbbb']]) - assert.deepEqual(doc3.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Greenfinch' }, - { action: 'put', path: ['bird'], value: 'Goldfinch' }, - ]) - assert.deepEqual(doc4.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Goldfinch' }, - ]) - }) - - it('should handle three-way conflicts', () => { - const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc') - doc1.put('_root', 'bird', 'Greenfinch') - doc2.put('_root', 'bird', 'Chaffinch') - doc3.put('_root', 'bird', 'Goldfinch') - const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental(), change3 = doc3.saveIncremental() - doc1.enablePatches(true) - doc2.enablePatches(true) - doc3.enablePatches(true) - doc1.loadIncremental(change2); doc1.loadIncremental(change3) - doc2.loadIncremental(change3); doc2.loadIncremental(change1) - doc3.loadIncremental(change1); doc3.loadIncremental(change2) - assert.deepEqual(doc1.getWithType('_root', 'bird'), ['str', 'Goldfinch']) - assert.deepEqual(doc1.getAll('_root', 'bird'), [ - ['str', 'Greenfinch', '1@aaaa'], ['str', 'Chaffinch', '1@bbbb'], ['str', 'Goldfinch', '1@cccc'] - ]) - assert.deepEqual(doc2.getWithType('_root', 'bird'), ['str', 'Goldfinch']) - assert.deepEqual(doc2.getAll('_root', 'bird'), [ - ['str', 'Greenfinch', '1@aaaa'], ['str', 'Chaffinch', '1@bbbb'], ['str', 'Goldfinch', '1@cccc'] - ]) - assert.deepEqual(doc3.getWithType('_root', 'bird'), ['str', 'Goldfinch']) - assert.deepEqual(doc3.getAll('_root', 
'bird'), [ - ['str', 'Greenfinch', '1@aaaa'], ['str', 'Chaffinch', '1@bbbb'], ['str', 'Goldfinch', '1@cccc'] - ]) - assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Chaffinch' }, - { action: 'put', path: ['bird'], value: 'Goldfinch' } - ]) - assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Goldfinch' }, - ]) - assert.deepEqual(doc3.popPatches(), [ ]) - }) - - it('should allow a conflict to be resolved', () => { - const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc') - doc1.put('_root', 'bird', 'Greenfinch') - doc2.put('_root', 'bird', 'Chaffinch') - doc3.enablePatches(true) - const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() - doc1.loadIncremental(change2); doc3.loadIncremental(change1) - doc2.loadIncremental(change1); doc3.loadIncremental(change2) - doc1.put('_root', 'bird', 'Goldfinch') - doc3.loadIncremental(doc1.saveIncremental()) - assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) - assert.deepEqual(doc3.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Greenfinch' }, - { action: 'put', path: ['bird'], value: 'Chaffinch' }, - { action: 'put', path: ['bird'], value: 'Goldfinch' } - ]) - }) - - it('should handle a concurrent map key overwrite and delete', () => { - const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') - doc1.put('_root', 'bird', 'Greenfinch') - doc2.loadIncremental(doc1.saveIncremental()) - doc1.put('_root', 'bird', 'Goldfinch') - doc2.delete('_root', 'bird') - const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() - doc1.enablePatches(true) - doc2.enablePatches(true) - doc1.loadIncremental(change2) - doc2.loadIncremental(change1) - assert.deepEqual(doc1.getWithType('_root', 'bird'), ['str', 'Goldfinch']) - assert.deepEqual(doc1.getAll('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) - assert.deepEqual(doc2.getWithType('_root', 'bird'), ['str', 
'Goldfinch']) - assert.deepEqual(doc2.getAll('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) - assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Goldfinch' } - ]) - assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Goldfinch' } - ]) - }) - - it('should handle a conflict on a list element', () => { - const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc'), doc4 = create(true, 'dddd') - doc1.putObject('_root', 'birds', ['Thrush', 'Magpie']) - const change1 = doc1.saveIncremental() - doc2.loadIncremental(change1) - doc3.loadIncremental(change1) - doc4.loadIncremental(change1) - doc1.put('1@aaaa', 0, 'Song Thrush') - doc2.put('1@aaaa', 0, 'Redwing') - const change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() - doc3.enablePatches(true) - doc4.enablePatches(true) - doc3.loadIncremental(change2); doc3.loadIncremental(change3) - doc4.loadIncremental(change3); doc4.loadIncremental(change2) - assert.deepEqual(doc3.getWithType('1@aaaa', 0), ['str', 'Redwing']) - assert.deepEqual(doc3.getAll('1@aaaa', 0), [['str', 'Song Thrush', '4@aaaa'], ['str', 'Redwing', '4@bbbb']]) - assert.deepEqual(doc4.getWithType('1@aaaa', 0), ['str', 'Redwing']) - assert.deepEqual(doc4.getAll('1@aaaa', 0), [['str', 'Song Thrush', '4@aaaa'], ['str', 'Redwing', '4@bbbb']]) - assert.deepEqual(doc3.popPatches(), [ - { action: 'put', path: ['birds',0], value: 'Song Thrush' }, - { action: 'put', path: ['birds',0], value: 'Redwing' } - ]) - assert.deepEqual(doc4.popPatches(), [ - { action: 'put', path: ['birds',0], value: 'Redwing' }, - ]) - }) - - it('should handle a concurrent list element overwrite and delete', () => { - const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc'), doc4 = create(true, 'dddd') - doc1.putObject('_root', 'birds', ['Parakeet', 'Magpie', 'Thrush']) - const change1 = doc1.saveIncremental() - doc2.loadIncremental(change1) - 
doc3.loadIncremental(change1) - doc4.loadIncremental(change1) - doc1.delete('1@aaaa', 0) - doc1.put('1@aaaa', 1, 'Song Thrush') - doc2.put('1@aaaa', 0, 'Ring-necked parakeet') - doc2.put('1@aaaa', 2, 'Redwing') - const change2 = doc1.saveIncremental(), change3 = doc2.saveIncremental() - doc3.enablePatches(true) - doc4.enablePatches(true) - doc3.loadIncremental(change2); doc3.loadIncremental(change3) - doc4.loadIncremental(change3); doc4.loadIncremental(change2) - assert.deepEqual(doc3.getAll('1@aaaa', 0), [['str', 'Ring-necked parakeet', '5@bbbb']]) - assert.deepEqual(doc3.getAll('1@aaaa', 2), [['str', 'Song Thrush', '6@aaaa'], ['str', 'Redwing', '6@bbbb']]) - assert.deepEqual(doc4.getAll('1@aaaa', 0), [['str', 'Ring-necked parakeet', '5@bbbb']]) - assert.deepEqual(doc4.getAll('1@aaaa', 2), [['str', 'Song Thrush', '6@aaaa'], ['str', 'Redwing', '6@bbbb']]) - assert.deepEqual(doc3.popPatches(), [ - { action: 'del', path: ['birds',0], }, - { action: 'put', path: ['birds',1], value: 'Song Thrush' }, - { action: 'insert', path: ['birds',0], values: ['Ring-necked parakeet'] }, - { action: 'put', path: ['birds',2], value: 'Redwing' } - ]) - assert.deepEqual(doc4.popPatches(), [ - { action: 'put', path: ['birds',0], value: 'Ring-necked parakeet' }, - { action: 'put', path: ['birds',2], value: 'Redwing' }, - { action: 'put', path: ['birds',0], value: 'Ring-necked parakeet' }, - ]) - }) - - it('should handle deletion of a conflict value', () => { - const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc') - doc1.put('_root', 'bird', 'Robin') - doc2.put('_root', 'bird', 'Wren') - const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() - doc2.delete('_root', 'bird') - const change3 = doc2.saveIncremental() - doc3.enablePatches(true) - doc3.loadIncremental(change1) - doc3.loadIncremental(change2) - assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Robin', '1@aaaa'], ['str', 'Wren', '1@bbbb']]) - 
assert.deepEqual(doc3.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Robin' }, - { action: 'put', path: ['bird'], value: 'Wren' } - ]) - doc3.loadIncremental(change3) - assert.deepEqual(doc3.getWithType('_root', 'bird'), ['str', 'Robin']) - assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Robin', '1@aaaa']]) - assert.deepEqual(doc3.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Robin' } - ]) - }) - - it('should handle conflicting nested objects', () => { - const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') - doc1.putObject('_root', 'birds', ['Parakeet']) - doc2.putObject('_root', 'birds', { 'Sparrowhawk': 1 }) - const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() - doc1.enablePatches(true) - doc2.enablePatches(true) - doc1.loadIncremental(change2) - doc2.loadIncremental(change1) - assert.deepEqual(doc1.getAll('_root', 'birds'), [['list', '1@aaaa'], ['map', '1@bbbb']]) - assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['birds'], value: {} }, - { action: 'put', path: ['birds', 'Sparrowhawk'], value: 1 } - ]) - assert.deepEqual(doc2.getAll('_root', 'birds'), [['list', '1@aaaa'], ['map', '1@bbbb']]) - assert.deepEqual(doc2.popPatches(), []) - }) - - it('should support date objects', () => { - const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), now = new Date() - doc1.put('_root', 'createdAt', now) - doc2.enablePatches(true) - doc2.loadIncremental(doc1.saveIncremental()) - assert.deepEqual(doc2.getWithType('_root', 'createdAt'), ['timestamp', now]) - assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: ['createdAt'], value: now } - ]) - }) - - it('should capture local put ops', () => { - const doc1 = create(true, 'aaaa') - doc1.enablePatches(true) - doc1.put('_root', 'key1', 1) - doc1.put('_root', 'key1', 2) - doc1.put('_root', 'key2', 3) - doc1.putObject('_root', 'map', {}) - doc1.putObject('_root', 'list', []) - - assert.deepEqual(doc1.popPatches(), [ - { action: 
'put', path: ['key1'], value: 1 }, - { action: 'put', path: ['key1'], value: 2 }, - { action: 'put', path: ['key2'], value: 3 }, - { action: 'put', path: ['map'], value: {} }, - { action: 'put', path: ['list'], value: [] }, - ]) - }) - - it('should capture local insert ops', () => { - const doc1 = create(true, 'aaaa') - doc1.enablePatches(true) - const list = doc1.putObject('_root', 'list', []) - doc1.insert(list, 0, 1) - doc1.insert(list, 0, 2) - doc1.insert(list, 2, 3) - doc1.insertObject(list, 2, {}) - doc1.insertObject(list, 2, []) - - assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['list'], value: [] }, - { action: 'insert', path: ['list', 0], values: [2,1,[],{},3] }, - ]) - }) - - it('should capture local push ops', () => { - const doc1 = create(true, 'aaaa') - doc1.enablePatches(true) - const list = doc1.putObject('_root', 'list', []) - doc1.push(list, 1) - doc1.pushObject(list, {}) - doc1.pushObject(list, []) - - assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['list'], value: [] }, - { action: 'insert', path: ['list',0], values: [1,{},[]] }, - ]) - }) - - it('should capture local splice ops', () => { - const doc1 = create(true, 'aaaa') - doc1.enablePatches(true) - const list = doc1.putObject('_root', 'list', []) - doc1.splice(list, 0, 0, [1, 2, 3, 4]) - doc1.splice(list, 1, 2) - - assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['list'], value: [] }, - { action: 'insert', path: ['list',0], values: [1,4] }, - ]) - }) - - it('should capture local increment ops', () => { - const doc1 = create(true, 'aaaa') - doc1.enablePatches(true) - doc1.put('_root', 'counter', 2, 'counter') - doc1.increment('_root', 'counter', 4) - - assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['counter'], value: 2 }, - { action: 'inc', path: ['counter'], value: 4 }, - ]) - }) - - - it('should capture local delete ops', () => { - const doc1 = create(true, 'aaaa') - doc1.enablePatches(true) - doc1.put('_root', 'key1', 1) - 
doc1.put('_root', 'key2', 2) - doc1.delete('_root', 'key1') - doc1.delete('_root', 'key2') - assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['key1'], value: 1 }, - { action: 'put', path: ['key2'], value: 2 }, - { action: 'del', path: ['key1'], }, - { action: 'del', path: ['key2'], }, - ]) - }) - - it('should support counters in a map', () => { - const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') - doc2.enablePatches(true) - doc1.put('_root', 'starlings', 2, 'counter') - doc2.loadIncremental(doc1.saveIncremental()) - doc1.increment('_root', 'starlings', 1) - doc2.loadIncremental(doc1.saveIncremental()) - assert.deepEqual(doc2.getWithType('_root', 'starlings'), ['counter', 3]) - assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: ['starlings'], value: 2 }, - { action: 'inc', path: ['starlings'], value: 1 } - ]) - }) - - it('should support counters in a list', () => { - const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') - doc2.enablePatches(true) - const list = doc1.putObject('_root', 'list', []) - doc2.loadIncremental(doc1.saveIncremental()) - doc1.insert(list, 0, 1, 'counter') - doc2.loadIncremental(doc1.saveIncremental()) - doc1.increment(list, 0, 2) - doc2.loadIncremental(doc1.saveIncremental()) - doc1.increment(list, 0, -5) - doc2.loadIncremental(doc1.saveIncremental()) - - assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: ['list'], value: [] }, - { action: 'insert', path: ['list',0], values: [1] }, - { action: 'inc', path: ['list',0], value: 2 }, - { action: 'inc', path: ['list',0], value: -5 }, - ]) - }) - - it('should delete a counter from a map') // TODO - }) - - describe('sync', () => { - it('should send a sync message implying no local data', () => { - const doc = create(true) - const s1 = initSyncState() - const m1 = doc.generateSyncMessage(s1) - if (m1 === null) { throw new RangeError("message should not be null") } - const message: DecodedSyncMessage = decodeSyncMessage(m1) - 
assert.deepStrictEqual(message.heads, []) - assert.deepStrictEqual(message.need, []) - assert.deepStrictEqual(message.have.length, 1) - assert.deepStrictEqual(message.have[0].lastSync, []) - assert.deepStrictEqual(message.have[0].bloom.byteLength, 0) - assert.deepStrictEqual(message.changes, []) - }) - - it('should not reply if we have no data as well', () => { - const n1 = create(true), n2 = create(true) - const s1 = initSyncState(), s2 = initSyncState() - const m1 = n1.generateSyncMessage(s1) - if (m1 === null) { throw new RangeError("message should not be null") } - n2.receiveSyncMessage(s2, m1) - const m2 = n2.generateSyncMessage(s2) - assert.deepStrictEqual(m2, null) - }) - - it('repos with equal heads do not need a reply message', () => { - const n1 = create(true), n2 = create(true) - const s1 = initSyncState(), s2 = initSyncState() - - // make two nodes with the same changes - const list = n1.putObject("_root", "n", []) - n1.commit("", 0) - for (let i = 0; i < 10; i++) { - n1.insert(list, i, i) - n1.commit("", 0) - } - n2.applyChanges(n1.getChanges([])) - assert.deepStrictEqual(n1.materialize(), n2.materialize()) - - // generate a naive sync message - const m1 = n1.generateSyncMessage(s1) - if (m1 === null) { throw new RangeError("message should not be null") } - assert.deepStrictEqual(s1.lastSentHeads, n1.getHeads()) - - // heads are equal so this message should be null - n2.receiveSyncMessage(s2, m1) - const m2 = n2.generateSyncMessage(s2) - assert.strictEqual(m2, null) - }) - - it('n1 should offer all changes to n2 when starting from nothing', () => { - const n1 = create(true), n2 = create(true) - - // make changes for n1 that n2 should request - const list = n1.putObject("_root", "n", []) - n1.commit("", 0) - for (let i = 0; i < 10; i++) { - n1.insert(list, i, i) - n1.commit("", 0) - } - - assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) - sync(n1, n2) - assert.deepStrictEqual(n1.materialize(), n2.materialize()) - }) - - it('should sync 
peers where one has commits the other does not', () => { - const n1 = create(true), n2 = create(true) - - // make changes for n1 that n2 should request - const list = n1.putObject("_root", "n", []) - n1.commit("", 0) - for (let i = 0; i < 10; i++) { - n1.insert(list, i, i) - n1.commit("", 0) - } - - assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) - sync(n1, n2) - assert.deepStrictEqual(n1.materialize(), n2.materialize()) - }) - - it('should work with prior sync state', () => { - // create & synchronize two nodes - const n1 = create(true), n2 = create(true) - const s1 = initSyncState(), s2 = initSyncState() - - for (let i = 0; i < 5; i++) { - n1.put("_root", "x", i) - n1.commit("", 0) - } - - sync(n1, n2, s1, s2) - - // modify the first node further - for (let i = 5; i < 10; i++) { - n1.put("_root", "x", i) - n1.commit("", 0) - } - - assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) - sync(n1, n2, s1, s2) - assert.deepStrictEqual(n1.materialize(), n2.materialize()) - }) - - it('should not generate messages once synced', () => { - // create & synchronize two nodes - const n1 = create(true, 'abc123'), n2 = create(true, 'def456') - const s1 = initSyncState(), s2 = initSyncState() - - let message - for (let i = 0; i < 5; i++) { - n1.put("_root", "x", i) - n1.commit("", 0) - } - for (let i = 0; i < 5; i++) { - n2.put("_root", "y", i) - n2.commit("", 0) - } - - // n1 reports what it has - message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be null") } - - // n2 receives that message and sends changes along with what it has - n2.receiveSyncMessage(s2, message) - message = n2.generateSyncMessage(s2) - if (message === null) { throw new RangeError("message should not be null") } - assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) - //assert.deepStrictEqual(patch, null) // no changes arrived - - // n1 receives the changes and replies with the changes it now knows that n2 needs - 
n1.receiveSyncMessage(s1, message) - message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be null") } - assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) - - // n2 applies the changes and sends confirmation ending the exchange - n2.receiveSyncMessage(s2, message) - message = n2.generateSyncMessage(s2) - if (message === null) { throw new RangeError("message should not be null") } - - // n1 receives the message and has nothing more to say - n1.receiveSyncMessage(s1, message) - message = n1.generateSyncMessage(s1) - assert.deepStrictEqual(message, null) - //assert.deepStrictEqual(patch, null) // no changes arrived - - // n2 also has nothing left to say - message = n2.generateSyncMessage(s2) - assert.deepStrictEqual(message, null) - }) - - it('should allow simultaneous messages during synchronization', () => { - // create & synchronize two nodes - const n1 = create(true, 'abc123'), n2 = create(true, 'def456') - const s1 = initSyncState(), s2 = initSyncState() - - for (let i = 0; i < 5; i++) { - n1.put("_root", "x", i) - n1.commit("", 0) - } - for (let i = 0; i < 5; i++) { - n2.put("_root", "y", i) - n2.commit("", 0) - } - - const head1 = n1.getHeads()[0], head2 = n2.getHeads()[0] - - // both sides report what they have but have no shared peer state - let msg1to2, msg2to1 - msg1to2 = n1.generateSyncMessage(s1) - msg2to1 = n2.generateSyncMessage(s2) - if (msg1to2 === null) { throw new RangeError("message should not be null") } - if (msg2to1 === null) { throw new RangeError("message should not be null") } - assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) - assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync.length, 0) - assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) - assert.deepStrictEqual(decodeSyncMessage(msg2to1).have[0].lastSync.length, 0) - - // n1 and n2 receive that message and update sync state but make no patch - 
n1.receiveSyncMessage(s1, msg2to1) - n2.receiveSyncMessage(s2, msg1to2) - - // now both reply with their local changes the other lacks - // (standard warning that 1% of the time this will result in a "need" message) - msg1to2 = n1.generateSyncMessage(s1) - if (msg1to2 === null) { throw new RangeError("message should not be null") } - assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 5) - msg2to1 = n2.generateSyncMessage(s2) - if (msg2to1 === null) { throw new RangeError("message should not be null") } - assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 5) - - // both should now apply the changes and update the frontend - n1.receiveSyncMessage(s1, msg2to1) - assert.deepStrictEqual(n1.getMissingDeps(), []) - //assert.notDeepStrictEqual(patch1, null) - assert.deepStrictEqual(n1.materialize(), { x: 4, y: 4 }) - - n2.receiveSyncMessage(s2, msg1to2) - assert.deepStrictEqual(n2.getMissingDeps(), []) - //assert.notDeepStrictEqual(patch2, null) - assert.deepStrictEqual(n2.materialize(), { x: 4, y: 4 }) - - // The response acknowledges the changes received and sends no further changes - msg1to2 = n1.generateSyncMessage(s1) - if (msg1to2 === null) { throw new RangeError("message should not be null") } - assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) - msg2to1 = n2.generateSyncMessage(s2) - if (msg2to1 === null) { throw new RangeError("message should not be null") } - assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) - - // After receiving acknowledgements, their shared heads should be equal - n1.receiveSyncMessage(s1, msg2to1) - n2.receiveSyncMessage(s2, msg1to2) - assert.deepStrictEqual(s1.sharedHeads, [head1, head2].sort()) - assert.deepStrictEqual(s2.sharedHeads, [head1, head2].sort()) - //assert.deepStrictEqual(patch1, null) - //assert.deepStrictEqual(patch2, null) - - // We're in sync, no more messages required - msg1to2 = n1.generateSyncMessage(s1) - msg2to1 = n2.generateSyncMessage(s2) - 
assert.deepStrictEqual(msg1to2, null) - assert.deepStrictEqual(msg2to1, null) - - // If we make one more change and start another sync then its lastSync should be updated - n1.put("_root", "x", 5) - msg1to2 = n1.generateSyncMessage(s1) - if (msg1to2 === null) { throw new RangeError("message should not be null") } - assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync, [head1, head2].sort()) - }) - - it('should assume sent changes were received until we hear otherwise', () => { - const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') - const s1 = initSyncState(), s2 = initSyncState() - let message = null - - const items = n1.putObject("_root", "items", []) - n1.commit("", 0) - - sync(n1, n2, s1, s2) - - n1.push(items, "x") - n1.commit("", 0) - message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be null") } - assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) - - n1.push(items, "y") - n1.commit("", 0) - message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be null") } - assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) - - n1.push(items, "z") - n1.commit("", 0) - - message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be null") } - assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) - }) - - it('should work regardless of who initiates the exchange', () => { - // create & synchronize two nodes - const n1 = create(true), n2 = create(true) - const s1 = initSyncState(), s2 = initSyncState() - - for (let i = 0; i < 5; i++) { - n1.put("_root", "x", i) - n1.commit("", 0) - } - - sync(n1, n2, s1, s2) - - // modify the first node further - for (let i = 5; i < 10; i++) { - n1.put("_root", "x", i) - n1.commit("", 0) - } - - assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) - sync(n1, n2, s1, s2) - assert.deepStrictEqual(n1.materialize(), 
n2.materialize()) - }) - - it('should work without prior sync state', () => { - // Scenario: ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 - // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ - // `-- c15 <-- c16 <-- c17 - // lastSync is undefined. - - // create two peers both with divergent commits - const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') - //const s1 = initSyncState(), s2 = initSyncState() - - for (let i = 0; i < 10; i++) { - n1.put("_root", "x", i) - n1.commit("", 0) - } - - sync(n1, n2) - - for (let i = 10; i < 15; i++) { - n1.put("_root", "x", i) - n1.commit("", 0) - } - - for (let i = 15; i < 18; i++) { - n2.put("_root", "x", i) - n2.commit("", 0) - } - - assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) - sync(n1, n2) - assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - assert.deepStrictEqual(n1.materialize(), n2.materialize()) - }) - - it('should work with prior sync state', () => { - // Scenario: ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 - // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ - // `-- c15 <-- c16 <-- c17 - // lastSync is c9. 
- - // create two peers both with divergent commits - const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') - let s1 = initSyncState(), s2 = initSyncState() - - for (let i = 0; i < 10; i++) { - n1.put("_root", "x", i) - n1.commit("", 0) - } - - sync(n1, n2, s1, s2) - - for (let i = 10; i < 15; i++) { - n1.put("_root", "x", i) - n1.commit("", 0) - } - for (let i = 15; i < 18; i++) { - n2.put("_root", "x", i) - n2.commit("", 0) - } - - s1 = decodeSyncState(encodeSyncState(s1)) - s2 = decodeSyncState(encodeSyncState(s2)) - - assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) - sync(n1, n2, s1, s2) - assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - assert.deepStrictEqual(n1.materialize(), n2.materialize()) - }) - - it('should ensure non-empty state after sync', () => { - const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') - const s1 = initSyncState(), s2 = initSyncState() - - for (let i = 0; i < 3; i++) { - n1.put("_root", "x", i) - n1.commit("", 0) - } - - sync(n1, n2, s1, s2) - - assert.deepStrictEqual(s1.sharedHeads, n1.getHeads()) - assert.deepStrictEqual(s2.sharedHeads, n1.getHeads()) - }) - - it('should re-sync after one node crashed with data loss', () => { - // Scenario: (r) (n2) (n1) - // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 - // n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync is c2. 
- // we want to successfully sync (n1) with (r), even though (n1) believes it's talking to (n2) - const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') - let s1 = initSyncState() - const s2 = initSyncState() - - // n1 makes three changes, which we sync to n2 - for (let i = 0; i < 3; i++) { - n1.put("_root", "x", i) - n1.commit("", 0) - } - - sync(n1, n2, s1, s2) - - // save a copy of n2 as "r" to simulate recovering from a crash - let r - let rSyncState - ;[r, rSyncState] = [n2.clone(), s2.clone()] - - // sync another few commits - for (let i = 3; i < 6; i++) { - n1.put("_root", "x", i) - n1.commit("", 0) - } - - sync(n1, n2, s1, s2) - - // everyone should be on the same page here - assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - assert.deepStrictEqual(n1.materialize(), n2.materialize()) - - // now make a few more changes and then attempt to sync the fully-up-to-date n1 with the confused r - for (let i = 6; i < 9; i++) { - n1.put("_root", "x", i) - n1.commit("", 0) - } - - s1 = decodeSyncState(encodeSyncState(s1)) - rSyncState = decodeSyncState(encodeSyncState(rSyncState)) - - assert.notDeepStrictEqual(n1.getHeads(), r.getHeads()) - assert.notDeepStrictEqual(n1.materialize(), r.materialize()) - assert.deepStrictEqual(n1.materialize(), { x: 8 }) - assert.deepStrictEqual(r.materialize(), { x: 2 }) - sync(n1, r, s1, rSyncState) - assert.deepStrictEqual(n1.getHeads(), r.getHeads()) - assert.deepStrictEqual(n1.materialize(), r.materialize()) - r = null - }) - - it('should re-sync after one node experiences data loss without disconnecting', () => { - const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') - const s1 = initSyncState(), s2 = initSyncState() - - // n1 makes three changes, which we sync to n2 - for (let i = 0; i < 3; i++) { - n1.put("_root", "x", i) - n1.commit("", 0) - } - - sync(n1, n2, s1, s2) - - assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - assert.deepStrictEqual(n1.materialize(), n2.materialize()) - - const 
n2AfterDataLoss = create(true, '89abcdef') - - // "n2" now has no data, but n1 still thinks it does. Note we don't do - // decodeSyncState(encodeSyncState(s1)) in order to simulate data loss without disconnecting - sync(n1, n2AfterDataLoss, s1, initSyncState()) - assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - assert.deepStrictEqual(n1.materialize(), n2.materialize()) - }) - - it('should handle changes concurrent to the last sync heads', () => { - const n1 = create(true, '01234567'), n2 = create(true, '89abcdef'), n3 = create(true, 'fedcba98') - const s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState() - - // Change 1 is known to all three nodes - //n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 1) - n1.put("_root", "x", 1); n1.commit("", 0) - - sync(n1, n2, s12, s21) - sync(n2, n3, s23, s32) - - // Change 2 is known to n1 and n2 - n1.put("_root", "x", 2); n1.commit("", 0) - - sync(n1, n2, s12, s21) - - // Each of the three nodes makes one change (changes 3, 4, 5) - n1.put("_root", "x", 3); n1.commit("", 0) - n2.put("_root", "x", 4); n2.commit("", 0) - n3.put("_root", "x", 5); n3.commit("", 0) - - // Apply n3's latest change to n2. If running in Node, turn the Uint8Array into a Buffer, to - // simulate transmission over a network (see https://github.com/automerge/automerge/pull/362) - let change = n3.getLastLocalChange() - if (change === null) throw new RangeError("no local change") - //ts-ignore - if (typeof Buffer === 'function') change = Buffer.from(change) - if (change === undefined) { throw new RangeError("last local change failed") } - n2.applyChanges([change]) - - // Now sync n1 and n2. 
n3's change is concurrent to n1 and n2's last sync heads - sync(n1, n2, s12, s21) - assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - assert.deepStrictEqual(n1.materialize(), n2.materialize()) - }) - - it('should handle histories with lots of branching and merging', () => { - const n1 = create(true, '01234567'), n2 = create(true, '89abcdef'), n3 = create(true, 'fedcba98') - n1.put("_root", "x", 0); n1.commit("", 0) - const change1 = n1.getLastLocalChange() - if (change1 === null) throw new RangeError("no local change") - n2.applyChanges([change1]) - const change2 = n1.getLastLocalChange() - if (change2 === null) throw new RangeError("no local change") - n3.applyChanges([change2]) - n3.put("_root", "x", 1); n3.commit("", 0) - - // - n1c1 <------ n1c2 <------ n1c3 <-- etc. <-- n1c20 <------ n1c21 - // / \/ \/ \/ - // / /\ /\ /\ - // c0 <---- n2c1 <------ n2c2 <------ n2c3 <-- etc. <-- n2c20 <------ n2c21 - // \ / - // ---------------------------------------------- n3c1 <----- - for (let i = 1; i < 20; i++) { - n1.put("_root", "n1", i); n1.commit("", 0) - n2.put("_root", "n2", i); n2.commit("", 0) - const change1 = n1.getLastLocalChange() - if (change1 === null) throw new RangeError("no local change") - const change2 = n2.getLastLocalChange() - if (change2 === null) throw new RangeError("no local change") - n1.applyChanges([change2]) - n2.applyChanges([change1]) - } - - const s1 = initSyncState(), s2 = initSyncState() - sync(n1, n2, s1, s2) - - // Having n3's last change concurrent to the last sync heads forces us into the slower code path - const change3 = n3.getLastLocalChange() - if (change3 === null) throw new RangeError("no local change") - n2.applyChanges([change3]) - n1.put("_root", "n1", "final"); n1.commit("", 0) - n2.put("_root", "n2", "final"); n2.commit("", 0) - - sync(n1, n2, s1, s2) - assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - assert.deepStrictEqual(n1.materialize(), n2.materialize()) - }) - - it('should handle a false-positive head', () 
=> { - // Scenario: ,-- n1 - // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ - // `-- n2 - // where n2 is a false positive in the Bloom filter containing {n1}. - // lastSync is c9. - let n1 = create(true, '01234567'), n2 = create(true, '89abcdef') - let s1 = initSyncState(), s2 = initSyncState() - - for (let i = 0; i < 10; i++) { - n1.put("_root", "x", i); n1.commit("", 0) - } - - sync(n1, n2, s1, s2) - for (let i = 1; ; i++) { // search for false positive; see comment above - const n1up = n1.clone('01234567'); - n1up.put("_root", "x", `${i} @ n1`); n1up.commit("", 0) - const n2up = n2.clone('89abcdef'); - n2up.put("_root", "x", `${i} @ n2`); n2up.commit("", 0) - if (new BloomFilter(n1up.getHeads()).containsHash(n2up.getHeads()[0])) { - n1 = n1up; n2 = n2up; break - } - } - const allHeads = [...n1.getHeads(), ...n2.getHeads()].sort() - s1 = decodeSyncState(encodeSyncState(s1)) - s2 = decodeSyncState(encodeSyncState(s2)) - sync(n1, n2, s1, s2) - assert.deepStrictEqual(n1.getHeads(), allHeads) - assert.deepStrictEqual(n2.getHeads(), allHeads) - }) - - - describe('with a false-positive dependency', () => { - let n1: Automerge, n2: Automerge, s1: SyncState, s2: SyncState, n1hash2: Hash, n2hash2: Hash - - beforeEach(() => { - // Scenario: ,-- n1c1 <-- n1c2 - // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ - // `-- n2c1 <-- n2c2 - // where n2c1 is a false positive in the Bloom filter containing {n1c1, n1c2}. - // lastSync is c9. 
- n1 = create(true, '01234567') - n2 = create(true, '89abcdef') - s1 = initSyncState() - s2 = initSyncState() - for (let i = 0; i < 10; i++) { - n1.put("_root", "x", i); n1.commit("", 0) - } - sync(n1, n2, s1, s2) - - let n1hash1, n2hash1 - for (let i = 29; ; i++) { // search for false positive; see comment above - const n1us1 = n1.clone('01234567') - n1us1.put("_root", "x", `${i} @ n1`); n1us1.commit("", 0) - - const n2us1 = n2.clone('89abcdef') - n2us1.put("_root", "x", `${i} @ n1`); n2us1.commit("", 0) - - n1hash1 = n1us1.getHeads()[0]; n2hash1 = n2us1.getHeads()[0] - - const n1us2 = n1us1.clone(); - n1us2.put("_root", "x", `final @ n1`); n1us2.commit("", 0) - - const n2us2 = n2us1.clone(); - n2us2.put("_root", "x", `final @ n2`); n2us2.commit("", 0) - - n1hash2 = n1us2.getHeads()[0]; n2hash2 = n2us2.getHeads()[0] - if (new BloomFilter([n1hash1, n1hash2]).containsHash(n2hash1)) { - n1 = n1us2; n2 = n2us2; break - } - } - }) - - it('should sync two nodes without connection reset', () => { - sync(n1, n2, s1, s2) - assert.deepStrictEqual(n1.getHeads(), [n1hash2, n2hash2].sort()) - assert.deepStrictEqual(n2.getHeads(), [n1hash2, n2hash2].sort()) - }) - - it('should sync two nodes with connection reset', () => { - s1 = decodeSyncState(encodeSyncState(s1)) - s2 = decodeSyncState(encodeSyncState(s2)) - sync(n1, n2, s1, s2) - assert.deepStrictEqual(n1.getHeads(), [n1hash2, n2hash2].sort()) - assert.deepStrictEqual(n2.getHeads(), [n1hash2, n2hash2].sort()) - }) - - it('should sync three nodes', () => { - s1 = decodeSyncState(encodeSyncState(s1)) - s2 = decodeSyncState(encodeSyncState(s2)) - - // First n1 and n2 exchange Bloom filters - let m1, m2 - m1 = n1.generateSyncMessage(s1) - m2 = n2.generateSyncMessage(s2) - if (m1 === null) { throw new RangeError("message should not be null") } - if (m2 === null) { throw new RangeError("message should not be null") } - n1.receiveSyncMessage(s1, m2) - n2.receiveSyncMessage(s2, m1) - - // Then n1 and n2 send each other their 
changes, except for the false positive - m1 = n1.generateSyncMessage(s1) - m2 = n2.generateSyncMessage(s2) - if (m1 === null) { throw new RangeError("message should not be null") } - if (m2 === null) { throw new RangeError("message should not be null") } - n1.receiveSyncMessage(s1, m2) - n2.receiveSyncMessage(s2, m1) - assert.strictEqual(decodeSyncMessage(m1).changes.length, 2) // n1c1 and n1c2 - assert.strictEqual(decodeSyncMessage(m2).changes.length, 1) // only n2c2; change n2c1 is not sent - - // n3 is a node that doesn't have the missing change. Nevertheless n1 is going to ask n3 for it - const n3 = create(true, 'fedcba98'), s13 = initSyncState(), s31 = initSyncState() - sync(n1, n3, s13, s31) - assert.deepStrictEqual(n1.getHeads(), [n1hash2]) - assert.deepStrictEqual(n3.getHeads(), [n1hash2]) - }) - }) - - it('should not require an additional request when a false-positive depends on a true-negative', () => { - // Scenario: ,-- n1c1 <-- n1c2 <-- n1c3 - // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+ - // `-- n2c1 <-- n2c2 <-- n2c3 - // where n2c2 is a false positive in the Bloom filter containing {n1c1, n1c2, n1c3}. - // lastSync is c4. 
- let n1 = create(true, '01234567'), n2 = create(true, '89abcdef') - let s1 = initSyncState(), s2 = initSyncState() - let n1hash3, n2hash3 - - for (let i = 0; i < 5; i++) { - n1.put("_root", "x", i); n1.commit("", 0) - } - sync(n1, n2, s1, s2) - for (let i = 86; ; i++) { // search for false positive; see comment above - const n1us1 = n1.clone('01234567') - n1us1.put("_root", "x", `${i} @ n1`); n1us1.commit("", 0) - - const n2us1 = n2.clone('89abcdef') - n2us1.put("_root", "x", `${i} @ n2`); n2us1.commit("", 0) - - //const n1us1 = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) - //const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) - const n1hash1 = n1us1.getHeads()[0] - - const n1us2 = n1us1.clone() - n1us2.put("_root", "x", `${i + 1} @ n1`); n1us2.commit("", 0) - - const n2us2 = n2us1.clone() - n2us2.put("_root", "x", `${i + 1} @ n2`); n2us2.commit("", 0) - - const n1hash2 = n1us2.getHeads()[0], n2hash2 = n2us2.getHeads()[0] - - const n1us3 = n1us2.clone() - n1us3.put("_root", "x", `final @ n1`); n1us3.commit("", 0) - - const n2us3 = n2us2.clone() - n2us3.put("_root", "x", `final @ n2`); n2us3.commit("", 0) - - n1hash3 = n1us3.getHeads()[0]; n2hash3 = n2us3.getHeads()[0] - - if (new BloomFilter([n1hash1, n1hash2, n1hash3]).containsHash(n2hash2)) { - n1 = n1us3; n2 = n2us3; break - } - } - const bothHeads = [n1hash3, n2hash3].sort() - s1 = decodeSyncState(encodeSyncState(s1)) - s2 = decodeSyncState(encodeSyncState(s2)) - sync(n1, n2, s1, s2) - assert.deepStrictEqual(n1.getHeads(), bothHeads) - assert.deepStrictEqual(n2.getHeads(), bothHeads) - }) - - it('should handle chains of false-positives', () => { - // Scenario: ,-- c5 - // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+ - // `-- n2c1 <-- n2c2 <-- n2c3 - // where n2c1 and n2c2 are both false positives in the Bloom filter containing {c5}. - // lastSync is c4. 
- const n1 = create(true, '01234567') - let n2 = create(true, '89abcdef') - let s1 = initSyncState(), s2 = initSyncState() - - for (let i = 0; i < 5; i++) { - n1.put("_root", "x", i); n1.commit("", 0) - } - - sync(n1, n2, s1, s2) - - n1.put("_root", "x", 5); n1.commit("", 0) - - for (let i = 2; ; i++) { // search for false positive; see comment above - const n2us1 = n2.clone('89abcdef') - n2us1.put("_root", "x", `${i} @ n2`); n2us1.commit("", 0) - if (new BloomFilter(n1.getHeads()).containsHash(n2us1.getHeads()[0])) { - n2 = n2us1; break - } - } - for (let i = 141; ; i++) { // search for false positive; see comment above - const n2us2 = n2.clone('89abcdef') - n2us2.put("_root", "x", `${i} again`); n2us2.commit("", 0) - if (new BloomFilter(n1.getHeads()).containsHash(n2us2.getHeads()[0])) { - n2 = n2us2; break - } - } - n2.put("_root", "x", `final @ n2`); n2.commit("", 0) - - const allHeads = [...n1.getHeads(), ...n2.getHeads()].sort() - s1 = decodeSyncState(encodeSyncState(s1)) - s2 = decodeSyncState(encodeSyncState(s2)) - sync(n1, n2, s1, s2) - assert.deepStrictEqual(n1.getHeads(), allHeads) - assert.deepStrictEqual(n2.getHeads(), allHeads) - }) - - it('should allow the false-positive hash to be explicitly requested', () => { - // Scenario: ,-- n1 - // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ - // `-- n2 - // where n2 causes a false positive in the Bloom filter containing {n1}. 
- let n1 = create(true, '01234567'), n2 = create(true, '89abcdef') - let s1 = initSyncState(), s2 = initSyncState() - let message - - for (let i = 0; i < 10; i++) { - n1.put("_root", "x", i); n1.commit("", 0) - } - - sync(n1, n2, s1, s2) - - s1 = decodeSyncState(encodeSyncState(s1)) - s2 = decodeSyncState(encodeSyncState(s2)) - - for (let i = 1; ; i++) { // brute-force search for false positive; see comment above - const n1up = n1.clone('01234567'); n1up.put("_root", "x", `${i} @ n1`); n1up.commit("", 0) - const n2up = n1.clone('89abcdef'); n2up.put("_root", "x", `${i} @ n2`); n2up.commit("", 0) - - // check if the bloom filter on n2 will believe n1 already has a particular hash - // this will mean n2 won't offer that data to n2 by receiving a sync message from n1 - if (new BloomFilter(n1up.getHeads()).containsHash(n2up.getHeads()[0])) { - n1 = n1up; n2 = n2up; break - } - } - - // n1 creates a sync message for n2 with an ill-fated bloom - message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be null") } - assert.strictEqual(decodeSyncMessage(message).changes.length, 0) - - // n2 receives it and DOESN'T send a change back - n2.receiveSyncMessage(s2, message) - message = n2.generateSyncMessage(s2) - if (message === null) { throw new RangeError("message should not be null") } - assert.strictEqual(decodeSyncMessage(message).changes.length, 0) - - // n1 should now realize it's missing that change and request it explicitly - n1.receiveSyncMessage(s1, message) - message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be null") } - assert.deepStrictEqual(decodeSyncMessage(message).need, n2.getHeads()) - - // n2 should fulfill that request - n2.receiveSyncMessage(s2, message) - message = n2.generateSyncMessage(s2) - if (message === null) { throw new RangeError("message should not be null") } - assert.strictEqual(decodeSyncMessage(message).changes.length, 1) - - // n1 should 
apply the change and the two should now be in sync - n1.receiveSyncMessage(s1, message) - assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) - }) - - describe('protocol features', () => { - it('should allow multiple Bloom filters', () => { - // Scenario: ,-- n1c1 <-- n1c2 <-- n1c3 - // c0 <-- c1 <-- c2 <-+--- n2c1 <-- n2c2 <-- n2c3 - // `-- n3c1 <-- n3c2 <-- n3c3 - // n1 has {c0, c1, c2, n1c1, n1c2, n1c3, n2c1, n2c2}; - // n2 has {c0, c1, c2, n1c1, n1c2, n2c1, n2c2, n2c3}; - // n3 has {c0, c1, c2, n3c1, n3c2, n3c3}. - const n1 = create(true, '01234567'), n2 = create(true, '89abcdef'), n3 = create(true, '76543210') - let s13 = initSyncState() - const s12 = initSyncState() - const s21 = initSyncState() - let s32 = initSyncState(), s31 = initSyncState(), s23 = initSyncState() - let message1, message3 - - for (let i = 0; i < 3; i++) { - n1.put("_root", "x", i); n1.commit("", 0) - } - - // sync all 3 nodes - sync(n1, n2, s12, s21) // eslint-disable-line no-unused-vars -- kept for consistency - sync(n1, n3, s13, s31) - sync(n3, n2, s32, s23) - for (let i = 0; i < 2; i++) { - n1.put("_root", "x", `${i} @ n1`); n1.commit("", 0) - } - for (let i = 0; i < 2; i++) { - n2.put("_root", "x", `${i} @ n2`); n2.commit("", 0) - } - n1.applyChanges(n2.getChanges([])) - n2.applyChanges(n1.getChanges([])) - n1.put("_root", "x", `3 @ n1`); n1.commit("", 0) - n2.put("_root", "x", `3 @ n2`); n2.commit("", 0) - - for (let i = 0; i < 3; i++) { - n3.put("_root", "x", `${i} @ n3`); n3.commit("", 0) - } - const n1c3 = n1.getHeads()[0], n2c3 = n2.getHeads()[0], n3c3 = n3.getHeads()[0] - s13 = decodeSyncState(encodeSyncState(s13)) - s31 = decodeSyncState(encodeSyncState(s31)) - s23 = decodeSyncState(encodeSyncState(s23)) - s32 = decodeSyncState(encodeSyncState(s32)) - - - // Now n3 concurrently syncs with n1 and n2. Doing this naively would result in n3 receiving - // changes {n1c1, n1c2, n2c1, n2c2} twice (those are the changes that both n1 and n2 have, but - // that n3 does not have). 
We want to prevent this duplication. - message1 = n1.generateSyncMessage(s13) // message from n1 to n3 - if (message1 === null) { throw new RangeError("message should not be null") } - assert.strictEqual(decodeSyncMessage(message1).changes.length, 0) - n3.receiveSyncMessage(s31, message1) - message3 = n3.generateSyncMessage(s31) // message from n3 to n1 - if (message3 === null) { throw new RangeError("message should not be null") } - assert.strictEqual(decodeSyncMessage(message3).changes.length, 3) // {n3c1, n3c2, n3c3} - n1.receiveSyncMessage(s13, message3) - - // Copy the Bloom filter received from n1 into the message sent from n3 to n2. This Bloom - // filter indicates what changes n3 is going to receive from n1. - message3 = n3.generateSyncMessage(s32) // message from n3 to n2 - if (message3 === null) { throw new RangeError("message should not be null") } - const modifiedMessage = decodeSyncMessage(message3) - modifiedMessage.have.push(decodeSyncMessage(message1).have[0]) - assert.strictEqual(modifiedMessage.changes.length, 0) - n2.receiveSyncMessage(s23, encodeSyncMessage(modifiedMessage)) - - // n2 replies to n3, sending only n2c3 (the one change that n2 has but n1 doesn't) - const message2 = n2.generateSyncMessage(s23) - if (message2 === null) { throw new RangeError("message should not be null") } - assert.strictEqual(decodeSyncMessage(message2).changes.length, 1) // {n2c3} - n3.receiveSyncMessage(s32, message2) - - // n1 replies to n3 - message1 = n1.generateSyncMessage(s13) - if (message1 === null) { throw new RangeError("message should not be null") } - assert.strictEqual(decodeSyncMessage(message1).changes.length, 5) // {n1c1, n1c2, n1c3, n2c1, n2c2} - n3.receiveSyncMessage(s31, message1) - assert.deepStrictEqual(n3.getHeads(), [n1c3, n2c3, n3c3].sort()) - }) - - it('should allow any change to be requested', () => { - const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') - const s1 = initSyncState(), s2 = initSyncState() - let message = null 
- - for (let i = 0; i < 3; i++) { - n1.put("_root", "x", i); n1.commit("", 0) - } - - const lastSync = n1.getHeads() - - for (let i = 3; i < 6; i++) { - n1.put("_root", "x", i); n1.commit("", 0) - } - - sync(n1, n2, s1, s2) - s1.lastSentHeads = [] // force generateSyncMessage to return a message even though nothing changed - message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be null") } - const modMsg = decodeSyncMessage(message) - modMsg.need = lastSync // re-request change 2 - n2.receiveSyncMessage(s2, encodeSyncMessage(modMsg)) - message = n2.generateSyncMessage(s2) - if (message === null) { throw new RangeError("message should not be null") } - assert.strictEqual(decodeSyncMessage(message).changes.length, 1) - assert.strictEqual(decodeChange(decodeSyncMessage(message).changes[0]).hash, lastSync[0]) - }) - - it('should ignore requests for a nonexistent change', () => { - const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') - const s1 = initSyncState(), s2 = initSyncState() - let message = null - - for (let i = 0; i < 3; i++) { - n1.put("_root", "x", i); n1.commit("", 0) - } - - n2.applyChanges(n1.getChanges([])) - message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be null") } - message = decodeSyncMessage(message) - message.need = ['0000000000000000000000000000000000000000000000000000000000000000'] - message = encodeSyncMessage(message) - n2.receiveSyncMessage(s2, message) - message = n2.generateSyncMessage(s2) - assert.strictEqual(message, null) - }) - - it('should allow a subset of changes to be sent', () => { - // ,-- c1 <-- c2 - // c0 <-+ - // `-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 - const n1 = create(true, '01234567'), n2 = create(true, '89abcdef'), n3 = create(true, '76543210') - let s1 = initSyncState(), s2 = initSyncState() - let msg - - n1.put("_root", "x", 0); n1.commit("", 0) - n3.applyChanges(n3.getChangesAdded(n1)) // merge() - 
for (let i = 1; i <= 2; i++) { - n1.put("_root", "x", i); n1.commit("", 0) - } - for (let i = 3; i <= 4; i++) { - n3.put("_root", "x", i); n3.commit("", 0) - } - const c2 = n1.getHeads()[0], c4 = n3.getHeads()[0] - n2.applyChanges(n2.getChangesAdded(n3)) // merge() - - // Sync n1 and n2, so their shared heads are {c2, c4} - sync(n1, n2, s1, s2) - s1 = decodeSyncState(encodeSyncState(s1)) - s2 = decodeSyncState(encodeSyncState(s2)) - assert.deepStrictEqual(s1.sharedHeads, [c2, c4].sort()) - assert.deepStrictEqual(s2.sharedHeads, [c2, c4].sort()) - - // n2 and n3 apply {c5, c6, c7, c8} - n3.put("_root", "x", 5); n3.commit("", 0) - const change5 = n3.getLastLocalChange() - if (change5 === null) throw new RangeError("no local change") - n3.put("_root", "x", 6); n3.commit("", 0) - const change6 = n3.getLastLocalChange(), c6 = n3.getHeads()[0] - if (change6 === null) throw new RangeError("no local change") - for (let i = 7; i <= 8; i++) { - n3.put("_root", "x", i); n3.commit("", 0) - } - const c8 = n3.getHeads()[0] - n2.applyChanges(n2.getChangesAdded(n3)) // merge() - - // Now n1 initiates a sync with n2, and n2 replies with {c5, c6}. 
n2 does not send {c7, c8} - msg = n1.generateSyncMessage(s1) - if (msg === null) { throw new RangeError("message should not be null") } - n2.receiveSyncMessage(s2, msg) - msg = n2.generateSyncMessage(s2) - if (msg === null) { throw new RangeError("message should not be null") } - const decodedMsg = decodeSyncMessage(msg) - decodedMsg.changes = [change5, change6] - msg = encodeSyncMessage(decodedMsg) - const sentHashes: any = {} - - sentHashes[decodeChange(change5).hash] = true - sentHashes[decodeChange(change6).hash] = true - - s2.sentHashes = sentHashes - n1.receiveSyncMessage(s1, msg) - assert.deepStrictEqual(s1.sharedHeads, [c2, c6].sort()) - - // n1 replies, confirming the receipt of {c5, c6} and requesting the remaining changes - msg = n1.generateSyncMessage(s1) - if (msg === null) { throw new RangeError("message should not be null") } - n2.receiveSyncMessage(s2, msg) - assert.deepStrictEqual(decodeSyncMessage(msg).need, [c8]) - assert.deepStrictEqual(decodeSyncMessage(msg).have[0].lastSync, [c2, c6].sort()) - assert.deepStrictEqual(s1.sharedHeads, [c2, c6].sort()) - assert.deepStrictEqual(s2.sharedHeads, [c2, c6].sort()) - - // n2 sends the remaining changes {c7, c8} - msg = n2.generateSyncMessage(s2) - if (msg === null) { throw new RangeError("message should not be null") } - n1.receiveSyncMessage(s1, msg) - assert.strictEqual(decodeSyncMessage(msg).changes.length, 2) - assert.deepStrictEqual(s1.sharedHeads, [c2, c8].sort()) - }) - }) - - it('can handle overlappying splices', () => { - const doc = create(true) - doc.enablePatches(true) - let mat : any = doc.materialize("/") - doc.putObject("/", "text", "abcdefghij") - doc.splice("/text", 2, 2, "00") - doc.splice("/text", 3, 5, "11") - mat = doc.applyPatches(mat) - assert.deepEqual(mat.text, "ab011ij") - }) - - it('can handle utf16 text', () => { - const doc = create(true) - doc.enablePatches(true) - let mat : any = doc.materialize("/") - - doc.putObject("/", "width1", "AAAAAA") - doc.putObject("/", "width2", 
"🐻🐻🐻🐻🐻🐻") - doc.putObject("/", "mixed", "A🐻A🐻A🐻") - - assert.deepEqual(doc.length("/width1"), 6); - assert.deepEqual(doc.length("/width2"), 12); - assert.deepEqual(doc.length("/mixed"), 9); - - const heads1 = doc.getHeads(); - - mat = doc.applyPatches(mat) - - const remote = load(doc.save(), true) - remote.enablePatches(true) - let r_mat : any = remote.materialize("/") - - assert.deepEqual(mat, { width1: "AAAAAA", width2: "🐻🐻🐻🐻🐻🐻", mixed: "A🐻A🐻A🐻" }) - assert.deepEqual(mat.width1.slice(2,4), "AA") - assert.deepEqual(mat.width2.slice(2,4), "🐻") - assert.deepEqual(mat.mixed.slice(1,4), "🐻A") - - assert.deepEqual(r_mat, { width1: "AAAAAA", width2: "🐻🐻🐻🐻🐻🐻", mixed: "A🐻A🐻A🐻" }) - assert.deepEqual(r_mat.width1.slice(2,4), "AA") - assert.deepEqual(r_mat.width2.slice(2,4), "🐻") - assert.deepEqual(r_mat.mixed.slice(1,4), "🐻A") - - doc.splice("/width1", 2, 2, "🐻") - doc.splice("/width2", 2, 2, "A🐻A") - doc.splice("/mixed", 3, 3, "X") - - mat = doc.applyPatches(mat) - remote.loadIncremental(doc.saveIncremental()); - r_mat = remote.applyPatches(r_mat) - - assert.deepEqual(mat.width1, "AA🐻AA") - assert.deepEqual(mat.width2, "🐻A🐻A🐻🐻🐻🐻") - assert.deepEqual(mat.mixed, "A🐻XA🐻") - - assert.deepEqual(r_mat.width1, "AA🐻AA") - assert.deepEqual(r_mat.width2, "🐻A🐻A🐻🐻🐻🐻") - assert.deepEqual(r_mat.mixed, "A🐻XA🐻") - assert.deepEqual(remote.length("/width1"), 6); - assert.deepEqual(remote.length("/width2"), 14); - assert.deepEqual(remote.length("/mixed"), 7); - - // when indexing in the middle of a multibyte char it indexes at the char before - doc.splice("/width2", 4, 1, "X") - mat = doc.applyPatches(mat) - remote.loadIncremental(doc.saveIncremental()); - r_mat = remote.applyPatches(r_mat) - - assert.deepEqual(mat.width2, "🐻AXA🐻🐻🐻🐻") - - assert.deepEqual(doc.length("/width1", heads1), 6); - assert.deepEqual(doc.length("/width2", heads1), 12); - assert.deepEqual(doc.length("/mixed", heads1), 9); - - assert.deepEqual(doc.get("/mixed", 0), 'A'); - assert.deepEqual(doc.get("/mixed", 1), '🐻'); - 
assert.deepEqual(doc.get("/mixed", 2), '🐻'); - assert.deepEqual(doc.get("/mixed", 3), 'X'); - assert.deepEqual(doc.get("/mixed", 1, heads1), '🐻'); - assert.deepEqual(doc.get("/mixed", 2, heads1), '🐻'); - assert.deepEqual(doc.get("/mixed", 3, heads1), 'A'); - assert.deepEqual(doc.get("/mixed", 4, heads1), '🐻'); - }) - - it('can handle non-characters embedded in text', () => { - const change : any = { - ops: [ - { action: 'makeText', obj: '_root', key: 'bad_text', pred: [] }, - { action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'A', pred: [] }, - { action: 'set', obj: '1@aaaa', elemId: '2@aaaa', insert: true, value: 'BBBBB', pred: [] }, - { action: 'makeMap', obj: '1@aaaa', elemId: '3@aaaa', insert: true, pred: [] }, - { action: 'set', obj: '1@aaaa', elemId: '4@aaaa', insert: true, value: 'C', pred: [] } - ], - actor: 'aaaa', - seq: 1, - startOp: 1, - time: 0, - message: null, - deps: [] - } - const doc = load(encodeChange(change), true); - doc.enablePatches(true) - const mat : any = doc.materialize("/") - - // multi - char strings appear as a span of strings - // non strings appear as an object replacement unicode char - assert.deepEqual(mat.bad_text, 'ABBBBBC') - assert.deepEqual(doc.text("/bad_text"), 'ABBBBBC') - assert.deepEqual(doc.materialize("/bad_text"), 'ABBBBBC') - - // deleting in the middle of a multi-byte character will delete the whole thing - const doc1 = doc.fork() - doc1.splice("/bad_text", 3, 3, "X"); - assert.deepEqual(doc1.text("/bad_text"), 'AXC') - - // deleting in the middle of a multi-byte character will delete the whole thing - // and characters past its end - const doc2 = doc.fork() - doc2.splice("/bad_text", 3, 4, "X"); - assert.deepEqual(doc2.text("/bad_text"), 'AXC') - - const doc3 = doc.fork() - doc3.splice("/bad_text", 3, 5, "X"); - assert.deepEqual(doc3.text("/bad_text"), 'AX') - - // inserting in the middle of a mutli-bytes span inserts after - const doc4 = doc.fork() - doc4.splice("/bad_text", 3, 0, "X"); - 
assert.deepEqual(doc4.text("/bad_text"), 'ABBBBBXC') - - // deleting into the middle of a multi-byte span deletes the whole thing - const doc5 = doc.fork() - doc5.splice("/bad_text", 0, 2, "X"); - assert.deepEqual(doc5.text("/bad_text"), 'XC') - - // you can access elements in the text by text index - assert.deepEqual(doc5.getAll("/bad_text", 1), [['map', '4@aaaa' ]]) - assert.deepEqual(doc5.getAll("/bad_text", 2, doc.getHeads()), [['str', 'BBBBB', '3@aaaa' ]]) - }) - }) - - describe("the legacy text implementation", () => { - const root = "_root" - class FakeText { - elems: Array - constructor(elems: string | Array) { - if (typeof elems === "string") { - this.elems = Array.from(elems) - } else { - this.elems = elems - } - } - } - it("should materialize old style text", () => { - let doc = create(false); - doc.registerDatatype("text", (e: any) => new FakeText(e)) - doc.enablePatches(true) - let txt = doc.putObject(root, "text", "") - doc.splice(txt, 0, 0, "hello") - let mat: any = doc.materialize() - assert.deepEqual(mat.text, new FakeText("hello")) - }) - - it("should apply patches to old style text", () => { - let doc = create(false); - doc.registerDatatype("text", (e: any) => new FakeText(e)) - doc.enablePatches(true) - let mat : any = doc.materialize("/") - doc.putObject("/", "text", "abcdefghij") - doc.splice("/text", 2, 2, "00") - doc.splice("/text", 3, 5, "11") - mat = doc.applyPatches(mat) - assert.deepEqual(mat.text, new FakeText("ab011ij")) - }) - - it("should apply list patches to old style text", () => { - let doc = create(false); - doc.registerDatatype("text", (e: any) => new FakeText(e)) - doc.enablePatches(true) - let mat : any = doc.materialize("/") - doc.putObject("/", "text", "abc") - doc.insert("/text", 0, "0") - doc.insert("/text", 1, "1") - mat = doc.applyPatches(mat) - assert.deepEqual(mat.text, new FakeText("01abc")) - }) - - it("should allow inserting using list methods", () => { - let doc = create(false); - doc.registerDatatype("text", (e: 
any) => new FakeText(e)) - doc.enablePatches(true) - let mat : any = doc.materialize("/") - const txt = doc.putObject("/", "text", "abc") - doc.insert(txt, 3, "d") - doc.insert(txt, 0, "0") - mat = doc.applyPatches(mat) - assert.deepEqual(mat.text, new FakeText("0abcd")) - }) - - it("should allow inserting objects in old style text", () => { - let doc = create(false); - doc.registerDatatype("text", (e: any) => new FakeText(e)) - doc.enablePatches(true) - let mat : any = doc.materialize("/") - const txt = doc.putObject("/", "text", "abc") - doc.insertObject(txt, 0, {"key": "value"}) - doc.insertObject(txt, 2, ["elem"]) - doc.insert(txt, 2, "m") - mat = doc.applyPatches(mat) - assert.deepEqual(mat.text, new FakeText([ - {"key": "value"}, "a", "m", ["elem"], "b", "c" - ])) - }) - - class RawString { - val: string; - constructor(s: string) { - this.val = s - } - } - - it("should allow registering a different type for strings", () => { - let doc = create(false); - doc.registerDatatype("str", (e: any) => new RawString(e)) - doc.enablePatches(true) - doc.put("/", "key", "value") - let mat: any = doc.materialize() - assert.deepStrictEqual(mat.key, new RawString("value")) - }) - - it("should generate patches correctly for raw strings", () => { - let doc = create(false); - doc.registerDatatype("str", (e: any) => new RawString(e)) - doc.enablePatches(true) - let mat: any = doc.materialize() - doc.put("/", "key", "value") - mat = doc.applyPatches(mat) - assert.deepStrictEqual(mat.key, new RawString("value")) - }) - - }) -}) diff --git a/rust/automerge/Cargo.toml b/rust/automerge/Cargo.toml deleted file mode 100644 index 0c10cc2b..00000000 --- a/rust/automerge/Cargo.toml +++ /dev/null @@ -1,62 +0,0 @@ -[package] -name = "automerge" -version = "0.3.0" -edition = "2021" -license = "MIT" -repository = "https://github.com/automerge/automerge-rs" -documentation = "https://automerge.org/automerge-rs/automerge/" -rust-version = "1.57.0" -description = "A JSON-like data structure (a 
CRDT) that can be modified concurrently by different users, and merged again automatically" -readme = "./README.md" - -[features] -optree-visualisation = ["dot", "rand"] -wasm = ["js-sys", "wasm-bindgen", "web-sys", "uuid/js"] - -[dependencies] -hex = "^0.4.3" -leb128 = "^0.2.5" -sha2 = "^0.10.0" -thiserror = "^1.0.16" -itertools = "^0.10.3" -flate2 = "^1.0.22" -uuid = { version = "^1.2.1", features=["v4", "serde"] } -smol_str = { version = "^0.1.21", features=["serde"] } -tracing = { version = "^0.1.29" } -fxhash = "^0.2.1" -tinyvec = { version = "^1.5.1", features = ["alloc"] } -serde = { version = "^1.0", features=["derive"] } - -# optional deps -dot = { version = "0.1.4", optional = true } -js-sys = { version = "^0.3", optional = true } -wasm-bindgen = { version = "^0.2", optional = true } -rand = { version = "^0.8.4", optional = true } - -[dependencies.web-sys] -version = "^0.3.55" -features = ["console"] -optional = true - -[dev-dependencies] -pretty_assertions = "1.0.0" -proptest = { version = "^1.0.0", default-features = false, features = ["std"] } -serde_json = { version = "^1.0.73", features=["float_roundtrip"], default-features=true } -maplit = { version = "^1.0" } -criterion = "0.4.0" -test-log = { version = "0.2.10", features=["trace"], default-features = false} -tracing-subscriber = {version = "0.3.9", features = ["fmt", "env-filter"] } -automerge-test = { path = "../automerge-test" } -prettytable = "0.10.0" - -[[bench]] -name = "range" -harness = false - -[[bench]] -name = "map" -harness = false - -[[bench]] -name = "sync" -harness = false diff --git a/rust/automerge/README.md b/rust/automerge/README.md deleted file mode 100644 index 97dbe4f8..00000000 --- a/rust/automerge/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# Automerge - -Automerge is a library of data structures for building collaborative -[local-first](https://www.inkandswitch.com/local-first/) applications. This is -the Rust implementation. 
See [automerge.org](https://automerge.org/) diff --git a/rust/automerge/benches/map.rs b/rust/automerge/benches/map.rs deleted file mode 100644 index fcf3bfa3..00000000 --- a/rust/automerge/benches/map.rs +++ /dev/null @@ -1,268 +0,0 @@ -use automerge::{transaction::Transactable, Automerge, ScalarValue, ROOT}; -use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; - -fn repeated_increment(n: u64) -> Automerge { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - tx.put(ROOT, "counter", ScalarValue::counter(0)).unwrap(); - for _ in 0..n { - tx.increment(ROOT, "counter", 1).unwrap(); - } - tx.commit(); - doc -} - -fn repeated_put(n: u64) -> Automerge { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - for i in 0..n { - tx.put(ROOT, "0", i).unwrap(); - } - tx.commit(); - doc -} - -fn increasing_put(n: u64) -> Automerge { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - for i in 0..n { - tx.put(ROOT, i.to_string(), i).unwrap(); - } - tx.commit(); - doc -} - -fn decreasing_put(n: u64) -> Automerge { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - for i in (0..n).rev() { - tx.put(ROOT, i.to_string(), i).unwrap(); - } - tx.commit(); - doc -} - -fn criterion_benchmark(c: &mut Criterion) { - let sizes = [100, 1_000, 10_000]; - - let mut group = c.benchmark_group("map"); - for size in &sizes { - group.throughput(criterion::Throughput::Elements(*size)); - group.bench_with_input(BenchmarkId::new("repeated put", size), size, |b, &size| { - b.iter(|| repeated_put(size)) - }); - group.bench_with_input( - BenchmarkId::new("repeated increment", size), - size, - |b, &size| b.iter(|| repeated_increment(size)), - ); - - group.throughput(criterion::Throughput::Elements(*size)); - group.bench_with_input( - BenchmarkId::new("increasing put", size), - size, - |b, &size| b.iter(|| increasing_put(size)), - ); - - group.throughput(criterion::Throughput::Elements(*size)); - 
group.bench_with_input( - BenchmarkId::new("decreasing put", size), - size, - |b, &size| b.iter(|| decreasing_put(size)), - ); - } - group.finish(); - - let mut group = c.benchmark_group("map save"); - for size in &sizes { - group.throughput(criterion::Throughput::Elements(*size)); - group.bench_with_input(BenchmarkId::new("repeated put", size), size, |b, &size| { - b.iter_batched( - || repeated_put(size), - |mut doc| doc.save(), - criterion::BatchSize::LargeInput, - ) - }); - group.bench_with_input( - BenchmarkId::new("repeated increment", size), - size, - |b, &size| { - b.iter_batched( - || repeated_increment(size), - |mut doc| doc.save(), - criterion::BatchSize::LargeInput, - ) - }, - ); - - group.throughput(criterion::Throughput::Elements(*size)); - group.bench_with_input( - BenchmarkId::new("increasing put", size), - size, - |b, &size| { - b.iter_batched( - || increasing_put(size), - |mut doc| doc.save(), - criterion::BatchSize::LargeInput, - ) - }, - ); - - group.throughput(criterion::Throughput::Elements(*size)); - group.bench_with_input( - BenchmarkId::new("decreasing put", size), - size, - |b, &size| { - b.iter_batched( - || decreasing_put(size), - |mut doc| doc.save(), - criterion::BatchSize::LargeInput, - ) - }, - ); - } - group.finish(); - - let mut group = c.benchmark_group("map load"); - for size in &sizes { - group.throughput(criterion::Throughput::Elements(*size)); - group.bench_with_input(BenchmarkId::new("repeated put", size), size, |b, &size| { - b.iter_batched( - || repeated_put(size).save(), - |bytes| Automerge::load(&bytes).unwrap(), - criterion::BatchSize::LargeInput, - ) - }); - group.bench_with_input( - BenchmarkId::new("repeated increment", size), - size, - |b, &size| { - b.iter_batched( - || repeated_increment(size).save(), - |bytes| Automerge::load(&bytes).unwrap(), - criterion::BatchSize::LargeInput, - ) - }, - ); - - group.throughput(criterion::Throughput::Elements(*size)); - group.bench_with_input( - BenchmarkId::new("increasing put", 
size), - size, - |b, &size| { - b.iter_batched( - || increasing_put(size).save(), - |bytes| Automerge::load(&bytes).unwrap(), - criterion::BatchSize::LargeInput, - ) - }, - ); - - group.throughput(criterion::Throughput::Elements(*size)); - group.bench_with_input( - BenchmarkId::new("decreasing put", size), - size, - |b, &size| { - b.iter_batched( - || decreasing_put(size).save(), - |bytes| Automerge::load(&bytes).unwrap(), - criterion::BatchSize::LargeInput, - ) - }, - ); - } - group.finish(); - - let mut group = c.benchmark_group("map apply"); - for size in &sizes { - group.throughput(criterion::Throughput::Elements(*size)); - group.bench_with_input(BenchmarkId::new("repeated put", size), size, |b, &size| { - b.iter_batched( - || { - repeated_put(size) - .get_changes(&[]) - .unwrap() - .into_iter() - .cloned() - .collect::>() - }, - |changes| { - let mut doc = Automerge::new(); - doc.apply_changes(changes) - }, - criterion::BatchSize::LargeInput, - ) - }); - group.bench_with_input( - BenchmarkId::new("repeated increment", size), - size, - |b, &size| { - b.iter_batched( - || { - repeated_increment(size) - .get_changes(&[]) - .unwrap() - .into_iter() - .cloned() - .collect::>() - }, - |changes| { - let mut doc = Automerge::new(); - doc.apply_changes(changes) - }, - criterion::BatchSize::LargeInput, - ) - }, - ); - - group.throughput(criterion::Throughput::Elements(*size)); - group.bench_with_input( - BenchmarkId::new("increasing put", size), - size, - |b, &size| { - b.iter_batched( - || { - increasing_put(size) - .get_changes(&[]) - .unwrap() - .into_iter() - .cloned() - .collect::>() - }, - |changes| { - let mut doc = Automerge::new(); - doc.apply_changes(changes) - }, - criterion::BatchSize::LargeInput, - ) - }, - ); - - group.throughput(criterion::Throughput::Elements(*size)); - group.bench_with_input( - BenchmarkId::new("decreasing put", size), - size, - |b, &size| { - b.iter_batched( - || { - decreasing_put(size) - .get_changes(&[]) - .unwrap() - .into_iter() - 
.cloned() - .collect::>() - }, - |changes| { - let mut doc = Automerge::new(); - doc.apply_changes(changes) - }, - criterion::BatchSize::LargeInput, - ) - }, - ); - } - group.finish(); -} - -criterion_group!(benches, criterion_benchmark); -criterion_main!(benches); diff --git a/rust/automerge/benches/range.rs b/rust/automerge/benches/range.rs deleted file mode 100644 index 008ae159..00000000 --- a/rust/automerge/benches/range.rs +++ /dev/null @@ -1,36 +0,0 @@ -use automerge::{transaction::Transactable, Automerge, ReadDoc, ROOT}; -use criterion::{black_box, criterion_group, criterion_main, Criterion}; - -fn doc(n: u64) -> Automerge { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - for i in 0..n { - tx.put(ROOT, i.to_string(), i.to_string()).unwrap(); - } - tx.commit(); - doc -} - -fn range(doc: &Automerge) { - let range = doc.values(ROOT); - range.for_each(drop); -} - -fn range_at(doc: &Automerge) { - let range = doc.values_at(ROOT, &doc.get_heads()); - range.for_each(drop); -} - -fn criterion_benchmark(c: &mut Criterion) { - let n = 100_000; - let doc = doc(n); - c.bench_function(&format!("range {}", n), |b| { - b.iter(|| range(black_box(&doc))) - }); - c.bench_function(&format!("range_at {}", n), |b| { - b.iter(|| range_at(black_box(&doc))) - }); -} - -criterion_group!(benches, criterion_benchmark); -criterion_main!(benches); diff --git a/rust/automerge/benches/sync.rs b/rust/automerge/benches/sync.rs deleted file mode 100644 index 13965792..00000000 --- a/rust/automerge/benches/sync.rs +++ /dev/null @@ -1,95 +0,0 @@ -use automerge::{ - sync::{self, SyncDoc}, - transaction::Transactable, - Automerge, ROOT, -}; -use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; - -#[derive(Default)] -struct DocWithSync { - doc: Automerge, - peer_state: sync::State, -} - -impl From for DocWithSync { - fn from(doc: Automerge) -> Self { - Self { - doc, - peer_state: sync::State::default(), - } - } -} - -fn increasing_put(n: u64) -> 
Automerge { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - for i in 0..n { - tx.put(ROOT, i.to_string(), i).unwrap(); - } - tx.commit(); - doc -} - -// keep syncing until doc1 no longer generates a sync message for doc2. -fn sync(doc1: &mut DocWithSync, doc2: &mut DocWithSync) { - while let Some(message1) = doc1.doc.generate_sync_message(&mut doc1.peer_state) { - doc2.doc - .receive_sync_message(&mut doc2.peer_state, message1) - .unwrap(); - - if let Some(message2) = doc2.doc.generate_sync_message(&mut doc2.peer_state) { - doc1.doc - .receive_sync_message(&mut doc1.peer_state, message2) - .unwrap() - } - } -} - -fn criterion_benchmark(c: &mut Criterion) { - let sizes = [100, 1_000, 10_000]; - - let mut group = c.benchmark_group("sync unidirectional"); - for size in &sizes { - group.throughput(criterion::Throughput::Elements(*size)); - - group.bench_with_input( - BenchmarkId::new("increasing put", size), - size, - |b, &size| { - b.iter_batched( - || (increasing_put(size), DocWithSync::default()), - |(doc1, mut doc2)| sync(&mut doc1.into(), &mut doc2), - criterion::BatchSize::LargeInput, - ) - }, - ); - } - group.finish(); - - let mut group = c.benchmark_group("sync unidirectional every change"); - for size in &sizes { - group.throughput(criterion::Throughput::Elements(*size)); - - group.bench_with_input( - BenchmarkId::new("increasing put", size), - size, - |b, &size| { - b.iter(|| { - let mut doc1 = DocWithSync::default(); - let mut doc2 = DocWithSync::default(); - - for i in 0..size { - let mut tx = doc1.doc.transaction(); - tx.put(ROOT, i.to_string(), i).unwrap(); - tx.commit(); - sync(&mut doc1, &mut doc2); - } - }) - }, - ); - } - group.finish(); -} - -criterion_group!(benches, criterion_benchmark); -criterion_main!(benches); diff --git a/rust/automerge/examples/watch.rs b/rust/automerge/examples/watch.rs deleted file mode 100644 index 4cd8f4ea..00000000 --- a/rust/automerge/examples/watch.rs +++ /dev/null @@ -1,106 +0,0 @@ -use 
automerge::transaction::CommitOptions; -use automerge::transaction::Transactable; -use automerge::Automerge; -use automerge::AutomergeError; -use automerge::Patch; -use automerge::ReadDoc; -use automerge::VecOpObserver; -use automerge::ROOT; - -fn main() { - let mut doc = Automerge::new(); - - // a simple scalar change in the root object - let mut result = doc - .transact_observed_with::<_, _, AutomergeError, _, VecOpObserver>( - |_result| CommitOptions::default(), - |tx| { - tx.put(ROOT, "hello", "world").unwrap(); - Ok(()) - }, - ) - .unwrap(); - get_changes(&doc, result.op_observer.take_patches()); - - let mut tx = doc.transaction_with_observer(VecOpObserver::default()); - let map = tx - .put_object(ROOT, "my new map", automerge::ObjType::Map) - .unwrap(); - tx.put(&map, "blah", 1).unwrap(); - tx.put(&map, "blah2", 1).unwrap(); - let list = tx - .put_object(&map, "my list", automerge::ObjType::List) - .unwrap(); - tx.insert(&list, 0, "yay").unwrap(); - let m = tx.insert_object(&list, 0, automerge::ObjType::Map).unwrap(); - tx.put(&m, "hi", 2).unwrap(); - tx.insert(&list, 1, "woo").unwrap(); - let m = tx.insert_object(&list, 2, automerge::ObjType::Map).unwrap(); - tx.put(&m, "hi", 2).unwrap(); - let patches = tx.observer().take_patches(); - let _heads3 = tx.commit_with(CommitOptions::default()); - get_changes(&doc, patches); -} - -fn get_changes(doc: &Automerge, patches: Vec) { - for patch in patches { - match patch { - Patch::Put { - obj, prop, value, .. - } => { - println!( - "put {:?} at {:?} in obj {:?}, object path {:?}", - value, - prop, - obj, - doc.path_to_object(&obj) - ) - } - Patch::Insert { - obj, index, value, .. - } => { - println!( - "insert {:?} at {:?} in obj {:?}, object path {:?}", - value, - index, - obj, - doc.path_to_object(&obj) - ) - } - Patch::Splice { - obj, index, value, .. 
- } => { - println!( - "splice '{:?}' at {:?} in obj {:?}, object path {:?}", - value, - index, - obj, - doc.path_to_object(&obj) - ) - } - Patch::Increment { - obj, prop, value, .. - } => { - println!( - "increment {:?} in obj {:?} by {:?}, object path {:?}", - prop, - obj, - value, - doc.path_to_object(&obj) - ) - } - Patch::Delete { obj, prop, .. } => println!( - "delete {:?} in obj {:?}, object path {:?}", - prop, - obj, - doc.path_to_object(&obj) - ), - Patch::Expose { obj, prop, .. } => println!( - "expose {:?} in obj {:?}, object path {:?}", - prop, - obj, - doc.path_to_object(&obj) - ), - } - } -} diff --git a/rust/automerge/fuzz/.gitignore b/rust/automerge/fuzz/.gitignore deleted file mode 100644 index 2eb15f8e..00000000 --- a/rust/automerge/fuzz/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -target -corpus -coverage diff --git a/rust/automerge/fuzz/Cargo.toml b/rust/automerge/fuzz/Cargo.toml deleted file mode 100644 index 3461e9f3..00000000 --- a/rust/automerge/fuzz/Cargo.toml +++ /dev/null @@ -1,29 +0,0 @@ -[package] -name = "automerge-fuzz" -version = "0.0.0" -publish = false -edition = "2021" - -[package.metadata] -cargo-fuzz = true - -[dependencies] -libfuzzer-sys = "0.4" -leb128 = "^0.2.5" -sha2 = "^0.10.0" - -[dependencies.automerge] -path = ".." 
- -# Prevent this from interfering with workspaces -[workspace] -members = ["."] - -[profile.release] -debug = 1 - -[[bin]] -name = "load" -path = "fuzz_targets/load.rs" -test = false -doc = false \ No newline at end of file diff --git a/rust/automerge/fuzz/fuzz_targets/load.rs b/rust/automerge/fuzz/fuzz_targets/load.rs deleted file mode 100644 index 0dea2624..00000000 --- a/rust/automerge/fuzz/fuzz_targets/load.rs +++ /dev/null @@ -1,37 +0,0 @@ -#![no_main] - -use sha2::{Sha256, Digest}; -use automerge::{Automerge}; -use libfuzzer_sys::arbitrary::{Arbitrary, Result, Unstructured}; -use libfuzzer_sys::fuzz_target; - -#[derive(Debug)] -struct DocumentChunk { - bytes: Vec, -} - -fn add_header(typ: u8, data: &[u8]) -> Vec { - let mut input = vec![u8::from(typ)]; - leb128::write::unsigned(&mut input, data.len() as u64).unwrap(); - input.extend(data.as_ref()); - let hash_result = Sha256::digest(input.clone()); - let array: [u8; 32] = hash_result.into(); - - let mut out = vec![133, 111, 74, 131, array[0], array[1], array[2], array[3]]; - out.extend(input); - out -} - -impl<'a> Arbitrary<'a> for DocumentChunk -{ - fn arbitrary(u: &mut Unstructured<'a>) -> Result { - let input = u.bytes(u.len())?; - let contents = add_header(0, input); - - return Ok(DocumentChunk{bytes: contents}) - } -} - -fuzz_target!(|doc: DocumentChunk| { - Automerge::load(&doc.bytes); -}); diff --git a/rust/automerge/src/autocommit.rs b/rust/automerge/src/autocommit.rs deleted file mode 100644 index ae28596e..00000000 --- a/rust/automerge/src/autocommit.rs +++ /dev/null @@ -1,672 +0,0 @@ -use std::ops::RangeBounds; - -use crate::exid::ExId; -use crate::op_observer::{BranchableObserver, OpObserver}; -use crate::sync::SyncDoc; -use crate::transaction::{CommitOptions, Transactable}; -use crate::{ - sync, Keys, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType, Parents, ReadDoc, - ScalarValue, -}; -use crate::{ - transaction::{Observation, Observed, TransactionInner, UnObserved}, - ActorId, 
Automerge, AutomergeError, Change, ChangeHash, Prop, TextEncoding, Value, Values, -}; - -/// An automerge document that automatically manages transactions. -/// -/// An `AutoCommit` can optionally manage an [`OpObserver`]. This observer will be notified of all -/// changes made by both remote and local changes. The type parameter `O` tracks whether this -/// document is observed or not. -/// -/// ## Creating, loading, merging and forking documents -/// -/// A new document can be created with [`Self::new`], which will create a document with a random -/// [`ActorId`]. Existing documents can be loaded with [`Self::load`]. -/// -/// If you have two documents and you want to merge the changes from one into the other you can use -/// [`Self::merge`]. -/// -/// If you have a document you want to split into two concurrent threads of execution you can use -/// [`Self::fork`]. If you want to split a document from ealier in its history you can use -/// [`Self::fork_at`]. -/// -/// ## Reading values -/// -/// [`Self`] implements [`ReadDoc`], which provides methods for reading values from the document. -/// -/// ## Modifying a document -/// -/// This type implements [`Transactable`] directly, so you can modify it using methods from [`Transactable`]. -/// -/// ## Synchronization -/// -/// To synchronise call [`Self::sync`] which returns an implementation of [`SyncDoc`] -/// -/// ## Observers -/// -/// An `AutoCommit` can optionally manage an [`OpObserver`]. [`Self::new`] will return a document -/// with no observer but you can set an observer using [`Self::with_observer`]. The observer must -/// implement both [`OpObserver`] and [`BranchableObserver`]. 
If you have an observed autocommit -/// then you can obtain a mutable reference to the observer with [`Self::observer`] -#[derive(Debug, Clone)] -pub struct AutoCommitWithObs { - doc: Automerge, - transaction: Option<(Obs, TransactionInner)>, - observation: Obs, -} - -/// An autocommit document with no observer -/// -/// See [`AutoCommitWithObs`] -pub type AutoCommit = AutoCommitWithObs; - -impl Default for AutoCommitWithObs> { - fn default() -> Self { - let op_observer = O::default(); - AutoCommitWithObs { - doc: Automerge::new(), - transaction: None, - observation: Observed::new(op_observer), - } - } -} - -impl AutoCommit { - pub fn new() -> AutoCommit { - AutoCommitWithObs { - doc: Automerge::new(), - transaction: None, - observation: UnObserved, - } - } - - pub fn load(data: &[u8]) -> Result { - let doc = Automerge::load(data)?; - Ok(Self { - doc, - transaction: None, - observation: UnObserved, - }) - } -} - -impl AutoCommitWithObs> { - pub fn observer(&mut self) -> &mut Obs { - self.ensure_transaction_closed(); - self.observation.observer() - } -} - -impl AutoCommitWithObs { - pub fn fork(&mut self) -> Self { - self.ensure_transaction_closed(); - Self { - doc: self.doc.fork(), - transaction: self.transaction.clone(), - observation: self.observation.clone(), - } - } - - pub fn fork_at(&mut self, heads: &[ChangeHash]) -> Result { - self.ensure_transaction_closed(); - Ok(Self { - doc: self.doc.fork_at(heads)?, - transaction: self.transaction.clone(), - observation: self.observation.clone(), - }) - } -} - -impl AutoCommitWithObs { - pub fn with_observer( - self, - op_observer: Obs2, - ) -> AutoCommitWithObs> { - AutoCommitWithObs { - doc: self.doc, - transaction: self - .transaction - .map(|(_, t)| (Observed::new(op_observer.branch()), t)), - observation: Observed::new(op_observer), - } - } - - /// Get the inner document. 
- #[doc(hidden)] - pub fn document(&mut self) -> &Automerge { - self.ensure_transaction_closed(); - &self.doc - } - - pub fn with_actor(mut self, actor: ActorId) -> Self { - self.ensure_transaction_closed(); - self.doc.set_actor(actor); - self - } - - pub fn set_actor(&mut self, actor: ActorId) -> &mut Self { - self.ensure_transaction_closed(); - self.doc.set_actor(actor); - self - } - - pub fn get_actor(&self) -> &ActorId { - self.doc.get_actor() - } - - /// Change the text encoding of this view of the document - /// - /// This is a cheap operation, it just changes the way indexes are calculated - pub fn with_encoding(mut self, encoding: TextEncoding) -> Self { - self.doc = self.doc.with_encoding(encoding); - self - } - - fn ensure_transaction_open(&mut self) { - if self.transaction.is_none() { - let args = self.doc.transaction_args(); - let inner = TransactionInner::new(args); - self.transaction = Some((self.observation.branch(), inner)) - } - } - - fn ensure_transaction_closed(&mut self) { - if let Some((current, tx)) = self.transaction.take() { - self.observation.merge(¤t); - tx.commit(&mut self.doc, None, None); - } - } - - /// Load an incremental save of a document. - /// - /// Unlike `load` this imports changes into an existing document. It will work with both the - /// output of [`Self::save`] and [`Self::save_incremental`] - /// - /// The return value is the number of ops which were applied, this is not useful and will - /// change in future. 
- pub fn load_incremental(&mut self, data: &[u8]) -> Result { - self.ensure_transaction_closed(); - // TODO - would be nice to pass None here instead of &mut () - if let Some(observer) = self.observation.observer() { - self.doc.load_incremental_with(data, Some(observer)) - } else { - self.doc.load_incremental(data) - } - } - - pub fn apply_changes( - &mut self, - changes: impl IntoIterator, - ) -> Result<(), AutomergeError> { - self.ensure_transaction_closed(); - if let Some(observer) = self.observation.observer() { - self.doc.apply_changes_with(changes, Some(observer)) - } else { - self.doc.apply_changes(changes) - } - } - - /// Takes all the changes in `other` which are not in `self` and applies them - pub fn merge( - &mut self, - other: &mut AutoCommitWithObs, - ) -> Result, AutomergeError> { - self.ensure_transaction_closed(); - other.ensure_transaction_closed(); - if let Some(observer) = self.observation.observer() { - self.doc.merge_with(&mut other.doc, Some(observer)) - } else { - self.doc.merge(&mut other.doc) - } - } - - /// Save the entirety of this document in a compact form. - pub fn save(&mut self) -> Vec { - self.ensure_transaction_closed(); - self.doc.save() - } - - /// Save this document, but don't run it through DEFLATE afterwards - pub fn save_nocompress(&mut self) -> Vec { - self.ensure_transaction_closed(); - self.doc.save_nocompress() - } - - /// Save the changes since the last call to [Self::save`] - /// - /// The output of this will not be a compressed document format, but a series of individual - /// changes. This is useful if you know you have only made a small change since the last `save` - /// and you want to immediately send it somewhere (e.g. you've inserted a single character in a - /// text object). 
- pub fn save_incremental(&mut self) -> Vec { - self.ensure_transaction_closed(); - self.doc.save_incremental() - } - - pub fn get_missing_deps(&mut self, heads: &[ChangeHash]) -> Vec { - self.ensure_transaction_closed(); - self.doc.get_missing_deps(heads) - } - - /// Get the last change made by this documents actor ID - pub fn get_last_local_change(&mut self) -> Option<&Change> { - self.ensure_transaction_closed(); - self.doc.get_last_local_change() - } - - pub fn get_changes( - &mut self, - have_deps: &[ChangeHash], - ) -> Result, AutomergeError> { - self.ensure_transaction_closed(); - self.doc.get_changes(have_deps) - } - - pub fn get_change_by_hash(&mut self, hash: &ChangeHash) -> Option<&Change> { - self.ensure_transaction_closed(); - self.doc.get_change_by_hash(hash) - } - - /// Get changes in `other` that are not in `self - pub fn get_changes_added<'a>(&mut self, other: &'a mut Self) -> Vec<&'a Change> { - self.ensure_transaction_closed(); - other.ensure_transaction_closed(); - self.doc.get_changes_added(&other.doc) - } - - #[doc(hidden)] - pub fn import(&self, s: &str) -> Result<(ExId, ObjType), AutomergeError> { - self.doc.import(s) - } - - #[doc(hidden)] - pub fn dump(&mut self) { - self.ensure_transaction_closed(); - self.doc.dump() - } - - /// Return a graphviz representation of the opset. - /// - /// # Arguments - /// - /// * objects: An optional list of object IDs to display, if not specified all objects are - /// visualised - #[cfg(feature = "optree-visualisation")] - pub fn visualise_optree(&self, objects: Option>) -> String { - self.doc.visualise_optree(objects) - } - - /// Get the current heads of the document. - /// - /// This closes the transaction first, if one is in progress. 
- pub fn get_heads(&mut self) -> Vec { - self.ensure_transaction_closed(); - self.doc.get_heads() - } - - /// Commit any uncommitted changes - /// - /// Returns `None` if there were no operations to commit - pub fn commit(&mut self) -> Option { - self.commit_with(CommitOptions::default()) - } - - /// Commit the current operations with some options. - /// - /// Returns `None` if there were no operations to commit - /// - /// ``` - /// # use automerge::transaction::CommitOptions; - /// # use automerge::transaction::Transactable; - /// # use automerge::ROOT; - /// # use automerge::AutoCommit; - /// # use automerge::ObjType; - /// # use std::time::SystemTime; - /// let mut doc = AutoCommit::new(); - /// doc.put_object(&ROOT, "todos", ObjType::List).unwrap(); - /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as - /// i64; - /// doc.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); - /// ``` - pub fn commit_with(&mut self, options: CommitOptions) -> Option { - // ensure that even no changes triggers a change - self.ensure_transaction_open(); - let (current, tx) = self.transaction.take().unwrap(); - self.observation.merge(¤t); - tx.commit(&mut self.doc, options.message, options.time) - } - - /// Remove any changes that have been made in the current transaction from the document - pub fn rollback(&mut self) -> usize { - self.transaction - .take() - .map(|(_, tx)| tx.rollback(&mut self.doc)) - .unwrap_or(0) - } - - /// Generate an empty change - /// - /// The main reason to do this is if you wish to create a "merge commit" which has all the - /// current heads of the documents as dependencies but you have no new operations to create. - /// - /// Because this structure is an "autocommit" there may actually be outstanding operations to - /// submit. If this is the case this function will create two changes, one with the outstanding - /// operations and a new one with no operations. 
The returned `ChangeHash` will always be the - /// hash of the empty change. - pub fn empty_change(&mut self, options: CommitOptions) -> ChangeHash { - self.ensure_transaction_closed(); - let args = self.doc.transaction_args(); - TransactionInner::empty(&mut self.doc, args, options.message, options.time) - } - - /// An implementation of [`crate::sync::SyncDoc`] for this autocommit - /// - /// This ensures that any outstanding transactions for this document are committed before - /// taking part in the sync protocol - pub fn sync(&mut self) -> impl SyncDoc + '_ { - self.ensure_transaction_closed(); - SyncWrapper { inner: self } - } -} - -impl ReadDoc for AutoCommitWithObs { - fn parents>(&self, obj: O) -> Result, AutomergeError> { - self.doc.parents(obj) - } - - fn path_to_object>(&self, obj: O) -> Result, AutomergeError> { - self.doc.path_to_object(obj) - } - - fn keys>(&self, obj: O) -> Keys<'_, '_> { - self.doc.keys(obj) - } - - fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_> { - self.doc.keys_at(obj, heads) - } - - fn map_range, R: RangeBounds>( - &self, - obj: O, - range: R, - ) -> MapRange<'_, R> { - self.doc.map_range(obj, range) - } - - fn map_range_at, R: RangeBounds>( - &self, - obj: O, - range: R, - heads: &[ChangeHash], - ) -> MapRangeAt<'_, R> { - self.doc.map_range_at(obj, range, heads) - } - - fn list_range, R: RangeBounds>( - &self, - obj: O, - range: R, - ) -> ListRange<'_, R> { - self.doc.list_range(obj, range) - } - - fn list_range_at, R: RangeBounds>( - &self, - obj: O, - range: R, - heads: &[ChangeHash], - ) -> ListRangeAt<'_, R> { - self.doc.list_range_at(obj, range, heads) - } - - fn values>(&self, obj: O) -> Values<'_> { - self.doc.values(obj) - } - - fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_> { - self.doc.values_at(obj, heads) - } - - fn length>(&self, obj: O) -> usize { - self.doc.length(obj) - } - - fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { - self.doc.length_at(obj, heads) - } - 
- fn object_type>(&self, obj: O) -> Result { - self.doc.object_type(obj) - } - - fn text>(&self, obj: O) -> Result { - self.doc.text(obj) - } - - fn text_at>( - &self, - obj: O, - heads: &[ChangeHash], - ) -> Result { - self.doc.text_at(obj, heads) - } - - fn get, P: Into>( - &self, - obj: O, - prop: P, - ) -> Result, ExId)>, AutomergeError> { - self.doc.get(obj, prop) - } - - fn get_at, P: Into>( - &self, - obj: O, - prop: P, - heads: &[ChangeHash], - ) -> Result, ExId)>, AutomergeError> { - self.doc.get_at(obj, prop, heads) - } - - fn get_all, P: Into>( - &self, - obj: O, - prop: P, - ) -> Result, ExId)>, AutomergeError> { - self.doc.get_all(obj, prop) - } - - fn get_all_at, P: Into>( - &self, - obj: O, - prop: P, - heads: &[ChangeHash], - ) -> Result, ExId)>, AutomergeError> { - self.doc.get_all_at(obj, prop, heads) - } - - fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec { - self.doc.get_missing_deps(heads) - } - - fn get_change_by_hash(&self, hash: &ChangeHash) -> Option<&Change> { - self.doc.get_change_by_hash(hash) - } -} - -impl Transactable for AutoCommitWithObs { - fn pending_ops(&self) -> usize { - self.transaction - .as_ref() - .map(|(_, t)| t.pending_ops()) - .unwrap_or(0) - } - - fn put, P: Into, V: Into>( - &mut self, - obj: O, - prop: P, - value: V, - ) -> Result<(), AutomergeError> { - self.ensure_transaction_open(); - let (current, tx) = self.transaction.as_mut().unwrap(); - tx.put(&mut self.doc, current.observer(), obj.as_ref(), prop, value) - } - - fn put_object, P: Into>( - &mut self, - obj: O, - prop: P, - value: ObjType, - ) -> Result { - self.ensure_transaction_open(); - let (current, tx) = self.transaction.as_mut().unwrap(); - tx.put_object(&mut self.doc, current.observer(), obj.as_ref(), prop, value) - } - - fn insert, V: Into>( - &mut self, - obj: O, - index: usize, - value: V, - ) -> Result<(), AutomergeError> { - self.ensure_transaction_open(); - let (current, tx) = self.transaction.as_mut().unwrap(); - tx.insert( - &mut 
self.doc, - current.observer(), - obj.as_ref(), - index, - value, - ) - } - - fn insert_object>( - &mut self, - obj: O, - index: usize, - value: ObjType, - ) -> Result { - self.ensure_transaction_open(); - let (current, tx) = self.transaction.as_mut().unwrap(); - tx.insert_object( - &mut self.doc, - current.observer(), - obj.as_ref(), - index, - value, - ) - } - - fn increment, P: Into>( - &mut self, - obj: O, - prop: P, - value: i64, - ) -> Result<(), AutomergeError> { - self.ensure_transaction_open(); - let (current, tx) = self.transaction.as_mut().unwrap(); - tx.increment(&mut self.doc, current.observer(), obj.as_ref(), prop, value) - } - - fn delete, P: Into>( - &mut self, - obj: O, - prop: P, - ) -> Result<(), AutomergeError> { - self.ensure_transaction_open(); - let (current, tx) = self.transaction.as_mut().unwrap(); - tx.delete(&mut self.doc, current.observer(), obj.as_ref(), prop) - } - - /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert - /// the new elements - fn splice, V: IntoIterator>( - &mut self, - obj: O, - pos: usize, - del: usize, - vals: V, - ) -> Result<(), AutomergeError> { - self.ensure_transaction_open(); - let (current, tx) = self.transaction.as_mut().unwrap(); - tx.splice( - &mut self.doc, - current.observer(), - obj.as_ref(), - pos, - del, - vals, - ) - } - - fn splice_text>( - &mut self, - obj: O, - pos: usize, - del: usize, - text: &str, - ) -> Result<(), AutomergeError> { - self.ensure_transaction_open(); - let (current, tx) = self.transaction.as_mut().unwrap(); - tx.splice_text( - &mut self.doc, - current.observer(), - obj.as_ref(), - pos, - del, - text, - ) - } - - fn base_heads(&self) -> Vec { - self.doc.get_heads() - } -} - -// A wrapper we return from `AutoCommit::sync` to ensure that transactions are closed before we -// start syncing -struct SyncWrapper<'a, Obs: Observation> { - inner: &'a mut AutoCommitWithObs, -} - -impl<'a, Obs: Observation> SyncDoc for SyncWrapper<'a, Obs> { - fn 
generate_sync_message(&self, sync_state: &mut sync::State) -> Option { - self.inner.doc.generate_sync_message(sync_state) - } - - fn receive_sync_message( - &mut self, - sync_state: &mut sync::State, - message: sync::Message, - ) -> Result<(), AutomergeError> { - self.inner.ensure_transaction_closed(); - if let Some(observer) = self.inner.observation.observer() { - self.inner - .doc - .receive_sync_message_with(sync_state, message, observer) - } else { - self.inner.doc.receive_sync_message(sync_state, message) - } - } - - fn receive_sync_message_with( - &mut self, - sync_state: &mut sync::State, - message: sync::Message, - op_observer: &mut Obs2, - ) -> Result<(), AutomergeError> { - if let Some(our_observer) = self.inner.observation.observer() { - let mut composed = crate::op_observer::compose(our_observer, op_observer); - self.inner - .doc - .receive_sync_message_with(sync_state, message, &mut composed) - } else { - self.inner - .doc - .receive_sync_message_with(sync_state, message, op_observer) - } - } -} diff --git a/rust/automerge/src/automerge.rs b/rust/automerge/src/automerge.rs deleted file mode 100644 index 0dd82253..00000000 --- a/rust/automerge/src/automerge.rs +++ /dev/null @@ -1,1452 +0,0 @@ -use std::cmp::Ordering; -use std::collections::{BTreeSet, HashMap, HashSet}; -use std::fmt::Debug; -use std::num::NonZeroU64; -use std::ops::RangeBounds; - -use crate::change_graph::ChangeGraph; -use crate::columnar::Key as EncodedKey; -use crate::exid::ExId; -use crate::keys::Keys; -use crate::op_observer::{BranchableObserver, OpObserver}; -use crate::op_set::OpSet; -use crate::parents::Parents; -use crate::storage::{self, load, CompressConfig, VerificationMode}; -use crate::transaction::{ - self, CommitOptions, Failure, Observed, Success, Transaction, TransactionArgs, UnObserved, -}; -use crate::types::{ - ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ListEncoding, ObjId, Op, OpId, - OpType, ScalarValue, TextEncoding, Value, -}; -use crate::{ - 
query, AutomergeError, Change, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType, - Prop, ReadDoc, Values, -}; -use serde::Serialize; - -mod current_state; - -#[cfg(test)] -mod tests; - -#[derive(Debug, Clone, PartialEq)] -pub(crate) enum Actor { - Unused(ActorId), - Cached(usize), -} - -/// What to do when loading a document partially succeeds -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum OnPartialLoad { - /// Ignore the error and return the loaded changes - Ignore, - /// Fail the entire load - Error, -} - -/// An automerge document which does not manage transactions for you. -/// -/// ## Creating, loading, merging and forking documents -/// -/// A new document can be created with [`Self::new`], which will create a document with a random -/// [`ActorId`]. Existing documents can be loaded with [`Self::load`], or [`Self::load_with`]. -/// -/// If you have two documents and you want to merge the changes from one into the other you can use -/// [`Self::merge`] or [`Self::merge_with`]. -/// -/// If you have a document you want to split into two concurrent threads of execution you can use -/// [`Self::fork`]. If you want to split a document from ealier in its history you can use -/// [`Self::fork_at`]. -/// -/// ## Reading values -/// -/// [`Self`] implements [`ReadDoc`], which provides methods for reading values from the document. -/// -/// ## Modifying a document (Transactions) -/// -/// [`Automerge`] provides an interface for viewing and modifying automerge documents which does -/// not manage transactions for you. To create changes you use either [`Automerge::transaction`] or -/// [`Automerge::transact`] (or the `_with` variants). -/// -/// ## Sync -/// -/// This type implements [`crate::sync::SyncDoc`] -/// -/// ## Observers -/// -/// Many of the methods on this type have an `_with` or `_observed` variant -/// which allow you to pass in an [`OpObserver`] to observe any changes which -/// occur. 
-#[derive(Debug, Clone)] -pub struct Automerge { - /// The list of unapplied changes that are not causally ready. - queue: Vec, - /// The history of changes that form this document, topologically sorted too. - history: Vec, - /// Mapping from change hash to index into the history list. - history_index: HashMap, - /// Graph of changes - change_graph: ChangeGraph, - /// Mapping from actor index to list of seqs seen for them. - states: HashMap>, - /// Current dependencies of this document (heads hashes). - deps: HashSet, - /// Heads at the last save. - saved: Vec, - /// The set of operations that form this document. - ops: OpSet, - /// The current actor. - actor: Actor, - /// The maximum operation counter this document has seen. - max_op: u64, - text_encoding: TextEncoding, -} - -impl Automerge { - /// Create a new document with a random actor id. - pub fn new() -> Self { - Automerge { - queue: vec![], - history: vec![], - history_index: HashMap::new(), - change_graph: ChangeGraph::new(), - states: HashMap::new(), - ops: Default::default(), - deps: Default::default(), - saved: Default::default(), - actor: Actor::Unused(ActorId::random()), - max_op: 0, - text_encoding: Default::default(), - } - } - - pub(crate) fn ops_mut(&mut self) -> &mut OpSet { - &mut self.ops - } - - pub(crate) fn ops(&self) -> &OpSet { - &self.ops - } - - /// Whether this document has any operations - pub fn is_empty(&self) -> bool { - self.history.is_empty() && self.queue.is_empty() - } - - pub(crate) fn actor_id(&self) -> ActorId { - match &self.actor { - Actor::Unused(id) => id.clone(), - Actor::Cached(idx) => self.ops.m.actors[*idx].clone(), - } - } - - /// Remove the current actor from the opset if it has no ops - /// - /// If the current actor ID has no ops in the opset then remove it from the cache of actor IDs. - /// This us used when rolling back a transaction. 
If the rolled back ops are the only ops for - /// the current actor then we want to remove that actor from the opset so it doesn't end up in - /// any saved version of the document. - /// - /// # Panics - /// - /// If the last actor in the OpSet is not the actor ID of this document - pub(crate) fn rollback_last_actor(&mut self) { - if let Actor::Cached(actor_idx) = self.actor { - if self.states.get(&actor_idx).is_none() && self.ops.m.actors.len() > 0 { - assert!(self.ops.m.actors.len() == actor_idx + 1); - let actor = self.ops.m.actors.remove_last(); - self.actor = Actor::Unused(actor); - } - } - } - - pub(crate) fn text_encoding(&self) -> TextEncoding { - self.text_encoding - } - - /// Change the text encoding of this view of the document - /// - /// This is a cheap operation, it just changes the way indexes are calculated - pub fn with_encoding(mut self, encoding: TextEncoding) -> Self { - self.text_encoding = encoding; - self - } - - /// Set the actor id for this document. - pub fn with_actor(mut self, actor: ActorId) -> Self { - self.actor = Actor::Unused(actor); - self - } - - /// Set the actor id for this document. - pub fn set_actor(&mut self, actor: ActorId) -> &mut Self { - self.actor = Actor::Unused(actor); - self - } - - /// Get the current actor id of this document. - pub fn get_actor(&self) -> &ActorId { - match &self.actor { - Actor::Unused(actor) => actor, - Actor::Cached(index) => self.ops.m.actors.get(*index), - } - } - - pub(crate) fn get_actor_index(&mut self) -> usize { - match &mut self.actor { - Actor::Unused(actor) => { - let index = self - .ops - .m - .actors - .cache(std::mem::replace(actor, ActorId::from(&[][..]))); - self.actor = Actor::Cached(index); - index - } - Actor::Cached(index) => *index, - } - } - - /// Start a transaction. 
- pub fn transaction(&mut self) -> Transaction<'_, UnObserved> { - let args = self.transaction_args(); - Transaction::new(self, args, UnObserved) - } - - /// Start a transaction with an observer - pub fn transaction_with_observer( - &mut self, - op_observer: Obs, - ) -> Transaction<'_, Observed> { - let args = self.transaction_args(); - Transaction::new(self, args, Observed::new(op_observer)) - } - - pub(crate) fn transaction_args(&mut self) -> TransactionArgs { - let actor = self.get_actor_index(); - let seq = self.states.get(&actor).map_or(0, |v| v.len()) as u64 + 1; - let mut deps = self.get_heads(); - if seq > 1 { - let last_hash = self.get_hash(actor, seq - 1).unwrap(); - if !deps.contains(&last_hash) { - deps.push(last_hash); - } - } - // SAFETY: this unwrap is safe as we always add 1 - let start_op = NonZeroU64::new(self.max_op + 1).unwrap(); - - TransactionArgs { - actor_index: actor, - seq, - start_op, - deps, - } - } - - /// Run a transaction on this document in a closure, automatically handling commit or rollback - /// afterwards. - pub fn transact(&mut self, f: F) -> transaction::Result - where - F: FnOnce(&mut Transaction<'_, UnObserved>) -> Result, - { - self.transact_with_impl(None::<&dyn Fn(&O) -> CommitOptions>, f) - } - - /// Like [`Self::transact`] but with a function for generating the commit options. 
- pub fn transact_with(&mut self, c: C, f: F) -> transaction::Result - where - F: FnOnce(&mut Transaction<'_, UnObserved>) -> Result, - C: FnOnce(&O) -> CommitOptions, - { - self.transact_with_impl(Some(c), f) - } - - fn transact_with_impl( - &mut self, - c: Option, - f: F, - ) -> transaction::Result - where - F: FnOnce(&mut Transaction<'_, UnObserved>) -> Result, - C: FnOnce(&O) -> CommitOptions, - { - let mut tx = self.transaction(); - let result = f(&mut tx); - match result { - Ok(result) => { - let hash = if let Some(c) = c { - let commit_options = c(&result); - tx.commit_with(commit_options) - } else { - tx.commit() - }; - Ok(Success { - result, - hash, - op_observer: (), - }) - } - Err(error) => Err(Failure { - error, - cancelled: tx.rollback(), - }), - } - } - - /// Run a transaction on this document in a closure, observing ops with `Obs`, automatically handling commit or rollback - /// afterwards. - pub fn transact_observed(&mut self, f: F) -> transaction::Result - where - F: FnOnce(&mut Transaction<'_, Observed>) -> Result, - Obs: OpObserver + BranchableObserver + Default, - { - self.transact_observed_with_impl(None::<&dyn Fn(&O) -> CommitOptions>, f) - } - - /// Like [`Self::transact_observed`] but with a function for generating the commit options - pub fn transact_observed_with( - &mut self, - c: C, - f: F, - ) -> transaction::Result - where - F: FnOnce(&mut Transaction<'_, Observed>) -> Result, - C: FnOnce(&O) -> CommitOptions, - Obs: OpObserver + BranchableObserver + Default, - { - self.transact_observed_with_impl(Some(c), f) - } - - fn transact_observed_with_impl( - &mut self, - c: Option, - f: F, - ) -> transaction::Result - where - F: FnOnce(&mut Transaction<'_, Observed>) -> Result, - C: FnOnce(&O) -> CommitOptions, - Obs: OpObserver + BranchableObserver + Default, - { - let observer = Obs::default(); - let mut tx = self.transaction_with_observer(observer); - let result = f(&mut tx); - match result { - Ok(result) => { - let (obs, hash) = if let 
Some(c) = c { - let commit_options = c(&result); - tx.commit_with(commit_options) - } else { - tx.commit() - }; - Ok(Success { - result, - hash, - op_observer: obs, - }) - } - Err(error) => Err(Failure { - error, - cancelled: tx.rollback(), - }), - } - } - - /// Generate an empty change - /// - /// The main reason to do this is if you want to create a "merge commit", which is a change - /// that has all the current heads of the document as dependencies. - pub fn empty_commit(&mut self, opts: CommitOptions) -> ChangeHash { - let args = self.transaction_args(); - Transaction::empty(self, args, opts) - } - - /// Fork this document at the current point for use by a different actor. - /// - /// This will create a new actor ID for the forked document - pub fn fork(&self) -> Self { - let mut f = self.clone(); - f.set_actor(ActorId::random()); - f - } - - /// Fork this document at the given heads - /// - /// This will create a new actor ID for the forked document - pub fn fork_at(&self, heads: &[ChangeHash]) -> Result { - let mut seen = heads.iter().cloned().collect::>(); - let mut heads = heads.to_vec(); - let mut changes = vec![]; - while let Some(hash) = heads.pop() { - if let Some(idx) = self.history_index.get(&hash) { - let change = &self.history[*idx]; - for dep in change.deps() { - if !seen.contains(dep) { - heads.push(*dep); - } - } - changes.push(change); - seen.insert(hash); - } else { - return Err(AutomergeError::InvalidHash(hash)); - } - } - let mut f = Self::new(); - f.set_actor(ActorId::random()); - f.apply_changes(changes.into_iter().rev().cloned())?; - Ok(f) - } - - pub(crate) fn exid_to_obj(&self, id: &ExId) -> Result<(ObjId, ObjType), AutomergeError> { - match id { - ExId::Root => Ok((ObjId::root(), ObjType::Map)), - ExId::Id(ctr, actor, idx) => { - // do a direct get here b/c this could be foriegn and not be within the array - // bounds - let obj = if self.ops.m.actors.cache.get(*idx) == Some(actor) { - ObjId(OpId::new(*ctr, *idx)) - } else { - // FIXME 
- make a real error - let idx = self - .ops - .m - .actors - .lookup(actor) - .ok_or(AutomergeError::Fail)?; - ObjId(OpId::new(*ctr, idx)) - }; - if let Some(obj_type) = self.ops.object_type(&obj) { - Ok((obj, obj_type)) - } else { - Err(AutomergeError::NotAnObject) - } - } - } - } - - pub(crate) fn id_to_exid(&self, id: OpId) -> ExId { - self.ops.id_to_exid(id) - } - - /// Load a document. - pub fn load(data: &[u8]) -> Result { - Self::load_with::<()>(data, OnPartialLoad::Error, VerificationMode::Check, None) - } - - /// Load a document without verifying the head hashes - /// - /// This is useful for debugging as it allows you to examine a corrupted document. - pub fn load_unverified_heads(data: &[u8]) -> Result { - Self::load_with::<()>( - data, - OnPartialLoad::Error, - VerificationMode::DontCheck, - None, - ) - } - - /// Load a document with an observer - #[tracing::instrument(skip(data, observer), err)] - pub fn load_with( - data: &[u8], - on_error: OnPartialLoad, - mode: VerificationMode, - mut observer: Option<&mut Obs>, - ) -> Result { - if data.is_empty() { - tracing::trace!("no data, initializing empty document"); - return Ok(Self::new()); - } - tracing::trace!("loading first chunk"); - let (remaining, first_chunk) = storage::Chunk::parse(storage::parse::Input::new(data)) - .map_err(|e| load::Error::Parse(Box::new(e)))?; - if !first_chunk.checksum_valid() { - return Err(load::Error::BadChecksum.into()); - } - - let mut change: Option = None; - let mut am = match first_chunk { - storage::Chunk::Document(d) => { - tracing::trace!("first chunk is document chunk, inflating"); - let storage::load::Reconstructed { - max_op, - result: op_set, - changes, - heads, - } = storage::load::reconstruct_document(&d, mode, OpSet::builder()) - .map_err(|e| load::Error::InflateDocument(Box::new(e)))?; - let mut hashes_by_index = HashMap::new(); - let mut actor_to_history: HashMap> = HashMap::new(); - let mut change_graph = ChangeGraph::new(); - for (index, change) in 
changes.iter().enumerate() { - // SAFETY: This should be fine because we just constructed an opset containing - // all the changes - let actor_index = op_set.m.actors.lookup(change.actor_id()).unwrap(); - actor_to_history.entry(actor_index).or_default().push(index); - hashes_by_index.insert(index, change.hash()); - change_graph.add_change(change, actor_index)?; - } - let history_index = hashes_by_index.into_iter().map(|(k, v)| (v, k)).collect(); - Self { - queue: vec![], - history: changes, - history_index, - states: actor_to_history, - change_graph, - ops: op_set, - deps: heads.into_iter().collect(), - saved: Default::default(), - actor: Actor::Unused(ActorId::random()), - max_op, - text_encoding: Default::default(), - } - } - storage::Chunk::Change(stored_change) => { - tracing::trace!("first chunk is change chunk"); - change = Some( - Change::new_from_unverified(stored_change.into_owned(), None) - .map_err(|e| load::Error::InvalidChangeColumns(Box::new(e)))?, - ); - Self::new() - } - storage::Chunk::CompressedChange(stored_change, compressed) => { - tracing::trace!("first chunk is compressed change"); - change = Some( - Change::new_from_unverified( - stored_change.into_owned(), - Some(compressed.into_owned()), - ) - .map_err(|e| load::Error::InvalidChangeColumns(Box::new(e)))?, - ); - Self::new() - } - }; - tracing::trace!("loading change chunks"); - match load::load_changes(remaining.reset()) { - load::LoadedChanges::Complete(c) => { - am.apply_changes(change.into_iter().chain(c))?; - if !am.queue.is_empty() { - return Err(AutomergeError::MissingDeps); - } - } - load::LoadedChanges::Partial { error, .. } => { - if on_error == OnPartialLoad::Error { - return Err(error.into()); - } - } - } - if let Some(observer) = &mut observer { - current_state::observe_current_state(&am, *observer); - } - Ok(am) - } - - /// Load an incremental save of a document. - /// - /// Unlike `load` this imports changes into an existing document. 
It will work with both the - /// output of [`Self::save`] and [`Self::save_incremental`] - /// - /// The return value is the number of ops which were applied, this is not useful and will - /// change in future. - pub fn load_incremental(&mut self, data: &[u8]) -> Result { - self.load_incremental_with::<()>(data, None) - } - - /// Like [`Self::load_incremental`] but with an observer - pub fn load_incremental_with( - &mut self, - data: &[u8], - op_observer: Option<&mut Obs>, - ) -> Result { - if self.is_empty() { - let mut doc = - Self::load_with::<()>(data, OnPartialLoad::Ignore, VerificationMode::Check, None)?; - doc = doc - .with_encoding(self.text_encoding) - .with_actor(self.actor_id()); - if let Some(obs) = op_observer { - current_state::observe_current_state(&doc, obs); - } - *self = doc; - return Ok(self.ops.len()); - } - let changes = match load::load_changes(storage::parse::Input::new(data)) { - load::LoadedChanges::Complete(c) => c, - load::LoadedChanges::Partial { error, loaded, .. } => { - tracing::warn!(successful_chunks=loaded.len(), err=?error, "partial load"); - loaded - } - }; - let start = self.ops.len(); - self.apply_changes_with(changes, op_observer)?; - let delta = self.ops.len() - start; - Ok(delta) - } - - fn duplicate_seq(&self, change: &Change) -> bool { - let mut dup = false; - if let Some(actor_index) = self.ops.m.actors.lookup(change.actor_id()) { - if let Some(s) = self.states.get(&actor_index) { - dup = s.len() >= change.seq() as usize; - } - } - dup - } - - /// Apply changes to this document. - /// - /// This is idemptotent in the sense that if a change has already been applied it will be - /// ignored. 
- pub fn apply_changes( - &mut self, - changes: impl IntoIterator, - ) -> Result<(), AutomergeError> { - self.apply_changes_with::<_, ()>(changes, None) - } - - /// Like [`Self::apply_changes`] but with an observer - pub fn apply_changes_with, Obs: OpObserver>( - &mut self, - changes: I, - mut op_observer: Option<&mut Obs>, - ) -> Result<(), AutomergeError> { - // Record this so we can avoid observing each individual change and instead just observe - // the final state after all the changes have been applied. We can only do this for an - // empty document right now, once we have logic to produce the diffs between arbitrary - // states of the OpSet we can make this cleaner. - let empty_at_start = self.is_empty(); - for c in changes { - if !self.history_index.contains_key(&c.hash()) { - if self.duplicate_seq(&c) { - return Err(AutomergeError::DuplicateSeqNumber( - c.seq(), - c.actor_id().clone(), - )); - } - if self.is_causally_ready(&c) { - if empty_at_start { - self.apply_change::<()>(c, &mut None); - } else { - self.apply_change(c, &mut op_observer); - } - } else { - self.queue.push(c); - } - } - } - while let Some(c) = self.pop_next_causally_ready_change() { - if !self.history_index.contains_key(&c.hash()) { - if empty_at_start { - self.apply_change::<()>(c, &mut None); - } else { - self.apply_change(c, &mut op_observer); - } - } - } - if empty_at_start { - if let Some(observer) = &mut op_observer { - current_state::observe_current_state(self, *observer); - } - } - Ok(()) - } - - fn apply_change(&mut self, change: Change, observer: &mut Option<&mut Obs>) { - let ops = self.import_ops(&change); - self.update_history(change, ops.len()); - if let Some(observer) = observer { - for (obj, op) in ops { - self.insert_op_with_observer(&obj, op, *observer); - } - } else { - for (obj, op) in ops { - self.insert_op(&obj, op); - } - } - } - - fn is_causally_ready(&self, change: &Change) -> bool { - change - .deps() - .iter() - .all(|d| self.history_index.contains_key(d)) - } 
- - fn pop_next_causally_ready_change(&mut self) -> Option { - let mut index = 0; - while index < self.queue.len() { - if self.is_causally_ready(&self.queue[index]) { - return Some(self.queue.swap_remove(index)); - } - index += 1; - } - None - } - - fn import_ops(&mut self, change: &Change) -> Vec<(ObjId, Op)> { - let actor = self.ops.m.actors.cache(change.actor_id().clone()); - let mut actors = Vec::with_capacity(change.other_actor_ids().len() + 1); - actors.push(actor); - actors.extend( - change - .other_actor_ids() - .iter() - .map(|a| self.ops.m.actors.cache(a.clone())) - .collect::>(), - ); - change - .iter_ops() - .enumerate() - .map(|(i, c)| { - let id = OpId::new(change.start_op().get() + i as u64, actor); - let key = match &c.key { - EncodedKey::Prop(n) => Key::Map(self.ops.m.props.cache(n.to_string())), - EncodedKey::Elem(e) if e.is_head() => Key::Seq(ElemId::head()), - EncodedKey::Elem(ElemId(o)) => { - Key::Seq(ElemId(OpId::new(o.counter(), actors[o.actor()]))) - } - }; - let obj = if c.obj.is_root() { - ObjId::root() - } else { - ObjId(OpId::new( - c.obj.opid().counter(), - actors[c.obj.opid().actor()], - )) - }; - let pred = c - .pred - .iter() - .map(|p| OpId::new(p.counter(), actors[p.actor()])); - let pred = self.ops.m.sorted_opids(pred); - ( - obj, - Op { - id, - action: OpType::from_action_and_value(c.action, c.val), - key, - succ: Default::default(), - pred, - insert: c.insert, - }, - ) - }) - .collect() - } - - /// Takes all the changes in `other` which are not in `self` and applies them - pub fn merge(&mut self, other: &mut Self) -> Result, AutomergeError> { - self.merge_with::<()>(other, None) - } - - /// Takes all the changes in `other` which are not in `self` and applies them - pub fn merge_with( - &mut self, - other: &mut Self, - op_observer: Option<&mut Obs>, - ) -> Result, AutomergeError> { - // TODO: Make this fallible and figure out how to do this transactionally - let changes = self - .get_changes_added(other) - .into_iter() - 
.cloned() - .collect::>(); - tracing::trace!(changes=?changes.iter().map(|c| c.hash()).collect::>(), "merging new changes"); - self.apply_changes_with(changes, op_observer)?; - Ok(self.get_heads()) - } - - /// Save the entirety of this document in a compact form. - /// - /// This takes a mutable reference to self because it saves the heads of the last save so that - /// `save_incremental` can be used to produce only the changes since the last `save`. This API - /// will be changing in future. - pub fn save(&mut self) -> Vec { - let heads = self.get_heads(); - let c = self.history.iter(); - let bytes = crate::storage::save::save_document( - c, - self.ops.iter().map(|(objid, _, op)| (objid, op)), - &self.ops.m.actors, - &self.ops.m.props, - &heads, - None, - ); - self.saved = self.get_heads(); - bytes - } - - /// Save this document, but don't run it through DEFLATE afterwards - pub fn save_nocompress(&mut self) -> Vec { - let heads = self.get_heads(); - let c = self.history.iter(); - let bytes = crate::storage::save::save_document( - c, - self.ops.iter().map(|(objid, _, op)| (objid, op)), - &self.ops.m.actors, - &self.ops.m.props, - &heads, - Some(CompressConfig::None), - ); - self.saved = self.get_heads(); - bytes - } - - /// Save the changes since the last call to [Self::save`] - /// - /// The output of this will not be a compressed document format, but a series of individual - /// changes. This is useful if you know you have only made a small change since the last `save` - /// and you want to immediately send it somewhere (e.g. you've inserted a single character in a - /// text object). 
- pub fn save_incremental(&mut self) -> Vec { - let changes = self - .get_changes(self.saved.as_slice()) - .expect("Should only be getting changes using previously saved heads"); - let mut bytes = vec![]; - for c in changes { - bytes.extend(c.raw_bytes()); - } - if !bytes.is_empty() { - self.saved = self.get_heads() - } - bytes - } - - /// Filter the changes down to those that are not transitive dependencies of the heads. - /// - /// Thus a graph with these heads has not seen the remaining changes. - pub(crate) fn filter_changes( - &self, - heads: &[ChangeHash], - changes: &mut BTreeSet, - ) -> Result<(), AutomergeError> { - let heads = heads - .iter() - .filter(|hash| self.history_index.contains_key(hash)) - .copied() - .collect::>(); - - self.change_graph.remove_ancestors(changes, &heads); - - Ok(()) - } - - /// Get the changes since `have_deps` in this document using a clock internally. - fn get_changes_clock(&self, have_deps: &[ChangeHash]) -> Result, AutomergeError> { - // get the clock for the given deps - let clock = self.clock_at(have_deps); - - // get the documents current clock - - let mut change_indexes: Vec = Vec::new(); - // walk the state from the given deps clock and add them into the vec - for (actor_index, actor_changes) in &self.states { - if let Some(clock_data) = clock.get_for_actor(actor_index) { - // find the change in this actors sequence of changes that corresponds to the max_op - // recorded for them in the clock - change_indexes.extend(&actor_changes[clock_data.seq as usize..]); - } else { - change_indexes.extend(&actor_changes[..]); - } - } - - // ensure the changes are still in sorted order - change_indexes.sort_unstable(); - - Ok(change_indexes - .into_iter() - .map(|i| &self.history[i]) - .collect()) - } - - /// Get the last change this actor made to the document. 
- pub fn get_last_local_change(&self) -> Option<&Change> { - return self - .history - .iter() - .rev() - .find(|c| c.actor_id() == self.get_actor()); - } - - fn clock_at(&self, heads: &[ChangeHash]) -> Clock { - self.change_graph.clock_for_heads(heads) - } - - fn get_hash(&self, actor: usize, seq: u64) -> Result { - self.states - .get(&actor) - .and_then(|v| v.get(seq as usize - 1)) - .and_then(|&i| self.history.get(i)) - .map(|c| c.hash()) - .ok_or(AutomergeError::InvalidSeq(seq)) - } - - pub(crate) fn update_history(&mut self, change: Change, num_ops: usize) -> usize { - self.max_op = std::cmp::max(self.max_op, change.start_op().get() + num_ops as u64 - 1); - - self.update_deps(&change); - - let history_index = self.history.len(); - - let actor_index = self.ops.m.actors.cache(change.actor_id().clone()); - self.states - .entry(actor_index) - .or_default() - .push(history_index); - - self.history_index.insert(change.hash(), history_index); - self.change_graph - .add_change(&change, actor_index) - .expect("Change's deps should already be in the document"); - - self.history.push(change); - - history_index - } - - fn update_deps(&mut self, change: &Change) { - for d in change.deps() { - self.deps.remove(d); - } - self.deps.insert(change.hash()); - } - - #[doc(hidden)] - pub fn import(&self, s: &str) -> Result<(ExId, ObjType), AutomergeError> { - if s == "_root" { - Ok((ExId::Root, ObjType::Map)) - } else { - let n = s - .find('@') - .ok_or_else(|| AutomergeError::InvalidObjIdFormat(s.to_owned()))?; - let counter = s[0..n] - .parse() - .map_err(|_| AutomergeError::InvalidObjIdFormat(s.to_owned()))?; - let actor = ActorId::from(hex::decode(&s[(n + 1)..]).unwrap()); - let actor = self - .ops - .m - .actors - .lookup(&actor) - .ok_or_else(|| AutomergeError::InvalidObjId(s.to_owned()))?; - let obj = ExId::Id(counter, self.ops.m.actors.cache[actor].clone(), actor); - let obj_type = self - .object_type(&obj) - .map_err(|_| AutomergeError::InvalidObjId(s.to_owned()))?; - 
Ok((obj, obj_type)) - } - } - - pub(crate) fn to_string(&self, id: E) -> String { - match id.export() { - Export::Id(id) => format!("{}@{}", id.counter(), self.ops.m.actors[id.actor()]), - Export::Prop(index) => self.ops.m.props[index].clone(), - Export::Special(s) => s, - } - } - - pub fn dump(&self) { - log!( - " {:12} {:12} {:12} {:12} {:12} {:12}", - "id", - "obj", - "key", - "value", - "pred", - "succ" - ); - for (obj, _, op) in self.ops.iter() { - let id = self.to_string(op.id); - let obj = self.to_string(obj); - let key = match op.key { - Key::Map(n) => self.ops.m.props[n].clone(), - Key::Seq(n) => self.to_string(n), - }; - let value: String = match &op.action { - OpType::Put(value) => format!("{}", value), - OpType::Make(obj) => format!("make({})", obj), - OpType::Increment(obj) => format!("inc({})", obj), - OpType::Delete => format!("del{}", 0), - }; - let pred: Vec<_> = op.pred.iter().map(|id| self.to_string(*id)).collect(); - let succ: Vec<_> = op.succ.into_iter().map(|id| self.to_string(*id)).collect(); - log!( - " {:12} {:12} {:12} {:12} {:12?} {:12?}", - id, - obj, - key, - value, - pred, - succ - ); - } - } - - /// Return a graphviz representation of the opset. 
- /// - /// # Arguments - /// - /// * objects: An optional list of object IDs to display, if not specified all objects are - /// visualised - #[cfg(feature = "optree-visualisation")] - pub fn visualise_optree(&self, objects: Option>) -> String { - let objects = objects.map(|os| { - os.iter() - .filter_map(|o| self.exid_to_obj(o).ok()) - .map(|o| o.0) - .collect() - }); - self.ops.visualise(objects) - } - - pub(crate) fn insert_op(&mut self, obj: &ObjId, op: Op) -> Op { - let q = self.ops.search(obj, query::SeekOp::new(&op)); - - let succ = q.succ; - let pos = q.pos; - - self.ops.add_succ(obj, &succ, &op); - - if !op.is_delete() { - self.ops.insert(pos, obj, op.clone()); - } - op - } - - pub(crate) fn insert_op_with_observer( - &mut self, - obj: &ObjId, - op: Op, - observer: &mut Obs, - ) -> Op { - let obj_type = self.ops.object_type(obj); - let encoding = obj_type - .map(|o| ListEncoding::new(o, self.text_encoding)) - .unwrap_or_default(); - let q = self - .ops - .search(obj, query::SeekOpWithPatch::new(&op, encoding)); - - let query::SeekOpWithPatch { - pos, - succ, - seen, - last_width, - values, - had_value_before, - .. 
- } = q; - - let ex_obj = self.ops.id_to_exid(obj.0); - - let key = match op.key { - Key::Map(index) => self.ops.m.props[index].clone().into(), - Key::Seq(_) => seen.into(), - }; - - if op.insert { - if obj_type == Some(ObjType::Text) { - observer.splice_text(self, ex_obj, seen, op.to_str()); - } else { - let value = (op.value(), self.ops.id_to_exid(op.id)); - observer.insert(self, ex_obj, seen, value); - } - } else if op.is_delete() { - if let Some(winner) = &values.last() { - let value = (winner.value(), self.ops.id_to_exid(winner.id)); - let conflict = values.len() > 1; - observer.expose(self, ex_obj, key, value, conflict); - } else if had_value_before { - match key { - Prop::Map(k) => observer.delete_map(self, ex_obj, &k), - Prop::Seq(index) => observer.delete_seq(self, ex_obj, index, last_width), - } - } - } else if let Some(value) = op.get_increment_value() { - // only observe this increment if the counter is visible, i.e. the counter's - // create op is in the values - //if values.iter().any(|value| op.pred.contains(&value.id)) { - if values - .last() - .map(|value| op.pred.contains(&value.id)) - .unwrap_or_default() - { - // we have observed the value - observer.increment(self, ex_obj, key, (value, self.ops.id_to_exid(op.id))); - } - } else { - let just_conflict = values - .last() - .map(|value| self.ops.m.lamport_cmp(op.id, value.id) != Ordering::Greater) - .unwrap_or(false); - let value = (op.value(), self.ops.id_to_exid(op.id)); - if op.is_list_op() && !had_value_before { - observer.insert(self, ex_obj, seen, value); - } else if just_conflict { - observer.flag_conflict(self, ex_obj, key); - } else { - let conflict = !values.is_empty(); - observer.put(self, ex_obj, key, value, conflict); - } - } - - self.ops.add_succ(obj, &succ, &op); - - if !op.is_delete() { - self.ops.insert(pos, obj, op.clone()); - } - - op - } - - /// Get the heads of this document. 
- pub fn get_heads(&self) -> Vec { - let mut deps: Vec<_> = self.deps.iter().copied().collect(); - deps.sort_unstable(); - deps - } - - pub fn get_changes(&self, have_deps: &[ChangeHash]) -> Result, AutomergeError> { - self.get_changes_clock(have_deps) - } - - /// Get changes in `other` that are not in `self - pub fn get_changes_added<'a>(&self, other: &'a Self) -> Vec<&'a Change> { - // Depth-first traversal from the heads through the dependency graph, - // until we reach a change that is already present in other - let mut stack: Vec<_> = other.get_heads(); - tracing::trace!(their_heads=?stack, "finding changes to merge"); - let mut seen_hashes = HashSet::new(); - let mut added_change_hashes = Vec::new(); - while let Some(hash) = stack.pop() { - if !seen_hashes.contains(&hash) && self.get_change_by_hash(&hash).is_none() { - seen_hashes.insert(hash); - added_change_hashes.push(hash); - if let Some(change) = other.get_change_by_hash(&hash) { - stack.extend(change.deps()); - } - } - } - // Return those changes in the reverse of the order in which the depth-first search - // found them. This is not necessarily a topological sort, but should usually be close. 
- added_change_hashes.reverse(); - added_change_hashes - .into_iter() - .filter_map(|h| other.get_change_by_hash(&h)) - .collect() - } -} - -impl ReadDoc for Automerge { - fn parents>(&self, obj: O) -> Result, AutomergeError> { - let (obj_id, _) = self.exid_to_obj(obj.as_ref())?; - Ok(self.ops.parents(obj_id)) - } - - fn path_to_object>(&self, obj: O) -> Result, AutomergeError> { - Ok(self.parents(obj.as_ref().clone())?.path()) - } - - fn keys>(&self, obj: O) -> Keys<'_, '_> { - if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { - let iter_keys = self.ops.keys(obj); - Keys::new(self, iter_keys) - } else { - Keys::new(self, None) - } - } - - fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_> { - if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { - let clock = self.clock_at(heads); - return KeysAt::new(self, self.ops.keys_at(obj, clock)); - } - KeysAt::new(self, None) - } - - fn map_range, R: RangeBounds>( - &self, - obj: O, - range: R, - ) -> MapRange<'_, R> { - if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { - MapRange::new(self, self.ops.map_range(obj, range)) - } else { - MapRange::new(self, None) - } - } - - fn map_range_at, R: RangeBounds>( - &self, - obj: O, - range: R, - heads: &[ChangeHash], - ) -> MapRangeAt<'_, R> { - if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { - let clock = self.clock_at(heads); - let iter_range = self.ops.map_range_at(obj, range, clock); - return MapRangeAt::new(self, iter_range); - } - MapRangeAt::new(self, None) - } - - fn list_range, R: RangeBounds>( - &self, - obj: O, - range: R, - ) -> ListRange<'_, R> { - if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { - ListRange::new(self, self.ops.list_range(obj, range)) - } else { - ListRange::new(self, None) - } - } - - fn list_range_at, R: RangeBounds>( - &self, - obj: O, - range: R, - heads: &[ChangeHash], - ) -> ListRangeAt<'_, R> { - if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { - let clock = self.clock_at(heads); - let iter_range = 
self.ops.list_range_at(obj, range, clock); - return ListRangeAt::new(self, iter_range); - } - ListRangeAt::new(self, None) - } - - fn values>(&self, obj: O) -> Values<'_> { - if let Ok((obj, obj_type)) = self.exid_to_obj(obj.as_ref()) { - if obj_type.is_sequence() { - Values::new(self, self.ops.list_range(obj, ..)) - } else { - Values::new(self, self.ops.map_range(obj, ..)) - } - } else { - Values::empty(self) - } - } - - fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_> { - if let Ok((obj, obj_type)) = self.exid_to_obj(obj.as_ref()) { - let clock = self.clock_at(heads); - match obj_type { - ObjType::Map | ObjType::Table => { - let iter_range = self.ops.map_range_at(obj, .., clock); - Values::new(self, iter_range) - } - ObjType::List | ObjType::Text => { - let iter_range = self.ops.list_range_at(obj, .., clock); - Values::new(self, iter_range) - } - } - } else { - Values::empty(self) - } - } - - fn length>(&self, obj: O) -> usize { - if let Ok((inner_obj, obj_type)) = self.exid_to_obj(obj.as_ref()) { - if obj_type == ObjType::Map || obj_type == ObjType::Table { - self.keys(obj).count() - } else { - let encoding = ListEncoding::new(obj_type, self.text_encoding); - self.ops.search(&inner_obj, query::Len::new(encoding)).len - } - } else { - 0 - } - } - - fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { - if let Ok((inner_obj, obj_type)) = self.exid_to_obj(obj.as_ref()) { - let clock = self.clock_at(heads); - if obj_type == ObjType::Map || obj_type == ObjType::Table { - self.keys_at(obj, heads).count() - } else { - let encoding = ListEncoding::new(obj_type, self.text_encoding); - self.ops - .search(&inner_obj, query::LenAt::new(clock, encoding)) - .len - } - } else { - 0 - } - } - - fn object_type>(&self, obj: O) -> Result { - let (_, obj_type) = self.exid_to_obj(obj.as_ref())?; - Ok(obj_type) - } - - fn text>(&self, obj: O) -> Result { - let obj = self.exid_to_obj(obj.as_ref())?.0; - let query = self.ops.search(&obj, 
query::ListVals::new()); - let mut buffer = String::new(); - for q in &query.ops { - buffer.push_str(q.to_str()); - } - Ok(buffer) - } - - fn text_at>( - &self, - obj: O, - heads: &[ChangeHash], - ) -> Result { - let obj = self.exid_to_obj(obj.as_ref())?.0; - let clock = self.clock_at(heads); - let query = self.ops.search(&obj, query::ListValsAt::new(clock)); - let mut buffer = String::new(); - for q in &query.ops { - if let OpType::Put(ScalarValue::Str(s)) = &q.action { - buffer.push_str(s); - } else { - buffer.push('\u{fffc}'); - } - } - Ok(buffer) - } - - fn get, P: Into>( - &self, - obj: O, - prop: P, - ) -> Result, ExId)>, AutomergeError> { - Ok(self.get_all(obj, prop.into())?.last().cloned()) - } - - fn get_at, P: Into>( - &self, - obj: O, - prop: P, - heads: &[ChangeHash], - ) -> Result, ExId)>, AutomergeError> { - Ok(self.get_all_at(obj, prop, heads)?.last().cloned()) - } - - fn get_all, P: Into>( - &self, - obj: O, - prop: P, - ) -> Result, ExId)>, AutomergeError> { - let obj = self.exid_to_obj(obj.as_ref())?.0; - let mut result = match prop.into() { - Prop::Map(p) => { - let prop = self.ops.m.props.lookup(&p); - if let Some(p) = prop { - self.ops - .search(&obj, query::Prop::new(p)) - .ops - .into_iter() - .map(|o| (o.value(), self.id_to_exid(o.id))) - .collect() - } else { - vec![] - } - } - Prop::Seq(n) => { - let obj_type = self.ops.object_type(&obj); - let encoding = obj_type - .map(|o| ListEncoding::new(o, self.text_encoding)) - .unwrap_or_default(); - self.ops - .search(&obj, query::Nth::new(n, encoding)) - .ops - .into_iter() - .map(|o| (o.value(), self.id_to_exid(o.id))) - .collect() - } - }; - result.sort_by(|a, b| b.1.cmp(&a.1)); - Ok(result) - } - - fn get_all_at, P: Into>( - &self, - obj: O, - prop: P, - heads: &[ChangeHash], - ) -> Result, ExId)>, AutomergeError> { - let prop = prop.into(); - let obj = self.exid_to_obj(obj.as_ref())?.0; - let clock = self.clock_at(heads); - let result = match prop { - Prop::Map(p) => { - let prop = 
self.ops.m.props.lookup(&p); - if let Some(p) = prop { - self.ops - .search(&obj, query::PropAt::new(p, clock)) - .ops - .into_iter() - .map(|o| (o.clone_value(), self.id_to_exid(o.id))) - .collect() - } else { - vec![] - } - } - Prop::Seq(n) => { - let obj_type = self.ops.object_type(&obj); - let encoding = obj_type - .map(|o| ListEncoding::new(o, self.text_encoding)) - .unwrap_or_default(); - self.ops - .search(&obj, query::NthAt::new(n, clock, encoding)) - .ops - .into_iter() - .map(|o| (o.clone_value(), self.id_to_exid(o.id))) - .collect() - } - }; - Ok(result) - } - - fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec { - let in_queue: HashSet<_> = self.queue.iter().map(|change| change.hash()).collect(); - let mut missing = HashSet::new(); - - for head in self.queue.iter().flat_map(|change| change.deps()) { - if !self.history_index.contains_key(head) { - missing.insert(head); - } - } - - for head in heads { - if !self.history_index.contains_key(head) { - missing.insert(head); - } - } - - let mut missing = missing - .into_iter() - .filter(|hash| !in_queue.contains(hash)) - .copied() - .collect::>(); - missing.sort(); - missing - } - - fn get_change_by_hash(&self, hash: &ChangeHash) -> Option<&Change> { - self.history_index - .get(hash) - .and_then(|index| self.history.get(*index)) - } -} - -impl Default for Automerge { - fn default() -> Self { - Self::new() - } -} - -#[derive(Serialize, Debug, Clone, PartialEq)] -pub(crate) struct SpanInfo { - pub(crate) id: ExId, - pub(crate) time: i64, - pub(crate) start: usize, - pub(crate) end: usize, - #[serde(rename = "type")] - pub(crate) span_type: String, - pub(crate) value: ScalarValue, -} diff --git a/rust/automerge/src/automerge/current_state.rs b/rust/automerge/src/automerge/current_state.rs deleted file mode 100644 index 3f7f4afc..00000000 --- a/rust/automerge/src/automerge/current_state.rs +++ /dev/null @@ -1,915 +0,0 @@ -use std::{borrow::Cow, collections::HashSet, iter::Peekable}; - -use 
itertools::Itertools; - -use crate::{ - types::{ElemId, Key, ListEncoding, ObjId, Op, OpId}, - ObjType, OpObserver, OpType, ScalarValue, Value, -}; - -/// Traverse the "current" state of the document, notifying `observer` -/// -/// The "current" state of the document is the set of visible operations. This function will -/// traverse that set of operations and call the corresponding methods on the `observer` as it -/// encounters values. The `observer` methods will be called in the order in which they appear in -/// the document. That is to say that the observer will be notified of parent objects before the -/// objects they contain and elements of a sequence will be notified in the order they occur. -/// -/// Due to only notifying of visible operations the observer will only be called with `put`, -/// `insert`, and `splice`, operations. -pub(super) fn observe_current_state(doc: &crate::Automerge, observer: &mut O) { - // The OpSet already exposes operations in the order they appear in the document. - // `OpSet::iter_objs` iterates over the objects in causal order, this means that parent objects - // will always appear before their children. Furthermore, the operations within each object are - // ordered by key (which means by their position in a sequence for sequences). - // - // Effectively then we iterate over each object, then we group the operations in the object by - // key and for each key find the visible operations for that key. Then we notify the observer - // for each of those visible operations. 
- let mut visible_objs = HashSet::new(); - visible_objs.insert(ObjId::root()); - for (obj, typ, ops) in doc.ops().iter_objs() { - if !visible_objs.contains(obj) { - continue; - } - let ops_by_key = ops.group_by(|o| o.key); - let actions = ops_by_key - .into_iter() - .flat_map(|(key, key_ops)| key_actions(key, key_ops)); - if typ == ObjType::Text && !observer.text_as_seq() { - track_new_objs_and_notify( - &mut visible_objs, - doc, - obj, - typ, - observer, - text_actions(actions), - ) - } else if typ == ObjType::List { - track_new_objs_and_notify( - &mut visible_objs, - doc, - obj, - typ, - observer, - list_actions(actions), - ) - } else { - track_new_objs_and_notify(&mut visible_objs, doc, obj, typ, observer, actions) - } - } -} - -fn track_new_objs_and_notify, O: OpObserver>( - visible_objs: &mut HashSet, - doc: &crate::Automerge, - obj: &ObjId, - typ: ObjType, - observer: &mut O, - actions: I, -) { - let exid = doc.id_to_exid(obj.0); - for action in actions { - if let Some(obj) = action.made_object() { - visible_objs.insert(obj); - } - action.notify_observer(doc, &exid, obj, typ, observer); - } -} - -trait Action { - /// Notify an observer of whatever this action does - fn notify_observer( - self, - doc: &crate::Automerge, - exid: &crate::ObjId, - obj: &ObjId, - typ: ObjType, - observer: &mut O, - ); - - /// If this action created an object, return the ID of that object - fn made_object(&self) -> Option; -} - -fn key_actions<'a, I: Iterator>( - key: Key, - key_ops: I, -) -> impl Iterator> { - #[derive(Clone)] - enum CurrentOp<'a> { - Put { - value: Value<'a>, - id: OpId, - conflicted: bool, - }, - Insert(Value<'a>, OpId), - } - let current_ops = key_ops - .filter(|o| o.visible()) - .filter_map(|o| match o.action { - OpType::Make(obj_type) => { - let value = Value::Object(obj_type); - if o.insert { - Some(CurrentOp::Insert(value, o.id)) - } else { - Some(CurrentOp::Put { - value, - id: o.id, - conflicted: false, - }) - } - } - OpType::Put(ref value) => { - let 
value = Value::Scalar(Cow::Borrowed(value)); - if o.insert { - Some(CurrentOp::Insert(value, o.id)) - } else { - Some(CurrentOp::Put { - value, - id: o.id, - conflicted: false, - }) - } - } - _ => None, - }); - current_ops - .coalesce(|previous, current| match (previous, current) { - (CurrentOp::Put { .. }, CurrentOp::Put { value, id, .. }) => Ok(CurrentOp::Put { - value, - id, - conflicted: true, - }), - (previous, current) => Err((previous, current)), - }) - .map(move |op| match op { - CurrentOp::Put { - value, - id, - conflicted, - } => SimpleAction::Put { - prop: key, - tagged_value: (value, id), - conflict: conflicted, - }, - CurrentOp::Insert(val, id) => SimpleAction::Insert { - elem_id: ElemId(id), - tagged_value: (val, id), - }, - }) -} - -/// Either a "put" or "insert" action. i.e. not splicing for text values -enum SimpleAction<'a> { - Put { - prop: Key, - tagged_value: (Value<'a>, OpId), - conflict: bool, - }, - Insert { - elem_id: ElemId, - tagged_value: (Value<'a>, OpId), - }, -} - -impl<'a> Action for SimpleAction<'a> { - fn notify_observer( - self, - doc: &crate::Automerge, - exid: &crate::ObjId, - obj: &ObjId, - typ: ObjType, - observer: &mut O, - ) { - let encoding = match typ { - ObjType::Text => ListEncoding::Text(doc.text_encoding()), - _ => ListEncoding::List, - }; - match self { - Self::Put { - prop, - tagged_value, - conflict, - } => { - let tagged_value = (tagged_value.0, doc.id_to_exid(tagged_value.1)); - let prop = doc.ops().export_key(*obj, prop, encoding).unwrap(); - observer.put(doc, exid.clone(), prop, tagged_value, conflict); - } - Self::Insert { - elem_id, - tagged_value: (value, opid), - } => { - let index = doc - .ops() - .search(obj, crate::query::ElemIdPos::new(elem_id, encoding)) - .index() - .unwrap(); - let tagged_value = (value, doc.id_to_exid(opid)); - observer.insert(doc, doc.id_to_exid(obj.0), index, tagged_value); - } - } - } - - fn made_object(&self) -> Option { - match self { - Self::Put { - tagged_value: 
(Value::Object(_), id), - .. - } => Some((*id).into()), - Self::Insert { - tagged_value: (Value::Object(_), id), - .. - } => Some((*id).into()), - _ => None, - } - } -} - -/// An `Action` which splices for text values -enum TextAction<'a> { - Action(SimpleAction<'a>), - Splice { start: ElemId, chars: String }, -} - -impl<'a> Action for TextAction<'a> { - fn notify_observer( - self, - doc: &crate::Automerge, - exid: &crate::ObjId, - obj: &ObjId, - typ: ObjType, - observer: &mut O, - ) { - match self { - Self::Action(action) => action.notify_observer(doc, exid, obj, typ, observer), - Self::Splice { start, chars } => { - let index = doc - .ops() - .search( - obj, - crate::query::ElemIdPos::new( - start, - ListEncoding::Text(doc.text_encoding()), - ), - ) - .index() - .unwrap(); - observer.splice_text(doc, doc.id_to_exid(obj.0), index, chars.as_str()); - } - } - } - - fn made_object(&self) -> Option { - match self { - Self::Action(action) => action.made_object(), - _ => None, - } - } -} - -fn list_actions<'a, I: Iterator>>( - actions: I, -) -> impl Iterator> { - actions.map(|a| match a { - SimpleAction::Put { - prop: Key::Seq(elem_id), - tagged_value, - .. - } => SimpleAction::Insert { - elem_id, - tagged_value, - }, - a => a, - }) -} - -/// Condense consecutive `SimpleAction::Insert` actions into one `TextAction::Splice` -fn text_actions<'a, I>(actions: I) -> impl Iterator> -where - I: Iterator>, -{ - TextActions { - ops: actions.peekable(), - } -} - -struct TextActions<'a, I: Iterator>> { - ops: Peekable, -} - -impl<'a, I: Iterator>> Iterator for TextActions<'a, I> { - type Item = TextAction<'a>; - - fn next(&mut self) -> Option { - if let Some(SimpleAction::Insert { .. }) = self.ops.peek() { - let (start, value) = match self.ops.next() { - Some(SimpleAction::Insert { - tagged_value: (value, opid), - .. 
- }) => (opid, value), - _ => unreachable!(), - }; - let mut chars = match value { - Value::Scalar(Cow::Borrowed(ScalarValue::Str(s))) => s.to_string(), - _ => "\u{fffc}".to_string(), - }; - while let Some(SimpleAction::Insert { .. }) = self.ops.peek() { - if let Some(SimpleAction::Insert { - tagged_value: (value, _), - .. - }) = self.ops.next() - { - match value { - Value::Scalar(Cow::Borrowed(ScalarValue::Str(s))) => chars.push_str(s), - _ => chars.push('\u{fffc}'), - } - } - } - Some(TextAction::Splice { - start: ElemId(start), - chars, - }) - } else { - self.ops.next().map(TextAction::Action) - } - } -} - -#[cfg(test)] -mod tests { - use std::{borrow::Cow, fs}; - - use crate::{transaction::Transactable, Automerge, ObjType, OpObserver, Prop, ReadDoc, Value}; - - // Observer ops often carry a "tagged value", which is a value and the OpID of the op which - // created that value. For a lot of values (i.e. any scalar value) we don't care about the - // opid. This type implements `PartialEq` for the `Untagged` variant by ignoring the tag, which - // allows us to express tests which don't care about the tag. 
- #[derive(Clone, Debug)] - enum ObservedValue { - Tagged(crate::Value<'static>, crate::ObjId), - Untagged(crate::Value<'static>), - } - - impl<'a> From<(Value<'a>, crate::ObjId)> for ObservedValue { - fn from(value: (Value<'a>, crate::ObjId)) -> Self { - Self::Tagged(value.0.into_owned(), value.1) - } - } - - impl PartialEq for ObservedValue { - fn eq(&self, other: &ObservedValue) -> bool { - match (self, other) { - (Self::Tagged(v1, o1), Self::Tagged(v2, o2)) => equal_vals(v1, v2) && o1 == o2, - (Self::Untagged(v1), Self::Untagged(v2)) => equal_vals(v1, v2), - (Self::Tagged(v1, _), Self::Untagged(v2)) => equal_vals(v1, v2), - (Self::Untagged(v1), Self::Tagged(v2, _)) => equal_vals(v1, v2), - } - } - } - - /// Consider counters equal if they have the same current value - fn equal_vals(v1: &Value<'_>, v2: &Value<'_>) -> bool { - match (v1, v2) { - (Value::Scalar(v1), Value::Scalar(v2)) => match (v1.as_ref(), v2.as_ref()) { - (crate::ScalarValue::Counter(c1), crate::ScalarValue::Counter(c2)) => { - c1.current == c2.current - } - _ => v1 == v2, - }, - _ => v1 == v2, - } - } - - #[derive(Debug, Clone, PartialEq)] - enum ObserverCall { - Put { - obj: crate::ObjId, - prop: Prop, - value: ObservedValue, - conflict: bool, - }, - Insert { - obj: crate::ObjId, - index: usize, - value: ObservedValue, - }, - SpliceText { - obj: crate::ObjId, - index: usize, - chars: String, - }, - } - - // A Vec is pretty hard to look at in a test failure. 
This wrapper prints the - // calls out in a nice table so it's easier to see what's different - #[derive(Clone, PartialEq)] - struct Calls(Vec); - - impl std::fmt::Debug for Calls { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let mut table = prettytable::Table::new(); - table.set_format(*prettytable::format::consts::FORMAT_NO_BORDER_LINE_SEPARATOR); - table.set_titles(prettytable::row![ - "Op", "Object", "Property", "Value", "Conflict" - ]); - for call in &self.0 { - match call { - ObserverCall::Put { - obj, - prop, - value, - conflict, - } => { - table.add_row(prettytable::row![ - "Put", - format!("{}", obj), - prop, - match value { - ObservedValue::Tagged(v, o) => format!("{} ({})", v, o), - ObservedValue::Untagged(v) => format!("{}", v), - }, - conflict - ]); - } - ObserverCall::Insert { obj, index, value } => { - table.add_row(prettytable::row![ - "Insert", - format!("{}", obj), - index, - match value { - ObservedValue::Tagged(v, o) => format!("{} ({})", v, o), - ObservedValue::Untagged(v) => format!("{}", v), - }, - "" - ]); - } - ObserverCall::SpliceText { obj, index, chars } => { - table.add_row(prettytable::row![ - "SpliceText", - format!("{}", obj), - index, - chars, - "" - ]); - } - } - } - let mut out = Vec::new(); - table.print(&mut out).unwrap(); - write!(f, "\n{}\n", String::from_utf8(out).unwrap()) - } - } - - struct ObserverStub { - ops: Vec, - text_as_seq: bool, - } - - impl ObserverStub { - fn new() -> Self { - Self { - ops: Vec::new(), - text_as_seq: true, - } - } - - fn new_text_v2() -> Self { - Self { - ops: Vec::new(), - text_as_seq: false, - } - } - } - - impl OpObserver for ObserverStub { - fn insert( - &mut self, - _doc: &R, - objid: crate::ObjId, - index: usize, - tagged_value: (crate::Value<'_>, crate::ObjId), - ) { - self.ops.push(ObserverCall::Insert { - obj: objid, - index, - value: tagged_value.into(), - }); - } - - fn splice_text( - &mut self, - _doc: &R, - objid: crate::ObjId, - index: usize, - value: 
&str, - ) { - self.ops.push(ObserverCall::SpliceText { - obj: objid, - index, - chars: value.to_string(), - }); - } - - fn put( - &mut self, - _doc: &R, - objid: crate::ObjId, - prop: crate::Prop, - tagged_value: (crate::Value<'_>, crate::ObjId), - conflict: bool, - ) { - self.ops.push(ObserverCall::Put { - obj: objid, - prop, - value: tagged_value.into(), - conflict, - }); - } - - fn expose( - &mut self, - _doc: &R, - _objid: crate::ObjId, - _prop: crate::Prop, - _tagged_value: (crate::Value<'_>, crate::ObjId), - _conflict: bool, - ) { - panic!("expose not expected"); - } - - fn increment( - &mut self, - _doc: &R, - _objid: crate::ObjId, - _prop: crate::Prop, - _tagged_value: (i64, crate::ObjId), - ) { - panic!("increment not expected"); - } - - fn delete_map(&mut self, _doc: &R, _objid: crate::ObjId, _key: &str) { - panic!("delete not expected"); - } - - fn delete_seq( - &mut self, - _doc: &R, - _objid: crate::ObjId, - _index: usize, - _num: usize, - ) { - panic!("delete not expected"); - } - - fn text_as_seq(&self) -> bool { - self.text_as_seq - } - } - - #[test] - fn basic_test() { - let mut doc = crate::AutoCommit::new(); - doc.put(crate::ROOT, "key", "value").unwrap(); - let map = doc.put_object(crate::ROOT, "map", ObjType::Map).unwrap(); - doc.put(&map, "nested_key", "value").unwrap(); - let list = doc.put_object(crate::ROOT, "list", ObjType::List).unwrap(); - doc.insert(&list, 0, "value").unwrap(); - let text = doc.put_object(crate::ROOT, "text", ObjType::Text).unwrap(); - doc.insert(&text, 0, "a").unwrap(); - - let mut obs = ObserverStub::new(); - super::observe_current_state(doc.document(), &mut obs); - - assert_eq!( - Calls(obs.ops), - Calls(vec![ - ObserverCall::Put { - obj: crate::ROOT, - prop: "key".into(), - value: ObservedValue::Untagged("value".into()), - conflict: false, - }, - ObserverCall::Put { - obj: crate::ROOT, - prop: "list".into(), - value: ObservedValue::Tagged(Value::Object(ObjType::List), list.clone()), - conflict: false, - }, - 
ObserverCall::Put { - obj: crate::ROOT, - prop: "map".into(), - value: ObservedValue::Tagged(Value::Object(ObjType::Map), map.clone()), - conflict: false, - }, - ObserverCall::Put { - obj: crate::ROOT, - prop: "text".into(), - value: ObservedValue::Tagged(Value::Object(ObjType::Text), text.clone()), - conflict: false, - }, - ObserverCall::Put { - obj: map.clone(), - prop: "nested_key".into(), - value: ObservedValue::Untagged("value".into()), - conflict: false, - }, - ObserverCall::Insert { - obj: list, - index: 0, - value: ObservedValue::Untagged("value".into()), - }, - ObserverCall::Insert { - obj: text, - index: 0, - value: ObservedValue::Untagged("a".into()), - }, - ]) - ); - } - - #[test] - fn test_deleted_ops_omitted() { - let mut doc = crate::AutoCommit::new(); - doc.put(crate::ROOT, "key", "value").unwrap(); - doc.delete(crate::ROOT, "key").unwrap(); - let map = doc.put_object(crate::ROOT, "map", ObjType::Map).unwrap(); - doc.put(&map, "nested_key", "value").unwrap(); - doc.delete(&map, "nested_key").unwrap(); - let list = doc.put_object(crate::ROOT, "list", ObjType::List).unwrap(); - doc.insert(&list, 0, "value").unwrap(); - doc.delete(&list, 0).unwrap(); - let text = doc.put_object(crate::ROOT, "text", ObjType::Text).unwrap(); - doc.insert(&text, 0, "a").unwrap(); - doc.delete(&text, 0).unwrap(); - - doc.put_object(crate::ROOT, "deleted_map", ObjType::Map) - .unwrap(); - doc.delete(crate::ROOT, "deleted_map").unwrap(); - doc.put_object(crate::ROOT, "deleted_list", ObjType::List) - .unwrap(); - doc.delete(crate::ROOT, "deleted_list").unwrap(); - doc.put_object(crate::ROOT, "deleted_text", ObjType::Text) - .unwrap(); - doc.delete(crate::ROOT, "deleted_text").unwrap(); - - let mut obs = ObserverStub::new(); - super::observe_current_state(doc.document(), &mut obs); - - assert_eq!( - Calls(obs.ops), - Calls(vec![ - ObserverCall::Put { - obj: crate::ROOT, - prop: "list".into(), - value: ObservedValue::Tagged(Value::Object(ObjType::List), list.clone()), - 
conflict: false, - }, - ObserverCall::Put { - obj: crate::ROOT, - prop: "map".into(), - value: ObservedValue::Tagged(Value::Object(ObjType::Map), map.clone()), - conflict: false, - }, - ObserverCall::Put { - obj: crate::ROOT, - prop: "text".into(), - value: ObservedValue::Tagged(Value::Object(ObjType::Text), text.clone()), - conflict: false, - }, - ]) - ); - } - - #[test] - fn test_text_spliced() { - let mut doc = crate::AutoCommit::new(); - let text = doc.put_object(crate::ROOT, "text", ObjType::Text).unwrap(); - doc.insert(&text, 0, "a").unwrap(); - doc.splice_text(&text, 1, 0, "bcdef").unwrap(); - doc.splice_text(&text, 2, 2, "g").unwrap(); - - let mut obs = ObserverStub::new_text_v2(); - super::observe_current_state(doc.document(), &mut obs); - - assert_eq!( - Calls(obs.ops), - Calls(vec![ - ObserverCall::Put { - obj: crate::ROOT, - prop: "text".into(), - value: ObservedValue::Tagged(Value::Object(ObjType::Text), text.clone()), - conflict: false, - }, - ObserverCall::SpliceText { - obj: text, - index: 0, - chars: "abgef".to_string() - } - ]) - ); - } - - #[test] - fn test_counters() { - let actor1 = crate::ActorId::from("aa".as_bytes()); - let actor2 = crate::ActorId::from("bb".as_bytes()); - let mut doc = crate::AutoCommit::new().with_actor(actor2); - - let mut doc2 = doc.fork().with_actor(actor1); - doc2.put(crate::ROOT, "key", "someval").unwrap(); - - doc.put(crate::ROOT, "key", crate::ScalarValue::Counter(1.into())) - .unwrap(); - doc.increment(crate::ROOT, "key", 2).unwrap(); - doc.increment(crate::ROOT, "key", 3).unwrap(); - - doc.merge(&mut doc2).unwrap(); - - let mut obs = ObserverStub::new_text_v2(); - super::observe_current_state(doc.document(), &mut obs); - - assert_eq!( - Calls(obs.ops), - Calls(vec![ObserverCall::Put { - obj: crate::ROOT, - prop: "key".into(), - value: ObservedValue::Untagged(Value::Scalar(Cow::Owned( - crate::ScalarValue::Counter(6.into()) - ))), - conflict: true, - },]) - ); - } - - #[test] - fn test_multiple_list_insertions() { 
- let mut doc = crate::AutoCommit::new(); - - let list = doc.put_object(crate::ROOT, "list", ObjType::List).unwrap(); - doc.insert(&list, 0, 1).unwrap(); - doc.insert(&list, 1, 2).unwrap(); - - let mut obs = ObserverStub::new_text_v2(); - super::observe_current_state(doc.document(), &mut obs); - - assert_eq!( - Calls(obs.ops), - Calls(vec![ - ObserverCall::Put { - obj: crate::ROOT, - prop: "list".into(), - value: ObservedValue::Tagged(Value::Object(ObjType::List), list.clone()), - conflict: false, - }, - ObserverCall::Insert { - obj: list.clone(), - index: 0, - value: ObservedValue::Untagged(1.into()), - }, - ObserverCall::Insert { - obj: list, - index: 1, - value: ObservedValue::Untagged(2.into()), - }, - ]) - ); - } - - #[test] - fn test_concurrent_insertions_at_same_index() { - let mut doc = crate::AutoCommit::new().with_actor(crate::ActorId::from("aa".as_bytes())); - - let list = doc.put_object(crate::ROOT, "list", ObjType::List).unwrap(); - - let mut doc2 = doc.fork().with_actor(crate::ActorId::from("bb".as_bytes())); - - doc.insert(&list, 0, 1).unwrap(); - doc2.insert(&list, 0, 2).unwrap(); - doc.merge(&mut doc2).unwrap(); - - let mut obs = ObserverStub::new_text_v2(); - super::observe_current_state(doc.document(), &mut obs); - - assert_eq!( - Calls(obs.ops), - Calls(vec![ - ObserverCall::Put { - obj: crate::ROOT, - prop: "list".into(), - value: ObservedValue::Tagged(Value::Object(ObjType::List), list.clone()), - conflict: false, - }, - ObserverCall::Insert { - obj: list.clone(), - index: 0, - value: ObservedValue::Untagged(2.into()), - }, - ObserverCall::Insert { - obj: list, - index: 1, - value: ObservedValue::Untagged(1.into()), - }, - ]) - ); - } - - #[test] - fn test_insert_objects() { - let mut doc = crate::AutoCommit::new().with_actor(crate::ActorId::from("aa".as_bytes())); - - let list = doc.put_object(crate::ROOT, "list", ObjType::List).unwrap(); - - let map = doc.insert_object(&list, 0, ObjType::Map).unwrap(); - doc.put(&map, "key", 
"value").unwrap(); - - let mut obs = ObserverStub::new_text_v2(); - super::observe_current_state(doc.document(), &mut obs); - - assert_eq!( - Calls(obs.ops), - Calls(vec![ - ObserverCall::Put { - obj: crate::ROOT, - prop: "list".into(), - value: ObservedValue::Tagged(Value::Object(ObjType::List), list.clone()), - conflict: false, - }, - ObserverCall::Insert { - obj: list.clone(), - index: 0, - value: ObservedValue::Tagged(Value::Object(ObjType::Map), map.clone()), - }, - ObserverCall::Put { - obj: map, - prop: "key".into(), - value: ObservedValue::Untagged("value".into()), - conflict: false - }, - ]) - ); - } - - #[test] - fn test_insert_and_update() { - let mut doc = crate::AutoCommit::new(); - - let list = doc.put_object(crate::ROOT, "list", ObjType::List).unwrap(); - - doc.insert(&list, 0, "one").unwrap(); - doc.insert(&list, 1, "two").unwrap(); - doc.put(&list, 0, "three").unwrap(); - doc.put(&list, 1, "four").unwrap(); - - let mut obs = ObserverStub::new_text_v2(); - super::observe_current_state(doc.document(), &mut obs); - - assert_eq!( - Calls(obs.ops), - Calls(vec![ - ObserverCall::Put { - obj: crate::ROOT, - prop: "list".into(), - value: ObservedValue::Tagged(Value::Object(ObjType::List), list.clone()), - conflict: false, - }, - ObserverCall::Insert { - obj: list.clone(), - index: 0, - value: ObservedValue::Untagged("three".into()), - }, - ObserverCall::Insert { - obj: list.clone(), - index: 1, - value: ObservedValue::Untagged("four".into()), - }, - ]) - ); - } - - #[test] - fn test_load_changes() { - fn fixture(name: &str) -> Vec { - fs::read("./tests/fixtures/".to_owned() + name).unwrap() - } - - let mut obs = ObserverStub::new(); - let _doc = Automerge::load_with( - &fixture("counter_value_is_ok.automerge"), - crate::OnPartialLoad::Error, - crate::storage::VerificationMode::Check, - Some(&mut obs), - ); - - assert_eq!( - Calls(obs.ops), - Calls(vec![ObserverCall::Put { - obj: crate::ROOT, - prop: "a".into(), - value: 
ObservedValue::Untagged(crate::ScalarValue::Counter(2000.into()).into()), - conflict: false, - },]) - ); - } -} diff --git a/rust/automerge/src/automerge/tests.rs b/rust/automerge/src/automerge/tests.rs deleted file mode 100644 index 3511c4ed..00000000 --- a/rust/automerge/src/automerge/tests.rs +++ /dev/null @@ -1,1554 +0,0 @@ -use itertools::Itertools; -use pretty_assertions::assert_eq; - -use super::*; -use crate::op_tree::B; -use crate::transaction::Transactable; -use crate::*; -use std::convert::TryInto; - -#[test] -fn insert_op() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - doc.set_actor(ActorId::random()); - let mut tx = doc.transaction(); - tx.put(ROOT, "hello", "world")?; - tx.get(ROOT, "hello")?; - tx.commit(); - Ok(()) -} - -#[test] -fn test_set() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - // setting a scalar value shouldn't return an opid as no object was created. - tx.put(ROOT, "a", 1)?; - - // setting the same value shouldn't return an opid as there is no change. - tx.put(ROOT, "a", 1)?; - - assert_eq!(tx.pending_ops(), 1); - - let map = tx.put_object(ROOT, "b", ObjType::Map)?; - // object already exists at b but setting a map again overwrites it so we get an opid. 
- tx.put(map, "a", 2)?; - - tx.put_object(ROOT, "b", ObjType::Map)?; - - assert_eq!(tx.pending_ops(), 4); - let map = tx.get(ROOT, "b").unwrap().unwrap().1; - assert_eq!(tx.get(&map, "a")?, None); - - tx.commit(); - Ok(()) -} - -#[test] -fn test_list() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - doc.set_actor(ActorId::random()); - let mut tx = doc.transaction(); - let list_id = tx.put_object(ROOT, "items", ObjType::List)?; - tx.put(ROOT, "zzz", "zzzval")?; - assert!(tx.get(ROOT, "items")?.unwrap().1 == list_id); - tx.insert(&list_id, 0, "a")?; - tx.insert(&list_id, 0, "b")?; - tx.insert(&list_id, 2, "c")?; - tx.insert(&list_id, 1, "d")?; - assert!(tx.get(&list_id, 0)?.unwrap().0 == "b".into()); - assert!(tx.get(&list_id, 1)?.unwrap().0 == "d".into()); - assert!(tx.get(&list_id, 2)?.unwrap().0 == "a".into()); - assert!(tx.get(&list_id, 3)?.unwrap().0 == "c".into()); - assert!(tx.length(&list_id) == 4); - tx.commit(); - doc.save(); - Ok(()) -} - -#[test] -fn test_del() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - doc.set_actor(ActorId::random()); - let mut tx = doc.transaction(); - tx.put(ROOT, "xxx", "xxx")?; - assert!(tx.get(ROOT, "xxx")?.is_some()); - tx.delete(ROOT, "xxx")?; - assert!(tx.get(ROOT, "xxx")?.is_none()); - tx.commit(); - Ok(()) -} - -#[test] -fn test_inc() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - tx.put(ROOT, "counter", ScalarValue::counter(10))?; - assert!(tx.get(ROOT, "counter")?.unwrap().0 == Value::counter(10)); - tx.increment(ROOT, "counter", 10)?; - assert!(tx.get(ROOT, "counter")?.unwrap().0 == Value::counter(20)); - tx.increment(ROOT, "counter", -5)?; - assert!(tx.get(ROOT, "counter")?.unwrap().0 == Value::counter(15)); - tx.commit(); - Ok(()) -} - -#[test] -fn test_save_incremental() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - - let mut tx = doc.transaction(); - tx.put(ROOT, "foo", 1)?; - tx.commit(); - - let 
save1 = doc.save(); - - let mut tx = doc.transaction(); - tx.put(ROOT, "bar", 2)?; - tx.commit(); - - let save2 = doc.save_incremental(); - - let mut tx = doc.transaction(); - tx.put(ROOT, "baz", 3)?; - tx.commit(); - - let save3 = doc.save_incremental(); - - let mut save_a: Vec = vec![]; - save_a.extend(&save1); - save_a.extend(&save2); - save_a.extend(&save3); - - assert!(doc.save_incremental().is_empty()); - - let save_b = doc.save(); - - assert!(save_b.len() < save_a.len()); - - let mut doc_a = Automerge::load(&save_a)?; - let mut doc_b = Automerge::load(&save_b)?; - - assert!(doc_a.get_all(ROOT, "baz")? == doc_b.get_all(ROOT, "baz")?); - - assert!(doc_a.save() == doc_b.save()); - - Ok(()) -} - -#[test] -fn test_save_text() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - let text = tx.put_object(ROOT, "text", ObjType::Text)?; - tx.commit(); - let heads1 = doc.get_heads(); - let mut tx = doc.transaction(); - tx.splice_text(&text, 0, 0, "hello world")?; - tx.commit(); - let heads2 = doc.get_heads(); - let mut tx = doc.transaction(); - tx.splice_text(&text, 6, 0, "big bad ")?; - tx.commit(); - let heads3 = doc.get_heads(); - - assert!(&doc.text(&text)? == "hello big bad world"); - assert!(&doc.text_at(&text, &heads1)?.is_empty()); - assert!(&doc.text_at(&text, &heads2)? == "hello world"); - assert!(&doc.text_at(&text, &heads3)? 
== "hello big bad world"); - - Ok(()) -} - -#[test] -fn test_props_vals_at() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - doc.set_actor("aaaa".try_into().unwrap()); - let mut tx = doc.transaction(); - tx.put(ROOT, "prop1", "val1")?; - tx.commit(); - doc.get_heads(); - let heads1 = doc.get_heads(); - let mut tx = doc.transaction(); - tx.put(ROOT, "prop1", "val2")?; - tx.commit(); - doc.get_heads(); - let heads2 = doc.get_heads(); - let mut tx = doc.transaction(); - tx.put(ROOT, "prop2", "val3")?; - tx.commit(); - doc.get_heads(); - let heads3 = doc.get_heads(); - let mut tx = doc.transaction(); - tx.delete(ROOT, "prop1")?; - tx.commit(); - doc.get_heads(); - let heads4 = doc.get_heads(); - let mut tx = doc.transaction(); - tx.put(ROOT, "prop3", "val4")?; - tx.commit(); - doc.get_heads(); - let heads5 = doc.get_heads(); - assert!(doc.keys_at(ROOT, &heads1).collect_vec() == vec!["prop1".to_owned()]); - assert_eq!(doc.length_at(ROOT, &heads1), 1); - assert!(doc.get_at(ROOT, "prop1", &heads1)?.unwrap().0 == Value::str("val1")); - assert!(doc.get_at(ROOT, "prop2", &heads1)?.is_none()); - assert!(doc.get_at(ROOT, "prop3", &heads1)?.is_none()); - - assert!(doc.keys_at(ROOT, &heads2).collect_vec() == vec!["prop1".to_owned()]); - assert_eq!(doc.length_at(ROOT, &heads2), 1); - assert!(doc.get_at(ROOT, "prop1", &heads2)?.unwrap().0 == Value::str("val2")); - assert!(doc.get_at(ROOT, "prop2", &heads2)?.is_none()); - assert!(doc.get_at(ROOT, "prop3", &heads2)?.is_none()); - - assert!( - doc.keys_at(ROOT, &heads3).collect_vec() == vec!["prop1".to_owned(), "prop2".to_owned()] - ); - assert_eq!(doc.length_at(ROOT, &heads3), 2); - assert!(doc.get_at(ROOT, "prop1", &heads3)?.unwrap().0 == Value::str("val2")); - assert!(doc.get_at(ROOT, "prop2", &heads3)?.unwrap().0 == Value::str("val3")); - assert!(doc.get_at(ROOT, "prop3", &heads3)?.is_none()); - - assert!(doc.keys_at(ROOT, &heads4).collect_vec() == vec!["prop2".to_owned()]); - assert_eq!(doc.length_at(ROOT, 
&heads4), 1); - assert!(doc.get_at(ROOT, "prop1", &heads4)?.is_none()); - assert!(doc.get_at(ROOT, "prop2", &heads4)?.unwrap().0 == Value::str("val3")); - assert!(doc.get_at(ROOT, "prop3", &heads4)?.is_none()); - - assert!( - doc.keys_at(ROOT, &heads5).collect_vec() == vec!["prop2".to_owned(), "prop3".to_owned()] - ); - assert_eq!(doc.length_at(ROOT, &heads5), 2); - assert_eq!(doc.length(ROOT), 2); - assert!(doc.get_at(ROOT, "prop1", &heads5)?.is_none()); - assert!(doc.get_at(ROOT, "prop2", &heads5)?.unwrap().0 == Value::str("val3")); - assert!(doc.get_at(ROOT, "prop3", &heads5)?.unwrap().0 == Value::str("val4")); - - assert_eq!(doc.keys_at(ROOT, &[]).count(), 0); - assert_eq!(doc.length_at(ROOT, &[]), 0); - assert!(doc.get_at(ROOT, "prop1", &[])?.is_none()); - assert!(doc.get_at(ROOT, "prop2", &[])?.is_none()); - assert!(doc.get_at(ROOT, "prop3", &[])?.is_none()); - Ok(()) -} - -#[test] -fn test_len_at() -> Result<(), AutomergeError> { - let mut doc = Automerge::new(); - doc.set_actor("aaaa".try_into().unwrap()); - - let mut tx = doc.transaction(); - let list = tx.put_object(ROOT, "list", ObjType::List)?; - tx.commit(); - let heads1 = doc.get_heads(); - - let mut tx = doc.transaction(); - tx.insert(&list, 0, 10)?; - tx.commit(); - let heads2 = doc.get_heads(); - - let mut tx = doc.transaction(); - tx.put(&list, 0, 20)?; - tx.insert(&list, 0, 30)?; - tx.commit(); - let heads3 = doc.get_heads(); - - let mut tx = doc.transaction(); - tx.put(&list, 1, 40)?; - tx.insert(&list, 1, 50)?; - tx.commit(); - let heads4 = doc.get_heads(); - - let mut tx = doc.transaction(); - tx.delete(&list, 2)?; - tx.commit(); - let heads5 = doc.get_heads(); - - let mut tx = doc.transaction(); - tx.delete(&list, 0)?; - tx.commit(); - let heads6 = doc.get_heads(); - - assert!(doc.length_at(&list, &heads1) == 0); - assert!(doc.get_at(&list, 0, &heads1)?.is_none()); - - assert!(doc.length_at(&list, &heads2) == 1); - assert!(doc.get_at(&list, 0, &heads2)?.unwrap().0 == Value::int(10)); - - 
assert!(doc.length_at(&list, &heads3) == 2); - assert!(doc.get_at(&list, 0, &heads3)?.unwrap().0 == Value::int(30)); - assert!(doc.get_at(&list, 1, &heads3)?.unwrap().0 == Value::int(20)); - - assert!(doc.length_at(&list, &heads4) == 3); - assert!(doc.get_at(&list, 0, &heads4)?.unwrap().0 == Value::int(30)); - assert!(doc.get_at(&list, 1, &heads4)?.unwrap().0 == Value::int(50)); - assert!(doc.get_at(&list, 2, &heads4)?.unwrap().0 == Value::int(40)); - - assert!(doc.length_at(&list, &heads5) == 2); - assert!(doc.get_at(&list, 0, &heads5)?.unwrap().0 == Value::int(30)); - assert!(doc.get_at(&list, 1, &heads5)?.unwrap().0 == Value::int(50)); - - assert!(doc.length_at(&list, &heads6) == 1); - assert!(doc.length(&list) == 1); - assert!(doc.get_at(&list, 0, &heads6)?.unwrap().0 == Value::int(50)); - - Ok(()) -} - -#[test] -fn keys_iter_map() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 3).unwrap(); - tx.put(ROOT, "b", 4).unwrap(); - tx.put(ROOT, "c", 5).unwrap(); - tx.put(ROOT, "d", 6).unwrap(); - tx.commit(); - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 7).unwrap(); - tx.commit(); - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 8).unwrap(); - tx.put(ROOT, "d", 9).unwrap(); - tx.commit(); - assert_eq!(doc.keys(ROOT).count(), 4); - - let mut keys = doc.keys(ROOT); - assert_eq!(keys.next(), Some("a".into())); - assert_eq!(keys.next(), Some("b".into())); - assert_eq!(keys.next(), Some("c".into())); - assert_eq!(keys.next(), Some("d".into())); - assert_eq!(keys.next(), None); - - let mut keys = doc.keys(ROOT); - assert_eq!(keys.next_back(), Some("d".into())); - assert_eq!(keys.next_back(), Some("c".into())); - assert_eq!(keys.next_back(), Some("b".into())); - assert_eq!(keys.next_back(), Some("a".into())); - assert_eq!(keys.next_back(), None); - - let mut keys = doc.keys(ROOT); - assert_eq!(keys.next(), Some("a".into())); - assert_eq!(keys.next_back(), Some("d".into())); - assert_eq!(keys.next_back(), 
Some("c".into())); - assert_eq!(keys.next_back(), Some("b".into())); - assert_eq!(keys.next_back(), None); - - let mut keys = doc.keys(ROOT); - assert_eq!(keys.next_back(), Some("d".into())); - assert_eq!(keys.next(), Some("a".into())); - assert_eq!(keys.next(), Some("b".into())); - assert_eq!(keys.next(), Some("c".into())); - assert_eq!(keys.next(), None); - let keys = doc.keys(ROOT); - assert_eq!(keys.collect::>(), vec!["a", "b", "c", "d"]); -} - -#[test] -fn keys_iter_seq() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - let list = tx.put_object(ROOT, "list", ObjType::List).unwrap(); - tx.insert(&list, 0, 3).unwrap(); - tx.insert(&list, 1, 4).unwrap(); - tx.insert(&list, 2, 5).unwrap(); - tx.insert(&list, 3, 6).unwrap(); - tx.commit(); - let mut tx = doc.transaction(); - tx.put(&list, 0, 7).unwrap(); - tx.commit(); - let mut tx = doc.transaction(); - tx.put(&list, 0, 8).unwrap(); - tx.put(&list, 3, 9).unwrap(); - tx.commit(); - let actor = doc.get_actor(); - assert_eq!(doc.keys(&list).count(), 4); - - let mut keys = doc.keys(&list); - assert_eq!(keys.next(), Some(format!("2@{}", actor))); - assert_eq!(keys.next(), Some(format!("3@{}", actor))); - assert_eq!(keys.next(), Some(format!("4@{}", actor))); - assert_eq!(keys.next(), Some(format!("5@{}", actor))); - assert_eq!(keys.next(), None); - - let mut keys = doc.keys(&list); - assert_eq!(keys.next_back(), Some(format!("5@{}", actor))); - assert_eq!(keys.next_back(), Some(format!("4@{}", actor))); - assert_eq!(keys.next_back(), Some(format!("3@{}", actor))); - assert_eq!(keys.next_back(), Some(format!("2@{}", actor))); - assert_eq!(keys.next_back(), None); - - let mut keys = doc.keys(&list); - assert_eq!(keys.next(), Some(format!("2@{}", actor))); - assert_eq!(keys.next_back(), Some(format!("5@{}", actor))); - assert_eq!(keys.next_back(), Some(format!("4@{}", actor))); - assert_eq!(keys.next_back(), Some(format!("3@{}", actor))); - assert_eq!(keys.next_back(), None); - - let mut keys = 
doc.keys(&list); - assert_eq!(keys.next_back(), Some(format!("5@{}", actor))); - assert_eq!(keys.next(), Some(format!("2@{}", actor))); - assert_eq!(keys.next(), Some(format!("3@{}", actor))); - assert_eq!(keys.next(), Some(format!("4@{}", actor))); - assert_eq!(keys.next(), None); - - let keys = doc.keys(&list); - assert_eq!( - keys.collect::>(), - vec![ - format!("2@{}", actor), - format!("3@{}", actor), - format!("4@{}", actor), - format!("5@{}", actor) - ] - ); -} - -#[test] -fn range_iter_map() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 3).unwrap(); - tx.put(ROOT, "b", 4).unwrap(); - tx.put(ROOT, "c", 5).unwrap(); - tx.put(ROOT, "d", 6).unwrap(); - tx.commit(); - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 7).unwrap(); - tx.commit(); - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 8).unwrap(); - tx.put(ROOT, "d", 9).unwrap(); - tx.commit(); - let actor = doc.get_actor(); - assert_eq!(doc.map_range(ROOT, ..).count(), 4); - - let mut range = doc.map_range(ROOT, "b".to_owned().."d".into()); - assert_eq!( - range.next(), - Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) - ); - assert_eq!( - range.next(), - Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) - ); - assert_eq!(range.next(), None); - - let mut range = doc.map_range(ROOT, "b".to_owned()..="d".into()); - assert_eq!( - range.next(), - Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) - ); - assert_eq!( - range.next(), - Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) - ); - assert_eq!( - range.next(), - Some(("d", 9.into(), ExId::Id(7, actor.clone(), 0))) - ); - assert_eq!(range.next(), None); - - let mut range = doc.map_range(ROOT, ..="c".to_owned()); - assert_eq!( - range.next(), - Some(("a", 8.into(), ExId::Id(6, actor.clone(), 0))) - ); - assert_eq!( - range.next(), - Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) - ); - assert_eq!( - range.next(), - Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) - ); - 
assert_eq!(range.next(), None); - - let range = doc.map_range(ROOT, "a".to_owned()..); - assert_eq!( - range.collect::>(), - vec![ - ("a", 8.into(), ExId::Id(6, actor.clone(), 0)), - ("b", 4.into(), ExId::Id(2, actor.clone(), 0)), - ("c", 5.into(), ExId::Id(3, actor.clone(), 0)), - ("d", 9.into(), ExId::Id(7, actor.clone(), 0)), - ] - ); -} - -#[test] -fn map_range_back_and_forth_single() { - let mut doc = AutoCommit::new(); - let actor = doc.get_actor().clone(); - - doc.put(ROOT, "1", "a").unwrap(); - doc.put(ROOT, "2", "b").unwrap(); - doc.put(ROOT, "3", "c").unwrap(); - - let mut range_all = doc.map_range(ROOT, ..); - assert_eq!( - range_all.next(), - Some(("1", "a".into(), ExId::Id(1, actor.clone(), 0))) - ); - assert_eq!( - range_all.next_back(), - Some(("3", "c".into(), ExId::Id(3, actor.clone(), 0))) - ); - assert_eq!( - range_all.next_back(), - Some(("2", "b".into(), ExId::Id(2, actor.clone(), 0))) - ); - assert_eq!(range_all.next_back(), None); - assert_eq!(range_all.next(), None); - - let mut range_all = doc.map_range(ROOT, ..); - assert_eq!( - range_all.next(), - Some(("1", "a".into(), ExId::Id(1, actor.clone(), 0))) - ); - assert_eq!( - range_all.next_back(), - Some(("3", "c".into(), ExId::Id(3, actor.clone(), 0))) - ); - assert_eq!( - range_all.next(), - Some(("2", Value::str("b"), ExId::Id(2, actor.clone(), 0))) - ); - assert_eq!(range_all.next_back(), None); - assert_eq!(range_all.next(), None); - - let mut range_all = doc.map_range(ROOT, ..); - assert_eq!( - range_all.next(), - Some(("1", "a".into(), ExId::Id(1, actor.clone(), 0))) - ); - assert_eq!( - range_all.next(), - Some(("2", "b".into(), ExId::Id(2, actor.clone(), 0))) - ); - assert_eq!( - range_all.next(), - Some(("3", "c".into(), ExId::Id(3, actor.clone(), 0))) - ); - assert_eq!(range_all.next_back(), None); - assert_eq!(range_all.next(), None); - - let mut range_all = doc.map_range(ROOT, ..); - assert_eq!( - range_all.next_back(), - Some(("3", "c".into(), ExId::Id(3, actor.clone(), 0))) - 
); - assert_eq!( - range_all.next_back(), - Some(("2", "b".into(), ExId::Id(2, actor.clone(), 0))) - ); - assert_eq!( - range_all.next_back(), - Some(("1", "a".into(), ExId::Id(1, actor, 0))) - ); - assert_eq!(range_all.next_back(), None); - assert_eq!(range_all.next(), None); -} - -#[test] -fn map_range_back_and_forth_double() { - let mut doc1 = AutoCommit::new(); - doc1.set_actor(ActorId::from([0])); - - doc1.put(ROOT, "1", "a").unwrap(); - doc1.put(ROOT, "2", "b").unwrap(); - doc1.put(ROOT, "3", "c").unwrap(); - - // actor 2 should win in all conflicts here - let mut doc2 = AutoCommit::new(); - doc1.set_actor(ActorId::from([1])); - let actor2 = doc2.get_actor().clone(); - doc2.put(ROOT, "1", "aa").unwrap(); - doc2.put(ROOT, "2", "bb").unwrap(); - doc2.put(ROOT, "3", "cc").unwrap(); - - doc1.merge(&mut doc2).unwrap(); - - let mut range_all = doc1.map_range(ROOT, ..); - assert_eq!( - range_all.next(), - Some(("1", "aa".into(), ExId::Id(1, actor2.clone(), 1))) - ); - assert_eq!( - range_all.next_back(), - Some(("3", "cc".into(), ExId::Id(3, actor2.clone(), 1))) - ); - assert_eq!( - range_all.next_back(), - Some(("2", "bb".into(), ExId::Id(2, actor2.clone(), 1))) - ); - assert_eq!(range_all.next_back(), None); - assert_eq!(range_all.next(), None); - - let mut range_all = doc1.map_range(ROOT, ..); - assert_eq!( - range_all.next(), - Some(("1", "aa".into(), ExId::Id(1, actor2.clone(), 1))) - ); - assert_eq!( - range_all.next_back(), - Some(("3", "cc".into(), ExId::Id(3, actor2.clone(), 1))) - ); - assert_eq!( - range_all.next(), - Some(("2", "bb".into(), ExId::Id(2, actor2.clone(), 1))) - ); - assert_eq!(range_all.next_back(), None); - assert_eq!(range_all.next(), None); - - let mut range_all = doc1.map_range(ROOT, ..); - assert_eq!( - range_all.next(), - Some(("1", "aa".into(), ExId::Id(1, actor2.clone(), 1))) - ); - assert_eq!( - range_all.next(), - Some(("2", "bb".into(), ExId::Id(2, actor2.clone(), 1))) - ); - assert_eq!( - range_all.next(), - Some(("3", 
"cc".into(), ExId::Id(3, actor2.clone(), 1))) - ); - assert_eq!(range_all.next_back(), None); - assert_eq!(range_all.next(), None); - - let mut range_all = doc1.map_range(ROOT, ..); - assert_eq!( - range_all.next_back(), - Some(("3", "cc".into(), ExId::Id(3, actor2.clone(), 1))) - ); - assert_eq!( - range_all.next_back(), - Some(("2", "bb".into(), ExId::Id(2, actor2.clone(), 1))) - ); - assert_eq!( - range_all.next_back(), - Some(("1", "aa".into(), ExId::Id(1, actor2, 1))) - ); - assert_eq!(range_all.next_back(), None); - assert_eq!(range_all.next(), None); -} - -#[test] -fn map_range_at_back_and_forth_single() { - let mut doc = AutoCommit::new(); - let actor = doc.get_actor().clone(); - - doc.put(ROOT, "1", "a").unwrap(); - doc.put(ROOT, "2", "b").unwrap(); - doc.put(ROOT, "3", "c").unwrap(); - - let heads = doc.get_heads(); - - let mut range_all = doc.map_range_at(ROOT, .., &heads); - assert_eq!( - range_all.next(), - Some(("1", "a".into(), ExId::Id(1, actor.clone(), 0))) - ); - assert_eq!( - range_all.next_back(), - Some(("3", "c".into(), ExId::Id(3, actor.clone(), 0))) - ); - assert_eq!( - range_all.next_back(), - Some(("2", "b".into(), ExId::Id(2, actor.clone(), 0))) - ); - assert_eq!(range_all.next_back(), None); - assert_eq!(range_all.next(), None); - - let mut range_all = doc.map_range_at(ROOT, .., &heads); - assert_eq!( - range_all.next(), - Some(("1", "a".into(), ExId::Id(1, actor.clone(), 0))) - ); - assert_eq!( - range_all.next_back(), - Some(("3", "c".into(), ExId::Id(3, actor.clone(), 0))) - ); - assert_eq!( - range_all.next(), - Some(("2", Value::str("b"), ExId::Id(2, actor.clone(), 0))) - ); - assert_eq!(range_all.next_back(), None); - assert_eq!(range_all.next(), None); - - let mut range_all = doc.map_range_at(ROOT, .., &heads); - assert_eq!( - range_all.next(), - Some(("1", "a".into(), ExId::Id(1, actor.clone(), 0))) - ); - assert_eq!( - range_all.next(), - Some(("2", "b".into(), ExId::Id(2, actor.clone(), 0))) - ); - assert_eq!( - 
range_all.next(), - Some(("3", "c".into(), ExId::Id(3, actor.clone(), 0))) - ); - assert_eq!(range_all.next_back(), None); - assert_eq!(range_all.next(), None); - - let mut range_all = doc.map_range_at(ROOT, .., &heads); - assert_eq!( - range_all.next_back(), - Some(("3", "c".into(), ExId::Id(3, actor.clone(), 0))) - ); - assert_eq!( - range_all.next_back(), - Some(("2", "b".into(), ExId::Id(2, actor.clone(), 0))) - ); - assert_eq!( - range_all.next_back(), - Some(("1", "a".into(), ExId::Id(1, actor, 0))) - ); - assert_eq!(range_all.next_back(), None); - assert_eq!(range_all.next(), None); -} - -#[test] -fn map_range_at_back_and_forth_double() { - let mut doc1 = AutoCommit::new(); - doc1.set_actor(ActorId::from([0])); - - doc1.put(ROOT, "1", "a").unwrap(); - doc1.put(ROOT, "2", "b").unwrap(); - doc1.put(ROOT, "3", "c").unwrap(); - - // actor 2 should win in all conflicts here - let mut doc2 = AutoCommit::new(); - doc1.set_actor(ActorId::from([1])); - let actor2 = doc2.get_actor().clone(); - doc2.put(ROOT, "1", "aa").unwrap(); - doc2.put(ROOT, "2", "bb").unwrap(); - doc2.put(ROOT, "3", "cc").unwrap(); - - doc1.merge(&mut doc2).unwrap(); - let heads = doc1.get_heads(); - - let mut range_all = doc1.map_range_at(ROOT, .., &heads); - assert_eq!( - range_all.next(), - Some(("1", "aa".into(), ExId::Id(1, actor2.clone(), 1))) - ); - assert_eq!( - range_all.next_back(), - Some(("3", "cc".into(), ExId::Id(3, actor2.clone(), 1))) - ); - assert_eq!( - range_all.next_back(), - Some(("2", "bb".into(), ExId::Id(2, actor2.clone(), 1))) - ); - assert_eq!(range_all.next_back(), None); - assert_eq!(range_all.next(), None); - - let mut range_all = doc1.map_range_at(ROOT, .., &heads); - assert_eq!( - range_all.next(), - Some(("1", "aa".into(), ExId::Id(1, actor2.clone(), 1))) - ); - assert_eq!( - range_all.next_back(), - Some(("3", "cc".into(), ExId::Id(3, actor2.clone(), 1))) - ); - assert_eq!( - range_all.next(), - Some(("2", "bb".into(), ExId::Id(2, actor2.clone(), 1))) - ); - 
assert_eq!(range_all.next_back(), None); - assert_eq!(range_all.next(), None); - - let mut range_all = doc1.map_range_at(ROOT, .., &heads); - assert_eq!( - range_all.next(), - Some(("1", "aa".into(), ExId::Id(1, actor2.clone(), 1))) - ); - assert_eq!( - range_all.next(), - Some(("2", "bb".into(), ExId::Id(2, actor2.clone(), 1))) - ); - assert_eq!( - range_all.next(), - Some(("3", "cc".into(), ExId::Id(3, actor2.clone(), 1))) - ); - assert_eq!(range_all.next_back(), None); - assert_eq!(range_all.next(), None); - - let mut range_all = doc1.map_range_at(ROOT, .., &heads); - assert_eq!( - range_all.next_back(), - Some(("3", "cc".into(), ExId::Id(3, actor2.clone(), 1))) - ); - assert_eq!( - range_all.next_back(), - Some(("2", "bb".into(), ExId::Id(2, actor2.clone(), 1))) - ); - assert_eq!( - range_all.next_back(), - Some(("1", "aa".into(), ExId::Id(1, actor2, 1))) - ); - assert_eq!(range_all.next_back(), None); - assert_eq!(range_all.next(), None); -} - -#[test] -fn insert_at_index() { - let mut doc = AutoCommit::new(); - - let list = &doc.put_object(ROOT, "list", ObjType::List).unwrap(); - doc.insert(list, 0, 0).unwrap(); - doc.insert(list, 0, 1).unwrap(); // both inserts at the same index - - assert_eq!(doc.length(list), 2); - assert_eq!(doc.keys(list).count(), 2); - assert_eq!(doc.list_range(list, ..).count(), 2); -} - -#[test] -fn get_list_values() -> Result<(), AutomergeError> { - let mut doc1 = Automerge::new(); - let mut tx = doc1.transaction(); - let list = tx.put_object(ROOT, "list", ObjType::List)?; - - // insert elements - tx.insert(&list, 0, "First")?; - tx.insert(&list, 1, "Second")?; - tx.insert(&list, 2, "Third")?; - tx.insert(&list, 3, "Forth")?; - tx.insert(&list, 4, "Fith")?; - tx.insert(&list, 5, "Sixth")?; - tx.insert(&list, 6, "Seventh")?; - tx.insert(&list, 7, "Eights")?; - tx.commit(); - - let v1 = doc1.get_heads(); - let mut doc2 = doc1.fork(); - - let mut tx = doc1.transaction(); - tx.put(&list, 2, "Third V2")?; - tx.commit(); - - let mut tx = 
doc2.transaction(); - tx.put(&list, 2, "Third V3")?; - tx.commit(); - - doc1.merge(&mut doc2)?; - - assert_eq!(doc1.list_range(&list, ..).count(), 8); - - for (i, val1, id) in doc1.list_range(&list, ..) { - let val2 = doc1.get(&list, i)?; - assert_eq!(Some((val1, id)), val2); - } - - assert_eq!(doc1.list_range(&list, 3..6).count(), 3); - assert_eq!(doc1.list_range(&list, 3..6).next().unwrap().0, 3); - assert_eq!(doc1.list_range(&list, 3..6).last().unwrap().0, 5); - - for (i, val1, id) in doc1.list_range(&list, 3..6) { - let val2 = doc1.get(&list, i)?; - assert_eq!(Some((val1, id)), val2); - } - - assert_eq!(doc1.list_range_at(&list, .., &v1).count(), 8); - for (i, val1, id) in doc1.list_range_at(&list, .., &v1) { - let val2 = doc1.get_at(&list, i, &v1)?; - assert_eq!(Some((val1, id)), val2); - } - - assert_eq!(doc1.list_range_at(&list, 3..6, &v1).count(), 3); - assert_eq!(doc1.list_range_at(&list, 3..6, &v1).next().unwrap().0, 3); - assert_eq!(doc1.list_range_at(&list, 3..6, &v1).last().unwrap().0, 5); - - for (i, val1, id) in doc1.list_range_at(&list, 3..6, &v1) { - let val2 = doc1.get_at(&list, i, &v1)?; - assert_eq!(Some((val1, id)), val2); - } - - let range: Vec<_> = doc1 - .list_range(&list, ..) 
- .map(|(_, val, id)| (val, id)) - .collect(); - let values = doc1.values(&list); - let values: Vec<_> = values.collect(); - assert_eq!(range, values); - - let range: Vec<_> = doc1 - .list_range_at(&list, .., &v1) - .map(|(_, val, id)| (val, id)) - .collect(); - let values: Vec<_> = doc1.values_at(&list, &v1).collect(); - assert_eq!(range, values); - - Ok(()) -} - -#[test] -fn get_range_values() -> Result<(), AutomergeError> { - let mut doc1 = Automerge::new(); - let mut tx = doc1.transaction(); - tx.put(ROOT, "aa", "aaa")?; - tx.put(ROOT, "bb", "bbb")?; - tx.put(ROOT, "cc", "ccc")?; - tx.put(ROOT, "dd", "ddd")?; - tx.commit(); - - let v1 = doc1.get_heads(); - let mut doc2 = doc1.fork(); - - let mut tx = doc1.transaction(); - tx.put(ROOT, "cc", "ccc V2")?; - tx.commit(); - - let mut tx = doc2.transaction(); - tx.put(ROOT, "cc", "ccc V3")?; - tx.commit(); - - doc1.merge(&mut doc2)?; - - let range = "b".to_string().."d".to_string(); - - assert_eq!(doc1.map_range(ROOT, range.clone()).count(), 2); - - for (key, val1, id) in doc1.map_range(ROOT, range.clone()) { - let val2 = doc1.get(ROOT, key)?; - assert_eq!(Some((val1, id)), val2); - } - - assert_eq!(doc1.map_range(ROOT, range.clone()).rev().count(), 2); - - for (key, val1, id) in doc1.map_range(ROOT, range.clone()).rev() { - let val2 = doc1.get(ROOT, key)?; - assert_eq!(Some((val1, id)), val2); - } - - assert_eq!(doc1.map_range_at(ROOT, range.clone(), &v1).count(), 2); - - for (key, val1, id) in doc1.map_range_at(ROOT, range.clone(), &v1) { - let val2 = doc1.get_at(ROOT, key, &v1)?; - assert_eq!(Some((val1, id)), val2); - } - - assert_eq!(doc1.map_range_at(ROOT, range.clone(), &v1).rev().count(), 2); - - for (key, val1, id) in doc1.map_range_at(ROOT, range, &v1).rev() { - let val2 = doc1.get_at(ROOT, key, &v1)?; - assert_eq!(Some((val1, id)), val2); - } - - let range: Vec<_> = doc1 - .map_range(ROOT, ..) 
- .map(|(_, val, id)| (val, id)) - .collect(); - let values: Vec<_> = doc1.values(ROOT).collect(); - assert_eq!(range, values); - - let range: Vec<_> = doc1 - .map_range_at(ROOT, .., &v1) - .map(|(_, val, id)| (val, id)) - .collect(); - let values: Vec<_> = doc1.values_at(ROOT, &v1).collect(); - assert_eq!(range, values); - - Ok(()) -} - -#[test] -fn range_iter_map_rev() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 3).unwrap(); - tx.put(ROOT, "b", 4).unwrap(); - tx.put(ROOT, "c", 5).unwrap(); - tx.put(ROOT, "d", 6).unwrap(); - tx.commit(); - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 7).unwrap(); - tx.commit(); - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 8).unwrap(); - tx.put(ROOT, "d", 9).unwrap(); - tx.commit(); - let actor = doc.get_actor(); - assert_eq!(doc.map_range(ROOT, ..).rev().count(), 4); - - let mut range = doc.map_range(ROOT, "b".to_owned().."d".into()).rev(); - assert_eq!( - range.next(), - Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) - ); - assert_eq!( - range.next(), - Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) - ); - assert_eq!(range.next(), None); - - let mut range = doc.map_range(ROOT, "b".to_owned()..="d".into()).rev(); - assert_eq!( - range.next(), - Some(("d", 9.into(), ExId::Id(7, actor.clone(), 0))) - ); - assert_eq!( - range.next(), - Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) - ); - assert_eq!( - range.next(), - Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) - ); - assert_eq!(range.next(), None); - - let mut range = doc.map_range(ROOT, ..="c".to_owned()).rev(); - assert_eq!( - range.next(), - Some(("c", 5.into(), ExId::Id(3, actor.clone(), 0))) - ); - assert_eq!( - range.next(), - Some(("b", 4.into(), ExId::Id(2, actor.clone(), 0))) - ); - assert_eq!( - range.next(), - Some(("a", 8.into(), ExId::Id(6, actor.clone(), 0))) - ); - assert_eq!(range.next(), None); - - let range = doc.map_range(ROOT, "a".to_owned()..).rev(); - assert_eq!( - 
range.collect::>(), - vec![ - ("d", 9.into(), ExId::Id(7, actor.clone(), 0)), - ("c", 5.into(), ExId::Id(3, actor.clone(), 0)), - ("b", 4.into(), ExId::Id(2, actor.clone(), 0)), - ("a", 8.into(), ExId::Id(6, actor.clone(), 0)), - ] - ); -} - -#[test] -fn rolling_back_transaction_has_no_effect() { - let mut doc = Automerge::new(); - let old_states = doc.states.clone(); - let bytes = doc.save(); - let tx = doc.transaction(); - tx.rollback(); - let new_states = doc.states.clone(); - assert_eq!(old_states, new_states); - let new_bytes = doc.save(); - assert_eq!(bytes, new_bytes); -} - -#[test] -fn mutate_old_objects() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - // create a map - let map1 = tx.put_object(ROOT, "a", ObjType::Map).unwrap(); - tx.put(&map1, "b", 1).unwrap(); - // overwrite the first map with a new one - let map2 = tx.put_object(ROOT, "a", ObjType::Map).unwrap(); - tx.put(&map2, "c", 2).unwrap(); - tx.commit(); - - // we can get the new map by traversing the tree - let map = doc.get(&ROOT, "a").unwrap().unwrap().1; - assert_eq!(doc.get(&map, "b").unwrap(), None); - // and get values from it - assert_eq!( - doc.get(&map, "c").unwrap().map(|s| s.0), - Some(ScalarValue::Int(2).into()) - ); - - // but we can still access the old one if we know the ID! - assert_eq!(doc.get(&map1, "b").unwrap().unwrap().0, Value::int(1)); - // and even set new things in it! 
- let mut tx = doc.transaction(); - tx.put(&map1, "c", 3).unwrap(); - tx.commit(); - - assert_eq!(doc.get(&map1, "c").unwrap().unwrap().0, Value::int(3)); -} - -#[test] -fn delete_nothing_in_map_is_noop() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - // deleting a missing key in a map should just be a noop - assert!(tx.delete(ROOT, "a",).is_ok()); - tx.commit(); - let last_change = doc.get_last_local_change(); - assert!(last_change.is_none()); - - let bytes = doc.save(); - assert!(Automerge::load(&bytes,).is_ok()); - - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 1).unwrap(); - tx.commit(); - let last_change = doc.get_last_local_change().unwrap(); - assert_eq!(last_change.len(), 1); - - let mut tx = doc.transaction(); - // a real op - tx.delete(ROOT, "a").unwrap(); - // a no-op - tx.delete(ROOT, "a").unwrap(); - tx.commit(); - let last_change = doc.get_last_local_change().unwrap(); - assert_eq!(last_change.len(), 1); -} - -#[test] -fn delete_nothing_in_list_returns_error() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - // deleting an element in a list that does not exist is an error - assert!(tx.delete(ROOT, 0,).is_err()); -} - -#[test] -fn loaded_doc_changes_have_hash() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - tx.put(ROOT, "a", 1_u64).unwrap(); - tx.commit(); - let hash = doc.get_last_local_change().unwrap().hash(); - let bytes = doc.save(); - let doc = Automerge::load(&bytes).unwrap(); - assert_eq!(doc.get_change_by_hash(&hash).unwrap().hash(), hash); -} - -#[test] -fn load_change_with_zero_start_op() { - let bytes = &[ - 133, 111, 74, 131, 202, 50, 52, 158, 2, 96, 163, 163, 83, 255, 255, 255, 50, 50, 50, 50, - 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 255, 255, 245, 53, 1, 0, 0, 0, 0, 0, 0, 4, 233, - 245, 239, 255, 1, 0, 0, 0, 133, 111, 74, 131, 163, 96, 0, 0, 2, 10, 202, 144, 125, 19, 48, - 89, 133, 49, 10, 10, 67, 91, 111, 10, 74, 131, 96, 0, 163, 131, 255, 255, 255, 255, 
255, - 255, 255, 255, 255, 1, 153, 0, 0, 246, 255, 255, 255, 157, 157, 157, 157, 157, 157, 157, - 157, 157, 157, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, - 255, 255, 255, 48, 254, 208, - ]; - let _ = Automerge::load(bytes); -} - -#[test] -fn load_broken_list() { - enum Action { - InsertText(usize, char), - DelText(usize), - } - use Action::*; - let actions = [ - InsertText(0, 'a'), - InsertText(0, 'b'), - DelText(1), - InsertText(0, 'c'), - DelText(1), - DelText(0), - InsertText(0, 'd'), - InsertText(0, 'e'), - InsertText(1, 'f'), - DelText(2), - DelText(1), - InsertText(0, 'g'), - DelText(1), - DelText(0), - InsertText(0, 'h'), - InsertText(1, 'i'), - DelText(1), - DelText(0), - InsertText(0, 'j'), - InsertText(0, 'k'), - DelText(1), - DelText(0), - InsertText(0, 'l'), - DelText(0), - InsertText(0, 'm'), - InsertText(0, 'n'), - DelText(1), - DelText(0), - InsertText(0, 'o'), - DelText(0), - InsertText(0, 'p'), - InsertText(1, 'q'), - InsertText(1, 'r'), - InsertText(1, 's'), - InsertText(3, 't'), - InsertText(5, 'u'), - InsertText(0, 'v'), - InsertText(3, 'w'), - InsertText(4, 'x'), - InsertText(0, 'y'), - InsertText(6, 'z'), - InsertText(11, '1'), - InsertText(0, '2'), - InsertText(0, '3'), - InsertText(0, '4'), - InsertText(13, '5'), - InsertText(11, '6'), - InsertText(17, '7'), - ]; - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - let list = tx.put_object(ROOT, "list", ObjType::List).unwrap(); - for action in actions { - match action { - Action::InsertText(index, c) => { - tx.insert(&list, index, c).unwrap(); - } - Action::DelText(index) => { - tx.delete(&list, index).unwrap(); - } - } - } - tx.commit(); - let bytes = doc.save(); - let mut doc2 = Automerge::load(&bytes).unwrap(); - let bytes2 = doc2.save(); - assert_eq!(doc.text(&list).unwrap(), doc2.text(&list).unwrap()); - - assert_eq!(doc.queue, doc2.queue); - assert_eq!(doc.history, doc2.history); - assert_eq!(doc.history_index, doc2.history_index); - 
assert_eq!(doc.states, doc2.states); - assert_eq!(doc.deps, doc2.deps); - assert_eq!(doc.saved, doc2.saved); - assert_eq!(doc.ops, doc2.ops); - assert_eq!(doc.max_op, doc2.max_op); - - assert_eq!(bytes, bytes2); -} - -#[test] -fn load_broken_list_short() { - // breaks when the B constant in OpSet is 3 - enum Action { - InsertText(usize, char), - DelText(usize), - } - use Action::*; - let actions = [ - InsertText(0, 'a'), - InsertText(1, 'b'), - DelText(1), - InsertText(1, 'c'), - InsertText(2, 'd'), - InsertText(2, 'e'), - InsertText(0, 'f'), - DelText(4), - InsertText(4, 'g'), - ]; - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - let list = tx.put_object(ROOT, "list", ObjType::List).unwrap(); - for action in actions { - match action { - Action::InsertText(index, c) => { - tx.insert(&list, index, c).unwrap(); - } - Action::DelText(index) => { - tx.delete(&list, index).unwrap(); - } - } - } - tx.commit(); - let bytes = doc.save(); - let mut doc2 = Automerge::load(&bytes).unwrap(); - let bytes2 = doc2.save(); - assert_eq!(doc.text(&list).unwrap(), doc2.text(&list).unwrap()); - - assert_eq!(doc.queue, doc2.queue); - assert_eq!(doc.history, doc2.history); - assert_eq!(doc.history_index, doc2.history_index); - assert_eq!(doc.states, doc2.states); - assert_eq!(doc.deps, doc2.deps); - assert_eq!(doc.saved, doc2.saved); - assert_eq!(doc.ops, doc2.ops); - assert_eq!(doc.max_op, doc2.max_op); - - assert_eq!(bytes, bytes2); -} - -#[test] -fn compute_list_indexes_correctly_when_list_element_is_split_across_tree_nodes() { - let max = B as u64 * 2; - let actor1 = ActorId::from(b"aaaa"); - let mut doc1 = AutoCommit::new().with_actor(actor1.clone()); - let actor2 = ActorId::from(b"bbbb"); - let mut doc2 = AutoCommit::new().with_actor(actor2.clone()); - let list = doc1.put_object(ROOT, "list", ObjType::List).unwrap(); - doc1.insert(&list, 0, 0).unwrap(); - doc2.load_incremental(&doc1.save_incremental()).unwrap(); - for i in 1..=max { - doc1.put(&list, 0, 
i).unwrap() - } - for i in 1..=max { - doc2.put(&list, 0, i).unwrap() - } - let change1 = doc1.save_incremental(); - let change2 = doc2.save_incremental(); - doc2.load_incremental(&change1).unwrap(); - doc1.load_incremental(&change2).unwrap(); - assert_eq!(doc1.length(&list), 1); - assert_eq!(doc2.length(&list), 1); - assert_eq!( - doc1.get_all(&list, 0).unwrap(), - vec![ - (max.into(), ExId::Id(max + 2, actor1.clone(), 0)), - (max.into(), ExId::Id(max + 2, actor2.clone(), 1)) - ] - ); - assert_eq!( - doc2.get_all(&list, 0).unwrap(), - vec![ - (max.into(), ExId::Id(max + 2, actor1, 0)), - (max.into(), ExId::Id(max + 2, actor2, 1)) - ] - ); - assert!(doc1.get(&list, 1).unwrap().is_none()); - assert!(doc2.get(&list, 1).unwrap().is_none()); -} - -#[test] -fn get_parent_objects() { - let mut doc = AutoCommit::new(); - let map = doc.put_object(ROOT, "a", ObjType::Map).unwrap(); - let list = doc.put_object(&map, "b", ObjType::List).unwrap(); - doc.insert(&list, 0, 2).unwrap(); - let text = doc.put_object(&list, 0, ObjType::Text).unwrap(); - - assert_eq!( - doc.parents(&map).unwrap().next(), - Some(Parent { - obj: ROOT, - prop: Prop::Map("a".into()), - visible: true - }) - ); - assert_eq!( - doc.parents(&list).unwrap().next(), - Some(Parent { - obj: map, - prop: Prop::Map("b".into()), - visible: true - }) - ); - assert_eq!( - doc.parents(&text).unwrap().next(), - Some(Parent { - obj: list, - prop: Prop::Seq(0), - visible: true - }) - ); -} - -#[test] -fn get_path_to_object() { - let mut doc = AutoCommit::new(); - let map = doc.put_object(ROOT, "a", ObjType::Map).unwrap(); - let list = doc.put_object(&map, "b", ObjType::List).unwrap(); - doc.insert(&list, 0, 2).unwrap(); - let text = doc.put_object(&list, 0, ObjType::Text).unwrap(); - - assert_eq!( - doc.path_to_object(&map).unwrap(), - vec![(ROOT, Prop::Map("a".into()))] - ); - assert_eq!( - doc.path_to_object(&list).unwrap(), - vec![ - (ROOT, Prop::Map("a".into())), - (map.clone(), Prop::Map("b".into())), - ] - ); - 
assert_eq!( - doc.path_to_object(text).unwrap(), - vec![ - (ROOT, Prop::Map("a".into())), - (map, Prop::Map("b".into())), - (list, Prop::Seq(0)), - ] - ); -} - -#[test] -fn parents_iterator() { - let mut doc = AutoCommit::new(); - let map = doc.put_object(ROOT, "a", ObjType::Map).unwrap(); - let list = doc.put_object(&map, "b", ObjType::List).unwrap(); - doc.insert(&list, 0, 2).unwrap(); - let text = doc.put_object(&list, 0, ObjType::Text).unwrap(); - - let mut parents = doc.parents(text).unwrap(); - assert_eq!( - parents.next(), - Some(Parent { - obj: list, - prop: Prop::Seq(0), - visible: true - }) - ); - assert_eq!( - parents.next(), - Some(Parent { - obj: map, - prop: Prop::Map("b".into()), - visible: true - }) - ); - assert_eq!( - parents.next(), - Some(Parent { - obj: ROOT, - prop: Prop::Map("a".into()), - visible: true - }) - ); - assert_eq!(parents.next(), None); -} - -#[test] -fn can_insert_a_grapheme_into_text() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap(); - let polar_bear = "🐻‍❄️"; - tx.splice_text(&text, 0, 0, polar_bear).unwrap(); - tx.commit(); - let s = doc.text(&text).unwrap(); - assert_eq!(s, polar_bear); - let len = doc.length(&text); - assert_eq!(len, 4); // 4 utf8 chars -} - -#[test] -fn long_strings_spliced_into_text_get_segmented_by_utf8_chars() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap(); - let polar_bear = "🐻‍❄️"; - let polar_bear_army = polar_bear.repeat(100); - tx.splice_text(&text, 0, 0, &polar_bear_army).unwrap(); - tx.commit(); - let s = doc.text(&text).unwrap(); - assert_eq!(s, polar_bear_army); - let len = doc.length(&text); - assert_eq!(len, polar_bear.chars().count() * 100); - assert_eq!(len, 400); -} - -#[test] -fn splice_text_uses_unicode_scalars() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - let text = tx.put_object(ROOT, 
"text", ObjType::Text).unwrap(); - let polar_bear = "🐻‍❄️"; - tx.splice_text(&text, 0, 0, polar_bear).unwrap(); - tx.commit(); - let s = doc.text(&text).unwrap(); - assert_eq!(s, polar_bear); - let len = doc.length(&text); - assert_eq!(len, 4); // 4 chars -} - -#[test] -fn observe_counter_change_application_overwrite() { - let mut doc1 = AutoCommit::new(); - doc1.set_actor(ActorId::from([1])); - doc1.put(ROOT, "counter", ScalarValue::counter(1)).unwrap(); - doc1.commit(); - - let mut doc2 = doc1.fork(); - doc2.set_actor(ActorId::from([2])); - doc2.put(ROOT, "counter", "mystring").unwrap(); - doc2.commit(); - - doc1.increment(ROOT, "counter", 2).unwrap(); - doc1.commit(); - doc1.increment(ROOT, "counter", 5).unwrap(); - doc1.commit(); - - let mut doc3 = doc1.fork().with_observer(VecOpObserver::default()); - doc3.merge(&mut doc2).unwrap(); - - assert_eq!( - doc3.observer().take_patches(), - vec![Patch::Put { - obj: ExId::Root, - path: vec![], - prop: Prop::Map("counter".into()), - value: ( - ScalarValue::Str("mystring".into()).into(), - ExId::Id(2, doc2.get_actor().clone(), 1) - ), - conflict: false - }] - ); - - let mut doc4 = doc2.clone().with_observer(VecOpObserver::default()); - doc4.merge(&mut doc1).unwrap(); - - // no patches as the increments operate on an invisible counter - assert_eq!(doc4.observer().take_patches(), vec![]); -} - -#[test] -fn observe_counter_change_application() { - let mut doc = AutoCommit::new(); - doc.put(ROOT, "counter", ScalarValue::counter(1)).unwrap(); - doc.increment(ROOT, "counter", 2).unwrap(); - doc.increment(ROOT, "counter", 5).unwrap(); - let changes = doc.get_changes(&[]).unwrap().into_iter().cloned(); - - let mut new_doc = AutoCommit::new().with_observer(VecOpObserver::default()); - // make a new change to the doc to stop the empty doc logic from skipping the intermediate - // patches. The is probably not really necessary, we could update this test to just test that - // the correct final state is emitted. 
For now though, we leave it as is. - new_doc.put(ROOT, "foo", "bar").unwrap(); - new_doc.observer().take_patches(); - new_doc.apply_changes(changes).unwrap(); - assert_eq!( - new_doc.observer().take_patches(), - vec![ - Patch::Put { - obj: ExId::Root, - path: vec![], - prop: Prop::Map("counter".into()), - value: ( - ScalarValue::counter(1).into(), - ExId::Id(1, doc.get_actor().clone(), 0) - ), - conflict: false - }, - Patch::Increment { - obj: ExId::Root, - path: vec![], - prop: Prop::Map("counter".into()), - value: (2, ExId::Id(2, doc.get_actor().clone(), 0)), - }, - Patch::Increment { - obj: ExId::Root, - path: vec![], - prop: Prop::Map("counter".into()), - value: (5, ExId::Id(3, doc.get_actor().clone(), 0)), - } - ] - ); -} - -#[test] -fn get_changes_heads_empty() { - let mut doc = AutoCommit::new(); - doc.put(ROOT, "key1", 1).unwrap(); - doc.commit(); - doc.put(ROOT, "key2", 1).unwrap(); - doc.commit(); - let heads = doc.get_heads(); - assert_eq!(doc.get_changes(&heads).unwrap(), Vec::<&Change>::new()); -} diff --git a/rust/automerge/src/autoserde.rs b/rust/automerge/src/autoserde.rs deleted file mode 100644 index ccfc6ae6..00000000 --- a/rust/automerge/src/autoserde.rs +++ /dev/null @@ -1,124 +0,0 @@ -use serde::ser::{SerializeMap, SerializeSeq}; - -use crate::{ObjId, ObjType, ReadDoc, Value}; - -/// A wrapper type which implements [`serde::Serialize`] for a [`ReadDoc`]. 
-/// -/// # Example -/// -/// ``` -/// # fn main() -> Result<(), Box> { -/// use automerge::{AutoCommit, AutomergeError, Value, transaction::Transactable}; -/// let mut doc = AutoCommit::new(); -/// doc.put(automerge::ROOT, "key", "value")?; -/// -/// let serialized = serde_json::to_string(&automerge::AutoSerde::from(&doc)).unwrap(); -/// -/// assert_eq!(serialized, r#"{"key":"value"}"#); -/// # Ok(()) -/// # } -/// ``` -#[derive(Debug)] -pub struct AutoSerde<'a, R: crate::ReadDoc>(&'a R); - -impl<'a, R: ReadDoc> From<&'a R> for AutoSerde<'a, R> { - fn from(a: &'a R) -> Self { - AutoSerde(a) - } -} - -impl<'a, R: crate::ReadDoc> serde::Serialize for AutoSerde<'a, R> { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - AutoSerdeMap { - doc: self.0, - obj: ObjId::Root, - } - .serialize(serializer) - } -} - -struct AutoSerdeMap<'a, R> { - doc: &'a R, - obj: ObjId, -} - -impl<'a, R: crate::ReadDoc> serde::Serialize for AutoSerdeMap<'a, R> { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - let mut map_ser = serializer.serialize_map(Some(self.doc.length(&ObjId::Root)))?; - for key in self.doc.keys(&self.obj) { - // SAFETY: This only errors if the object ID is unknown, but we construct this type - // with a known real object ID - let (val, obj) = self.doc.get(&self.obj, &key).unwrap().unwrap(); - let serdeval = AutoSerdeVal { - doc: self.doc, - val, - obj, - }; - map_ser.serialize_entry(&key, &serdeval)?; - } - map_ser.end() - } -} - -struct AutoSerdeSeq<'a, R> { - doc: &'a R, - obj: ObjId, -} - -impl<'a, R: crate::ReadDoc> serde::Serialize for AutoSerdeSeq<'a, R> { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - let mut seq_ser = serializer.serialize_seq(None)?; - for i in 0..self.doc.length(&self.obj) { - // SAFETY: This only errors if the object ID is unknown, but we construct this type - // with a known real object ID - let (val, obj) = self.doc.get(&self.obj, 
i).unwrap().unwrap(); - let serdeval = AutoSerdeVal { - doc: self.doc, - val, - obj, - }; - seq_ser.serialize_element(&serdeval)?; - } - seq_ser.end() - } -} - -struct AutoSerdeVal<'a, R> { - doc: &'a R, - val: Value<'a>, - obj: ObjId, -} - -impl<'a, R: crate::ReadDoc> serde::Serialize for AutoSerdeVal<'a, R> { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - match &self.val { - Value::Object(ObjType::Map | ObjType::Table) => { - let map = AutoSerdeMap { - doc: self.doc, - obj: self.obj.clone(), - }; - map.serialize(serializer) - } - Value::Object(ObjType::List | ObjType::Text) => { - let seq = AutoSerdeSeq { - doc: self.doc, - obj: self.obj.clone(), - }; - seq.serialize(serializer) - } - Value::Scalar(v) => v.serialize(serializer), - } - } -} diff --git a/rust/automerge/src/change.rs b/rust/automerge/src/change.rs deleted file mode 100644 index be467a84..00000000 --- a/rust/automerge/src/change.rs +++ /dev/null @@ -1,402 +0,0 @@ -use std::{borrow::Cow, num::NonZeroU64}; - -use crate::{ - columnar::Key as StoredKey, - storage::{ - change::{Unverified, Verified}, - parse, Change as StoredChange, ChangeOp, Chunk, Compressed, ReadChangeOpError, - }, - types::{ActorId, ChangeHash, ElemId}, -}; - -#[derive(Clone, Debug, PartialEq)] -pub struct Change { - stored: StoredChange<'static, Verified>, - compression: CompressionState, - len: usize, -} - -impl Change { - pub(crate) fn new(stored: StoredChange<'static, Verified>) -> Self { - let len = stored.iter_ops().count(); - Self { - stored, - len, - compression: CompressionState::NotCompressed, - } - } - - pub(crate) fn new_from_unverified( - stored: StoredChange<'static, Unverified>, - compressed: Option>, - ) -> Result { - let mut len = 0; - let stored = stored.verify_ops(|_| len += 1)?; - let compression = if let Some(c) = compressed { - CompressionState::Compressed(c) - } else { - CompressionState::NotCompressed - }; - Ok(Self { - stored, - len, - compression, - }) - } - - pub fn 
actor_id(&self) -> &ActorId { - self.stored.actor() - } - - pub fn other_actor_ids(&self) -> &[ActorId] { - self.stored.other_actors() - } - - pub fn len(&self) -> usize { - self.len - } - - pub fn is_empty(&self) -> bool { - self.len == 0 - } - - pub fn max_op(&self) -> u64 { - self.stored.start_op().get() + (self.len as u64) - 1 - } - - pub fn start_op(&self) -> NonZeroU64 { - self.stored.start_op() - } - - pub fn message(&self) -> Option<&String> { - self.stored.message().as_ref() - } - - pub fn deps(&self) -> &[ChangeHash] { - self.stored.dependencies() - } - - pub fn hash(&self) -> ChangeHash { - self.stored.hash() - } - - pub fn seq(&self) -> u64 { - self.stored.seq() - } - - pub fn timestamp(&self) -> i64 { - self.stored.timestamp() - } - - pub fn bytes(&mut self) -> Cow<'_, [u8]> { - if let CompressionState::NotCompressed = self.compression { - if let Some(compressed) = self.stored.compress() { - self.compression = CompressionState::Compressed(compressed); - } else { - self.compression = CompressionState::TooSmallToCompress; - } - }; - match &self.compression { - // SAFETY: We just checked this case above - CompressionState::NotCompressed => unreachable!(), - CompressionState::TooSmallToCompress => Cow::Borrowed(self.stored.bytes()), - CompressionState::Compressed(c) => c.bytes(), - } - } - - pub fn raw_bytes(&self) -> &[u8] { - self.stored.bytes() - } - - pub(crate) fn iter_ops(&self) -> impl Iterator + '_ { - self.stored.iter_ops() - } - - pub fn extra_bytes(&self) -> &[u8] { - self.stored.extra_bytes() - } - - // TODO replace all uses of this with TryFrom<&[u8]> - pub fn from_bytes(bytes: Vec) -> Result { - Self::try_from(&bytes[..]) - } - - pub fn decode(&self) -> crate::ExpandedChange { - crate::ExpandedChange::from(self) - } -} - -#[derive(Clone, Debug, PartialEq)] -enum CompressionState { - /// We haven't tried to compress this change - NotCompressed, - /// We have compressed this change - Compressed(Compressed<'static>), - /// We tried to compress 
this change but it wasn't big enough to be worth it - TooSmallToCompress, -} - -impl AsRef> for Change { - fn as_ref(&self) -> &StoredChange<'static, Verified> { - &self.stored - } -} - -impl From for StoredChange<'static, Verified> { - fn from(c: Change) -> Self { - c.stored - } -} - -#[derive(thiserror::Error, Debug)] -pub enum LoadError { - #[error("unable to parse change: {0}")] - Parse(Box), - #[error("leftover data after parsing")] - LeftoverData, - #[error("wrong chunk type")] - WrongChunkType, -} - -impl<'a> TryFrom<&'a [u8]> for Change { - type Error = LoadError; - - fn try_from(value: &'a [u8]) -> Result { - let input = parse::Input::new(value); - let (remaining, chunk) = Chunk::parse(input).map_err(|e| LoadError::Parse(Box::new(e)))?; - if !remaining.is_empty() { - return Err(LoadError::LeftoverData); - } - match chunk { - Chunk::Change(c) => Self::new_from_unverified(c.into_owned(), None) - .map_err(|e| LoadError::Parse(Box::new(e))), - Chunk::CompressedChange(c, compressed) => { - Self::new_from_unverified(c.into_owned(), Some(compressed.into_owned())) - .map_err(|e| LoadError::Parse(Box::new(e))) - } - _ => Err(LoadError::WrongChunkType), - } - } -} - -impl<'a> TryFrom> for Change { - type Error = ReadChangeOpError; - - fn try_from(c: StoredChange<'a, Unverified>) -> Result { - Self::new_from_unverified(c.into_owned(), None) - } -} - -impl From for Change { - fn from(e: crate::ExpandedChange) -> Self { - let stored = StoredChange::builder() - .with_actor(e.actor_id) - .with_extra_bytes(e.extra_bytes) - .with_seq(e.seq) - .with_dependencies(e.deps) - .with_timestamp(e.time) - .with_start_op(e.start_op) - .with_message(e.message) - .build(e.operations.iter()); - match stored { - Ok(c) => Change::new(c), - Err(crate::storage::change::PredOutOfOrder) => { - // Should never happen because we use `SortedVec` in legacy::Op::pred - panic!("preds out of order"); - } - } - } -} - -mod convert_expanded { - use std::borrow::Cow; - - use crate::{convert, legacy, 
storage::AsChangeOp, types::ActorId, ScalarValue}; - - impl<'a> AsChangeOp<'a> for &'a legacy::Op { - type ActorId = &'a ActorId; - type OpId = &'a legacy::OpId; - type PredIter = std::slice::Iter<'a, legacy::OpId>; - - fn action(&self) -> u64 { - self.action.action_index() - } - - fn insert(&self) -> bool { - self.insert - } - - fn pred(&self) -> Self::PredIter { - self.pred.iter() - } - - fn key(&self) -> convert::Key<'a, Self::OpId> { - match &self.key { - legacy::Key::Map(s) => convert::Key::Prop(Cow::Borrowed(s)), - legacy::Key::Seq(legacy::ElementId::Head) => { - convert::Key::Elem(convert::ElemId::Head) - } - legacy::Key::Seq(legacy::ElementId::Id(o)) => { - convert::Key::Elem(convert::ElemId::Op(o)) - } - } - } - - fn obj(&self) -> convert::ObjId { - match &self.obj { - legacy::ObjectId::Root => convert::ObjId::Root, - legacy::ObjectId::Id(o) => convert::ObjId::Op(o), - } - } - - fn val(&self) -> Cow<'a, crate::ScalarValue> { - match self.primitive_value() { - Some(v) => Cow::Owned(v), - None => Cow::Owned(ScalarValue::Null), - } - } - } - - impl<'a> convert::OpId<&'a ActorId> for &'a legacy::OpId { - fn counter(&self) -> u64 { - legacy::OpId::counter(self) - } - - fn actor(&self) -> &'a ActorId { - &self.1 - } - } -} - -impl From<&Change> for crate::ExpandedChange { - fn from(c: &Change) -> Self { - let actors = std::iter::once(c.actor_id()) - .chain(c.other_actor_ids().iter()) - .cloned() - .enumerate() - .collect::>(); - let operations = c - .iter_ops() - .map(|o| crate::legacy::Op { - action: crate::types::OpType::from_action_and_value(o.action, o.val), - insert: o.insert, - key: match o.key { - StoredKey::Elem(e) if e.is_head() => { - crate::legacy::Key::Seq(crate::legacy::ElementId::Head) - } - StoredKey::Elem(ElemId(o)) => { - crate::legacy::Key::Seq(crate::legacy::ElementId::Id( - crate::legacy::OpId::new(o.counter(), actors.get(&o.actor()).unwrap()), - )) - } - StoredKey::Prop(p) => crate::legacy::Key::Map(p), - }, - obj: if o.obj.is_root() { - 
crate::legacy::ObjectId::Root - } else { - crate::legacy::ObjectId::Id(crate::legacy::OpId::new( - o.obj.opid().counter(), - actors.get(&o.obj.opid().actor()).unwrap(), - )) - }, - pred: o - .pred - .into_iter() - .map(|p| crate::legacy::OpId::new(p.counter(), actors.get(&p.actor()).unwrap())) - .collect(), - }) - .collect::>(); - crate::ExpandedChange { - operations, - actor_id: actors.get(&0).unwrap().clone(), - hash: Some(c.hash()), - time: c.timestamp(), - deps: c.deps().to_vec(), - seq: c.seq(), - start_op: c.start_op(), - extra_bytes: c.extra_bytes().to_vec(), - message: c.message().cloned(), - } - } -} - -#[cfg(test)] -pub(crate) mod gen { - use super::Change; - use crate::{ - op_tree::OpSetMetadata, - storage::{change::ChangeBuilder, convert::op_as_actor_id}, - types::{ - gen::{gen_hash, gen_op}, - ObjId, Op, OpId, - }, - ActorId, - }; - use proptest::prelude::*; - - fn gen_actor() -> impl Strategy { - proptest::array::uniform32(proptest::bits::u8::ANY).prop_map(ActorId::from) - } - - prop_compose! { - fn gen_actors()(this_actor in gen_actor(), other_actors in proptest::collection::vec(gen_actor(), 0..10)) -> (ActorId, Vec) { - (this_actor, other_actors) - } - } - - fn gen_ops( - this_actor: ActorId, - other_actors: Vec, - ) -> impl Strategy, OpSetMetadata)> { - let mut all_actors = vec![this_actor]; - all_actors.extend(other_actors); - let mut m = OpSetMetadata::from_actors(all_actors); - m.props.cache("someprop".to_string()); - let root_id = ObjId::root(); - (0_u64..10) - .prop_map(|num_ops| { - (0..num_ops) - .map(|counter| OpId::new(counter, 0)) - .collect::>() - }) - .prop_flat_map(move |opids| { - let mut strat = Just(Vec::new()).boxed(); - for opid in opids { - strat = (gen_op(opid, vec![0]), strat) - .prop_map(move |(op, ops)| { - let mut result = Vec::with_capacity(ops.len() + 1); - result.extend(ops); - result.push((root_id, op)); - result - }) - .boxed(); - } - strat - }) - .prop_map(move |ops| (ops, m.clone())) - } - - prop_compose! 
{ - pub(crate) fn gen_change()((this_actor, other_actors) in gen_actors())( - (ops, metadata) in gen_ops(this_actor.clone(), other_actors), - start_op in 1_u64..200000, - seq in 0_u64..200000, - timestamp in 0..i64::MAX, - deps in proptest::collection::vec(gen_hash(), 0..100), - message in proptest::option::of("[a-z]{200}"), - this_actor in Just(this_actor), - ) -> Change { - let ops = ops.iter().map(|(obj, op)| op_as_actor_id(obj, op, &metadata)); - Change::new(ChangeBuilder::new() - .with_dependencies(deps) - .with_start_op(start_op.try_into().unwrap()) - .with_message(message) - .with_actor(this_actor) - .with_seq(seq) - .with_timestamp(timestamp) - .build(ops.into_iter()) - .unwrap()) - } - - } -} diff --git a/rust/automerge/src/change_graph.rs b/rust/automerge/src/change_graph.rs deleted file mode 100644 index 01d269d8..00000000 --- a/rust/automerge/src/change_graph.rs +++ /dev/null @@ -1,344 +0,0 @@ -use std::collections::{BTreeMap, BTreeSet}; - -use crate::{ - clock::{Clock, ClockData}, - Change, ChangeHash, -}; - -/// The graph of changes -/// -/// This is a sort of adjacency list based representation, except that instead of using linked -/// lists, we keep all the edges and nodes in two vecs and reference them by index which plays nice -/// with the cache -#[derive(Debug, Clone)] -pub(crate) struct ChangeGraph { - nodes: Vec, - edges: Vec, - hashes: Vec, - nodes_by_hash: BTreeMap, -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] -struct NodeIdx(u32); - -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] -struct EdgeIdx(u32); - -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] -struct HashIdx(u32); - -#[derive(Debug, Clone)] -struct Edge { - // Edges are always child -> parent so we only store the target, the child is implicit - // as you get the edge from the child - target: NodeIdx, - next: Option, -} - -#[derive(Debug, Clone)] -struct ChangeNode { - hash_idx: HashIdx, - actor_index: usize, - seq: u64, - max_op: 
u64, - parents: Option, -} - -impl ChangeGraph { - pub(crate) fn new() -> Self { - Self { - nodes: Vec::new(), - edges: Vec::new(), - nodes_by_hash: BTreeMap::new(), - hashes: Vec::new(), - } - } - - pub(crate) fn add_change( - &mut self, - change: &Change, - actor_idx: usize, - ) -> Result<(), MissingDep> { - let hash = change.hash(); - if self.nodes_by_hash.contains_key(&hash) { - return Ok(()); - } - let parent_indices = change - .deps() - .iter() - .map(|h| self.nodes_by_hash.get(h).copied().ok_or(MissingDep(*h))) - .collect::, _>>()?; - let node_idx = self.add_node(actor_idx, change); - self.nodes_by_hash.insert(hash, node_idx); - for parent_idx in parent_indices { - self.add_parent(node_idx, parent_idx); - } - Ok(()) - } - - fn add_node(&mut self, actor_index: usize, change: &Change) -> NodeIdx { - let idx = NodeIdx(self.nodes.len() as u32); - let hash_idx = self.add_hash(change.hash()); - self.nodes.push(ChangeNode { - hash_idx, - actor_index, - seq: change.seq(), - max_op: change.max_op(), - parents: None, - }); - idx - } - - fn add_hash(&mut self, hash: ChangeHash) -> HashIdx { - let idx = HashIdx(self.hashes.len() as u32); - self.hashes.push(hash); - idx - } - - fn add_parent(&mut self, child_idx: NodeIdx, parent_idx: NodeIdx) { - let new_edge_idx = EdgeIdx(self.edges.len() as u32); - let new_edge = Edge { - target: parent_idx, - next: None, - }; - self.edges.push(new_edge); - - let child = &mut self.nodes[child_idx.0 as usize]; - if let Some(edge_idx) = child.parents { - let mut edge = &mut self.edges[edge_idx.0 as usize]; - while let Some(next) = edge.next { - edge = &mut self.edges[next.0 as usize]; - } - edge.next = Some(new_edge_idx); - } else { - child.parents = Some(new_edge_idx); - } - } - - fn parents(&self, node_idx: NodeIdx) -> impl Iterator + '_ { - let mut edge_idx = self.nodes[node_idx.0 as usize].parents; - std::iter::from_fn(move || { - let this_edge_idx = edge_idx?; - let edge = &self.edges[this_edge_idx.0 as usize]; - edge_idx = 
edge.next; - Some(edge.target) - }) - } - - pub(crate) fn clock_for_heads(&self, heads: &[ChangeHash]) -> Clock { - let mut clock = Clock::new(); - - self.traverse_ancestors(heads, |node, _hash| { - clock.include( - node.actor_index, - ClockData { - max_op: node.max_op, - seq: node.seq, - }, - ); - }); - - clock - } - - pub(crate) fn remove_ancestors( - &self, - changes: &mut BTreeSet, - heads: &[ChangeHash], - ) { - self.traverse_ancestors(heads, |_node, hash| { - changes.remove(hash); - }); - } - - /// Call `f` for each (node, hash) in the graph, starting from the given heads - /// - /// No guarantees are made about the order of traversal but each node will only be visited - /// once. - fn traverse_ancestors( - &self, - heads: &[ChangeHash], - mut f: F, - ) { - let mut to_visit = heads - .iter() - .filter_map(|h| self.nodes_by_hash.get(h)) - .copied() - .collect::>(); - - let mut visited = BTreeSet::new(); - - while let Some(idx) = to_visit.pop() { - if visited.contains(&idx) { - continue; - } else { - visited.insert(idx); - } - let node = &self.nodes[idx.0 as usize]; - let hash = &self.hashes[node.hash_idx.0 as usize]; - f(node, hash); - to_visit.extend(self.parents(idx)); - } - } -} - -#[derive(Debug, thiserror::Error)] -#[error("attempted to derive a clock for a change with dependencies we don't have")] -pub struct MissingDep(ChangeHash); - -#[cfg(test)] -mod tests { - use std::{ - num::NonZeroU64, - time::{SystemTime, UNIX_EPOCH}, - }; - - use crate::{ - clock::ClockData, - op_tree::OpSetMetadata, - storage::{change::ChangeBuilder, convert::op_as_actor_id}, - types::{Key, ObjId, Op, OpId, OpIds}, - ActorId, - }; - - use super::*; - - #[test] - fn clock_by_heads() { - let mut builder = TestGraphBuilder::new(); - let actor1 = builder.actor(); - let actor2 = builder.actor(); - let actor3 = builder.actor(); - let change1 = builder.change(&actor1, 10, &[]); - let change2 = builder.change(&actor2, 20, &[change1]); - let change3 = builder.change(&actor3, 30, 
&[change1]); - let change4 = builder.change(&actor1, 10, &[change2, change3]); - let graph = builder.build(); - - let mut expected_clock = Clock::new(); - expected_clock.include(builder.index(&actor1), ClockData { max_op: 50, seq: 2 }); - expected_clock.include(builder.index(&actor2), ClockData { max_op: 30, seq: 1 }); - expected_clock.include(builder.index(&actor3), ClockData { max_op: 40, seq: 1 }); - - let clock = graph.clock_for_heads(&[change4]); - assert_eq!(clock, expected_clock); - } - - #[test] - fn remove_ancestors() { - let mut builder = TestGraphBuilder::new(); - let actor1 = builder.actor(); - let actor2 = builder.actor(); - let actor3 = builder.actor(); - let change1 = builder.change(&actor1, 10, &[]); - let change2 = builder.change(&actor2, 20, &[change1]); - let change3 = builder.change(&actor3, 30, &[change1]); - let change4 = builder.change(&actor1, 10, &[change2, change3]); - let graph = builder.build(); - - let mut changes = vec![change1, change2, change3, change4] - .into_iter() - .collect::>(); - let heads = vec![change2]; - graph.remove_ancestors(&mut changes, &heads); - - let expected_changes = vec![change3, change4].into_iter().collect::>(); - - assert_eq!(changes, expected_changes); - } - - struct TestGraphBuilder { - actors: Vec, - changes: Vec, - seqs_by_actor: BTreeMap, - } - - impl TestGraphBuilder { - fn new() -> Self { - TestGraphBuilder { - actors: Vec::new(), - changes: Vec::new(), - seqs_by_actor: BTreeMap::new(), - } - } - - fn actor(&mut self) -> ActorId { - let actor = ActorId::random(); - self.actors.push(actor.clone()); - actor - } - - fn index(&self, actor: &ActorId) -> usize { - self.actors.iter().position(|a| a == actor).unwrap() - } - - /// Create a change with `num_new_ops` and `parents` for `actor` - /// - /// The `start_op` and `seq` of the change will be computed from the - /// previous changes for the same actor. 
- fn change( - &mut self, - actor: &ActorId, - num_new_ops: usize, - parents: &[ChangeHash], - ) -> ChangeHash { - let mut meta = OpSetMetadata::from_actors(self.actors.clone()); - let key = meta.props.cache("key".to_string()); - - let start_op = parents - .iter() - .map(|c| { - self.changes - .iter() - .find(|change| change.hash() == *c) - .unwrap() - .max_op() - }) - .max() - .unwrap_or(0) - + 1; - - let actor_idx = self.index(actor); - let ops = (0..num_new_ops) - .map(|opnum| Op { - id: OpId::new(start_op + opnum as u64, actor_idx), - action: crate::OpType::Put("value".into()), - key: Key::Map(key), - succ: OpIds::empty(), - pred: OpIds::empty(), - insert: false, - }) - .collect::>(); - - let root = ObjId::root(); - let timestamp = SystemTime::now() - .duration_since(UNIX_EPOCH) - .unwrap() - .as_millis() as i64; - let seq = self.seqs_by_actor.entry(actor.clone()).or_insert(1); - let change = Change::new( - ChangeBuilder::new() - .with_dependencies(parents.to_vec()) - .with_start_op(NonZeroU64::new(start_op).unwrap()) - .with_actor(actor.clone()) - .with_seq(*seq) - .with_timestamp(timestamp) - .build(ops.iter().map(|op| op_as_actor_id(&root, op, &meta))) - .unwrap(), - ); - *seq = seq.checked_add(1).unwrap(); - let hash = change.hash(); - self.changes.push(change); - hash - } - - fn build(&self) -> ChangeGraph { - let mut graph = ChangeGraph::new(); - for change in &self.changes { - let actor_idx = self.index(change.actor_id()); - graph.add_change(change, actor_idx).unwrap(); - } - graph - } - } -} diff --git a/rust/automerge/src/clock.rs b/rust/automerge/src/clock.rs deleted file mode 100644 index 64d00fcf..00000000 --- a/rust/automerge/src/clock.rs +++ /dev/null @@ -1,161 +0,0 @@ -use crate::types::OpId; -use fxhash::FxBuildHasher; -use std::{cmp::Ordering, collections::HashMap}; - -#[derive(Default, Debug, Clone, Copy, PartialEq)] -pub(crate) struct ClockData { - /// Maximum operation counter of the actor at the point in time. 
- pub(crate) max_op: u64, - /// Sequence number of the change from this actor. - pub(crate) seq: u64, -} - -// a clock for the same actor is ahead of another if it has a higher max_op -impl PartialOrd for ClockData { - fn partial_cmp(&self, other: &Self) -> Option { - self.max_op.partial_cmp(&other.max_op) - } -} - -/// Vector clock mapping actor indices to the max op counter of the changes created by that actor. -#[derive(Default, Debug, Clone, PartialEq)] -pub(crate) struct Clock(HashMap); - -// A general clock is greater if it has one element the other does not or has a counter higher than -// the other for a given actor. -// -// It is equal with another clock if it has the same entries everywhere. -// -// It is less than another clock otherwise. -impl PartialOrd for Clock { - fn partial_cmp(&self, other: &Self) -> Option { - if self.0 == other.0 { - Some(Ordering::Equal) - } else if self.is_greater(other) { - Some(Ordering::Greater) - } else if other.is_greater(self) { - Some(Ordering::Less) - } else { - // concurrent - None - } - } -} - -impl Clock { - pub(crate) fn new() -> Self { - Clock(Default::default()) - } - - pub(crate) fn include(&mut self, actor_index: usize, data: ClockData) { - self.0 - .entry(actor_index) - .and_modify(|d| { - if data.max_op > d.max_op { - *d = data; - } - }) - .or_insert(data); - } - - pub(crate) fn covers(&self, id: &OpId) -> bool { - if let Some(data) = self.0.get(&id.actor()) { - data.max_op >= id.counter() - } else { - false - } - } - - /// Get the max_op counter recorded in this clock for the actor. 
- pub(crate) fn get_for_actor(&self, actor_index: &usize) -> Option<&ClockData> { - self.0.get(actor_index) - } - - fn is_greater(&self, other: &Self) -> bool { - let mut has_greater = false; - - let mut others_found = 0; - - for (actor, data) in &self.0 { - if let Some(other_data) = other.0.get(actor) { - if data < other_data { - // may be concurrent or less - return false; - } else if data > other_data { - has_greater = true; - } - others_found += 1; - } else { - // other doesn't have this so effectively has a greater element - has_greater = true; - } - } - - if has_greater { - // if they are equal then we have seen every key in the other clock and have at least - // one greater element so our clock is greater - // - // If they aren't the same then we haven't seen every key but have a greater element - // anyway so are concurrent - others_found == other.0.len() - } else { - // our clock doesn't have anything greater than the other clock so can't be greater but - // could still be concurrent - false - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn covers() { - let mut clock = Clock::new(); - - clock.include(1, ClockData { max_op: 20, seq: 1 }); - clock.include(2, ClockData { max_op: 10, seq: 2 }); - - assert!(clock.covers(&OpId::new(10, 1))); - assert!(clock.covers(&OpId::new(20, 1))); - assert!(!clock.covers(&OpId::new(30, 1))); - - assert!(clock.covers(&OpId::new(5, 2))); - assert!(clock.covers(&OpId::new(10, 2))); - assert!(!clock.covers(&OpId::new(15, 2))); - - assert!(!clock.covers(&OpId::new(1, 3))); - assert!(!clock.covers(&OpId::new(100, 3))); - } - - #[test] - fn comparison() { - let mut base_clock = Clock::new(); - base_clock.include(1, ClockData { max_op: 1, seq: 1 }); - base_clock.include(2, ClockData { max_op: 1, seq: 1 }); - - let mut after_clock = base_clock.clone(); - after_clock.include(1, ClockData { max_op: 2, seq: 2 }); - - assert!(after_clock > base_clock); - assert!(base_clock < after_clock); - - assert!(base_clock == 
base_clock); - - let mut new_actor_clock = base_clock.clone(); - new_actor_clock.include(3, ClockData { max_op: 1, seq: 1 }); - - assert_eq!( - base_clock.partial_cmp(&new_actor_clock), - Some(Ordering::Less) - ); - assert_eq!( - new_actor_clock.partial_cmp(&base_clock), - Some(Ordering::Greater) - ); - - assert_eq!(after_clock.partial_cmp(&new_actor_clock), None); - assert_eq!(new_actor_clock.partial_cmp(&after_clock), None); - } -} diff --git a/rust/automerge/src/columnar.rs b/rust/automerge/src/columnar.rs deleted file mode 100644 index bb727626..00000000 --- a/rust/automerge/src/columnar.rs +++ /dev/null @@ -1,14 +0,0 @@ -//! Types for reading data which is stored in a columnar storage format -//! -//! The details of how values are encoded in `encoding`, which exposes a set of "decoder" and -//! "encoder" types. -//! -//! The `column_range` module exposes a set of types - most of which are newtypes over -//! `Range` - which have useful instance methods such as `encode()` to create a new range and -//! `decoder()` to return an iterator of the correct type. 
-pub(crate) mod column_range; -pub(crate) use column_range::Key; -pub(crate) mod encoding; - -mod splice_error; -pub(crate) use splice_error::SpliceError; diff --git a/rust/automerge/src/columnar/column_range.rs b/rust/automerge/src/columnar/column_range.rs deleted file mode 100644 index 5762ed14..00000000 --- a/rust/automerge/src/columnar/column_range.rs +++ /dev/null @@ -1,21 +0,0 @@ -mod rle; -pub(crate) use rle::RleRange; -mod delta; -pub(crate) use delta::DeltaRange; -mod boolean; -pub(crate) use boolean::BooleanRange; -mod raw; -pub(crate) use raw::RawRange; -mod opid; -pub(crate) use opid::{OpIdEncoder, OpIdIter, OpIdRange}; -mod opid_list; -pub(crate) use opid_list::{OpIdListEncoder, OpIdListIter, OpIdListRange}; -mod deps; -pub(crate) use deps::{DepsIter, DepsRange}; -mod value; -pub(crate) use value::{ValueEncoder, ValueIter, ValueRange}; -pub(crate) mod generic; -mod key; -pub(crate) use key::{Key, KeyEncoder, KeyIter, KeyRange}; -mod obj_id; -pub(crate) use obj_id::{ObjIdEncoder, ObjIdIter, ObjIdRange}; diff --git a/rust/automerge/src/columnar/column_range/boolean.rs b/rust/automerge/src/columnar/column_range/boolean.rs deleted file mode 100644 index 3cefaf0d..00000000 --- a/rust/automerge/src/columnar/column_range/boolean.rs +++ /dev/null @@ -1,40 +0,0 @@ -use std::{borrow::Cow, ops::Range}; - -use crate::columnar::encoding::{BooleanDecoder, BooleanEncoder}; - -#[derive(Clone, Debug, PartialEq)] -pub(crate) struct BooleanRange(Range); - -impl BooleanRange { - pub(crate) fn decoder<'a>(&self, data: &'a [u8]) -> BooleanDecoder<'a> { - BooleanDecoder::from(Cow::Borrowed(&data[self.0.clone()])) - } - - pub(crate) fn encode>(items: I, out: &mut Vec) -> Self { - let start = out.len(); - let mut encoder = BooleanEncoder::from(out); - for i in items { - encoder.append(i); - } - let (_, len) = encoder.finish(); - (start..(start + len)).into() - } -} - -impl AsRef> for BooleanRange { - fn as_ref(&self) -> &Range { - &self.0 - } -} - -impl From> for BooleanRange 
{ - fn from(r: Range) -> BooleanRange { - BooleanRange(r) - } -} - -impl From for Range { - fn from(r: BooleanRange) -> Range { - r.0 - } -} diff --git a/rust/automerge/src/columnar/column_range/delta.rs b/rust/automerge/src/columnar/column_range/delta.rs deleted file mode 100644 index 9dae43b8..00000000 --- a/rust/automerge/src/columnar/column_range/delta.rs +++ /dev/null @@ -1,152 +0,0 @@ -use std::{borrow::Cow, convert::Infallible, ops::Range}; - -use crate::columnar::{ - encoding::{raw, DeltaDecoder, DeltaEncoder, Sink}, - SpliceError, -}; - -#[derive(Clone, Debug, PartialEq)] -pub(crate) struct DeltaRange(Range); - -impl DeltaRange { - pub(crate) fn decoder<'a>(&self, data: &'a [u8]) -> DeltaDecoder<'a> { - DeltaDecoder::from(Cow::Borrowed(&data[self.0.clone()])) - } - - pub(crate) fn encoder(&self, output: S) -> DeltaEncoder { - DeltaEncoder::from(output) - } - - pub(crate) fn len(&self) -> usize { - self.0.len() - } - - pub(crate) fn encode>>(items: I, out: &mut Vec) -> Self { - // SAFETY: The incoming iterator is infallible and there are no existing items - Self::from(0..0) - .splice::(&[], 0..0, items.map(Ok), out) - .unwrap() - } - - pub(crate) fn splice, E>>>( - &self, - data: &[u8], - replace: Range, - mut replace_with: I, - out: &mut Vec, - ) -> Result> { - let start = out.len(); - let mut decoder = self.decoder(data); - let mut encoder = self.encoder(out); - let mut idx = 0; - while idx < replace.start { - match decoder - .next() - .transpose() - .map_err(SpliceError::ReadExisting)? - { - Some(elem) => encoder.append(elem), - None => panic!("out of bounds"), - } - idx += 1; - } - for _ in 0..replace.len() { - decoder - .next() - .transpose() - .map_err(SpliceError::ReadExisting)?; - if let Some(next) = replace_with - .next() - .transpose() - .map_err(SpliceError::ReadReplace)? 
- { - encoder.append(next); - } - } - for next in replace_with { - let next = next.map_err(SpliceError::ReadReplace)?; - encoder.append(next); - } - for next in decoder { - let next = next.map_err(SpliceError::ReadExisting)?; - encoder.append(next); - } - let (_, len) = encoder.finish(); - Ok((start..(start + len)).into()) - } -} - -impl AsRef> for DeltaRange { - fn as_ref(&self) -> &Range { - &self.0 - } -} - -impl From> for DeltaRange { - fn from(r: Range) -> DeltaRange { - DeltaRange(r) - } -} - -impl From for Range { - fn from(r: DeltaRange) -> Range { - r.0 - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::columnar::encoding::properties::option_splice_scenario; - use proptest::prelude::*; - - fn encode>>(vals: I) -> (DeltaRange, Vec) { - let mut buf = Vec::::new(); - let range = DeltaRange::encode(vals, &mut buf); - (range, buf) - } - - fn decode(range: DeltaRange, buf: &[u8]) -> Vec> { - range.decoder(buf).collect::, _>>().unwrap() - } - - fn encodable_int() -> impl Strategy + Clone { - 0..(i64::MAX / 2) - } - - proptest! 
{ - #[test] - fn encode_decode_delta(vals in proptest::collection::vec(proptest::option::of(encodable_int()), 0..100)) { - let (r, encoded) = encode(vals.iter().copied()); - if vals.iter().all(|v| v.is_none()) { - assert_eq!(encoded.len(), 0); - let decoded = decode(r, &encoded); - assert_eq!(Vec::>::new(), decoded) - } else { - let decoded = decode(r, &encoded); - assert_eq!(vals, decoded) - } - } - - #[test] - fn splice_delta(scenario in option_splice_scenario(proptest::option::of(encodable_int()))) { - let (range, encoded) = encode(scenario.initial_values.iter().copied()); - let mut out = Vec::new(); - let replacements: Vec, Infallible>> = scenario.replacements.iter().cloned().map(Ok).collect(); - let new_range = range.splice(&encoded, scenario.replace_range.clone(), replacements.into_iter(), &mut out).unwrap(); - let decoded = decode(new_range, &out); - scenario.check_optional(decoded); - } - } - - #[test] - fn bugbug() { - let vals: Vec = vec![6, 5, 8, 9, 10, 11, 12, 13]; - let (r, encoded) = encode(vals.iter().copied().map(Some)); - let decoded = decode(r, &encoded) - .into_iter() - .map(Option::unwrap) - .collect::>(); - assert_eq!(decoded, vals); - } -} diff --git a/rust/automerge/src/columnar/column_range/deps.rs b/rust/automerge/src/columnar/column_range/deps.rs deleted file mode 100644 index 1956acd1..00000000 --- a/rust/automerge/src/columnar/column_range/deps.rs +++ /dev/null @@ -1,123 +0,0 @@ -use super::{DeltaRange, RleRange}; -use crate::columnar::encoding::{DecodeColumnError, DeltaDecoder, RleDecoder}; - -/// A grouped column containing lists of u64s -#[derive(Clone, Debug)] -pub(crate) struct DepsRange { - num: RleRange, - deps: DeltaRange, -} - -impl DepsRange { - pub(crate) fn new(num: RleRange, deps: DeltaRange) -> Self { - Self { num, deps } - } - - pub(crate) fn num_range(&self) -> &RleRange { - &self.num - } - - pub(crate) fn deps_range(&self) -> &DeltaRange { - &self.deps - } - - pub(crate) fn encode(deps: I, out: &mut Vec) -> DepsRange - 
where - I: Iterator + Clone, - II: IntoIterator + ExactSizeIterator, - { - let num = RleRange::encode(deps.clone().map(|d| Some(d.len() as u64)), out); - let deps = DeltaRange::encode( - deps.flat_map(|d| d.into_iter().map(|d| Some(d as i64))), - out, - ); - DepsRange { num, deps } - } - - pub(crate) fn iter<'a>(&self, data: &'a [u8]) -> DepsIter<'a> { - DepsIter { - num: self.num.decoder(data), - deps: self.deps.decoder(data), - } - } -} - -#[derive(Clone)] -pub(crate) struct DepsIter<'a> { - num: RleDecoder<'a, u64>, - deps: DeltaDecoder<'a>, -} - -impl<'a> DepsIter<'a> { - fn try_next(&mut self) -> Result>, DecodeColumnError> { - let num = match self - .num - .next() - .transpose() - .map_err(|e| DecodeColumnError::decode_raw("num", e))? - { - Some(Some(n)) => n as usize, - Some(None) => { - return Err(DecodeColumnError::unexpected_null("group")); - } - None => return Ok(None), - }; - // We cannot trust `num` because it is provided over the network, - // but in the common case it will be correct and small (so we - // use with_capacity to make sure the vector is precisely the right - // size). - let mut result = Vec::with_capacity(std::cmp::min(num, 100)); - while result.len() < num { - match self - .deps - .next() - .transpose() - .map_err(|e| DecodeColumnError::decode_raw("deps", e))? 
- { - Some(Some(elem)) => { - let elem = match u64::try_from(elem) { - Ok(e) => e, - Err(e) => { - tracing::error!(err=?e, dep=elem, "error converting dep index to u64"); - return Err(DecodeColumnError::invalid_value( - "deps", - "error converting dep index to u64", - )); - } - }; - result.push(elem); - } - _ => return Err(DecodeColumnError::unexpected_null("deps")), - } - } - Ok(Some(result)) - } -} - -impl<'a> Iterator for DepsIter<'a> { - type Item = Result, DecodeColumnError>; - fn next(&mut self) -> Option { - self.try_next().transpose() - } -} - -#[cfg(test)] -mod tests { - use super::*; - use proptest::collection::vec as propvec; - use proptest::prelude::*; - - fn encodable_u64() -> impl Strategy + Clone { - 0_u64..((i64::MAX / 2) as u64) - } - - proptest! { - #[test] - fn encode_decode_deps(deps in propvec(propvec(encodable_u64(), 0..100), 0..100)) { - let mut out = Vec::new(); - let range = DepsRange::encode(deps.iter().cloned().map(|d| d.into_iter()), &mut out); - let decoded = range.iter(&out).collect::, _>>().unwrap(); - assert_eq!(deps, decoded); - } - } -} diff --git a/rust/automerge/src/columnar/column_range/generic.rs b/rust/automerge/src/columnar/column_range/generic.rs deleted file mode 100644 index 03a0e362..00000000 --- a/rust/automerge/src/columnar/column_range/generic.rs +++ /dev/null @@ -1,91 +0,0 @@ -use std::ops::Range; - -use crate::{columnar::encoding::DecodeColumnError, ScalarValue}; - -use super::{ValueIter, ValueRange}; -mod simple; -use simple::SimpleColIter; -pub(crate) use simple::SimpleColRange; -mod group; -use group::GroupIter; -pub(crate) use group::{GroupRange, GroupedColumnRange}; - -/// A range which can represent any column which is valid with respect to the data model of the -/// column oriented storage format. This is primarily intended to be used in two cases: -/// -/// 1. As an intermediate step when parsing binary storage. 
We parse the column metadata into -/// GenericColumnRange, then from there into more specific range types. -/// 2. when we encounter a column which we don't expect but which we still need to retain and -/// re-encode when writing new changes. -/// -/// The generic data model is represented by `CellValue`, an iterator over a generic column will -/// produce a `CellValue` for each row in the column. -#[derive(Debug, Clone)] -pub(crate) enum GenericColumnRange { - /// A "simple" column is one which directly corresponds to a single column in the raw format - Simple(SimpleColRange), - /// A value range consists of two columns and produces `ScalarValue`s - Value(ValueRange), - /// A "group" range consists of zero or more grouped columns and produces `CellValue::Group`s - Group(GroupRange), -} - -impl GenericColumnRange { - pub(crate) fn range(&self) -> Range { - match self { - Self::Simple(sc) => sc.range(), - Self::Value(v) => v.range(), - Self::Group(g) => g.range(), - } - } -} - -/// The type of values which can be stored in a generic column -pub(crate) enum CellValue { - /// The contents of a simple column - Simple(SimpleValue), - /// The values in a set of grouped columns - Group(Vec>), -} - -pub(crate) enum SimpleValue { - Uint(Option), - Int(Option), - String(Option), - Bool(bool), - /// The contents of a value metadata and value raw column - Value(ScalarValue), -} - -#[derive(Debug, Clone)] -#[allow(dead_code)] -pub(crate) enum GenericColIter<'a> { - Simple(SimpleColIter<'a>), - Value(ValueIter<'a>), - Group(GroupIter<'a>), -} - -impl<'a> GenericColIter<'a> { - fn try_next(&mut self) -> Result, DecodeColumnError> { - match self { - Self::Simple(s) => s - .next() - .transpose() - .map_err(|e| DecodeColumnError::decode_raw("a simple column", e)) - .map(|v| v.map(CellValue::Simple)), - Self::Value(v) => v - .next() - .transpose() - .map(|v| v.map(|v| CellValue::Simple(SimpleValue::Value(v)))), - Self::Group(g) => g.next().transpose(), - } - } -} - -impl<'a> Iterator 
for GenericColIter<'a> { - type Item = Result; - - fn next(&mut self) -> Option { - self.try_next().transpose() - } -} diff --git a/rust/automerge/src/columnar/column_range/generic/group.rs b/rust/automerge/src/columnar/column_range/generic/group.rs deleted file mode 100644 index b1392428..00000000 --- a/rust/automerge/src/columnar/column_range/generic/group.rs +++ /dev/null @@ -1,138 +0,0 @@ -use std::ops::Range; - -use super::{CellValue, SimpleColIter, SimpleColRange, SimpleValue}; -use crate::columnar::{ - column_range::{RleRange, ValueIter, ValueRange}, - encoding::{col_error::DecodeColumnError, RleDecoder}, -}; - -/// A group column range is one with a "num" column and zero or more "grouped" columns. The "num" -/// column contains RLE encoded u64s, each `u64` represents the number of values to read from each -/// of the grouped columns in order to produce a `CellValue::Group` for the current row. -#[derive(Debug, Clone)] -pub(crate) struct GroupRange { - pub(crate) num: RleRange, - pub(crate) values: Vec, -} - -impl GroupRange { - pub(crate) fn new(num: RleRange, values: Vec) -> Self { - Self { num, values } - } - - #[allow(dead_code)] - pub(crate) fn iter<'a>(&self, data: &'a [u8]) -> GroupIter<'a> { - GroupIter { - num: self.num.decoder(data), - values: self.values.iter().map(|v| v.iter(data)).collect(), - } - } - - pub(crate) fn range(&self) -> Range { - let start = self.num.start(); - let end = self - .values - .last() - .map(|v| v.range().end) - .unwrap_or_else(|| self.num.end()); - start..end - } -} - -/// The type of ranges which can be the "grouped" columns in a `GroupRange` -#[derive(Debug, Clone)] -pub(crate) enum GroupedColumnRange { - Value(ValueRange), - Simple(SimpleColRange), -} - -impl GroupedColumnRange { - fn iter<'a>(&self, data: &'a [u8]) -> GroupedColIter<'a> { - match self { - Self::Value(vr) => GroupedColIter::Value(vr.iter(data)), - Self::Simple(sc) => GroupedColIter::Simple(sc.iter(data)), - } - } - - pub(crate) fn range(&self) -> 
Range { - match self { - Self::Value(vr) => vr.range(), - Self::Simple(s) => s.range(), - } - } -} - -#[derive(Debug, Clone)] -pub(crate) struct GroupIter<'a> { - num: RleDecoder<'a, u64>, - values: Vec>, -} - -impl<'a> GroupIter<'a> { - fn try_next(&mut self) -> Result, DecodeColumnError> { - let num = self - .num - .next() - .transpose() - .map_err(|e| DecodeColumnError::decode_raw("num", e))?; - match num { - None => Ok(None), - Some(None) => Err(DecodeColumnError::unexpected_null("num")), - Some(Some(num)) => { - let mut row = Vec::new(); - for _ in 0..num { - let mut inner_row = Vec::new(); - for (index, value_col) in self.values.iter_mut().enumerate() { - match value_col.next().transpose()? { - None => { - return Err(DecodeColumnError::unexpected_null(format!( - "col {}", - index - ))) - } - Some(v) => { - inner_row.push(v); - } - } - } - row.push(inner_row); - } - Ok(Some(CellValue::Group(row))) - } - } - } -} - -impl<'a> Iterator for GroupIter<'a> { - type Item = Result; - - fn next(&mut self) -> Option { - self.try_next().transpose() - } -} - -#[derive(Debug, Clone)] -enum GroupedColIter<'a> { - Value(ValueIter<'a>), - Simple(SimpleColIter<'a>), -} - -impl<'a> GroupedColIter<'a> { - fn try_next(&mut self) -> Result, DecodeColumnError> { - match self { - Self::Value(viter) => Ok(viter.next().transpose()?.map(SimpleValue::Value)), - Self::Simple(siter) => siter - .next() - .transpose() - .map_err(|e| DecodeColumnError::decode_raw("a simple column", e)), - } - } -} - -impl<'a> Iterator for GroupedColIter<'a> { - type Item = Result; - - fn next(&mut self) -> Option { - self.try_next().transpose() - } -} diff --git a/rust/automerge/src/columnar/column_range/generic/simple.rs b/rust/automerge/src/columnar/column_range/generic/simple.rs deleted file mode 100644 index 9eb3c177..00000000 --- a/rust/automerge/src/columnar/column_range/generic/simple.rs +++ /dev/null @@ -1,76 +0,0 @@ -use std::ops::Range; - -use crate::columnar::{ - column_range::{BooleanRange, 
DeltaRange, RleRange}, - encoding::{raw, BooleanDecoder, DeltaDecoder, RleDecoder}, -}; - -use super::SimpleValue; - -/// The four types of "simple" column defined in the raw format -#[derive(Debug, Clone)] -pub(crate) enum SimpleColRange { - /// A column containing RLE encoded u64's - RleInt(RleRange), - /// A column containing RLE encoded strings - RleString(RleRange), - /// A column containing delta -> RLE encoded i64s - Delta(DeltaRange), - /// A column containing boolean values - Boolean(BooleanRange), -} - -impl SimpleColRange { - pub(super) fn iter<'a>(&self, data: &'a [u8]) -> SimpleColIter<'a> { - match self { - Self::RleInt(r) => SimpleColIter::RleInt(r.decoder(data)), - Self::RleString(r) => SimpleColIter::RleString(r.decoder(data)), - Self::Delta(r) => SimpleColIter::Delta(r.decoder(data)), - Self::Boolean(r) => SimpleColIter::Boolean(r.decoder(data)), - } - } - - pub(crate) fn range(&self) -> Range { - match self { - Self::RleInt(r) => r.clone().into(), - Self::RleString(r) => r.clone().into(), - Self::Delta(r) => r.clone().into(), - Self::Boolean(r) => r.clone().into(), - } - } -} - -#[derive(Debug, Clone)] -pub(crate) enum SimpleColIter<'a> { - RleInt(RleDecoder<'a, u64>), - RleString(RleDecoder<'a, smol_str::SmolStr>), - Delta(DeltaDecoder<'a>), - Boolean(BooleanDecoder<'a>), -} - -impl<'a> SimpleColIter<'a> { - fn try_next(&mut self) -> Result, raw::Error> { - match self { - Self::RleInt(d) => read_col(d, SimpleValue::Uint), - Self::RleString(d) => read_col(d, SimpleValue::String), - Self::Delta(d) => read_col(d, SimpleValue::Int), - Self::Boolean(d) => Ok(d.next().transpose()?.map(SimpleValue::Bool)), - } - } -} - -fn read_col(mut col: C, f: F) -> Result, raw::Error> -where - C: Iterator, raw::Error>>, - F: Fn(Option) -> U, -{ - col.next().transpose().map(|v| v.map(f)) -} - -impl<'a> Iterator for SimpleColIter<'a> { - type Item = Result; - - fn next(&mut self) -> Option { - self.try_next().transpose() - } -} diff --git 
a/rust/automerge/src/columnar/column_range/key.rs b/rust/automerge/src/columnar/column_range/key.rs deleted file mode 100644 index 70ea8e1e..00000000 --- a/rust/automerge/src/columnar/column_range/key.rs +++ /dev/null @@ -1,258 +0,0 @@ -use std::{convert::Infallible, ops::Range}; - -use super::{DeltaRange, RleRange}; -use crate::{ - columnar::{ - encoding::{ - raw, DecodeColumnError, DeltaDecoder, DeltaEncoder, RleDecoder, RleEncoder, Sink, - }, - SpliceError, - }, - convert, - types::{ElemId, OpId}, -}; - -#[derive(Clone, Debug, PartialEq)] -pub(crate) enum Key { - Prop(smol_str::SmolStr), - Elem(ElemId), -} - -#[derive(Clone, Debug, PartialEq)] -pub(crate) struct KeyRange { - actor: RleRange, - counter: DeltaRange, - string: RleRange, -} - -impl KeyRange { - pub(crate) fn new( - actor: RleRange, - counter: DeltaRange, - string: RleRange, - ) -> Self { - Self { - actor, - counter, - string, - } - } - - pub(crate) fn actor_range(&self) -> &RleRange { - &self.actor - } - - pub(crate) fn counter_range(&self) -> &DeltaRange { - &self.counter - } - - pub(crate) fn string_range(&self) -> &RleRange { - &self.string - } - - pub(crate) fn iter<'a>(&self, data: &'a [u8]) -> KeyIter<'a> { - KeyIter { - actor: self.actor.decoder(data), - counter: self.counter.decoder(data), - string: self.string.decoder(data), - } - } - - pub(crate) fn encode<'b, O, I: Iterator> + Clone>( - items: I, - out: &mut Vec, - ) -> Self - where - O: convert::OpId, - { - // SAFETY: The incoming iterator is infallible and there are no existing items - Self { - actor: (0..0).into(), - counter: (0..0).into(), - string: (0..0).into(), - } - .splice::<_, Infallible, _>(&[], 0..0, items.map(Ok), out) - .unwrap() - } - - /// Splice new keys into this set of keys, encoding the resulting actor, counter, and str - /// columns in `out`. 
- pub(crate) fn splice<'b, O, E, I>( - &mut self, - data: &[u8], - replace: Range, - replace_with: I, - out: &mut Vec, - ) -> Result> - where - O: convert::OpId, - E: std::error::Error, - I: Iterator, E>> + Clone, - { - let actor = self.actor.splice( - data, - replace.clone(), - replace_with.clone().map(|k| { - k.map(|k| match k { - convert::Key::Prop(_) => None, - convert::Key::Elem(convert::ElemId::Head) => None, - convert::Key::Elem(convert::ElemId::Op(o)) => Some(o.actor() as u64), - }) - }), - out, - )?; - - let counter = self.counter.splice( - data, - replace.clone(), - replace_with.clone().map(|k| { - k.map(|k| match k { - convert::Key::Prop(_) => None, - convert::Key::Elem(convert::ElemId::Head) => Some(0), - convert::Key::Elem(convert::ElemId::Op(o)) => Some(o.counter() as i64), - }) - }), - out, - )?; - - let string = self.string.splice( - data, - replace, - replace_with.map(|k| { - k.map(|k| match k { - convert::Key::Prop(s) => Some(s), - convert::Key::Elem(_) => None, - }) - }), - out, - )?; - - Ok(Self { - actor, - counter, - string, - }) - } -} - -#[derive(Clone, Debug)] -pub(crate) struct KeyIter<'a> { - actor: RleDecoder<'a, u64>, - counter: DeltaDecoder<'a>, - string: RleDecoder<'a, smol_str::SmolStr>, -} - -impl<'a> KeyIter<'a> { - fn try_next(&mut self) -> Result, DecodeColumnError> { - let actor = self - .actor - .next() - .transpose() - .map_err(|e| DecodeColumnError::decode_raw("actor", e))?; - let counter = self - .counter - .next() - .transpose() - .map_err(|e| DecodeColumnError::decode_raw("counter", e))?; - let string = self - .string - .next() - .transpose() - .map_err(|e| DecodeColumnError::decode_raw("string", e))?; - match (actor, counter, string) { - (Some(Some(_)), Some(Some(_)), Some(Some(_))) => { - Err(DecodeColumnError::invalid_value("key", "too many values")) - } - (Some(None) | None, Some(None) | None, Some(Some(string))) => { - Ok(Some(Key::Prop(string))) - } - (Some(None) | None, Some(Some(0)), Some(None) | None) => { - 
Ok(Some(Key::Elem(ElemId(OpId::new(0, 0))))) - } - (Some(Some(actor)), Some(Some(ctr)), Some(None) | None) => match ctr.try_into() { - //Ok(ctr) => Some(Ok(Key::Elem(ElemId(OpId(ctr, actor as usize))))), - Ok(ctr) => Ok(Some(Key::Elem(ElemId(OpId::new(ctr, actor as usize))))), - Err(_) => Err(DecodeColumnError::invalid_value( - "counter", - "negative value for counter", - )), - }, - (None | Some(None), None | Some(None), None | Some(None)) => Ok(None), - (None | Some(None), k, _) => { - tracing::error!(key=?k, "unexpected null actor"); - Err(DecodeColumnError::unexpected_null("actor")) - } - (_, None | Some(None), _) => Err(DecodeColumnError::unexpected_null("counter")), - } - } -} - -impl<'a> Iterator for KeyIter<'a> { - type Item = Result; - - fn next(&mut self) -> Option { - self.try_next().transpose() - } -} - -pub(crate) struct KeyEncoder { - actor: RleEncoder, - counter: DeltaEncoder, - string: RleEncoder, -} - -impl KeyEncoder> { - pub(crate) fn new() -> KeyEncoder> { - KeyEncoder { - actor: RleEncoder::new(Vec::new()), - counter: DeltaEncoder::new(Vec::new()), - string: RleEncoder::new(Vec::new()), - } - } - - pub(crate) fn finish(self, out: &mut Vec) -> KeyRange { - let actor_start = out.len(); - let (actor, _) = self.actor.finish(); - out.extend(actor); - let actor_end = out.len(); - - let (counter, _) = self.counter.finish(); - out.extend(counter); - let counter_end = out.len(); - - let (string, _) = self.string.finish(); - out.extend(string); - let string_end = out.len(); - - KeyRange { - actor: (actor_start..actor_end).into(), - counter: (actor_end..counter_end).into(), - string: (counter_end..string_end).into(), - } - } -} - -impl KeyEncoder { - pub(crate) fn append(&mut self, key: convert::Key<'_, O>) - where - O: convert::OpId, - { - match key { - convert::Key::Prop(p) => { - self.string.append_value(p.clone()); - self.actor.append_null(); - self.counter.append_null(); - } - convert::Key::Elem(convert::ElemId::Head) => { - self.string.append_null(); 
- self.actor.append_null(); - self.counter.append_value(0); - } - convert::Key::Elem(convert::ElemId::Op(o)) => { - self.string.append_null(); - self.actor.append_value(o.actor() as u64); - self.counter.append_value(o.counter() as i64); - } - } - } -} diff --git a/rust/automerge/src/columnar/column_range/obj_id.rs b/rust/automerge/src/columnar/column_range/obj_id.rs deleted file mode 100644 index d282563e..00000000 --- a/rust/automerge/src/columnar/column_range/obj_id.rs +++ /dev/null @@ -1,202 +0,0 @@ -use std::{convert::Infallible, ops::Range}; - -use crate::{ - columnar::{ - encoding::{raw, DecodeColumnError, RleDecoder, RleEncoder, Sink}, - SpliceError, - }, - convert, - types::{ObjId, OpId}, -}; - -use super::RleRange; - -#[derive(Debug, Clone, PartialEq)] -pub(crate) struct ObjIdRange { - actor: RleRange, - counter: RleRange, -} - -impl ObjIdRange { - pub(crate) fn new(actor: RleRange, counter: RleRange) -> Option { - if actor.is_empty() || counter.is_empty() { - None - } else { - Some(Self { actor, counter }) - } - } - - pub(crate) fn actor_range(&self) -> &RleRange { - &self.actor - } - - pub(crate) fn counter_range(&self) -> &RleRange { - &self.counter - } - - pub(crate) fn encode> + Clone>( - ids: I, - out: &mut Vec, - ) -> Option - where - O: convert::OpId, - { - // SAFETY: the incoming iterator is infallible and there are no existing elements - Self { - actor: (0..0).into(), - counter: (0..0).into(), - } - .splice::<_, Infallible, _>(&[], 0..0, ids.map(Ok), out) - .unwrap() - } - - /// Given some existing columns of object IDs splice a new set of object IDs in with the - /// existing ones - /// - /// Note that this returns `None` if the resulting range is empty (which will only occur if the - /// replace range is larger than the input iterator and `ids` is an empty iterator). 
- pub(crate) fn splice< - O, - E: std::error::Error, - I: Iterator, E>> + Clone, - >( - &self, - data: &[u8], - replace: Range, - ids: I, - out: &mut Vec, - ) -> Result, SpliceError> - where - O: convert::OpId, - { - let actor = self.actor.splice( - data, - replace.clone(), - ids.clone().map(|id| id.map(encoded_actor)), - out, - )?; - - if actor.is_empty() { - return Ok(None); - } - - let counter = self.counter.splice( - data, - replace, - ids.map(|i| { - i.map(|i| match i { - convert::ObjId::Root => None, - convert::ObjId::Op(o) => Some(o.counter()), - }) - }), - out, - )?; - - Ok(Some(Self { actor, counter })) - } - - pub(crate) fn iter<'a>(&self, data: &'a [u8]) -> ObjIdIter<'a> { - ObjIdIter { - actor: self.actor.decoder(data), - counter: self.counter.decoder(data), - } - } -} - -fn encoded_actor(id: convert::ObjId) -> Option -where - O: convert::OpId, -{ - match id { - convert::ObjId::Root => None, - convert::ObjId::Op(o) => Some(o.actor() as u64), - } -} - -#[derive(Clone)] -pub(crate) struct ObjIdIter<'a> { - actor: RleDecoder<'a, u64>, - counter: RleDecoder<'a, u64>, -} - -impl<'a> ObjIdIter<'a> { - fn try_next(&mut self) -> Result, DecodeColumnError> { - let actor = self - .actor - .next() - .transpose() - .map_err(|e| DecodeColumnError::decode_raw("actor", e))?; - let counter = self - .counter - .next() - .transpose() - .map_err(|e| DecodeColumnError::decode_raw("counter", e))?; - match (actor, counter) { - (None | Some(None), None | Some(None)) => Ok(Some(ObjId::root())), - (Some(Some(a)), Some(Some(c))) => Ok(Some(ObjId(OpId::new(c, a as usize)))), - (_, Some(Some(0))) => Ok(Some(ObjId::root())), - (Some(None) | None, _) => Err(DecodeColumnError::unexpected_null("actor")), - (_, Some(None) | None) => Err(DecodeColumnError::unexpected_null("counter")), - } - } -} - -impl<'a> Iterator for ObjIdIter<'a> { - type Item = Result; - - fn next(&mut self) -> Option { - self.try_next().transpose() - } -} - -pub(crate) struct ObjIdEncoder { - actor: RleEncoder, - 
counter: RleEncoder, -} - -impl ObjIdEncoder { - pub(crate) fn append(&mut self, id: convert::ObjId) - where - O: convert::OpId, - { - match id { - convert::ObjId::Root => { - self.actor.append_null(); - self.counter.append_null(); - } - convert::ObjId::Op(o) => { - self.actor.append_value(o.actor() as u64); - self.counter.append_value(o.counter()); - } - } - } -} - -impl ObjIdEncoder> { - pub(crate) fn new() -> Self { - Self { - actor: RleEncoder::from(Vec::new()), - counter: RleEncoder::from(Vec::new()), - } - } - - pub(crate) fn finish(self, out: &mut Vec) -> Option { - let start = out.len(); - let (actor, _) = self.actor.finish(); - out.extend(actor); - let actor_end = out.len(); - - let (counter, _) = self.counter.finish(); - out.extend(counter); - let counter_end = out.len(); - - if start == counter_end { - None - } else { - Some(ObjIdRange { - actor: (start..actor_end).into(), - counter: (actor_end..counter_end).into(), - }) - } - } -} diff --git a/rust/automerge/src/columnar/column_range/opid.rs b/rust/automerge/src/columnar/column_range/opid.rs deleted file mode 100644 index d2cdce79..00000000 --- a/rust/automerge/src/columnar/column_range/opid.rs +++ /dev/null @@ -1,210 +0,0 @@ -use std::ops::Range; - -use super::{DeltaRange, RleRange}; -use crate::{ - columnar::{ - encoding::{ - raw, DecodeColumnError, DeltaDecoder, DeltaEncoder, RleDecoder, RleEncoder, Sink, - }, - SpliceError, - }, - convert, - types::OpId, -}; - -#[derive(Debug, Clone)] -pub(crate) struct OpIdRange { - actor: RleRange, - counter: DeltaRange, -} - -impl OpIdRange { - pub(crate) fn new(actor: RleRange, counter: DeltaRange) -> Self { - Self { actor, counter } - } - - pub(crate) fn actor_range(&self) -> &RleRange { - &self.actor - } - - pub(crate) fn counter_range(&self) -> &DeltaRange { - &self.counter - } - - pub(crate) fn iter<'a>(&self, data: &'a [u8]) -> OpIdIter<'a> { - OpIdIter { - actor: self.actor.decoder(data), - counter: self.counter.decoder(data), - } - } - - pub(crate) fn 
encode(opids: I, out: &mut Vec) -> Self - where - O: convert::OpId, - I: Iterator + Clone, - { - let actor = RleRange::encode(opids.clone().map(|o| Some(o.actor() as u64)), out); - let counter = DeltaRange::encode(opids.map(|o| Some(o.counter() as i64)), out); - Self { actor, counter } - } - - #[allow(dead_code)] - pub(crate) fn splice( - &self, - data: &[u8], - replace: Range, - replace_with: I, - out: &mut Vec, - ) -> Result> - where - O: convert::OpId, - E: std::error::Error, - I: Iterator> + Clone, - { - let actor = self.actor.splice( - data, - replace.clone(), - replace_with - .clone() - .map(|i| i.map(|i| Some(i.actor() as u64))), - out, - )?; - let counter = self.counter.splice( - data, - replace, - replace_with.map(|i| i.map(|i| Some(i.counter() as i64))), - out, - )?; - Ok(Self { actor, counter }) - } -} - -#[derive(Clone)] -pub(crate) struct OpIdIter<'a> { - actor: RleDecoder<'a, u64>, - counter: DeltaDecoder<'a>, -} - -impl<'a> OpIdIter<'a> { - pub(crate) fn done(&self) -> bool { - self.counter.done() - } -} - -impl<'a> OpIdIter<'a> { - fn try_next(&mut self) -> Result, DecodeColumnError> { - let actor = self - .actor - .next() - .transpose() - .map_err(|e| DecodeColumnError::decode_raw("actor", e))?; - let counter = self - .counter - .next() - .transpose() - .map_err(|e| DecodeColumnError::decode_raw("counter", e))?; - match (actor, counter) { - (Some(Some(a)), Some(Some(c))) => match u32::try_from(c) { - Ok(c) => Ok(Some(OpId::new(c as u64, a as usize))), - Err(_) => Err(DecodeColumnError::invalid_value( - "counter", - "negative or large value encountered", - )), - }, - (Some(None), _) => Err(DecodeColumnError::unexpected_null("actor")), - (_, Some(None)) => Err(DecodeColumnError::unexpected_null("actor")), - (Some(_), None) => Err(DecodeColumnError::unexpected_null("ctr")), - (None, Some(_)) => Err(DecodeColumnError::unexpected_null("actor")), - (None, None) => Ok(None), - } - } -} - -impl<'a> Iterator for OpIdIter<'a> { - type Item = Result; - - fn 
next(&mut self) -> Option { - self.try_next().transpose() - } -} - -pub(crate) struct OpIdEncoder { - actor: RleEncoder, - counter: DeltaEncoder, -} - -impl OpIdEncoder { - pub(crate) fn append>(&mut self, opid: O) { - self.actor.append_value(opid.actor() as u64); - self.counter.append_value(opid.counter() as i64); - } -} - -impl OpIdEncoder> { - pub(crate) fn new() -> Self { - Self { - actor: RleEncoder::from(Vec::new()), - counter: DeltaEncoder::from(Vec::new()), - } - } - - pub(crate) fn finish(self, out: &mut Vec) -> OpIdRange { - let start = out.len(); - let (actor, _) = self.actor.finish(); - out.extend(actor); - let actor_end = out.len(); - - let (counter, _) = self.counter.finish(); - out.extend(counter); - let counter_end = out.len(); - - OpIdRange { - actor: (start..actor_end).into(), - counter: (actor_end..counter_end).into(), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{ - columnar::encoding::properties::{opid, splice_scenario}, - types::OpId, - }; - use proptest::prelude::*; - use std::convert::Infallible; - - fn encode(vals: &[OpId]) -> (Vec, OpIdRange) { - let mut out = Vec::new(); - let r = OpIdRange::encode(vals.iter().copied(), &mut out); - (out, r) - } - - fn decode(buf: &[u8], range: OpIdRange) -> Vec { - range.iter(buf).map(|c| c.unwrap()).collect() - } - - proptest! 
{ - #[test] - fn encode_decode_opid(opids in proptest::collection::vec(opid(), 0..100)) { - let (encoded, range) = encode(&opids); - assert_eq!(opids, decode(&encoded[..], range)); - } - - #[test] - fn splice_opids(scenario in splice_scenario(opid())) { - let (encoded, range) = encode(&scenario.initial_values); - let mut out = Vec::new(); - let replacements: Vec> = scenario.replacements.iter().cloned().map(Ok).collect(); - let new_range = range.splice( - &encoded, - scenario.replace_range.clone(), - replacements.into_iter(), - &mut out - ).unwrap(); - let result = decode(&out[..], new_range); - scenario.check(result); - } - } -} diff --git a/rust/automerge/src/columnar/column_range/opid_list.rs b/rust/automerge/src/columnar/column_range/opid_list.rs deleted file mode 100644 index 6a9c8a38..00000000 --- a/rust/automerge/src/columnar/column_range/opid_list.rs +++ /dev/null @@ -1,329 +0,0 @@ -use std::{convert::Infallible, ops::Range}; - -use super::{DeltaRange, RleRange}; -use crate::{ - columnar::{ - encoding::{ - raw, DecodeColumnError, DeltaDecoder, DeltaEncoder, RleDecoder, RleEncoder, Sink, - }, - SpliceError, - }, - convert, - types::OpId, -}; - -/// A collection of ranges which decode to lists of OpIds -#[derive(Clone, Debug, PartialEq)] -pub(crate) struct OpIdListRange { - num: RleRange, - actor: RleRange, - counter: DeltaRange, -} - -impl OpIdListRange { - pub(crate) fn new(num: RleRange, actor: RleRange, counter: DeltaRange) -> Self { - Self { - num, - actor, - counter, - } - } - - pub(crate) fn group_range(&self) -> &RleRange { - &self.num - } - - pub(crate) fn actor_range(&self) -> &RleRange { - &self.actor - } - - pub(crate) fn counter_range(&self) -> &DeltaRange { - &self.counter - } - - pub(crate) fn iter<'a>(&self, data: &'a [u8]) -> OpIdListIter<'a> { - OpIdListIter { - num: self.num.decoder(data), - actor: self.actor.decoder(data), - counter: self.counter.decoder(data), - } - } - - pub(crate) fn encode(opids: I, out: &mut Vec) -> Self - where - O: 
convert::OpId, - II: IntoIterator, - IE: Iterator + ExactSizeIterator, - I: Iterator + Clone, - { - let num = RleRange::encode( - opids.clone().map(|os| Some(os.into_iter().len() as u64)), - out, - ); - let actor = RleRange::encode( - opids - .clone() - .flat_map(|os| os.into_iter().map(|o| Some(o.actor() as u64))), - out, - ); - let counter = DeltaRange::encode( - opids.flat_map(|os| os.into_iter().map(|o| Some(o.counter() as i64))), - out, - ); - Self { - num, - actor, - counter, - } - } - - #[allow(dead_code)] - pub(crate) fn splice( - &self, - data: &[u8], - replace: Range, - replace_with: I, - out: &mut Vec, - ) -> Result> - where - R: std::error::Error + Clone, - II: IntoIterator, - IE: Iterator + ExactSizeIterator, - I: Iterator> + Clone, - { - let group_replace = group_replace_range(replace.clone(), self.num.decoder(data)) - .map_err(|e| e.existing())?; - let num = self.num.splice( - data, - replace, - replace_with - .clone() - .map(|elems| elems.map(|elems| Some(elems.into_iter().len() as u64))), - out, - )?; - let actor = self.actor.splice( - data, - group_replace.clone(), - replace_with.clone().flat_map(|elem| match elem { - Err(e) => SplicingIter::Failed(e), - Ok(i) => SplicingIter::Iter(i.into_iter(), |oid: OpId| oid.actor() as u64), - }), - out, - )?; - let counter = self.counter.splice( - data, - group_replace, - replace_with.flat_map(|elem| match elem { - Err(e) => SplicingIter::Failed(e), - Ok(i) => SplicingIter::Iter(i.into_iter(), |oid: OpId| oid.counter() as i64), - }), - out, - )?; - Ok(Self { - num, - actor, - counter, - }) - } -} - -enum SplicingIter { - Failed(E), - Iter(I, F), -} - -impl Iterator for SplicingIter -where - E: std::error::Error + Clone, - I: Iterator, - F: Fn(OpId) -> U, -{ - type Item = Result, E>; - - fn next(&mut self) -> Option { - match self { - Self::Failed(e) => Some(Err(e.clone())), - Self::Iter(i, f) => i.next().map(|oid| Ok(Some(f(oid)))), - } - } -} - -/// Find the replace range for the grouped columns. 
-fn group_replace_range( - replace: Range, - mut num: RleDecoder<'_, u64>, -) -> Result, SpliceError> { - let mut idx = 0; - let mut grouped_replace_start: usize = 0; - let mut grouped_replace_len: usize = 0; - while idx < replace.start { - if let Some(Some(count)) = num.next().transpose().map_err(SpliceError::ReadExisting)? { - grouped_replace_start += count as usize; - } - idx += 1; - } - for _ in 0..replace.len() { - if let Some(Some(count)) = num.next().transpose().map_err(SpliceError::ReadExisting)? { - grouped_replace_len += count as usize; - } - } - Ok(grouped_replace_start..(grouped_replace_start + grouped_replace_len)) -} - -#[derive(Clone)] -pub(crate) struct OpIdListIter<'a> { - num: RleDecoder<'a, u64>, - actor: RleDecoder<'a, u64>, - counter: DeltaDecoder<'a>, -} - -impl<'a> OpIdListIter<'a> { - fn try_next(&mut self) -> Result>, DecodeColumnError> { - let num = match self - .num - .next() - .transpose() - .map_err(|e| DecodeColumnError::decode_raw("num", e))? - { - Some(Some(n)) => n, - Some(None) => return Err(DecodeColumnError::unexpected_null("num")), - None => return Ok(None), - }; - - // We cannot trust `num` because it is provided over the network, - // but in the common case it will be correct and small (so we - // use with_capacity to make sure the vector is precisely the right - // size). 
- let mut p = Vec::with_capacity(std::cmp::min(num, 100) as usize); - for _ in 0..num { - let actor = self - .actor - .next() - .transpose() - .map_err(|e| DecodeColumnError::decode_raw("actor", e))?; - let counter = self - .counter - .next() - .transpose() - .map_err(|e| DecodeColumnError::decode_raw("counter", e))?; - match (actor, counter) { - (Some(Some(a)), Some(Some(ctr))) => match ctr.try_into() { - Ok(ctr) => p.push(OpId::new(ctr, a as usize)), - Err(_e) => { - return Err(DecodeColumnError::invalid_value( - "counter", - "negative value for counter", - )) - } - }, - (Some(None) | None, _) => return Err(DecodeColumnError::unexpected_null("actor")), - (_, Some(None) | None) => { - return Err(DecodeColumnError::unexpected_null("counter")) - } - } - } - Ok(Some(p)) - } -} - -impl<'a> Iterator for OpIdListIter<'a> { - type Item = Result, DecodeColumnError>; - - fn next(&mut self) -> Option { - self.try_next().transpose() - } -} - -pub(crate) struct OpIdListEncoder { - num: RleEncoder, - actor: RleEncoder, - counter: DeltaEncoder, -} - -impl OpIdListEncoder { - pub(crate) fn append(&mut self, ids: I) - where - I: Iterator + ExactSizeIterator, - O: convert::OpId, - { - self.num.append_value(ids.len() as u64); - for id in ids { - self.actor.append_value(id.actor() as u64); - self.counter.append_value(id.counter() as i64); - } - } -} - -impl OpIdListEncoder> { - pub(crate) fn new() -> Self { - Self { - num: RleEncoder::from(Vec::new()), - actor: RleEncoder::from(Vec::new()), - counter: DeltaEncoder::from(Vec::new()), - } - } - - pub(crate) fn finish(self, out: &mut Vec) -> OpIdListRange { - let start = out.len(); - let (num, _) = self.num.finish(); - out.extend(num); - let num_end = out.len(); - - let (actor, _) = self.actor.finish(); - out.extend(actor); - let actor_end = out.len(); - - let (counter, _) = self.counter.finish(); - out.extend(counter); - let counter_end = out.len(); - - OpIdListRange { - num: (start..num_end).into(), - actor: 
(num_end..actor_end).into(), - counter: (actor_end..counter_end).into(), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use proptest::collection::vec as propvec; - use proptest::prelude::*; - - use crate::columnar::encoding::properties::{opid, splice_scenario}; - - fn encode(opids: Vec>) -> (OpIdListRange, Vec) { - let mut out = Vec::new(); - let range = OpIdListRange::encode(opids.iter(), &mut out); - (range, out) - } - - fn decode(range: OpIdListRange, buf: &[u8]) -> Vec> { - range.iter(buf).map(|c| c.unwrap()).collect() - } - - proptest! { - #[test] - fn encode_decode_opid_list(opids in propvec(propvec(opid(), 0..100), 0..100)){ - let (range, encoded) = encode(opids.clone()); - let result = decode(range, &encoded); - assert_eq!(opids, result) - } - - #[test] - fn splice_opid_list(scenario in splice_scenario(propvec(opid(), 0..100))) { - let (range, encoded) = encode(scenario.initial_values.clone()); - let mut out = Vec::new(); - let replacements: Vec, Infallible>> = scenario.replacements.iter().cloned().map(Ok).collect(); - let new_range = range.splice( - &encoded, - scenario.replace_range.clone(), - replacements.into_iter(), - &mut out - ).unwrap(); - let result = decode(new_range, &out[..]); - scenario.check(result); - } - } -} diff --git a/rust/automerge/src/columnar/column_range/raw.rs b/rust/automerge/src/columnar/column_range/raw.rs deleted file mode 100644 index 3520a89a..00000000 --- a/rust/automerge/src/columnar/column_range/raw.rs +++ /dev/null @@ -1,38 +0,0 @@ -use std::{borrow::Cow, ops::Range}; - -use crate::columnar::encoding::RawDecoder; - -#[derive(Clone, Debug, PartialEq)] -pub(crate) struct RawRange(Range); - -impl RawRange { - pub(crate) fn decoder<'a>(&self, data: &'a [u8]) -> RawDecoder<'a> { - RawDecoder::from(Cow::Borrowed(&data[self.0.clone()])) - } - - pub(crate) fn is_empty(&self) -> bool { - self.0.is_empty() - } - - pub(crate) fn end(&self) -> usize { - self.0.end - } -} - -impl AsRef> for RawRange { - fn as_ref(&self) -> 
&Range { - &self.0 - } -} - -impl From> for RawRange { - fn from(r: Range) -> RawRange { - RawRange(r) - } -} - -impl From for Range { - fn from(r: RawRange) -> Range { - r.0 - } -} diff --git a/rust/automerge/src/columnar/column_range/rle.rs b/rust/automerge/src/columnar/column_range/rle.rs deleted file mode 100644 index c500a7f4..00000000 --- a/rust/automerge/src/columnar/column_range/rle.rs +++ /dev/null @@ -1,216 +0,0 @@ -use std::{ - borrow::{Borrow, Cow}, - fmt::Debug, - marker::PhantomData, - ops::Range, -}; - -use crate::columnar::{ - encoding::{raw, Decodable, Encodable, RleDecoder, RleEncoder, Sink}, - SpliceError, -}; - -#[derive(Clone, Debug, PartialEq)] -pub(crate) struct RleRange { - range: Range, - _phantom: PhantomData, -} - -impl RleRange { - pub(crate) fn decoder<'a>(&self, data: &'a [u8]) -> RleDecoder<'a, T> { - RleDecoder::from(Cow::Borrowed(&data[self.range.clone()])) - } - - pub(crate) fn is_empty(&self) -> bool { - self.range.is_empty() - } - - pub(crate) fn start(&self) -> usize { - self.range.start - } - - pub(crate) fn end(&self) -> usize { - self.range.end - } -} - -impl RleRange { - /// The semantics of this are similar to `Vec::splice` - /// - /// # Arguments - /// - /// * `data` - The buffer containing the original rows - /// * `replace` - The range of elements in the original collection to replace - /// * `replace_with` - An iterator to insert in place of the original elements. - /// * `out` - The buffer to encode the resulting collection into - pub(crate) fn splice< - 'a, - I: Iterator, E>>, - TB: Borrow + 'a, - E: std::error::Error, - >( - &self, - data: &[u8], - replace: Range, - mut replace_with: I, - out: &mut Vec, - ) -> Result> { - let start = out.len(); - let mut encoder = self.encoder(out); - let mut decoder = self.decoder(data); - let mut idx = 0; - while idx < replace.start { - match decoder - .next() - .transpose() - .map_err(SpliceError::ReadExisting)? 
- { - Some(elem) => encoder.append(elem.as_ref()), - None => panic!("out of bounds"), - } - idx += 1; - } - for _ in 0..replace.len() { - decoder.next(); - if let Some(next) = replace_with - .next() - .transpose() - .map_err(SpliceError::ReadReplace)? - { - encoder.append(next.as_ref().map(|n| n.borrow())); - } - } - for next in replace_with { - let next = next.map_err(SpliceError::ReadReplace)?; - encoder.append(next.as_ref().map(|n| n.borrow())); - } - for next in decoder { - let next = next.map_err(SpliceError::ReadExisting)?; - encoder.append(next.as_ref()); - } - let (_, len) = encoder.finish(); - let range = start..(start + len); - Ok(range.into()) - } -} - -impl<'a, T: Encodable + Clone + PartialEq + 'a> RleRange { - pub(crate) fn encoder(&self, output: S) -> RleEncoder { - RleEncoder::from(output) - } - - pub(crate) fn encode, I: Iterator>>( - items: I, - out: &mut Vec, - ) -> Self { - let start = out.len(); - let mut encoder = RleEncoder::new(out); - for item in items { - encoder.append(item); - } - let (_, len) = encoder.finish(); - (start..(start + len)).into() - } -} - -impl AsRef> for RleRange { - fn as_ref(&self) -> &Range { - &self.range - } -} - -impl From> for RleRange { - fn from(r: Range) -> RleRange { - RleRange { - range: r, - _phantom: PhantomData, - } - } -} - -impl From> for Range { - fn from(r: RleRange) -> Range { - r.range - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::columnar::encoding::properties::option_splice_scenario; - use proptest::prelude::*; - use std::{borrow::Cow, convert::Infallible}; - - #[test] - fn rle_int_round_trip() { - let vals = [1, 1, 2, 2, 3, 2, 3, 1, 3]; - let mut buf = Vec::with_capacity(vals.len() * 3); - let mut encoder: RleEncoder<_, u64> = RleEncoder::new(&mut buf); - for val in vals { - encoder.append_value(val) - } - let (_, total_slice_len) = encoder.finish(); - let mut decoder: RleDecoder<'_, u64> = - RleDecoder::from(Cow::Borrowed(&buf[0..total_slice_len])); - let mut result = 
Vec::new(); - while let Some(Some(val)) = decoder.next().transpose().unwrap() { - result.push(val); - } - assert_eq!(result, vals); - } - - #[test] - fn rle_int_insert() { - let vals = [1, 1, 2, 2, 3, 2, 3, 1, 3]; - let mut buf = Vec::with_capacity(vals.len() * 3); - let mut encoder: RleEncoder<_, u64> = RleEncoder::new(&mut buf); - for val in vals.iter().take(4) { - encoder.append_value(val) - } - encoder.append_value(5); - for val in vals.iter().skip(4) { - encoder.append_value(val); - } - let (_, total_slice_len) = encoder.finish(); - let mut decoder: RleDecoder<'_, u64> = - RleDecoder::from(Cow::Borrowed(&buf[0..total_slice_len])); - let mut result = Vec::new(); - while let Some(Some(val)) = decoder.next().transpose().unwrap() { - result.push(val); - } - let expected = [1, 1, 2, 2, 5, 3, 2, 3, 1, 3]; - assert_eq!(result, expected); - } - - fn encode(vals: &[Option]) -> (RleRange, Vec) { - let mut buf = Vec::with_capacity(vals.len() * 3); - let range = RleRange::::encode(vals.iter().map(|v| v.as_ref()), &mut buf); - (range, buf) - } - - fn decode(range: RleRange, buf: &[u8]) -> Vec> { - range.decoder(buf).collect::, _>>().unwrap() - } - - proptest! 
{ - #[test] - fn splice_ints(scenario in option_splice_scenario(any::>())) { - let (range, buf) = encode(&scenario.initial_values); - let mut out = Vec::new(); - let replacements: Vec, Infallible>> = scenario.replacements.iter().cloned().map(Ok).collect(); - let new_range = range.splice(&buf, scenario.replace_range.clone(), replacements.into_iter(), &mut out).unwrap(); - let result = decode::(new_range, &out); - scenario.check_optional(result) - } - - #[test] - fn splice_strings(scenario in option_splice_scenario(any::>())) { - let (range, buf) = encode(&scenario.initial_values); - let mut out = Vec::new(); - let replacements: Vec, Infallible>> = scenario.replacements.iter().cloned().map(Ok).collect(); - let new_range = range.splice(&buf, scenario.replace_range.clone(), replacements.into_iter(), &mut out).unwrap(); - let result = decode::(new_range, &out); - scenario.check_optional(result) - } - } -} diff --git a/rust/automerge/src/columnar/column_range/value.rs b/rust/automerge/src/columnar/column_range/value.rs deleted file mode 100644 index 03a5aa60..00000000 --- a/rust/automerge/src/columnar/column_range/value.rs +++ /dev/null @@ -1,547 +0,0 @@ -use std::{borrow::Cow, ops::Range}; - -use crate::{ - columnar::{ - encoding::{ - leb128::{lebsize, ulebsize}, - raw, DecodeColumnError, DecodeError, RawBytes, RawDecoder, RawEncoder, RleDecoder, - RleEncoder, Sink, - }, - SpliceError, - }, - storage::parse::{ - leb128::{leb128_i64, leb128_u64}, - Input, ParseResult, - }, - ScalarValue, -}; - -use super::{RawRange, RleRange}; - -#[derive(Debug, Clone, PartialEq)] -pub(crate) struct ValueRange { - meta: RleRange, - raw: RawRange, -} - -impl ValueRange { - pub(crate) fn new(meta: RleRange, raw: RawRange) -> Self { - Self { meta, raw } - } - - pub(crate) fn range(&self) -> Range { - // This is a hack, instead `raw` should be `Option` - if self.raw.is_empty() { - self.meta.clone().into() - } else { - self.meta.start()..self.raw.end() - } - } - - pub(crate) fn 
meta_range(&self) -> &RleRange { - &self.meta - } - - pub(crate) fn raw_range(&self) -> &RawRange { - &self.raw - } - - pub(crate) fn encode<'a, 'b, I>(items: I, out: &'b mut Vec) -> Self - where - I: Iterator> + Clone + 'a, - { - Self { - meta: (0..0).into(), - raw: (0..0).into(), - } - .splice(&[], 0..0, items, out) - } - - pub(crate) fn iter<'a>(&self, data: &'a [u8]) -> ValueIter<'a> { - ValueIter { - meta: self.meta.decoder(data), - raw: self.raw.decoder(data), - } - } - - pub(crate) fn splice<'b, I>( - &self, - data: &[u8], - replace: Range, - replace_with: I, - out: &mut Vec, - ) -> Self - where - I: Iterator> + Clone, - { - // SAFETY: try_splice fails if either the iterator of replacements fails, or the iterator - // of existing elements fails. But the replacement iterator is infallible and there - // are no existing elements - self.try_splice::<_, ()>(data, replace, replace_with.map(Ok), out) - .unwrap() - } - - pub(crate) fn try_splice<'b, I, E>( - &self, - data: &[u8], - replace: Range, - mut replace_with: I, - out: &mut Vec, - ) -> Result> - where - I: Iterator, E>> + Clone, - { - // Our semantics here are similar to those of Vec::splice. We can describe this - // imperatively like this: - // - // * First copy everything up to the start of `replace` into the output - // * For every index in `replace` skip that index from ourselves and if `replace_with` - // returns `Some` then copy that value to the output - // * Once we have iterated past `replace.end` we continue to call `replace_with` until it - // returns None, copying the results to the output - // * Finally we copy the remainder of our data into the output - // - // However, things are complicated by the fact that our data is stored in two columns. This - // means that we do this in two passes. First we execute the above logic for the metadata - // column. Then we do it all over again for the value column. 
- - // First pass - metadata - // - // Copy the metadata decoder so we can iterate over it again when we read the values in the - // second pass - let start = out.len(); - let mut meta_copy = self.meta.decoder(data); - let mut meta_out = RleEncoder::<_, u64>::from(&mut *out); - let mut idx = 0; - // Copy everything up to replace.start to the output - while idx < replace.start { - let val = meta_copy - .next() - .transpose() - .map_err(SpliceError::ReadExisting)? - .unwrap_or(None); - meta_out.append(val.as_ref()); - idx += 1; - } - // Now step through replace, skipping our data and inserting the replacement data (if there - // is any) - let mut meta_replace_with = replace_with.clone(); - for _ in 0..replace.len() { - meta_copy.next(); - if let Some(val) = meta_replace_with.next() { - let val = val.map_err(SpliceError::ReadReplace)?; - // Note that we are just constructing metadata values here. - let meta_val = &u64::from(ValueMeta::from(val.as_ref())); - meta_out.append(Some(meta_val)); - } - idx += 1; - } - // Copy any remaining input from the replacments to the output - for val in meta_replace_with { - let val = val.map_err(SpliceError::ReadReplace)?; - let meta_val = &u64::from(ValueMeta::from(val.as_ref())); - meta_out.append(Some(meta_val)); - idx += 1; - } - // Now copy any remaining data we have to the output - while !meta_copy.done() { - let val = meta_copy - .next() - .transpose() - .map_err(SpliceError::ReadExisting)? - .unwrap_or(None); - meta_out.append(val.as_ref()); - } - let (_, meta_len) = meta_out.finish(); - let meta_range = start..(start + meta_len); - - // Second pass, copying the values. For this pass we iterate over ourselves. 
- // - // - let mut value_range_len = 0; - let mut raw_encoder = RawEncoder::from(out); - let mut iter = self.iter(data); - idx = 0; - // Copy everything up to replace.start to the output - while idx < replace.start { - let val = iter.next().unwrap().unwrap_or(ScalarValue::Null); - value_range_len += encode_val(&mut raw_encoder, &val); - idx += 1; - } - - // Now step through replace, skipping our data and inserting the replacement data (if there - // is any) - for _ in 0..replace.len() { - iter.next(); - if let Some(val) = replace_with.next() { - let val = val.map_err(SpliceError::ReadReplace)?; - value_range_len += encode_val(&mut raw_encoder, val.as_ref()); - } - idx += 1; - } - // Copy any remaining input from the replacments to the output - for val in replace_with { - let val = val.map_err(SpliceError::ReadReplace)?; - value_range_len += encode_val(&mut raw_encoder, val.as_ref()); - idx += 1; - } - // Now copy any remaining data we have to the output - while !iter.done() { - let val = iter.next().unwrap().unwrap_or(ScalarValue::Null); - value_range_len += encode_val(&mut raw_encoder, &val); - } - - let value_range = meta_range.end..(meta_range.end + value_range_len); - - Ok(Self { - meta: meta_range.into(), - raw: value_range.into(), - }) - } -} - -#[derive(Debug, Clone)] -pub(crate) struct ValueIter<'a> { - meta: RleDecoder<'a, u64>, - raw: RawDecoder<'a>, -} - -impl<'a> Iterator for ValueIter<'a> { - type Item = Result; - - fn next(&mut self) -> Option { - let next = match self.meta.next().transpose() { - Ok(n) => n, - Err(e) => return Some(Err(DecodeColumnError::decode_raw("meta", e))), - }; - match next { - Some(Some(next)) => { - let val_meta = ValueMeta::from(next); - #[allow(clippy::redundant_slicing)] - match val_meta.type_code() { - ValueType::Null => Some(Ok(ScalarValue::Null)), - ValueType::True => Some(Ok(ScalarValue::Boolean(true))), - ValueType::False => Some(Ok(ScalarValue::Boolean(false))), - ValueType::Uleb => self.parse_input(val_meta, 
leb128_u64), - ValueType::Leb => self.parse_input(val_meta, leb128_i64), - ValueType::String => self.parse_raw(val_meta, |bytes| { - let val = std::str::from_utf8(bytes) - .map_err(|e| DecodeColumnError::invalid_value("value", e.to_string()))? - .into(); - Ok(ScalarValue::Str(val)) - }), - ValueType::Float => self.parse_raw(val_meta, |bytes| { - if val_meta.length() != 8 { - return Err(DecodeColumnError::invalid_value( - "value", - format!("float should have length 8, had {0}", val_meta.length()), - )); - } - let raw: [u8; 8] = bytes - .try_into() - // SAFETY: parse_raw() calls read_bytes(val_meta.length()) and we have - // checked that val_meta.length() == 8 - .unwrap(); - let val = f64::from_le_bytes(raw); - Ok(ScalarValue::F64(val)) - }), - ValueType::Counter => self.parse_input(val_meta, |input| { - leb128_i64(input).map(|(i, n)| (i, ScalarValue::Counter(n.into()))) - }), - ValueType::Timestamp => self.parse_input(val_meta, |input| { - leb128_i64(input).map(|(i, n)| (i, ScalarValue::Timestamp(n))) - }), - ValueType::Unknown(code) => self.parse_raw(val_meta, |bytes| { - Ok(ScalarValue::Unknown { - type_code: code, - bytes: bytes.to_vec(), - }) - }), - ValueType::Bytes => match self.raw.read_bytes(val_meta.length()) { - Err(e) => Some(Err(DecodeColumnError::invalid_value( - "value", - e.to_string(), - ))), - Ok(bytes) => Some(Ok(ScalarValue::Bytes(bytes.to_vec()))), - }, - } - } - Some(None) => Some(Err(DecodeColumnError::unexpected_null("meta"))), - None => None, - } - } -} - -impl<'a> ValueIter<'a> { - fn parse_raw<'b, R, F: Fn(&'b [u8]) -> Result>( - &'b mut self, - meta: ValueMeta, - f: F, - ) -> Option> { - let raw = match self.raw.read_bytes(meta.length()) { - Err(e) => { - return Some(Err(DecodeColumnError::invalid_value( - "value", - e.to_string(), - ))) - } - Ok(bytes) => bytes, - }; - Some(f(raw)) - } - - fn parse_input<'b, R, F: Fn(Input<'b>) -> ParseResult<'b, R, DecodeError>>( - &'b mut self, - meta: ValueMeta, - f: F, - ) -> Option> - where - R: 
Into, - { - self.parse_raw(meta, |raw| match f(Input::new(raw)) { - Err(e) => Err(DecodeColumnError::invalid_value("value", e.to_string())), - Ok((i, _)) if !i.is_empty() => { - Err(DecodeColumnError::invalid_value("value", "extra bytes")) - } - Ok((_, v)) => Ok(v.into()), - }) - } - - pub(crate) fn done(&self) -> bool { - self.meta.done() - } -} - -/// Appends values row-wise. That is to say, this struct manages two separate chunks of memory, one -/// for the value metadata and one for the raw values. To use it, create a new encoder using -/// `ValueEncoder::new`, sequentially append values using `ValueEncoder::append`, and finallly -/// concatenate the two columns and append them to a buffer returning the range within the output -/// buffer which contains the concatenated columns using `ValueEncoder::finish`. -pub(crate) struct ValueEncoder { - meta: RleEncoder, - raw: RawEncoder, -} - -impl ValueEncoder { - pub(crate) fn append(&mut self, value: &ScalarValue) { - let meta_val = &u64::from(ValueMeta::from(value)); - self.meta.append_value(meta_val); - encode_val(&mut self.raw, value); - } -} - -impl ValueEncoder> { - pub(crate) fn new() -> Self { - Self { - meta: RleEncoder::new(Vec::new()), - raw: RawEncoder::from(Vec::new()), - } - } - pub(crate) fn finish(self, out: &mut Vec) -> ValueRange { - let meta_start = out.len(); - let (meta, _) = self.meta.finish(); - out.extend(meta); - let meta_end = out.len(); - - let (val, _) = self.raw.finish(); - out.extend(val); - let val_end = out.len(); - ValueRange { - meta: (meta_start..meta_end).into(), - raw: (meta_end..val_end).into(), - } - } -} - -fn encode_val(out: &mut RawEncoder, val: &ScalarValue) -> usize { - match val { - ScalarValue::Uint(i) => out.append(*i), - ScalarValue::Int(i) => out.append(*i), - ScalarValue::Null => 0, - ScalarValue::Boolean(_) => 0, - ScalarValue::Timestamp(i) => out.append(*i), - ScalarValue::F64(f) => out.append(*f), - ScalarValue::Counter(i) => out.append(i.start), - 
ScalarValue::Str(s) => out.append(RawBytes::from(s.as_bytes())), - ScalarValue::Bytes(b) => out.append(RawBytes::from(&b[..])), - ScalarValue::Unknown { bytes, .. } => out.append(RawBytes::from(&bytes[..])), - } -} - -#[derive(Debug)] -enum ValueType { - Null, - False, - True, - Uleb, - Leb, - Float, - String, - Bytes, - Counter, - Timestamp, - Unknown(u8), -} - -#[derive(Copy, Clone)] -struct ValueMeta(u64); - -impl ValueMeta { - fn type_code(&self) -> ValueType { - let low_byte = (self.0 as u8) & 0b00001111; - match low_byte { - 0 => ValueType::Null, - 1 => ValueType::False, - 2 => ValueType::True, - 3 => ValueType::Uleb, - 4 => ValueType::Leb, - 5 => ValueType::Float, - 6 => ValueType::String, - 7 => ValueType::Bytes, - 8 => ValueType::Counter, - 9 => ValueType::Timestamp, - other => ValueType::Unknown(other), - } - } - - fn length(&self) -> usize { - (self.0 >> 4) as usize - } -} - -impl From<&ScalarValue> for ValueMeta { - fn from(p: &ScalarValue) -> Self { - match p { - ScalarValue::Uint(i) => Self((ulebsize(*i) << 4) | 3), - ScalarValue::Int(i) => Self((lebsize(*i) << 4) | 4), - ScalarValue::Null => Self(0), - ScalarValue::Boolean(b) => Self(match b { - false => 1, - true => 2, - }), - ScalarValue::Timestamp(i) => Self((lebsize(*i) << 4) | 9), - ScalarValue::F64(_) => Self((8 << 4) | 5), - ScalarValue::Counter(i) => Self((lebsize(i.start) << 4) | 8), - ScalarValue::Str(s) => Self(((s.as_bytes().len() as u64) << 4) | 6), - ScalarValue::Bytes(b) => Self(((b.len() as u64) << 4) | 7), - ScalarValue::Unknown { type_code, bytes } => { - Self(((bytes.len() as u64) << 4) | (*type_code as u64)) - } - } - } -} - -impl From for ValueMeta { - fn from(raw: u64) -> Self { - ValueMeta(raw) - } -} - -impl From for u64 { - fn from(v: ValueMeta) -> Self { - v.0 - } -} - -impl From<&ScalarValue> for ValueType { - fn from(p: &ScalarValue) -> Self { - match p { - ScalarValue::Uint(_) => ValueType::Uleb, - ScalarValue::Int(_) => ValueType::Leb, - ScalarValue::Null => 
ValueType::Null, - ScalarValue::Boolean(b) => match b { - true => ValueType::True, - false => ValueType::False, - }, - ScalarValue::Timestamp(_) => ValueType::Timestamp, - ScalarValue::F64(_) => ValueType::Float, - ScalarValue::Counter(_) => ValueType::Counter, - ScalarValue::Str(_) => ValueType::String, - ScalarValue::Bytes(_) => ValueType::Bytes, - ScalarValue::Unknown { type_code, .. } => ValueType::Unknown(*type_code), - } - } -} - -impl From for u64 { - fn from(v: ValueType) -> Self { - match v { - ValueType::Null => 0, - ValueType::False => 1, - ValueType::True => 2, - ValueType::Uleb => 3, - ValueType::Leb => 4, - ValueType::Float => 5, - ValueType::String => 6, - ValueType::Bytes => 7, - ValueType::Counter => 8, - ValueType::Timestamp => 9, - ValueType::Unknown(other) => other as u64, - } - } -} -#[cfg(test)] -mod tests { - use super::*; - use crate::columnar::encoding::properties::{scalar_value, splice_scenario}; - use proptest::prelude::*; - use std::borrow::Cow; - - fn encode_values(vals: &[ScalarValue]) -> (Vec, ValueRange) { - let mut out = Vec::new(); - let range = ValueRange::encode(vals.iter().cloned().map(Cow::Owned), &mut out); - (out, range) - } - - fn encode_rowwise(vals: &[ScalarValue]) -> (Vec, ValueRange) { - let mut out = Vec::new(); - let mut encoder = ValueEncoder::new(); - for val in vals { - encoder.append(val); - } - let range = encoder.finish(&mut out); - (out, range) - } - - proptest! 
{ - #[test] - fn test_initialize_splice(values in proptest::collection::vec(scalar_value(), 0..100)) { - let (out, range) = encode_values(&values[..]); - let testvals = range.iter(&out).collect::, _>>().unwrap(); - assert_eq!(values, testvals); - } - - #[test] - fn test_splice_values(scenario in splice_scenario(scalar_value())){ - let (out, range) = encode_values(&scenario.initial_values); - let mut spliced = Vec::new(); - let new_range = range - .splice( - &out, - scenario.replace_range.clone(), - scenario.replacements.clone().into_iter().map(Cow::Owned), - &mut spliced, - ); - let result_values = new_range.iter(&spliced).collect::, _>>().unwrap(); - let mut expected: Vec<_> = scenario.initial_values.clone(); - expected.splice(scenario.replace_range, scenario.replacements); - assert_eq!(result_values, expected); - } - - #[test] - fn encode_row_wise_and_columnwise_equal(values in proptest::collection::vec(scalar_value(), 0..50)) { - let (colwise, col_range) = encode_values(&values[..]); - let (rowwise, row_range) = encode_rowwise(&values[..]); - assert_eq!(colwise, rowwise); - assert_eq!(col_range, row_range); - } - } - - #[test] - fn test_value_uleb() { - let vals = [ScalarValue::Uint(127), ScalarValue::Uint(183)]; - let (out, range) = encode_values(&vals); - let result = range.iter(&out).collect::, _>>().unwrap(); - assert_eq!(result, vals); - } -} diff --git a/rust/automerge/src/columnar/encoding.rs b/rust/automerge/src/columnar/encoding.rs deleted file mode 100644 index c9435448..00000000 --- a/rust/automerge/src/columnar/encoding.rs +++ /dev/null @@ -1,65 +0,0 @@ -pub(crate) mod raw; - -pub(crate) use raw::{RawDecoder, RawEncoder}; -mod rle; -pub(crate) use rle::{RleDecoder, RleEncoder}; -mod boolean; -pub(crate) use boolean::{BooleanDecoder, BooleanEncoder}; -mod delta; -pub(crate) use delta::{DeltaDecoder, DeltaEncoder}; -pub(crate) mod leb128; - -pub(crate) mod column_decoder; -pub(crate) use column_decoder::ColumnDecoder; - -#[cfg(test)] -pub(crate) mod 
properties; - -pub(crate) trait Sink { - fn append(&mut self, bytes: &[u8]); -} - -impl<'a> Sink for &'a mut Vec { - fn append(&mut self, bytes: &[u8]) { - self.extend(bytes) - } -} - -impl Sink for Vec { - fn append(&mut self, bytes: &[u8]) { - self.extend(bytes) - } -} - -pub(crate) trait Encodable { - fn encode(&self, out: &mut S) -> usize; -} - -mod encodable_impls; -pub(crate) use encodable_impls::RawBytes; - -#[derive(thiserror::Error, Debug)] -pub(crate) enum DecodeError { - #[error(transparent)] - Io(#[from] std::io::Error), - #[error("invalid integer")] - FromInt(#[from] std::num::TryFromIntError), - #[error("bad leb128")] - BadLeb(#[from] ::leb128::read::Error), - #[error(transparent)] - BadLeb128(#[from] crate::storage::parse::leb128::Error), - #[error("attempted to allocate {attempted} which is larger than the maximum of {maximum}")] - OverlargeAllocation { attempted: usize, maximum: usize }, - #[error("invalid string encoding")] - BadString, -} - -pub(crate) trait Decodable: Sized { - fn decode(bytes: &mut R) -> Result - where - R: std::io::Read; -} -mod decodable_impls; - -pub(crate) mod col_error; -pub(crate) use col_error::DecodeColumnError; diff --git a/rust/automerge/src/columnar/encoding/boolean.rs b/rust/automerge/src/columnar/encoding/boolean.rs deleted file mode 100644 index 26cb1838..00000000 --- a/rust/automerge/src/columnar/encoding/boolean.rs +++ /dev/null @@ -1,131 +0,0 @@ -use std::borrow::Cow; - -use super::{raw, Encodable, RawDecoder, Sink}; - -/// Encodes booleans by storing the count of the same value. -/// -/// The sequence of numbers describes the count of false values on even indices (0-indexed) and the -/// count of true values on odd indices (0-indexed). -/// -/// Counts are encoded as usize. 
-pub(crate) struct BooleanEncoder { - written: usize, - //buf: &'a mut Vec, - buf: S, - last: bool, - count: usize, -} - -impl BooleanEncoder> { - pub(crate) fn new() -> BooleanEncoder> { - BooleanEncoder::from_sink(Vec::new()) - } -} - -impl BooleanEncoder { - pub(crate) fn from_sink(sink: S) -> Self { - BooleanEncoder { - written: 0, - buf: sink, - last: false, - count: 0, - } - } - - pub(crate) fn append(&mut self, value: bool) { - if value == self.last { - self.count += 1; - } else { - self.written += self.count.encode(&mut self.buf); - self.last = value; - self.count = 1; - } - } - - pub(crate) fn finish(mut self) -> (S, usize) { - if self.count > 0 { - self.written += self.count.encode(&mut self.buf); - } - (self.buf, self.written) - } -} - -impl From for BooleanEncoder { - fn from(output: S) -> Self { - BooleanEncoder::from_sink(output) - } -} - -/// See the discussion of [`BooleanEncoder`] for details on this encoding -#[derive(Clone, Debug)] -pub(crate) struct BooleanDecoder<'a> { - decoder: RawDecoder<'a>, - last_value: bool, - count: usize, -} - -impl<'a> From> for BooleanDecoder<'a> { - fn from(bytes: Cow<'a, [u8]>) -> Self { - BooleanDecoder { - decoder: RawDecoder::from(bytes), - last_value: true, - count: 0, - } - } -} - -impl<'a> From<&'a [u8]> for BooleanDecoder<'a> { - fn from(d: &'a [u8]) -> Self { - Cow::Borrowed(d).into() - } -} - -// this is an endless iterator that returns false after input is exhausted -impl<'a> Iterator for BooleanDecoder<'a> { - type Item = Result; - - fn next(&mut self) -> Option { - while self.count == 0 { - if self.decoder.done() && self.count == 0 { - return None; - } - self.count = match self.decoder.read() { - Ok(c) => c, - Err(e) => return Some(Err(e)), - }; - self.last_value = !self.last_value; - } - self.count -= 1; - Some(Ok(self.last_value)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - use proptest::prelude::*; - - fn encode(vals: &[bool]) -> Vec { - let mut buf = Vec::new(); - let mut encoder = 
BooleanEncoder::from_sink(&mut buf); - for val in vals { - encoder.append(*val); - } - encoder.finish(); - buf - } - - fn decode(buf: &[u8]) -> Vec { - BooleanDecoder::from(buf) - .collect::, _>>() - .unwrap() - } - - proptest! { - #[test] - fn encode_decode_bools(vals in proptest::collection::vec(any::(), 0..100)) { - assert_eq!(vals, decode(&encode(&vals))) - } - } -} diff --git a/rust/automerge/src/columnar/encoding/col_error.rs b/rust/automerge/src/columnar/encoding/col_error.rs deleted file mode 100644 index 089556b6..00000000 --- a/rust/automerge/src/columnar/encoding/col_error.rs +++ /dev/null @@ -1,88 +0,0 @@ -#[derive(Clone, Debug)] -pub struct DecodeColumnError { - path: Path, - error: DecodeColErrorKind, -} - -impl std::error::Error for DecodeColumnError {} - -impl std::fmt::Display for DecodeColumnError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match &self.error { - DecodeColErrorKind::UnexpectedNull => { - write!(f, "unexpected null in column {}", self.path) - } - DecodeColErrorKind::InvalidValue { reason } => { - write!(f, "invalid value in column {}: {}", self.path, reason) - } - } - } -} - -#[derive(Clone, Debug)] -struct Path(Vec); - -impl std::fmt::Display for Path { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - for (index, elem) in self.0.iter().rev().enumerate() { - if index != 0 { - write!(f, ":")?; - } - write!(f, "{}", elem)?; - } - Ok(()) - } -} - -impl Path { - fn push>(&mut self, col: S) { - self.0.push(col.as_ref().to_string()) - } -} - -impl> From for Path { - fn from(p: S) -> Self { - Self(vec![p.as_ref().to_string()]) - } -} - -#[derive(Clone, Debug)] -enum DecodeColErrorKind { - UnexpectedNull, - InvalidValue { reason: String }, -} - -impl DecodeColumnError { - pub(crate) fn decode_raw>(col: S, raw_err: super::raw::Error) -> Self { - Self { - path: col.into(), - error: DecodeColErrorKind::InvalidValue { - reason: raw_err.to_string(), - }, - } - } - - pub(crate) fn 
unexpected_null>(col: S) -> DecodeColumnError { - Self { - path: col.into(), - error: DecodeColErrorKind::UnexpectedNull, - } - } - - pub(crate) fn invalid_value, R: AsRef>( - col: S, - reason: R, - ) -> DecodeColumnError { - Self { - path: col.into(), - error: DecodeColErrorKind::InvalidValue { - reason: reason.as_ref().to_string(), - }, - } - } - - pub(crate) fn in_column>(mut self, col: S) -> DecodeColumnError { - self.path.push(col.as_ref()); - self - } -} diff --git a/rust/automerge/src/columnar/encoding/column_decoder.rs b/rust/automerge/src/columnar/encoding/column_decoder.rs deleted file mode 100644 index 8e3237fb..00000000 --- a/rust/automerge/src/columnar/encoding/column_decoder.rs +++ /dev/null @@ -1,157 +0,0 @@ -use crate::{ - columnar::{ - column_range::{DepsIter, KeyIter, ObjIdIter, OpIdIter, OpIdListIter, ValueIter}, - encoding, Key, - }, - types::{ObjId, OpId}, - ScalarValue, -}; - -pub(crate) trait IntoColError: std::error::Error { - fn into_col_error>(self, col_name: S) -> encoding::DecodeColumnError; -} - -impl IntoColError for encoding::raw::Error { - fn into_col_error>(self, col_name: S) -> encoding::DecodeColumnError { - encoding::DecodeColumnError::decode_raw(col_name, self) - } -} - -impl IntoColError for encoding::DecodeColumnError { - fn into_col_error>(self, col_name: S) -> encoding::DecodeColumnError { - self.in_column(col_name) - } -} - -/// A helper trait which allows users to annotate decoders with errors containing a column name -/// -/// Frequently we have an iterator which decodes values from some underlying column storage, e.g. -/// we might have a `BooleanDecoder` which decodes items from an `insert` column. 
In the context -/// where we are reading from this column we would like to produce errors which describe which -/// column the error occurred in - to this end we require that the error produced by the underlying -/// decoder implement `IntoColError` and we provide the `next_in_col` method to call -/// `into_col_error` on any errors produced by the decoder. -pub(crate) trait ColumnDecoder: Iterator> { - type Error: IntoColError; - type Value; - - fn maybe_next_in_col>( - &mut self, - col_name: S, - ) -> Result, encoding::DecodeColumnError>; - - /// Decode the next value from this decoder, annotating any error with the `col_name` - fn next_in_col>( - &mut self, - col_name: S, - ) -> Result { - self.maybe_next_in_col(&col_name)? - .ok_or_else(|| encoding::DecodeColumnError::unexpected_null(col_name)) - } -} - -impl<'a> ColumnDecoder for encoding::BooleanDecoder<'a> { - type Error = encoding::raw::Error; - type Value = bool; - - fn maybe_next_in_col>( - &mut self, - col_name: S, - ) -> Result, encoding::DecodeColumnError> { - self.next() - .transpose() - .map_err(|e| e.into_col_error(col_name)) - } -} - -impl ColumnDecoder> for I -where - I: Iterator, E>>, - E: IntoColError, -{ - type Error = E; - type Value = T; - - fn maybe_next_in_col>( - &mut self, - col_name: S, - ) -> Result, encoding::DecodeColumnError> { - Ok(self - .next() - .transpose() - .map_err(|e| e.into_col_error(col_name))? 
- .flatten()) - } -} - -impl<'a> ColumnDecoder> for OpIdListIter<'a> { - type Error = encoding::DecodeColumnError; - type Value = Vec; - - fn maybe_next_in_col>( - &mut self, - col_name: S, - ) -> Result>, encoding::DecodeColumnError> { - self.next().transpose().map_err(|e| e.in_column(col_name)) - } -} - -impl<'a> ColumnDecoder for ValueIter<'a> { - type Error = encoding::DecodeColumnError; - type Value = ScalarValue; - - fn maybe_next_in_col>( - &mut self, - col_name: S, - ) -> Result, encoding::DecodeColumnError> { - self.next().transpose().map_err(|e| e.in_column(col_name)) - } -} - -impl<'a> ColumnDecoder for KeyIter<'a> { - type Error = encoding::DecodeColumnError; - type Value = Key; - - fn maybe_next_in_col>( - &mut self, - col_name: S, - ) -> Result, encoding::DecodeColumnError> { - self.next().transpose().map_err(|e| e.in_column(col_name)) - } -} - -impl<'a> ColumnDecoder for ObjIdIter<'a> { - type Value = ObjId; - type Error = encoding::DecodeColumnError; - - fn maybe_next_in_col>( - &mut self, - col_name: S, - ) -> Result, encoding::DecodeColumnError> { - self.next().transpose().map_err(|e| e.in_column(col_name)) - } -} - -impl<'a> ColumnDecoder for OpIdIter<'a> { - type Value = OpId; - type Error = encoding::DecodeColumnError; - - fn maybe_next_in_col>( - &mut self, - col_name: S, - ) -> Result, encoding::DecodeColumnError> { - self.next().transpose().map_err(|e| e.in_column(col_name)) - } -} - -impl<'a> ColumnDecoder> for DepsIter<'a> { - type Value = Vec; - type Error = encoding::DecodeColumnError; - - fn maybe_next_in_col>( - &mut self, - col_name: S, - ) -> Result, encoding::DecodeColumnError> { - self.next().transpose().map_err(|e| e.in_column(col_name)) - } -} diff --git a/rust/automerge/src/columnar/encoding/decodable_impls.rs b/rust/automerge/src/columnar/encoding/decodable_impls.rs deleted file mode 100644 index 26425f15..00000000 --- a/rust/automerge/src/columnar/encoding/decodable_impls.rs +++ /dev/null @@ -1,175 +0,0 @@ -use 
smol_str::SmolStr; -use std::{borrow::Cow, convert::TryFrom, io::Read, str}; - -use super::{Decodable, DecodeError}; -use crate::ActorId; - -// We don't allow decoding items which are larger than this. Almost nothing should be this large -// so this is really guarding against bad encodings which accidentally grab loads of memory -const MAX_ALLOCATION: usize = 1000000000; - -impl Decodable for u8 { - fn decode(bytes: &mut R) -> Result - where - R: Read, - { - let mut buffer = [0; 1]; - bytes.read_exact(&mut buffer)?; - Ok(buffer[0]) - } -} - -impl Decodable for u32 { - fn decode(bytes: &mut R) -> Result - where - R: Read, - { - u64::decode::(bytes).and_then(|val| Self::try_from(val).map_err(DecodeError::from)) - } -} - -impl Decodable for usize { - fn decode(bytes: &mut R) -> Result - where - R: Read, - { - u64::decode::(bytes).and_then(|val| Self::try_from(val).map_err(DecodeError::from)) - } -} - -impl Decodable for isize { - fn decode(bytes: &mut R) -> Result - where - R: Read, - { - i64::decode::(bytes).and_then(|val| Self::try_from(val).map_err(DecodeError::from)) - } -} - -impl Decodable for i32 { - fn decode(bytes: &mut R) -> Result - where - R: Read, - { - i64::decode::(bytes).and_then(|val| Self::try_from(val).map_err(DecodeError::from)) - } -} - -impl Decodable for i64 { - fn decode(bytes: &mut R) -> Result - where - R: Read, - { - leb128::read::signed(bytes).map_err(DecodeError::from) - } -} - -impl Decodable for f64 { - fn decode(bytes: &mut R) -> Result - where - R: Read, - { - let mut buffer = [0; 8]; - bytes.read_exact(&mut buffer)?; - Ok(Self::from_le_bytes(buffer)) - } -} - -impl Decodable for f32 { - fn decode(bytes: &mut R) -> Result - where - R: Read, - { - let mut buffer = [0; 4]; - bytes.read_exact(&mut buffer)?; - Ok(Self::from_le_bytes(buffer)) - } -} - -impl Decodable for u64 { - fn decode(bytes: &mut R) -> Result - where - R: Read, - { - leb128::read::unsigned(bytes).map_err(DecodeError::from) - } -} - -impl Decodable for Vec { - fn 
decode(bytes: &mut R) -> Result - where - R: Read, - { - let len = usize::decode::(bytes)?; - if len == 0 { - return Ok(vec![]); - } - if len > MAX_ALLOCATION { - return Err(DecodeError::OverlargeAllocation { - attempted: len, - maximum: MAX_ALLOCATION, - }); - } - let mut buffer = vec![0; len]; - bytes.read_exact(buffer.as_mut_slice())?; - Ok(buffer) - } -} - -impl Decodable for SmolStr { - fn decode(bytes: &mut R) -> Result - where - R: Read, - { - let buffer = Vec::decode(bytes)?; - str::from_utf8(&buffer) - .map(|t| t.into()) - .map_err(|_| DecodeError::BadString) - } -} - -impl Decodable for Cow<'static, SmolStr> { - fn decode(bytes: &mut R) -> Result - where - R: std::io::Read, - { - SmolStr::decode(bytes).map(Cow::Owned) - } -} - -impl Decodable for String { - fn decode(bytes: &mut R) -> Result - where - R: Read, - { - let buffer = Vec::decode(bytes)?; - str::from_utf8(&buffer) - .map(|t| t.into()) - .map_err(|_| DecodeError::BadString) - } -} - -impl Decodable for Option { - fn decode(bytes: &mut R) -> Result - where - R: Read, - { - let buffer = Vec::decode(bytes)?; - if buffer.is_empty() { - return Ok(None); - } - str::from_utf8(&buffer) - .map(|t| Some(t.into())) - .map_err(|_| DecodeError::BadString) - } -} - -impl Decodable for ActorId { - fn decode(bytes: &mut R) -> Result - where - R: Read, - { - let buffer = Vec::decode(bytes)?; - Ok(buffer.into()) - } -} diff --git a/rust/automerge/src/columnar/encoding/delta.rs b/rust/automerge/src/columnar/encoding/delta.rs deleted file mode 100644 index 6234875b..00000000 --- a/rust/automerge/src/columnar/encoding/delta.rs +++ /dev/null @@ -1,95 +0,0 @@ -use std::borrow::Cow; - -use super::{raw, RleDecoder, RleEncoder, Sink}; - -/// Encodes integers as the change since the previous value. -/// -/// The initial value is 0 encoded as u64. Deltas are encoded as i64. -/// -/// Run length encoding is then applied to the resulting sequence. 
-pub(crate) struct DeltaEncoder { - rle: RleEncoder, - absolute_value: i64, -} - -impl DeltaEncoder { - pub(crate) fn new(output: S) -> DeltaEncoder { - DeltaEncoder { - rle: RleEncoder::new(output), - absolute_value: 0, - } - } - - pub(crate) fn append_value(&mut self, value: i64) { - self.rle - .append_value(value.saturating_sub(self.absolute_value)); - self.absolute_value = value; - } - - pub(crate) fn append_null(&mut self) { - self.rle.append_null(); - } - - pub(crate) fn append(&mut self, val: Option) { - match val { - Some(v) => self.append_value(v), - None => self.append_null(), - } - } - - pub(crate) fn finish(self) -> (S, usize) { - self.rle.finish() - } -} - -impl From for DeltaEncoder { - fn from(output: S) -> Self { - DeltaEncoder::new(output) - } -} - -/// See discussion on [`DeltaEncoder`] for the format data is stored in. -#[derive(Debug, Clone)] -pub(crate) struct DeltaDecoder<'a> { - rle: RleDecoder<'a, i64>, - absolute_val: i64, -} - -impl<'a> DeltaDecoder<'a> { - pub(crate) fn done(&self) -> bool { - self.rle.done() - } -} - -impl<'a> From> for DeltaDecoder<'a> { - fn from(bytes: Cow<'a, [u8]>) -> Self { - DeltaDecoder { - rle: RleDecoder::from(bytes), - absolute_val: 0, - } - } -} - -impl<'a> From<&'a [u8]> for DeltaDecoder<'a> { - fn from(d: &'a [u8]) -> Self { - Cow::Borrowed(d).into() - } -} - -impl<'a> Iterator for DeltaDecoder<'a> { - type Item = Result, raw::Error>; - - fn next(&mut self) -> Option { - match self.rle.next() { - Some(Ok(next)) => match next { - Some(delta) => { - self.absolute_val = self.absolute_val.saturating_add(delta); - Some(Ok(Some(self.absolute_val))) - } - None => Some(Ok(None)), - }, - Some(Err(e)) => Some(Err(e)), - None => None, - } - } -} diff --git a/rust/automerge/src/columnar/encoding/encodable_impls.rs b/rust/automerge/src/columnar/encoding/encodable_impls.rs deleted file mode 100644 index a1b5d8ce..00000000 --- a/rust/automerge/src/columnar/encoding/encodable_impls.rs +++ /dev/null @@ -1,200 +0,0 @@ -use 
super::{Encodable, Sink}; - -use std::borrow::Cow; - -use smol_str::SmolStr; - -/// Encodes bytes without a length prefix -pub(crate) struct RawBytes<'a>(Cow<'a, [u8]>); - -impl<'a> From<&'a [u8]> for RawBytes<'a> { - fn from(r: &'a [u8]) -> Self { - RawBytes(r.into()) - } -} - -impl<'a> From> for RawBytes<'a> { - fn from(c: Cow<'a, [u8]>) -> Self { - RawBytes(c) - } -} - -impl<'a> Encodable for RawBytes<'a> { - fn encode(&self, out: &mut S) -> usize { - out.append(&self.0); - self.0.len() - } -} - -impl Encodable for SmolStr { - fn encode(&self, buf: &mut S) -> usize { - let bytes = self.as_bytes(); - let len_encoded = bytes.len().encode(buf); - let data_len = bytes.encode(buf); - len_encoded + data_len - } -} - -impl<'a> Encodable for Cow<'a, SmolStr> { - fn encode(&self, buf: &mut S) -> usize { - self.as_ref().encode(buf) - } -} - -impl Encodable for String { - fn encode(&self, buf: &mut S) -> usize { - let bytes = self.as_bytes(); - let len_encoded = bytes.len().encode(buf); - let data_len = bytes.encode(buf); - len_encoded + data_len - } -} - -impl Encodable for Option { - fn encode(&self, buf: &mut S) -> usize { - if let Some(s) = self { - s.encode(buf) - } else { - 0.encode(buf) - } - } -} - -impl<'a> Encodable for Option> { - fn encode(&self, out: &mut S) -> usize { - if let Some(s) = self { - SmolStr::encode(s, out) - } else { - 0.encode(out) - } - } -} - -impl Encodable for f64 { - fn encode(&self, buf: &mut S) -> usize { - let bytes = self.to_le_bytes(); - buf.append(&bytes); - bytes.len() - } -} - -impl Encodable for f32 { - fn encode(&self, buf: &mut S) -> usize { - let bytes = self.to_le_bytes(); - buf.append(&bytes); - bytes.len() - } -} - -impl Encodable for usize { - fn encode(&self, buf: &mut S) -> usize { - (*self as u64).encode(buf) - } -} - -impl Encodable for u32 { - fn encode(&self, buf: &mut S) -> usize { - u64::from(*self).encode(buf) - } -} - -impl Encodable for i32 { - fn encode(&self, buf: &mut S) -> usize { - 
i64::from(*self).encode(buf) - } -} - -impl Encodable for [u8] { - fn encode(&self, out: &mut S) -> usize { - out.append(self); - self.len() - } -} - -impl Encodable for &[u8] { - fn encode(&self, out: &mut S) -> usize { - out.append(self); - self.len() - } -} - -impl<'a> Encodable for Cow<'a, [u8]> { - fn encode(&self, out: &mut S) -> usize { - out.append(self); - self.len() - } -} - -impl Encodable for Vec { - fn encode(&self, out: &mut S) -> usize { - Encodable::encode(&self[..], out) - } -} - -mod leb128_things { - use super::{Encodable, Sink}; - - impl Encodable for u64 { - fn encode(&self, buf: &mut S) -> usize { - let mut val = *self; - let mut bytes_written = 0; - loop { - let mut byte = low_bits_of_u64(val); - val >>= 7; - if val != 0 { - // More bytes to come, so set the continuation bit. - byte |= CONTINUATION_BIT; - } - - buf.append(&[byte]); - bytes_written += 1; - - if val == 0 { - return bytes_written; - } - } - } - } - - impl Encodable for i64 { - fn encode(&self, buf: &mut S) -> usize { - let mut val = *self; - let mut bytes_written = 0; - loop { - let mut byte = val as u8; - // Keep the sign bit for testing - val >>= 6; - let done = val == 0 || val == -1; - if done { - byte &= !CONTINUATION_BIT; - } else { - // Remove the sign bit - val >>= 1; - // More bytes to come, so set the continuation bit. 
- byte |= CONTINUATION_BIT; - } - - buf.append(&[byte]); - bytes_written += 1; - - if done { - return bytes_written; - } - } - } - } - - #[doc(hidden)] - const CONTINUATION_BIT: u8 = 1 << 7; - - #[inline] - fn low_bits_of_byte(byte: u8) -> u8 { - byte & !CONTINUATION_BIT - } - - #[inline] - fn low_bits_of_u64(val: u64) -> u8 { - let byte = val & (std::u8::MAX as u64); - low_bits_of_byte(byte as u8) - } -} diff --git a/rust/automerge/src/columnar/encoding/leb128.rs b/rust/automerge/src/columnar/encoding/leb128.rs deleted file mode 100644 index cbb82c31..00000000 --- a/rust/automerge/src/columnar/encoding/leb128.rs +++ /dev/null @@ -1,82 +0,0 @@ -/// The number of bytes required to encode `val` as a LEB128 integer -pub(crate) fn lebsize(mut val: i64) -> u64 { - if val < 0 { - val = !val - } - // 1 extra for the sign bit - leb_bytes(1 + 64 - val.leading_zeros() as u64) -} - -/// The number of bytes required to encode `val` as a uLEB128 integer -pub(crate) fn ulebsize(val: u64) -> u64 { - if val == 0 { - return 1; - } - leb_bytes(64 - val.leading_zeros() as u64) -} - -fn leb_bytes(bits: u64) -> u64 { - (bits + 6) / 7 -} - -#[cfg(test)] -mod tests { - use super::*; - use proptest::prelude::*; - - proptest! 
{ - #[test] - fn test_ulebsize(val in 0..u64::MAX) { - let mut out = Vec::new(); - leb128::write::unsigned(&mut out, val).unwrap(); - let expected = out.len() as u64; - assert_eq!(expected, ulebsize(val)) - } - - #[test] - fn test_lebsize(val in i64::MIN..i64::MAX) { - let mut out = Vec::new(); - leb128::write::signed(&mut out, val).unwrap(); - let expected = out.len() as u64; - assert_eq!(expected, lebsize(val)) - } - } - - #[test] - fn ulebsize_examples() { - let scenarios = vec![0, 1, 127, 128, 129, 169, u64::MAX]; - for val in scenarios { - let mut out = Vec::new(); - leb128::write::unsigned(&mut out, val).unwrap(); - let expected = out.len() as u64; - assert_eq!(ulebsize(val), expected, "value: {}", val) - } - } - - #[test] - fn lebsize_examples() { - let scenarios = vec![ - 0, - 1, - -1, - 63, - 64, - -64, - -65, - 127, - 128, - -127, - -128, - -2097152, - 169, - i64::MIN, - i64::MAX, - ]; - for val in scenarios { - let mut out = Vec::new(); - leb128::write::signed(&mut out, val).unwrap(); - let expected = out.len() as u64; - assert_eq!(lebsize(val), expected, "value: {}", val) - } - } -} diff --git a/rust/automerge/src/columnar/encoding/properties.rs b/rust/automerge/src/columnar/encoding/properties.rs deleted file mode 100644 index 30f1169d..00000000 --- a/rust/automerge/src/columnar/encoding/properties.rs +++ /dev/null @@ -1,178 +0,0 @@ -//! Helpers for property tests. 
- -use std::{fmt::Debug, ops::Range}; - -use proptest::prelude::*; -use smol_str::SmolStr; - -use crate::{ - columnar::Key, - types::{ElemId, OpId, ScalarValue}, -}; - -#[derive(Clone, Debug)] -pub(crate) struct SpliceScenario { - pub(crate) initial_values: Vec, - pub(crate) replace_range: Range, - pub(crate) replacements: Vec, -} - -impl SpliceScenario { - pub(crate) fn check(&self, results: Vec) { - let mut expected = self.initial_values.clone(); - expected.splice(self.replace_range.clone(), self.replacements.clone()); - assert_eq!(expected, results) - } -} - -impl SpliceScenario> { - /// Checks that `results` are the same as `SpliceScenario::initial_values.splice(replace_range, - /// replacements)`, with two slight changes: - /// - /// * If all of `initial_values` are `None` then this returns true if the output is just - /// `replacements` - /// * If the result of `Vec::splice` would return a vector of all `None` then this checks the - /// result is actually an empty vector - /// - /// This is to accomodate the fact that the RLE encoder can encode a sequence of all `None` as - /// an empty sequence, in which case we decode it as an empty sequence. 
- pub(crate) fn check_optional(&self, results: Vec>) { - if self.initial_values.iter().all(|v| v.is_none()) { - if self.replacements.iter().all(|v| v.is_none()) { - assert!(results.is_empty()); - } else { - assert_eq!(results, self.replacements); - } - } else { - let mut expected = self.initial_values.clone(); - expected.splice(self.replace_range.clone(), self.replacements.clone()); - if expected.iter().all(|e| e.is_none()) { - assert!(results.is_empty()) - } else { - assert_eq!(expected, results) - } - } - } -} - -pub(crate) fn splice_scenario + Clone, T: Debug + Clone + 'static>( - item_strat: S, -) -> impl Strategy> { - ( - proptest::collection::vec(item_strat.clone(), 0..100), - proptest::collection::vec(item_strat, 0..10), - ) - .prop_flat_map(move |(values, to_splice)| { - if values.is_empty() { - Just(SpliceScenario { - initial_values: values, - replace_range: 0..0, - replacements: to_splice, - }) - .boxed() - } else { - // This is somewhat awkward to write because we have to carry the `values` and - // `to_splice` through as `Just(..)` to please the borrow checker. - (0..values.len(), Just(values), Just(to_splice)) - .prop_flat_map(move |(replace_range_start, values, to_splice)| { - ( - 0..(values.len() - replace_range_start), - Just(values), - Just(to_splice), - ) - .prop_map( - move |(replace_range_len, values, to_splice)| SpliceScenario { - initial_values: values, - replace_range: replace_range_start - ..(replace_range_start + replace_range_len), - replacements: to_splice, - }, - ) - }) - .boxed() - } - }) -} - -/// Like splice scenario except that if the initial values we generate are all `None` then the -/// replace range is 0..0. 
-pub(crate) fn option_splice_scenario< - S: Strategy> + Clone, - T: Debug + Clone + 'static, ->( - item_strat: S, -) -> impl Strategy>> { - ( - proptest::collection::vec(item_strat.clone(), 0..100), - proptest::collection::vec(item_strat, 0..10), - ) - .prop_flat_map(move |(values, to_splice)| { - if values.is_empty() || values.iter().all(|v| v.is_none()) { - Just(SpliceScenario { - initial_values: values, - replace_range: 0..0, - replacements: to_splice, - }) - .boxed() - } else { - // This is somewhat awkward to write because we have to carry the `values` and - // `to_splice` through as `Just(..)` to please the borrow checker. - (0..values.len(), Just(values), Just(to_splice)) - .prop_flat_map(move |(replace_range_start, values, to_splice)| { - ( - 0..(values.len() - replace_range_start), - Just(values), - Just(to_splice), - ) - .prop_map( - move |(replace_range_len, values, to_splice)| SpliceScenario { - initial_values: values, - replace_range: replace_range_start - ..(replace_range_start + replace_range_len), - replacements: to_splice, - }, - ) - }) - .boxed() - } - }) -} - -pub(crate) fn opid() -> impl Strategy + Clone { - (0..(u32::MAX as usize), 0..(u32::MAX as u64)).prop_map(|(actor, ctr)| OpId::new(ctr, actor)) -} - -pub(crate) fn elemid() -> impl Strategy + Clone { - opid().prop_map(ElemId) -} - -pub(crate) fn key() -> impl Strategy + Clone { - prop_oneof! { - elemid().prop_map(Key::Elem), - any::().prop_map(|s| Key::Prop(s.into())), - } -} - -pub(crate) fn encodable_int() -> impl Strategy + Clone { - let bounds = i64::MAX / 2; - -bounds..bounds -} - -pub(crate) fn scalar_value() -> impl Strategy + Clone { - prop_oneof! 
{ - Just(ScalarValue::Null), - any::().prop_map(ScalarValue::Boolean), - any::().prop_map(ScalarValue::Uint), - encodable_int().prop_map(ScalarValue::Int), - any::().prop_map(ScalarValue::F64), - smol_str().prop_map(ScalarValue::Str), - any::>().prop_map(ScalarValue::Bytes), - encodable_int().prop_map(|i| ScalarValue::Counter(i.into())), - encodable_int().prop_map(ScalarValue::Timestamp), - (10..15_u8, any::>()).prop_map(|(c, b)| ScalarValue::Unknown { type_code: c, bytes: b }), - } -} - -fn smol_str() -> impl Strategy + Clone { - any::().prop_map(SmolStr::from) -} diff --git a/rust/automerge/src/columnar/encoding/raw.rs b/rust/automerge/src/columnar/encoding/raw.rs deleted file mode 100644 index b86443e5..00000000 --- a/rust/automerge/src/columnar/encoding/raw.rs +++ /dev/null @@ -1,97 +0,0 @@ -use std::{ - borrow::{Borrow, Cow}, - fmt::Debug, -}; - -use super::{Decodable, DecodeError, Encodable, Sink}; - -#[derive(Clone, Debug)] -pub(crate) struct RawDecoder<'a> { - offset: usize, - last_read: usize, - data: Cow<'a, [u8]>, -} - -#[derive(thiserror::Error, Debug)] -pub(crate) enum Error { - #[error("buffer size did not change")] - BufferSizeDidNotChange, - #[error("trying to read past end")] - TryingToReadPastEnd, - #[error(transparent)] - Decode(#[from] DecodeError), -} - -impl<'a> RawDecoder<'a> { - pub(crate) fn new(data: Cow<'a, [u8]>) -> Self { - RawDecoder { - offset: 0, - last_read: 0, - data, - } - } - - pub(crate) fn read(&mut self) -> Result { - let mut buf = &self.data[self.offset..]; - let init_len = buf.len(); - let val = T::decode::<&[u8]>(&mut buf)?; - let delta = init_len - buf.len(); - if delta == 0 { - Err(Error::BufferSizeDidNotChange) - } else { - self.last_read = delta; - self.offset += delta; - Ok(val) - } - } - - pub(crate) fn read_bytes(&mut self, index: usize) -> Result<&[u8], Error> { - if self.offset + index > self.data.len() { - Err(Error::TryingToReadPastEnd) - } else { - let head = &self.data[self.offset..self.offset + index]; - 
self.last_read = index; - self.offset += index; - Ok(head) - } - } - - pub(crate) fn done(&self) -> bool { - self.offset >= self.data.len() - } -} - -impl<'a> From<&'a [u8]> for RawDecoder<'a> { - fn from(d: &'a [u8]) -> Self { - Cow::Borrowed(d).into() - } -} - -impl<'a> From> for RawDecoder<'a> { - fn from(d: Cow<'a, [u8]>) -> Self { - RawDecoder::new(d) - } -} - -pub(crate) struct RawEncoder { - written: usize, - output: S, -} - -impl RawEncoder { - pub(crate) fn append, I: Encodable>(&mut self, value: B) -> usize { - let written = value.borrow().encode(&mut self.output); - self.written += written; - written - } - - pub(crate) fn finish(self) -> (S, usize) { - (self.output, self.written) - } -} - -impl From for RawEncoder { - fn from(output: S) -> Self { - RawEncoder { written: 0, output } - } -} diff --git a/rust/automerge/src/columnar/encoding/rle.rs b/rust/automerge/src/columnar/encoding/rle.rs deleted file mode 100644 index 26a16899..00000000 --- a/rust/automerge/src/columnar/encoding/rle.rs +++ /dev/null @@ -1,239 +0,0 @@ -use std::{ - borrow::{Borrow, Cow}, - fmt::Debug, -}; - -use super::{raw, Decodable, Encodable, RawDecoder, Sink}; - -pub(crate) struct RleEncoder -where - T: Encodable + PartialEq + Clone, -{ - buf: S, - written: usize, - state: RleState, -} - -impl RleEncoder -where - S: Sink, - T: Encodable + PartialEq + Clone, -{ - pub(crate) fn new(output_buf: S) -> RleEncoder { - RleEncoder { - buf: output_buf, - written: 0, - state: RleState::Empty, - } - } - - /// Flush the encoded values and return the output buffer and the number of bytes written - pub(crate) fn finish(mut self) -> (S, usize) { - match self.take_state() { - RleState::InitialNullRun(_size) => {} - RleState::NullRun(size) => { - self.flush_null_run(size); - } - RleState::LoneVal(value) => self.flush_lit_run(vec![value]), - RleState::Run(value, len) => self.flush_run(&value, len), - RleState::LiteralRun(last, mut run) => { - run.push(last); - self.flush_lit_run(run); - } - 
RleState::Empty => {} - } - (self.buf, self.written) - } - - fn flush_run(&mut self, val: &T, len: usize) { - self.encode(&(len as i64)); - self.encode(val); - } - - fn flush_null_run(&mut self, len: usize) { - self.encode::(&0); - self.encode(&len); - } - - fn flush_lit_run(&mut self, run: Vec) { - self.encode(&-(run.len() as i64)); - for val in run { - self.encode(&val); - } - } - - fn take_state(&mut self) -> RleState { - let mut state = RleState::Empty; - std::mem::swap(&mut self.state, &mut state); - state - } - - pub(crate) fn append_null(&mut self) { - self.state = match self.take_state() { - RleState::Empty => RleState::InitialNullRun(1), - RleState::InitialNullRun(size) => RleState::InitialNullRun(size + 1), - RleState::NullRun(size) => RleState::NullRun(size + 1), - RleState::LoneVal(other) => { - self.flush_lit_run(vec![other]); - RleState::NullRun(1) - } - RleState::Run(other, len) => { - self.flush_run(&other, len); - RleState::NullRun(1) - } - RleState::LiteralRun(last, mut run) => { - run.push(last); - self.flush_lit_run(run); - RleState::NullRun(1) - } - } - } - - pub(crate) fn append_value>(&mut self, value: BT) { - self.state = match self.take_state() { - RleState::Empty => RleState::LoneVal(value.borrow().clone()), - RleState::LoneVal(other) => { - if &other == value.borrow() { - RleState::Run(value.borrow().clone(), 2) - } else { - let mut v = Vec::with_capacity(2); - v.push(other); - RleState::LiteralRun(value.borrow().clone(), v) - } - } - RleState::Run(other, len) => { - if &other == value.borrow() { - RleState::Run(other, len + 1) - } else { - self.flush_run(&other, len); - RleState::LoneVal(value.borrow().clone()) - } - } - RleState::LiteralRun(last, mut run) => { - if &last == value.borrow() { - self.flush_lit_run(run); - RleState::Run(value.borrow().clone(), 2) - } else { - run.push(last); - RleState::LiteralRun(value.borrow().clone(), run) - } - } - RleState::NullRun(size) | RleState::InitialNullRun(size) => { - 
self.flush_null_run(size); - RleState::LoneVal(value.borrow().clone()) - } - } - } - - pub(crate) fn append>(&mut self, value: Option) { - match value { - Some(t) => self.append_value(t), - None => self.append_null(), - } - } - - fn encode(&mut self, val: &V) - where - V: Encodable, - { - self.written += val.encode(&mut self.buf); - } -} - -enum RleState { - Empty, - // Note that this is different to a `NullRun` because if every element of a column is null - // (i.e. the state when we call `finish` is `InitialNullRun`) then we don't output anything at - // all for the column - InitialNullRun(usize), - NullRun(usize), - LiteralRun(T, Vec), - LoneVal(T), - Run(T, usize), -} - -impl From for RleEncoder { - fn from(output: S) -> Self { - Self::new(output) - } -} - -/// See discussion on [`RleEncoder`] for the format data is stored in. -#[derive(Clone, Debug)] -pub(crate) struct RleDecoder<'a, T> { - decoder: RawDecoder<'a>, - last_value: Option, - count: isize, - literal: bool, -} - -impl<'a, T> RleDecoder<'a, T> { - pub(crate) fn done(&self) -> bool { - self.decoder.done() && self.count == 0 - } - - fn try_next(&mut self) -> Result>, raw::Error> - where - T: Decodable + Clone + Debug, - { - while self.count == 0 { - if self.decoder.done() { - return Ok(None); - } - match self.decoder.read::()? { - count if count > 0 => { - // normal run - self.count = count as isize; - self.last_value = Some(self.decoder.read()?); - self.literal = false; - } - count if count < 0 => { - // literal run - self.count = count.abs() as isize; - self.literal = true; - } - _ => { - // null run - // FIXME(jeffa5): handle usize > i64 here somehow - self.count = self.decoder.read::()? 
as isize; - self.last_value = None; - self.literal = false; - } - } - } - self.count -= 1; - if self.literal { - Ok(Some(Some(self.decoder.read()?))) - } else { - Ok(Some(self.last_value.clone())) - } - } -} - -impl<'a, T> From> for RleDecoder<'a, T> { - fn from(bytes: Cow<'a, [u8]>) -> Self { - RleDecoder { - decoder: RawDecoder::from(bytes), - last_value: None, - count: 0, - literal: false, - } - } -} - -impl<'a, T> From<&'a [u8]> for RleDecoder<'a, T> { - fn from(d: &'a [u8]) -> Self { - Cow::Borrowed(d).into() - } -} - -impl<'a, T> Iterator for RleDecoder<'a, T> -where - T: Clone + Debug + Decodable, -{ - type Item = Result, raw::Error>; - - fn next(&mut self) -> Option { - self.try_next().transpose() - } -} diff --git a/rust/automerge/src/columnar/splice_error.rs b/rust/automerge/src/columnar/splice_error.rs deleted file mode 100644 index 54d5f478..00000000 --- a/rust/automerge/src/columnar/splice_error.rs +++ /dev/null @@ -1,47 +0,0 @@ -use std::convert::Infallible; - -/// Represents an error which occurred when splicing. -/// -/// When splicing values into existing column storage there are two kinds of errors which can -/// occur, those caused by iterating over the existing items, and those caused by iterating over -/// the replacement items. -#[derive(Debug)] -pub(crate) enum SpliceError { - /// There was an error reading from the existing column storage - ReadExisting(E), - /// There was an error reading from the iterator of new rows - ReadReplace(R), -} - -impl SpliceError { - /// Map a spliceerror which is infallible in it's `Replace` error type into a different error. 
- /// - /// This is used when you have performed a splice with a `replace` iterator which is - /// infallible and need to return a more general `SpliceError` - pub(crate) fn existing(self) -> SpliceError { - match self { - SpliceError::ReadExisting(e) => SpliceError::ReadExisting(e), - SpliceError::ReadReplace(_) => unreachable!("absurd"), - } - } -} - -impl std::error::Error for SpliceError -where - E: std::error::Error, - R: std::error::Error, -{ -} - -impl std::fmt::Display for SpliceError -where - E: std::fmt::Display, - R: std::fmt::Display, -{ - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::ReadExisting(e) => write!(f, "error reading from existing rows: {}", e), - Self::ReadReplace(e) => write!(f, "error reading from replacement rows: {}", e), - } - } -} diff --git a/rust/automerge/src/convert.rs b/rust/automerge/src/convert.rs deleted file mode 100644 index a99f96a1..00000000 --- a/rust/automerge/src/convert.rs +++ /dev/null @@ -1,102 +0,0 @@ -//! Types for converting between different OpId representations -//! -//! In various places throughout the codebase we refer to operation IDs. The canonical type for -//! representing an operation ID is [`crate::types::OpId`]. This type holds the counter of the operation -//! ID but it does not store the actor ID, instead storing an index into an array of actor IDs -//! stored elsewhere. This makes using OpIds very memory efficient. We also store operation IDs on -//! disc. Here again we use a representation where the actor ID is stored as an offset into an -//! array which is held elsewhere. We occasionally do need to refer to an operation ID which -//! contains the full actor ID - typically when exporting to other processes or to the user. -//! -//! This is problematic when we want to write code which is generic over all these representations, -//! or which needs to convert between them. This module hopes to solve that problem. The basic -//! 
approach is to define the trait `OpId`, which is generic over the type of its `actor`. Using a -//! trait means that there is no need to allocate intermediate collections of operation IDs when -//! converting (for example when encoding a bunch of OpSet operation IDs into a change, where we -//! have to translate the indices). -//! -//! Having defined the `OpId` trait we then define a bunch of enums representing each of the -//! entities in the automerge data model which contain an `OpId`, namely `ObjId`, `Key`, and -//! `ElemId`. Each of these enums implements a `map` method, which allows you to convert the actor -//! ID of any contained operation using a mappping function. - -use std::borrow::Cow; - -pub(crate) trait OpId { - fn actor(&self) -> ActorId; - fn counter(&self) -> u64; -} - -#[derive(Clone, Debug)] -pub(crate) enum ObjId { - Root, - Op(O), -} - -impl ObjId { - pub(crate) fn map(self, f: F) -> ObjId

- where - F: Fn(O) -> P, - { - match self { - ObjId::Root => ObjId::Root, - ObjId::Op(o) => ObjId::Op(f(o)), - } - } -} - -#[derive(Clone)] -pub(crate) enum ElemId { - Head, - Op(O), -} - -impl ElemId { - pub(crate) fn map(self, f: F) -> ElemId

- where - F: Fn(O) -> P, - { - match self { - ElemId::Head => ElemId::Head, - ElemId::Op(o) => ElemId::Op(f(o)), - } - } -} - -#[derive(Clone)] -pub(crate) enum Key<'a, O> { - Prop(Cow<'a, smol_str::SmolStr>), - Elem(ElemId), -} - -impl<'a, O> Key<'a, O> { - pub(crate) fn map(self, f: F) -> Key<'a, P> - where - F: Fn(O) -> P, - { - match self { - Key::Prop(p) => Key::Prop(p), - Key::Elem(e) => Key::Elem(e.map(f)), - } - } -} - -impl OpId for crate::types::OpId { - fn counter(&self) -> u64 { - self.counter() - } - - fn actor(&self) -> usize { - self.actor() - } -} - -impl<'a> OpId for &'a crate::types::OpId { - fn counter(&self) -> u64 { - crate::types::OpId::counter(self) - } - - fn actor(&self) -> usize { - crate::types::OpId::actor(self) - } -} diff --git a/rust/automerge/src/error.rs b/rust/automerge/src/error.rs deleted file mode 100644 index 62a7b72f..00000000 --- a/rust/automerge/src/error.rs +++ /dev/null @@ -1,107 +0,0 @@ -use crate::change::LoadError as LoadChangeError; -use crate::storage::load::Error as LoadError; -use crate::types::{ActorId, ScalarValue}; -use crate::value::DataType; -use crate::{ChangeHash, ObjType}; -use thiserror::Error; - -#[derive(Error, Debug)] -pub enum AutomergeError { - #[error(transparent)] - ChangeGraph(#[from] crate::change_graph::MissingDep), - #[error("failed to load compressed data: {0}")] - Deflate(#[source] std::io::Error), - #[error("duplicate seq {0} found for actor {1}")] - DuplicateSeqNumber(u64, ActorId), - #[error("key must not be an empty string")] - EmptyStringKey, - #[error("general failure")] - Fail, - #[error("invalid actor ID `{0}`")] - InvalidActorId(String), - #[error(transparent)] - InvalidChangeHashBytes(#[from] InvalidChangeHashSlice), - #[error("invalid UTF-8 character at {0}")] - InvalidCharacter(usize), - #[error("invalid hash {0}")] - InvalidHash(ChangeHash), - #[error("index {0} is out of bounds")] - InvalidIndex(usize), - #[error("invalid obj id `{0}`")] - InvalidObjId(String), - #[error("invalid 
obj id format `{0}`")] - InvalidObjIdFormat(String), - #[error("invalid op for object of type `{0}`")] - InvalidOp(ObjType), - #[error("seq {0} is out of bounds")] - InvalidSeq(u64), - #[error("invalid type of value, expected `{expected}` but received `{unexpected}`")] - InvalidValueType { - expected: String, - unexpected: String, - }, - #[error(transparent)] - Load(#[from] LoadError), - #[error(transparent)] - LoadChangeError(#[from] LoadChangeError), - #[error("increment operations must be against a counter value")] - MissingCounter, - #[error("hash {0} does not correspond to a change in this document")] - MissingHash(ChangeHash), - #[error("change's deps should already be in the document")] - MissingDeps, - #[error("compressed chunk was not a change")] - NonChangeCompressed, - #[error("id was not an object id")] - NotAnObject, -} - -impl PartialEq for AutomergeError { - fn eq(&self, other: &Self) -> bool { - std::mem::discriminant(self) == std::mem::discriminant(other) - } -} - -#[cfg(feature = "wasm")] -impl From for wasm_bindgen::JsValue { - fn from(err: AutomergeError) -> Self { - js_sys::Error::new(&std::format!("{}", err)).into() - } -} - -#[derive(Error, Debug)] -#[error("Invalid actor ID: {0}")] -pub struct InvalidActorId(pub String); - -#[derive(Error, Debug, PartialEq)] -#[error("Invalid scalar value, expected {expected} but received {unexpected}")] -pub(crate) struct InvalidScalarValue { - pub(crate) raw_value: ScalarValue, - pub(crate) datatype: DataType, - pub(crate) unexpected: String, - pub(crate) expected: String, -} - -#[derive(Error, Debug, Eq, PartialEq)] -#[error("Invalid change hash slice: {0:?}")] -pub struct InvalidChangeHashSlice(pub Vec); - -#[derive(Error, Debug, Eq, PartialEq)] -#[error("Invalid object ID: {0}")] -pub struct InvalidObjectId(pub String); - -#[derive(Error, Debug)] -#[error("Invalid element ID: {0}")] -pub struct InvalidElementId(pub String); - -#[derive(Error, Debug)] -#[error("Invalid OpID: {0}")] -pub struct 
InvalidOpId(pub String); - -#[derive(Error, Debug)] -pub enum InvalidOpType { - #[error("unrecognized action index {0}")] - UnknownAction(u64), - #[error("non numeric argument for inc op")] - NonNumericInc, -} diff --git a/rust/automerge/src/exid.rs b/rust/automerge/src/exid.rs deleted file mode 100644 index 3a5a2ca2..00000000 --- a/rust/automerge/src/exid.rs +++ /dev/null @@ -1,224 +0,0 @@ -use crate::storage::parse; -use crate::ActorId; -use serde::Serialize; -use serde::Serializer; -use std::cmp::{Ord, Ordering}; -use std::fmt; -use std::hash::{Hash, Hasher}; - -/// An identifier for an object in a document -/// -/// This can be persisted using `to_bytes` and `TryFrom<&[u8]>` breaking changes to the -/// serialization format will be considered breaking changes for this library version. -#[derive(Debug, Clone)] -pub enum ExId { - Root, - Id(u64, ActorId, usize), -} - -const SERIALIZATION_VERSION_TAG: u8 = 0; -const TYPE_ROOT: u8 = 0; -const TYPE_ID: u8 = 1; - -impl ExId { - /// Serialize this object ID to a byte array. - /// - /// This serialization format is versioned and incompatible changes to it will be considered a - /// breaking change for the version of this library. - pub fn to_bytes(&self) -> Vec { - // The serialized format is - // - // .--------------------------------. - // | version | type | data | - // +--------------------------------+ - // | 4 bytes |4 bytes | variable | - // '--------------------------------' - // - // Version is currently always `0` - // - // `data` depends on the type - // - // * If the type is `TYPE_ROOT` (0) then there is no data - // * If the type is `TYPE_ID` (1) then the data is - // - // .-------------------------------------------------------. - // | actor ID len | actor ID bytes | counter | actor index | - // '-------------------------------------------------------' - // - // Where the actor ID len, counter, and actor index are all uLEB encoded - // integers. The actor ID bytes is just an array of bytes. 
- // - match self { - ExId::Root => { - let val: u8 = SERIALIZATION_VERSION_TAG | (TYPE_ROOT << 4); - vec![val] - } - ExId::Id(id, actor, counter) => { - let actor_bytes = actor.to_bytes(); - let mut bytes = Vec::with_capacity(actor_bytes.len() + 4 + 4); - let tag = SERIALIZATION_VERSION_TAG | (TYPE_ID << 4); - bytes.push(tag); - leb128::write::unsigned(&mut bytes, actor_bytes.len() as u64).unwrap(); - bytes.extend_from_slice(actor_bytes); - leb128::write::unsigned(&mut bytes, *counter as u64).unwrap(); - leb128::write::unsigned(&mut bytes, *id).unwrap(); - bytes - } - } - } -} - -#[derive(Debug, thiserror::Error)] -pub enum ObjIdFromBytesError { - #[error("no version tag")] - NoVersion, - #[error("invalid version tag")] - InvalidVersion(u8), - #[error("invalid type tag")] - InvalidType(u8), - #[error("invalid Actor ID length: {0}")] - ParseActorLen(String), - #[error("Not enough bytes in actor ID")] - ParseActor, - #[error("invalid counter: {0}")] - ParseCounter(String), - #[error("invalid actor index hint: {0}")] - ParseActorIdxHint(String), -} - -impl<'a> TryFrom<&'a [u8]> for ExId { - type Error = ObjIdFromBytesError; - - fn try_from(value: &'a [u8]) -> Result { - let i = parse::Input::new(value); - let (i, tag) = parse::take1::<()>(i).map_err(|_| ObjIdFromBytesError::NoVersion)?; - let version = tag & 0b1111; - if version != SERIALIZATION_VERSION_TAG { - return Err(ObjIdFromBytesError::InvalidVersion(version)); - } - let type_tag = tag >> 4; - match type_tag { - TYPE_ROOT => Ok(ExId::Root), - TYPE_ID => { - let (i, len) = parse::leb128_u64::(i) - .map_err(|e| ObjIdFromBytesError::ParseActorLen(e.to_string()))?; - let (i, actor) = parse::take_n::<()>(len as usize, i) - .map_err(|_| ObjIdFromBytesError::ParseActor)?; - let (i, counter) = parse::leb128_u64::(i) - .map_err(|e| ObjIdFromBytesError::ParseCounter(e.to_string()))?; - let (_i, actor_idx_hint) = parse::leb128_u64::(i) - .map_err(|e| ObjIdFromBytesError::ParseActorIdxHint(e.to_string()))?; - 
Ok(Self::Id(actor_idx_hint, actor.into(), counter as usize)) - } - other => Err(ObjIdFromBytesError::InvalidType(other)), - } - } -} - -impl PartialEq for ExId { - fn eq(&self, other: &Self) -> bool { - match (self, other) { - (ExId::Root, ExId::Root) => true, - (ExId::Id(ctr1, actor1, _), ExId::Id(ctr2, actor2, _)) - if ctr1 == ctr2 && actor1 == actor2 => - { - true - } - _ => false, - } - } -} - -impl Eq for ExId {} - -impl fmt::Display for ExId { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - ExId::Root => write!(f, "_root"), - ExId::Id(ctr, actor, _) => write!(f, "{}@{}", ctr, actor), - } - } -} - -impl Hash for ExId { - fn hash(&self, state: &mut H) { - match self { - ExId::Root => 0.hash(state), - ExId::Id(ctr, actor, _) => { - ctr.hash(state); - actor.hash(state); - } - } - } -} - -impl Ord for ExId { - fn cmp(&self, other: &Self) -> Ordering { - match (self, other) { - (ExId::Root, ExId::Root) => Ordering::Equal, - (ExId::Root, _) => Ordering::Less, - (_, ExId::Root) => Ordering::Greater, - (ExId::Id(c1, a1, _), ExId::Id(c2, a2, _)) if c1 == c2 => a2.cmp(a1), - (ExId::Id(c1, _, _), ExId::Id(c2, _, _)) => c1.cmp(c2), - } - } -} - -impl PartialOrd for ExId { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Serialize for ExId { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_str(self.to_string().as_str()) - } -} - -impl AsRef for ExId { - fn as_ref(&self) -> &ExId { - self - } -} - -#[cfg(test)] -mod tests { - use super::ExId; - use proptest::prelude::*; - - use crate::ActorId; - - fn gen_actorid() -> impl Strategy { - proptest::collection::vec(any::(), 0..100).prop_map(ActorId::from) - } - - prop_compose! 
{ - fn gen_non_root_objid()(actor in gen_actorid(), counter in any::(), idx in any::()) -> ExId { - ExId::Id(idx as u64, actor, counter) - } - } - - fn gen_obji() -> impl Strategy { - prop_oneof![Just(ExId::Root), gen_non_root_objid()] - } - - proptest! { - #[test] - fn objid_roundtrip(objid in gen_obji()) { - let bytes = objid.to_bytes(); - let objid2 = ExId::try_from(&bytes[..]).unwrap(); - assert_eq!(objid, objid2); - } - } - - #[test] - fn test_root_roundtrip() { - let bytes = ExId::Root.to_bytes(); - let objid2 = ExId::try_from(&bytes[..]).unwrap(); - assert_eq!(ExId::Root, objid2); - } -} diff --git a/rust/automerge/src/lib.rs b/rust/automerge/src/lib.rs deleted file mode 100644 index cbb535af..00000000 --- a/rust/automerge/src/lib.rs +++ /dev/null @@ -1,301 +0,0 @@ -//! # Automerge -//! -//! Automerge is a library of data structures for building collaborative, -//! [local-first](https://www.inkandswitch.com/local-first/) applications. The -//! idea of automerge is to provide a data structure which is quite general, -//! \- consisting of nested key/value maps and/or lists - which can be modified -//! entirely locally but which can at any time be merged with other instances of -//! the same data structure. -//! -//! In addition to the core data structure (which we generally refer to as a -//! "document"), we also provide an implementation of a sync protocol (in -//! [`crate::sync`]) which can be used over any reliable in-order transport; and -//! an efficient binary storage format. -//! -//! This crate is organised around two representations of a document - -//! [`Automerge`] and [`AutoCommit`]. The difference between the two is that -//! [`AutoCommit`] manages transactions for you. Both of these representations -//! implement [`ReadDoc`] for reading values from a document and -//! [`sync::SyncDoc`] for taking part in the sync protocol. [`AutoCommit`] -//! directly implements [`transaction::Transactable`] for making changes to a -//! 
document, whilst [`Automerge`] requires you to explicitly create a -//! [`transaction::Transaction`]. -//! -//! NOTE: The API this library provides for modifying data is quite low level -//! (somewhat analogous to directly creating JSON values rather than using -//! `serde` derive macros or equivalent). If you're writing a Rust application which uses automerge -//! you may want to look at [autosurgeon](https://github.com/automerge/autosurgeon). -//! -//! ## Data Model -//! -//! An automerge document is a map from strings to values -//! ([`Value`]) where values can be either -//! -//! * A nested composite value which is either -//! * A map from strings to values ([`ObjType::Map`]) -//! * A list of values ([`ObjType::List`]) -//! * A text object (a sequence of unicode characters) ([`ObjType::Text`]) -//! * A primitive value ([`ScalarValue`]) which is one of -//! * A string -//! * A 64 bit floating point number -//! * A signed 64 bit integer -//! * An unsigned 64 bit integer -//! * A boolean -//! * A counter object (a 64 bit integer which merges by addition) -//! ([`ScalarValue::Counter`]) -//! * A timestamp (a 64 bit integer which is milliseconds since the unix epoch) -//! -//! All composite values have an ID ([`ObjId`]) which is created when the value -//! is inserted into the document or is the root object ID [`ROOT`]. Values in -//! the document are then referred to by the pair (`object ID`, `key`). The -//! `key` is represented by the [`Prop`] type and is either a string for a maps, -//! or an index for sequences. -//! -//! ### Conflicts -//! -//! There are some things automerge cannot merge sensibly. For example, two -//! actors concurrently setting the key "name" to different values. In this case -//! automerge will pick a winning value in a random but deterministic way, but -//! the conflicting value is still available via the [`ReadDoc::get_all`] method. -//! -//! ### Change hashes and historical values -//! -//! 
Like git, points in the history of a document are identified by hash. Unlike -//! git there can be multiple hashes representing a particular point (because -//! automerge supports concurrent changes). These hashes can be obtained using -//! either [`Automerge::get_heads`] or [`AutoCommit::get_heads`] (note these -//! methods are not part of [`ReadDoc`] because in the case of [`AutoCommit`] it -//! requires a mutable reference to the document). -//! -//! These hashes can be used to read values from the document at a particular -//! point in history using the various `*_at` methods on [`ReadDoc`] which take a -//! slice of [`ChangeHash`] as an argument. -//! -//! ### Actor IDs -//! -//! Any change to an automerge document is made by an actor, represented by an -//! [`ActorId`]. An actor ID is any random sequence of bytes but each change by -//! the same actor ID must be sequential. This often means you will want to -//! maintain at least one actor ID per device. It is fine to generate a new -//! actor ID for each change, but be aware that each actor ID takes up space in -//! a document so if you expect a document to be long lived and/or to have many -//! changes then you should try to reuse actor IDs where possible. -//! -//! ### Text Encoding -//! -//! Both [`Automerge`] and [`AutoCommit`] provide a `with_encoding` method which -//! allows you to specify the [`crate::TextEncoding`] which is used for -//! interpreting the indexes passed to methods like [`ReadDoc::list_range`] or -//! [`transaction::Transactable::splice`]. The default encoding is UTF-8, but -//! you can switch to UTF-16. -//! -//! ## Sync Protocol -//! -//! See the [`sync`] module. -//! -//! ## Serde serialization -//! -//! Sometimes you just want to get the JSON value of an automerge document. For -//! this you can use [`AutoSerde`], which implements `serde::Serialize` for an -//! automerge document. -//! -//! ## Example -//! -//! Let's create a document representing an address book. -//! -//! 
``` -//! use automerge::{ObjType, AutoCommit, transaction::Transactable, ReadDoc}; -//! -//! # fn main() -> Result<(), Box> { -//! let mut doc = AutoCommit::new(); -//! -//! // `put_object` creates a nested object in the root key/value map and -//! // returns the ID of the new object, in this case a list. -//! let contacts = doc.put_object(automerge::ROOT, "contacts", ObjType::List)?; -//! -//! // Now we can insert objects into the list -//! let alice = doc.insert_object(&contacts, 0, ObjType::Map)?; -//! -//! // Finally we can set keys in the "alice" map -//! doc.put(&alice, "name", "Alice")?; -//! doc.put(&alice, "email", "alice@example.com")?; -//! -//! // Create another contact -//! let bob = doc.insert_object(&contacts, 1, ObjType::Map)?; -//! doc.put(&bob, "name", "Bob")?; -//! doc.put(&bob, "email", "bob@example.com")?; -//! -//! // Now we save the address book, we can put this in a file -//! let data: Vec = doc.save(); -//! # Ok(()) -//! # } -//! ``` -//! -//! Now modify this document on two separate devices and merge the modifications. -//! -//! ``` -//! use std::borrow::Cow; -//! use automerge::{ObjType, AutoCommit, transaction::Transactable, ReadDoc}; -//! -//! # fn main() -> Result<(), Box> { -//! # let mut doc = AutoCommit::new(); -//! # let contacts = doc.put_object(automerge::ROOT, "contacts", ObjType::List)?; -//! # let alice = doc.insert_object(&contacts, 0, ObjType::Map)?; -//! # doc.put(&alice, "name", "Alice")?; -//! # doc.put(&alice, "email", "alice@example.com")?; -//! # let bob = doc.insert_object(&contacts, 1, ObjType::Map)?; -//! # doc.put(&bob, "name", "Bob")?; -//! # doc.put(&bob, "email", "bob@example.com")?; -//! # let saved: Vec = doc.save(); -//! -//! // Load the document on the first device and change alices email -//! let mut doc1 = AutoCommit::load(&saved)?; -//! let contacts = match doc1.get(automerge::ROOT, "contacts")? { -//! Some((automerge::Value::Object(ObjType::List), contacts)) => contacts, -//! 
_ => panic!("contacts should be a list"), -//! }; -//! let alice = match doc1.get(&contacts, 0)? { -//! Some((automerge::Value::Object(ObjType::Map), alice)) => alice, -//! _ => panic!("alice should be a map"), -//! }; -//! doc1.put(&alice, "email", "alicesnewemail@example.com")?; -//! -//! -//! // Load the document on the second device and change bobs name -//! let mut doc2 = AutoCommit::load(&saved)?; -//! let contacts = match doc2.get(automerge::ROOT, "contacts")? { -//! Some((automerge::Value::Object(ObjType::List), contacts)) => contacts, -//! _ => panic!("contacts should be a list"), -//! }; -//! let bob = match doc2.get(&contacts, 1)? { -//! Some((automerge::Value::Object(ObjType::Map), bob)) => bob, -//! _ => panic!("bob should be a map"), -//! }; -//! doc2.put(&bob, "name", "Robert")?; -//! -//! // Finally, we can merge the changes from the two devices -//! doc1.merge(&mut doc2)?; -//! let bobsname: Option = doc1.get(&bob, "name")?.map(|(v, _)| v); -//! assert_eq!(bobsname, Some(automerge::Value::Scalar(Cow::Owned("Robert".into())))); -//! -//! let alices_email: Option = doc1.get(&alice, "email")?.map(|(v, _)| v); -//! assert_eq!(alices_email, Some(automerge::Value::Scalar(Cow::Owned("alicesnewemail@example.com".into())))); -//! # Ok(()) -//! # } -//! ``` -//! - -#![doc( - html_logo_url = "https://raw.githubusercontent.com/automerge/automerge-rs/main/img/brandmark.svg", - html_favicon_url = "https:///raw.githubusercontent.com/automerge/automerge-rs/main/img/favicon.ico" -)] -#![warn( - missing_debug_implementations, - // missing_docs, // TODO: add documentation! - rust_2018_idioms, - unreachable_pub, - bad_style, - dead_code, - improper_ctypes, - non_shorthand_field_patterns, - no_mangle_generic_items, - overflowing_literals, - path_statements, - patterns_in_fns_without_body, - private_in_public, - unconditional_recursion, - unused, - unused_allocation, - unused_comparisons, - unused_parens, - while_true -)] - -#[doc(hidden)] -#[macro_export] -macro_rules! 
log { - ( $( $t:tt )* ) => { - { - use $crate::__log; - __log!( $( $t )* ); - } - } - } - -#[cfg(all(feature = "wasm", target_family = "wasm"))] -#[doc(hidden)] -#[macro_export] -macro_rules! __log { - ( $( $t:tt )* ) => { - web_sys::console::log_1(&format!( $( $t )* ).into()); - } - } - -#[cfg(not(all(feature = "wasm", target_family = "wasm")))] -#[doc(hidden)] -#[macro_export] -macro_rules! __log { - ( $( $t:tt )* ) => { - println!( $( $t )* ); - } - } - -mod autocommit; -mod automerge; -mod autoserde; -mod change; -mod change_graph; -mod clock; -mod columnar; -mod convert; -mod error; -mod exid; -mod indexed_cache; -mod keys; -mod keys_at; -mod legacy; -mod list_range; -mod list_range_at; -mod map_range; -mod map_range_at; -pub mod op_observer; -mod op_set; -mod op_tree; -mod parents; -mod query; -mod read; -mod storage; -pub mod sync; -pub mod transaction; -mod types; -mod value; -mod values; -#[cfg(feature = "optree-visualisation")] -mod visualisation; - -pub use crate::automerge::{Automerge, OnPartialLoad}; -pub use autocommit::{AutoCommit, AutoCommitWithObs}; -pub use autoserde::AutoSerde; -pub use change::{Change, LoadError as LoadChangeError}; -pub use error::AutomergeError; -pub use error::InvalidActorId; -pub use error::InvalidChangeHashSlice; -pub use exid::{ExId as ObjId, ObjIdFromBytesError}; -pub use keys::Keys; -pub use keys_at::KeysAt; -pub use legacy::Change as ExpandedChange; -pub use list_range::ListRange; -pub use list_range_at::ListRangeAt; -pub use map_range::MapRange; -pub use map_range_at::MapRangeAt; -pub use op_observer::OpObserver; -pub use op_observer::Patch; -pub use op_observer::VecOpObserver; -pub use parents::{Parent, Parents}; -pub use read::ReadDoc; -pub use types::{ActorId, ChangeHash, ObjType, OpType, ParseChangeHashError, Prop, TextEncoding}; -pub use value::{ScalarValue, Value}; -pub use values::Values; - -/// The object ID for the root map of a document -pub const ROOT: ObjId = ObjId::Root; diff --git 
a/rust/automerge/src/list_range.rs b/rust/automerge/src/list_range.rs deleted file mode 100644 index a043da72..00000000 --- a/rust/automerge/src/list_range.rs +++ /dev/null @@ -1,30 +0,0 @@ -use crate::{exid::ExId, Value}; - -use crate::{query, Automerge}; -use std::ops::RangeBounds; - -/// An iterator over the elements of a list object -/// -/// This is returned by the [`crate::ReadDoc::list_range`] method -#[derive(Debug)] -pub struct ListRange<'a, R: RangeBounds> { - range: Option>, - doc: &'a Automerge, -} - -impl<'a, R: RangeBounds> ListRange<'a, R> { - pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { - Self { range, doc } - } -} - -impl<'a, R: RangeBounds> Iterator for ListRange<'a, R> { - type Item = (usize, Value<'a>, ExId); - - fn next(&mut self) -> Option { - self.range - .as_mut()? - .next() - .map(|(idx, value, id)| (idx, value, self.doc.id_to_exid(id))) - } -} diff --git a/rust/automerge/src/list_range_at.rs b/rust/automerge/src/list_range_at.rs deleted file mode 100644 index ce8f5a46..00000000 --- a/rust/automerge/src/list_range_at.rs +++ /dev/null @@ -1,30 +0,0 @@ -use crate::{exid::ExId, Value}; -use std::ops::RangeBounds; - -use crate::{query, Automerge}; - -/// An iterator over the elements of a list object at a particular set of heads -/// -/// This is returned by the [`crate::ReadDoc::list_range_at`] method -#[derive(Debug)] -pub struct ListRangeAt<'a, R: RangeBounds> { - range: Option>, - doc: &'a Automerge, -} - -impl<'a, R: RangeBounds> ListRangeAt<'a, R> { - pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { - Self { range, doc } - } -} - -impl<'a, R: RangeBounds> Iterator for ListRangeAt<'a, R> { - type Item = (usize, Value<'a>, ExId); - - fn next(&mut self) -> Option { - self.range - .as_mut()? 
- .next() - .map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))) - } -} diff --git a/rust/automerge/src/map_range.rs b/rust/automerge/src/map_range.rs deleted file mode 100644 index ad33ebf5..00000000 --- a/rust/automerge/src/map_range.rs +++ /dev/null @@ -1,39 +0,0 @@ -use crate::{exid::ExId, Value}; -use std::ops::RangeBounds; - -use crate::{query, Automerge}; - -/// An iterator over the keys and values of a map object -/// -/// This is returned by the [`crate::ReadDoc::map_range`] method -#[derive(Debug)] -pub struct MapRange<'a, R: RangeBounds> { - range: Option>, - doc: &'a Automerge, -} - -impl<'a, R: RangeBounds> MapRange<'a, R> { - pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { - Self { range, doc } - } -} - -impl<'a, R: RangeBounds> Iterator for MapRange<'a, R> { - type Item = (&'a str, Value<'a>, ExId); - - fn next(&mut self) -> Option { - self.range - .as_mut()? - .next() - .map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))) - } -} - -impl<'a, R: RangeBounds> DoubleEndedIterator for MapRange<'a, R> { - fn next_back(&mut self) -> Option { - self.range - .as_mut()? 
- .next_back() - .map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))) - } -} diff --git a/rust/automerge/src/map_range_at.rs b/rust/automerge/src/map_range_at.rs deleted file mode 100644 index 8d008e89..00000000 --- a/rust/automerge/src/map_range_at.rs +++ /dev/null @@ -1,39 +0,0 @@ -use crate::{exid::ExId, Value}; -use std::ops::RangeBounds; - -use crate::{query, Automerge}; - -/// An iterator over the keys and values of a map object as at a particuar heads -/// -/// This is returned by the [`crate::ReadDoc::map_range_at`] method -#[derive(Debug)] -pub struct MapRangeAt<'a, R: RangeBounds> { - range: Option>, - doc: &'a Automerge, -} - -impl<'a, R: RangeBounds> MapRangeAt<'a, R> { - pub(crate) fn new(doc: &'a Automerge, range: Option>) -> Self { - Self { range, doc } - } -} - -impl<'a, R: RangeBounds> Iterator for MapRangeAt<'a, R> { - type Item = (&'a str, Value<'a>, ExId); - - fn next(&mut self) -> Option { - self.range - .as_mut()? - .next() - .map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))) - } -} - -impl<'a, R: RangeBounds> DoubleEndedIterator for MapRangeAt<'a, R> { - fn next_back(&mut self) -> Option { - self.range - .as_mut()? - .next_back() - .map(|(key, value, id)| (key, value, self.doc.id_to_exid(id))) - } -} diff --git a/rust/automerge/src/op_observer.rs b/rust/automerge/src/op_observer.rs deleted file mode 100644 index 5b33c21f..00000000 --- a/rust/automerge/src/op_observer.rs +++ /dev/null @@ -1,392 +0,0 @@ -use crate::exid::ExId; -use crate::Prop; -use crate::ReadDoc; -use crate::Value; - -mod compose; -pub use compose::compose; - -/// An observer of operations applied to the document. -pub trait OpObserver { - /// A new value has been inserted into the given object. - /// - /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information - /// - `objid`: the object that has been inserted into. - /// - `index`: the index the new value has been inserted at. 
- /// - `tagged_value`: the value that has been inserted and the id of the operation that did the - /// insert. - fn insert( - &mut self, - doc: &R, - objid: ExId, - index: usize, - tagged_value: (Value<'_>, ExId), - ); - - /// Some text has been spliced into a text object - fn splice_text(&mut self, _doc: &R, _objid: ExId, _index: usize, _value: &str); - - /// A new value has been put into the given object. - /// - /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information - /// - `objid`: the object that has been put into. - /// - `prop`: the prop that the value as been put at. - /// - `tagged_value`: the value that has been put into the object and the id of the operation - /// that did the put. - /// - `conflict`: whether this put conflicts with other operations. - fn put( - &mut self, - doc: &R, - objid: ExId, - prop: Prop, - tagged_value: (Value<'_>, ExId), - conflict: bool, - ); - - /// When a delete op exposes a previously conflicted value - /// Similar to a put op - except for maps, lists and text, edits - /// may already exist and need to be queried - /// - /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information - /// - `objid`: the object that has been put into. - /// - `prop`: the prop that the value as been put at. - /// - `tagged_value`: the value that has been put into the object and the id of the operation - /// that did the put. - /// - `conflict`: whether this put conflicts with other operations. - fn expose( - &mut self, - doc: &R, - objid: ExId, - prop: Prop, - tagged_value: (Value<'_>, ExId), - conflict: bool, - ); - - /// Flag a new conflict on a value without changing it - /// - /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information - /// - `objid`: the object that has been put into. - /// - `prop`: the prop that the value as been put at. 
- fn flag_conflict(&mut self, _doc: &R, _objid: ExId, _prop: Prop) {} - - /// A counter has been incremented. - /// - /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information - /// - `objid`: the object that contains the counter. - /// - `prop`: they prop that the chounter is at. - /// - `tagged_value`: the amount the counter has been incremented by, and the the id of the - /// increment operation. - fn increment( - &mut self, - doc: &R, - objid: ExId, - prop: Prop, - tagged_value: (i64, ExId), - ); - - /// A map value has beeen deleted. - /// - /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information - /// - `objid`: the object that has been deleted in. - /// - `prop`: the prop to be deleted - fn delete(&mut self, doc: &R, objid: ExId, prop: Prop) { - match prop { - Prop::Map(k) => self.delete_map(doc, objid, &k), - Prop::Seq(i) => self.delete_seq(doc, objid, i, 1), - } - } - - /// A map value has beeen deleted. - /// - /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information - /// - `objid`: the object that has been deleted in. - /// - `key`: the map key to be deleted - fn delete_map(&mut self, doc: &R, objid: ExId, key: &str); - - /// A one or more list values have beeen deleted. - /// - /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information - /// - `objid`: the object that has been deleted in. - /// - `index`: the index of the deletion - /// - `num`: the number of sequential elements deleted - fn delete_seq(&mut self, doc: &R, objid: ExId, index: usize, num: usize); - - /// Whether to call sequence methods or `splice_text` when encountering changes in text - /// - /// Returns `false` by default - fn text_as_seq(&self) -> bool { - false - } -} - -/// An observer which can be branched -/// -/// This is used when observing operations in a transaction. 
In this case `branch` will be called -/// at the beginning of the transaction to return a new observer and then `merge` will be called -/// with the branched observer as `other` when the transaction is comitted. -pub trait BranchableObserver { - /// Branch of a new op_observer later to be merged - /// - /// Called when creating a new transaction. Observer branch will be merged on `commit()` or - /// thrown away on `rollback()` - fn branch(&self) -> Self; - - /// Merge observed information from a transaction. - /// - /// Called by AutoCommit on `commit()` - /// - /// - `other`: Another Op Observer of the same type - fn merge(&mut self, other: &Self); -} - -impl OpObserver for () { - fn insert( - &mut self, - _doc: &R, - _objid: ExId, - _index: usize, - _tagged_value: (Value<'_>, ExId), - ) { - } - - fn splice_text(&mut self, _doc: &R, _objid: ExId, _index: usize, _value: &str) {} - - fn put( - &mut self, - _doc: &R, - _objid: ExId, - _prop: Prop, - _tagged_value: (Value<'_>, ExId), - _conflict: bool, - ) { - } - - fn expose( - &mut self, - _doc: &R, - _objid: ExId, - _prop: Prop, - _tagged_value: (Value<'_>, ExId), - _conflict: bool, - ) { - } - - fn increment( - &mut self, - _doc: &R, - _objid: ExId, - _prop: Prop, - _tagged_value: (i64, ExId), - ) { - } - - fn delete_map(&mut self, _doc: &R, _objid: ExId, _key: &str) {} - - fn delete_seq(&mut self, _doc: &R, _objid: ExId, _index: usize, _num: usize) {} -} - -impl BranchableObserver for () { - fn merge(&mut self, _other: &Self) {} - fn branch(&self) -> Self {} -} - -/// Capture operations into a [`Vec`] and store them as patches. -#[derive(Default, Debug, Clone)] -pub struct VecOpObserver { - patches: Vec, -} - -impl VecOpObserver { - /// Take the current list of patches, leaving the internal list empty and ready for new - /// patches. 
- pub fn take_patches(&mut self) -> Vec { - std::mem::take(&mut self.patches) - } -} - -impl OpObserver for VecOpObserver { - fn insert( - &mut self, - doc: &R, - obj: ExId, - index: usize, - (value, id): (Value<'_>, ExId), - ) { - if let Ok(p) = doc.parents(&obj) { - self.patches.push(Patch::Insert { - obj, - path: p.path(), - index, - value: (value.into_owned(), id), - }); - } - } - - fn splice_text(&mut self, doc: &R, obj: ExId, index: usize, value: &str) { - if let Ok(p) = doc.parents(&obj) { - self.patches.push(Patch::Splice { - obj, - path: p.path(), - index, - value: value.to_string(), - }) - } - } - - fn put( - &mut self, - doc: &R, - obj: ExId, - prop: Prop, - (value, id): (Value<'_>, ExId), - conflict: bool, - ) { - if let Ok(p) = doc.parents(&obj) { - self.patches.push(Patch::Put { - obj, - path: p.path(), - prop, - value: (value.into_owned(), id), - conflict, - }); - } - } - - fn expose( - &mut self, - doc: &R, - obj: ExId, - prop: Prop, - (value, id): (Value<'_>, ExId), - conflict: bool, - ) { - if let Ok(p) = doc.parents(&obj) { - self.patches.push(Patch::Expose { - obj, - path: p.path(), - prop, - value: (value.into_owned(), id), - conflict, - }); - } - } - - fn increment(&mut self, doc: &R, obj: ExId, prop: Prop, tagged_value: (i64, ExId)) { - if let Ok(p) = doc.parents(&obj) { - self.patches.push(Patch::Increment { - obj, - path: p.path(), - prop, - value: tagged_value, - }); - } - } - - fn delete_map(&mut self, doc: &R, obj: ExId, key: &str) { - if let Ok(p) = doc.parents(&obj) { - self.patches.push(Patch::Delete { - obj, - path: p.path(), - prop: Prop::Map(key.to_owned()), - num: 1, - }) - } - } - - fn delete_seq(&mut self, doc: &R, obj: ExId, index: usize, num: usize) { - if let Ok(p) = doc.parents(&obj) { - self.patches.push(Patch::Delete { - obj, - path: p.path(), - prop: Prop::Seq(index), - num, - }) - } - } -} - -impl BranchableObserver for VecOpObserver { - fn merge(&mut self, other: &Self) { - 
self.patches.extend_from_slice(other.patches.as_slice()) - } - - fn branch(&self) -> Self { - Self::default() - } -} - -/// A notification to the application that something has changed in a document. -#[derive(Debug, Clone, PartialEq)] -pub enum Patch { - /// Associating a new value with a prop in a map, or an existing list element - Put { - /// path to the object - path: Vec<(ExId, Prop)>, - /// The object that was put into. - obj: ExId, - /// The prop that the new value was put at. - prop: Prop, - /// The value that was put, and the id of the operation that put it there. - value: (Value<'static>, ExId), - /// Whether this put conflicts with another. - conflict: bool, - }, - /// Exposing (via delete) an old but conflicted value with a prop in a map, or a list element - Expose { - /// path to the object - path: Vec<(ExId, Prop)>, - /// The object that was put into. - obj: ExId, - /// The prop that the new value was put at. - prop: Prop, - /// The value that was put, and the id of the operation that put it there. - value: (Value<'static>, ExId), - /// Whether this put conflicts with another. - conflict: bool, - }, - /// Inserting a new element into a list - Insert { - /// path to the object - path: Vec<(ExId, Prop)>, - /// The object that was inserted into. - obj: ExId, - /// The index that the new value was inserted at. - index: usize, - /// The value that was inserted, and the id of the operation that inserted it there. - value: (Value<'static>, ExId), - }, - /// Splicing a text object - Splice { - /// path to the object - path: Vec<(ExId, Prop)>, - /// The object that was inserted into. - obj: ExId, - /// The index that the new value was inserted at. - index: usize, - /// The value that was spliced - value: String, - }, - /// Incrementing a counter. - Increment { - /// path to the object - path: Vec<(ExId, Prop)>, - /// The object that was incremented in. - obj: ExId, - /// The prop that was incremented. 
- prop: Prop, - /// The amount that the counter was incremented by, and the id of the operation that - /// did the increment. - value: (i64, ExId), - }, - /// Deleting an element from a list/text - Delete { - /// path to the object - path: Vec<(ExId, Prop)>, - /// The object that was deleted from. - obj: ExId, - /// The prop that was deleted. - prop: Prop, - /// number of items deleted (for seq) - num: usize, - }, -} diff --git a/rust/automerge/src/op_observer/compose.rs b/rust/automerge/src/op_observer/compose.rs deleted file mode 100644 index 92fe3b1e..00000000 --- a/rust/automerge/src/op_observer/compose.rs +++ /dev/null @@ -1,102 +0,0 @@ -use super::OpObserver; - -pub fn compose<'a, O1: OpObserver, O2: OpObserver>( - obs1: &'a mut O1, - obs2: &'a mut O2, -) -> impl OpObserver + 'a { - ComposeObservers { obs1, obs2 } -} - -struct ComposeObservers<'a, O1: OpObserver, O2: OpObserver> { - obs1: &'a mut O1, - obs2: &'a mut O2, -} - -impl<'a, O1: OpObserver, O2: OpObserver> OpObserver for ComposeObservers<'a, O1, O2> { - fn insert( - &mut self, - doc: &R, - objid: crate::ObjId, - index: usize, - tagged_value: (crate::Value<'_>, crate::ObjId), - ) { - self.obs1 - .insert(doc, objid.clone(), index, tagged_value.clone()); - self.obs2.insert(doc, objid, index, tagged_value); - } - - fn splice_text( - &mut self, - doc: &R, - objid: crate::ObjId, - index: usize, - value: &str, - ) { - self.obs1.splice_text(doc, objid.clone(), index, value); - self.obs2.splice_text(doc, objid, index, value); - } - - fn put( - &mut self, - doc: &R, - objid: crate::ObjId, - prop: crate::Prop, - tagged_value: (crate::Value<'_>, crate::ObjId), - conflict: bool, - ) { - self.obs1.put( - doc, - objid.clone(), - prop.clone(), - tagged_value.clone(), - conflict, - ); - self.obs2.put(doc, objid, prop, tagged_value, conflict); - } - - fn expose( - &mut self, - doc: &R, - objid: crate::ObjId, - prop: crate::Prop, - tagged_value: (crate::Value<'_>, crate::ObjId), - conflict: bool, - ) { - 
self.obs1.expose( - doc, - objid.clone(), - prop.clone(), - tagged_value.clone(), - conflict, - ); - self.obs2.expose(doc, objid, prop, tagged_value, conflict); - } - - fn increment( - &mut self, - doc: &R, - objid: crate::ObjId, - prop: crate::Prop, - tagged_value: (i64, crate::ObjId), - ) { - self.obs1 - .increment(doc, objid.clone(), prop.clone(), tagged_value.clone()); - self.obs2.increment(doc, objid, prop, tagged_value); - } - - fn delete_map(&mut self, doc: &R, objid: crate::ObjId, key: &str) { - self.obs1.delete_map(doc, objid.clone(), key); - self.obs2.delete_map(doc, objid, key); - } - - fn delete_seq( - &mut self, - doc: &R, - objid: crate::ObjId, - index: usize, - num: usize, - ) { - self.obs2.delete_seq(doc, objid.clone(), index, num); - self.obs2.delete_seq(doc, objid, index, num); - } -} diff --git a/rust/automerge/src/op_set.rs b/rust/automerge/src/op_set.rs deleted file mode 100644 index aab8ce74..00000000 --- a/rust/automerge/src/op_set.rs +++ /dev/null @@ -1,405 +0,0 @@ -use crate::clock::Clock; -use crate::exid::ExId; -use crate::indexed_cache::IndexedCache; -use crate::op_tree::{self, OpTree}; -use crate::parents::Parents; -use crate::query::{self, OpIdVisSearch, TreeQuery}; -use crate::types::{self, ActorId, Key, ListEncoding, ObjId, Op, OpId, OpIds, OpType, Prop}; -use crate::ObjType; -use fxhash::FxBuildHasher; -use std::borrow::Borrow; -use std::cmp::Ordering; -use std::collections::HashMap; -use std::ops::RangeBounds; - -mod load; -pub(crate) use load::OpSetBuilder; - -pub(crate) type OpSet = OpSetInternal; - -#[derive(Debug, Clone, PartialEq)] -pub(crate) struct OpSetInternal { - /// The map of objects to their type and ops. - trees: HashMap, - /// The number of operations in the opset. - length: usize, - /// Metadata about the operations in this opset. 
- pub(crate) m: OpSetMetadata, -} - -impl OpSetInternal { - pub(crate) fn builder() -> OpSetBuilder { - OpSetBuilder::new() - } - - pub(crate) fn new() -> Self { - let mut trees: HashMap<_, _, _> = Default::default(); - trees.insert(ObjId::root(), OpTree::new()); - OpSetInternal { - trees, - length: 0, - m: OpSetMetadata { - actors: IndexedCache::new(), - props: IndexedCache::new(), - }, - } - } - - pub(crate) fn id_to_exid(&self, id: OpId) -> ExId { - if id == types::ROOT { - ExId::Root - } else { - ExId::Id( - id.counter(), - self.m.actors.cache[id.actor()].clone(), - id.actor(), - ) - } - } - - pub(crate) fn iter(&self) -> Iter<'_> { - let mut objs: Vec<_> = self.trees.iter().map(|t| (t.0, t.1.objtype, t.1)).collect(); - objs.sort_by(|a, b| self.m.lamport_cmp((a.0).0, (b.0).0)); - Iter { - opset: self, - trees: objs.into_iter(), - current: None, - } - } - - /// Iterate over objects in the opset in causal order - pub(crate) fn iter_objs( - &self, - ) -> impl Iterator)> + '_ { - let mut objs: Vec<_> = self.trees.iter().map(|t| (t.0, t.1.objtype, t.1)).collect(); - objs.sort_by(|a, b| self.m.lamport_cmp((a.0).0, (b.0).0)); - IterObjs { - trees: objs.into_iter(), - } - } - - pub(crate) fn parents(&self, obj: ObjId) -> Parents<'_> { - Parents { obj, ops: self } - } - - pub(crate) fn parent_object(&self, obj: &ObjId) -> Option { - let parent = self.trees.get(obj)?.parent?; - let query = self.search(&parent, OpIdVisSearch::new(obj.0)); - let key = query.key().unwrap(); - let visible = query.visible; - Some(Parent { - obj: parent, - key, - visible, - }) - } - - pub(crate) fn export_key(&self, obj: ObjId, key: Key, encoding: ListEncoding) -> Option { - match key { - Key::Map(m) => self.m.props.safe_get(m).map(|s| Prop::Map(s.to_string())), - Key::Seq(opid) => { - if opid.is_head() { - Some(Prop::Seq(0)) - } else { - self.search(&obj, query::ElemIdPos::new(opid, encoding)) - .index() - .map(Prop::Seq) - } - } - } - } - - pub(crate) fn keys(&self, obj: ObjId) -> Option> { 
- if let Some(tree) = self.trees.get(&obj) { - tree.internal.keys() - } else { - None - } - } - - pub(crate) fn keys_at(&self, obj: ObjId, clock: Clock) -> Option> { - if let Some(tree) = self.trees.get(&obj) { - tree.internal.keys_at(clock) - } else { - None - } - } - - pub(crate) fn map_range>( - &self, - obj: ObjId, - range: R, - ) -> Option> { - if let Some(tree) = self.trees.get(&obj) { - tree.internal.map_range(range, &self.m) - } else { - None - } - } - - pub(crate) fn map_range_at>( - &self, - obj: ObjId, - range: R, - clock: Clock, - ) -> Option> { - if let Some(tree) = self.trees.get(&obj) { - tree.internal.map_range_at(range, &self.m, clock) - } else { - None - } - } - - pub(crate) fn list_range>( - &self, - obj: ObjId, - range: R, - ) -> Option> { - if let Some(tree) = self.trees.get(&obj) { - tree.internal.list_range(range) - } else { - None - } - } - - pub(crate) fn list_range_at>( - &self, - obj: ObjId, - range: R, - clock: Clock, - ) -> Option> { - if let Some(tree) = self.trees.get(&obj) { - tree.internal.list_range_at(range, clock) - } else { - None - } - } - - pub(crate) fn search<'a, 'b: 'a, Q>(&'b self, obj: &ObjId, mut query: Q) -> Q - where - Q: TreeQuery<'a>, - { - if let Some(tree) = self.trees.get(obj) { - if query.can_shortcut_search(tree) { - query - } else { - tree.internal.search(query, &self.m) - } - } else { - query - } - } - - pub(crate) fn change_vis(&mut self, obj: &ObjId, index: usize, f: F) - where - F: Fn(&mut Op), - { - if let Some(tree) = self.trees.get_mut(obj) { - tree.last_insert = None; - tree.internal.update(index, f) - } - } - - /// Add `op` as a successor to each op at `op_indices` in `obj` - pub(crate) fn add_succ(&mut self, obj: &ObjId, op_indices: &[usize], op: &Op) { - if let Some(tree) = self.trees.get_mut(obj) { - tree.last_insert = None; - for i in op_indices { - tree.internal.update(*i, |old_op| { - old_op.add_succ(op, |left, right| self.m.lamport_cmp(*left, *right)) - }); - } - } - } - - pub(crate) fn 
remove(&mut self, obj: &ObjId, index: usize) -> Op { - // this happens on rollback - be sure to go back to the old state - let tree = self.trees.get_mut(obj).unwrap(); - self.length -= 1; - tree.last_insert = None; - let op = tree.internal.remove(index); - if let OpType::Make(_) = &op.action { - self.trees.remove(&op.id.into()); - } - op - } - - pub(crate) fn len(&self) -> usize { - self.length - } - - pub(crate) fn hint(&mut self, obj: &ObjId, index: usize, pos: usize) { - if let Some(tree) = self.trees.get_mut(obj) { - tree.last_insert = Some((index, pos)) - } - } - - #[tracing::instrument(skip(self, index))] - pub(crate) fn insert(&mut self, index: usize, obj: &ObjId, element: Op) { - if let OpType::Make(typ) = element.action { - self.trees.insert( - element.id.into(), - OpTree { - internal: Default::default(), - objtype: typ, - last_insert: None, - parent: Some(*obj), - }, - ); - } - - if let Some(tree) = self.trees.get_mut(obj) { - tree.last_insert = None; - tree.internal.insert(index, element); - self.length += 1; - } else { - tracing::warn!("attempting to insert op for unknown object"); - } - } - - pub(crate) fn object_type(&self, id: &ObjId) -> Option { - self.trees.get(id).map(|tree| tree.objtype) - } - - /// Return a graphviz representation of the opset. 
- /// - /// # Arguments - /// - /// * objects: An optional list of object IDs to display, if not specified all objects are - /// visualised - #[cfg(feature = "optree-visualisation")] - pub(crate) fn visualise(&self, objects: Option>) -> String { - use std::borrow::Cow; - let mut out = Vec::new(); - let trees = if let Some(objects) = objects { - let mut filtered = self.trees.clone(); - filtered.retain(|k, _| objects.contains(k)); - Cow::Owned(filtered) - } else { - Cow::Borrowed(&self.trees) - }; - let graph = super::visualisation::GraphVisualisation::construct(&trees, &self.m); - dot::render(&graph, &mut out).unwrap(); - String::from_utf8_lossy(&out[..]).to_string() - } -} - -impl Default for OpSetInternal { - fn default() -> Self { - Self::new() - } -} - -impl<'a> IntoIterator for &'a OpSetInternal { - type Item = (&'a ObjId, ObjType, &'a Op); - - type IntoIter = Iter<'a>; - - fn into_iter(self) -> Self::IntoIter { - self.iter() - } -} - -pub(crate) struct IterObjs<'a> { - trees: std::vec::IntoIter<(&'a ObjId, ObjType, &'a op_tree::OpTree)>, -} - -impl<'a> Iterator for IterObjs<'a> { - type Item = (&'a ObjId, ObjType, op_tree::OpTreeIter<'a>); - - fn next(&mut self) -> Option { - self.trees - .next() - .map(|(id, typ, tree)| (id, typ, tree.iter())) - } -} - -#[derive(Clone)] -pub(crate) struct Iter<'a> { - opset: &'a OpSet, - trees: std::vec::IntoIter<(&'a ObjId, ObjType, &'a op_tree::OpTree)>, - current: Option<(&'a ObjId, ObjType, op_tree::OpTreeIter<'a>)>, -} -impl<'a> Iterator for Iter<'a> { - type Item = (&'a ObjId, ObjType, &'a Op); - - fn next(&mut self) -> Option { - if let Some((id, typ, tree)) = &mut self.current { - if let Some(next) = tree.next() { - return Some((id, *typ, next)); - } - } - - loop { - self.current = self.trees.next().map(|o| (o.0, o.1, o.2.iter())); - if let Some((obj, typ, tree)) = &mut self.current { - if let Some(next) = tree.next() { - return Some((obj, *typ, next)); - } - } else { - return None; - } - } - } -} - -impl<'a> 
ExactSizeIterator for Iter<'a> { - fn len(&self) -> usize { - self.opset.len() - } -} - -#[derive(Clone, Debug, PartialEq)] -pub(crate) struct OpSetMetadata { - pub(crate) actors: IndexedCache, - pub(crate) props: IndexedCache, -} - -impl Default for OpSetMetadata { - fn default() -> Self { - Self { - actors: IndexedCache::new(), - props: IndexedCache::new(), - } - } -} - -impl OpSetMetadata { - pub(crate) fn from_actors(actors: Vec) -> Self { - Self { - props: IndexedCache::new(), - actors: actors.into_iter().collect(), - } - } - - pub(crate) fn key_cmp(&self, left: &Key, right: &Key) -> Ordering { - match (left, right) { - (Key::Map(a), Key::Map(b)) => self.props[*a].cmp(&self.props[*b]), - _ => panic!("can only compare map keys"), - } - } - - pub(crate) fn lamport_cmp(&self, left: OpId, right: OpId) -> Ordering { - left.lamport_cmp(&right, &self.actors.cache) - } - - pub(crate) fn sorted_opids>(&self, opids: I) -> OpIds { - OpIds::new(opids, |left, right| self.lamport_cmp(*left, *right)) - } - - /// If `opids` are in ascending lamport timestamp order with respect to the actor IDs in - /// this `OpSetMetadata` then this returns `Some(OpIds)`, otherwise returns `None`. 
- pub(crate) fn try_sorted_opids(&self, opids: Vec) -> Option { - OpIds::new_if_sorted(opids, |a, b| self.lamport_cmp(*a, *b)) - } - - pub(crate) fn import_prop>(&mut self, key: S) -> usize { - self.props.cache(key.borrow().to_string()) - } -} - -pub(crate) struct Parent { - pub(crate) obj: ObjId, - pub(crate) key: Key, - pub(crate) visible: bool, -} diff --git a/rust/automerge/src/op_set/load.rs b/rust/automerge/src/op_set/load.rs deleted file mode 100644 index e14f46b7..00000000 --- a/rust/automerge/src/op_set/load.rs +++ /dev/null @@ -1,52 +0,0 @@ -use std::collections::HashMap; - -use fxhash::FxBuildHasher; - -use super::{OpSet, OpTree}; -use crate::{ - op_tree::OpTreeInternal, - storage::load::{DocObserver, LoadedObject}, - types::ObjId, -}; - -/// An opset builder which creates an optree for each object as it finishes loading, inserting the -/// ops using `OpTreeInternal::insert`. This should be faster than using `OpSet::insert_*` but only -/// works because the ops in the document format are in the same order as in the optrees. 
-pub(crate) struct OpSetBuilder { - completed_objects: HashMap, -} - -impl OpSetBuilder { - pub(crate) fn new() -> OpSetBuilder { - Self { - completed_objects: HashMap::default(), - } - } -} - -impl DocObserver for OpSetBuilder { - type Output = OpSet; - - fn object_loaded(&mut self, loaded: LoadedObject) { - let mut internal = OpTreeInternal::new(); - for (index, op) in loaded.ops.into_iter().enumerate() { - internal.insert(index, op); - } - let tree = OpTree { - internal, - objtype: loaded.obj_type, - parent: loaded.parent, - last_insert: None, - }; - self.completed_objects.insert(loaded.id, tree); - } - - fn finish(self, metadata: super::OpSetMetadata) -> Self::Output { - let len = self.completed_objects.values().map(|t| t.len()).sum(); - OpSet { - trees: self.completed_objects, - length: len, - m: metadata, - } - } -} diff --git a/rust/automerge/src/op_tree.rs b/rust/automerge/src/op_tree.rs deleted file mode 100644 index 7de00dc3..00000000 --- a/rust/automerge/src/op_tree.rs +++ /dev/null @@ -1,373 +0,0 @@ -use std::{fmt::Debug, mem, ops::RangeBounds}; - -pub(crate) use crate::op_set::OpSetMetadata; -use crate::{ - clock::Clock, - query::{self, ChangeVisibility, QueryResult, TreeQuery}, -}; -use crate::{ - types::{ObjId, Op, OpId}, - ObjType, -}; -use std::collections::HashSet; - -mod iter; -mod node; - -pub(crate) use iter::OpTreeIter; -#[allow(unused)] -pub(crate) use node::{OpTreeNode, B}; - -#[derive(Debug, Clone, PartialEq)] -pub(crate) struct OpTree { - pub(crate) internal: OpTreeInternal, - pub(crate) objtype: ObjType, - /// The id of the parent object, root has no parent. 
- pub(crate) parent: Option, - /// record the last list index and tree position - /// inserted into the op_set - this allows us to - /// short circuit the query if the follow op is another - /// insert or delete at the same spot - pub(crate) last_insert: Option<(usize, usize)>, -} - -impl OpTree { - pub(crate) fn new() -> Self { - Self { - internal: Default::default(), - objtype: ObjType::Map, - parent: None, - last_insert: None, - } - } - - pub(crate) fn iter(&self) -> OpTreeIter<'_> { - self.internal.iter() - } - - pub(crate) fn len(&self) -> usize { - self.internal.len() - } -} - -#[derive(Clone, Debug)] -pub(crate) struct OpTreeInternal { - pub(crate) root_node: Option, - pub(crate) ops: Vec, -} - -impl OpTreeInternal { - /// Construct a new, empty, sequence. - pub(crate) fn new() -> Self { - Self { - root_node: None, - ops: vec![], - } - } - - /// Get the length of the sequence. - pub(crate) fn len(&self) -> usize { - self.root_node.as_ref().map_or(0, |n| n.len()) - } - - pub(crate) fn keys(&self) -> Option> { - if self.root_node.is_some() { - Some(query::Keys::new(self)) - } else { - None - } - } - - pub(crate) fn keys_at(&self, clock: Clock) -> Option> { - if self.root_node.is_some() { - Some(query::KeysAt::new(self, clock)) - } else { - None - } - } - - pub(crate) fn map_range<'a, R: RangeBounds>( - &'a self, - range: R, - meta: &'a OpSetMetadata, - ) -> Option> { - if self.root_node.is_some() { - Some(query::MapRange::new(range, self, meta)) - } else { - None - } - } - - pub(crate) fn map_range_at<'a, R: RangeBounds>( - &'a self, - range: R, - meta: &'a OpSetMetadata, - clock: Clock, - ) -> Option> { - if self.root_node.is_some() { - Some(query::MapRangeAt::new(range, self, meta, clock)) - } else { - None - } - } - - pub(crate) fn list_range>( - &self, - range: R, - ) -> Option> { - if self.root_node.is_some() { - Some(query::ListRange::new(range, self)) - } else { - None - } - } - - pub(crate) fn list_range_at>( - &self, - range: R, - clock: Clock, - ) -> 
Option> { - if self.root_node.is_some() { - Some(query::ListRangeAt::new(range, clock, self)) - } else { - None - } - } - - pub(crate) fn search<'a, 'b: 'a, Q>(&'b self, mut query: Q, m: &OpSetMetadata) -> Q - where - Q: TreeQuery<'a>, - { - self.root_node.as_ref().map(|root| { - match query.query_node_with_metadata(root, m, &self.ops) { - QueryResult::Descend => root.search(&mut query, m, &self.ops, None), - QueryResult::Skip(skip) => root.search(&mut query, m, &self.ops, Some(skip)), - _ => true, - } - }); - query - } - - /// Create an iterator through the sequence. - pub(crate) fn iter(&self) -> OpTreeIter<'_> { - iter::OpTreeIter::new(self) - } - - /// Insert the `element` into the sequence at `index`. - /// - /// # Panics - /// - /// Panics if `index > len`. - pub(crate) fn insert(&mut self, index: usize, op: Op) { - assert!( - index <= self.len(), - "tried to insert at {} but len is {}", - index, - self.len() - ); - - let element = self.ops.len(); - self.ops.push(op); - - let old_len = self.len(); - if let Some(root) = self.root_node.as_mut() { - #[cfg(debug_assertions)] - root.check(); - - if root.is_full() { - let original_len = root.len(); - let new_root = OpTreeNode::new(); - - // move new_root to root position - let old_root = mem::replace(root, new_root); - - root.length += old_root.len(); - root.index = old_root.index.clone(); - root.children.push(old_root); - root.split_child(0, &self.ops); - - assert_eq!(original_len, root.len()); - - // after splitting the root has one element and two children, find which child the - // index is in - let first_child_len = root.children[0].len(); - let (child, insertion_index) = if first_child_len < index { - (&mut root.children[1], index - (first_child_len + 1)) - } else { - (&mut root.children[0], index) - }; - root.length += 1; - root.index.insert(&self.ops[element]); - child.insert_into_non_full_node(insertion_index, element, &self.ops) - } else { - root.insert_into_non_full_node(index, element, &self.ops) - } - 
} else { - let mut root = OpTreeNode::new(); - root.insert_into_non_full_node(index, element, &self.ops); - self.root_node = Some(root) - } - assert_eq!(self.len(), old_len + 1, "{:#?}", self); - } - - /// Get the `element` at `index` in the sequence. - pub(crate) fn get(&self, index: usize) -> Option<&Op> { - self.root_node - .as_ref() - .and_then(|n| n.get(index)) - .map(|n| &self.ops[n]) - } - - // this replaces get_mut() because it allows the indexes to update correctly - pub(crate) fn update(&mut self, index: usize, f: F) - where - F: FnOnce(&mut Op), - { - if self.len() > index { - let n = self.root_node.as_ref().unwrap().get(index).unwrap(); - let new_element = self.ops.get_mut(n).unwrap(); - let old_vis = new_element.visible(); - f(new_element); - let vis = ChangeVisibility { - old_vis, - new_vis: new_element.visible(), - op: new_element, - }; - self.root_node.as_mut().unwrap().update(index, vis); - } - } - - /// Removes the element at `index` from the sequence. - /// - /// # Panics - /// - /// Panics if `index` is out of bounds. 
- pub(crate) fn remove(&mut self, index: usize) -> Op { - if let Some(root) = self.root_node.as_mut() { - #[cfg(debug_assertions)] - let len = root.check(); - let old = root.remove(index, &self.ops); - - if root.elements.is_empty() { - if root.is_leaf() { - self.root_node = None; - } else { - self.root_node = Some(root.children.remove(0)); - } - } - - #[cfg(debug_assertions)] - debug_assert_eq!(len, self.root_node.as_ref().map_or(0, |r| r.check()) + 1); - self.ops[old].clone() - } else { - panic!("remove from empty tree") - } - } -} - -impl Default for OpTreeInternal { - fn default() -> Self { - Self::new() - } -} - -impl PartialEq for OpTreeInternal { - fn eq(&self, other: &Self) -> bool { - self.len() == other.len() && self.iter().zip(other.iter()).all(|(a, b)| a == b) - } -} - -impl<'a> IntoIterator for &'a OpTreeInternal { - type Item = &'a Op; - - type IntoIter = Iter<'a>; - - fn into_iter(self) -> Self::IntoIter { - Iter { - inner: self, - index: 0, - } - } -} - -pub(crate) struct Iter<'a> { - inner: &'a OpTreeInternal, - index: usize, -} - -impl<'a> Iterator for Iter<'a> { - type Item = &'a Op; - - fn next(&mut self) -> Option { - self.index += 1; - self.inner.get(self.index - 1) - } - - fn nth(&mut self, n: usize) -> Option { - self.index += n + 1; - self.inner.get(self.index - 1) - } -} - -#[derive(Debug, Clone, PartialEq)] -struct CounterData { - pos: usize, - val: i64, - succ: HashSet, - op: Op, -} - -#[cfg(test)] -mod tests { - use crate::legacy as amp; - use crate::types::{Op, OpId}; - - use super::*; - - fn op() -> Op { - let zero = OpId::new(0, 0); - Op { - id: zero, - action: amp::OpType::Put(0.into()), - key: zero.into(), - succ: Default::default(), - pred: Default::default(), - insert: false, - } - } - - #[test] - fn insert() { - let mut t: OpTree = OpTree::new(); - - t.internal.insert(0, op()); - t.internal.insert(1, op()); - t.internal.insert(0, op()); - t.internal.insert(0, op()); - t.internal.insert(0, op()); - t.internal.insert(3, op()); - 
t.internal.insert(4, op()); - } - - #[test] - fn insert_book() { - let mut t: OpTree = OpTree::new(); - - for i in 0..100 { - t.internal.insert(i % 2, op()); - } - } - - #[test] - fn insert_book_vec() { - let mut t: OpTree = OpTree::new(); - let mut v = Vec::new(); - - for i in 0..100 { - t.internal.insert(i % 3, op()); - v.insert(i % 3, op()); - - assert_eq!(v, t.internal.iter().cloned().collect::>()) - } - } -} diff --git a/rust/automerge/src/op_tree/iter.rs b/rust/automerge/src/op_tree/iter.rs deleted file mode 100644 index 0b19f359..00000000 --- a/rust/automerge/src/op_tree/iter.rs +++ /dev/null @@ -1,442 +0,0 @@ -use std::cmp::Ordering; - -use crate::types::Op; - -use super::{OpTreeInternal, OpTreeNode}; - -#[derive(Clone)] -pub(crate) struct OpTreeIter<'a>(Inner<'a>); - -impl<'a> OpTreeIter<'a> { - pub(crate) fn new(tree: &'a OpTreeInternal) -> OpTreeIter<'a> { - Self( - tree.root_node - .as_ref() - .map(|root| Inner::NonEmpty { - // This is a guess at the average depth of an OpTree - ancestors: Vec::with_capacity(6), - current: NodeIter { - node: root, - index: 0, - }, - cumulative_index: 0, - root_node: root, - ops: &tree.ops, - }) - .unwrap_or(Inner::Empty), - ) - } -} - -impl<'a> Iterator for OpTreeIter<'a> { - type Item = &'a Op; - - fn next(&mut self) -> Option { - self.0.next() - } - - fn nth(&mut self, n: usize) -> Option { - self.0.nth(n) - } -} - -#[derive(Clone)] -enum Inner<'a> { - Empty, - NonEmpty { - // A stack of nodes in the optree which we have descended in to to get to the current - // element. - ancestors: Vec>, - current: NodeIter<'a>, - // How far through the whole optree we are - cumulative_index: usize, - root_node: &'a OpTreeNode, - ops: &'a [Op], - }, -} - -/// A node in the op tree which we are iterating over -#[derive(Clone)] -struct NodeIter<'a> { - /// The node itself - node: &'a OpTreeNode, - /// The index of the next element we will pull from the node. 
This means something different - /// depending on whether the node is a leaf node or not. If the node is a leaf node then this - /// index is the index in `node.elements` which will be returned on the next call to `next()`. - /// If the node is not an internal node then this index is the index of `children` which we are - /// currently iterating as well as being the index of the next element of `elements` which we - /// will return once we have finished iterating over the child node. - index: usize, -} - -impl<'a> Iterator for Inner<'a> { - type Item = &'a Op; - - fn next(&mut self) -> Option { - match self { - Inner::Empty => None, - Inner::NonEmpty { - ancestors, - ops, - current, - cumulative_index, - .. - } => { - if current.node.is_leaf() { - // If we're in a leaf node and we haven't exhausted it yet we just return the elements - // of the leaf node - if current.index < current.node.len() { - let result = current.node.elements[current.index]; - current.index += 1; - *cumulative_index += 1; - Some(&ops[result]) - } else { - // We've exhausted the leaf node, we must find the nearest non-exhausted parent (lol) - let node_iter = loop { - if let Some( - node_iter @ NodeIter { - node: parent, - index: parent_index, - }, - ) = ancestors.pop() - { - // We've exhausted this parent - if parent_index >= parent.elements.len() { - continue; - } else { - // This parent still has elements to process, let's use it! 
- break node_iter; - } - } else { - // No parents left, we're done - return None; - } - }; - // if we've finished the elements in a leaf node and there's a parent node then we - // return the element from the parent node which is one after the index at which we - // descended into the child - *current = node_iter; - let result = current.node.elements[current.index]; - current.index += 1; - *cumulative_index += 1; - Some(&ops[result]) - } - } else { - // If we're in a non-leaf node then the last iteration returned an element from the - // current nodes `elements`, so we must now descend into a leaf child - ancestors.push(current.clone()); - loop { - let child = ¤t.node.children[current.index]; - current.index = 0; - if !child.is_leaf() { - ancestors.push(NodeIter { - node: child, - index: 0, - }); - current.node = child - } else { - current.node = child; - break; - } - } - self.next() - } - } - } - } - - fn nth(&mut self, n: usize) -> Option { - match self { - Self::Empty => None, - Self::NonEmpty { - root_node, - ops, - cumulative_index, - current, - ancestors, - .. - } => { - // Make sure that we don't rewind when calling nth more than once - if n < *cumulative_index { - None - } else if n >= root_node.len() { - *cumulative_index = root_node.len() - 1; - None - } else { - // rather than trying to go back up through the ancestors to find the right - // node we just start at the root. 
- *current = NodeIter { - node: root_node, - index: n, - }; - *cumulative_index = 0; - ancestors.clear(); - while !current.node.is_leaf() { - for (child_index, child) in current.node.children.iter().enumerate() { - match (*cumulative_index + child.len()).cmp(&n) { - Ordering::Less => { - *cumulative_index += child.len() + 1; - current.index = child_index + 1; - } - Ordering::Equal => { - *cumulative_index += child.len() + 1; - current.index = child_index + 1; - return Some(&ops[current.node.elements[child_index]]); - } - Ordering::Greater => { - current.index = child_index; - let old = std::mem::replace( - current, - NodeIter { - node: child, - index: 0, - }, - ); - ancestors.push(old); - break; - } - } - } - } - // we're in a leaf node and we kept track of the cumulative index as we went, - let index_in_this_node = n.saturating_sub(*cumulative_index); - current.index = index_in_this_node + 1; - Some(&ops[current.node.elements[index_in_this_node]]) - } - } - } - } -} - -#[cfg(test)] -mod tests { - use super::super::OpTreeInternal; - use crate::types::{Key, Op, OpId, OpType, ScalarValue}; - use proptest::prelude::*; - - #[derive(Clone)] - enum Action { - Insert(usize, Op), - Delete(usize), - } - - impl std::fmt::Debug for Action { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::Insert(index, ..) => write!(f, "Insert({})", index), - Self::Delete(index) => write!(f, "Delete({})", index), - } - } - } - - // A struct which impls Debug by only printing the counters of the IDs of the ops it wraps. - // This is useful because the only difference between the ops that we generate is the counter - // of their IDs. Wrapping a Vec in DebugOps will result in output from assert! etc. which - // only shows the counters. For example, the output of a failing assert_eq! like this - // - // assert_eq!(DebugOps(&ops1), DebugOps(&ops2)) - // - // Might look like this - // - // left: `[0,1,2,3] - // right: `[0,1,2,3,4] - // - // i.e. 
all the other details of the ops are elided - #[derive(PartialEq)] - struct DebugOps<'a>(&'a [Op]); - - impl<'a> std::fmt::Debug for DebugOps<'a> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "[")?; - for (index, op) in self.0.iter().enumerate() { - if index < self.0.len() - 1 { - write!(f, "{},", op.id.counter())?; - } else { - write!(f, "{}]", op.id.counter())? - } - } - Ok(()) - } - } - - fn op(counter: u64) -> Op { - Op { - action: OpType::Put(ScalarValue::Uint(counter)), - id: OpId::new(counter, 0), - key: Key::Map(0), - succ: Default::default(), - pred: Default::default(), - insert: false, - } - } - - /// A model for a property based test of the OpTreeIter. We generate a set of actions, each - /// action pertaining to a `model` - which is just a `Vec`. As we generate each action we - /// apply it to the model and record the action we took. In the property test we replay the - /// same actions against an `OpTree` and check that the iterator returns the same result as the - /// `model`. 
- #[derive(Clone)] - struct Model { - actions: Vec, - model: Vec, - } - - impl std::fmt::Debug for Model { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("Model") - .field("actions", &self.actions) - .field("model", &DebugOps(&self.model)) - .finish() - } - } - - impl Model { - fn insert(&self, index: usize, next_op_counter: u64) -> Self { - let mut actions = self.actions.clone(); - let op = op(next_op_counter); - actions.push(Action::Insert(index, op.clone())); - let mut model = self.model.clone(); - model.insert(index, op); - Self { actions, model } - } - - fn delete(&self, index: usize) -> Self { - let mut actions = self.actions.clone(); - actions.push(Action::Delete(index)); - let mut model = self.model.clone(); - model.remove(index); - Self { actions, model } - } - - fn next(self, next_op_counter: u64) -> impl Strategy { - if self.model.is_empty() { - Just(self.insert(0, next_op_counter)).boxed() - } else { - // Note that we have to feed `self` through the `prop_flat_map` using `Just` to - // appease the borrow checker, this is annoying because it does obscure the meaning - // of the code heere which is basically "decide whether the next action should be - // insert, if it is insert choose an index between 0..model.len() + 1 and generate - // an op to insert, otherwise choose an index between 0..model.len() and generate a - // delete action". 
- // - // 95% chance of inserting to make sure we deal with large lists - (proptest::bool::weighted(0.95), Just(self)) - .prop_flat_map(move |(insert, model)| { - if insert { - (0..model.model.len() + 1, Just(model)) - .prop_map(move |(index, model)| { - model.insert(index, next_op_counter) - }) - .boxed() - } else { - ((0..model.model.len()), Just(model)) - .prop_map(move |(index, model)| model.delete(index)) - .boxed() - } - }) - .boxed() - } - } - } - - fn model() -> impl Strategy { - (0_u64..150).prop_flat_map(|num_steps| { - let mut strat = Just(( - 0, - Model { - actions: Vec::new(), - model: Vec::new(), - }, - )) - .boxed(); - for _ in 0..num_steps { - strat = strat - // Note the counter, which we feed through each `prop_flat_map`, incrementing - // it by one each time. This mean that the generated ops have ascending (but - // not necessarily consecutive because not every `Action` is an `Insert`) - // counters. This makes it easier to debug failures - if we just used a random - // counter it would be much harder to see where things are out of order. - .prop_flat_map(|(counter, model)| { - let next_counter = counter + 1; - model.next(counter).prop_map(move |m| (next_counter, m)) - }) - .boxed(); - } - strat.prop_map(|(_, model)| model) - }) - } - - fn make_optree(actions: &[Action]) -> super::OpTreeInternal { - let mut optree = OpTreeInternal::new(); - for action in actions { - match action { - Action::Insert(index, op) => optree.insert(*index, op.clone()), - Action::Delete(index) => { - optree.remove(*index); - } - } - } - optree - } - - /// A model for calls to `nth`. 
`NthModel::n` is guarnateed to be in `(0..model.len())` - #[derive(Clone)] - struct NthModel { - model: Vec, - actions: Vec, - n: usize, - } - - impl std::fmt::Debug for NthModel { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("Model") - .field("actions", &self.actions) - .field("model", &DebugOps(&self.model)) - .field("n", &self.n) - .finish() - } - } - - fn nth_model() -> impl Strategy { - model().prop_flat_map(|model| { - if model.model.is_empty() { - Just(NthModel { - model: model.model, - actions: model.actions, - n: 0, - }) - .boxed() - } else { - (0..model.model.len(), Just(model)) - .prop_map(|(index, model)| NthModel { - model: model.model, - actions: model.actions, - n: index, - }) - .boxed() - } - }) - } - - proptest! { - #[test] - fn optree_iter_proptest(model in model()) { - let optree = make_optree(&model.actions); - let iter = super::OpTreeIter::new(&optree); - let iterated = iter.cloned().collect::>(); - assert_eq!(DebugOps(&model.model), DebugOps(&iterated)) - } - - #[test] - fn optree_iter_nth(model in nth_model()) { - let optree = make_optree(&model.actions); - let mut iter = super::OpTreeIter::new(&optree); - let mut model_iter = model.model.iter(); - assert_eq!(model_iter.nth(model.n), iter.nth(model.n)); - - let tail = iter.cloned().collect::>(); - let expected_tail = model_iter.cloned().collect::>(); - assert_eq!(DebugOps(tail.as_slice()), DebugOps(expected_tail.as_slice())); - } - } -} diff --git a/rust/automerge/src/parents.rs b/rust/automerge/src/parents.rs deleted file mode 100644 index e1c5cc66..00000000 --- a/rust/automerge/src/parents.rs +++ /dev/null @@ -1,121 +0,0 @@ -use crate::op_set; -use crate::op_set::OpSet; -use crate::types::{ListEncoding, ObjId}; -use crate::{exid::ExId, Prop}; - -/// An iterator over the "parents" of an object -/// -/// The "parent" of an object in this context is the ([`ExId`], [`Prop`]) pair which specifies the -/// location of this object in the composite object 
which contains it. Each element in the iterator -/// is a [`Parent`], yielded in reverse order. This means that once the iterator returns `None` you -/// have reached the root of the document. -/// -/// This is returned by [`crate::ReadDoc::parents`] -#[derive(Debug)] -pub struct Parents<'a> { - pub(crate) obj: ObjId, - pub(crate) ops: &'a OpSet, -} - -impl<'a> Parents<'a> { - /// Return the path this `Parents` represents - /// - /// This is _not_ in reverse order. - pub fn path(self) -> Vec<(ExId, Prop)> { - let mut path = self - .map(|Parent { obj, prop, .. }| (obj, prop)) - .collect::>(); - path.reverse(); - path - } - - /// Like `path` but returns `None` if the target is not visible - pub fn visible_path(self) -> Option> { - let mut path = Vec::new(); - for Parent { obj, prop, visible } in self { - if !visible { - return None; - } - path.push((obj, prop)) - } - path.reverse(); - Some(path) - } -} - -impl<'a> Iterator for Parents<'a> { - type Item = Parent; - - fn next(&mut self) -> Option { - if self.obj.is_root() { - None - } else if let Some(op_set::Parent { obj, key, visible }) = self.ops.parent_object(&self.obj) - { - self.obj = obj; - Some(Parent { - obj: self.ops.id_to_exid(self.obj.0), - prop: self - .ops - .export_key(self.obj, key, ListEncoding::List) - .unwrap(), - visible, - }) - } else { - None - } - } -} - -/// A component of a path to an object -#[derive(Debug, PartialEq, Eq)] -pub struct Parent { - /// The object ID this component refers to - pub obj: ExId, - /// The property within `obj` this component refers to - pub prop: Prop, - /// Whether this component is "visible" - /// - /// An "invisible" component is one where the property is hidden, either because it has been - /// deleted or because there is a conflict on this (object, property) pair and this value does - /// not win the conflict. 
- pub visible: bool, -} - -#[cfg(test)] -mod tests { - use super::Parent; - use crate::{transaction::Transactable, Prop, ReadDoc}; - - #[test] - fn test_invisible_parents() { - // Create a document with a list of objects, then delete one of the objects, then generate - // a path to the deleted object. - - let mut doc = crate::AutoCommit::new(); - let list = doc - .put_object(crate::ROOT, "list", crate::ObjType::List) - .unwrap(); - let obj1 = doc.insert_object(&list, 0, crate::ObjType::Map).unwrap(); - let _obj2 = doc.insert_object(&list, 1, crate::ObjType::Map).unwrap(); - doc.put(&obj1, "key", "value").unwrap(); - doc.delete(&list, 0).unwrap(); - - let mut parents = doc.parents(&obj1).unwrap().collect::>(); - parents.reverse(); - assert_eq!( - parents, - vec![ - Parent { - obj: crate::ROOT, - prop: Prop::Map("list".to_string()), - visible: true, - }, - Parent { - obj: list, - prop: Prop::Seq(0), - visible: false, - }, - ] - ); - } -} diff --git a/rust/automerge/src/query.rs b/rust/automerge/src/query.rs deleted file mode 100644 index 640ecf8d..00000000 --- a/rust/automerge/src/query.rs +++ /dev/null @@ -1,362 +0,0 @@ -use crate::op_tree::{OpSetMetadata, OpTree, OpTreeNode}; -use crate::types::{ - Clock, Counter, Key, ListEncoding, Op, OpId, OpType, ScalarValue, TextEncoding, -}; -use fxhash::FxBuildHasher; -use std::cmp::Ordering; -use std::collections::{HashMap, HashSet}; -use std::fmt::Debug; - -mod elem_id_pos; -mod insert; -mod keys; -mod keys_at; -mod len; -mod len_at; -mod list_range; -mod list_range_at; -mod list_vals; -mod list_vals_at; -mod map_range; -mod map_range_at; -mod nth; -mod nth_at; -mod opid; -mod opid_vis; -mod prop; -mod prop_at; -mod seek_op; -mod seek_op_with_patch; - -pub(crate) use elem_id_pos::ElemIdPos; -pub(crate) use insert::InsertNth; -pub(crate) use keys::Keys; -pub(crate) use keys_at::KeysAt; -pub(crate) use len::Len; -pub(crate) use len_at::LenAt; -pub(crate) use list_range::ListRange; -pub(crate) use list_range_at::ListRangeAt; 
-pub(crate) use list_vals::ListVals; -pub(crate) use list_vals_at::ListValsAt; -pub(crate) use map_range::MapRange; -pub(crate) use map_range_at::MapRangeAt; -pub(crate) use nth::Nth; -pub(crate) use nth_at::NthAt; -pub(crate) use opid::OpIdSearch; -pub(crate) use opid_vis::OpIdVisSearch; -pub(crate) use prop::Prop; -pub(crate) use prop_at::PropAt; -pub(crate) use seek_op::SeekOp; -pub(crate) use seek_op_with_patch::SeekOpWithPatch; - -// use a struct for the args for clarity as they are passed up the update chain in the optree -#[derive(Debug, Clone)] -pub(crate) struct ChangeVisibility<'a> { - pub(crate) old_vis: bool, - pub(crate) new_vis: bool, - pub(crate) op: &'a Op, -} - -#[derive(Debug, Clone, PartialEq)] -pub(crate) struct CounterData { - pos: usize, - val: i64, - succ: HashSet, - op: Op, -} - -pub(crate) trait TreeQuery<'a>: Clone + Debug { - fn equiv(&mut self, _other: &Self) -> bool { - false - } - - fn can_shortcut_search(&mut self, _tree: &'a OpTree) -> bool { - false - } - - #[inline(always)] - fn query_node_with_metadata( - &mut self, - child: &'a OpTreeNode, - _m: &OpSetMetadata, - ops: &[Op], - ) -> QueryResult { - self.query_node(child, ops) - } - - fn query_node(&mut self, _child: &'a OpTreeNode, _ops: &[Op]) -> QueryResult { - QueryResult::Descend - } - - #[inline(always)] - fn query_element_with_metadata(&mut self, element: &'a Op, _m: &OpSetMetadata) -> QueryResult { - self.query_element(element) - } - - fn query_element(&mut self, _element: &'a Op) -> QueryResult { - panic!("invalid element query") - } -} - -#[derive(Debug, Clone, PartialEq)] -pub(crate) enum QueryResult { - Next, - /// Skip this many elements, only allowed from the root node. 
- Skip(usize), - Descend, - Finish, -} - -#[derive(Clone, Debug, PartialEq)] -struct TextWidth { - utf8: usize, - utf16: usize, -} - -impl TextWidth { - fn add_op(&mut self, op: &Op) { - self.utf8 += op.width(ListEncoding::Text(TextEncoding::Utf8)); - self.utf16 += op.width(ListEncoding::Text(TextEncoding::Utf16)); - } - - fn remove_op(&mut self, op: &Op) { - // Why are we using saturating_sub here? Shouldn't this always be greater than 0? - // - // In the case of objects which are _not_ `Text` we may end up subtracting more than the - // current width. This can happen if the elements in a list are `ScalarValue::str` and - // there are conflicting elements for the same index in the list. Like so: - // - // ```notrust - // [ - // "element", - // ["conflict1", "conflict2_longer"], - // "element" - // ] - // ``` - // - // Where there are two conflicted elements at index 1 - // - // in `Index::insert` and `Index::change_visibility` we add the width of the inserted op in - // utf8 and utf16 to the current width, but only if there was not a previous element for - // that index. Imagine that we encounter the "conflict1" op first, then we will add the - // length of 'conflict1' to the text widths. When 'conflict2_longer' is added we don't do - // anything because we've already seen an op for this index. Imagine that later we remove - // the `conflict2_longer` op, then we will end up subtracting the length of - // 'conflict2_longer' from the text widths, hence, `saturating_sub`. This isn't a problem - // because for non text objects we don't need the text widths to be accurate anyway. - // - // Really this is a sign that we should be tracking the type of the Index (List or Text) at - // the type level, but for now we just look the other way. 
- self.utf8 = self - .utf8 - .saturating_sub(op.width(ListEncoding::Text(TextEncoding::Utf8))); - self.utf16 = self - .utf16 - .saturating_sub(op.width(ListEncoding::Text(TextEncoding::Utf16))); - } - - fn merge(&mut self, other: &TextWidth) { - self.utf8 += other.utf8; - self.utf16 += other.utf16; - } -} - -#[derive(Clone, Debug, PartialEq)] -pub(crate) struct Index { - /// The map of visible keys to the number of visible operations for that key. - visible: HashMap, - visible_text: TextWidth, - /// Set of opids found in this node and below. - ops: HashSet, -} - -impl Index { - pub(crate) fn new() -> Self { - Index { - visible: Default::default(), - visible_text: TextWidth { utf8: 0, utf16: 0 }, - ops: Default::default(), - } - } - - /// Get the number of visible elements in this index. - pub(crate) fn visible_len(&self, encoding: ListEncoding) -> usize { - match encoding { - ListEncoding::List => self.visible.len(), - ListEncoding::Text(TextEncoding::Utf8) => self.visible_text.utf8, - ListEncoding::Text(TextEncoding::Utf16) => self.visible_text.utf16, - } - } - - pub(crate) fn has_visible(&self, seen: &Key) -> bool { - self.visible.contains_key(seen) - } - - /// Whether `opid` is in this node or any below it - pub(crate) fn has_op(&self, opid: &OpId) -> bool { - self.ops.contains(opid) - } - - pub(crate) fn change_vis<'a>( - &mut self, - change_vis: ChangeVisibility<'a>, - ) -> ChangeVisibility<'a> { - let ChangeVisibility { - old_vis, - new_vis, - op, - } = &change_vis; - let key = op.elemid_or_key(); - match (old_vis, new_vis) { - (true, false) => match self.visible.get(&key).copied() { - Some(n) if n == 1 => { - self.visible.remove(&key); - self.visible_text.remove_op(op); - } - Some(n) => { - self.visible.insert(key, n - 1); - } - None => panic!("remove overun in index"), - }, - (false, true) => { - if let Some(n) = self.visible.get(&key) { - self.visible.insert(key, n + 1); - } else { - self.visible.insert(key, 1); - self.visible_text.add_op(op); - } - } - _ 
=> {} - } - change_vis - } - - pub(crate) fn insert(&mut self, op: &Op) { - self.ops.insert(op.id); - if op.visible() { - let key = op.elemid_or_key(); - if let Some(n) = self.visible.get(&key) { - self.visible.insert(key, n + 1); - } else { - self.visible.insert(key, 1); - self.visible_text.add_op(op); - } - } - } - - pub(crate) fn remove(&mut self, op: &Op) { - self.ops.remove(&op.id); - if op.visible() { - let key = op.elemid_or_key(); - match self.visible.get(&key).copied() { - Some(n) if n == 1 => { - self.visible.remove(&key); - self.visible_text.remove_op(op); - } - Some(n) => { - self.visible.insert(key, n - 1); - } - None => panic!("remove overun in index"), - } - } - } - - pub(crate) fn merge(&mut self, other: &Index) { - for id in &other.ops { - self.ops.insert(*id); - } - for (elem, other_len) in other.visible.iter() { - self.visible - .entry(*elem) - .and_modify(|len| *len += *other_len) - .or_insert(*other_len); - } - self.visible_text.merge(&other.visible_text); - } -} - -impl Default for Index { - fn default() -> Self { - Self::new() - } -} - -#[derive(Debug, Clone, PartialEq, Default)] -pub(crate) struct VisWindow { - counters: HashMap, -} - -impl VisWindow { - fn visible_at(&mut self, op: &Op, pos: usize, clock: &Clock) -> bool { - if !clock.covers(&op.id) { - return false; - } - - let mut visible = false; - match op.action { - OpType::Put(ScalarValue::Counter(Counter { start, .. 
})) => { - self.counters.insert( - op.id, - CounterData { - pos, - val: start, - succ: op.succ.into_iter().cloned().collect(), - op: op.clone(), - }, - ); - if !op.succ.into_iter().any(|i| clock.covers(i)) { - visible = true; - } - } - OpType::Increment(inc_val) => { - for id in &op.pred { - // pred is always before op.id so we can see them - if let Some(mut entry) = self.counters.get_mut(id) { - entry.succ.remove(&op.id); - entry.val += inc_val; - entry.op.action = OpType::Put(ScalarValue::counter(entry.val)); - if !entry.succ.iter().any(|i| clock.covers(i)) { - visible = true; - } - } - } - } - _ => { - if !op.succ.into_iter().any(|i| clock.covers(i)) { - visible = true; - } - } - }; - visible - } - - pub(crate) fn seen_op(&self, op: &Op, pos: usize) -> Vec<(usize, Op)> { - let mut result = vec![]; - for pred in &op.pred { - if let Some(entry) = self.counters.get(pred) { - result.push((entry.pos, entry.op.clone())); - } - } - if result.is_empty() { - result.push((pos, op.clone())); - } - result - } -} - -pub(crate) fn binary_search_by(node: &OpTreeNode, ops: &[Op], f: F) -> usize -where - F: Fn(&Op) -> Ordering, -{ - let mut right = node.len(); - let mut left = 0; - while left < right { - let seq = (left + right) / 2; - if f(&ops[node.get(seq).unwrap()]) == Ordering::Less { - left = seq + 1; - } else { - right = seq; - } - } - left -} diff --git a/rust/automerge/src/query/elem_id_pos.rs b/rust/automerge/src/query/elem_id_pos.rs deleted file mode 100644 index cb559216..00000000 --- a/rust/automerge/src/query/elem_id_pos.rs +++ /dev/null @@ -1,74 +0,0 @@ -use crate::{ - op_tree::OpTreeNode, - types::{ElemId, ListEncoding, Op, OpId}, -}; - -use super::{QueryResult, TreeQuery}; - -/// Lookup the index in the list that this elemid occupies, includes hidden elements. 
-#[derive(Clone, Debug)] -pub(crate) struct ElemIdPos { - elem_opid: OpId, - pos: usize, - found: bool, - encoding: ListEncoding, -} - -impl ElemIdPos { - pub(crate) fn new(elemid: ElemId, encoding: ListEncoding) -> Self { - if elemid.is_head() { - Self { - elem_opid: elemid.0, - pos: 0, - found: true, - encoding, - } - } else { - Self { - elem_opid: elemid.0, - pos: 0, - found: false, - encoding, - } - } - } - - pub(crate) fn index(&self) -> Option { - if self.found { - Some(self.pos) - } else { - None - } - } -} - -impl<'a> TreeQuery<'a> for ElemIdPos { - fn query_node(&mut self, child: &OpTreeNode, _ops: &[Op]) -> QueryResult { - if self.found { - return QueryResult::Finish; - } - // if index has our element then we can continue - if child.index.has_op(&self.elem_opid) { - // element is in this node somewhere - QueryResult::Descend - } else { - // not in this node, try the next one - self.pos += child.index.visible_len(self.encoding); - QueryResult::Next - } - } - - fn query_element(&mut self, element: &crate::types::Op) -> QueryResult { - if self.found { - return QueryResult::Finish; - } - if element.elemid() == Some(ElemId(self.elem_opid)) { - // this is it - self.found = true; - return QueryResult::Finish; - } else if element.visible() { - self.pos += element.width(self.encoding); - } - QueryResult::Next - } -} diff --git a/rust/automerge/src/query/keys.rs b/rust/automerge/src/query/keys.rs deleted file mode 100644 index edda4fe9..00000000 --- a/rust/automerge/src/query/keys.rs +++ /dev/null @@ -1,54 +0,0 @@ -use crate::op_tree::OpTreeInternal; -use crate::types::Key; -use std::fmt::Debug; - -#[derive(Debug)] -pub(crate) struct Keys<'a> { - index: usize, - last_key: Option, - index_back: usize, - last_key_back: Option, - op_tree: &'a OpTreeInternal, -} - -impl<'a> Keys<'a> { - pub(crate) fn new(op_tree: &'a OpTreeInternal) -> Self { - Self { - index: 0, - last_key: None, - index_back: op_tree.len(), - last_key_back: None, - op_tree, - } - } -} - -impl<'a> 
Iterator for Keys<'a> { - type Item = Key; - - fn next(&mut self) -> Option { - for i in self.index..self.index_back { - let op = self.op_tree.get(i)?; - self.index += 1; - if Some(op.elemid_or_key()) != self.last_key && op.visible() { - self.last_key = Some(op.elemid_or_key()); - return Some(op.elemid_or_key()); - } - } - None - } -} - -impl<'a> DoubleEndedIterator for Keys<'a> { - fn next_back(&mut self) -> Option { - for i in (self.index..self.index_back).rev() { - let op = self.op_tree.get(i)?; - self.index_back -= 1; - if Some(op.elemid_or_key()) != self.last_key_back && op.visible() { - self.last_key_back = Some(op.elemid_or_key()); - return Some(op.elemid_or_key()); - } - } - None - } -} diff --git a/rust/automerge/src/query/len.rs b/rust/automerge/src/query/len.rs deleted file mode 100644 index 9134b11f..00000000 --- a/rust/automerge/src/query/len.rs +++ /dev/null @@ -1,23 +0,0 @@ -use crate::op_tree::OpTreeNode; -use crate::query::{QueryResult, TreeQuery}; -use crate::types::{ListEncoding, Op}; -use std::fmt::Debug; - -#[derive(Debug, Clone, PartialEq)] -pub(crate) struct Len { - pub(crate) len: usize, - encoding: ListEncoding, -} - -impl Len { - pub(crate) fn new(encoding: ListEncoding) -> Self { - Len { len: 0, encoding } - } -} - -impl<'a> TreeQuery<'a> for Len { - fn query_node(&mut self, child: &OpTreeNode, _ops: &[Op]) -> QueryResult { - self.len = child.index.visible_len(self.encoding); - QueryResult::Finish - } -} diff --git a/rust/automerge/src/query/list_range.rs b/rust/automerge/src/query/list_range.rs deleted file mode 100644 index d01082ab..00000000 --- a/rust/automerge/src/query/list_range.rs +++ /dev/null @@ -1,67 +0,0 @@ -use crate::exid::ExId; -use crate::op_tree::OpTreeInternal; -use crate::types::{ElemId, OpId}; -use crate::values::ValueIter; -use crate::{Automerge, Value}; -use std::fmt::Debug; -use std::ops::RangeBounds; - -#[derive(Debug)] -pub(crate) struct ListRange<'a, R: RangeBounds> { - range: R, - index: usize, - pos: usize, - 
last_elemid: Option, - next_result: Option<(usize, Value<'a>, OpId)>, - index_back: usize, - op_tree: &'a OpTreeInternal, -} - -impl<'a, R: RangeBounds> ListRange<'a, R> { - pub(crate) fn new(range: R, op_tree: &'a OpTreeInternal) -> Self { - Self { - range, - index: 0, // FIXME root_child.seek_to_pos(range.start) - pos: 0, // FIXME range.start - last_elemid: None, - next_result: None, - index_back: op_tree.len(), - op_tree, - } - } -} - -impl<'a, R: RangeBounds> ValueIter<'a> for ListRange<'a, R> { - fn next_value(&mut self, doc: &'a Automerge) -> Option<(Value<'a>, ExId)> { - self.next().map(|(_, val, id)| (val, doc.id_to_exid(id))) - } -} - -impl<'a, R: RangeBounds> Iterator for ListRange<'a, R> { - type Item = (usize, Value<'a>, OpId); - - // FIXME: this is fine if we're scanning everything (see values()) but could be much more efficient - // if we're scanning a narrow range on a large sequence ... we should be able to seek to the starting - // point and stop at the end point and not needless scan all the ops before and after the range - fn next(&mut self) -> Option { - for i in self.index..self.index_back { - let op = self.op_tree.get(i)?; - self.index += 1; - if op.visible() { - if op.elemid() != self.last_elemid { - self.last_elemid = op.elemid(); - self.pos += 1; - if self.range.contains(&(self.pos - 1)) { - let result = self.next_result.replace((self.pos - 1, op.value(), op.id)); - if result.is_some() { - return result; - } - } - } else if self.pos > 0 && self.range.contains(&(self.pos - 1)) { - self.next_result = Some((self.pos - 1, op.value(), op.id)); - } - } - } - self.next_result.take() - } -} diff --git a/rust/automerge/src/query/list_range_at.rs b/rust/automerge/src/query/list_range_at.rs deleted file mode 100644 index 33cdf548..00000000 --- a/rust/automerge/src/query/list_range_at.rs +++ /dev/null @@ -1,70 +0,0 @@ -use super::VisWindow; -use crate::exid::ExId; -use crate::op_tree::OpTreeInternal; -use crate::types::{Clock, ElemId, OpId}; -use 
crate::values::ValueIter; -use crate::{Automerge, Value}; -use std::fmt::Debug; -use std::ops::RangeBounds; - -#[derive(Debug)] -pub(crate) struct ListRangeAt<'a, R: RangeBounds> { - range: R, - index: usize, - pos: usize, - last_elemid: Option, - next_result: Option<(usize, Value<'a>, OpId)>, - index_back: usize, - op_tree: &'a OpTreeInternal, - clock: Clock, - window: VisWindow, -} - -impl<'a, R: RangeBounds> ValueIter<'a> for ListRangeAt<'a, R> { - fn next_value(&mut self, doc: &'a Automerge) -> Option<(Value<'a>, ExId)> { - self.next().map(|(_, val, id)| (val, doc.id_to_exid(id))) - } -} - -impl<'a, R: RangeBounds> ListRangeAt<'a, R> { - pub(crate) fn new(range: R, clock: Clock, op_tree: &'a OpTreeInternal) -> Self { - Self { - range, - index: 0, // FIXME root_child.seek_to_pos(range.start) - pos: 0, // FIXME range.start - last_elemid: None, - next_result: None, - index_back: op_tree.len(), - op_tree, - clock, - window: VisWindow::default(), - } - } -} - -impl<'a, R: RangeBounds> Iterator for ListRangeAt<'a, R> { - type Item = (usize, Value<'a>, OpId); - - fn next(&mut self) -> Option { - for i in self.index..self.index_back { - let op = self.op_tree.get(i)?; - let visible = self.window.visible_at(op, i, &self.clock); - self.index += 1; - if visible { - if op.elemid() != self.last_elemid { - self.last_elemid = op.elemid(); - self.pos += 1; - if self.range.contains(&(self.pos - 1)) { - let result = self.next_result.replace((self.pos - 1, op.value(), op.id)); - if result.is_some() { - return result; - } - } - } else if self.pos > 0 && self.range.contains(&(self.pos - 1)) { - self.next_result = Some((self.pos - 1, op.value(), op.id)); - } - } - } - self.next_result.take() - } -} diff --git a/rust/automerge/src/query/map_range.rs b/rust/automerge/src/query/map_range.rs deleted file mode 100644 index 909312db..00000000 --- a/rust/automerge/src/query/map_range.rs +++ /dev/null @@ -1,106 +0,0 @@ -use crate::exid::ExId; -use crate::op_tree::{OpSetMetadata, 
OpTreeInternal}; -use crate::types::{Key, OpId}; -use crate::values::ValueIter; -use crate::{Automerge, Value}; -use std::fmt::Debug; -use std::ops::RangeBounds; - -#[derive(Debug)] -pub(crate) struct MapRange<'a, R: RangeBounds> { - range: R, - index: usize, - last_key: Option, - next_result: Option<(&'a str, Value<'a>, OpId)>, - index_back: usize, - last_key_back: Option, - op_tree: &'a OpTreeInternal, - meta: &'a OpSetMetadata, -} - -impl<'a, R: RangeBounds> ValueIter<'a> for MapRange<'a, R> { - fn next_value(&mut self, doc: &'a Automerge) -> Option<(Value<'a>, ExId)> { - self.next().map(|(_, val, id)| (val, doc.id_to_exid(id))) - } -} - -impl<'a, R: RangeBounds> MapRange<'a, R> { - pub(crate) fn new(range: R, op_tree: &'a OpTreeInternal, meta: &'a OpSetMetadata) -> Self { - Self { - range, - index: 0, - last_key: None, - next_result: None, - index_back: op_tree.len(), - last_key_back: None, - op_tree, - meta, - } - } -} - -impl<'a, R: RangeBounds> Iterator for MapRange<'a, R> { - type Item = (&'a str, Value<'a>, OpId); - - // FIXME: this is fine if we're scanning everything (see values()) but could be much more efficient - // if we're scanning a narrow range on a map with many keys... 
we should be able to seek to the starting - // point and stop at the end point and not needless scan all the ops before and after the range - fn next(&mut self) -> Option { - for i in self.index..self.index_back { - let op = self.op_tree.get(i)?; - self.index += 1; - if op.visible() { - let prop = match op.key { - Key::Map(m) => self.meta.props.get(m), - Key::Seq(_) => return None, // this is a list - }; - if self.range.contains(prop) { - let result = self.next_result.replace((prop, op.value(), op.id)); - if Some(op.key) != self.last_key { - self.last_key = Some(op.key); - if result.is_some() { - return result; - } - } - } - } - } - self.next_result.take() - } -} - -impl<'a, R: RangeBounds> DoubleEndedIterator for MapRange<'a, R> { - fn next_back(&mut self) -> Option { - for i in (self.index..self.index_back).rev() { - let op = self.op_tree.get(i)?; - self.index_back -= 1; - - if Some(op.key) != self.last_key_back && op.visible() { - self.last_key_back = Some(op.key); - let prop = match op.key { - Key::Map(m) => self.meta.props.get(m), - Key::Seq(_) => return None, // this is a list - }; - if self.range.contains(prop) { - return Some((prop, op.value(), op.id)); - } - } - } - - // we're now overlapping the index and index_back so try and take the result from the next query - if let Some((prop, a, b)) = self.next_result.take() { - let last_prop = match self.last_key_back { - None => None, - Some(Key::Map(u)) => Some(self.meta.props.get(u).as_str()), - Some(Key::Seq(_)) => None, - }; - - // we can only use this result if we haven't ended in the prop's state (to account for - // conflicts). 
- if Some(prop) != last_prop { - return Some((prop, a, b)); - } - } - None - } -} diff --git a/rust/automerge/src/query/map_range_at.rs b/rust/automerge/src/query/map_range_at.rs deleted file mode 100644 index c5c5af06..00000000 --- a/rust/automerge/src/query/map_range_at.rs +++ /dev/null @@ -1,119 +0,0 @@ -use crate::clock::Clock; -use crate::exid::ExId; -use crate::op_tree::{OpSetMetadata, OpTreeInternal}; -use crate::types::{Key, OpId}; -use crate::values::ValueIter; -use crate::{Automerge, Value}; -use std::fmt::Debug; -use std::ops::RangeBounds; - -use super::VisWindow; - -#[derive(Debug)] -pub(crate) struct MapRangeAt<'a, R: RangeBounds> { - clock: Clock, - window: VisWindow, - - range: R, - index: usize, - last_key: Option, - next_result: Option<(&'a str, Value<'a>, OpId)>, - - index_back: usize, - last_key_back: Option, - - op_tree: &'a OpTreeInternal, - meta: &'a OpSetMetadata, -} - -impl<'a, R: RangeBounds> ValueIter<'a> for MapRangeAt<'a, R> { - fn next_value(&mut self, doc: &'a Automerge) -> Option<(Value<'a>, ExId)> { - self.next().map(|(_, val, id)| (val, doc.id_to_exid(id))) - } -} - -impl<'a, R: RangeBounds> MapRangeAt<'a, R> { - pub(crate) fn new( - range: R, - op_tree: &'a OpTreeInternal, - meta: &'a OpSetMetadata, - clock: Clock, - ) -> Self { - Self { - clock, - window: VisWindow::default(), - range, - index: 0, - last_key: None, - next_result: None, - index_back: op_tree.len(), - last_key_back: None, - op_tree, - meta, - } - } -} - -impl<'a, R: RangeBounds> Iterator for MapRangeAt<'a, R> { - type Item = (&'a str, Value<'a>, OpId); - - fn next(&mut self) -> Option { - for i in self.index..self.index_back { - let op = self.op_tree.get(i)?; - let visible = self.window.visible_at(op, i, &self.clock); - self.index += 1; - if visible { - let prop = match op.key { - Key::Map(m) => self.meta.props.get(m), - Key::Seq(_) => return None, // this is a list - }; - if self.range.contains(prop) { - let result = self.next_result.replace((prop, op.value(), 
op.id)); - if Some(op.key) != self.last_key { - self.last_key = Some(op.key); - if result.is_some() { - return result; - } - } - } - } - } - self.next_result.take() - } -} - -impl<'a, R: RangeBounds> DoubleEndedIterator for MapRangeAt<'a, R> { - fn next_back(&mut self) -> Option { - for i in (self.index..self.index_back).rev() { - let op = self.op_tree.get(i)?; - let visible = self.window.visible_at(op, i, &self.clock); - self.index_back -= 1; - if Some(op.key) != self.last_key_back && visible { - self.last_key_back = Some(op.key); - let prop = match op.key { - Key::Map(m) => self.meta.props.get(m), - Key::Seq(_) => return None, // this is a list - }; - if self.range.contains(prop) { - return Some((prop, op.value(), op.id)); - } - } - } - - // we're now overlapping the index and index_back so try and take the result from the next query - if let Some((prop, a, b)) = self.next_result.take() { - let last_prop = match self.last_key_back { - None => None, - Some(Key::Map(u)) => Some(self.meta.props.get(u).as_str()), - Some(Key::Seq(_)) => None, - }; - - // we can only use this result if we haven't ended in the prop's state (to account for - // conflicts). - if Some(prop) != last_prop { - return Some((prop, a, b)); - } - } - None - } -} diff --git a/rust/automerge/src/query/opid_vis.rs b/rust/automerge/src/query/opid_vis.rs deleted file mode 100644 index c0d2cc89..00000000 --- a/rust/automerge/src/query/opid_vis.rs +++ /dev/null @@ -1,62 +0,0 @@ -use crate::op_tree::OpTreeNode; -use crate::query::{QueryResult, TreeQuery}; -use crate::types::{Key, Op, OpId}; - -/// Search for an OpId in a tree. -/// Returns the index of the operation in the tree. 
-#[derive(Debug, Clone, PartialEq)] -pub(crate) struct OpIdVisSearch { - target: OpId, - found: bool, - pub(crate) visible: bool, - key: Option, -} - -impl OpIdVisSearch { - pub(crate) fn new(target: OpId) -> Self { - OpIdVisSearch { - target, - found: false, - visible: true, - key: None, - } - } - - pub(crate) fn key(&self) -> &Option { - &self.key - } -} - -impl<'a> TreeQuery<'a> for OpIdVisSearch { - fn query_node(&mut self, child: &OpTreeNode, _ops: &[Op]) -> QueryResult { - if child.index.ops.contains(&self.target) { - QueryResult::Descend - } else { - QueryResult::Next - } - } - - fn query_element(&mut self, element: &Op) -> QueryResult { - if element.id == self.target { - self.found = true; - self.key = Some(element.elemid_or_key()); - if element.visible() { - QueryResult::Next - } else { - self.visible = false; - QueryResult::Finish - } - } else if self.found { - if self.key != Some(element.elemid_or_key()) { - QueryResult::Finish - } else if element.visible() { - self.visible = false; - QueryResult::Finish - } else { - QueryResult::Next - } - } else { - QueryResult::Next - } - } -} diff --git a/rust/automerge/src/query/prop.rs b/rust/automerge/src/query/prop.rs deleted file mode 100644 index d2a11361..00000000 --- a/rust/automerge/src/query/prop.rs +++ /dev/null @@ -1,49 +0,0 @@ -use crate::op_tree::{OpSetMetadata, OpTreeNode}; -use crate::query::{binary_search_by, QueryResult, TreeQuery}; -use crate::types::{Key, Op}; -use std::fmt::Debug; - -#[derive(Debug, Clone, PartialEq)] -pub(crate) struct Prop<'a> { - key: Key, - pub(crate) ops: Vec<&'a Op>, - pub(crate) ops_pos: Vec, - pub(crate) pos: usize, -} - -impl<'a> Prop<'a> { - pub(crate) fn new(prop: usize) -> Self { - Prop { - key: Key::Map(prop), - ops: vec![], - ops_pos: vec![], - pos: 0, - } - } -} - -impl<'a> TreeQuery<'a> for Prop<'a> { - fn query_node_with_metadata( - &mut self, - child: &'a OpTreeNode, - m: &OpSetMetadata, - ops: &[Op], - ) -> QueryResult { - let start = binary_search_by(child, 
ops, |op| m.key_cmp(&op.key, &self.key)); - self.pos = start; - QueryResult::Skip(start) - } - - fn query_element(&mut self, op: &'a Op) -> QueryResult { - // don't bother looking at things past our key - if op.key != self.key { - return QueryResult::Finish; - } - if op.visible() { - self.ops.push(op); - self.ops_pos.push(self.pos); - } - self.pos += 1; - QueryResult::Next - } -} diff --git a/rust/automerge/src/query/seek_op.rs b/rust/automerge/src/query/seek_op.rs deleted file mode 100644 index 2ed875d2..00000000 --- a/rust/automerge/src/query/seek_op.rs +++ /dev/null @@ -1,247 +0,0 @@ -use crate::op_tree::{OpSetMetadata, OpTreeNode}; -use crate::query::{binary_search_by, QueryResult, TreeQuery}; -use crate::types::{Key, Op, HEAD}; -use std::cmp::Ordering; -use std::fmt::Debug; - -#[derive(Debug, Clone, PartialEq)] -pub(crate) struct SeekOp<'a> { - /// the op we are looking for - op: &'a Op, - /// The position to insert at - pub(crate) pos: usize, - /// The indices of ops that this op overwrites - pub(crate) succ: Vec, - /// whether a position has been found - found: bool, -} - -impl<'a> SeekOp<'a> { - pub(crate) fn new(op: &'a Op) -> Self { - SeekOp { - op, - succ: vec![], - pos: 0, - found: false, - } - } - - fn lesser_insert(&self, op: &Op, m: &OpSetMetadata) -> bool { - op.insert && m.lamport_cmp(op.id, self.op.id) == Ordering::Less - } - - fn greater_opid(&self, op: &Op, m: &OpSetMetadata) -> bool { - m.lamport_cmp(op.id, self.op.id) == Ordering::Greater - } - - fn is_target_insert(&self, op: &Op) -> bool { - op.insert && op.elemid() == self.op.key.elemid() - } -} - -impl<'a> TreeQuery<'a> for SeekOp<'a> { - fn query_node_with_metadata( - &mut self, - child: &OpTreeNode, - m: &OpSetMetadata, - ops: &[Op], - ) -> QueryResult { - if self.found { - return QueryResult::Descend; - } - match self.op.key { - Key::Seq(HEAD) => { - while self.pos < child.len() { - let op = &ops[child.get(self.pos).unwrap()]; - if op.insert && m.lamport_cmp(op.id, self.op.id) == 
Ordering::Less { - break; - } - self.pos += 1; - } - QueryResult::Finish - } - Key::Seq(e) => { - if child.index.ops.contains(&e.0) { - QueryResult::Descend - } else { - self.pos += child.len(); - QueryResult::Next - } - } - Key::Map(_) => { - let start = binary_search_by(child, ops, |op| m.key_cmp(&op.key, &self.op.key)); - self.pos = start; - QueryResult::Skip(start) - } - } - } - - fn query_element_with_metadata(&mut self, e: &Op, m: &OpSetMetadata) -> QueryResult { - match self.op.key { - Key::Map(_) => { - // don't bother looking at things past our key - if e.key != self.op.key { - return QueryResult::Finish; - } - - if self.op.overwrites(e) { - self.succ.push(self.pos); - } - - if m.lamport_cmp(e.id, self.op.id) == Ordering::Greater { - return QueryResult::Finish; - } - - self.pos += 1; - QueryResult::Next - } - Key::Seq(_) => { - if !self.found { - if self.is_target_insert(e) { - self.found = true; - if self.op.overwrites(e) { - self.succ.push(self.pos); - } - } - self.pos += 1; - QueryResult::Next - } else { - // we have already found the target - if self.op.overwrites(e) { - self.succ.push(self.pos); - } - if self.op.insert { - if self.lesser_insert(e, m) { - QueryResult::Finish - } else { - self.pos += 1; - QueryResult::Next - } - } else if e.insert || self.greater_opid(e, m) { - QueryResult::Finish - } else { - self.pos += 1; - QueryResult::Next - } - } - } - } - } -} - -#[cfg(test)] -pub(crate) mod tests { - use crate::{ - op_set::OpSet, - op_tree::B, - query::SeekOp, - types::{Key, ObjId, Op, OpId}, - ActorId, ScalarValue, - }; - - /// Create an optree in which the only visible ops are on the boundaries of the nodes, - /// i.e. the visible elements are in the internal nodes. Like so - /// - /// ```notrust - /// - /// .----------------------. - /// | id | key | succ | - /// | B | "a" | | - /// | 2B | "b" | | - /// '----------------------' - /// / | \ - /// ;------------------------. | `------------------------------------. 
- /// | id | op | succ | | | id | op | succ | - /// | 0 |set "a" | 1 | | | 2B + 1 |set "c" | 2B + 2 | - /// | 1 |set "a" | 2 | | | 2B + 2 |set "c" | 2B + 3 | - /// | 2 |set "a" | 3 | | ... - /// ... | | 3B |set "c" | | - /// | B - 1 |set "a" | B | | '------------------------------------' - /// '--------'--------'------' | - /// | - /// .-----------------------------. - /// | id | key | succ | - /// | B + 1 | "b" | B + 2 | - /// | B + 2 | "b" | B + 3 | - /// .... - /// | B + (B - 1 | "b" | 2B | - /// '-----------------------------' - /// ``` - /// - /// The important point here is that the leaf nodes contain no visible ops for keys "a" and - /// "b". - /// - /// # Returns - /// - /// The opset in question and an op which should be inserted at the next position after the - /// internally visible ops. - pub(crate) fn optree_with_only_internally_visible_ops() -> (OpSet, Op) { - let mut set = OpSet::new(); - let actor = set.m.actors.cache(ActorId::random()); - let a = set.m.props.cache("a".to_string()); - let b = set.m.props.cache("b".to_string()); - let c = set.m.props.cache("c".to_string()); - - let mut counter = 0; - // For each key insert `B` operations with the `pred` and `succ` setup such that the final - // operation for each key is the only visible op. 
- for key in [a, b, c] { - for iteration in 0..B { - // Generate a value to insert - let keystr = set.m.props.get(key); - let val = keystr.repeat(iteration + 1); - - // Only the last op is visible - let pred = if iteration == 0 { - Default::default() - } else { - set.m - .sorted_opids(vec![OpId::new(counter - 1, actor)].into_iter()) - }; - - // only the last op is visible - let succ = if iteration == B - 1 { - Default::default() - } else { - set.m - .sorted_opids(vec![OpId::new(counter, actor)].into_iter()) - }; - - let op = Op { - id: OpId::new(counter, actor), - action: crate::OpType::Put(ScalarValue::Str(val.into())), - key: Key::Map(key), - succ, - pred, - insert: false, - }; - set.insert(counter as usize, &ObjId::root(), op); - counter += 1; - } - } - - // Now try and create an op which inserts at the next index of 'a' - let new_op = Op { - id: OpId::new(counter, actor), - action: crate::OpType::Put(ScalarValue::Str("test".into())), - key: Key::Map(a), - succ: Default::default(), - pred: set - .m - .sorted_opids(std::iter::once(OpId::new(B as u64 - 1, actor))), - insert: false, - }; - (set, new_op) - } - - #[test] - fn seek_on_page_boundary() { - let (set, new_op) = optree_with_only_internally_visible_ops(); - - let q = SeekOp::new(&new_op); - let q = set.search(&ObjId::root(), q); - - // we've inserted `B - 1` elements for "a", so the index should be `B` - assert_eq!(q.pos, B); - } -} diff --git a/rust/automerge/src/query/seek_op_with_patch.rs b/rust/automerge/src/query/seek_op_with_patch.rs deleted file mode 100644 index cd30f5bb..00000000 --- a/rust/automerge/src/query/seek_op_with_patch.rs +++ /dev/null @@ -1,291 +0,0 @@ -use crate::op_tree::{OpSetMetadata, OpTreeNode}; -use crate::query::{binary_search_by, QueryResult, TreeQuery}; -use crate::types::{Key, ListEncoding, Op, HEAD}; -use std::cmp::Ordering; -use std::fmt::Debug; - -#[derive(Debug, Clone, PartialEq)] -pub(crate) struct SeekOpWithPatch<'a> { - op: Op, - pub(crate) pos: usize, - pub(crate) 
succ: Vec, - found: bool, - encoding: ListEncoding, - pub(crate) seen: usize, - pub(crate) last_width: usize, - last_seen: Option, - pub(crate) values: Vec<&'a Op>, - pub(crate) had_value_before: bool, -} - -impl<'a> SeekOpWithPatch<'a> { - pub(crate) fn new(op: &Op, encoding: ListEncoding) -> Self { - SeekOpWithPatch { - op: op.clone(), - succ: vec![], - pos: 0, - found: false, - encoding, - seen: 0, - last_width: 0, - last_seen: None, - values: vec![], - had_value_before: false, - } - } - - fn lesser_insert(&self, op: &Op, m: &OpSetMetadata) -> bool { - op.insert && m.lamport_cmp(op.id, self.op.id) == Ordering::Less - } - - fn greater_opid(&self, op: &Op, m: &OpSetMetadata) -> bool { - m.lamport_cmp(op.id, self.op.id) == Ordering::Greater - } - - fn is_target_insert(&self, op: &Op) -> bool { - op.insert && op.elemid() == self.op.key.elemid() - } - - /// Keeps track of the number of visible list elements we have seen. Increments `self.seen` if - /// operation `e` associates a visible value with a list element, and if we have not already - /// counted that list element (this ensures that if a list element has several values, i.e. - /// a conflict, then it is still only counted once). - fn count_visible(&mut self, e: &Op) { - if e.elemid() == self.op.elemid() { - return; - } - if e.insert { - self.last_seen = None - } - if e.visible() && self.last_seen.is_none() { - self.seen += e.width(self.encoding); - self.last_seen = Some(e.elemid_or_key()) - } - } -} - -impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { - fn query_node_with_metadata( - &mut self, - child: &'a OpTreeNode, - m: &OpSetMetadata, - ops: &[Op], - ) -> QueryResult { - if self.found { - return QueryResult::Descend; - } - match self.op.key { - // Special case for insertion at the head of the list (`e == HEAD` is only possible for - // an insertion operation). Skip over any list elements whose elemId is greater than - // the opId of the operation being inserted. 
- Key::Seq(e) if e == HEAD => { - while self.pos < child.len() { - let op = &ops[child.get(self.pos).unwrap()]; - if op.insert && m.lamport_cmp(op.id, self.op.id) == Ordering::Less { - break; - } - self.count_visible(op); - self.pos += 1; - } - QueryResult::Finish - } - - // Updating a list: search for the tree node that contains the new operation's - // reference element (i.e. the element we're updating or inserting after) - Key::Seq(e) => { - if self.found || child.index.ops.contains(&e.0) { - QueryResult::Descend - } else { - self.pos += child.len(); - - // When we skip over a subtree, we need to count the number of visible list - // elements we're skipping over. Each node stores the number of visible - // elements it contains. However, it could happen that a visible element is - // split across two tree nodes. To avoid double-counting in this situation, we - // subtract one if the last visible element also appears in this tree node. - let mut num_vis = child.index.visible_len(self.encoding); - if num_vis > 0 { - // FIXME: I think this is wrong: we should subtract one only if this - // subtree contains a *visible* (i.e. empty succs) operation for the list - // element with elemId `last_seen`; this will subtract one even if all - // values for this list element have been deleted in this subtree. - if let Some(last_seen) = self.last_seen { - if child.index.has_visible(&last_seen) { - num_vis -= 1; - } - } - self.seen += num_vis; - - // FIXME: this is also wrong: `last_seen` needs to be the elemId of the - // last *visible* list element in this subtree, but I think this returns - // the last operation's elemId regardless of whether it's visible or not. - // This will lead to incorrect counting if `last_seen` is not visible: it's - // not counted towards `num_vis`, so we shouldn't be subtracting 1. 
- self.last_seen = Some(ops[child.last()].elemid_or_key()); - } - QueryResult::Next - } - } - - // Updating a map: operations appear in sorted order by key - Key::Map(_) => { - let start = binary_search_by(child, ops, |op| m.key_cmp(&op.key, &self.op.key)); - self.pos = start; - QueryResult::Skip(start) - } - } - } - - // Only called when operating on a sequence (list/text) object, since updates of a map are - // handled in `query_node_with_metadata`. - fn query_element_with_metadata(&mut self, e: &'a Op, m: &OpSetMetadata) -> QueryResult { - match self.op.key { - Key::Map(_) => { - if !self.found { - // Iterate over any existing operations for the same key; stop when we reach an - // operation with a different key - if e.key != self.op.key { - return QueryResult::Finish; - } - - // Keep track of any ops we're overwriting and any conflicts on this key - if self.op.overwrites(e) { - // when we encounter an increment op we also want to find the counter for - // it. - if self.op.is_inc() && e.is_counter() && e.visible() { - self.values.push(e); - } - self.succ.push(self.pos); - self.last_width = e.width(self.encoding); - - if e.visible() { - self.had_value_before = true; - } - } else if e.visible() { - self.values.push(e); - } - - // Ops for the same key should be in ascending order of opId, so we break when - // we reach an op with an opId greater than that of the new operation - if m.lamport_cmp(e.id, self.op.id) == Ordering::Greater { - self.found = true; - return QueryResult::Next; - } - - self.pos += 1; - } else { - // For the purpose of reporting conflicts, we also need to take into account any - // ops for the same key that appear after the new operation - - if e.key != self.op.key { - return QueryResult::Finish; - } - // No need to check if `self.op.overwrites(op)` because an operation's `preds` - // must always have lower Lamport timestamps than that op itself, and the ops - // here all have greater opIds than the new op - if e.visible() { - 
self.values.push(e); - } - } - QueryResult::Next - } - Key::Seq(_) => { - let result = if !self.found { - // First search for the referenced list element (i.e. the element we're updating, or - // after which we're inserting) - if self.is_target_insert(e) { - self.found = true; - if self.op.overwrites(e) { - // when we encounter an increment op we also want to find the counter for - // it. - if self.op.is_inc() && e.is_counter() && e.visible() { - self.values.push(e); - } - self.succ.push(self.pos); - self.last_width = e.width(self.encoding); - } - if e.visible() { - self.had_value_before = true; - } - } - self.pos += 1; - QueryResult::Next - } else { - // Once we've found the reference element, keep track of any ops that we're overwriting - let overwritten = self.op.overwrites(e); - if overwritten { - // when we encounter an increment op we also want to find the counter for - // it. - if self.op.is_inc() && e.is_counter() && e.visible() { - self.values.push(e); - } - self.succ.push(self.pos); - self.last_width = e.width(self.encoding); - } - - // If the new op is an insertion, skip over any existing list elements whose elemId is - // greater than the ID of the new insertion - if self.op.insert { - if self.lesser_insert(e, m) { - // Insert before the first existing list element whose elemId is less than that - // of the new insertion - QueryResult::Finish - } else { - self.pos += 1; - QueryResult::Next - } - } else if e.insert { - // If the new op is an update of an existing list element, the first insertion op - // we encounter after the reference element indicates the end of the reference elem - QueryResult::Finish - } else { - // When updating an existing list element, keep track of any conflicts on this list - // element. We also need to remember if the list element had any visible elements - // prior to applying the new operation: if not, the new operation is resurrecting - // a deleted list element, so it looks like an insertion in the patch. 
- if e.visible() { - self.had_value_before = true; - if !overwritten { - self.values.push(e); - } - } - - // We now need to put the ops for the same list element into ascending order, so we - // skip over any ops whose ID is less than that of the new operation. - if !self.greater_opid(e, m) { - self.pos += 1; - } - QueryResult::Next - } - }; - - // The patch needs to know the list index of each operation, so we count the number of - // visible list elements up to the insertion position of the new operation - if result == QueryResult::Next { - self.count_visible(e); - } - result - } - } - } -} - -#[cfg(test)] -mod tests { - use super::{super::seek_op::tests::optree_with_only_internally_visible_ops, SeekOpWithPatch}; - use crate::{ - op_tree::B, - types::{ListEncoding, ObjId}, - }; - - #[test] - fn test_insert_on_internal_only_nodes() { - let (set, new_op) = optree_with_only_internally_visible_ops(); - - let q = SeekOpWithPatch::new(&new_op, ListEncoding::List); - let q = set.search(&ObjId::root(), q); - - // we've inserted `B - 1` elements for "a", so the index should be `B` - assert_eq!(q.pos, B); - } -} diff --git a/rust/automerge/src/read.rs b/rust/automerge/src/read.rs deleted file mode 100644 index 6d479718..00000000 --- a/rust/automerge/src/read.rs +++ /dev/null @@ -1,199 +0,0 @@ -use crate::{ - error::AutomergeError, exid::ExId, keys::Keys, keys_at::KeysAt, list_range::ListRange, - list_range_at::ListRangeAt, map_range::MapRange, map_range_at::MapRangeAt, parents::Parents, - values::Values, Change, ChangeHash, ObjType, Prop, Value, -}; - -use std::ops::RangeBounds; - -/// Methods for reading values from an automerge document -/// -/// Many of the methods on this trait have an alternate `*_at` version which -/// takes an additional argument of `&[ChangeHash]`. This allows you to retrieve -/// the value at a particular point in the document history identified by the -/// given change hashes. 
-pub trait ReadDoc { - /// Get the parents of an object in the document tree. - /// - /// See the documentation for [`Parents`] for more details. - /// - /// ### Errors - /// - /// Returns an error when the id given is not the id of an object in this document. - /// This function does not get the parents of scalar values contained within objects. - /// - /// ### Experimental - /// - /// This function may in future be changed to allow getting the parents from the id of a scalar - /// value. - fn parents>(&self, obj: O) -> Result, AutomergeError>; - - /// Get the path to an object - /// - /// "path" here means the sequence of `(object Id, key)` pairs which leads - /// to the object in question. - /// - /// ### Errors - /// - /// * If the object ID `obj` is not in the document - fn path_to_object>(&self, obj: O) -> Result, AutomergeError>; - - /// Get the keys of the object `obj`. - /// - /// For a map this returns the keys of the map. - /// For a list this returns the element ids (opids) encoded as strings. - fn keys>(&self, obj: O) -> Keys<'_, '_>; - - /// Get the keys of the object `obj` as at `heads` - /// - /// See [`Self::keys`] - fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_>; - - /// Iterate over the keys and values of the map `obj` in the given range. - /// - /// If the object correspoding to `obj` is a list then this will return an empty iterator - /// - /// The returned iterator yields `(key, value, exid)` tuples, where the - /// third element is the ID of the operation which created the value. - fn map_range, R: RangeBounds>( - &self, - obj: O, - range: R, - ) -> MapRange<'_, R>; - - /// Iterate over the keys and values of the map `obj` in the given range as - /// at `heads` - /// - /// If the object correspoding to `obj` is a list then this will return an empty iterator - /// - /// The returned iterator yields `(key, value, exid)` tuples, where the - /// third element is the ID of the operation which created the value. 
- /// - /// See [`Self::map_range`] - fn map_range_at, R: RangeBounds>( - &self, - obj: O, - range: R, - heads: &[ChangeHash], - ) -> MapRangeAt<'_, R>; - - /// Iterate over the indexes and values of the list or text `obj` in the given range. - /// - /// The reuturned iterator yields `(index, value, exid)` tuples, where the third - /// element is the ID of the operation which created the value. - fn list_range, R: RangeBounds>( - &self, - obj: O, - range: R, - ) -> ListRange<'_, R>; - - /// Iterate over the indexes and values of the list or text `obj` in the given range as at `heads` - /// - /// The returned iterator yields `(index, value, exid)` tuples, where the third - /// element is the ID of the operation which created the value. - /// - /// See [`Self::list_range`] - fn list_range_at, R: RangeBounds>( - &self, - obj: O, - range: R, - heads: &[ChangeHash], - ) -> ListRangeAt<'_, R>; - - /// Iterate over the values in a map, list, or text object - /// - /// The returned iterator yields `(value, exid)` tuples, where the second element - /// is the ID of the operation which created the value. - fn values>(&self, obj: O) -> Values<'_>; - - /// Iterate over the values in a map, list, or text object as at `heads` - /// - /// The returned iterator yields `(value, exid)` tuples, where the second element - /// is the ID of the operation which created the value. - /// - /// See [`Self::values`] - fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_>; - - /// Get the length of the given object. - /// - /// If the given object is not in this document this method will return `0` - fn length>(&self, obj: O) -> usize; - - /// Get the length of the given object as at `heads` - /// - /// If the given object is not in this document this method will return `0` - /// - /// See [`Self::length`] - fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize; - - /// Get the type of this object, if it is an object. 
- fn object_type>(&self, obj: O) -> Result; - - /// Get the string represented by the given text object. - fn text>(&self, obj: O) -> Result; - - /// Get the string represented by the given text object as at `heads`, see - /// [`Self::text`] - fn text_at>( - &self, - obj: O, - heads: &[ChangeHash], - ) -> Result; - - /// Get a value out of the document. - /// - /// This returns a tuple of `(value, object ID)`. This is for two reasons: - /// - /// 1. If `value` is an object (represented by `Value::Object`) then the ID - /// is the ID of that object. This can then be used to retrieve nested - /// values from the document. - /// 2. Even if `value` is a scalar, the ID represents the operation which - /// created the value. This is useful if there are conflicting values for - /// this key as each value is tagged with the ID. - /// - /// In the case of a key which has conflicting values, this method will - /// return a single arbitrarily chosen value. This value will be chosen - /// deterministically on all nodes. If you want to get all the values for a - /// key use [`Self::get_all`]. - fn get, P: Into>( - &self, - obj: O, - prop: P, - ) -> Result, ExId)>, AutomergeError>; - - /// Get the value of the given key as at `heads`, see `[Self::get]` - fn get_at, P: Into>( - &self, - obj: O, - prop: P, - heads: &[ChangeHash], - ) -> Result, ExId)>, AutomergeError>; - - /// Get all conflicting values out of the document at this prop that conflict. - /// - /// If there are multiple conflicting values for a given key this method - /// will return all of them, with each value tagged by the ID of the - /// operation which created it. 
- fn get_all, P: Into>( - &self, - obj: O, - prop: P, - ) -> Result, ExId)>, AutomergeError>; - - /// Get all possibly conflicting values for a key as at `heads` - /// - /// See `[Self::get_all]` - fn get_all_at, P: Into>( - &self, - obj: O, - prop: P, - heads: &[ChangeHash], - ) -> Result, ExId)>, AutomergeError>; - - /// Get the hashes of the changes in this document that aren't transitive dependencies of the - /// given `heads`. - fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec; - - /// Get a change by its hash. - fn get_change_by_hash(&self, hash: &ChangeHash) -> Option<&Change>; -} diff --git a/rust/automerge/src/storage.rs b/rust/automerge/src/storage.rs deleted file mode 100644 index 5b3d03a7..00000000 --- a/rust/automerge/src/storage.rs +++ /dev/null @@ -1,24 +0,0 @@ -use std::ops::Range; - -pub(crate) mod change; -mod chunk; -mod columns; -pub(crate) mod convert; -mod document; -pub(crate) mod load; -pub(crate) mod parse; -pub(crate) mod save; - -pub(crate) use { - change::{AsChangeOp, Change, ChangeOp, Compressed, ReadChangeOpError}, - chunk::{CheckSum, Chunk, ChunkType, Header}, - columns::{Columns, MismatchingColumn, RawColumn, RawColumns}, - document::{AsChangeMeta, AsDocOp, ChangeMetadata, CompressConfig, DocOp, Document}, - load::VerificationMode, -}; - -fn shift_range(range: Range, by: usize) -> Range { - range.start + by..range.end + by -} - -pub(crate) const MAGIC_BYTES: [u8; 4] = [0x85, 0x6f, 0x4a, 0x83]; diff --git a/rust/automerge/src/storage/change.rs b/rust/automerge/src/storage/change.rs deleted file mode 100644 index 61db0b00..00000000 --- a/rust/automerge/src/storage/change.rs +++ /dev/null @@ -1,511 +0,0 @@ -use std::{borrow::Cow, io::Write, marker::PhantomData, num::NonZeroU64, ops::Range}; - -use crate::{convert, ActorId, ChangeHash, ScalarValue}; - -use super::{parse, shift_range, CheckSum, ChunkType, Columns, Header, RawColumns}; - -mod change_op_columns; -use change_op_columns::ChangeOpsColumns; -pub(crate) use 
change_op_columns::{ChangeOp, ReadChangeOpError}; - -mod change_actors; -pub(crate) use change_actors::PredOutOfOrder; -mod compressed; -mod op_with_change_actors; -pub(crate) use compressed::Compressed; - -pub(crate) const DEFLATE_MIN_SIZE: usize = 256; - -/// Changes present an iterator over the operations encoded in them. Before we have read these -/// changes we don't know if they are valid, so we expose an iterator with items which are -/// `Result`s. However, frequently we know that the changes are valid, this trait is used as a -/// witness that we have verified the operations in a change so we can expose an iterator which -/// does not return `Results` -pub(crate) trait OpReadState {} -#[derive(Debug, Clone, PartialEq)] -pub(crate) struct Verified; -#[derive(Debug, Clone, PartialEq)] -pub(crate) struct Unverified; -impl OpReadState for Verified {} -impl OpReadState for Unverified {} - -/// A `Change` is the result of parsing a change chunk as specified in [1] -/// -/// The type parameter to this type represents whether or not operation have been "verified". -/// Operations in a change chunk are stored in a compressed column oriented storage format. In -/// general there is no guarantee that this storage is valid. Therefore we use the `OpReadState` -/// type parameter to distinguish between contexts where we know that the ops are valid and those -/// where we don't. The `Change::verify_ops` method can be used to obtain a verified `Change` which -/// can provide an iterator over `ChangeOp`s directly, rather than over `Result`. -/// -/// [1]: https://alexjg.github.io/automerge-storage-docs/#change-chunks -#[derive(Clone, Debug)] -pub(crate) struct Change<'a, O: OpReadState> { - /// The raw bytes of the entire chunk containing this change, including the header. 
- bytes: Cow<'a, [u8]>, - header: Header, - dependencies: Vec, - actor: ActorId, - other_actors: Vec, - seq: u64, - start_op: NonZeroU64, - timestamp: i64, - message: Option, - ops_meta: ChangeOpsColumns, - /// The range in `Self::bytes` where the ops column data is - ops_data: Range, - extra_bytes: Range, - _phantom: PhantomData, -} - -impl<'a, O: OpReadState> PartialEq for Change<'a, O> { - fn eq(&self, other: &Self) -> bool { - self.bytes == other.bytes - } -} - -#[derive(thiserror::Error, Debug)] -pub(crate) enum ParseError { - #[error(transparent)] - Leb128(#[from] parse::leb128::Error), - #[error(transparent)] - InvalidUtf8(#[from] parse::InvalidUtf8), - #[error("failed to parse change columns: {0}")] - RawColumns(#[from] crate::storage::columns::raw_column::ParseError), - #[error("failed to parse header: {0}")] - Header(#[from] super::chunk::error::Header), - #[error("change contained compressed columns")] - CompressedChangeCols, - #[error("invalid change cols: {0}")] - InvalidColumns(Box), -} - -impl<'a> Change<'a, Unverified> { - pub(crate) fn parse( - input: parse::Input<'a>, - ) -> parse::ParseResult<'a, Change<'a, Unverified>, ParseError> { - // TODO(alex): check chunk type - let (i, header) = Header::parse(input)?; - let parse::Split { - first: chunk_input, - remaining, - } = i.split(header.data_bytes().len()); - let (_, change) = Self::parse_following_header(chunk_input, header)?; - Ok((remaining, change)) - } - - /// Parse a change chunk. `input` should be the entire chunk, including the header bytes. 
- pub(crate) fn parse_following_header( - input: parse::Input<'a>, - header: Header, - ) -> parse::ParseResult<'_, Change<'a, Unverified>, ParseError> { - let (i, deps) = parse::length_prefixed(parse::change_hash)(input)?; - let (i, actor) = parse::actor_id(i)?; - let (i, seq) = parse::leb128_u64(i)?; - let (i, start_op) = parse::nonzero_leb128_u64(i)?; - let (i, timestamp) = parse::leb128_i64(i)?; - let (i, message_len) = parse::leb128_u64(i)?; - let (i, message) = parse::utf_8(message_len as usize, i)?; - let (i, other_actors) = parse::length_prefixed(parse::actor_id)(i)?; - let (i, ops_meta) = RawColumns::parse(i)?; - let ( - i, - parse::RangeOf { - range: ops_data, .. - }, - ) = parse::range_of(|i| parse::take_n(ops_meta.total_column_len(), i), i)?; - - let ( - _i, - parse::RangeOf { - range: extra_bytes, .. - }, - ) = parse::range_of(parse::take_rest, i)?; - - let ops_meta = ops_meta - .uncompressed() - .ok_or(parse::ParseError::Error(ParseError::CompressedChangeCols))?; - let col_layout = Columns::parse(ops_data.len(), ops_meta.iter()) - .map_err(|e| parse::ParseError::Error(ParseError::InvalidColumns(Box::new(e))))?; - let ops_meta = ChangeOpsColumns::try_from(col_layout) - .map_err(|e| parse::ParseError::Error(ParseError::InvalidColumns(Box::new(e))))?; - - Ok(( - parse::Input::empty(), - Change { - bytes: input.bytes().into(), - header, - dependencies: deps, - actor, - other_actors, - seq, - start_op, - timestamp, - message: if message.is_empty() { - None - } else { - Some(message) - }, - ops_meta, - ops_data, - extra_bytes, - _phantom: PhantomData, - }, - )) - } - - /// Iterate over the ops in this chunk. The iterator will return an error if any of the ops are - /// malformed. 
- pub(crate) fn iter_ops( - &'a self, - ) -> impl Iterator> + Clone + 'a { - self.ops_meta.iter(self.ops_data()) - } - - /// Verify all the ops in this change executing `f` for each one - /// - /// `f` will be called for each op in this change, allowing callers to collect additional - /// information about the ops (e.g. all the actor IDs in the change, or the number of ops) - /// - /// # Errors - /// * If there is an error reading an operation - pub(crate) fn verify_ops( - self, - mut f: F, - ) -> Result, ReadChangeOpError> { - for op in self.iter_ops() { - f(op?); - } - if u32::try_from(u64::from(self.start_op)).is_err() { - return Err(ReadChangeOpError::CounterTooLarge); - } - Ok(Change { - bytes: self.bytes, - header: self.header, - dependencies: self.dependencies, - actor: self.actor, - other_actors: self.other_actors, - seq: self.seq, - start_op: self.start_op, - timestamp: self.timestamp, - message: self.message, - ops_meta: self.ops_meta, - ops_data: self.ops_data, - extra_bytes: self.extra_bytes, - _phantom: PhantomData, - }) - } -} - -impl<'a> Change<'a, Verified> { - pub(crate) fn builder() -> ChangeBuilder { - ChangeBuilder::new() - } - - pub(crate) fn iter_ops(&'a self) -> impl Iterator + Clone + 'a { - // SAFETY: This unwrap is okay because a `Change<'_, Verified>` can only be constructed - // using either `verify_ops` or `Builder::build`, so we know the ops columns are valid. 
- self.ops_meta.iter(self.ops_data()).map(|o| o.unwrap()) - } -} - -impl<'a, O: OpReadState> Change<'a, O> { - pub(crate) fn checksum(&self) -> CheckSum { - self.header.checksum() - } - - pub(crate) fn actor(&self) -> &ActorId { - &self.actor - } - pub(crate) fn other_actors(&self) -> &[ActorId] { - &self.other_actors - } - - pub(crate) fn start_op(&self) -> NonZeroU64 { - self.start_op - } - - pub(crate) fn message(&self) -> &Option { - &self.message - } - - pub(crate) fn dependencies(&self) -> &[ChangeHash] { - &self.dependencies - } - - pub(crate) fn seq(&self) -> u64 { - self.seq - } - - pub(crate) fn timestamp(&self) -> i64 { - self.timestamp - } - - pub(crate) fn extra_bytes(&self) -> &[u8] { - &self.bytes[self.extra_bytes.clone()] - } - - pub(crate) fn checksum_valid(&self) -> bool { - self.header.checksum_valid() - } - - pub(crate) fn body_bytes(&self) -> &[u8] { - &self.bytes[self.header.len()..] - } - - pub(crate) fn bytes(&self) -> &[u8] { - &self.bytes - } - - pub(crate) fn hash(&self) -> ChangeHash { - self.header.hash() - } - - pub(crate) fn ops_data(&self) -> &[u8] { - &self.bytes[self.ops_data.clone()] - } - - pub(crate) fn into_owned(self) -> Change<'static, O> { - Change { - dependencies: self.dependencies, - bytes: Cow::Owned(self.bytes.into_owned()), - header: self.header, - actor: self.actor, - other_actors: self.other_actors, - seq: self.seq, - start_op: self.start_op, - timestamp: self.timestamp, - message: self.message, - ops_meta: self.ops_meta, - ops_data: self.ops_data, - extra_bytes: self.extra_bytes, - _phantom: PhantomData, - } - } - - pub(crate) fn compress(&self) -> Option> { - if self.bytes.len() > DEFLATE_MIN_SIZE { - Some(Compressed::compress(self)) - } else { - None - } - } -} - -fn length_prefixed_bytes>(b: B, out: &mut Vec) -> usize { - let prefix_len = leb128::write::unsigned(out, b.as_ref().len() as u64).unwrap(); - out.write_all(b.as_ref()).unwrap(); - prefix_len + b.as_ref().len() -} - -// Bunch of type safe builder 
boilerplate -pub(crate) struct Unset; -pub(crate) struct Set { - value: T, -} - -#[allow(non_camel_case_types)] -pub(crate) struct ChangeBuilder { - dependencies: Vec, - actor: ACTOR, - seq: SEQ, - start_op: START_OP, - timestamp: TIME, - message: Option, - extra_bytes: Option>, -} - -impl ChangeBuilder { - pub(crate) fn new() -> Self { - Self { - dependencies: vec![], - actor: Unset, - seq: Unset, - start_op: Unset, - timestamp: Unset, - message: None, - extra_bytes: None, - } - } -} - -#[allow(non_camel_case_types)] -impl ChangeBuilder { - pub(crate) fn with_dependencies(self, mut dependencies: Vec) -> Self { - dependencies.sort_unstable(); - Self { - dependencies, - ..self - } - } - - pub(crate) fn with_message(self, message: Option) -> Self { - Self { message, ..self } - } - - pub(crate) fn with_extra_bytes(self, extra_bytes: Vec) -> Self { - Self { - extra_bytes: Some(extra_bytes), - ..self - } - } -} - -#[allow(non_camel_case_types)] -impl ChangeBuilder { - pub(crate) fn with_seq(self, seq: u64) -> ChangeBuilder, TIME> { - ChangeBuilder { - dependencies: self.dependencies, - actor: self.actor, - seq: Set { value: seq }, - start_op: self.start_op, - timestamp: self.timestamp, - message: self.message, - extra_bytes: self.extra_bytes, - } - } -} - -#[allow(non_camel_case_types)] -impl ChangeBuilder { - pub(crate) fn with_actor( - self, - actor: ActorId, - ) -> ChangeBuilder, SEQ, TIME> { - ChangeBuilder { - dependencies: self.dependencies, - actor: Set { value: actor }, - seq: self.seq, - start_op: self.start_op, - timestamp: self.timestamp, - message: self.message, - extra_bytes: self.extra_bytes, - } - } -} - -impl ChangeBuilder { - pub(crate) fn with_start_op( - self, - start_op: NonZeroU64, - ) -> ChangeBuilder, ACTOR, SEQ, TIME> { - ChangeBuilder { - dependencies: self.dependencies, - actor: self.actor, - seq: self.seq, - start_op: Set { value: start_op }, - timestamp: self.timestamp, - message: self.message, - extra_bytes: self.extra_bytes, - } - } -} - 
-#[allow(non_camel_case_types)] -impl ChangeBuilder { - pub(crate) fn with_timestamp(self, time: i64) -> ChangeBuilder> { - ChangeBuilder { - dependencies: self.dependencies, - actor: self.actor, - seq: self.seq, - start_op: self.start_op, - timestamp: Set { value: time }, - message: self.message, - extra_bytes: self.extra_bytes, - } - } -} - -/// A row to be encoded as a change op -/// -/// The lifetime `'a` is the lifetime of the value and key data types. For types which cannot -/// provide a reference (e.g. because they are decoding from some columnar storage on each -/// iteration) this should be `'static`. -pub(crate) trait AsChangeOp<'a> { - /// The type of the Actor ID component of the op IDs for this impl. This is typically either - /// `&'a ActorID` or `usize` - type ActorId; - /// The type of the op IDs this impl produces. - type OpId: convert::OpId; - /// The type of the predecessor iterator returned by `Self::pred`. This can often be omitted - type PredIter: Iterator + ExactSizeIterator; - - fn obj(&self) -> convert::ObjId; - fn key(&self) -> convert::Key<'a, Self::OpId>; - fn insert(&self) -> bool; - fn action(&self) -> u64; - fn val(&self) -> Cow<'a, ScalarValue>; - fn pred(&self) -> Self::PredIter; -} - -impl ChangeBuilder, Set, Set, Set> { - pub(crate) fn build<'a, A, I, O>( - self, - ops: I, - ) -> Result, PredOutOfOrder> - where - A: AsChangeOp<'a, OpId = O> + 'a, - O: convert::OpId<&'a ActorId> + 'a, - I: Iterator + Clone + 'a, - { - let mut col_data = Vec::new(); - let actors = change_actors::ChangeActors::new(self.actor.value, ops)?; - let cols = ChangeOpsColumns::encode(actors.iter(), &mut col_data); - - let (actor, other_actors) = actors.done(); - - let mut data = Vec::with_capacity(col_data.len()); - leb128::write::unsigned(&mut data, self.dependencies.len() as u64).unwrap(); - for dep in &self.dependencies { - data.write_all(dep.as_bytes()).unwrap(); - } - length_prefixed_bytes(&actor, &mut data); - leb128::write::unsigned(&mut data, 
self.seq.value).unwrap(); - leb128::write::unsigned(&mut data, self.start_op.value.into()).unwrap(); - leb128::write::signed(&mut data, self.timestamp.value).unwrap(); - length_prefixed_bytes( - self.message.as_ref().map(|m| m.as_bytes()).unwrap_or(&[]), - &mut data, - ); - leb128::write::unsigned(&mut data, other_actors.len() as u64).unwrap(); - for actor in other_actors.iter() { - length_prefixed_bytes(actor, &mut data); - } - cols.raw_columns().write(&mut data); - let ops_data_start = data.len(); - let ops_data = ops_data_start..(ops_data_start + col_data.len()); - - data.extend(col_data); - let extra_bytes = - data.len()..(data.len() + self.extra_bytes.as_ref().map(|e| e.len()).unwrap_or(0)); - if let Some(extra) = self.extra_bytes { - data.extend(extra); - } - - let header = Header::new(ChunkType::Change, &data); - - let mut bytes = Vec::with_capacity(header.len() + data.len()); - header.write(&mut bytes); - bytes.extend(data); - - let ops_data = shift_range(ops_data, header.len()); - let extra_bytes = shift_range(extra_bytes, header.len()); - - Ok(Change { - bytes: Cow::Owned(bytes), - header, - dependencies: self.dependencies, - actor, - other_actors, - seq: self.seq.value, - start_op: self.start_op.value, - timestamp: self.timestamp.value, - message: self.message, - ops_meta: cols, - ops_data, - extra_bytes, - _phantom: PhantomData, - }) - } -} diff --git a/rust/automerge/src/storage/change/change_actors.rs b/rust/automerge/src/storage/change/change_actors.rs deleted file mode 100644 index 61f1221d..00000000 --- a/rust/automerge/src/storage/change/change_actors.rs +++ /dev/null @@ -1,304 +0,0 @@ -use std::collections::{BTreeMap, BTreeSet}; - -use crate::convert; - -use super::AsChangeOp; - -/// This struct represents the ordering of actor indices in a change chunk. Operations in a change -/// chunk are encoded with the actor ID represented as an offset into an array of actors which are -/// encoded at the start of the chunk. 
This array is in a specific order: the author of the change -/// is always the first actor, then all other actors referenced in a change are encoded in -/// lexicographic order. -/// -/// The intended usage is to construct a `ChangeActors` from an iterator over `AsChangeOp` where -/// the `ActorId` of the `AsChangeOp` implementation is the original actor ID. The resulting -/// `ChangeActors` implements `Iterator` where the `item` implements -/// `AsChangeOp>`, which can be passed to `ChangeOpColumns::encode`. -/// -/// Once encoding is complete you can use `ChangeActors::done` to retrieve the original actor and the -/// other actors in the change. -/// -/// # Note on type parameters -/// -/// The type paramters are annoying, they basically exist because we can't have generic associated -/// types, so we have to feed the concrete types of the associated types of the `AsChangeOp` -/// implementation through here. Here's what they all refer to: -/// -/// * A - The type of the actor ID used in the operation IDs of the incoming changes -/// * I - The type of the iterator over the `AsChangeOp` implementation of the incoming changes -/// * O - The concrete type of the operation ID which implementas `convert::OpId` -/// * C - The concrete type (which implements `AsChangeOp`) of the incoming changes -/// * 'a - The lifetime bound for the AsChangeOp trait and it's associated types -/// -/// Maybe when GATs land we can make this simpler. 
-pub(crate) struct ChangeActors<'a, ActorId, I, O, C> { - actor: ActorId, - other_actors: Vec, - index: BTreeMap, - wrapped: I, - num_ops: usize, - _phantom: std::marker::PhantomData<(&'a O, C)>, -} - -#[derive(thiserror::Error, Debug)] -#[error("actor index {0} referenced by an operation was not found in the changes")] -pub(crate) struct MissingActor(usize); - -#[derive(Debug, thiserror::Error)] -#[error("pred OpIds out of order")] -pub(crate) struct PredOutOfOrder; - -impl<'a, A, I, O, C> ChangeActors<'a, A, I, O, C> -where - A: PartialEq + Ord + Clone + std::hash::Hash + 'static, - O: convert::OpId<&'a A> + 'a, - C: AsChangeOp<'a, OpId = O> + 'a, - I: Iterator + Clone + 'a, -{ - /// Create a new change actor mapping - /// - /// # Arguments - /// * actor - the actor ID of the actor who authored this change - /// * ops - an iterator containing the operations which will be encoded into the change - /// - /// # Errors - /// * If one of the ops herein contains a `pred` with ops which are not in lamport timestamp - /// order - pub(crate) fn new(actor: A, ops: I) -> Result, PredOutOfOrder> { - // Change actors indices are encoded with the 0th element being the actor who authored the - // change and all other actors referenced in the chain following the author in - // lexicographic order. 
Here we collect all the actors referenced by operations in `ops` - let (num_ops, mut other_actors) = - ops.clone() - .try_fold((0, BTreeSet::new()), |(count, mut acc), op| { - if let convert::Key::Elem(convert::ElemId::Op(o)) = op.key() { - if o.actor() != &actor { - acc.insert(o.actor()); - } - } - - if !are_sorted(op.pred()) { - return Err(PredOutOfOrder); - } - for pred in op.pred() { - if pred.actor() != &actor { - acc.insert(pred.actor()); - } - } - if let convert::ObjId::Op(o) = op.obj() { - if o.actor() != &actor { - acc.insert(o.actor()); - } - } - Ok((count + 1, acc)) - })?; - // This shouldn't be necessary but just in case - other_actors.remove(&actor); - let mut other_actors = other_actors.into_iter().cloned().collect::>(); - other_actors.sort(); - let index = std::iter::once(actor.clone()) - .chain(other_actors.clone().into_iter()) - .enumerate() - .map(|(idx, actor)| (actor, idx)) - .collect(); - Ok(ChangeActors { - actor, - other_actors, - index, - wrapped: ops, - num_ops, - _phantom: std::marker::PhantomData, - }) - } - - /// Translate an OpID from the OpSet index to the change index - fn translate_opid(&self, opid: &O) -> ChangeOpId { - ChangeOpId { - actor: *self.index.get(opid.actor()).unwrap(), - counter: opid.counter(), - } - } - - /// Returns a clonable iterator over the converted operations. The item of the iterator is an - /// implementation of `AsChangeOp` which uses the index of the actor of each operation into the - /// actors as encoded in a change. 
This is suitable for passing to `ChangeOpColumns::encode` - pub(crate) fn iter<'b>(&'b self) -> WithChangeActorsOpIter<'b, 'a, A, I, O, C> { - WithChangeActorsOpIter { - change_actors: self, - inner: self.wrapped.clone(), - } - } - - pub(crate) fn done(self) -> (A, Vec) { - (self.actor, self.other_actors) - } -} - -/// The actual implementation of the converted iterator -pub(crate) struct WithChangeActorsOpIter<'actors, 'aschangeop, A, I, O, C> { - change_actors: &'actors ChangeActors<'aschangeop, A, I, O, C>, - inner: I, -} - -impl<'actors, 'aschangeop, A: 'aschangeop, I, O, C> Clone - for WithChangeActorsOpIter<'actors, 'aschangeop, A, I, O, C> -where - I: Clone, -{ - fn clone(&self) -> Self { - Self { - change_actors: self.change_actors, - inner: self.inner.clone(), - } - } -} - -impl<'actors, 'aschangeop, A: 'aschangeop, I, O, C> Iterator - for WithChangeActorsOpIter<'actors, 'aschangeop, A, I, O, C> -where - C: AsChangeOp<'aschangeop, OpId = O>, - O: convert::OpId<&'aschangeop A>, - I: Iterator + Clone, -{ - type Item = WithChangeActors<'actors, 'aschangeop, A, I, O, C>; - - fn next(&mut self) -> Option { - self.inner.next().map(|o| WithChangeActors { - op: o, - actors: self.change_actors, - }) - } -} - -impl<'actors, 'aschangeop, A: 'aschangeop, I, O, C> ExactSizeIterator - for WithChangeActorsOpIter<'actors, 'aschangeop, A, I, O, C> -where - C: AsChangeOp<'aschangeop, OpId = O>, - O: convert::OpId<&'aschangeop A>, - I: Iterator + Clone, -{ - fn len(&self) -> usize { - self.change_actors.num_ops - } -} - -pub(crate) struct ChangeOpId { - actor: usize, - counter: u64, -} - -impl convert::OpId for ChangeOpId { - fn actor(&self) -> usize { - self.actor - } - - fn counter(&self) -> u64 { - self.counter - } -} - -/// A struct which implements `AsChangeOp` by translating the actor IDs in the incoming operations -/// into the index into the actors in the `ChangeActors`. 
-pub(crate) struct WithChangeActors<'actors, 'aschangeop, A, I, O, C> { - op: C, - actors: &'actors ChangeActors<'aschangeop, A, I, O, C>, -} - -impl<'actors, 'aschangeop, A, I, O, P, C> AsChangeOp<'aschangeop> - for WithChangeActors<'actors, 'aschangeop, A, I, O, C> -where - A: PartialEq + Ord + Clone + std::hash::Hash + 'static, - O: convert::OpId<&'aschangeop A>, - P: Iterator + ExactSizeIterator + 'aschangeop, - C: AsChangeOp<'aschangeop, PredIter = P, OpId = O> + 'aschangeop, - I: Iterator + Clone + 'aschangeop, -{ - type ActorId = usize; - type OpId = ChangeOpId; - type PredIter = WithChangeActorsPredIter<'actors, 'aschangeop, A, I, O, C, P>; - - fn action(&self) -> u64 { - self.op.action() - } - - fn insert(&self) -> bool { - self.op.insert() - } - - fn pred(&self) -> Self::PredIter { - WithChangeActorsPredIter { - wrapped: self.op.pred(), - actors: self.actors, - _phantom: std::marker::PhantomData, - } - } - - fn key(&self) -> convert::Key<'aschangeop, Self::OpId> { - self.op.key().map(|o| self.actors.translate_opid(&o)) - } - - fn obj(&self) -> convert::ObjId { - self.op.obj().map(|o| self.actors.translate_opid(&o)) - } - - fn val(&self) -> std::borrow::Cow<'aschangeop, crate::ScalarValue> { - self.op.val() - } -} - -pub(crate) struct WithChangeActorsPredIter<'actors, 'aschangeop, A, I, O, C, P> { - wrapped: P, - actors: &'actors ChangeActors<'aschangeop, A, I, O, C>, - _phantom: std::marker::PhantomData, -} - -impl<'actors, 'aschangeop, A, I, O, C, P> ExactSizeIterator - for WithChangeActorsPredIter<'actors, 'aschangeop, A, I, O, C, P> -where - A: PartialEq + Ord + Clone + std::hash::Hash + 'static, - O: convert::OpId<&'aschangeop A>, - P: Iterator + ExactSizeIterator + 'aschangeop, - C: AsChangeOp<'aschangeop, OpId = O> + 'aschangeop, - I: Iterator + Clone + 'aschangeop, -{ - fn len(&self) -> usize { - self.wrapped.len() - } -} - -impl<'actors, 'aschangeop, A, I, O, C, P> Iterator - for WithChangeActorsPredIter<'actors, 'aschangeop, A, I, O, C, P> -where 
- A: PartialEq + Ord + Clone + std::hash::Hash + 'static, - O: convert::OpId<&'aschangeop A>, - P: Iterator + 'aschangeop, - C: AsChangeOp<'aschangeop, OpId = O> + 'aschangeop, - I: Iterator + Clone + 'aschangeop, -{ - type Item = ChangeOpId; - - fn next(&mut self) -> Option { - self.wrapped.next().map(|o| self.actors.translate_opid(&o)) - } -} - -fn are_sorted(mut opids: I) -> bool -where - A: PartialEq + Ord + Clone, - O: convert::OpId, - I: Iterator, -{ - if let Some(first) = opids.next() { - let mut prev = first; - for opid in opids { - if opid.counter() < prev.counter() { - return false; - } - if opid.counter() == prev.counter() && opid.actor() < prev.actor() { - return false; - } - prev = opid; - } - } - true -} diff --git a/rust/automerge/src/storage/change/change_op_columns.rs b/rust/automerge/src/storage/change/change_op_columns.rs deleted file mode 100644 index 86ec59c2..00000000 --- a/rust/automerge/src/storage/change/change_op_columns.rs +++ /dev/null @@ -1,499 +0,0 @@ -use std::{convert::TryFrom, ops::Range}; - -use crate::{ - columnar::{ - column_range::{ - generic::{GenericColumnRange, GroupRange, GroupedColumnRange, SimpleColRange}, - BooleanRange, DeltaRange, Key, KeyEncoder, KeyIter, KeyRange, ObjIdEncoder, ObjIdIter, - ObjIdRange, OpIdListEncoder, OpIdListIter, OpIdListRange, RleRange, ValueEncoder, - ValueIter, ValueRange, - }, - encoding::{ - BooleanDecoder, BooleanEncoder, ColumnDecoder, DecodeColumnError, RleDecoder, - RleEncoder, - }, - }, - convert, - error::InvalidOpType, - storage::{ - change::AsChangeOp, - columns::{ - compression, ColumnId, ColumnSpec, ColumnType, Columns, MismatchingColumn, RawColumn, - }, - RawColumns, - }, - types::{ElemId, ObjId, OpId, ScalarValue}, - OpType, -}; - -const OBJ_COL_ID: ColumnId = ColumnId::new(0); -const KEY_COL_ID: ColumnId = ColumnId::new(1); -const INSERT_COL_ID: ColumnId = ColumnId::new(3); -const ACTION_COL_ID: ColumnId = ColumnId::new(4); -const VAL_COL_ID: ColumnId = ColumnId::new(5); -const 
PRED_COL_ID: ColumnId = ColumnId::new(7); - -#[derive(Clone, Debug, PartialEq)] -pub(crate) struct ChangeOp { - pub(crate) key: Key, - pub(crate) insert: bool, - pub(crate) val: ScalarValue, - pub(crate) pred: Vec, - pub(crate) action: u64, - pub(crate) obj: ObjId, -} - -impl<'a, A: AsChangeOp<'a, ActorId = usize, OpId = OpId>> From for ChangeOp { - fn from(a: A) -> Self { - ChangeOp { - key: match a.key() { - convert::Key::Prop(s) => Key::Prop(s.into_owned()), - convert::Key::Elem(convert::ElemId::Head) => Key::Elem(ElemId::head()), - convert::Key::Elem(convert::ElemId::Op(o)) => Key::Elem(ElemId(o)), - }, - obj: match a.obj() { - convert::ObjId::Root => ObjId::root(), - convert::ObjId::Op(o) => ObjId(o), - }, - val: a.val().into_owned(), - pred: a.pred().collect(), - insert: a.insert(), - action: a.action(), - } - } -} - -impl<'a> AsChangeOp<'a> for &'a ChangeOp { - type OpId = &'a crate::types::OpId; - type ActorId = usize; - type PredIter = std::slice::Iter<'a, crate::types::OpId>; - - fn obj(&self) -> convert::ObjId { - if self.obj.is_root() { - convert::ObjId::Root - } else { - convert::ObjId::Op(self.obj.opid()) - } - } - - fn key(&self) -> convert::Key<'a, Self::OpId> { - match &self.key { - Key::Prop(s) => convert::Key::Prop(std::borrow::Cow::Borrowed(s)), - Key::Elem(e) if e.is_head() => convert::Key::Elem(convert::ElemId::Head), - Key::Elem(e) => convert::Key::Elem(convert::ElemId::Op(&e.0)), - } - } - - fn val(&self) -> std::borrow::Cow<'a, ScalarValue> { - std::borrow::Cow::Borrowed(&self.val) - } - - fn pred(&self) -> Self::PredIter { - self.pred.iter() - } - - fn insert(&self) -> bool { - self.insert - } - - fn action(&self) -> u64 { - self.action - } -} - -#[derive(Clone, Debug, PartialEq)] -pub(crate) struct ChangeOpsColumns { - obj: Option, - key: KeyRange, - insert: BooleanRange, - action: RleRange, - val: ValueRange, - pred: OpIdListRange, -} - -impl ChangeOpsColumns { - pub(crate) fn iter<'a>(&self, data: &'a [u8]) -> ChangeOpsIter<'a> { - 
ChangeOpsIter { - failed: false, - obj: self.obj.as_ref().map(|o| o.iter(data)), - key: self.key.iter(data), - insert: self.insert.decoder(data), - action: self.action.decoder(data), - val: self.val.iter(data), - pred: self.pred.iter(data), - } - } - - #[tracing::instrument(skip(ops, out))] - pub(crate) fn encode<'a, 'b, 'c, I, C, Op>(ops: I, out: &'b mut Vec) -> ChangeOpsColumns - where - I: Iterator + Clone + ExactSizeIterator + 'a, - Op: convert::OpId + 'a, - C: AsChangeOp<'c, OpId = Op> + 'a, - { - if ops.len() > 10000 { - Self::encode_rowwise(ops, out) - } else { - Self::encode_columnwise(ops, out) - } - } - - pub(crate) fn encode_columnwise<'a, 'b, 'c, I, C, Op>( - ops: I, - out: &'b mut Vec, - ) -> ChangeOpsColumns - where - I: Iterator + Clone + 'a, - Op: convert::OpId + 'a, - C: AsChangeOp<'c, OpId = Op> + 'a, - { - let obj = ObjIdRange::encode(ops.clone().map(|o| o.obj()), out); - let key = KeyRange::encode(ops.clone().map(|o| o.key()), out); - let insert = BooleanRange::encode(ops.clone().map(|o| o.insert()), out); - let action = RleRange::encode(ops.clone().map(|o| Some(o.action())), out); - let val = ValueRange::encode(ops.clone().map(|o| o.val()), out); - let pred = OpIdListRange::encode(ops.map(|o| o.pred()), out); - Self { - obj, - key, - insert, - action, - val, - pred, - } - } - - fn encode_rowwise<'a, 'b, 'c, I, C, Op>(ops: I, out: &'b mut Vec) -> ChangeOpsColumns - where - I: Iterator + Clone + 'a, - Op: convert::OpId + 'a, - C: AsChangeOp<'c, OpId = Op> + 'a, - { - let mut obj = ObjIdEncoder::new(); - let mut key = KeyEncoder::new(); - let mut insert = BooleanEncoder::new(); - let mut action = RleEncoder::<_, u64>::from(Vec::new()); - let mut val = ValueEncoder::new(); - let mut pred = OpIdListEncoder::new(); - for op in ops { - obj.append(op.obj()); - key.append(op.key()); - insert.append(op.insert()); - action.append_value(op.action()); - val.append(&op.val()); - pred.append(op.pred()); - } - let obj = obj.finish(out); - let key = 
key.finish(out); - - let insert_start = out.len(); - let (insert, _) = insert.finish(); - out.extend(insert); - let insert = BooleanRange::from(insert_start..out.len()); - - let action_start = out.len(); - let (action, _) = action.finish(); - out.extend(action); - let action = RleRange::from(action_start..out.len()); - - let val = val.finish(out); - let pred = pred.finish(out); - - Self { - obj, - key, - insert, - action, - val, - pred, - } - } - - pub(crate) fn raw_columns(&self) -> RawColumns { - let mut cols = vec![ - RawColumn::new( - ColumnSpec::new(OBJ_COL_ID, ColumnType::Actor, false), - self.obj - .as_ref() - .map(|o| o.actor_range().clone().into()) - .unwrap_or(0..0), - ), - RawColumn::new( - ColumnSpec::new(OBJ_COL_ID, ColumnType::Integer, false), - self.obj - .as_ref() - .map(|o| o.counter_range().clone().into()) - .unwrap_or(0..0), - ), - RawColumn::new( - ColumnSpec::new(KEY_COL_ID, ColumnType::Actor, false), - self.key.actor_range().clone().into(), - ), - RawColumn::new( - ColumnSpec::new(KEY_COL_ID, ColumnType::DeltaInteger, false), - self.key.counter_range().clone().into(), - ), - RawColumn::new( - ColumnSpec::new(KEY_COL_ID, ColumnType::String, false), - self.key.string_range().clone().into(), - ), - RawColumn::new( - ColumnSpec::new(INSERT_COL_ID, ColumnType::Boolean, false), - self.insert.clone().into(), - ), - RawColumn::new( - ColumnSpec::new(ACTION_COL_ID, ColumnType::Integer, false), - self.action.clone().into(), - ), - RawColumn::new( - ColumnSpec::new(VAL_COL_ID, ColumnType::ValueMetadata, false), - self.val.meta_range().clone().into(), - ), - ]; - if !self.val.raw_range().is_empty() { - cols.push(RawColumn::new( - ColumnSpec::new(VAL_COL_ID, ColumnType::Value, false), - self.val.raw_range().clone().into(), - )); - } - cols.push(RawColumn::new( - ColumnSpec::new(PRED_COL_ID, ColumnType::Group, false), - self.pred.group_range().clone().into(), - )); - if !self.pred.actor_range().is_empty() { - cols.extend([ - RawColumn::new( - 
ColumnSpec::new(PRED_COL_ID, ColumnType::Actor, false), - self.pred.actor_range().clone().into(), - ), - RawColumn::new( - ColumnSpec::new(PRED_COL_ID, ColumnType::DeltaInteger, false), - self.pred.counter_range().clone().into(), - ), - ]); - } - cols.into_iter().collect() - } -} - -#[derive(thiserror::Error, Debug)] -#[error(transparent)] -pub enum ReadChangeOpError { - #[error(transparent)] - DecodeError(#[from] DecodeColumnError), - #[error(transparent)] - InvalidOpType(#[from] InvalidOpType), - #[error("counter too large")] - CounterTooLarge, -} - -#[derive(Clone)] -pub(crate) struct ChangeOpsIter<'a> { - failed: bool, - obj: Option>, - key: KeyIter<'a>, - insert: BooleanDecoder<'a>, - action: RleDecoder<'a, u64>, - val: ValueIter<'a>, - pred: OpIdListIter<'a>, -} - -impl<'a> ChangeOpsIter<'a> { - fn done(&self) -> bool { - self.action.done() - } - - fn try_next(&mut self) -> Result, ReadChangeOpError> { - if self.failed || self.done() { - Ok(None) - } else { - let obj = if let Some(ref mut objs) = self.obj { - objs.next_in_col("obj")? - } else { - ObjId::root() - }; - let key = self.key.next_in_col("key")?; - let insert = self.insert.next_in_col("insert")?; - let action = self.action.next_in_col("action")?; - let val = self.val.next_in_col("value")?; - let pred = self.pred.next_in_col("pred")?; - - // This check is necessary to ensure that OpType::from_action_and_value - // cannot panic later in the process. 
- OpType::validate_action_and_value(action, &val)?; - - Ok(Some(ChangeOp { - obj, - key, - insert, - action, - val, - pred, - })) - } - } -} - -impl<'a> Iterator for ChangeOpsIter<'a> { - type Item = Result; - - fn next(&mut self) -> Option { - match self.try_next() { - Ok(v) => v.map(Ok), - Err(e) => { - self.failed = true; - Some(Err(e)) - } - } - } -} - -#[derive(thiserror::Error, Debug)] -pub(crate) enum ParseChangeColumnsError { - #[error("mismatching column at {index}.")] - MismatchingColumn { index: usize }, -} - -impl From for ParseChangeColumnsError { - fn from(m: MismatchingColumn) -> Self { - Self::MismatchingColumn { index: m.index } - } -} - -impl TryFrom for ChangeOpsColumns { - type Error = ParseChangeColumnsError; - - fn try_from(columns: Columns) -> Result { - let mut obj_actor: Option> = None; - let mut obj_ctr: Option> = None; - let mut key_actor: Option> = None; - let mut key_ctr: Option = None; - let mut key_str: Option> = None; - let mut insert: Option> = None; - let mut action: Option> = None; - let mut val: Option = None; - let mut pred_group: Option> = None; - let mut pred_actor: Option> = None; - let mut pred_ctr: Option = None; - let mut other = Columns::empty(); - - for (index, col) in columns.into_iter().enumerate() { - match (col.id(), col.col_type()) { - (OBJ_COL_ID, ColumnType::Actor) => obj_actor = Some(col.range().into()), - (OBJ_COL_ID, ColumnType::Integer) => obj_ctr = Some(col.range().into()), - (KEY_COL_ID, ColumnType::Actor) => key_actor = Some(col.range().into()), - (KEY_COL_ID, ColumnType::DeltaInteger) => key_ctr = Some(col.range().into()), - (KEY_COL_ID, ColumnType::String) => key_str = Some(col.range().into()), - (INSERT_COL_ID, ColumnType::Boolean) => insert = Some(col.range()), - (ACTION_COL_ID, ColumnType::Integer) => action = Some(col.range()), - (VAL_COL_ID, ColumnType::ValueMetadata) => match col.into_ranges() { - GenericColumnRange::Value(v) => { - val = Some(v); - } - _ => return 
Err(ParseChangeColumnsError::MismatchingColumn { index }), - }, - (PRED_COL_ID, ColumnType::Group) => match col.into_ranges() { - GenericColumnRange::Group(GroupRange { num, values }) => { - let mut cols = values.into_iter(); - pred_group = Some(num); - // If there was no data in the group at all then the columns won't be - // present - if cols.len() == 0 { - pred_actor = Some((0..0).into()); - pred_ctr = Some((0..0).into()); - } else { - let first = cols.next(); - let second = cols.next(); - match (first, second) { - ( - Some(GroupedColumnRange::Simple(SimpleColRange::RleInt( - actor_range, - ))), - Some(GroupedColumnRange::Simple(SimpleColRange::Delta( - ctr_range, - ))), - ) => { - pred_actor = Some(actor_range); - pred_ctr = Some(ctr_range); - } - _ => { - return Err(ParseChangeColumnsError::MismatchingColumn { - index, - }) - } - } - } - if cols.next().is_some() { - return Err(ParseChangeColumnsError::MismatchingColumn { index }); - } - } - _ => return Err(ParseChangeColumnsError::MismatchingColumn { index }), - }, - (other_type, other_col) => { - tracing::warn!(typ=?other_type, id=?other_col, "unknown column"); - other.append(col); - } - } - } - let pred = OpIdListRange::new( - pred_group.unwrap_or_else(|| (0..0).into()), - pred_actor.unwrap_or_else(|| (0..0).into()), - pred_ctr.unwrap_or_else(|| (0..0).into()), - ); - Ok(ChangeOpsColumns { - obj: ObjIdRange::new( - obj_actor.unwrap_or_else(|| (0..0).into()), - obj_ctr.unwrap_or_else(|| (0..0).into()), - ), - key: KeyRange::new( - key_actor.unwrap_or_else(|| (0..0).into()), - key_ctr.unwrap_or_else(|| (0..0).into()), - key_str.unwrap_or_else(|| (0..0).into()), - ), - insert: insert.unwrap_or(0..0).into(), - action: action.unwrap_or(0..0).into(), - val: val.unwrap_or_else(|| ValueRange::new((0..0).into(), (0..0).into())), - pred, - }) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::columnar::encoding::properties::{key, opid, scalar_value}; - use proptest::prelude::*; - - prop_compose! 
{ - fn change_op() - (key in key(), - value in scalar_value(), - pred in proptest::collection::vec(opid(), 0..20), - action in 0_u64..6, - obj in opid(), - insert in any::()) -> ChangeOp { - - let val = if action == 5 && !(value.is_int() || value.is_uint()) { - ScalarValue::Uint(0) - } else { value }; - ChangeOp { - obj: obj.into(), - key, - val, - pred, - action, - insert, - } - } - } - - proptest! { - #[test] - fn test_encode_decode_change_ops(ops in proptest::collection::vec(change_op(), 0..100)) { - let mut out = Vec::new(); - let cols2 = ChangeOpsColumns::encode(ops.iter(), &mut out); - let decoded = cols2.iter(&out[..]).collect::, _>>().unwrap(); - assert_eq!(ops, decoded); - } - } -} diff --git a/rust/automerge/src/storage/change/compressed.rs b/rust/automerge/src/storage/change/compressed.rs deleted file mode 100644 index 55d56ffb..00000000 --- a/rust/automerge/src/storage/change/compressed.rs +++ /dev/null @@ -1,51 +0,0 @@ -use std::{borrow::Cow, io::Read}; - -use crate::storage::{Change, CheckSum, ChunkType, MAGIC_BYTES}; - -use super::OpReadState; - -#[derive(Clone, Debug, PartialEq)] -pub(crate) struct Compressed<'a> { - checksum: CheckSum, - bytes: Cow<'a, [u8]>, -} - -impl<'a> Compressed<'a> { - pub(crate) fn new(checksum: CheckSum, bytes: Cow<'a, [u8]>) -> Self { - Self { checksum, bytes } - } - - pub(crate) fn compress<'b, O: OpReadState>(change: &'b Change<'b, O>) -> Compressed<'static> { - let mut result = Vec::with_capacity(change.bytes().len()); - result.extend(MAGIC_BYTES); - result.extend(change.checksum().bytes()); - result.push(u8::from(ChunkType::Compressed)); - let mut deflater = flate2::bufread::DeflateEncoder::new( - change.body_bytes(), - flate2::Compression::default(), - ); - let mut deflated = Vec::new(); - let deflated_len = deflater.read_to_end(&mut deflated).unwrap(); - leb128::write::unsigned(&mut result, deflated_len as u64).unwrap(); - result.extend(&deflated[..]); - Compressed { - checksum: change.checksum(), - bytes: 
Cow::Owned(result), - } - } - - pub(crate) fn bytes(&self) -> Cow<'a, [u8]> { - self.bytes.clone() - } - - pub(crate) fn checksum(&self) -> CheckSum { - self.checksum - } - - pub(crate) fn into_owned(self) -> Compressed<'static> { - Compressed { - checksum: self.checksum, - bytes: Cow::Owned(self.bytes.into_owned()), - } - } -} diff --git a/rust/automerge/src/storage/change/op_with_change_actors.rs b/rust/automerge/src/storage/change/op_with_change_actors.rs deleted file mode 100644 index 8b137891..00000000 --- a/rust/automerge/src/storage/change/op_with_change_actors.rs +++ /dev/null @@ -1 +0,0 @@ - diff --git a/rust/automerge/src/storage/chunk.rs b/rust/automerge/src/storage/chunk.rs deleted file mode 100644 index d0048528..00000000 --- a/rust/automerge/src/storage/chunk.rs +++ /dev/null @@ -1,293 +0,0 @@ -use std::{ - borrow::Cow, - convert::{TryFrom, TryInto}, - io::Read, - ops::Range, -}; - -use sha2::{Digest, Sha256}; - -use super::{change::Unverified, parse, Change, Compressed, Document, MAGIC_BYTES}; -use crate::{columnar::encoding::leb128::ulebsize, ChangeHash}; - -pub(crate) enum Chunk<'a> { - Document(Document<'a>), - Change(Change<'a, Unverified>), - CompressedChange(Change<'static, Unverified>, Compressed<'a>), -} - -pub(crate) mod error { - use super::parse; - use crate::storage::{change, document}; - - #[derive(thiserror::Error, Debug)] - pub(crate) enum Chunk { - #[error("there was data in a chunk leftover after parsing")] - LeftoverData, - #[error(transparent)] - Leb128(#[from] parse::leb128::Error), - #[error("failed to parse header: {0}")] - Header(#[from] Header), - #[error("bad change chunk: {0}")] - Change(#[from] change::ParseError), - #[error("bad document chunk: {0}")] - Document(#[from] document::ParseError), - #[error("unable to decompresse compressed chunk")] - Deflate, - } - - #[derive(thiserror::Error, Debug)] - pub(crate) enum Header { - #[error(transparent)] - Leb128(#[from] parse::leb128::Error), - #[error("unknown chunk type: 
{0}")] - UnknownChunkType(u8), - #[error("Invalid magic bytes")] - InvalidMagicBytes, - } -} - -impl<'a> Chunk<'a> { - pub(crate) fn parse( - input: parse::Input<'a>, - ) -> parse::ParseResult<'a, Chunk<'a>, error::Chunk> { - let (i, header) = Header::parse::(input)?; - let parse::Split { - first: chunk_input, - remaining, - } = i.split(header.data_bytes().len()); - tracing::trace!(?header, "parsed chunk header"); - let chunk = match header.chunk_type { - ChunkType::Change => { - let (remaining, change) = - Change::parse_following_header(chunk_input, header).map_err(|e| e.lift())?; - if !remaining.is_empty() { - return Err(parse::ParseError::Error(error::Chunk::LeftoverData)); - } - Chunk::Change(change) - } - ChunkType::Document => { - let (remaining, doc) = - Document::parse(chunk_input, header).map_err(|e| e.lift())?; - if !remaining.is_empty() { - return Err(parse::ParseError::Error(error::Chunk::LeftoverData)); - } - Chunk::Document(doc) - } - ChunkType::Compressed => { - let compressed = &input.unconsumed_bytes()[header.data_bytes()]; - let mut decoder = flate2::bufread::DeflateDecoder::new(compressed); - let mut decompressed = Vec::new(); - decoder - .read_to_end(&mut decompressed) - .map_err(|_| parse::ParseError::Error(error::Chunk::Deflate))?; - let inner_header = header.with_data(ChunkType::Change, &decompressed); - let mut inner_chunk = Vec::with_capacity(inner_header.len() + decompressed.len()); - inner_header.write(&mut inner_chunk); - inner_chunk.extend(&decompressed); - let (remaining, change) = - Change::parse(parse::Input::new(&inner_chunk)).map_err(|e| e.lift())?; - if !remaining.is_empty() { - return Err(parse::ParseError::Error(error::Chunk::LeftoverData)); - } - Chunk::CompressedChange( - change.into_owned(), - Compressed::new(header.checksum, Cow::Borrowed(chunk_input.bytes())), - ) - } - }; - Ok((remaining, chunk)) - } - - pub(crate) fn checksum_valid(&self) -> bool { - match self { - Self::Document(d) => d.checksum_valid(), - 
Self::Change(c) => c.checksum_valid(), - Self::CompressedChange(change, compressed) => { - compressed.checksum() == change.checksum() && change.checksum_valid() - } - } - } -} - -#[derive(Clone, Copy, Debug, PartialEq)] -pub(crate) enum ChunkType { - Document, - Change, - Compressed, -} - -impl TryFrom for ChunkType { - type Error = u8; - - fn try_from(value: u8) -> Result { - match value { - 0 => Ok(Self::Document), - 1 => Ok(Self::Change), - 2 => Ok(Self::Compressed), - other => Err(other), - } - } -} - -impl From for u8 { - fn from(ct: ChunkType) -> Self { - match ct { - ChunkType::Document => 0, - ChunkType::Change => 1, - ChunkType::Compressed => 2, - } - } -} - -#[derive(Clone, Copy, Debug, PartialEq)] -pub(crate) struct CheckSum([u8; 4]); - -impl CheckSum { - pub(crate) fn bytes(&self) -> [u8; 4] { - self.0 - } -} - -impl From<[u8; 4]> for CheckSum { - fn from(raw: [u8; 4]) -> Self { - CheckSum(raw) - } -} - -impl AsRef<[u8]> for CheckSum { - fn as_ref(&self) -> &[u8] { - &self.0 - } -} - -impl From for CheckSum { - fn from(h: ChangeHash) -> Self { - let bytes = h.as_bytes(); - [bytes[0], bytes[1], bytes[2], bytes[3]].into() - } -} - -#[derive(Debug, Clone, PartialEq)] -pub(crate) struct Header { - checksum: CheckSum, - chunk_type: ChunkType, - data_len: usize, - header_size: usize, - hash: ChangeHash, -} - -impl Header { - pub(crate) fn new(chunk_type: ChunkType, data: &[u8]) -> Self { - let hash = hash(chunk_type, data); - Self { - hash, - checksum: hash.checksum().into(), - data_len: data.len(), - header_size: MAGIC_BYTES.len() - + 4 // checksum - + 1 // chunk type - + (ulebsize(data.len() as u64) as usize), - chunk_type, - } - } - - /// Returns a header with the same checksum but with a different chunk type and data length. - /// This is primarily useful when processing compressed chunks, where the checksum is actually - /// derived from the uncompressed data. 
- pub(crate) fn with_data(&self, chunk_type: ChunkType, data: &[u8]) -> Header { - let hash = hash(chunk_type, data); - Self { - hash, - checksum: self.checksum, - data_len: data.len(), - header_size: MAGIC_BYTES.len() - + 4 // checksum - + 1 // chunk type - + (ulebsize(data.len() as u64) as usize), - chunk_type, - } - } - - pub(crate) fn len(&self) -> usize { - self.header_size - } - - pub(crate) fn write(&self, out: &mut Vec) { - out.extend(MAGIC_BYTES); - out.extend(self.checksum.bytes()); - out.push(u8::from(self.chunk_type)); - leb128::write::unsigned(out, self.data_len as u64).unwrap(); - } - - pub(crate) fn parse(input: parse::Input<'_>) -> parse::ParseResult<'_, Header, E> - where - E: From, - { - let ( - i, - parse::RangeOf { - range: header, - value: (checksum_bytes, chunk_type, chunk_len), - }, - ) = parse::range_of( - |i| { - let (i, magic) = parse::take4(i)?; - if magic != MAGIC_BYTES { - return Err(parse::ParseError::Error(E::from( - error::Header::InvalidMagicBytes, - ))); - } - let (i, checksum_bytes) = parse::take4(i)?; - let (i, raw_chunk_type) = parse::take1(i)?; - let chunk_type: ChunkType = raw_chunk_type.try_into().map_err(|_| { - parse::ParseError::Error(E::from(error::Header::UnknownChunkType( - raw_chunk_type, - ))) - })?; - let (i, chunk_len) = parse::leb128_u64(i).map_err(|e| e.lift())?; - Ok((i, (checksum_bytes, chunk_type, chunk_len))) - }, - input, - )?; - - let (_, data) = parse::take_n(chunk_len as usize, i)?; - let hash = hash(chunk_type, data); - Ok(( - i, - Header { - checksum: checksum_bytes.into(), - chunk_type, - data_len: data.len(), - header_size: header.len(), - hash, - }, - )) - } - - /// The range of the input which corresponds to the data specified by this header - pub(crate) fn data_bytes(&self) -> Range { - self.header_size..(self.header_size + self.data_len) - } - - pub(crate) fn hash(&self) -> ChangeHash { - self.hash - } - - pub(crate) fn checksum_valid(&self) -> bool { - CheckSum(self.hash.checksum()) == 
self.checksum - } - - pub(crate) fn checksum(&self) -> CheckSum { - self.checksum - } -} - -fn hash(typ: ChunkType, data: &[u8]) -> ChangeHash { - let mut out = vec![u8::from(typ)]; - leb128::write::unsigned(&mut out, data.len() as u64).unwrap(); - out.extend(data); - let hash_result = Sha256::digest(out); - let array: [u8; 32] = hash_result.into(); - ChangeHash(array) -} diff --git a/rust/automerge/src/storage/columns.rs b/rust/automerge/src/storage/columns.rs deleted file mode 100644 index 2ff6fa1f..00000000 --- a/rust/automerge/src/storage/columns.rs +++ /dev/null @@ -1,355 +0,0 @@ -/// This module contains types which represent the column metadata which is encoded in the columnar -/// storage format specified in [1]. In this format metadata about each column is packed into a 32 -/// bit integer, which is represented by the types in `column_specification`. The column data in -/// the format is a sequence of (`ColumnSpecification`, `usize`) pairs where each pair represents -/// the type of the column and the length of the column in the data which follows, these pairs are -/// represented by `RawColumn` and `RawColumns`. Some columns are actually composites of several -/// underlying columns and so not every `RawColumns` is valid. The types in `column` and -/// `column_builder` take a `RawColumns` and produce a `Columns` - which is a valid set of possibly -/// composite column metadata. 
-/// -/// There are two typical workflows: -/// -/// ## Reading -/// * First parse a `RawColumns` from the underlying data using `RawColumns::parse` -/// * Ensure that the columns are decompressed using `RawColumns::decompress` (checking first if -/// you can avoid this using `RawColumns::uncompressed`) -/// * Parse the `RawColumns` into a `Columns` using `Columns::parse` -/// -/// ## Writing -/// * Construct a `RawColumns` -/// * Compress using `RawColumns::compress` -/// * Write to output using `RawColumns::write` -/// -/// [1]: https://alexjg.github.io/automerge-storage-docs/#_columnar_storage_format -use std::ops::Range; - -mod column_specification; -pub(crate) use column_specification::{ColumnId, ColumnSpec, ColumnType}; -mod column; -pub(crate) use column::Column; -mod column_builder; -pub(crate) use column_builder::{ - AwaitingRawColumnValueBuilder, ColumnBuilder, GroupAwaitingValue, GroupBuilder, -}; - -pub(crate) mod raw_column; -pub(crate) use raw_column::{RawColumn, RawColumns}; - -#[derive(Debug, thiserror::Error)] -#[error("mismatching column at {index}.")] -pub(crate) struct MismatchingColumn { - pub(crate) index: usize, -} - -pub(crate) mod compression { - #[derive(Clone, Debug)] - pub(crate) struct Unknown; - #[derive(Clone, Debug)] - pub(crate) struct Uncompressed; - - /// A witness for what we know about whether or not a column is compressed - pub(crate) trait ColumnCompression {} - impl ColumnCompression for Unknown {} - impl ColumnCompression for Uncompressed {} -} - -/// `Columns` represents a sequence of "logical" columns. "Logical" in this sense means that -/// each column produces one value, but may be composed of multiple [`RawColumn`]s. For example, in a -/// logical column containing values there are two `RawColumn`s, one for the metadata about the -/// values, and one for the values themselves. 
-#[derive(Clone, Debug)] -pub(crate) struct Columns { - columns: Vec, -} - -impl Columns { - pub(crate) fn empty() -> Self { - Self { - columns: Vec::new(), - } - } - - pub(crate) fn append(&mut self, col: Column) { - self.columns.push(col) - } - - pub(crate) fn parse<'a, I: Iterator>>( - data_size: usize, - cols: I, - ) -> Result { - let mut parser = ColumnLayoutParser::new(data_size, None); - for raw_col in cols { - parser.add_column(raw_col.spec(), raw_col.data())?; - } - parser.build() - } -} - -impl FromIterator for Result { - fn from_iter>(iter: T) -> Self { - let iter = iter.into_iter(); - let mut result = Vec::with_capacity(iter.size_hint().1.unwrap_or(0)); - let mut last_column: Option = None; - for col in iter { - if let Some(last_col) = last_column { - if col.spec().normalize() < last_col.normalize() { - return Err(BadColumnLayout::OutOfOrder); - } - } - last_column = Some(col.spec()); - result.push(col); - } - Ok(Columns { columns: result }) - } -} - -impl IntoIterator for Columns { - type Item = Column; - type IntoIter = std::vec::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - self.columns.into_iter() - } -} - -#[derive(Debug, thiserror::Error)] -pub(crate) enum BadColumnLayout { - #[error("duplicate column specifications: {0}")] - DuplicateColumnSpecs(u32), - #[error("out of order columns")] - OutOfOrder, - #[error("nested group")] - NestedGroup, - #[error("raw value column without metadata column")] - LoneRawValueColumn, - #[error("value metadata followed by value column with different column ID")] - MismatchingValueMetadataId, - #[error("non contiguous columns")] - NonContiguousColumns, - #[error("data out of range")] - DataOutOfRange, -} - -struct ColumnLayoutParser { - columns: Vec, - last_spec: Option, - state: LayoutParserState, - total_data_size: usize, -} - -enum LayoutParserState { - Ready, - InValue(AwaitingRawColumnValueBuilder), - InGroup(ColumnId, GroupParseState), -} - -#[derive(Debug)] -enum GroupParseState { - 
Ready(GroupBuilder), - InValue(GroupAwaitingValue), -} - -impl ColumnLayoutParser { - fn new(data_size: usize, size_hint: Option) -> Self { - ColumnLayoutParser { - columns: Vec::with_capacity(size_hint.unwrap_or(0)), - last_spec: None, - state: LayoutParserState::Ready, - total_data_size: data_size, - } - } - - fn build(mut self) -> Result { - let columns = match self.state { - LayoutParserState::Ready => self.columns, - LayoutParserState::InValue(mut builder) => { - self.columns.push(builder.build((0..0).into())); - self.columns - } - LayoutParserState::InGroup(_, groupstate) => { - match groupstate { - GroupParseState::InValue(mut builder) => { - self.columns.push(builder.finish_empty().finish()); - } - GroupParseState::Ready(mut builder) => { - self.columns.push(builder.finish()); - } - }; - self.columns - } - }; - Ok(Columns { columns }) - } - - #[tracing::instrument(skip(self), err)] - fn add_column( - &mut self, - column: ColumnSpec, - range: Range, - ) -> Result<(), BadColumnLayout> { - self.check_contiguous(&range)?; - self.check_bounds(&range)?; - if let Some(last_spec) = self.last_spec { - if last_spec.normalize() > column.normalize() { - return Err(BadColumnLayout::OutOfOrder); - } else if last_spec == column { - return Err(BadColumnLayout::DuplicateColumnSpecs(column.into())); - } - } - match &mut self.state { - LayoutParserState::Ready => match column.col_type() { - ColumnType::Group => { - self.state = LayoutParserState::InGroup( - column.id(), - GroupParseState::Ready(ColumnBuilder::start_group(column, range.into())), - ); - Ok(()) - } - ColumnType::ValueMetadata => { - self.state = LayoutParserState::InValue(ColumnBuilder::start_value( - column, - range.into(), - )); - Ok(()) - } - ColumnType::Value => Err(BadColumnLayout::LoneRawValueColumn), - ColumnType::Actor => { - self.columns - .push(ColumnBuilder::build_actor(column, range.into())); - Ok(()) - } - ColumnType::String => { - self.columns - .push(ColumnBuilder::build_string(column, 
range.into())); - Ok(()) - } - ColumnType::Integer => { - self.columns - .push(ColumnBuilder::build_integer(column, range.into())); - Ok(()) - } - ColumnType::DeltaInteger => { - self.columns - .push(ColumnBuilder::build_delta_integer(column, range.into())); - Ok(()) - } - ColumnType::Boolean => { - self.columns - .push(ColumnBuilder::build_boolean(column, range.into())); - Ok(()) - } - }, - LayoutParserState::InValue(builder) => match column.col_type() { - ColumnType::Value => { - if builder.id() != column.id() { - return Err(BadColumnLayout::MismatchingValueMetadataId); - } - self.columns.push(builder.build(range.into())); - self.state = LayoutParserState::Ready; - Ok(()) - } - _ => { - self.columns.push(builder.build((0..0).into())); - self.state = LayoutParserState::Ready; - self.add_column(column, range) - } - }, - LayoutParserState::InGroup(id, group_state) => { - if *id != column.id() { - match group_state { - GroupParseState::Ready(b) => self.columns.push(b.finish()), - GroupParseState::InValue(b) => self.columns.push(b.finish_empty().finish()), - }; - std::mem::swap(&mut self.state, &mut LayoutParserState::Ready); - self.add_column(column, range) - } else { - match group_state { - GroupParseState::Ready(builder) => match column.col_type() { - ColumnType::Group => Err(BadColumnLayout::NestedGroup), - ColumnType::Value => Err(BadColumnLayout::LoneRawValueColumn), - ColumnType::ValueMetadata => { - *group_state = - GroupParseState::InValue(builder.start_value(column, range)); - Ok(()) - } - ColumnType::Actor => { - builder.add_actor(column, range); - Ok(()) - } - ColumnType::Boolean => { - builder.add_boolean(column, range); - Ok(()) - } - ColumnType::DeltaInteger => { - builder.add_delta_integer(column, range); - Ok(()) - } - ColumnType::Integer => { - builder.add_integer(column, range); - Ok(()) - } - ColumnType::String => { - builder.add_string(column, range); - Ok(()) - } - }, - GroupParseState::InValue(builder) => match column.col_type() { - 
ColumnType::Value => { - *group_state = GroupParseState::Ready(builder.finish_value(range)); - Ok(()) - } - _ => { - *group_state = GroupParseState::Ready(builder.finish_empty()); - self.add_column(column, range) - } - }, - } - } - } - } - } - - fn check_contiguous(&self, next_range: &Range) -> Result<(), BadColumnLayout> { - match &self.state { - LayoutParserState::Ready => { - if let Some(prev) = self.columns.last() { - if prev.range().end != next_range.start { - tracing::error!(prev=?prev.range(), next=?next_range, "it's here"); - Err(BadColumnLayout::NonContiguousColumns) - } else { - Ok(()) - } - } else { - Ok(()) - } - } - LayoutParserState::InValue(builder) => { - if builder.meta_range().end() != next_range.start { - Err(BadColumnLayout::NonContiguousColumns) - } else { - Ok(()) - } - } - LayoutParserState::InGroup(_, group_state) => { - let end = match group_state { - GroupParseState::InValue(b) => b.range().end, - GroupParseState::Ready(b) => b.range().end, - }; - if end != next_range.start { - Err(BadColumnLayout::NonContiguousColumns) - } else { - Ok(()) - } - } - } - } - - fn check_bounds(&self, next_range: &Range) -> Result<(), BadColumnLayout> { - if next_range.end > self.total_data_size { - Err(BadColumnLayout::DataOutOfRange) - } else { - Ok(()) - } - } -} diff --git a/rust/automerge/src/storage/columns/column.rs b/rust/automerge/src/storage/columns/column.rs deleted file mode 100644 index 6f834439..00000000 --- a/rust/automerge/src/storage/columns/column.rs +++ /dev/null @@ -1,42 +0,0 @@ -use std::ops::Range; - -use crate::columnar::column_range::generic::GenericColumnRange; - -use super::{ColumnId, ColumnSpec, ColumnType}; - -/// A combination of a column specification and the range of data associated with it. Note that -/// multiple (adjacent) ranges can be associated with one column as some columns are composite. -/// This is encapsulated in the `GenericColumnRange` type. 
-#[derive(Clone, Debug)] -pub(crate) struct Column { - spec: ColumnSpec, - range: GenericColumnRange, -} - -impl Column { - pub(crate) fn new(spec: ColumnSpec, range: GenericColumnRange) -> Column { - Self { spec, range } - } -} - -impl Column { - pub(crate) fn range(&self) -> Range { - self.range.range() - } - - pub(crate) fn into_ranges(self) -> GenericColumnRange { - self.range - } - - pub(crate) fn col_type(&self) -> ColumnType { - self.spec.col_type() - } - - pub(crate) fn id(&self) -> ColumnId { - self.spec.id() - } - - pub(crate) fn spec(&self) -> ColumnSpec { - self.spec - } -} diff --git a/rust/automerge/src/storage/columns/column_builder.rs b/rust/automerge/src/storage/columns/column_builder.rs deleted file mode 100644 index 5cc41a21..00000000 --- a/rust/automerge/src/storage/columns/column_builder.rs +++ /dev/null @@ -1,199 +0,0 @@ -use std::ops::Range; - -use crate::columnar::column_range::{ - generic::{GenericColumnRange, GroupRange, GroupedColumnRange, SimpleColRange}, - BooleanRange, DeltaRange, RawRange, RleRange, ValueRange, -}; - -use super::{Column, ColumnId, ColumnSpec}; - -pub(crate) struct ColumnBuilder; - -impl ColumnBuilder { - pub(crate) fn build_actor(spec: ColumnSpec, range: RleRange) -> Column { - Column::new( - spec, - GenericColumnRange::Simple(SimpleColRange::RleInt(range)), - ) - } - - pub(crate) fn build_string(spec: ColumnSpec, range: RleRange) -> Column { - Column::new( - spec, - GenericColumnRange::Simple(SimpleColRange::RleString(range)), - ) - } - - pub(crate) fn build_integer(spec: ColumnSpec, range: RleRange) -> Column { - Column::new( - spec, - GenericColumnRange::Simple(SimpleColRange::RleInt(range)), - ) - } - - pub(crate) fn build_delta_integer(spec: ColumnSpec, range: DeltaRange) -> Column { - Column::new( - spec, - GenericColumnRange::Simple(SimpleColRange::Delta(range)), - ) - } - - pub(crate) fn build_boolean(spec: ColumnSpec, range: BooleanRange) -> Column { - Column::new( - spec, - 
GenericColumnRange::Simple(SimpleColRange::Boolean(range)), - ) - } - - pub(crate) fn start_value( - spec: ColumnSpec, - meta: RleRange, - ) -> AwaitingRawColumnValueBuilder { - AwaitingRawColumnValueBuilder { spec, meta } - } - - pub(crate) fn start_group(spec: ColumnSpec, num: RleRange) -> GroupBuilder { - GroupBuilder { - spec, - num_range: num, - columns: Vec::new(), - } - } -} - -pub(crate) struct AwaitingRawColumnValueBuilder { - spec: ColumnSpec, - meta: RleRange, -} - -impl AwaitingRawColumnValueBuilder { - pub(crate) fn id(&self) -> ColumnId { - self.spec.id() - } - - pub(crate) fn meta_range(&self) -> &RleRange { - &self.meta - } - - pub(crate) fn build(&mut self, raw: RawRange) -> Column { - Column::new( - self.spec, - GenericColumnRange::Value(ValueRange::new(self.meta.clone(), raw)), - ) - } -} - -#[derive(Debug)] -pub(crate) struct GroupBuilder { - spec: ColumnSpec, - num_range: RleRange, - columns: Vec, -} - -impl GroupBuilder { - pub(crate) fn range(&self) -> Range { - let start = self.num_range.start(); - let end = self - .columns - .last() - .map(|c| c.range().end) - .unwrap_or_else(|| self.num_range.end()); - start..end - } - - pub(crate) fn add_actor(&mut self, _spec: ColumnSpec, range: Range) { - self.columns - .push(GroupedColumnRange::Simple(SimpleColRange::RleInt( - range.into(), - ))); - } - - pub(crate) fn add_string(&mut self, _spec: ColumnSpec, range: Range) { - self.columns - .push(GroupedColumnRange::Simple(SimpleColRange::RleString( - range.into(), - ))); - } - - pub(crate) fn add_integer(&mut self, _spec: ColumnSpec, range: Range) { - self.columns - .push(GroupedColumnRange::Simple(SimpleColRange::RleInt( - range.into(), - ))); - } - - pub(crate) fn add_delta_integer(&mut self, _spec: ColumnSpec, range: Range) { - self.columns - .push(GroupedColumnRange::Simple(SimpleColRange::Delta( - range.into(), - ))); - } - - pub(crate) fn add_boolean(&mut self, _spec: ColumnSpec, range: Range) { - self.columns - 
.push(GroupedColumnRange::Simple(SimpleColRange::Boolean( - range.into(), - ))); - } - - pub(crate) fn start_value( - &mut self, - _spec: ColumnSpec, - meta: Range, - ) -> GroupAwaitingValue { - GroupAwaitingValue { - spec: self.spec, - num_range: self.num_range.clone(), - columns: std::mem::take(&mut self.columns), - val_meta: meta.into(), - } - } - - pub(crate) fn finish(&mut self) -> Column { - Column::new( - self.spec, - GenericColumnRange::Group(GroupRange::new( - self.num_range.clone(), - std::mem::take(&mut self.columns), - )), - ) - } -} - -#[derive(Debug)] -pub(crate) struct GroupAwaitingValue { - spec: ColumnSpec, - num_range: RleRange, - columns: Vec, - val_meta: RleRange, -} - -impl GroupAwaitingValue { - pub(crate) fn finish_empty(&mut self) -> GroupBuilder { - self.columns.push(GroupedColumnRange::Value(ValueRange::new( - self.val_meta.clone(), - (0..0).into(), - ))); - GroupBuilder { - spec: self.spec, - num_range: self.num_range.clone(), - columns: std::mem::take(&mut self.columns), - } - } - - pub(crate) fn finish_value(&mut self, raw: Range) -> GroupBuilder { - self.columns.push(GroupedColumnRange::Value(ValueRange::new( - self.val_meta.clone(), - raw.into(), - ))); - GroupBuilder { - spec: self.spec, - num_range: self.num_range.clone(), - columns: std::mem::take(&mut self.columns), - } - } - - pub(crate) fn range(&self) -> Range { - self.num_range.start()..self.val_meta.end() - } -} diff --git a/rust/automerge/src/storage/columns/column_specification.rs b/rust/automerge/src/storage/columns/column_specification.rs deleted file mode 100644 index 5bde0e7a..00000000 --- a/rust/automerge/src/storage/columns/column_specification.rs +++ /dev/null @@ -1,285 +0,0 @@ -/// An implementation of column specifications as specified in [1] -/// -/// [1]: https://alexjg.github.io/automerge-storage-docs/#column-specifications -#[derive(Eq, PartialEq, Clone, Copy)] -pub(crate) struct ColumnSpec(u32); - -impl ColumnSpec { - pub(crate) fn new(id: ColumnId, col_type: 
ColumnType, deflate: bool) -> Self { - let mut raw = id.0 << 4; - raw |= u8::from(col_type) as u32; - if deflate { - raw |= 0b00001000; - } else { - raw &= 0b11110111; - } - ColumnSpec(raw) - } - - pub(crate) fn col_type(&self) -> ColumnType { - self.0.to_be_bytes()[3].into() - } - - pub(crate) fn id(&self) -> ColumnId { - ColumnId(self.0 >> 4) - } - - pub(crate) fn deflate(&self) -> bool { - self.0 & 0b00001000 > 0 - } - - pub(crate) fn deflated(&self) -> Self { - Self::new(self.id(), self.col_type(), true) - } - - pub(crate) fn inflated(&self) -> Self { - Self::new(self.id(), self.col_type(), false) - } - - pub(crate) fn normalize(&self) -> Normalized { - Normalized(self.0 & 0b11110111) - } -} - -#[derive(PartialEq, PartialOrd)] -pub(crate) struct Normalized(u32); - -impl std::fmt::Debug for ColumnSpec { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!( - f, - "ColumnSpec(id: {:?}, type: {}, deflate: {})", - self.id(), - self.col_type(), - self.deflate() - ) - } -} - -#[derive(Eq, PartialEq, Clone, Copy)] -pub(crate) struct ColumnId(u32); - -impl ColumnId { - pub(crate) const fn new(raw: u32) -> Self { - ColumnId(raw) - } -} - -impl From for ColumnId { - fn from(raw: u32) -> Self { - Self(raw) - } -} - -impl std::fmt::Debug for ColumnId { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.0.fmt(f) - } -} - -/// The differente possible column types, as specified in [1] -/// -/// [1]: https://alexjg.github.io/automerge-storage-docs/#column-specifications -#[derive(Eq, PartialEq, Clone, Copy, Debug)] -pub(crate) enum ColumnType { - Group, - Actor, - Integer, - DeltaInteger, - Boolean, - String, - ValueMetadata, - Value, -} - -impl std::fmt::Display for ColumnType { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::Group => write!(f, "Group"), - Self::Actor => write!(f, "Actor"), - Self::Integer => write!(f, "Integer"), - Self::DeltaInteger => write!(f, 
"DeltaInteger"), - Self::Boolean => write!(f, "Boolean"), - Self::String => write!(f, "String"), - Self::ValueMetadata => write!(f, "ValueMetadata"), - Self::Value => write!(f, "Value"), - } - } -} - -impl From for ColumnType { - fn from(v: u8) -> Self { - let type_bits = v & 0b00000111; - match type_bits { - 0 => Self::Group, - 1 => Self::Actor, - 2 => Self::Integer, - 3 => Self::DeltaInteger, - 4 => Self::Boolean, - 5 => Self::String, - 6 => Self::ValueMetadata, - 7 => Self::Value, - _ => unreachable!(), - } - } -} - -impl From for u8 { - fn from(ct: ColumnType) -> Self { - match ct { - ColumnType::Group => 0, - ColumnType::Actor => 1, - ColumnType::Integer => 2, - ColumnType::DeltaInteger => 3, - ColumnType::Boolean => 4, - ColumnType::String => 5, - ColumnType::ValueMetadata => 6, - ColumnType::Value => 7, - } - } -} - -impl From for ColumnSpec { - fn from(raw: u32) -> Self { - ColumnSpec(raw) - } -} - -impl From for u32 { - fn from(spec: ColumnSpec) -> Self { - spec.0 - } -} - -impl From<[u8; 4]> for ColumnSpec { - fn from(raw: [u8; 4]) -> Self { - u32::from_be_bytes(raw).into() - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn column_spec_encoding() { - struct Scenario { - id: ColumnId, - col_type: ColumnType, - int_val: u32, - } - - let scenarios = vec![ - Scenario { - id: ColumnId(7), - col_type: ColumnType::Group, - int_val: 112, - }, - Scenario { - id: ColumnId(0), - col_type: ColumnType::Actor, - int_val: 1, - }, - Scenario { - id: ColumnId(0), - col_type: ColumnType::Integer, - int_val: 2, - }, - Scenario { - id: ColumnId(1), - col_type: ColumnType::DeltaInteger, - int_val: 19, - }, - Scenario { - id: ColumnId(3), - col_type: ColumnType::Boolean, - int_val: 52, - }, - Scenario { - id: ColumnId(1), - col_type: ColumnType::String, - int_val: 21, - }, - Scenario { - id: ColumnId(5), - col_type: ColumnType::ValueMetadata, - int_val: 86, - }, - Scenario { - id: ColumnId(5), - col_type: ColumnType::Value, - int_val: 87, - }, - ]; - - for 
(index, scenario) in scenarios.into_iter().enumerate() { - let spec = ColumnSpec::new(scenario.id, scenario.col_type, false); - - let encoded_val = u32::from(spec); - if encoded_val != scenario.int_val { - panic!( - "Scenario {} failed encoding: expected {} but got {}", - index + 1, - scenario.int_val, - encoded_val - ); - } - - if spec.col_type() != scenario.col_type { - panic!( - "Scenario {} failed col type: expected {:?} but got {:?}", - index + 1, - scenario.col_type, - spec.col_type() - ); - } - - if spec.deflate() { - panic!( - "Scenario {} failed: spec returned true for deflate, should have been false", - index + 1 - ); - } - - if spec.id() != scenario.id { - panic!( - "Scenario {} failed id: expected {:?} but got {:?}", - index + 1, - scenario.id, - spec.id() - ); - } - - let deflated = ColumnSpec::new(scenario.id, scenario.col_type, true); - - if deflated.id() != spec.id() { - panic!("Scenario {} failed deflate id test", index + 1); - } - - if deflated.col_type() != spec.col_type() { - panic!("Scenario {} failed col type test", index + 1); - } - - if !deflated.deflate() { - panic!( - "Scenario {} failed: when deflate bit set deflate returned false", - index + 1 - ); - } - - let expected = scenario.int_val | 0b00001000; - if expected != u32::from(deflated) { - panic!( - "Scenario {} failed deflate bit test, expected {} got {}", - index + 1, - expected, - u32::from(deflated) - ); - } - - if deflated.normalize() != spec.normalize() { - panic!("Scenario {} failed normalize test", index + 1); - } - } - } -} diff --git a/rust/automerge/src/storage/columns/raw_column.rs b/rust/automerge/src/storage/columns/raw_column.rs deleted file mode 100644 index ac9a5759..00000000 --- a/rust/automerge/src/storage/columns/raw_column.rs +++ /dev/null @@ -1,272 +0,0 @@ -use std::{io::Read, marker::PhantomData, ops::Range}; - -use crate::storage::parse; - -use super::{compression, ColumnSpec}; - -/// This is a "raw" column in the sense that it is just the column 
specification[1] and range. This -/// is in contrast to [`super::Column`] which is aware of composite columns such as value columns[2] and -/// group columns[3]. -/// -/// `RawColumn` is generally an intermediary object which is parsed into a [`super::Column`]. -/// -/// The type parameter `T` is a witness to whether this column is compressed. If `T: -/// compression::Uncompressed` then we have proved that this column is not compressed, otherwise it -/// may be compressed. -/// -/// [1]: https://alexjg.github.io/automerge-storage-docs/#column-specifications -/// [2]: https://alexjg.github.io/automerge-storage-docs/#raw-value-columns -/// [3]: https://alexjg.github.io/automerge-storage-docs/#group-columns -#[derive(Clone, Debug, PartialEq)] -pub(crate) struct RawColumn { - spec: ColumnSpec, - /// The location of the data in the column data block. Note that this range starts at the - /// beginning of the column data block - i.e. the `data` attribute of the first column in the - /// column data block will be 0 - not at the start of the chunk. 
- data: Range, - _phantom: PhantomData, -} - -impl RawColumn { - pub(crate) fn new(spec: ColumnSpec, data: Range) -> Self { - Self { - spec: ColumnSpec::new(spec.id(), spec.col_type(), false), - data, - _phantom: PhantomData, - } - } -} - -impl RawColumn { - pub(crate) fn spec(&self) -> ColumnSpec { - self.spec - } - - pub(crate) fn data(&self) -> Range { - self.data.clone() - } - - fn compress(&self, input: &[u8], out: &mut Vec, threshold: usize) -> (ColumnSpec, usize) { - let (spec, len) = if self.data.len() < threshold || self.spec.deflate() { - out.extend(&input[self.data.clone()]); - (self.spec, self.data.len()) - } else { - let mut deflater = flate2::bufread::DeflateEncoder::new( - &input[self.data.clone()], - flate2::Compression::default(), - ); - //This unwrap should be okay as we're reading and writing to in memory buffers - (self.spec.deflated(), deflater.read_to_end(out).unwrap()) - }; - (spec, len) - } - - pub(crate) fn uncompressed(&self) -> Option> { - if self.spec.deflate() { - None - } else { - Some(RawColumn { - spec: self.spec, - data: self.data.clone(), - _phantom: PhantomData, - }) - } - } - - fn decompress( - &self, - input: &[u8], - out: &mut Vec, - ) -> Result<(ColumnSpec, usize), ParseError> { - let len = if self.spec.deflate() { - let mut inflater = flate2::bufread::DeflateDecoder::new(&input[self.data.clone()]); - inflater.read_to_end(out).map_err(ParseError::Deflate)? 
- } else { - out.extend(&input[self.data.clone()]); - self.data.len() - }; - Ok((self.spec.inflated(), len)) - } -} - -#[derive(Clone, Debug, PartialEq)] -pub(crate) struct RawColumns(Vec>); - -impl RawColumns { - /// Returns `Some` if no column in this set of columns is marked as compressed - pub(crate) fn uncompressed(&self) -> Option> { - let mut result = Vec::with_capacity(self.0.len()); - for col in &self.0 { - if let Some(uncomp) = col.uncompressed() { - result.push(uncomp); - } else { - return None; - } - } - Some(RawColumns(result)) - } - - /// Write each column in `input` represented by `self` into `out`, possibly compressing. - /// - /// # Returns - /// The `RawColumns` corresponding to the data written to `out` - /// - /// # Panics - /// * If any of the ranges in `self` is outside the bounds of `input` - pub(crate) fn compress( - &self, - input: &[u8], - out: &mut Vec, - threshold: usize, - ) -> RawColumns { - let mut result = Vec::with_capacity(self.0.len()); - let mut start = 0; - for col in &self.0 { - let (spec, len) = col.compress(input, out, threshold); - result.push(RawColumn { - spec, - data: start..(start + len), - _phantom: PhantomData::, - }); - start += len; - } - RawColumns(result) - } - - /// Read each column from `input` and write to `out`, decompressing any compressed columns - /// - /// # Returns - /// The `RawColumns` corresponding to the data written to `out` - /// - /// # Panics - /// * If any of the ranges in `self` is outside the bounds of `input` - pub(crate) fn uncompress( - &self, - input: &[u8], - out: &mut Vec, - ) -> Result, ParseError> { - let mut result = Vec::with_capacity(self.0.len()); - let mut start = 0; - for col in &self.0 { - let (spec, len) = if let Some(decomp) = col.uncompressed() { - out.extend(&input[decomp.data.clone()]); - (decomp.spec, decomp.data.len()) - } else { - col.decompress(input, out)? 
- }; - result.push(RawColumn { - spec, - data: start..(start + len), - _phantom: PhantomData::, - }); - start += len; - } - Ok(RawColumns(result)) - } -} - -impl FromIterator> for RawColumns { - fn from_iter>>(iter: U) -> Self { - Self(iter.into_iter().filter(|c| !c.data.is_empty()).collect()) - } -} - -impl FromIterator<(ColumnSpec, Range)> for RawColumns { - fn from_iter)>>(iter: T) -> Self { - Self( - iter.into_iter() - .filter_map(|(spec, data)| { - if data.is_empty() { - None - } else { - Some(RawColumn { - spec, - data, - _phantom: PhantomData, - }) - } - }) - .collect(), - ) - } -} - -#[derive(Debug, thiserror::Error)] -pub(crate) enum ParseError { - #[error("columns were not in normalized order")] - NotInNormalOrder, - #[error(transparent)] - Leb128(#[from] parse::leb128::Error), - #[error(transparent)] - Deflate(#[from] std::io::Error), -} - -impl RawColumns { - pub(crate) fn parse(input: parse::Input<'_>) -> parse::ParseResult<'_, Self, E> - where - E: From, - { - let i = input; - let (i, num_columns) = parse::leb128_u64(i).map_err(|e| e.lift())?; - let (i, specs_and_lens) = parse::apply_n( - num_columns as usize, - parse::tuple2( - parse::map(parse::leb128_u32, ColumnSpec::from), - parse::leb128_u64, - ), - )(i) - .map_err(|e| e.lift())?; - let columns: Vec> = specs_and_lens - .into_iter() - .scan(0_usize, |offset, (spec, len)| { - // Note: we use a saturating add here as len was passed over the network - // and so could be anything. If the addition does every saturate we would - // expect parsing to fail later (but at least it won't panic!). 
- let end = offset.saturating_add(len as usize); - let data = *offset..end; - *offset = end; - Some(RawColumn { - spec, - data, - _phantom: PhantomData, - }) - }) - .collect::>(); - if !are_normal_sorted(&columns) { - return Err(parse::ParseError::Error( - ParseError::NotInNormalOrder.into(), - )); - } - Ok((i, RawColumns(columns))) - } -} - -impl RawColumns { - pub(crate) fn write(&self, out: &mut Vec) -> usize { - let mut written = leb128::write::unsigned(out, self.0.len() as u64).unwrap(); - for col in &self.0 { - written += leb128::write::unsigned(out, u32::from(col.spec) as u64).unwrap(); - written += leb128::write::unsigned(out, col.data.len() as u64).unwrap(); - } - written - } - - pub(crate) fn total_column_len(&self) -> usize { - self.0.iter().map(|c| c.data.len()).sum() - } - - pub(crate) fn iter(&self) -> impl Iterator> + '_ { - self.0.iter() - } -} - -fn are_normal_sorted(cols: &[RawColumn]) -> bool { - if cols.len() > 1 { - for (i, col) in cols[1..].iter().enumerate() { - if col.spec.normalize() < cols[i].spec.normalize() { - return false; - } - } - } - true -} diff --git a/rust/automerge/src/storage/convert.rs b/rust/automerge/src/storage/convert.rs deleted file mode 100644 index 48f83d03..00000000 --- a/rust/automerge/src/storage/convert.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod op_as_changeop; -pub(crate) use op_as_changeop::op_as_actor_id; - -mod op_as_docop; -pub(crate) use op_as_docop::op_as_docop; diff --git a/rust/automerge/src/storage/convert/op_as_changeop.rs b/rust/automerge/src/storage/convert/op_as_changeop.rs deleted file mode 100644 index 00b5e940..00000000 --- a/rust/automerge/src/storage/convert/op_as_changeop.rs +++ /dev/null @@ -1,128 +0,0 @@ -/// Types for converting an OpTree op into a `ChangeOp` or a `DocOp` -use std::borrow::Cow; - -use crate::{ - convert, - op_set::OpSetMetadata, - storage::AsChangeOp, - types::{ActorId, Key, ObjId, Op, OpId, OpType, ScalarValue}, -}; - -/// Wrap an op in an implementation of `AsChangeOp` which 
represents actor IDs using a reference to -/// the actor ID stored in the metadata. -/// -/// Note that the methods of `AsChangeOp` will panic if the actor is missing from the metadata -pub(crate) fn op_as_actor_id<'a>( - obj: &'a ObjId, - op: &'a Op, - metadata: &'a OpSetMetadata, -) -> OpWithMetadata<'a> { - OpWithMetadata { obj, op, metadata } -} - -pub(crate) struct OpWithMetadata<'a> { - obj: &'a ObjId, - op: &'a Op, - metadata: &'a OpSetMetadata, -} - -impl<'a> OpWithMetadata<'a> { - fn wrap(&self, opid: &'a OpId) -> OpIdWithMetadata<'a> { - OpIdWithMetadata { - opid, - metadata: self.metadata, - } - } -} - -pub(crate) struct OpIdWithMetadata<'a> { - opid: &'a OpId, - metadata: &'a OpSetMetadata, -} - -impl<'a> convert::OpId<&'a ActorId> for OpIdWithMetadata<'a> { - fn counter(&self) -> u64 { - self.opid.counter() - } - - fn actor(&self) -> &'a ActorId { - self.metadata.actors.get(self.opid.actor()) - } -} - -pub(crate) struct PredWithMetadata<'a> { - op: &'a Op, - offset: usize, - metadata: &'a OpSetMetadata, -} - -impl<'a> ExactSizeIterator for PredWithMetadata<'a> { - fn len(&self) -> usize { - self.op.pred.len() - } -} - -impl<'a> Iterator for PredWithMetadata<'a> { - type Item = OpIdWithMetadata<'a>; - - fn next(&mut self) -> Option { - if let Some(op) = self.op.pred.get(self.offset) { - self.offset += 1; - Some(OpIdWithMetadata { - opid: op, - metadata: self.metadata, - }) - } else { - None - } - } -} - -impl<'a> AsChangeOp<'a> for OpWithMetadata<'a> { - type ActorId = &'a ActorId; - type OpId = OpIdWithMetadata<'a>; - type PredIter = PredWithMetadata<'a>; - - fn action(&self) -> u64 { - self.op.action.action_index() - } - - fn insert(&self) -> bool { - self.op.insert - } - - fn val(&self) -> Cow<'a, ScalarValue> { - match &self.op.action { - OpType::Make(..) 
| OpType::Delete => Cow::Owned(ScalarValue::Null), - OpType::Increment(i) => Cow::Owned(ScalarValue::Int(*i)), - OpType::Put(s) => Cow::Borrowed(s), - } - } - - fn obj(&self) -> convert::ObjId { - if self.obj.is_root() { - convert::ObjId::Root - } else { - convert::ObjId::Op(OpIdWithMetadata { - opid: self.obj.opid(), - metadata: self.metadata, - }) - } - } - - fn pred(&self) -> Self::PredIter { - PredWithMetadata { - op: self.op, - offset: 0, - metadata: self.metadata, - } - } - - fn key(&self) -> convert::Key<'a, Self::OpId> { - match &self.op.key { - Key::Map(idx) => convert::Key::Prop(Cow::Owned(self.metadata.props.get(*idx).into())), - Key::Seq(e) if e.is_head() => convert::Key::Elem(convert::ElemId::Head), - Key::Seq(e) => convert::Key::Elem(convert::ElemId::Op(self.wrap(&e.0))), - } - } -} diff --git a/rust/automerge/src/storage/convert/op_as_docop.rs b/rust/automerge/src/storage/convert/op_as_docop.rs deleted file mode 100644 index 8d237354..00000000 --- a/rust/automerge/src/storage/convert/op_as_docop.rs +++ /dev/null @@ -1,145 +0,0 @@ -use std::borrow::Cow; - -use crate::{ - convert, - indexed_cache::IndexedCache, - storage::AsDocOp, - types::{ElemId, Key, ObjId, Op, OpId, OpType, ScalarValue}, -}; - -/// Create an [`AsDocOp`] implementation for a [`crate::types::Op`] -/// -/// # Arguments -/// * actors - A vector where the i'th element is the actor index of the document encoding of actor -/// i, as returned by [`OpSetMetadata.actors.encode_index`] -/// * props - An indexed cache containing the properties in this op_as_docop -/// * obj - The object ID this op refers too -/// * op - The op itself -/// -/// # Panics -/// -/// The methods of the resulting `AsDocOp` implementation will panic if any actor ID in the op -/// references an index not in `actors` or a property not in `props` -pub(crate) fn op_as_docop<'a>( - actors: &'a [usize], - props: &'a IndexedCache, - obj: &'a ObjId, - op: &'a Op, -) -> OpAsDocOp<'a> { - OpAsDocOp { - op, - obj, - 
actor_lookup: actors, - props, - } -} - -pub(crate) struct OpAsDocOp<'a> { - op: &'a Op, - obj: &'a ObjId, - actor_lookup: &'a [usize], - props: &'a IndexedCache, -} - -#[derive(Debug)] -pub(crate) struct DocOpId { - actor: usize, - counter: u64, -} - -impl convert::OpId for DocOpId { - fn actor(&self) -> usize { - self.actor - } - - fn counter(&self) -> u64 { - self.counter - } -} - -impl<'a> OpAsDocOp<'a> {} - -impl<'a> AsDocOp<'a> for OpAsDocOp<'a> { - type ActorId = usize; - type OpId = DocOpId; - type SuccIter = OpAsDocOpSuccIter<'a>; - - fn id(&self) -> Self::OpId { - translate(self.actor_lookup, &self.op.id) - } - - fn obj(&self) -> convert::ObjId { - if self.obj.is_root() { - convert::ObjId::Root - } else { - convert::ObjId::Op(translate(self.actor_lookup, self.obj.opid())) - } - } - - fn key(&self) -> convert::Key<'a, Self::OpId> { - match self.op.key { - Key::Map(idx) => convert::Key::Prop(Cow::Owned(self.props.get(idx).into())), - Key::Seq(e) if e.is_head() => convert::Key::Elem(convert::ElemId::Head), - Key::Seq(ElemId(o)) => { - convert::Key::Elem(convert::ElemId::Op(translate(self.actor_lookup, &o))) - } - } - } - - fn val(&self) -> Cow<'a, crate::ScalarValue> { - match &self.op.action { - OpType::Put(v) => Cow::Borrowed(v), - OpType::Increment(i) => Cow::Owned(ScalarValue::Int(*i)), - _ => Cow::Owned(ScalarValue::Null), - } - } - - fn succ(&self) -> Self::SuccIter { - OpAsDocOpSuccIter { - op: self.op, - offset: 0, - actor_index: self.actor_lookup, - } - } - - fn insert(&self) -> bool { - self.op.insert - } - - fn action(&self) -> u64 { - self.op.action.action_index() - } -} - -pub(crate) struct OpAsDocOpSuccIter<'a> { - op: &'a Op, - offset: usize, - actor_index: &'a [usize], -} - -impl<'a> Iterator for OpAsDocOpSuccIter<'a> { - type Item = DocOpId; - - fn next(&mut self) -> Option { - if let Some(s) = self.op.succ.get(self.offset) { - self.offset += 1; - Some(translate(self.actor_index, s)) - } else { - None - } - } -} - -impl<'a> ExactSizeIterator 
for OpAsDocOpSuccIter<'a> { - fn len(&self) -> usize { - self.op.succ.len() - } -} - -fn translate<'a>(actor_lookup: &'a [usize], op: &'a OpId) -> DocOpId { - let index = actor_lookup[op.actor()]; - DocOpId { - actor: index, - counter: op.counter(), - } -} diff --git a/rust/automerge/src/storage/document.rs b/rust/automerge/src/storage/document.rs deleted file mode 100644 index ecef0bfd..00000000 --- a/rust/automerge/src/storage/document.rs +++ /dev/null @@ -1,343 +0,0 @@ -use std::{borrow::Cow, ops::Range}; - -use super::{parse, shift_range, ChunkType, Columns, Header, RawColumns}; - -use crate::{convert, ActorId, ChangeHash}; - -mod doc_op_columns; -use doc_op_columns::DocOpColumns; -pub(crate) use doc_op_columns::{AsDocOp, DocOp, ReadDocOpError}; -mod doc_change_columns; -use doc_change_columns::DocChangeColumns; -pub(crate) use doc_change_columns::{AsChangeMeta, ChangeMetadata, ReadChangeError}; -mod compression; - -#[allow(dead_code)] -pub(crate) enum CompressConfig { - None, - Threshold(usize), -} - -#[derive(Debug)] -pub(crate) struct Document<'a> { - bytes: Cow<'a, [u8]>, - #[allow(dead_code)] - compressed_bytes: Option>, - header: Header, - actors: Vec, - heads: Vec, - op_metadata: DocOpColumns, - op_bytes: Range, - change_metadata: DocChangeColumns, - change_bytes: Range, - #[allow(dead_code)] - head_indices: Vec, -} - -#[derive(thiserror::Error, Debug)] -pub(crate) enum ParseError { - #[error(transparent)] - Leb128(#[from] parse::leb128::Error), - #[error(transparent)] - RawColumns(#[from] crate::storage::columns::raw_column::ParseError), - #[error("bad column layout for {column_type}s: {error}")] - BadColumnLayout { - column_type: &'static str, - error: super::columns::BadColumnLayout, - }, - #[error(transparent)] - BadDocOps(#[from] doc_op_columns::Error), - #[error(transparent)] - BadDocChanges(#[from] doc_change_columns::ReadChangeError), -} - -impl<'a> Document<'a> { - /// Parse a document chunk. 
Input must be the entire chunk including the header and magic - /// bytes but the header must already have been parsed. That is to say, this is expected to be - /// used like so: - /// - /// ```rust,ignore - /// # use automerge::storage::{parse::{ParseResult, Input}, Document, Header}; - /// # fn main() -> ParseResult<(), ()> { - /// let chunkbytes: &[u8] = todo!(); - /// let input = Input::new(chunkbytes); - /// let (i, header) = Header::parse(input)?; - /// let (i, doc) = Document::parse(i, header)?; - /// # } - /// ``` - pub(crate) fn parse( - input: parse::Input<'a>, - header: Header, - ) -> parse::ParseResult<'a, Document<'a>, ParseError> { - let i = input; - - // Because some columns in a document may be compressed we do some funky stuff when - // parsing. As we're parsing the chunk we split the data into four parts: - // - // .----------------. - // | Prefix | - // |.--------------.| - // || Actors || - // || Heads || - // || Change Meta || - // || Ops Meta || - // |'--------------'| - // +----------------+ - // | Change data | - // +----------------+ - // | Ops data | - // +----------------+ - // | Suffix | - // |.--------------.| - // || Head indices || - // |'--------------'| - // '----------------' - // - // We record the range of each of these sections using `parse::range_of`. Later, we check - // if any of the column definitions in change meta or ops meta specify that their columns - // are compressed. If there are compressed columns then we copy the uncompressed parts of the - // input data to a new output vec, then decompress the compressed parts. 
Specifically we do - // the following: - // - // * Copy everything in prefix to the output buffer - // * If any of change columns are compressed, copy all of change data to the output buffer - // decompressing each compressed column - // * Likewise if any of ops columns are compressed copy the data decompressing as required - // * Finally copy the suffix - // - // The reason for all this work is that we end up keeping all of the data behind the - // document chunk in a single Vec, which plays nicely with the cache and makes dumping the - // document to disk or network straightforward. - - // parse everything in the prefix - let ( - i, - parse::RangeOf { - range: prefix, - value: (actors, heads, change_meta, ops_meta), - }, - ) = parse::range_of( - |i| -> parse::ParseResult<'_, _, ParseError> { - let (i, actors) = parse::length_prefixed(parse::actor_id)(i)?; - let (i, heads) = parse::length_prefixed(parse::change_hash)(i)?; - let (i, change_meta) = RawColumns::parse::(i)?; - let (i, ops_meta) = RawColumns::parse::(i)?; - Ok((i, (actors, heads, change_meta, ops_meta))) - }, - i, - )?; - - // parse the change data - let (i, parse::RangeOf { range: changes, .. }) = - parse::range_of(|i| parse::take_n(change_meta.total_column_len(), i), i)?; - - // parse the ops data - let (i, parse::RangeOf { range: ops, .. 
}) = - parse::range_of(|i| parse::take_n(ops_meta.total_column_len(), i), i)?; - - // parse the suffix, which may be empty if this document was produced by an older version - // of the JS automerge implementation - let (i, suffix, head_indices) = if i.is_empty() { - (i, 0..0, Vec::new()) - } else { - let ( - i, - parse::RangeOf { - range: suffix, - value: head_indices, - }, - ) = parse::range_of( - |i| parse::apply_n(heads.len(), parse::leb128_u64::)(i), - i, - )?; - (i, suffix, head_indices) - }; - - let compression::Decompressed { - change_bytes, - op_bytes, - uncompressed, - compressed, - changes, - ops, - } = compression::decompress(compression::Args { - prefix: prefix.start, - suffix: suffix.start, - original: Cow::Borrowed(input.bytes()), - changes: compression::Cols { - data: changes, - raw_columns: change_meta, - }, - ops: compression::Cols { - data: ops, - raw_columns: ops_meta, - }, - extra_args: (), - }) - .map_err(|e| parse::ParseError::Error(ParseError::RawColumns(e)))?; - - let ops_layout = Columns::parse(op_bytes.len(), ops.iter()).map_err(|e| { - parse::ParseError::Error(ParseError::BadColumnLayout { - column_type: "ops", - error: e, - }) - })?; - let ops_cols = - DocOpColumns::try_from(ops_layout).map_err(|e| parse::ParseError::Error(e.into()))?; - - let change_layout = Columns::parse(change_bytes.len(), changes.iter()).map_err(|e| { - parse::ParseError::Error(ParseError::BadColumnLayout { - column_type: "changes", - error: e, - }) - })?; - let change_cols = DocChangeColumns::try_from(change_layout) - .map_err(|e| parse::ParseError::Error(e.into()))?; - - Ok(( - i, - Document { - bytes: uncompressed, - compressed_bytes: compressed, - header, - actors, - heads, - op_metadata: ops_cols, - op_bytes, - change_metadata: change_cols, - change_bytes, - head_indices, - }, - )) - } - - pub(crate) fn new<'b, I, C, IC, D, O>( - mut actors: Vec, - heads_with_indices: Vec<(ChangeHash, usize)>, - ops: I, - changes: IC, - compress: CompressConfig, - ) -> 
Document<'static> - where - I: Iterator + Clone + ExactSizeIterator, - O: convert::OpId, - D: AsDocOp<'b, OpId = O>, - C: AsChangeMeta<'b>, - IC: Iterator + Clone, - { - let mut ops_out = Vec::new(); - let ops_meta = DocOpColumns::encode(ops, &mut ops_out); - - let mut change_out = Vec::new(); - let change_meta = DocChangeColumns::encode(changes, &mut change_out); - actors.sort_unstable(); - - let mut data = Vec::with_capacity(ops_out.len() + change_out.len()); - leb128::write::unsigned(&mut data, actors.len() as u64).unwrap(); - for actor in &actors { - leb128::write::unsigned(&mut data, actor.to_bytes().len() as u64).unwrap(); - data.extend(actor.to_bytes()); - } - leb128::write::unsigned(&mut data, heads_with_indices.len() as u64).unwrap(); - for (head, _) in &heads_with_indices { - data.extend(head.as_bytes()); - } - let prefix_len = data.len(); - - change_meta.raw_columns().write(&mut data); - ops_meta.raw_columns().write(&mut data); - let change_start = data.len(); - let change_end = change_start + change_out.len(); - data.extend(change_out); - let ops_start = data.len(); - let ops_end = ops_start + ops_out.len(); - data.extend(ops_out); - let suffix_start = data.len(); - - let head_indices = heads_with_indices - .iter() - .map(|(_, i)| *i as u64) - .collect::>(); - for index in &head_indices { - leb128::write::unsigned(&mut data, *index).unwrap(); - } - - let header = Header::new(ChunkType::Document, &data); - let mut bytes = Vec::with_capacity(data.len() + header.len()); - header.write(&mut bytes); - let header_len = bytes.len(); - bytes.extend(&data); - - let op_bytes = shift_range(ops_start..ops_end, header.len()); - let change_bytes = shift_range(change_start..change_end, header.len()); - - let compressed_bytes = if let CompressConfig::Threshold(threshold) = compress { - let compressed = Cow::Owned(compression::compress(compression::Args { - prefix: prefix_len + header.len(), - suffix: suffix_start + header.len(), - ops: compression::Cols { - 
raw_columns: ops_meta.raw_columns(), - data: op_bytes.clone(), - }, - changes: compression::Cols { - raw_columns: change_meta.raw_columns(), - data: change_bytes.clone(), - }, - original: Cow::Borrowed(&bytes), - extra_args: compression::CompressArgs { - threshold, - original_header_len: header_len, - }, - })); - Some(compressed) - } else { - None - }; - - Document { - actors, - bytes: Cow::Owned(bytes), - compressed_bytes, - header, - heads: heads_with_indices.into_iter().map(|(h, _)| h).collect(), - op_metadata: ops_meta, - op_bytes, - change_metadata: change_meta, - change_bytes, - head_indices, - } - } - - pub(crate) fn iter_ops( - &'a self, - ) -> impl Iterator> + Clone + 'a { - self.op_metadata.iter(&self.bytes[self.op_bytes.clone()]) - } - - pub(crate) fn iter_changes( - &'a self, - ) -> impl Iterator, ReadChangeError>> + Clone + 'a { - self.change_metadata - .iter(&self.bytes[self.change_bytes.clone()]) - } - - pub(crate) fn into_bytes(self) -> Vec { - if let Some(compressed) = self.compressed_bytes { - compressed.into_owned() - } else { - self.bytes.into_owned() - } - } - - pub(crate) fn checksum_valid(&self) -> bool { - self.header.checksum_valid() - } - - pub(crate) fn actors(&self) -> &[ActorId] { - &self.actors - } - - pub(crate) fn heads(&self) -> &[ChangeHash] { - &self.heads - } -} diff --git a/rust/automerge/src/storage/document/compression.rs b/rust/automerge/src/storage/document/compression.rs deleted file mode 100644 index 2f0e96ce..00000000 --- a/rust/automerge/src/storage/document/compression.rs +++ /dev/null @@ -1,356 +0,0 @@ -use std::{borrow::Cow, convert::Infallible, ops::Range}; - -use crate::storage::{ - columns::{compression, raw_column}, - shift_range, ChunkType, Header, RawColumns, -}; - -pub(super) struct Args<'a, T: compression::ColumnCompression, DirArgs> { - /// The original data of the entire document chunk (compressed or uncompressed) - pub(super) original: Cow<'a, [u8]>, - /// The number of bytes in the original before the 
beginning of the change column metadata - pub(super) prefix: usize, - /// The offset in the original after the end of the ops column data - pub(super) suffix: usize, - /// The column data for the changes - pub(super) changes: Cols, - /// The column data for the ops - pub(super) ops: Cols, - /// Additional arguments specific to the direction (compression or uncompression) - pub(super) extra_args: DirArgs, -} - -pub(super) struct CompressArgs { - pub(super) threshold: usize, - pub(super) original_header_len: usize, -} - -/// Compress a document chunk returning the compressed bytes -pub(super) fn compress(args: Args<'_, compression::Uncompressed, CompressArgs>) -> Vec { - let header_len = args.extra_args.original_header_len; - let threshold = args.extra_args.threshold; - // Wrap in a closure so we can use `?` in the construction but still force the compiler - // to check that the error type is `Infallible` - let result: Result<_, Infallible> = (|| { - Ok(Compression::::new( - args, - Compressing { - threshold, - header_len, - }, - ) - .changes()? - .ops()? - .write_data() - .finish()) - })(); - // We just checked the error is `Infallible` so unwrap is fine - result.unwrap() -} - -pub(super) fn decompress<'a>( - args: Args<'a, compression::Unknown, ()>, -) -> Result, raw_column::ParseError> { - match ( - args.changes.raw_columns.uncompressed(), - args.ops.raw_columns.uncompressed(), - ) { - (Some(changes), Some(ops)) => Ok(Decompressed { - changes, - ops, - compressed: None, - uncompressed: args.original, - change_bytes: args.changes.data, - op_bytes: args.ops.data, - }), - _ => Ok( - Compression::<'a, Decompressing, _>::new(args, Decompressing) - .changes()? - .ops()? 
- .write_data() - .finish(), - ), - } -} - -pub(super) struct Decompressed<'a> { - /// The original compressed data, if there was any - pub(super) compressed: Option>, - /// The final uncompressed data - pub(super) uncompressed: Cow<'a, [u8]>, - /// The ops column metadata - pub(super) ops: RawColumns, - /// The change column metadata - pub(super) changes: RawColumns, - /// The location of the change column data in the uncompressed data - pub(super) change_bytes: Range, - /// The location of the op column data in the uncompressed data - pub(super) op_bytes: Range, -} - -struct Compression<'a, D: Direction, S: CompressionState> { - args: Args<'a, D::In, D::Args>, - state: S, - direction: D, -} - -/// Some columns in the original data -pub(super) struct Cols { - /// The metadata for these columns - pub(super) raw_columns: RawColumns, - /// The location in the original chunk of the data for these columns - pub(super) data: Range, -} - -// Compression and decompression involve almost the same steps in either direction. This trait -// encapsulates that. -trait Direction: std::fmt::Debug { - type Out: compression::ColumnCompression; - type In: compression::ColumnCompression; - type Error; - type Args; - - /// This method represents the (de)compression process for a direction. 
The arguments are: - /// - /// * cols - The columns we are processing - /// * input - the entire document chunk - /// * out - the vector to place the processed columns in - /// * meta_out - the vector to place processed column metadata in - fn process( - &self, - cols: &Cols, - input: &[u8], - out: &mut Vec, - meta_out: &mut Vec, - ) -> Result, Self::Error>; -} -#[derive(Debug)] -struct Compressing { - threshold: usize, - header_len: usize, -} - -impl Direction for Compressing { - type Error = Infallible; - type Out = compression::Unknown; - type In = compression::Uncompressed; - type Args = CompressArgs; - - fn process( - &self, - cols: &Cols, - input: &[u8], - out: &mut Vec, - meta_out: &mut Vec, - ) -> Result, Self::Error> { - let start = out.len(); - let raw_columns = cols - .raw_columns - .compress(&input[cols.data.clone()], out, self.threshold); - raw_columns.write(meta_out); - Ok(Cols { - data: start..out.len(), - raw_columns, - }) - } -} - -#[derive(Debug)] -struct Decompressing; - -impl Direction for Decompressing { - type Error = raw_column::ParseError; - type Out = compression::Uncompressed; - type In = compression::Unknown; - type Args = (); - - fn process( - &self, - cols: &Cols, - input: &[u8], - out: &mut Vec, - meta_out: &mut Vec, - ) -> Result, raw_column::ParseError> { - let start = out.len(); - let raw_columns = cols - .raw_columns - .uncompress(&input[cols.data.clone()], out)?; - raw_columns.write(meta_out); - Ok(Cols { - data: start..out.len(), - raw_columns, - }) - } -} - -// Somewhat absurdly I (alex) kept getting the order of writing ops and changes wrong as well as -// the order that column metadata vs data should be written in. This is a type state to get the -// compiler to enforce that things are done in the right order. 
-trait CompressionState {} -impl CompressionState for Starting {} -impl CompressionState for Changes {} -impl CompressionState for ChangesAndOps {} -impl CompressionState for Finished {} - -/// We haven't done any processing yet -struct Starting { - /// The vector to write column data to - data_out: Vec, - /// The vector to write column metadata to - meta_out: Vec, -} - -/// We've processed the changes columns -struct Changes { - /// The `Cols` for the processed change columns - change_cols: Cols, - /// The vector to write column metadata to - meta_out: Vec, - /// The vector to write column data to - data_out: Vec, -} - -/// We've processed the ops columns -struct ChangesAndOps { - /// The `Cols` for the processed change columns - change_cols: Cols, - /// The `Cols` for the processed op columns - ops_cols: Cols, - /// The vector to write column metadata to - meta_out: Vec, - /// The vector to write column data to - data_out: Vec, -} - -/// We've written the column metadata and the op metadata for changes and ops to the output buffer -/// and added the prefix and suffix from the args. 
-struct Finished { - /// The `Cols` for the processed change columns - change_cols: Cols, - /// The `Cols` for the processed op columns - ops_cols: Cols, - /// The start of the change column metadata in the processed chunk - data_start: usize, - /// The processed chunk - out: Vec, -} - -impl<'a, D: Direction> Compression<'a, D, Starting> { - fn new(args: Args<'a, D::In, D::Args>, direction: D) -> Compression<'a, D, Starting> { - let mut meta_out = Vec::with_capacity(args.original.len() * 2); - meta_out.extend(&args.original[..args.prefix]); - Compression { - args, - direction, - state: Starting { - meta_out, - data_out: Vec::new(), - }, - } - } -} - -impl<'a, D: Direction> Compression<'a, D, Starting> { - fn changes(self) -> Result>, D::Error> { - let Starting { - mut data_out, - mut meta_out, - } = self.state; - let change_cols = self.direction.process( - &self.args.changes, - &self.args.original, - &mut data_out, - &mut meta_out, - )?; - Ok(Compression { - args: self.args, - direction: self.direction, - state: Changes { - change_cols, - meta_out, - data_out, - }, - }) - } -} - -impl<'a, D: Direction> Compression<'a, D, Changes> { - fn ops(self) -> Result>, D::Error> { - let Changes { - change_cols, - mut meta_out, - mut data_out, - } = self.state; - let ops_cols = self.direction.process( - &self.args.ops, - &self.args.original, - &mut data_out, - &mut meta_out, - )?; - Ok(Compression { - args: self.args, - direction: self.direction, - state: ChangesAndOps { - change_cols, - ops_cols, - meta_out, - data_out, - }, - }) - } -} - -impl<'a, D: Direction> Compression<'a, D, ChangesAndOps> { - fn write_data(self) -> Compression<'a, D, Finished> { - let ChangesAndOps { - data_out, - mut meta_out, - change_cols, - ops_cols, - } = self.state; - let data_start = meta_out.len(); - meta_out.extend(&data_out); - meta_out.extend(&self.args.original[self.args.suffix..]); - Compression { - args: self.args, - direction: self.direction, - state: Finished { - ops_cols, - 
change_cols, - out: meta_out, - data_start, - }, - } - } -} - -impl<'a> Compression<'a, Decompressing, Finished> { - fn finish(self) -> Decompressed<'a> { - let Finished { - change_cols, - ops_cols, - data_start, - out, - } = self.state; - Decompressed { - ops: ops_cols.raw_columns, - changes: change_cols.raw_columns, - uncompressed: Cow::Owned(out), - compressed: Some(self.args.original), - change_bytes: shift_range(change_cols.data, data_start), - op_bytes: shift_range(ops_cols.data, data_start), - } - } -} - -impl<'a> Compression<'a, Compressing, Finished> { - fn finish(self) -> Vec { - let Finished { out, .. } = self.state; - let headerless = &out[self.direction.header_len..]; - let header = Header::new(ChunkType::Document, headerless); - let mut result = Vec::with_capacity(header.len() + out.len()); - header.write(&mut result); - result.extend(headerless); - result - } -} diff --git a/rust/automerge/src/storage/document/doc_change_columns.rs b/rust/automerge/src/storage/document/doc_change_columns.rs deleted file mode 100644 index 93fa28e3..00000000 --- a/rust/automerge/src/storage/document/doc_change_columns.rs +++ /dev/null @@ -1,339 +0,0 @@ -use std::{borrow::Cow, convert::TryFrom}; - -use crate::{ - columnar::{ - column_range::{ - generic::{GenericColumnRange, GroupRange, GroupedColumnRange, SimpleColRange}, - DeltaRange, DepsIter, DepsRange, RleRange, ValueIter, ValueRange, - }, - encoding::{ColumnDecoder, DecodeColumnError, DeltaDecoder, RleDecoder}, - }, - storage::{ - columns::{compression, ColumnId, ColumnSpec, ColumnType}, - Columns, MismatchingColumn, RawColumn, RawColumns, - }, - types::ScalarValue, -}; - -const ACTOR_COL_ID: ColumnId = ColumnId::new(0); -const SEQ_COL_ID: ColumnId = ColumnId::new(0); -const MAX_OP_COL_ID: ColumnId = ColumnId::new(1); -const TIME_COL_ID: ColumnId = ColumnId::new(2); -const MESSAGE_COL_ID: ColumnId = ColumnId::new(3); -const DEPS_COL_ID: ColumnId = ColumnId::new(4); -const EXTRA_COL_ID: ColumnId = ColumnId::new(5); 
- -#[derive(Debug)] -pub(crate) struct ChangeMetadata<'a> { - pub(crate) actor: usize, - pub(crate) seq: u64, - pub(crate) max_op: u64, - pub(crate) timestamp: i64, - pub(crate) message: Option, - pub(crate) deps: Vec, - pub(crate) extra: Cow<'a, [u8]>, -} - -/// A row to be encoded as change metadata in the document format -/// -/// The lifetime `'a` is the lifetime of the extra bytes Cow. For types which cannot -/// provide a reference (e.g. because they are decoding from some columnar storage on each -/// iteration) this should be `'static`. -pub(crate) trait AsChangeMeta<'a> { - /// The type of the iterator over dependency indices - type DepsIter: Iterator + ExactSizeIterator; - - fn actor(&self) -> u64; - fn seq(&self) -> u64; - fn max_op(&self) -> u64; - fn timestamp(&self) -> i64; - fn message(&self) -> Option>; - fn deps(&self) -> Self::DepsIter; - fn extra(&self) -> Cow<'a, [u8]>; -} - -#[derive(Debug, Clone)] -pub(crate) struct DocChangeColumns { - actor: RleRange, - seq: DeltaRange, - max_op: DeltaRange, - time: DeltaRange, - message: RleRange, - deps: DepsRange, - extra: ValueRange, - #[allow(dead_code)] - other: Columns, -} - -impl DocChangeColumns { - pub(crate) fn iter<'a>(&self, data: &'a [u8]) -> DocChangeColumnIter<'a> { - DocChangeColumnIter { - actors: self.actor.decoder(data), - seq: self.seq.decoder(data), - max_op: self.max_op.decoder(data), - time: self.time.decoder(data), - message: if self.message.is_empty() { - None - } else { - Some(self.message.decoder(data)) - }, - deps: self.deps.iter(data), - extra: ExtraDecoder { - val: self.extra.iter(data), - }, - } - } - - pub(crate) fn encode<'a, I, C>(changes: I, out: &mut Vec) -> DocChangeColumns - where - C: AsChangeMeta<'a>, - I: Iterator + Clone, - { - let actor = RleRange::::encode( - // TODO: make this fallible once iterators have a try_splice - changes.clone().map(|c| Some(c.actor())), - out, - ); - let seq = DeltaRange::encode(changes.clone().map(|c| Some(c.seq() as i64)), out); - let 
max_op = DeltaRange::encode(changes.clone().map(|c| Some(c.max_op() as i64)), out); - let time = DeltaRange::encode(changes.clone().map(|c| Some(c.timestamp())), out); - let message = RleRange::encode(changes.clone().map(|c| c.message()), out); - let deps = DepsRange::encode(changes.clone().map(|c| c.deps()), out); - let extra = ValueRange::encode( - changes.map(|c| Cow::Owned(ScalarValue::Bytes(c.extra().to_vec()))), - out, - ); - DocChangeColumns { - actor, - seq, - max_op, - time, - message, - deps, - extra, - other: Columns::empty(), - } - } - - pub(crate) fn raw_columns(&self) -> RawColumns { - let mut cols = vec![ - RawColumn::new( - ColumnSpec::new(ACTOR_COL_ID, ColumnType::Actor, false), - self.actor.clone().into(), - ), - RawColumn::new( - ColumnSpec::new(SEQ_COL_ID, ColumnType::DeltaInteger, false), - self.seq.clone().into(), - ), - RawColumn::new( - ColumnSpec::new(MAX_OP_COL_ID, ColumnType::DeltaInteger, false), - self.max_op.clone().into(), - ), - RawColumn::new( - ColumnSpec::new(TIME_COL_ID, ColumnType::DeltaInteger, false), - self.time.clone().into(), - ), - RawColumn::new( - ColumnSpec::new(MESSAGE_COL_ID, ColumnType::String, false), - self.message.clone().into(), - ), - RawColumn::new( - ColumnSpec::new(DEPS_COL_ID, ColumnType::Group, false), - self.deps.num_range().clone().into(), - ), - ]; - if self.deps.deps_range().len() > 0 { - cols.push(RawColumn::new( - ColumnSpec::new(DEPS_COL_ID, ColumnType::DeltaInteger, false), - self.deps.deps_range().clone().into(), - )) - } - cols.push(RawColumn::new( - ColumnSpec::new(EXTRA_COL_ID, ColumnType::ValueMetadata, false), - self.extra.meta_range().clone().into(), - )); - if !self.extra.raw_range().is_empty() { - cols.push(RawColumn::new( - ColumnSpec::new(EXTRA_COL_ID, ColumnType::Value, false), - self.extra.raw_range().clone().into(), - )) - } - cols.into_iter().collect() - } -} - -#[derive(Debug, thiserror::Error)] -pub(crate) enum ReadChangeError { - #[error("unexpected null value for {0}")] - 
UnexpectedNull(String), - #[error("mismatching column types for column {index}")] - MismatchingColumn { index: usize }, - #[error("incorrect value in extra bytes column")] - InvalidExtraBytes, - #[error(transparent)] - ReadColumn(#[from] DecodeColumnError), -} - -impl From for ReadChangeError { - fn from(m: MismatchingColumn) -> Self { - Self::MismatchingColumn { index: m.index } - } -} - -#[derive(Clone)] -pub(crate) struct DocChangeColumnIter<'a> { - actors: RleDecoder<'a, u64>, - seq: DeltaDecoder<'a>, - max_op: DeltaDecoder<'a>, - time: DeltaDecoder<'a>, - message: Option>, - deps: DepsIter<'a>, - extra: ExtraDecoder<'a>, -} - -impl<'a> DocChangeColumnIter<'a> { - fn try_next(&mut self) -> Result>, ReadChangeError> { - let actor = match self.actors.maybe_next_in_col("actor")? { - Some(actor) => actor as usize, - None => { - // The actor column should always have a value so if the actor iterator returns None that - // means we should be done, we check by asserting that all the other iterators - // return none (which is what Self::check_done does). - if self.check_done() { - return Ok(None); - } else { - return Err(ReadChangeError::UnexpectedNull("actor".to_string())); - } - } - }; - let seq = self.seq.next_in_col("seq").and_then(|seq| { - u64::try_from(seq).map_err(|e| DecodeColumnError::invalid_value("seq", e.to_string())) - })?; - let max_op = self.max_op.next_in_col("max_op").and_then(|seq| { - u64::try_from(seq).map_err(|e| DecodeColumnError::invalid_value("seq", e.to_string())) - })?; - let time = self.time.next_in_col("time")?; - let message = if let Some(ref mut message) = self.message { - message.maybe_next_in_col("message")? 
- } else { - None - }; - let deps = self.deps.next_in_col("deps")?; - let extra = self.extra.next().transpose()?.unwrap_or(Cow::Borrowed(&[])); - Ok(Some(ChangeMetadata { - actor, - seq, - max_op, - timestamp: time, - message, - deps, - extra, - })) - } -} - -impl<'a> Iterator for DocChangeColumnIter<'a> { - type Item = Result, ReadChangeError>; - - fn next(&mut self) -> Option { - self.try_next().transpose() - } -} - -impl<'a> DocChangeColumnIter<'a> { - fn check_done(&mut self) -> bool { - let other_cols = [ - self.seq.next().is_none(), - self.max_op.next().is_none(), - self.time.next().is_none(), - self.deps.next().is_none(), - ]; - other_cols.iter().any(|f| *f) - } -} - -#[derive(Clone)] -struct ExtraDecoder<'a> { - val: ValueIter<'a>, -} - -impl<'a> Iterator for ExtraDecoder<'a> { - type Item = Result, ReadChangeError>; - fn next(&mut self) -> Option { - match self.val.next() { - Some(Ok(ScalarValue::Bytes(b))) => Some(Ok(Cow::Owned(b))), - Some(Ok(_)) => Some(Err(ReadChangeError::InvalidExtraBytes)), - Some(Err(e)) => Some(Err(e.into())), - None => None, - } - } -} - -impl TryFrom for DocChangeColumns { - type Error = ReadChangeError; - - fn try_from(columns: Columns) -> Result { - let mut actor: Option> = None; - let mut seq: Option = None; - let mut max_op: Option = None; - let mut time: Option = None; - let mut message: Option> = None; - let mut deps: Option = None; - let mut extra: Option = None; - let mut other = Columns::empty(); - - for (index, col) in columns.into_iter().enumerate() { - match (col.id(), col.col_type()) { - (ACTOR_COL_ID, ColumnType::Actor) => actor = Some(col.range().into()), - (SEQ_COL_ID, ColumnType::DeltaInteger) => seq = Some(col.range().into()), - (MAX_OP_COL_ID, ColumnType::DeltaInteger) => max_op = Some(col.range().into()), - (TIME_COL_ID, ColumnType::DeltaInteger) => time = Some(col.range().into()), - (MESSAGE_COL_ID, ColumnType::String) => message = Some(col.range().into()), - (DEPS_COL_ID, ColumnType::Group) => match 
col.into_ranges() { - GenericColumnRange::Group(GroupRange { num, values }) => { - let mut cols = values.into_iter(); - let deps_group = num; - let first = cols.next(); - let deps_index = match first { - Some(GroupedColumnRange::Simple(SimpleColRange::Delta( - index_range, - ))) => index_range, - Some(_) => { - tracing::error!( - "deps column contained more than one grouped column" - ); - return Err(ReadChangeError::MismatchingColumn { index: 5 }); - } - None => (0..0).into(), - }; - if cols.next().is_some() { - return Err(ReadChangeError::MismatchingColumn { index }); - } - deps = Some(DepsRange::new(deps_group, deps_index)); - } - _ => return Err(ReadChangeError::MismatchingColumn { index }), - }, - (EXTRA_COL_ID, ColumnType::ValueMetadata) => match col.into_ranges() { - GenericColumnRange::Value(val) => { - extra = Some(val); - } - _ => return Err(ReadChangeError::MismatchingColumn { index }), - }, - (other_id, other_type) => { - tracing::warn!(id=?other_id, typ=?other_type, "unknown column"); - other.append(col); - } - } - } - Ok(DocChangeColumns { - actor: actor.unwrap_or_else(|| (0..0).into()), - seq: seq.unwrap_or_else(|| (0..0).into()), - max_op: max_op.unwrap_or_else(|| (0..0).into()), - time: time.unwrap_or_else(|| (0..0).into()), - message: message.unwrap_or_else(|| (0..0).into()), - deps: deps.unwrap_or_else(|| DepsRange::new((0..0).into(), (0..0).into())), - extra: extra.unwrap_or_else(|| ValueRange::new((0..0).into(), (0..0).into())), - other, - }) - } -} diff --git a/rust/automerge/src/storage/document/doc_op_columns.rs b/rust/automerge/src/storage/document/doc_op_columns.rs deleted file mode 100644 index 82de17eb..00000000 --- a/rust/automerge/src/storage/document/doc_op_columns.rs +++ /dev/null @@ -1,450 +0,0 @@ -use std::{borrow::Cow, convert::TryFrom}; - -use crate::{ - columnar::{ - column_range::{ - generic::{GenericColumnRange, GroupRange, GroupedColumnRange, SimpleColRange}, - BooleanRange, DeltaRange, Key, KeyEncoder, KeyIter, KeyRange, 
ObjIdEncoder, ObjIdIter, - ObjIdRange, OpIdEncoder, OpIdIter, OpIdListEncoder, OpIdListIter, OpIdListRange, - OpIdRange, RleRange, ValueEncoder, ValueIter, ValueRange, - }, - encoding::{ - BooleanDecoder, BooleanEncoder, ColumnDecoder, DecodeColumnError, RleDecoder, - RleEncoder, - }, - }, - convert, - storage::{ - columns::{compression, ColumnId, ColumnSpec, ColumnType}, - Columns, MismatchingColumn, RawColumn, RawColumns, - }, - types::{ObjId, OpId, ScalarValue}, -}; - -const OBJ_COL_ID: ColumnId = ColumnId::new(0); -const KEY_COL_ID: ColumnId = ColumnId::new(1); -const ID_COL_ID: ColumnId = ColumnId::new(2); -const INSERT_COL_ID: ColumnId = ColumnId::new(3); -const ACTION_COL_ID: ColumnId = ColumnId::new(4); -const VAL_COL_ID: ColumnId = ColumnId::new(5); -const SUCC_COL_ID: ColumnId = ColumnId::new(8); - -/// The form operations take in the compressed document format. -#[derive(Debug)] -pub(crate) struct DocOp { - pub(crate) id: OpId, - pub(crate) object: ObjId, - pub(crate) key: Key, - pub(crate) insert: bool, - pub(crate) action: usize, - pub(crate) value: ScalarValue, - pub(crate) succ: Vec, -} - -#[derive(Debug, Clone)] -pub(crate) struct DocOpColumns { - obj: Option, - key: KeyRange, - id: OpIdRange, - insert: BooleanRange, - action: RleRange, - val: ValueRange, - succ: OpIdListRange, - #[allow(dead_code)] - other: Columns, -} - -struct DocId { - actor: usize, - counter: u64, -} - -impl convert::OpId for DocId { - fn actor(&self) -> usize { - self.actor - } - - fn counter(&self) -> u64 { - self.counter - } -} - -/// A row to be encoded as an op in the document format -/// -/// The lifetime `'a` is the lifetime of the value and key data types. For types which cannot -/// provide a reference (e.g. because they are decoding from some columnar storage on each -/// iteration) this should be `'static`. -pub(crate) trait AsDocOp<'a> { - /// The type of the Actor ID component of the op IDs for this impl. 
This is typically either - /// `&'a ActorID` or `usize` - type ActorId; - /// The type of the op IDs this impl produces. - type OpId: convert::OpId; - /// The type of the successor iterator returned by `Self::pred`. This can often be omitted - type SuccIter: Iterator + ExactSizeIterator; - - fn obj(&self) -> convert::ObjId; - fn id(&self) -> Self::OpId; - fn key(&self) -> convert::Key<'a, Self::OpId>; - fn insert(&self) -> bool; - fn action(&self) -> u64; - fn val(&self) -> Cow<'a, ScalarValue>; - fn succ(&self) -> Self::SuccIter; -} - -impl DocOpColumns { - pub(crate) fn encode<'a, I, C, O>(ops: I, out: &mut Vec) -> DocOpColumns - where - I: Iterator + Clone + ExactSizeIterator, - O: convert::OpId, - C: AsDocOp<'a, OpId = O>, - { - if ops.len() > 30000 { - Self::encode_rowwise(ops, out) - } else { - Self::encode_columnwise(ops, out) - } - } - - fn encode_columnwise<'a, I, O, C>(ops: I, out: &mut Vec) -> DocOpColumns - where - I: Iterator + Clone, - O: convert::OpId, - C: AsDocOp<'a, OpId = O>, - { - let obj = ObjIdRange::encode(ops.clone().map(|o| o.obj()), out); - let key = KeyRange::encode(ops.clone().map(|o| o.key()), out); - let id = OpIdRange::encode(ops.clone().map(|o| o.id()), out); - let insert = BooleanRange::encode(ops.clone().map(|o| o.insert()), out); - let action = RleRange::encode(ops.clone().map(|o| Some(o.action())), out); - let val = ValueRange::encode(ops.clone().map(|o| o.val()), out); - let succ = OpIdListRange::encode(ops.map(|o| o.succ()), out); - Self { - obj, - key, - id, - insert, - action, - val, - succ, - other: Columns::empty(), - } - } - - fn encode_rowwise<'a, I, O, C>(ops: I, out: &mut Vec) -> DocOpColumns - where - I: Iterator, - O: convert::OpId, - C: AsDocOp<'a, OpId = O>, - { - let mut obj = ObjIdEncoder::new(); - let mut key = KeyEncoder::new(); - let mut id = OpIdEncoder::new(); - let mut insert = BooleanEncoder::new(); - let mut action = RleEncoder::<_, u64>::from(Vec::new()); - let mut val = ValueEncoder::new(); - let mut 
succ = OpIdListEncoder::new(); - for op in ops { - obj.append(op.obj()); - key.append(op.key()); - id.append(op.id()); - insert.append(op.insert()); - action.append(Some(op.action())); - val.append(&op.val()); - succ.append(op.succ()); - } - let obj = obj.finish(out); - let key = key.finish(out); - let id = id.finish(out); - - let insert_start = out.len(); - let (insert_out, _) = insert.finish(); - out.extend(insert_out); - let insert = BooleanRange::from(insert_start..out.len()); - - let action_start = out.len(); - let (action_out, _) = action.finish(); - out.extend(action_out); - let action = RleRange::from(action_start..out.len()); - - let val = val.finish(out); - let succ = succ.finish(out); - DocOpColumns { - obj, - key, - id, - insert, - action, - val, - succ, - other: Columns::empty(), - } - } - - pub(crate) fn iter<'a>(&self, data: &'a [u8]) -> DocOpColumnIter<'a> { - DocOpColumnIter { - id: self.id.iter(data), - action: self.action.decoder(data), - objs: self.obj.as_ref().map(|o| o.iter(data)), - keys: self.key.iter(data), - insert: self.insert.decoder(data), - value: self.val.iter(data), - succ: self.succ.iter(data), - } - } - - pub(crate) fn raw_columns(&self) -> RawColumns { - let mut cols = vec![ - RawColumn::new( - ColumnSpec::new(OBJ_COL_ID, ColumnType::Actor, false), - self.obj - .as_ref() - .map(|o| o.actor_range().clone().into()) - .unwrap_or(0..0), - ), - RawColumn::new( - ColumnSpec::new(OBJ_COL_ID, ColumnType::Integer, false), - self.obj - .as_ref() - .map(|o| o.counter_range().clone().into()) - .unwrap_or(0..0), - ), - RawColumn::new( - ColumnSpec::new(KEY_COL_ID, ColumnType::Actor, false), - self.key.actor_range().clone().into(), - ), - RawColumn::new( - ColumnSpec::new(KEY_COL_ID, ColumnType::DeltaInteger, false), - self.key.counter_range().clone().into(), - ), - RawColumn::new( - ColumnSpec::new(KEY_COL_ID, ColumnType::String, false), - self.key.string_range().clone().into(), - ), - RawColumn::new( - ColumnSpec::new(ID_COL_ID, 
ColumnType::Actor, false), - self.id.actor_range().clone().into(), - ), - RawColumn::new( - ColumnSpec::new(ID_COL_ID, ColumnType::DeltaInteger, false), - self.id.counter_range().clone().into(), - ), - RawColumn::new( - ColumnSpec::new(INSERT_COL_ID, ColumnType::Boolean, false), - self.insert.clone().into(), - ), - RawColumn::new( - ColumnSpec::new(ACTION_COL_ID, ColumnType::Integer, false), - self.action.clone().into(), - ), - RawColumn::new( - ColumnSpec::new(VAL_COL_ID, ColumnType::ValueMetadata, false), - self.val.meta_range().clone().into(), - ), - ]; - if !self.val.raw_range().is_empty() { - cols.push(RawColumn::new( - ColumnSpec::new(VAL_COL_ID, ColumnType::Value, false), - self.val.raw_range().clone().into(), - )); - } - cols.push(RawColumn::new( - ColumnSpec::new(SUCC_COL_ID, ColumnType::Group, false), - self.succ.group_range().clone().into(), - )); - if !self.succ.actor_range().is_empty() { - cols.extend([ - RawColumn::new( - ColumnSpec::new(SUCC_COL_ID, ColumnType::Actor, false), - self.succ.actor_range().clone().into(), - ), - RawColumn::new( - ColumnSpec::new(SUCC_COL_ID, ColumnType::DeltaInteger, false), - self.succ.counter_range().clone().into(), - ), - ]); - } - cols.into_iter().collect() - } -} - -#[derive(Clone)] -pub(crate) struct DocOpColumnIter<'a> { - id: OpIdIter<'a>, - action: RleDecoder<'a, u64>, - objs: Option>, - keys: KeyIter<'a>, - insert: BooleanDecoder<'a>, - value: ValueIter<'a>, - succ: OpIdListIter<'a>, -} - -impl<'a> DocOpColumnIter<'a> { - fn done(&self) -> bool { - self.id.done() - } -} - -#[derive(Debug, thiserror::Error)] -#[error(transparent)] -pub(crate) struct ReadDocOpError(#[from] DecodeColumnError); - -impl<'a> Iterator for DocOpColumnIter<'a> { - type Item = Result; - - fn next(&mut self) -> Option { - if self.done() { - None - } else { - match self.try_next() { - Ok(Some(op)) => Some(Ok(op)), - Ok(None) => None, - Err(e) => Some(Err(e.into())), - } - } - } -} - -impl<'a> DocOpColumnIter<'a> { - fn try_next(&mut self) 
-> Result, DecodeColumnError> { - if self.done() { - Ok(None) - } else { - let id = self.id.next_in_col("id")?; - let action = self.action.next_in_col("action")?; - let obj = if let Some(ref mut objs) = self.objs { - objs.next_in_col("obj")? - } else { - ObjId::root() - }; - let key = self.keys.next_in_col("key")?; - let value = self.value.next_in_col("value")?; - let succ = self.succ.next_in_col("succ")?; - let insert = self.insert.next_in_col("insert")?; - Ok(Some(DocOp { - id, - value, - action: action as usize, - object: obj, - key, - succ, - insert, - })) - } - } -} - -#[derive(Debug, thiserror::Error)] -pub(crate) enum Error { - #[error("mismatching column at {index}.")] - MismatchingColumn { index: usize }, -} - -impl From for Error { - fn from(m: MismatchingColumn) -> Self { - Error::MismatchingColumn { index: m.index } - } -} - -impl TryFrom for DocOpColumns { - type Error = Error; - - fn try_from(columns: Columns) -> Result { - let mut obj_actor: Option> = None; - let mut obj_ctr: Option> = None; - let mut key_actor: Option> = None; - let mut key_ctr: Option = None; - let mut key_str: Option> = None; - let mut id_actor: Option> = None; - let mut id_ctr: Option = None; - let mut insert: Option = None; - let mut action: Option> = None; - let mut val: Option = None; - let mut succ_group: Option> = None; - let mut succ_actor: Option> = None; - let mut succ_ctr: Option = None; - let mut other = Columns::empty(); - - for (index, col) in columns.into_iter().enumerate() { - match (col.id(), col.col_type()) { - (ID_COL_ID, ColumnType::Actor) => id_actor = Some(col.range().into()), - (ID_COL_ID, ColumnType::DeltaInteger) => id_ctr = Some(col.range().into()), - (OBJ_COL_ID, ColumnType::Actor) => obj_actor = Some(col.range().into()), - (OBJ_COL_ID, ColumnType::Integer) => obj_ctr = Some(col.range().into()), - (KEY_COL_ID, ColumnType::Actor) => key_actor = Some(col.range().into()), - (KEY_COL_ID, ColumnType::DeltaInteger) => key_ctr = Some(col.range().into()), - 
(KEY_COL_ID, ColumnType::String) => key_str = Some(col.range().into()), - (INSERT_COL_ID, ColumnType::Boolean) => insert = Some(col.range().into()), - (ACTION_COL_ID, ColumnType::Integer) => action = Some(col.range().into()), - (VAL_COL_ID, ColumnType::ValueMetadata) => match col.into_ranges() { - GenericColumnRange::Value(v) => val = Some(v), - _ => { - tracing::error!("col 9 should be a value column"); - return Err(Error::MismatchingColumn { index }); - } - }, - (SUCC_COL_ID, ColumnType::Group) => match col.into_ranges() { - GenericColumnRange::Group(GroupRange { num, values }) => { - let mut cols = values.into_iter(); - let first = cols.next(); - let second = cols.next(); - succ_group = Some(num); - match (first, second) { - ( - Some(GroupedColumnRange::Simple(SimpleColRange::RleInt( - actor_range, - ))), - Some(GroupedColumnRange::Simple(SimpleColRange::Delta(ctr_range))), - ) => { - succ_actor = Some(actor_range); - succ_ctr = Some(ctr_range); - } - (None, None) => { - succ_actor = Some((0..0).into()); - succ_ctr = Some((0..0).into()); - } - _ => { - tracing::error!( - "expected a two column group of (actor, rle int) for index 10" - ); - return Err(Error::MismatchingColumn { index }); - } - }; - if cols.next().is_some() { - return Err(Error::MismatchingColumn { index }); - } - } - _ => return Err(Error::MismatchingColumn { index }), - }, - (other_col, other_type) => { - tracing::warn!(id=?other_col, typ=?other_type, "unknown column type"); - other.append(col) - } - } - } - Ok(DocOpColumns { - obj: ObjIdRange::new( - obj_actor.unwrap_or_else(|| (0..0).into()), - obj_ctr.unwrap_or_else(|| (0..0).into()), - ), - key: KeyRange::new( - key_actor.unwrap_or_else(|| (0..0).into()), - key_ctr.unwrap_or_else(|| (0..0).into()), - key_str.unwrap_or_else(|| (0..0).into()), - ), - id: OpIdRange::new( - id_actor.unwrap_or_else(|| (0..0).into()), - id_ctr.unwrap_or_else(|| (0..0).into()), - ), - insert: insert.unwrap_or_else(|| (0..0).into()), - action: 
action.unwrap_or_else(|| (0..0).into()), - val: val.unwrap_or_else(|| ValueRange::new((0..0).into(), (0..0).into())), - succ: OpIdListRange::new( - succ_group.unwrap_or_else(|| (0..0).into()), - succ_actor.unwrap_or_else(|| (0..0).into()), - succ_ctr.unwrap_or_else(|| (0..0).into()), - ), - other, - }) - } -} diff --git a/rust/automerge/src/storage/load.rs b/rust/automerge/src/storage/load.rs deleted file mode 100644 index 80ab3d82..00000000 --- a/rust/automerge/src/storage/load.rs +++ /dev/null @@ -1,120 +0,0 @@ -use tracing::instrument; - -use crate::{ - change::Change, - storage::{self, parse}, -}; - -mod change_collector; -mod reconstruct_document; -pub(crate) use reconstruct_document::{ - reconstruct_document, DocObserver, LoadedObject, Reconstructed, VerificationMode, -}; - -#[derive(Debug, thiserror::Error)] -#[allow(unreachable_pub)] -pub enum Error { - #[error("unable to parse chunk: {0}")] - Parse(Box), - #[error("invalid change columns: {0}")] - InvalidChangeColumns(Box), - #[error("invalid ops columns: {0}")] - InvalidOpsColumns(Box), - #[error("a chunk contained leftover data")] - LeftoverData, - #[error("error inflating document chunk ops: {0}")] - InflateDocument(Box), - #[error("bad checksum")] - BadChecksum, -} - -pub(crate) enum LoadedChanges<'a> { - /// All the data was succesfully loaded into a list of changes - Complete(Vec), - /// We only managed to load _some_ changes. - Partial { - /// The succesfully loaded changes - loaded: Vec, - /// The data which we were unable to parse - #[allow(dead_code)] - remaining: parse::Input<'a>, - /// The error encountered whilst trying to parse `remaining` - error: Error, - }, -} - -/// Attempt to Load all the chunks in `data`. -/// -/// # Partial Loads -/// -/// Automerge documents are encoded as one or more concatenated chunks. Each chunk containing one -/// or more changes. This means it is possible to partially load corrupted data if the first `n` -/// chunks are valid. 
This function returns a `LoadedChanges` which you can examine to determine if -/// this is the case. -#[instrument(skip(data))] -pub(crate) fn load_changes<'a>(mut data: parse::Input<'a>) -> LoadedChanges<'a> { - let mut changes = Vec::new(); - while !data.is_empty() { - let remaining = match load_next_change(data, &mut changes) { - Ok(d) => d, - Err(e) => { - return LoadedChanges::Partial { - loaded: changes, - remaining: data, - error: e, - }; - } - }; - data = remaining.reset(); - } - LoadedChanges::Complete(changes) -} - -fn load_next_change<'a>( - data: parse::Input<'a>, - changes: &mut Vec, -) -> Result, Error> { - let (remaining, chunk) = storage::Chunk::parse(data).map_err(|e| Error::Parse(Box::new(e)))?; - if !chunk.checksum_valid() { - return Err(Error::BadChecksum); - } - match chunk { - storage::Chunk::Document(d) => { - tracing::trace!("loading document chunk"); - let Reconstructed { - changes: new_changes, - .. - } = reconstruct_document(&d, VerificationMode::DontCheck, NullObserver) - .map_err(|e| Error::InflateDocument(Box::new(e)))?; - changes.extend(new_changes); - } - storage::Chunk::Change(change) => { - tracing::trace!("loading change chunk"); - let change = Change::new_from_unverified(change.into_owned(), None) - .map_err(|e| Error::InvalidChangeColumns(Box::new(e)))?; - #[cfg(debug_assertions)] - { - let loaded_ops = change.iter_ops().collect::>(); - tracing::trace!(actor=?change.actor_id(), num_ops=change.len(), ops=?loaded_ops, "loaded change"); - } - #[cfg(not(debug_assertions))] - tracing::trace!(actor=?change.actor_id(), num_ops=change.len(), "loaded change"); - changes.push(change); - } - storage::Chunk::CompressedChange(change, compressed) => { - tracing::trace!("loading compressed change chunk"); - let change = - Change::new_from_unverified(change.into_owned(), Some(compressed.into_owned())) - .map_err(|e| Error::InvalidChangeColumns(Box::new(e)))?; - changes.push(change); - } - }; - Ok(remaining) -} - -struct NullObserver; -impl 
DocObserver for NullObserver { - type Output = (); - fn finish(self, _metadata: crate::op_tree::OpSetMetadata) -> Self::Output {} - fn object_loaded(&mut self, _object: LoadedObject) {} -} diff --git a/rust/automerge/src/storage/load/change_collector.rs b/rust/automerge/src/storage/load/change_collector.rs deleted file mode 100644 index d05367a9..00000000 --- a/rust/automerge/src/storage/load/change_collector.rs +++ /dev/null @@ -1,222 +0,0 @@ -use std::{ - borrow::Cow, - collections::{BTreeSet, HashMap}, - num::NonZeroU64, -}; - -use tracing::instrument; - -use crate::{ - op_tree::OpSetMetadata, - storage::{ - change::{PredOutOfOrder, Verified}, - convert::op_as_actor_id, - Change as StoredChange, ChangeMetadata, - }, - types::{ChangeHash, ObjId, Op}, -}; - -#[derive(Debug, thiserror::Error)] -pub(crate) enum Error { - #[error("a change referenced an actor index we couldn't find")] - MissingActor, - #[error("changes out of order")] - ChangesOutOfOrder, - #[error("missing change")] - MissingChange, - #[error("unable to read change metadata: {0}")] - ReadChange(Box), - #[error("incorrect max op")] - IncorrectMaxOp, - #[error("missing ops")] - MissingOps, -} - -pub(crate) struct ChangeCollector<'a> { - changes_by_actor: HashMap>>, -} - -pub(crate) struct CollectedChanges<'a> { - pub(crate) history: Vec>, - pub(crate) heads: BTreeSet, -} - -impl<'a> ChangeCollector<'a> { - pub(crate) fn new( - changes: I, - ) -> Result, Error> - where - I: IntoIterator, E>>, - { - let mut changes_by_actor: HashMap>> = HashMap::new(); - for (index, change) in changes.into_iter().enumerate() { - tracing::trace!(?change, "importing change metadata"); - let change = change.map_err(|e| Error::ReadChange(Box::new(e)))?; - let actor_changes = changes_by_actor.entry(change.actor).or_default(); - if let Some(prev) = actor_changes.last() { - // Note that we allow max_op to be equal to the previous max_op in case the - // previous change had no ops (which is permitted) - if prev.max_op > 
change.max_op { - return Err(Error::ChangesOutOfOrder); - } - } - actor_changes.push(PartialChange { - index, - deps: change.deps, - actor: change.actor, - seq: change.seq, - timestamp: change.timestamp, - max_op: change.max_op, - message: change.message, - extra_bytes: change.extra, - ops: Vec::new(), - }) - } - let num_changes: usize = changes_by_actor.values().map(|v| v.len()).sum(); - tracing::trace!(num_changes, "change collection context created"); - Ok(ChangeCollector { changes_by_actor }) - } - - #[instrument(skip(self))] - pub(crate) fn collect(&mut self, obj: ObjId, op: Op) -> Result<(), Error> { - let actor_changes = self - .changes_by_actor - .get_mut(&op.id.actor()) - .ok_or_else(|| { - tracing::error!(missing_actor = op.id.actor(), "missing actor for op"); - Error::MissingActor - })?; - let change_index = actor_changes.partition_point(|c| c.max_op < op.id.counter()); - let change = actor_changes.get_mut(change_index).ok_or_else(|| { - tracing::error!(missing_change_index = change_index, "missing change for op"); - Error::MissingChange - })?; - change.ops.push((obj, op)); - Ok(()) - } - - #[instrument(skip(self, metadata))] - pub(crate) fn finish( - self, - metadata: &OpSetMetadata, - ) -> Result, Error> { - let mut changes_in_order = - Vec::with_capacity(self.changes_by_actor.values().map(|c| c.len()).sum()); - for (_, changes) in self.changes_by_actor { - let mut seq = None; - for change in changes { - if let Some(seq) = seq { - if seq != change.seq - 1 { - return Err(Error::ChangesOutOfOrder); - } - } else if change.seq != 1 { - return Err(Error::ChangesOutOfOrder); - } - seq = Some(change.seq); - changes_in_order.push(change); - } - } - changes_in_order.sort_by_key(|c| c.index); - - let mut hashes_by_index = HashMap::new(); - let mut history = Vec::new(); - let mut heads = BTreeSet::new(); - for (index, change) in changes_in_order.into_iter().enumerate() { - let finished = change.finish(&hashes_by_index, metadata)?; - let hash = finished.hash(); - 
hashes_by_index.insert(index, hash); - for dep in finished.dependencies() { - heads.remove(dep); - } - heads.insert(hash); - history.push(finished.into_owned()); - } - - Ok(CollectedChanges { history, heads }) - } -} - -#[derive(Debug)] -struct PartialChange<'a> { - index: usize, - deps: Vec, - actor: usize, - seq: u64, - max_op: u64, - timestamp: i64, - message: Option, - extra_bytes: Cow<'a, [u8]>, - ops: Vec<(ObjId, Op)>, -} - -impl<'a> PartialChange<'a> { - /// # Panics - /// - /// * If any op references a property index which is not in `props` - /// * If any op references an actor index which is not in `actors` - #[instrument(skip(self, known_changes, metadata))] - fn finish( - mut self, - known_changes: &HashMap, - metadata: &OpSetMetadata, - ) -> Result, Error> { - let deps_len = self.deps.len(); - let mut deps = self.deps.into_iter().try_fold::<_, _, Result<_, Error>>( - Vec::with_capacity(deps_len), - |mut acc, dep| { - acc.push(known_changes.get(&(dep as usize)).cloned().ok_or_else(|| { - tracing::error!( - dependent_index = self.index, - dep_index = dep, - "could not find dependency" - ); - Error::MissingChange - })?); - Ok(acc) - }, - )?; - deps.sort(); - let num_ops = self.ops.len() as u64; - self.ops.sort_by_key(|o| o.1.id); - let converted_ops = self - .ops - .iter() - .map(|(obj, op)| op_as_actor_id(obj, op, metadata)); - let actor = metadata - .actors - .safe_get(self.actor) - .ok_or_else(|| { - tracing::error!(actor_index = self.actor, "actor out of bounds"); - Error::MissingActor - })? - .clone(); - - if num_ops > self.max_op { - return Err(Error::IncorrectMaxOp); - } - - let change = match StoredChange::builder() - .with_dependencies(deps) - .with_actor(actor) - .with_seq(self.seq) - .with_start_op(NonZeroU64::new(self.max_op - num_ops + 1).ok_or(Error::MissingOps)?) 
- .with_timestamp(self.timestamp) - .with_message(self.message.map(|s| s.to_string())) - .with_extra_bytes(self.extra_bytes.into_owned()) - .build(converted_ops) - { - Ok(s) => s, - Err(PredOutOfOrder) => { - // SAFETY: types::Op::preds is `types::OpIds` which ensures ops are always sorted - panic!("preds out of order"); - } - }; - #[cfg(not(debug_assertions))] - tracing::trace!(?change, hash=?change.hash(), "collected change"); - #[cfg(debug_assertions)] - { - tracing::trace!(?change, ops=?self.ops, hash=?change.hash(), "collected change"); - } - Ok(change) - } -} diff --git a/rust/automerge/src/storage/load/reconstruct_document.rs b/rust/automerge/src/storage/load/reconstruct_document.rs deleted file mode 100644 index 44ace72a..00000000 --- a/rust/automerge/src/storage/load/reconstruct_document.rs +++ /dev/null @@ -1,391 +0,0 @@ -use super::change_collector::ChangeCollector; -use std::collections::{BTreeSet, HashMap}; -use tracing::instrument; - -use crate::{ - change::Change, - columnar::Key as DocOpKey, - op_tree::OpSetMetadata, - storage::{change::Verified, Change as StoredChange, DocOp, Document}, - types::{ChangeHash, ElemId, Key, ObjId, ObjType, Op, OpId, OpIds, OpType}, - ScalarValue, -}; - -#[derive(Debug, thiserror::Error)] -pub(crate) enum Error { - #[error("the document contained ops which were out of order")] - OpsOutOfOrder, - #[error("error reading operation: {0:?}")] - ReadOp(Box), - #[error("an operation contained an invalid action")] - InvalidAction, - #[error("an operation referenced a missing actor id")] - MissingActor, - #[error("invalid changes: {0}")] - InvalidChanges(#[from] super::change_collector::Error), - #[error("mismatching heads")] - MismatchingHeads(MismatchedHeads), - #[error("missing operations")] - MissingOps, - #[error("succ out of order")] - SuccOutOfOrder, -} - -pub(crate) struct MismatchedHeads { - changes: Vec>, - expected_heads: BTreeSet, - derived_heads: BTreeSet, -} - -impl std::fmt::Debug for MismatchedHeads { - fn 
fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("MismatchedHeads") - .field("changes", &self.changes.len()) - .field("expected_heads", &self.expected_heads) - .field("derived_heads", &self.derived_heads) - .finish() - } -} - -/// All the operations loaded from an object in the document format -pub(crate) struct LoadedObject { - /// The id of the object - pub(crate) id: ObjId, - /// The id of the parent object, if any - pub(crate) parent: Option, - /// The operations for this object - pub(crate) ops: Vec, - /// The type of the object - pub(crate) obj_type: ObjType, -} - -/// An observer which will be notified of each object as it completes and which can produce a -/// result once all the operations are loaded and the change graph is verified. -pub(crate) trait DocObserver { - type Output; - - /// The operations for an object have been loaded - fn object_loaded(&mut self, object: LoadedObject); - /// The document has finished loading. The `metadata` is the `OpSetMetadata` which was used to - /// create the indices in the operations which were passed to `object_loaded` - fn finish(self, metadata: OpSetMetadata) -> Self::Output; -} - -/// The result of reconstructing the change history from a document -pub(crate) struct Reconstructed { - /// The maximum op counter that was found in the document - pub(crate) max_op: u64, - /// The changes in the document, in the order they were encoded in the document - pub(crate) changes: Vec, - /// The result produced by the `DocObserver` which was watching the reconstruction - pub(crate) result: Output, - /// The heads of the document - pub(crate) heads: BTreeSet, -} - -#[derive(Debug)] -pub enum VerificationMode { - Check, - DontCheck, -} - -#[instrument(skip(doc, observer))] -pub(crate) fn reconstruct_document<'a, O: DocObserver>( - doc: &'a Document<'a>, - mode: VerificationMode, - mut observer: O, -) -> Result, Error> { - // The document format does not contain the bytes of the changes which are 
encoded in it - // directly. Instead the metadata about the changes (the actor, the start op, etc.) are all - // encoded separately to all the ops in the document. We need to reconstruct the changes in - // order to verify the heads of the document. To do this we iterate over the document - // operations adding each operation to a `ChangeCollector`. Once we've collected all the - // changes, the `ChangeCollector` knows how to group all the operations together to produce the - // change graph. - // - // Some of the work involved in reconstructing the changes could in principle be quite costly. - // For example, delete operations dont appear in the document at all, instead the delete - // operations are recorded as `succ` operations on the operations which they delete. This means - // that to reconstruct delete operations we have to first collect all the operations, then look - // for succ operations which we have not seen a concrete operation for. Happily we can take - // advantage of the fact that operations are encoded in the order of the object they apply to. - // This is the purpose of `LoadingObject`. - // - // Finally, when constructing an OpSet from this data we want to process the operations in the - // order they appear in the document, this allows us to create the OpSet more efficiently than - // if we were directly applying the reconstructed change graph. This is the purpose of the - // `DocObserver`, which we pass operations to as we complete the processing of each object. 
- - // The metadata which we create from the doc and which we will pass to the observer - let mut metadata = OpSetMetadata::from_actors(doc.actors().to_vec()); - // The object we are currently loading, starts with the root - let mut current_object = LoadingObject::root(); - // The changes we are collecting to later construct the change graph from - let mut collector = ChangeCollector::new(doc.iter_changes())?; - // A map where we record the create operations so that when the object ID the incoming - // operations refer to switches we can lookup the object type for the new object. We also - // need it so we can pass the parent object ID to the observer - let mut create_ops = HashMap::new(); - // The max op we've seen - let mut max_op = 0; - // The objects we have finished loaded - let mut objs_loaded = BTreeSet::new(); - - for op_res in doc.iter_ops() { - let doc_op = op_res.map_err(|e| Error::ReadOp(Box::new(e)))?; - max_op = std::cmp::max(max_op, doc_op.id.counter()); - - // Delete ops only appear as succ values in the document operations, so if a delete - // operation is the max op we will only see it here. 
Therefore we step through the document - // operations succs checking for max op - for succ in &doc_op.succ { - max_op = std::cmp::max(max_op, succ.counter()); - } - - let obj = doc_op.object; - check_opid(&metadata, *obj.opid())?; - let op = import_op(&mut metadata, doc_op)?; - tracing::trace!(?op, ?obj, "loading document op"); - - if let OpType::Make(obj_type) = op.action { - create_ops.insert( - ObjId::from(op.id), - CreateOp { - obj_type, - parent_id: obj, - }, - ); - }; - if obj == current_object.id { - current_object.append_op(op.clone())?; - } else { - let create_op = match create_ops.get(&obj) { - Some(t) => Ok(t), - None => { - tracing::error!( - ?op, - "operation referenced an object which we haven't seen a create op for yet" - ); - Err(Error::OpsOutOfOrder) - } - }?; - if obj < current_object.id { - tracing::error!(?op, previous_obj=?current_object.id, "op referenced an object ID which was smaller than the previous object ID"); - return Err(Error::OpsOutOfOrder); - } else { - let loaded = current_object.finish(&mut collector, &metadata)?; - objs_loaded.insert(loaded.id); - observer.object_loaded(loaded); - current_object = - LoadingObject::new(obj, Some(create_op.parent_id), create_op.obj_type); - current_object.append_op(op.clone())?; - } - } - } - let loaded = current_object.finish(&mut collector, &metadata)?; - objs_loaded.insert(loaded.id); - observer.object_loaded(loaded); - - // If an op created an object but no operation targeting that object was ever made then the - // object will only exist in the create_ops map. We collect all such objects here. 
- for ( - obj_id, - CreateOp { - parent_id, - obj_type, - }, - ) in create_ops.into_iter() - { - if !objs_loaded.contains(&obj_id) { - observer.object_loaded(LoadedObject { - parent: Some(parent_id), - id: obj_id, - ops: Vec::new(), - obj_type, - }) - } - } - - let super::change_collector::CollectedChanges { history, heads } = - collector.finish(&metadata)?; - if matches!(mode, VerificationMode::Check) { - let expected_heads: BTreeSet<_> = doc.heads().iter().cloned().collect(); - if expected_heads != heads { - tracing::error!(?expected_heads, ?heads, "mismatching heads"); - return Err(Error::MismatchingHeads(MismatchedHeads { - changes: history, - expected_heads, - derived_heads: heads, - })); - } - } - let result = observer.finish(metadata); - - Ok(Reconstructed { - result, - changes: history.into_iter().map(Change::new).collect(), - heads, - max_op, - }) -} - -struct CreateOp { - parent_id: ObjId, - obj_type: ObjType, -} -struct LoadingObject { - id: ObjId, - parent_id: Option, - ops: Vec, - obj_type: ObjType, - preds: HashMap>, - /// Operations which set a value, stored to later lookup keys when reconstructing delete events - set_ops: HashMap, - /// To correctly load the values of the `Counter` struct in the value of op IDs we need to - /// lookup the various increment operations which have been applied by the succesors of the - /// initial operation which creates the counter. 
- inc_ops: HashMap, -} - -impl LoadingObject { - fn root() -> Self { - Self::new(ObjId::root(), None, ObjType::Map) - } - - fn new(id: ObjId, parent_id: Option, obj_type: ObjType) -> Self { - LoadingObject { - id, - parent_id, - ops: Vec::new(), - obj_type, - preds: HashMap::new(), - set_ops: HashMap::new(), - inc_ops: HashMap::new(), - } - } - - fn append_op(&mut self, op: Op) -> Result<(), Error> { - // Collect set and make operations so we can find the keys which delete operations refer to - // in `finish` - if matches!(op.action, OpType::Put(_) | OpType::Make(_)) { - match op.key { - Key::Map(_) => { - self.set_ops.insert(op.id, op.key); - } - Key::Seq(ElemId(o)) => { - let elem_opid = if op.insert { op.id } else { o }; - self.set_ops.insert(op.id, Key::Seq(ElemId(elem_opid))); - } - }; - } - // Collect increment operations so we can reconstruct counters properly in `finish` - if let OpType::Increment(inc) = op.action { - self.inc_ops.insert(op.id, inc); - } - for succ in &op.succ { - self.preds.entry(*succ).or_default().push(op.id); - } - self.ops.push(op); - Ok(()) - } - - fn finish( - mut self, - collector: &mut ChangeCollector<'_>, - meta: &OpSetMetadata, - ) -> Result { - let mut ops = Vec::new(); - for mut op in self.ops.into_iter() { - if let Some(preds) = self.preds.remove(&op.id) { - op.pred = meta.sorted_opids(preds.into_iter()); - } - if let OpType::Put(ScalarValue::Counter(c)) = &mut op.action { - let inc_ops = op.succ.iter().filter_map(|s| self.inc_ops.get(s).copied()); - c.increment(inc_ops); - } - collector.collect(self.id, op.clone())?; - ops.push(op) - } - // Any remaining pred ops must be delete operations - // TODO (alex): Figure out what index these should be inserted at. Does it even matter? 
- for (opid, preds) in self.preds.into_iter() { - let key = self.set_ops.get(&preds[0]).ok_or_else(|| { - tracing::error!(?opid, ?preds, "no delete operation found"); - Error::MissingOps - })?; - collector.collect( - self.id, - Op { - id: opid, - pred: meta.sorted_opids(preds.into_iter()), - insert: false, - succ: OpIds::empty(), - key: *key, - action: OpType::Delete, - }, - )?; - } - Ok(LoadedObject { - id: self.id, - parent: self.parent_id, - ops, - obj_type: self.obj_type, - }) - } -} - -fn import_op(m: &mut OpSetMetadata, op: DocOp) -> Result { - let key = match op.key { - DocOpKey::Prop(s) => Key::Map(m.import_prop(s)), - DocOpKey::Elem(ElemId(op)) => Key::Seq(ElemId(check_opid(m, op)?)), - }; - for opid in &op.succ { - if m.actors.safe_get(opid.actor()).is_none() { - tracing::error!(?opid, "missing actor"); - return Err(Error::MissingActor); - } - } - Ok(Op { - id: check_opid(m, op.id)?, - action: parse_optype(op.action, op.value)?, - key, - succ: m.try_sorted_opids(op.succ).ok_or(Error::SuccOutOfOrder)?, - pred: OpIds::empty(), - insert: op.insert, - }) -} - -/// We construct the OpSetMetadata directly from the vector of actors which are encoded in the -/// start of the document. Therefore we need to check for each opid in the docuemnt that the actor -/// ID which it references actually exists in the metadata. 
-fn check_opid(m: &OpSetMetadata, opid: OpId) -> Result { - match m.actors.safe_get(opid.actor()) { - Some(_) => Ok(opid), - None => { - tracing::error!("missing actor"); - Err(Error::MissingActor) - } - } -} - -fn parse_optype(action_index: usize, value: ScalarValue) -> Result { - match action_index { - 0 => Ok(OpType::Make(ObjType::Map)), - 1 => Ok(OpType::Put(value)), - 2 => Ok(OpType::Make(ObjType::List)), - 3 => Ok(OpType::Delete), - 4 => Ok(OpType::Make(ObjType::Text)), - 5 => match value { - ScalarValue::Int(i) => Ok(OpType::Increment(i)), - _ => { - tracing::error!(?value, "invalid value for counter op"); - Err(Error::InvalidAction) - } - }, - 6 => Ok(OpType::Make(ObjType::Table)), - other => { - tracing::error!(action = other, "unknown action type"); - Err(Error::InvalidAction) - } - } -} diff --git a/rust/automerge/src/storage/parse.rs b/rust/automerge/src/storage/parse.rs deleted file mode 100644 index 6751afb4..00000000 --- a/rust/automerge/src/storage/parse.rs +++ /dev/null @@ -1,595 +0,0 @@ -//! A small parser combinator library inspired by [`nom`](https://docs.rs/crate/nom/5.0.0). -//! -//! The primary reason for using this rather than `nom` is that this is only a few hundred lines of -//! code because we don't need a fully fledged combinator library - automerge is a low level -//! library so it's good to avoid dependencies where we can. -//! -//! # Basic Usage -//! -//! The basic components of this library are [`Parser`]s, which parse [`Input`]s and produce -//! [`ParseResult`]s. `Input` is a combination of an `&[u8]` which is the incoming data along with -//! the position it has read up to in the data. `Parser` is a trait but has a blanket `impl` for -//! `FnMut(Input<'a>) -> ParseResult<'a, O, E>` so in practice you can think of parsers as a -//! function which takes some input and returns a result plus any remaining input. This final part -//! is encapsulated by the `ParseResult` which is a type alias for a `Result`. This means that -//! 
typical usage will look something like this: -//! -//! ```rust,ignore -//! use automerge::storage::parse::{ParseResult, take_1}; -//! fn do_something<'a>(input: Input<'a>) -> ParseResult<'a, [u8; 3], ()> { -//! let (i, a) = take_1::<()>(input)?; -//! let (i, b) = take_1::<()>(i)?; -//! let (i, c) = take_1::<()>(i)?; -//! let result = [a, b, c]; -//! Ok((i, result)) -//! } -//! -//! let input = Input::new(&[b"12345"]); -//! let result = do_something(input); -//! if let Ok((_, result)) = result { -//! assert_eq!(&result, &['1', '2', '3']); -//! } else { -//! panic!(); -//! } -//! ``` -//! -//! Three things to note here: -//! -//! 1. The rebinding of the input (in `i`) after each call to `take_1`, this is how parser state is passed from -//! one call to the next -//! 2. We return a tuple containing the remaining input plus the result -//! 3. `take_1` has a type parameter we must pass to it representing the error type. Generally you -//! don't need to do that as type inference is often good enough. -//! -//! # Errors -//! -//! The error branch of `ParseError` is an enum containing either `ParseError::Incomplete` -//! indicating that with more input we might be able to succeed, or a `ParseError::Error`. The -//! latter branch is where parser specific errors (e.g. "this u8 is not a valid chunk type") are -//! passed. This has implications for returning and handling errors. -//! -//! ## Returning Errors -//! -//! If you want to return an error from a parser you will need to wrap the error in -//! `ParseError::Error`. -//! -//! ```rust,ignore -//! struct MyError; -//! fn my_bad_parser() -> ParseResult<(), MyError> { -//! Err(ParseError::Error(MyError)) -//! } -//! ``` -//! -//! ## Handling Errors -//! -//! Handling errors is generally important when you want to compose parsers with different error -//! types. In this case you will often have an error type you want to map each of the underlying -//! errors into. For this purpose you can use `ParseError::lift` -//! -//! 
```rust,ignore -//! # use automerge::parse::{ParseResult, Input}; -//! #[derive(thiserror::Error, Debug)] -//! #[error("this is a bad string")] -//! struct BadString; -//! -//! #[derive(thiserror::Error, Debug)] -//! #[error("this is a bad number")] -//! struct BadNumber; -//! -//! fn parse_string<'a>(input: Input<'a>) -> ParseResult<'a, String, BadString> { -//! Err(ParseError::Error(BadString)) -//! } -//! -//! fn parse_number<'a>(input: Input<'a>) -> ParseResult<'a, u32, BadNumber> { -//! Err(ParseError::Error(BadNumber)) -//! } -//! -//! #[derive(thiserror::Error, Debug)] -//! struct CombinedError{ -//! #[error(transparent)] -//! String(#[from] BadString), -//! #[error(transparent)] -//! Number(#[from] BadNumber), -//! } -//! -//! fn parse_string_then_number<'a>(input: Input<'a>) -> ParseResult<'a, (String, u32), CombinedError> { -//! // Note the `e.lift()` here, this works because of the `From` impl generated by -//! // `thiserror::Error` -//! let (i, thestring) = parse_string(input).map_err(|e| e.lift())?; -//! let (i, thenumber) = parse_number(i).map_err(|e| e.lift())?; -//! Ok((i, (thestring, thenumber))) -//! } -//! ``` - -use core::num::NonZeroUsize; -use std::convert::TryInto; - -pub(crate) mod leb128; -use crate::{ActorId, ChangeHash}; - -const HASH_SIZE: usize = 32; // 256 bits = 32 bytes - -#[allow(unused_imports)] -pub(crate) use self::leb128::{leb128_i64, leb128_u32, leb128_u64, nonzero_leb128_u64}; - -pub(crate) type ParseResult<'a, O, E> = Result<(Input<'a>, O), ParseError>; - -/// The input to be parsed. This is a combination of an underlying slice, plus an offset into that -/// slice. Consequently it is very cheap to copy. 
-#[derive(PartialEq, Clone, Copy)] -pub(crate) struct Input<'a> { - bytes: &'a [u8], - position: usize, - original: &'a [u8], -} - -impl<'a> std::fmt::Debug for Input<'a> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!( - f, - "Input(len: {}, position: {}, original_len: {})", - self.bytes.len(), - self.position, - self.original.len() - ) - } -} - -impl<'a> Input<'a> { - pub(crate) fn new(bytes: &'a [u8]) -> Self { - Self { - bytes, - position: 0, - original: bytes, - } - } - - #[cfg(test)] - pub(in crate::storage::parse) fn with_position(bytes: &'a [u8], position: usize) -> Input<'a> { - let remaining = &bytes[position..]; - Self { - bytes: remaining, - position, - original: bytes, - } - } - - pub(crate) fn empty() -> Self { - Self { - bytes: &[], - position: 0, - original: &[], - } - } - - fn take_1(&self) -> ParseResult<'a, u8, E> { - if let Some(need) = NonZeroUsize::new(1_usize.saturating_sub(self.bytes.len())) { - Err(ParseError::Incomplete(Needed::Size(need))) - } else { - let (result, remaining) = self.bytes.split_at(1); - let new_input = Input { - bytes: remaining, - original: self.original, - position: self.position + 1, - }; - Ok((new_input, result[0])) - } - } - - fn take_n(&self, n: usize) -> ParseResult<'a, &'a [u8], E> { - if let Some(need) = NonZeroUsize::new(n.saturating_sub(self.bytes.len())) { - Err(ParseError::Incomplete(Needed::Size(need))) - } else { - let (result, remaining) = self.bytes.split_at(n); - let new_input = Input { - bytes: remaining, - original: self.original, - position: self.position + n, - }; - Ok((new_input, result)) - } - } - - fn take_4(&self) -> ParseResult<'a, [u8; 4], E> { - if let Some(need) = NonZeroUsize::new(4_usize.saturating_sub(self.bytes.len())) { - Err(ParseError::Incomplete(Needed::Size(need))) - } else { - let (result, remaining) = self.bytes.split_at(4); - let new_input = Input { - bytes: remaining, - original: self.original, - position: self.position + 4, - }; - Ok((new_input, 
result.try_into().expect("we checked the length"))) - } - } - - fn range_of(&self, mut parser: P) -> ParseResult<'a, RangeOf, E> - where - P: Parser<'a, R, E>, - { - let (new_input, value) = parser.parse(*self)?; - let range = self.position..new_input.position; - Ok((new_input, RangeOf { range, value })) - } - - fn rest(&self) -> ParseResult<'a, &'a [u8], E> { - let position = self.position + self.bytes.len(); - let new_input = Self { - position, - original: self.original, - bytes: &[], - }; - Ok((new_input, self.bytes)) - } - - fn truncate(&self, length: usize) -> Input<'a> { - let length = if length > self.bytes.len() { - self.bytes.len() - } else { - length - }; - Input { - bytes: &self.bytes[..length], - position: self.position, - original: &self.original[..(self.position + length)], - } - } - - fn skip(&self, length: usize) -> Input<'a> { - if length > self.bytes.len() { - Input { - bytes: &[], - position: self.bytes.len(), - original: self.original, - } - } else { - Input { - bytes: &self.bytes[length..], - position: self.position + length, - original: &self.original[(self.position + length)..], - } - } - } - - /// Split this input into two separate inputs, the first is the same as the current input but - /// with the remaining unconsumed_bytes set to at most length. The remaining `Input` is the bytes - /// after `length`. - /// - /// This is useful if you are parsing input which contains length delimited chunks. In this - /// case you may have a single input where you parse a header, then you want to parse the - /// current input up until the length and then parse the next chunk from the remainign input. 
- /// For example: - /// - /// ```rust,ignore - /// # use automerge::storage::parse::{Input, ParseResult}; - /// - /// fn parse_chunk(input: Input<'_>) -> ParseResult<(), ()> { - /// Ok(()) - /// } - /// - /// # fn main() -> ParseResult<(), ()> { - /// let incoming_bytes: &[u8] = todo!(); - /// let mut input = Input::new(incoming_bytes); - /// let mut chunks = Vec::new(); - /// while !input.is_empty() { - /// let (i, chunk_len) = leb128_u64(input)?; - /// let Split{first: i, remaining} = i.split(chunk_len); - /// // Note that here, the `i` we pass into `parse_chunk` has already parsed the header, - /// // so the logic of the `parse_chunk` function doesn't need to reimplement the header - /// // parsing - /// let (i, chunk) = parse_chunk(i)?; - /// let input = remaining; - /// } - /// parse_chunk(i); - /// # } - /// ``` - pub(crate) fn split(&self, length: usize) -> Split<'a> { - Split { - first: self.truncate(length), - remaining: self.skip(length), - } - } - - /// Return a new `Input` which forgets about the consumed input. The new `Input` will have it's - /// position set to 0. This is equivalent to `Input::new(self.bytes())` - pub(crate) fn reset(&self) -> Input<'a> { - Input::new(self.bytes) - } - - /// Check if there are any more bytes left to consume - pub(crate) fn is_empty(&self) -> bool { - self.bytes.is_empty() - } - - /// The bytes which have not yet been consumed - pub(crate) fn unconsumed_bytes(&self) -> &'a [u8] { - self.bytes - } - - /// The bytes behind this input - including bytes which have been consumed - #[allow(clippy::misnamed_getters)] - pub(crate) fn bytes(&self) -> &'a [u8] { - self.original - } -} - -/// Returned by [`Input::split`] -pub(crate) struct Split<'a> { - /// The input up to the length passed to `split`. This is identical to the original input - /// except that [`Input::bytes`] and [`Input::unconsumed_bytes`] will only return the original - /// input up to `length` bytes from the point at which `split` was called. 
- pub(crate) first: Input<'a>, - /// The remaining input after the length passed to `split`. This is equivalent to - /// - /// ```rust,ignore - /// # use automerge::storage::parse::Input; - /// # let split_length = 1; - /// let original_input = todo!(); - /// Input::new(original_input.bytes()[split_length..]) - /// ``` - pub(crate) remaining: Input<'a>, -} - -pub(crate) trait Parser<'a, O, E> { - fn parse(&mut self, input: Input<'a>) -> ParseResult<'a, O, E>; -} - -impl<'a, O, F, E> Parser<'a, O, E> for F -where - F: FnMut(Input<'a>) -> ParseResult<'a, O, E>, -{ - fn parse(&mut self, input: Input<'a>) -> ParseResult<'a, O, E> { - (self)(input) - } -} - -#[derive(Clone, Debug, PartialEq)] -pub(crate) enum ParseError { - /// Some application specific error occurred - Error(E), - /// A combinator requested more data than we have available - Incomplete(Needed), -} - -impl ParseError { - /// Convert any underlying `E` into `F`. This is useful when you are composing parsers - pub(crate) fn lift(self) -> ParseError - where - F: From, - { - match self { - Self::Error(e) => ParseError::Error(F::from(e)), - Self::Incomplete(n) => ParseError::Incomplete(n), - } - } -} - -impl std::fmt::Display for ParseError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::Error(e) => write!(f, "{}", e), - Self::Incomplete(_) => write!(f, "not enough data"), - } - } -} - -impl std::error::Error for ParseError {} - -/// How much more input we need -#[derive(Clone, Debug, PartialEq)] -pub(crate) enum Needed { - /// We don't know how much more - #[allow(dead_code)] - Unknown, - /// We need _at least_ this much more - Size(NonZeroUsize), -} - -/// Map the function `f` over the result of `parser` returning a new parser -pub(crate) fn map<'a, O1, O2, F, G, Er>( - mut parser: F, - mut f: G, -) -> impl FnMut(Input<'a>) -> ParseResult<'a, O2, Er> -where - F: Parser<'a, O1, Er>, - G: FnMut(O1) -> O2, -{ - move |input: Input<'a>| { - let (input, o1) = 
parser.parse(input)?; - Ok((input, f(o1))) - } -} - -/// Pull one byte from the input -pub(crate) fn take1(input: Input<'_>) -> ParseResult<'_, u8, E> { - input.take_1() -} - -/// Parse an array of four bytes from the input -pub(crate) fn take4(input: Input<'_>) -> ParseResult<'_, [u8; 4], E> { - input.take_4() -} - -/// Parse a slice of length `n` from `input` -pub(crate) fn take_n(n: usize, input: Input<'_>) -> ParseResult<'_, &[u8], E> { - input.take_n(n) -} - -/// Parse a length prefixed collection of `g` -/// -/// This first parses a LEB128 encoded `u64` from the input, then applies the parser `g` this many -/// times, returning the result in a `Vec`. -pub(crate) fn length_prefixed<'a, G, O, Er>( - mut g: G, -) -> impl FnMut(Input<'a>) -> ParseResult<'a, Vec, Er> -where - G: Parser<'a, O, Er>, - Er: From, -{ - move |input: Input<'a>| { - let (i, count) = leb128_u64(input).map_err(|e| e.lift())?; - let mut res = Vec::new(); - let mut input = i; - for _ in 0..count { - match g.parse(input) { - Ok((i, e)) => { - input = i; - res.push(e); - } - Err(e) => { - return Err(e); - } - } - } - Ok((input, res)) - } -} - -/// Parse a length prefixed array of bytes from the input -/// -/// This first parses a LEB128 encoded `u64` from the input, then parses this many bytes from the -/// underlying input. -pub(crate) fn length_prefixed_bytes(input: Input<'_>) -> ParseResult<'_, &[u8], E> -where - E: From, -{ - let (i, len) = leb128_u64(input).map_err(|e| e.lift())?; - take_n(len as usize, i) -} - -/// Apply two parsers, returning the result in a 2 tuple -/// -/// This first applies `f`, then `g` and returns the result as `(f, g)`. 
-pub(super) fn tuple2<'a, F, E, G, H, Er>( - mut f: F, - mut g: G, -) -> impl FnMut(Input<'a>) -> ParseResult<'_, (E, H), Er> -where - F: Parser<'a, E, Er>, - G: Parser<'a, H, Er>, -{ - move |input: Input<'a>| { - let (i, one) = f.parse(input)?; - let (i, two) = g.parse(i)?; - Ok((i, (one, two))) - } -} - -/// Apply the parser `f` `n` times and reutrn the result in a `Vec` -pub(super) fn apply_n<'a, F, E, Er>( - n: usize, - mut f: F, -) -> impl FnMut(Input<'a>) -> ParseResult<'_, Vec, Er> -where - F: Parser<'a, E, Er>, -{ - move |input: Input<'a>| { - let mut i = input; - let mut result = Vec::new(); - for _ in 0..n { - let (new_i, e) = f.parse(i)?; - result.push(e); - i = new_i; - } - Ok((i, result)) - } -} - -/// Parse a length prefixed actor ID -/// -/// This first parses a LEB128 encoded u64 from the input, then the corresponding number of bytes -/// which are returned wrapped in an `ActorId` -pub(crate) fn actor_id(input: Input<'_>) -> ParseResult<'_, ActorId, E> -where - E: From, -{ - let (i, length) = leb128_u64(input).map_err(|e| e.lift())?; - let (i, bytes) = take_n(length as usize, i)?; - Ok((i, bytes.into())) -} - -/// Parse a change hash. -/// -/// This is just a nice wrapper around `take_4` -pub(crate) fn change_hash(input: Input<'_>) -> ParseResult<'_, ChangeHash, E> { - let (i, bytes) = take_n(HASH_SIZE, input)?; - let byte_arr: ChangeHash = bytes.try_into().expect("we checked the length above"); - Ok((i, byte_arr)) -} - -#[derive(thiserror::Error, Debug)] -#[error("invalid UTF-8")] -pub(crate) struct InvalidUtf8; - -/// Parse a length prefixed UTF-8 string -/// -/// This first parses a LEB128 encode `u64` from the input, then parses this many bytes from the -/// input before attempting to convert these bytes into a `String`, returning -/// `ParseError::Error(InvalidUtf8)` if that fails. 
-pub(crate) fn utf_8(len: usize, input: Input<'_>) -> ParseResult<'_, String, E> -where - E: From, -{ - let (i, bytes) = take_n(len, input)?; - let result = String::from_utf8(bytes.to_vec()) - .map_err(|_| ParseError::Error(InvalidUtf8)) - .map_err(|e| e.lift())?; - Ok((i, result)) -} - -/// Returned from `range_of` -pub(crate) struct RangeOf { - /// The range in the input where we parsed from - pub(crate) range: std::ops::Range, - /// The value we parsed - pub(crate) value: T, -} - -/// Evaluate `parser` and then return the value parsed, as well as the range in the input which we -/// just parsed. -/// -/// This is useful when you want to parse some data from an input in order to check that is valid, -/// but you will also be holding on to the input data and want to know where in the input data the -/// valid data was parsed from. -/// -/// # Example -/// -/// Imagine that we are parsing records of some kind from a file, as well as parsing the record we -/// want to record the offset in the file where the record is so we can update it in place. 
-/// -/// ```rust,ignore -/// # use automerge::storage::parse::{ParseResult, Input}; -/// struct Message; -/// struct Record { -/// message: Message, -/// location: std::ops::Range -/// } -/// -/// fn parse_message<'a>(input: Input<'a>) -> ParseResult<'a, Message, ()> { -/// unimplemented!() -/// } -/// -/// fn parse_record<'a>(input: Input<'a>) -> ParseResult<'a, Record, ()> { -/// let (i, RangeOf{range: location, value: message}) = range_of(|i| parse_message(i), i)?; -/// Ok((i, Record { -/// location, // <- this is the location in the input where the message was parsed from -/// message, -/// })) -/// } -/// -/// let file_contents: Vec = unimplemented!(); -/// let input = Input::new(&file_contents); -/// let record = parse_record(input).unwrap().1; -/// ``` -pub(crate) fn range_of<'a, P, R, E>(parser: P, input: Input<'a>) -> ParseResult<'a, RangeOf, E> -where - P: Parser<'a, R, E>, -{ - input.range_of(parser) -} - -/// Parse all the remaining input from the parser. This can never fail -pub(crate) fn take_rest(input: Input<'_>) -> ParseResult<'_, &'_ [u8], E> { - input.rest() -} diff --git a/rust/automerge/src/storage/parse/leb128.rs b/rust/automerge/src/storage/parse/leb128.rs deleted file mode 100644 index 9f5e72a2..00000000 --- a/rust/automerge/src/storage/parse/leb128.rs +++ /dev/null @@ -1,302 +0,0 @@ -use std::num::NonZeroU64; - -use super::{take1, Input, ParseError, ParseResult}; - -#[derive(PartialEq, thiserror::Error, Debug, Clone)] -pub(crate) enum Error { - #[error("leb128 was too large for the destination type")] - Leb128TooLarge, - #[error("leb128 was improperly encoded")] - Leb128Overlong, - #[error("leb128 was zero when it was expected to be nonzero")] - UnexpectedZero, -} - -pub(crate) fn leb128_u64(input: Input<'_>) -> ParseResult<'_, u64, E> -where - E: From, -{ - let mut res = 0; - let mut shift = 0; - let mut input = input; - - loop { - let (i, byte) = take1(input)?; - input = i; - res |= ((byte & 0x7F) as u64) << shift; - shift += 7; - - if 
(byte & 0x80) == 0 { - if shift > 64 && byte > 1 { - return Err(ParseError::Error(Error::Leb128TooLarge.into())); - } else if shift > 7 && byte == 0 { - return Err(ParseError::Error(Error::Leb128Overlong.into())); - } - return Ok((input, res)); - } else if shift > 64 { - return Err(ParseError::Error(Error::Leb128TooLarge.into())); - } - } -} - -pub(crate) fn leb128_i64(input: Input<'_>) -> ParseResult<'_, i64, E> -where - E: From, -{ - let mut res = 0; - let mut shift = 0; - - let mut input = input; - let mut prev = 0; - loop { - let (i, byte) = take1(input)?; - input = i; - res |= ((byte & 0x7F) as i64) << shift; - shift += 7; - - if (byte & 0x80) == 0 { - if shift > 64 && byte != 0 && byte != 0x7f { - // the 10th byte (if present) must contain only the sign-extended sign bit - return Err(ParseError::Error(Error::Leb128TooLarge.into())); - } else if shift > 7 - && ((byte == 0 && prev & 0x40 == 0) || (byte == 0x7f && prev & 0x40 > 0)) - { - // overlong if the sign bit of penultimate byte has been extended - return Err(ParseError::Error(Error::Leb128Overlong.into())); - } else if shift < 64 && byte & 0x40 > 0 { - // sign extend negative numbers - res |= -1 << shift; - } - return Ok((input, res)); - } else if shift > 64 { - return Err(ParseError::Error(Error::Leb128TooLarge.into())); - } - prev = byte; - } -} - -pub(crate) fn leb128_u32(input: Input<'_>) -> ParseResult<'_, u32, E> -where - E: From, -{ - let (i, num) = leb128_u64(input)?; - let result = u32::try_from(num).map_err(|_| ParseError::Error(Error::Leb128TooLarge.into()))?; - Ok((i, result)) -} - -/// Parse a LEB128 encoded u64 from the input, throwing an error if it is `0` -pub(crate) fn nonzero_leb128_u64(input: Input<'_>) -> ParseResult<'_, NonZeroU64, E> -where - E: From, -{ - let (input, num) = leb128_u64(input)?; - let result = - NonZeroU64::new(num).ok_or_else(|| ParseError::Error(Error::UnexpectedZero.into()))?; - Ok((input, result)) -} - -#[cfg(test)] -mod tests { - use super::super::Needed; - use 
super::*; - use std::num::NonZeroUsize; - - const NEED_ONE: Needed = Needed::Size(unsafe { NonZeroUsize::new_unchecked(1) }); - - #[test] - fn leb_128_u64() { - let one = &[0b00000001_u8]; - let one_two_nine = &[0b10000001, 0b00000001]; - let one_and_more = &[0b00000001, 0b00000011]; - - let scenarios: Vec<(&'static [u8], ParseResult<'_, u64, Error>)> = vec![ - (one, Ok((Input::with_position(one, 1), 1))), - ( - one_two_nine, - Ok((Input::with_position(one_two_nine, 2), 129)), - ), - (one_and_more, Ok((Input::with_position(one_and_more, 1), 1))), - ]; - for (index, (input, expected)) in scenarios.clone().into_iter().enumerate() { - let result = leb128_u64(Input::new(input)); - if result != expected { - panic!( - "Scenario {} failed for u64: expected {:?} got {:?}", - index + 1, - expected, - result - ); - } - } - - let error_cases: Vec<(&'static str, &'static [u8], ParseError<_>)> = vec![ - ( - "too many bytes", - &[129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129], - ParseError::Error(Error::Leb128TooLarge), - ), - ( - "too many bits", - &[129, 129, 129, 129, 129, 129, 129, 129, 129, 2], - ParseError::Error(Error::Leb128TooLarge), - ), - ( - "overlong encoding", - &[129, 0], - ParseError::Error(Error::Leb128Overlong), - ), - ("missing data", &[255], ParseError::Incomplete(NEED_ONE)), - ]; - error_cases.into_iter().for_each(|(desc, input, expected)| { - match leb128_u64::(Input::new(input)) { - Ok((_, x)) => panic!("leb128_u64 should fail with {}, got {}", desc, x), - Err(error) => { - if error != expected { - panic!("leb128_u64 should fail with {}, got {}", expected, error) - } - } - } - }); - - let success_cases: Vec<(&'static [u8], u64)> = vec![ - (&[0], 0), - (&[0x7f], 127), - (&[0x80, 0x01], 128), - (&[0xff, 0x7f], 16383), - ( - &[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x1], - u64::MAX, - ), - ]; - success_cases.into_iter().for_each(|(input, expected)| { - match leb128_u64::(Input::new(input)) { - Ok((_, x)) => { - if x != expected { - 
panic!("leb128_u64 should succeed with {}, got {}", expected, x) - } - } - Err(error) => panic!("leb128_u64 should succeed with {}, got {}", expected, error), - } - }); - } - - #[test] - fn leb_128_u32() { - let error_cases: Vec<(&'static str, &'static [u8], ParseError<_>)> = vec![ - ( - "too many bytes", - &[129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129], - ParseError::Error(Error::Leb128TooLarge), - ), - ( - "too many bits", - &[0xff, 0xff, 0xff, 0xff, 0x1f], - ParseError::Error(Error::Leb128TooLarge), - ), - ( - "overlong encoding", - &[129, 0], - ParseError::Error(Error::Leb128Overlong), - ), - ("missing data", &[0xaa], ParseError::Incomplete(NEED_ONE)), - ]; - error_cases.into_iter().for_each(|(desc, input, expected)| { - match leb128_u32::(Input::new(input)) { - Ok((_, x)) => panic!("leb128_u32 should fail with {}, got {}", desc, x), - Err(error) => { - if error != expected { - panic!("leb128_u32 should fail with {}, got {}", expected, error) - } - } - } - }); - - let success_cases: Vec<(&'static [u8], u32)> = vec![ - (&[0], 0), - (&[0x7f], 127), - (&[0x80, 0x01], 128), - (&[0xff, 0x7f], 16383), - (&[0xff, 0xff, 0xff, 0xff, 0x0f], u32::MAX), - ]; - success_cases.into_iter().for_each(|(input, expected)| { - match leb128_u32::(Input::new(input)) { - Ok((_, x)) => { - if x != expected { - panic!("leb128_u32 should succeed with {}, got {}", expected, x) - } - } - Err(error) => panic!("leb128_u64 should succeed with {}, got {}", expected, error), - } - }); - } - - #[test] - fn leb_128_i64() { - let error_cases: Vec<(&'static str, &'static [u8], ParseError<_>)> = vec![ - ( - "too many bytes", - &[129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129], - ParseError::Error(Error::Leb128TooLarge), - ), - ( - "too many positive bits", - &[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01], - ParseError::Error(Error::Leb128TooLarge), - ), - ( - "too many negative bits", - &[0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x7e], - 
ParseError::Error(Error::Leb128TooLarge), - ), - ( - "overlong positive encoding", - &[0xbf, 0], - ParseError::Error(Error::Leb128Overlong), - ), - ( - "overlong negative encoding", - &[0x81, 0xff, 0x7f], - ParseError::Error(Error::Leb128Overlong), - ), - ("missing data", &[0x90], ParseError::Incomplete(NEED_ONE)), - ]; - error_cases.into_iter().for_each(|(desc, input, expected)| { - match leb128_i64::(Input::new(input)) { - Ok((_, x)) => panic!("leb128_i64 should fail with {}, got {}", desc, x), - Err(error) => { - if error != expected { - panic!("leb128_i64 should fail with {}, got {}", expected, error) - } - } - } - }); - - let success_cases: Vec<(&'static [u8], i64)> = vec![ - (&[0], 0), - (&[0x7f], -1), - (&[0x3f], 63), - (&[0x40], -64), - (&[0x80, 0x01], 128), - (&[0xff, 0x3f], 8191), - (&[0x80, 0x40], -8192), - ( - &[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0], - i64::MAX, - ), - ( - &[0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x7f], - i64::MIN, - ), - ]; - success_cases.into_iter().for_each(|(input, expected)| { - match leb128_i64::(Input::new(input)) { - Ok((_, x)) => { - if x != expected { - panic!("leb128_i64 should succeed with {}, got {}", expected, x) - } - } - Err(error) => panic!("leb128_u64 should succeed with {}, got {}", expected, error), - } - }); - } -} diff --git a/rust/automerge/src/storage/save.rs b/rust/automerge/src/storage/save.rs deleted file mode 100644 index 4921bd35..00000000 --- a/rust/automerge/src/storage/save.rs +++ /dev/null @@ -1,2 +0,0 @@ -mod document; -pub(crate) use document::save_document; diff --git a/rust/automerge/src/storage/save/document.rs b/rust/automerge/src/storage/save/document.rs deleted file mode 100644 index f27d920d..00000000 --- a/rust/automerge/src/storage/save/document.rs +++ /dev/null @@ -1,146 +0,0 @@ -use std::{borrow::Cow, collections::BTreeMap, iter::Iterator}; - -use crate::{ - indexed_cache::IndexedCache, - storage::{ - change::DEFLATE_MIN_SIZE, convert::op_as_docop, 
AsChangeMeta, CompressConfig, Document, - }, - types::{ActorId, ObjId, Op}, - Change, ChangeHash, -}; - -/// # Panics -/// -/// * If any of the `heads` are not in `changes` -/// * If any of ops in `ops` reference an actor which is not in `actors` -/// * If any of ops in `ops` reference a property which is not in `props` -/// * If any of the changes reference a dependency index which is not in `changes` -#[tracing::instrument(skip(changes, ops, actors, props, config))] -pub(crate) fn save_document<'a, I, O>( - changes: I, - ops: O, - actors: &'a IndexedCache, - props: &IndexedCache, - heads: &[ChangeHash], - config: Option, -) -> Vec -where - I: Iterator + Clone + 'a, - O: Iterator + Clone + ExactSizeIterator, -{ - let actor_lookup = actors.encode_index(); - let doc_ops = ops.map(|(obj, op)| op_as_docop(&actor_lookup, props, obj, op)); - - let hash_graph = HashGraph::new(changes.clone()); - let changes = changes.map(|c| ChangeWithGraph { - actors, - actor_lookup: &actor_lookup, - change: c, - graph: &hash_graph, - }); - - let doc = Document::new( - actors.sorted().cache, - hash_graph.heads_with_indices(heads.to_vec()), - doc_ops, - changes, - config.unwrap_or(CompressConfig::Threshold(DEFLATE_MIN_SIZE)), - ); - doc.into_bytes() -} - -struct HashGraph { - index_by_hash: BTreeMap, -} - -impl HashGraph { - fn new<'a, I>(changes: I) -> Self - where - I: Iterator, - { - let mut index_by_hash = BTreeMap::new(); - for (index, change) in changes.enumerate() { - index_by_hash.insert(change.hash(), index); - } - Self { index_by_hash } - } - - fn change_index(&self, hash: &ChangeHash) -> usize { - self.index_by_hash[hash] - } - - fn heads_with_indices(&self, heads: Vec) -> Vec<(ChangeHash, usize)> { - heads - .into_iter() - .map(|h| (h, self.index_by_hash[&h])) - .collect() - } -} - -struct ChangeWithGraph<'a> { - change: &'a Change, - graph: &'a HashGraph, - actor_lookup: &'a [usize], - actors: &'a IndexedCache, -} - -impl<'a> AsChangeMeta<'a> for ChangeWithGraph<'a> { - type 
DepsIter = ChangeDepsIter<'a>; - - fn actor(&self) -> u64 { - self.actor_lookup[self.actors.lookup(self.change.actor_id()).unwrap()] as u64 - } - - fn seq(&self) -> u64 { - self.change.seq() - } - - fn deps(&self) -> Self::DepsIter { - ChangeDepsIter { - change: self.change, - graph: self.graph, - offset: 0, - } - } - - fn extra(&self) -> Cow<'a, [u8]> { - self.change.extra_bytes().into() - } - - fn max_op(&self) -> u64 { - self.change.max_op() - } - - fn message(&self) -> Option> { - self.change.message().map(|m| Cow::Owned(m.into())) - } - - fn timestamp(&self) -> i64 { - self.change.timestamp() - } -} - -struct ChangeDepsIter<'a> { - change: &'a Change, - graph: &'a HashGraph, - offset: usize, -} - -impl<'a> ExactSizeIterator for ChangeDepsIter<'a> { - fn len(&self) -> usize { - self.change.deps().len() - } -} - -impl<'a> Iterator for ChangeDepsIter<'a> { - type Item = u64; - - fn next(&mut self) -> Option { - if let Some(dep) = self.change.deps().get(self.offset) { - self.offset += 1; - Some(self.graph.change_index(dep) as u64) - } else { - None - } - } -} diff --git a/rust/automerge/src/sync.rs b/rust/automerge/src/sync.rs deleted file mode 100644 index d6dc2580..00000000 --- a/rust/automerge/src/sync.rs +++ /dev/null @@ -1,963 +0,0 @@ -//! # Sync Protocol -//! -//! The sync protocol is based on this paper: -//! , it assumes a reliable in-order stream -//! between two peers who are synchronizing a document. -//! -//! Each peer maintains a [`State`] for each peer they are synchronizing with. -//! This state tracks things like what the heads of the other peer are and -//! whether there are in-flight messages. Anything which implements [`SyncDoc`] -//! can take part in the sync protocol. The flow goes something like this: -//! -//! * The initiating peer creates an empty [`State`] and then calls -//! [`SyncDoc::generate_sync_message`] to generate new sync message and sends -//! it to the receiving peer. -//! 
* The receiving peer receives a message from the initiator, creates a new -//! [`State`], and calls [`SyncDoc::receive_sync_message`] on it's view of the -//! document -//! * The receiving peer then calls [`SyncDoc::generate_sync_message`] to generate -//! a new sync message and send it back to the initiator -//! * From this point on each peer operates in a loop, receiving a sync message -//! from the other peer and then generating a new message to send back. -//! -//! ## Example -//! -//! ``` -//! use automerge::{transaction::Transactable, sync::{self, SyncDoc}, ReadDoc}; -//! # fn main() -> Result<(), automerge::AutomergeError> { -//! // Create a document on peer1 -//! let mut peer1 = automerge::AutoCommit::new(); -//! peer1.put(automerge::ROOT, "key", "value")?; -//! -//! // Create a state to track our sync with peer2 -//! let mut peer1_state = sync::State::new(); -//! // Generate the initial message to send to peer2, unwrap for brevity -//! let message1to2 = peer1.sync().generate_sync_message(&mut peer1_state).unwrap(); -//! -//! // We receive the message on peer2. We don't have a document at all yet -//! // so we create one -//! let mut peer2 = automerge::AutoCommit::new(); -//! // We don't have a state for peer1 (it's a new connection), so we create one -//! let mut peer2_state = sync::State::new(); -//! // Now receive the message from peer 1 -//! peer2.sync().receive_sync_message(&mut peer2_state, message1to2)?; -//! -//! // Now we loop, sending messages from one to two and two to one until -//! // neither has anything new to send -//! -//! loop { -//! let two_to_one = peer2.sync().generate_sync_message(&mut peer2_state); -//! if let Some(message) = two_to_one.as_ref() { -//! println!("two to one"); -//! peer1.sync().receive_sync_message(&mut peer1_state, message.clone())?; -//! } -//! let one_to_two = peer1.sync().generate_sync_message(&mut peer1_state); -//! if let Some(message) = one_to_two.as_ref() { -//! println!("one to two"); -//! 
peer2.sync().receive_sync_message(&mut peer2_state, message.clone())?; -//! } -//! if two_to_one.is_none() && one_to_two.is_none() { -//! break; -//! } -//! } -//! -//! assert_eq!(peer2.get(automerge::ROOT, "key")?.unwrap().0.to_str(), Some("value")); -//! -//! # Ok(()) -//! # } -//! ``` - -use itertools::Itertools; -use serde::ser::SerializeMap; -use std::collections::{HashMap, HashSet}; - -use crate::{ - storage::{parse, Change as StoredChange, ReadChangeOpError}, - Automerge, AutomergeError, Change, ChangeHash, OpObserver, ReadDoc, -}; - -mod bloom; -mod state; - -pub use bloom::{BloomFilter, DecodeError as DecodeBloomError}; -pub use state::DecodeError as DecodeStateError; -pub use state::{Have, State}; - -/// A document which can take part in the sync protocol -/// -/// See the [module level documentation](crate::sync) for more details. -pub trait SyncDoc { - /// Generate a sync message for the remote peer represented by `sync_state` - /// - /// If this returns `None` then there are no new messages to send, either because we are - /// waiting for an acknolwedgement of an in-flight message, or because the remote is up to - /// date. 
- fn generate_sync_message(&self, sync_state: &mut State) -> Option; - - /// Apply a received sync message to this document and `sync_state` - fn receive_sync_message( - &mut self, - sync_state: &mut State, - message: Message, - ) -> Result<(), AutomergeError>; - - /// Apply a received sync message to this document and `sync_state`, observing any changes with - /// `op_observer` - fn receive_sync_message_with( - &mut self, - sync_state: &mut State, - message: Message, - op_observer: &mut Obs, - ) -> Result<(), AutomergeError>; -} - -const MESSAGE_TYPE_SYNC: u8 = 0x42; // first byte of a sync message, for identification - -impl SyncDoc for Automerge { - fn generate_sync_message(&self, sync_state: &mut State) -> Option { - let our_heads = self.get_heads(); - - let our_need = self.get_missing_deps(sync_state.their_heads.as_ref().unwrap_or(&vec![])); - - let their_heads_set = if let Some(ref heads) = sync_state.their_heads { - heads.iter().collect::>() - } else { - HashSet::new() - }; - let our_have = if our_need.iter().all(|hash| their_heads_set.contains(hash)) { - vec![self.make_bloom_filter(sync_state.shared_heads.clone())] - } else { - Vec::new() - }; - - if let Some(ref their_have) = sync_state.their_have { - if let Some(first_have) = their_have.first().as_ref() { - if !first_have - .last_sync - .iter() - .all(|hash| self.get_change_by_hash(hash).is_some()) - { - let reset_msg = Message { - heads: our_heads, - need: Vec::new(), - have: vec![Have::default()], - changes: Vec::new(), - }; - return Some(reset_msg); - } - } - } - - let changes_to_send = if let (Some(their_have), Some(their_need)) = ( - sync_state.their_have.as_ref(), - sync_state.their_need.as_ref(), - ) { - self.get_changes_to_send(their_have, their_need) - .expect("Should have only used hashes that are in the document") - } else { - Vec::new() - }; - - let heads_unchanged = sync_state.last_sent_heads == our_heads; - - let heads_equal = if let Some(their_heads) = sync_state.their_heads.as_ref() { - 
their_heads == &our_heads - } else { - false - }; - - // deduplicate the changes to send with those we have already sent and clone it now - let changes_to_send = changes_to_send - .into_iter() - .filter_map(|change| { - if !sync_state.sent_hashes.contains(&change.hash()) { - Some(change.clone()) - } else { - None - } - }) - .collect::>(); - - if heads_unchanged { - if heads_equal && changes_to_send.is_empty() { - return None; - } - if sync_state.in_flight { - return None; - } - } - - sync_state.last_sent_heads = our_heads.clone(); - sync_state - .sent_hashes - .extend(changes_to_send.iter().map(|c| c.hash())); - - let sync_message = Message { - heads: our_heads, - have: our_have, - need: our_need, - changes: changes_to_send, - }; - - sync_state.in_flight = true; - Some(sync_message) - } - - fn receive_sync_message( - &mut self, - sync_state: &mut State, - message: Message, - ) -> Result<(), AutomergeError> { - self.do_receive_sync_message::<()>(sync_state, message, None) - } - - fn receive_sync_message_with( - &mut self, - sync_state: &mut State, - message: Message, - op_observer: &mut Obs, - ) -> Result<(), AutomergeError> { - self.do_receive_sync_message(sync_state, message, Some(op_observer)) - } -} - -impl Automerge { - fn make_bloom_filter(&self, last_sync: Vec) -> Have { - let new_changes = self - .get_changes(&last_sync) - .expect("Should have only used hashes that are in the document"); - let hashes = new_changes.iter().map(|change| change.hash()); - Have { - last_sync, - bloom: BloomFilter::from_hashes(hashes), - } - } - - fn get_changes_to_send( - &self, - have: &[Have], - need: &[ChangeHash], - ) -> Result, AutomergeError> { - if have.is_empty() { - Ok(need - .iter() - .filter_map(|hash| self.get_change_by_hash(hash)) - .collect()) - } else { - let mut last_sync_hashes = HashSet::new(); - let mut bloom_filters = Vec::with_capacity(have.len()); - - for h in have { - let Have { last_sync, bloom } = h; - last_sync_hashes.extend(last_sync); - 
bloom_filters.push(bloom); - } - let last_sync_hashes = last_sync_hashes.into_iter().copied().collect::>(); - - let changes = self.get_changes(&last_sync_hashes)?; - - let mut change_hashes = HashSet::with_capacity(changes.len()); - let mut dependents: HashMap> = HashMap::new(); - let mut hashes_to_send = HashSet::new(); - - for change in &changes { - change_hashes.insert(change.hash()); - - for dep in change.deps() { - dependents.entry(*dep).or_default().push(change.hash()); - } - - if bloom_filters - .iter() - .all(|bloom| !bloom.contains_hash(&change.hash())) - { - hashes_to_send.insert(change.hash()); - } - } - - let mut stack = hashes_to_send.iter().copied().collect::>(); - while let Some(hash) = stack.pop() { - if let Some(deps) = dependents.get(&hash) { - for dep in deps { - if hashes_to_send.insert(*dep) { - stack.push(*dep); - } - } - } - } - - let mut changes_to_send = Vec::new(); - for hash in need { - if !hashes_to_send.contains(hash) { - if let Some(change) = self.get_change_by_hash(hash) { - changes_to_send.push(change); - } - } - } - - for change in changes { - if hashes_to_send.contains(&change.hash()) { - changes_to_send.push(change); - } - } - Ok(changes_to_send) - } - } - - fn do_receive_sync_message( - &mut self, - sync_state: &mut State, - message: Message, - op_observer: Option<&mut Obs>, - ) -> Result<(), AutomergeError> { - let before_heads = self.get_heads(); - - let Message { - heads: message_heads, - changes: message_changes, - need: message_need, - have: message_have, - } = message; - - let changes_is_empty = message_changes.is_empty(); - if !changes_is_empty { - self.apply_changes_with(message_changes, op_observer)?; - sync_state.shared_heads = advance_heads( - &before_heads.iter().collect(), - &self.get_heads().into_iter().collect(), - &sync_state.shared_heads, - ); - } - - // trim down the sent hashes to those that we know they haven't seen - self.filter_changes(&message_heads, &mut sync_state.sent_hashes)?; - - if changes_is_empty && 
message_heads == before_heads { - sync_state.last_sent_heads = message_heads.clone(); - } - - if sync_state.sent_hashes.is_empty() { - sync_state.in_flight = false; - } - - let known_heads = message_heads - .iter() - .filter(|head| self.get_change_by_hash(head).is_some()) - .collect::>(); - if known_heads.len() == message_heads.len() { - sync_state.shared_heads = message_heads.clone(); - sync_state.in_flight = false; - // If the remote peer has lost all its data, reset our state to perform a full resync - if message_heads.is_empty() { - sync_state.last_sent_heads = Default::default(); - sync_state.sent_hashes = Default::default(); - } - } else { - sync_state.shared_heads = sync_state - .shared_heads - .iter() - .chain(known_heads) - .copied() - .unique() - .sorted() - .collect::>(); - } - - sync_state.their_have = Some(message_have); - sync_state.their_heads = Some(message_heads); - sync_state.their_need = Some(message_need); - - Ok(()) - } -} - -#[derive(Debug, thiserror::Error)] -pub enum ReadMessageError { - #[error("expected {expected_one_of:?} but found {found}")] - WrongType { expected_one_of: Vec, found: u8 }, - #[error("{0}")] - Parse(String), - #[error(transparent)] - ReadChangeOps(#[from] ReadChangeOpError), - #[error("not enough input")] - NotEnoughInput, -} - -impl From for ReadMessageError { - fn from(e: parse::leb128::Error) -> Self { - ReadMessageError::Parse(e.to_string()) - } -} - -impl From for ReadMessageError { - fn from(e: bloom::ParseError) -> Self { - ReadMessageError::Parse(e.to_string()) - } -} - -impl From for ReadMessageError { - fn from(e: crate::storage::change::ParseError) -> Self { - ReadMessageError::Parse(format!("error parsing changes: {}", e)) - } -} - -impl From for parse::ParseError { - fn from(e: ReadMessageError) -> Self { - parse::ParseError::Error(e) - } -} - -impl From> for ReadMessageError { - fn from(p: parse::ParseError) -> Self { - match p { - parse::ParseError::Error(e) => e, - parse::ParseError::Incomplete(..) 
=> Self::NotEnoughInput, - } - } -} - -/// The sync message to be sent. -#[derive(Clone, Debug, PartialEq)] -pub struct Message { - /// The heads of the sender. - pub heads: Vec, - /// The hashes of any changes that are being explicitly requested from the recipient. - pub need: Vec, - /// A summary of the changes that the sender already has. - pub have: Vec, - /// The changes for the recipient to apply. - pub changes: Vec, -} - -impl serde::Serialize for Message { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - let mut map = serializer.serialize_map(Some(4))?; - map.serialize_entry("heads", &self.heads)?; - map.serialize_entry("need", &self.need)?; - map.serialize_entry("have", &self.have)?; - map.serialize_entry( - "changes", - &self - .changes - .iter() - .map(crate::ExpandedChange::from) - .collect::>(), - )?; - map.end() - } -} - -fn parse_have(input: parse::Input<'_>) -> parse::ParseResult<'_, Have, ReadMessageError> { - let (i, last_sync) = parse::length_prefixed(parse::change_hash)(input)?; - let (i, bloom_bytes) = parse::length_prefixed_bytes(i)?; - let (_, bloom) = BloomFilter::parse(parse::Input::new(bloom_bytes)).map_err(|e| e.lift())?; - Ok((i, Have { last_sync, bloom })) -} - -impl Message { - pub fn decode(input: &[u8]) -> Result { - let input = parse::Input::new(input); - match Self::parse(input) { - Ok((_, msg)) => Ok(msg), - Err(parse::ParseError::Error(e)) => Err(e), - Err(parse::ParseError::Incomplete(_)) => Err(ReadMessageError::NotEnoughInput), - } - } - - pub(crate) fn parse(input: parse::Input<'_>) -> parse::ParseResult<'_, Self, ReadMessageError> { - let (i, message_type) = parse::take1(input)?; - if message_type != MESSAGE_TYPE_SYNC { - return Err(parse::ParseError::Error(ReadMessageError::WrongType { - expected_one_of: vec![MESSAGE_TYPE_SYNC], - found: message_type, - })); - } - - let (i, heads) = parse::length_prefixed(parse::change_hash)(i)?; - let (i, need) = 
parse::length_prefixed(parse::change_hash)(i)?; - let (i, have) = parse::length_prefixed(parse_have)(i)?; - - let change_parser = |i| { - let (i, bytes) = parse::length_prefixed_bytes(i)?; - let (_, change) = - StoredChange::parse(parse::Input::new(bytes)).map_err(|e| e.lift())?; - Ok((i, change)) - }; - let (i, stored_changes) = parse::length_prefixed(change_parser)(i)?; - let changes_len = stored_changes.len(); - let changes: Vec = stored_changes - .into_iter() - .try_fold::<_, _, Result<_, ReadMessageError>>( - Vec::with_capacity(changes_len), - |mut acc, stored| { - let change = Change::new_from_unverified(stored.into_owned(), None) - .map_err(ReadMessageError::ReadChangeOps)?; - acc.push(change); - Ok(acc) - }, - )?; - - Ok(( - i, - Message { - heads, - need, - have, - changes, - }, - )) - } - - pub fn encode(mut self) -> Vec { - let mut buf = vec![MESSAGE_TYPE_SYNC]; - - encode_hashes(&mut buf, &self.heads); - encode_hashes(&mut buf, &self.need); - encode_many(&mut buf, self.have.iter(), |buf, h| { - encode_hashes(buf, &h.last_sync); - leb128::write::unsigned(buf, h.bloom.to_bytes().len() as u64).unwrap(); - buf.extend(h.bloom.to_bytes()); - }); - - encode_many(&mut buf, self.changes.iter_mut(), |buf, change| { - leb128::write::unsigned(buf, change.raw_bytes().len() as u64).unwrap(); - buf.extend::<&[u8]>(change.raw_bytes().as_ref()) - }); - - buf - } -} - -fn encode_many<'a, I, It, F>(out: &mut Vec, data: I, f: F) -where - I: Iterator + ExactSizeIterator + 'a, - F: Fn(&mut Vec, It), -{ - leb128::write::unsigned(out, data.len() as u64).unwrap(); - for datum in data { - f(out, datum) - } -} - -fn encode_hashes(buf: &mut Vec, hashes: &[ChangeHash]) { - debug_assert!( - hashes.windows(2).all(|h| h[0] <= h[1]), - "hashes were not sorted" - ); - encode_many(buf, hashes.iter(), |buf, hash| buf.extend(hash.as_bytes())) -} - -fn advance_heads( - my_old_heads: &HashSet<&ChangeHash>, - my_new_heads: &HashSet, - our_old_shared_heads: &[ChangeHash], -) -> Vec { - let 
new_heads = my_new_heads - .iter() - .filter(|head| !my_old_heads.contains(head)) - .copied() - .collect::>(); - - let common_heads = our_old_shared_heads - .iter() - .filter(|head| my_new_heads.contains(head)) - .copied() - .collect::>(); - - let mut advanced_heads = HashSet::with_capacity(new_heads.len() + common_heads.len()); - for head in new_heads.into_iter().chain(common_heads) { - advanced_heads.insert(head); - } - let mut advanced_heads = advanced_heads.into_iter().collect::>(); - advanced_heads.sort(); - advanced_heads -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::change::gen::gen_change; - use crate::storage::parse::Input; - use crate::transaction::Transactable; - use crate::types::gen::gen_hash; - use crate::ActorId; - use proptest::prelude::*; - - prop_compose! { - fn gen_bloom()(hashes in gen_sorted_hashes(0..10)) -> BloomFilter { - BloomFilter::from_hashes(hashes.into_iter()) - } - } - - prop_compose! { - fn gen_have()(bloom in gen_bloom(), last_sync in gen_sorted_hashes(0..10)) -> Have { - Have { - bloom, - last_sync, - } - } - } - - fn gen_sorted_hashes(size: std::ops::Range) -> impl Strategy> { - proptest::collection::vec(gen_hash(), size).prop_map(|mut h| { - h.sort(); - h - }) - } - - prop_compose! { - fn gen_sync_message()( - heads in gen_sorted_hashes(0..10), - need in gen_sorted_hashes(0..10), - have in proptest::collection::vec(gen_have(), 0..10), - changes in proptest::collection::vec(gen_change(), 0..10), - ) -> Message { - Message { - heads, - need, - have, - changes, - } - } - - } - - #[test] - fn encode_decode_empty_message() { - let msg = Message { - heads: vec![], - need: vec![], - have: vec![], - changes: vec![], - }; - let encoded = msg.encode(); - Message::parse(Input::new(&encoded)).unwrap(); - } - - proptest! 
{ - #[test] - fn encode_decode_message(msg in gen_sync_message()) { - let encoded = msg.clone().encode(); - let (i, decoded) = Message::parse(Input::new(&encoded)).unwrap(); - assert!(i.is_empty()); - assert_eq!(msg, decoded); - } - } - - #[test] - fn generate_sync_message_twice_does_nothing() { - let mut doc = crate::AutoCommit::new(); - doc.put(crate::ROOT, "key", "value").unwrap(); - let mut sync_state = State::new(); - - assert!(doc.sync().generate_sync_message(&mut sync_state).is_some()); - assert!(doc.sync().generate_sync_message(&mut sync_state).is_none()); - } - - #[test] - fn should_not_reply_if_we_have_no_data() { - let mut doc1 = crate::AutoCommit::new(); - let mut doc2 = crate::AutoCommit::new(); - let mut s1 = State::new(); - let mut s2 = State::new(); - let m1 = doc1 - .sync() - .generate_sync_message(&mut s1) - .expect("message was none"); - - doc2.sync().receive_sync_message(&mut s2, m1).unwrap(); - let m2 = doc2.sync().generate_sync_message(&mut s2); - assert!(m2.is_none()); - } - - #[test] - fn should_allow_simultaneous_messages_during_synchronisation() { - // create & synchronize two nodes - let mut doc1 = crate::AutoCommit::new().with_actor(ActorId::try_from("abc123").unwrap()); - let mut doc2 = crate::AutoCommit::new().with_actor(ActorId::try_from("def456").unwrap()); - let mut s1 = State::new(); - let mut s2 = State::new(); - - for i in 0..5 { - doc1.put(&crate::ROOT, "x", i).unwrap(); - doc1.commit(); - doc2.put(&crate::ROOT, "y", i).unwrap(); - doc2.commit(); - } - - let head1 = doc1.get_heads()[0]; - let head2 = doc2.get_heads()[0]; - - //// both sides report what they have but have no shared peer state - let msg1to2 = doc1 - .sync() - .generate_sync_message(&mut s1) - .expect("initial sync from 1 to 2 was None"); - let msg2to1 = doc2 - .sync() - .generate_sync_message(&mut s2) - .expect("initial sync message from 2 to 1 was None"); - assert_eq!(msg1to2.changes.len(), 0); - assert_eq!(msg1to2.have[0].last_sync.len(), 0); - 
assert_eq!(msg2to1.changes.len(), 0); - assert_eq!(msg2to1.have[0].last_sync.len(), 0); - - //// doc1 and doc2 receive that message and update sync state - doc1.sync().receive_sync_message(&mut s1, msg2to1).unwrap(); - doc2.sync().receive_sync_message(&mut s2, msg1to2).unwrap(); - - //// now both reply with their local changes the other lacks - //// (standard warning that 1% of the time this will result in a "need" message) - let msg1to2 = doc1 - .sync() - .generate_sync_message(&mut s1) - .expect("first reply from 1 to 2 was None"); - assert_eq!(msg1to2.changes.len(), 5); - - let msg2to1 = doc2 - .sync() - .generate_sync_message(&mut s2) - .expect("first reply from 2 to 1 was None"); - assert_eq!(msg2to1.changes.len(), 5); - - //// both should now apply the changes - doc1.sync().receive_sync_message(&mut s1, msg2to1).unwrap(); - assert_eq!(doc1.get_missing_deps(&[]), Vec::new()); - - doc2.sync().receive_sync_message(&mut s2, msg1to2).unwrap(); - assert_eq!(doc2.get_missing_deps(&[]), Vec::new()); - - //// The response acknowledges the changes received and sends no further changes - let msg1to2 = doc1 - .sync() - .generate_sync_message(&mut s1) - .expect("second reply from 1 to 2 was None"); - assert_eq!(msg1to2.changes.len(), 0); - let msg2to1 = doc2 - .sync() - .generate_sync_message(&mut s2) - .expect("second reply from 2 to 1 was None"); - assert_eq!(msg2to1.changes.len(), 0); - - //// After receiving acknowledgements, their shared heads should be equal - doc1.sync().receive_sync_message(&mut s1, msg2to1).unwrap(); - doc2.sync().receive_sync_message(&mut s2, msg1to2).unwrap(); - - assert_eq!(s1.shared_heads, s2.shared_heads); - - //// We're in sync, no more messages required - assert!(doc1.sync().generate_sync_message(&mut s1).is_none()); - assert!(doc2.sync().generate_sync_message(&mut s2).is_none()); - - //// If we make one more change and start another sync then its lastSync should be updated - doc1.put(crate::ROOT, "x", 5).unwrap(); - doc1.commit(); - let 
msg1to2 = doc1 - .sync() - .generate_sync_message(&mut s1) - .expect("third reply from 1 to 2 was None"); - let mut expected_heads = vec![head1, head2]; - expected_heads.sort(); - let mut actual_heads = msg1to2.have[0].last_sync.clone(); - actual_heads.sort(); - assert_eq!(actual_heads, expected_heads); - } - - #[test] - fn should_handle_false_positive_head() { - // Scenario: ,-- n1 - // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ - // `-- n2 - // where n2 is a false positive in the Bloom filter containing {n1}. - // lastSync is c9. - - let mut doc1 = crate::AutoCommit::new().with_actor(ActorId::try_from("abc123").unwrap()); - let mut doc2 = crate::AutoCommit::new().with_actor(ActorId::try_from("def456").unwrap()); - let mut s1 = State::new(); - let mut s2 = State::new(); - - for i in 0..10 { - doc1.put(crate::ROOT, "x", i).unwrap(); - doc1.commit(); - } - - sync(&mut doc1, &mut doc2, &mut s1, &mut s2); - - // search for false positive; see comment above - let mut i = 0; - let (mut doc1, mut doc2) = loop { - let mut doc1copy = doc1 - .clone() - .with_actor(ActorId::try_from("01234567").unwrap()); - let val1 = format!("{} @ n1", i); - doc1copy.put(crate::ROOT, "x", val1).unwrap(); - doc1copy.commit(); - - let mut doc2copy = doc1 - .clone() - .with_actor(ActorId::try_from("89abcdef").unwrap()); - let val2 = format!("{} @ n2", i); - doc2copy.put(crate::ROOT, "x", val2).unwrap(); - doc2copy.commit(); - - let n1_bloom = BloomFilter::from_hashes(doc1copy.get_heads().into_iter()); - if n1_bloom.contains_hash(&doc2copy.get_heads()[0]) { - break (doc1copy, doc2copy); - } - i += 1; - }; - - let mut all_heads = doc1.get_heads(); - all_heads.extend(doc2.get_heads()); - all_heads.sort(); - - // reset sync states - let (_, mut s1) = State::parse(Input::new(s1.encode().as_slice())).unwrap(); - let (_, mut s2) = State::parse(Input::new(s2.encode().as_slice())).unwrap(); - sync(&mut doc1, &mut doc2, &mut s1, &mut s2); - assert_eq!(doc1.get_heads(), 
all_heads); - assert_eq!(doc2.get_heads(), all_heads); - } - - #[test] - fn should_handle_chains_of_false_positives() { - //// Scenario: ,-- c5 - //// c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+ - //// `-- n2c1 <-- n2c2 <-- n2c3 - //// where n2c1 and n2c2 are both false positives in the Bloom filter containing {c5}. - //// lastSync is c4. - let mut doc1 = crate::AutoCommit::new().with_actor(ActorId::try_from("abc123").unwrap()); - let mut doc2 = crate::AutoCommit::new().with_actor(ActorId::try_from("def456").unwrap()); - let mut s1 = State::new(); - let mut s2 = State::new(); - - for i in 0..10 { - doc1.put(crate::ROOT, "x", i).unwrap(); - doc1.commit(); - } - - sync(&mut doc1, &mut doc2, &mut s1, &mut s2); - - doc1.put(crate::ROOT, "x", 5).unwrap(); - doc1.commit(); - let bloom = BloomFilter::from_hashes(doc1.get_heads().into_iter()); - - // search for false positive; see comment above - let mut i = 0; - let mut doc2 = loop { - let mut doc = doc2 - .fork() - .with_actor(ActorId::try_from("89abcdef").unwrap()); - doc.put(crate::ROOT, "x", format!("{} at 89abdef", i)) - .unwrap(); - doc.commit(); - if bloom.contains_hash(&doc.get_heads()[0]) { - break doc; - } - i += 1; - }; - - // find another false positive building on the first - i = 0; - let mut doc2 = loop { - let mut doc = doc2 - .fork() - .with_actor(ActorId::try_from("89abcdef").unwrap()); - doc.put(crate::ROOT, "x", format!("{} again", i)).unwrap(); - doc.commit(); - if bloom.contains_hash(&doc.get_heads()[0]) { - break doc; - } - i += 1; - }; - - doc2.put(crate::ROOT, "x", "final @ 89abcdef").unwrap(); - - let mut all_heads = doc1.get_heads(); - all_heads.extend(doc2.get_heads()); - all_heads.sort(); - - let (_, mut s1) = State::parse(Input::new(s1.encode().as_slice())).unwrap(); - let (_, mut s2) = State::parse(Input::new(s2.encode().as_slice())).unwrap(); - sync(&mut doc1, &mut doc2, &mut s1, &mut s2); - assert_eq!(doc1.get_heads(), all_heads); - assert_eq!(doc2.get_heads(), all_heads); - } - - #[test] - fn 
should_handle_lots_of_branching_and_merging() { - let mut doc1 = crate::AutoCommit::new().with_actor(ActorId::try_from("01234567").unwrap()); - let mut doc2 = crate::AutoCommit::new().with_actor(ActorId::try_from("89abcdef").unwrap()); - let mut doc3 = crate::AutoCommit::new().with_actor(ActorId::try_from("fedcba98").unwrap()); - let mut s1 = State::new(); - let mut s2 = State::new(); - - doc1.put(crate::ROOT, "x", 0).unwrap(); - let change1 = doc1.get_last_local_change().unwrap().clone(); - - doc2.apply_changes([change1.clone()]).unwrap(); - doc3.apply_changes([change1]).unwrap(); - - doc3.put(crate::ROOT, "x", 1).unwrap(); - - //// - n1c1 <------ n1c2 <------ n1c3 <-- etc. <-- n1c20 <------ n1c21 - //// / \/ \/ \/ - //// / /\ /\ /\ - //// c0 <---- n2c1 <------ n2c2 <------ n2c3 <-- etc. <-- n2c20 <------ n2c21 - //// \ / - //// ---------------------------------------------- n3c1 <----- - for i in 1..20 { - doc1.put(crate::ROOT, "n1", i).unwrap(); - doc2.put(crate::ROOT, "n2", i).unwrap(); - let change1 = doc1.get_last_local_change().unwrap().clone(); - let change2 = doc2.get_last_local_change().unwrap().clone(); - doc1.apply_changes([change2.clone()]).unwrap(); - doc2.apply_changes([change1]).unwrap(); - } - - sync(&mut doc1, &mut doc2, &mut s1, &mut s2); - - //// Having n3's last change concurrent to the last sync heads forces us into the slower code path - let change3 = doc3.get_last_local_change().unwrap().clone(); - doc2.apply_changes([change3]).unwrap(); - - doc1.put(crate::ROOT, "n1", "final").unwrap(); - doc2.put(crate::ROOT, "n1", "final").unwrap(); - - sync(&mut doc1, &mut doc2, &mut s1, &mut s2); - - assert_eq!(doc1.get_heads(), doc2.get_heads()); - } - - fn sync( - a: &mut crate::AutoCommit, - b: &mut crate::AutoCommit, - a_sync_state: &mut State, - b_sync_state: &mut State, - ) { - //function sync(a: Automerge, b: Automerge, aSyncState = initSyncState(), bSyncState = initSyncState()) { - const MAX_ITER: usize = 10; - let mut iterations = 0; - - loop { 
- let a_to_b = a.sync().generate_sync_message(a_sync_state); - let b_to_a = b.sync().generate_sync_message(b_sync_state); - if a_to_b.is_none() && b_to_a.is_none() { - break; - } - if iterations > MAX_ITER { - panic!("failed to sync in {} iterations", MAX_ITER); - } - if let Some(msg) = a_to_b { - b.sync().receive_sync_message(b_sync_state, msg).unwrap() - } - if let Some(msg) = b_to_a { - a.sync().receive_sync_message(a_sync_state, msg).unwrap() - } - iterations += 1; - } - } -} diff --git a/rust/automerge/src/sync/state.rs b/rust/automerge/src/sync/state.rs deleted file mode 100644 index 354c605f..00000000 --- a/rust/automerge/src/sync/state.rs +++ /dev/null @@ -1,110 +0,0 @@ -use std::collections::BTreeSet; - -use super::{encode_hashes, BloomFilter}; -use crate::storage::parse; -use crate::ChangeHash; - -const SYNC_STATE_TYPE: u8 = 0x43; // first byte of an encoded sync state, for identification - -#[derive(Debug, thiserror::Error)] -pub enum DecodeError { - #[error("{0:?}")] - Parse(String), - #[error("wrong type: expected one of {expected_one_of:?} but found {found}")] - WrongType { expected_one_of: Vec, found: u8 }, - #[error("not enough input")] - NotEnoughInput, -} - -impl From for DecodeError { - fn from(_: parse::leb128::Error) -> Self { - Self::Parse("bad leb128 encoding".to_string()) - } -} - -/// The state of synchronisation with a peer. -/// -/// This should be persisted using [`Self::encode`] when you know you will be interacting with the -/// same peer in multiple sessions. [`Self::encode`] only encodes state which should be reused -/// across connections. 
-#[derive(Debug, Clone, Default, PartialEq, Eq, Hash)] -pub struct State { - /// The hashes which we know both peers have - pub shared_heads: Vec, - /// The heads we last sent - pub last_sent_heads: Vec, - /// The heads we last received from them - pub their_heads: Option>, - /// Any specific changes they last said they needed - pub their_need: Option>, - /// The bloom filters summarising what they said they have - pub their_have: Option>, - /// The hashes we have sent in this session - pub sent_hashes: BTreeSet, - - /// `generate_sync_message` should return `None` if there are no new changes to send. In - /// particular, if there are changes in flight which the other end has not yet acknowledged we - /// do not wish to generate duplicate sync messages. This field tracks whether the changes we - /// expect to send to the peer based on this sync state have been sent or not. If - /// `in_flight` is `false` then `generate_sync_message` will return a new message (provided - /// there are in fact changes to send). If it is `true` then we don't. This flag is cleared - /// in `receive_sync_message`. - pub in_flight: bool, -} - -/// A summary of the changes that the sender of the message already has. -/// This is implicitly a request to the recipient to send all changes that the -/// sender does not already have. -#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, serde::Serialize)] -pub struct Have { - /// The heads at the time of the last successful sync with this recipient. - pub last_sync: Vec, - /// A bloom filter summarising all of the changes that the sender of the message has added - /// since the last sync. 
- pub bloom: BloomFilter, -} - -impl State { - pub fn new() -> Self { - Default::default() - } - - pub fn encode(&self) -> Vec { - let mut buf = vec![SYNC_STATE_TYPE]; - encode_hashes(&mut buf, &self.shared_heads); - buf - } - - pub fn decode(input: &[u8]) -> Result { - let input = parse::Input::new(input); - match Self::parse(input) { - Ok((_, state)) => Ok(state), - Err(parse::ParseError::Incomplete(_)) => Err(DecodeError::NotEnoughInput), - Err(parse::ParseError::Error(e)) => Err(e), - } - } - - pub(crate) fn parse(input: parse::Input<'_>) -> parse::ParseResult<'_, Self, DecodeError> { - let (i, record_type) = parse::take1(input)?; - if record_type != SYNC_STATE_TYPE { - return Err(parse::ParseError::Error(DecodeError::WrongType { - expected_one_of: vec![SYNC_STATE_TYPE], - found: record_type, - })); - } - - let (i, shared_heads) = parse::length_prefixed(parse::change_hash)(i)?; - Ok(( - i, - Self { - shared_heads, - last_sent_heads: Vec::new(), - their_heads: None, - their_need: None, - their_have: Some(Vec::new()), - sent_hashes: BTreeSet::new(), - in_flight: false, - }, - )) - } -} diff --git a/rust/automerge/src/transaction/inner.rs b/rust/automerge/src/transaction/inner.rs deleted file mode 100644 index 0fe735d5..00000000 --- a/rust/automerge/src/transaction/inner.rs +++ /dev/null @@ -1,731 +0,0 @@ -use std::num::NonZeroU64; - -use crate::exid::ExId; -use crate::query::{self, OpIdSearch}; -use crate::storage::Change as StoredChange; -use crate::types::{Key, ListEncoding, ObjId, OpId, OpIds, TextEncoding}; -use crate::{op_tree::OpSetMetadata, types::Op, Automerge, Change, ChangeHash, OpObserver, Prop}; -use crate::{AutomergeError, ObjType, OpType, ScalarValue}; - -#[derive(Debug, Clone)] -pub(crate) struct TransactionInner { - actor: usize, - seq: u64, - start_op: NonZeroU64, - time: i64, - message: Option, - deps: Vec, - operations: Vec<(ObjId, Op)>, -} - -/// Arguments required to create a new transaction -pub(crate) struct TransactionArgs { - /// The 
index of the actor ID this transaction will create ops for in the - /// [`OpSetMetadata::actors`] - pub(crate) actor_index: usize, - /// The sequence number of the change this transaction will create - pub(crate) seq: u64, - /// The start op of the change this transaction will create - pub(crate) start_op: NonZeroU64, - /// The dependencies of the change this transaction will create - pub(crate) deps: Vec, -} - -impl TransactionInner { - pub(crate) fn new( - TransactionArgs { - actor_index: actor, - seq, - start_op, - deps, - }: TransactionArgs, - ) -> Self { - TransactionInner { - actor, - seq, - start_op, - time: 0, - message: None, - operations: vec![], - deps, - } - } - - /// Create an empty change - pub(crate) fn empty( - doc: &mut Automerge, - args: TransactionArgs, - message: Option, - time: Option, - ) -> ChangeHash { - Self::new(args).commit_impl(doc, message, time) - } - - pub(crate) fn pending_ops(&self) -> usize { - self.operations.len() - } - - /// Commit the operations performed in this transaction, returning the hashes corresponding to - /// the new heads. 
- /// - /// Returns `None` if there were no operations to commit - #[tracing::instrument(skip(self, doc))] - pub(crate) fn commit( - self, - doc: &mut Automerge, - message: Option, - time: Option, - ) -> Option { - if self.pending_ops() == 0 { - return None; - } - Some(self.commit_impl(doc, message, time)) - } - - pub(crate) fn commit_impl( - mut self, - doc: &mut Automerge, - message: Option, - time: Option, - ) -> ChangeHash { - if message.is_some() { - self.message = message; - } - - if let Some(t) = time { - self.time = t; - } - - let num_ops = self.pending_ops(); - let change = self.export(&doc.ops().m); - let hash = change.hash(); - #[cfg(not(debug_assertions))] - tracing::trace!(commit=?hash, deps=?change.deps(), "committing transaction"); - #[cfg(debug_assertions)] - { - let ops = change.iter_ops().collect::>(); - tracing::trace!(commit=?hash, ?ops, deps=?change.deps(), "committing transaction"); - } - doc.update_history(change, num_ops); - debug_assert_eq!(doc.get_heads(), vec![hash]); - hash - } - - #[tracing::instrument(skip(self, metadata))] - pub(crate) fn export(self, metadata: &OpSetMetadata) -> Change { - use crate::storage::{change::PredOutOfOrder, convert::op_as_actor_id}; - - let actor = metadata.actors.get(self.actor).clone(); - let deps = self.deps.clone(); - let stored = match StoredChange::builder() - .with_actor(actor) - .with_seq(self.seq) - .with_start_op(self.start_op) - .with_message(self.message.clone()) - .with_dependencies(deps) - .with_timestamp(self.time) - .build( - self.operations - .iter() - .map(|(obj, op)| op_as_actor_id(obj, op, metadata)), - ) { - Ok(s) => s, - Err(PredOutOfOrder) => { - // SAFETY: types::Op::preds is `types::OpIds` which ensures ops are always sorted - panic!("preds out of order"); - } - }; - #[cfg(debug_assertions)] - { - let realized_ops = self.operations.iter().collect::>(); - tracing::trace!(?stored, ops=?realized_ops, "committing change"); - } - #[cfg(not(debug_assertions))] - tracing::trace!(?stored, 
"committing change"); - Change::new(stored) - } - - /// Undo the operations added in this transaction, returning the number of cancelled - /// operations. - pub(crate) fn rollback(self, doc: &mut Automerge) -> usize { - let num = self.pending_ops(); - // remove in reverse order so sets are removed before makes etc... - for (obj, op) in self.operations.into_iter().rev() { - for pred_id in &op.pred { - if let Some(p) = doc.ops().search(&obj, OpIdSearch::new(*pred_id)).index() { - doc.ops_mut().change_vis(&obj, p, |o| o.remove_succ(&op)); - } - } - if let Some(pos) = doc.ops().search(&obj, OpIdSearch::new(op.id)).index() { - doc.ops_mut().remove(&obj, pos); - } - } - - doc.rollback_last_actor(); - - num - } - - /// Set the value of property `P` to value `V` in object `obj`. - /// - /// # Returns - /// - /// The opid of the operation which was created, or None if this operation doesn't change the - /// document - /// - /// # Errors - /// - /// This will return an error if - /// - The object does not exist - /// - The key is the wrong type for the object - /// - The key does not exist in the object - pub(crate) fn put, V: Into, Obs: OpObserver>( - &mut self, - doc: &mut Automerge, - op_observer: Option<&mut Obs>, - ex_obj: &ExId, - prop: P, - value: V, - ) -> Result<(), AutomergeError> { - let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; - let value = value.into(); - let prop = prop.into(); - match (&prop, obj_type) { - (Prop::Map(_), ObjType::Map) => Ok(()), - (Prop::Seq(_), ObjType::List) => Ok(()), - (Prop::Seq(_), ObjType::Text) => Ok(()), - _ => Err(AutomergeError::InvalidOp(obj_type)), - }?; - self.local_op(doc, op_observer, obj, prop, value.into())?; - Ok(()) - } - - /// Set the value of property `P` to value `V` in object `obj`. 
- /// - /// # Returns - /// - /// The opid of the operation which was created, or None if this operation doesn't change the - /// document - /// - /// # Errors - /// - /// This will return an error if - /// - The object does not exist - /// - The key is the wrong type for the object - /// - The key does not exist in the object - pub(crate) fn put_object, Obs: OpObserver>( - &mut self, - doc: &mut Automerge, - op_observer: Option<&mut Obs>, - ex_obj: &ExId, - prop: P, - value: ObjType, - ) -> Result { - let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; - let prop = prop.into(); - match (&prop, obj_type) { - (Prop::Map(_), ObjType::Map) => Ok(()), - (Prop::Seq(_), ObjType::List) => Ok(()), - _ => Err(AutomergeError::InvalidOp(obj_type)), - }?; - let id = self - .local_op(doc, op_observer, obj, prop, value.into())? - .unwrap(); - let id = doc.id_to_exid(id); - Ok(id) - } - - fn next_id(&mut self) -> OpId { - OpId::new(self.start_op.get() + self.pending_ops() as u64, self.actor) - } - - fn next_insert(&mut self, key: Key, value: ScalarValue) -> Op { - Op { - id: self.next_id(), - action: OpType::Put(value), - key, - succ: Default::default(), - pred: Default::default(), - insert: true, - } - } - - fn next_delete(&mut self, key: Key, pred: OpIds) -> Op { - Op { - id: self.next_id(), - action: OpType::Delete, - key, - succ: Default::default(), - pred, - insert: false, - } - } - - #[allow(clippy::too_many_arguments)] - fn insert_local_op( - &mut self, - doc: &mut Automerge, - op_observer: Option<&mut Obs>, - prop: Prop, - op: Op, - pos: usize, - obj: ObjId, - succ_pos: &[usize], - ) { - doc.ops_mut().add_succ(&obj, succ_pos, &op); - - if !op.is_delete() { - doc.ops_mut().insert(pos, &obj, op.clone()); - } - - self.finalize_op(doc, op_observer, obj, prop, op); - } - - pub(crate) fn insert, Obs: OpObserver>( - &mut self, - doc: &mut Automerge, - op_observer: Option<&mut Obs>, - ex_obj: &ExId, - index: usize, - value: V, - ) -> Result<(), AutomergeError> { - let (obj, obj_type) 
= doc.exid_to_obj(ex_obj)?; - if !matches!(obj_type, ObjType::List | ObjType::Text) { - return Err(AutomergeError::InvalidOp(obj_type)); - } - let value = value.into(); - tracing::trace!(obj=?obj, value=?value, "inserting value"); - self.do_insert(doc, op_observer, obj, index, value.into())?; - Ok(()) - } - - pub(crate) fn insert_object( - &mut self, - doc: &mut Automerge, - op_observer: Option<&mut Obs>, - ex_obj: &ExId, - index: usize, - value: ObjType, - ) -> Result { - let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; - if !matches!(obj_type, ObjType::List | ObjType::Text) { - return Err(AutomergeError::InvalidOp(obj_type)); - } - let id = self.do_insert(doc, op_observer, obj, index, value.into())?; - let id = doc.id_to_exid(id); - Ok(id) - } - - fn do_insert( - &mut self, - doc: &mut Automerge, - op_observer: Option<&mut Obs>, - obj: ObjId, - index: usize, - action: OpType, - ) -> Result { - let id = self.next_id(); - - let query = doc - .ops() - .search(&obj, query::InsertNth::new(index, ListEncoding::List)); - - let key = query.key()?; - - let op = Op { - id, - action, - key, - succ: Default::default(), - pred: Default::default(), - insert: true, - }; - - doc.ops_mut().insert(query.pos(), &obj, op.clone()); - - self.finalize_op(doc, op_observer, obj, Prop::Seq(index), op); - - Ok(id) - } - - pub(crate) fn local_op( - &mut self, - doc: &mut Automerge, - op_observer: Option<&mut Obs>, - obj: ObjId, - prop: Prop, - action: OpType, - ) -> Result, AutomergeError> { - match prop { - Prop::Map(s) => self.local_map_op(doc, op_observer, obj, s, action), - Prop::Seq(n) => self.local_list_op(doc, op_observer, obj, n, action), - } - } - - fn local_map_op( - &mut self, - doc: &mut Automerge, - op_observer: Option<&mut Obs>, - obj: ObjId, - prop: String, - action: OpType, - ) -> Result, AutomergeError> { - if prop.is_empty() { - return Err(AutomergeError::EmptyStringKey); - } - - let id = self.next_id(); - let prop_index = doc.ops_mut().m.props.cache(prop.clone()); - let 
query = doc.ops().search(&obj, query::Prop::new(prop_index)); - - // no key present to delete - if query.ops.is_empty() && action == OpType::Delete { - return Ok(None); - } - - if query.ops.len() == 1 && query.ops[0].is_noop(&action) { - return Ok(None); - } - - // increment operations are only valid against counter values. - // if there are multiple values (from conflicts) then we just need one of them to be a counter. - if matches!(action, OpType::Increment(_)) && query.ops.iter().all(|op| !op.is_counter()) { - return Err(AutomergeError::MissingCounter); - } - - let pred = doc.ops().m.sorted_opids(query.ops.iter().map(|o| o.id)); - - let op = Op { - id, - action, - key: Key::Map(prop_index), - succ: Default::default(), - pred, - insert: false, - }; - - let pos = query.pos; - let ops_pos = query.ops_pos; - self.insert_local_op(doc, op_observer, Prop::Map(prop), op, pos, obj, &ops_pos); - - Ok(Some(id)) - } - - fn local_list_op( - &mut self, - doc: &mut Automerge, - op_observer: Option<&mut Obs>, - obj: ObjId, - index: usize, - action: OpType, - ) -> Result, AutomergeError> { - let query = doc - .ops() - .search(&obj, query::Nth::new(index, ListEncoding::List)); - - let id = self.next_id(); - let pred = doc.ops().m.sorted_opids(query.ops.iter().map(|o| o.id)); - let key = query.key()?; - - if query.ops.len() == 1 && query.ops[0].is_noop(&action) { - return Ok(None); - } - - // increment operations are only valid against counter values. - // if there are multiple values (from conflicts) then we just need one of them to be a counter. 
- if matches!(action, OpType::Increment(_)) && query.ops.iter().all(|op| !op.is_counter()) { - return Err(AutomergeError::MissingCounter); - } - - let op = Op { - id, - action, - key, - succ: Default::default(), - pred, - insert: false, - }; - - let pos = query.pos; - let ops_pos = query.ops_pos; - self.insert_local_op(doc, op_observer, Prop::Seq(index), op, pos, obj, &ops_pos); - - Ok(Some(id)) - } - - pub(crate) fn increment, Obs: OpObserver>( - &mut self, - doc: &mut Automerge, - op_observer: Option<&mut Obs>, - obj: &ExId, - prop: P, - value: i64, - ) -> Result<(), AutomergeError> { - let obj = doc.exid_to_obj(obj)?.0; - self.local_op(doc, op_observer, obj, prop.into(), OpType::Increment(value))?; - Ok(()) - } - - pub(crate) fn delete, Obs: OpObserver>( - &mut self, - doc: &mut Automerge, - op_observer: Option<&mut Obs>, - ex_obj: &ExId, - prop: P, - ) -> Result<(), AutomergeError> { - let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; - let prop = prop.into(); - if obj_type == ObjType::Text { - let index = prop.to_index().ok_or(AutomergeError::InvalidOp(obj_type))?; - self.inner_splice( - doc, - op_observer, - SpliceArgs { - obj, - index, - del: 1, - values: vec![], - splice_type: SpliceType::Text("", doc.text_encoding()), - }, - )?; - } else { - self.local_op(doc, op_observer, obj, prop, OpType::Delete)?; - } - Ok(()) - } - - /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert - /// the new elements - pub(crate) fn splice( - &mut self, - doc: &mut Automerge, - op_observer: Option<&mut Obs>, - ex_obj: &ExId, - index: usize, - del: usize, - vals: impl IntoIterator, - ) -> Result<(), AutomergeError> { - let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; - if !matches!(obj_type, ObjType::List | ObjType::Text) { - return Err(AutomergeError::InvalidOp(obj_type)); - } - let values = vals.into_iter().collect(); - self.inner_splice( - doc, - op_observer, - SpliceArgs { - obj, - index, - del, - values, - splice_type: SpliceType::List, - }, - ) - } - - /// Splice string into a text object - pub(crate) fn splice_text( - &mut self, - doc: &mut Automerge, - op_observer: Option<&mut Obs>, - ex_obj: &ExId, - index: usize, - del: usize, - text: &str, - ) -> Result<(), AutomergeError> { - let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; - if obj_type != ObjType::Text { - return Err(AutomergeError::InvalidOp(obj_type)); - } - let values = text.chars().map(ScalarValue::from).collect(); - self.inner_splice( - doc, - op_observer, - SpliceArgs { - obj, - index, - del, - values, - splice_type: SpliceType::Text(text, doc.text_encoding()), - }, - ) - } - - fn inner_splice( - &mut self, - doc: &mut Automerge, - mut op_observer: Option<&mut Obs>, - SpliceArgs { - obj, - mut index, - mut del, - values, - splice_type, - }: SpliceArgs<'_>, - ) -> Result<(), AutomergeError> { - let ex_obj = doc.ops().id_to_exid(obj.0); - let encoding = splice_type.encoding(); - // delete `del` items - performing the query for each one - let mut deleted = 0; - while deleted < del { - // TODO: could do this with a single custom query - let query = doc.ops().search(&obj, query::Nth::new(index, encoding)); - - // if we delete in the middle of a multi-character - // move cursor back to the beginning and expand the del width - let adjusted_index = query.index(); - if adjusted_index < index { - del += index - adjusted_index; - index = adjusted_index; - 
} - - let step = if let Some(op) = query.ops.last() { - op.width(encoding) - } else { - break; - }; - - let op = self.next_delete(query.key()?, query.pred(doc.ops())); - - let ops_pos = query.ops_pos; - doc.ops_mut().add_succ(&obj, &ops_pos, &op); - - self.operations.push((obj, op)); - - deleted += step; - } - - if deleted > 0 { - if let Some(obs) = op_observer.as_mut() { - obs.delete_seq(doc, ex_obj.clone(), index, deleted); - } - } - - // do the insert query for the first item and then - // insert the remaining ops one after the other - if !values.is_empty() { - let query = doc - .ops() - .search(&obj, query::InsertNth::new(index, encoding)); - let mut pos = query.pos(); - let mut key = query.key()?; - let mut cursor = index; - let mut width = 0; - - for v in &values { - let op = self.next_insert(key, v.clone()); - - doc.ops_mut().insert(pos, &obj, op.clone()); - - width = op.width(encoding); - cursor += width; - pos += 1; - key = op.id.into(); - - self.operations.push((obj, op)); - } - - doc.ops_mut().hint(&obj, cursor - width, pos - 1); - - // handle the observer - if let Some(obs) = op_observer.as_mut() { - match splice_type { - SpliceType::Text(text, _) if !obs.text_as_seq() => { - obs.splice_text(doc, ex_obj, index, text) - } - SpliceType::List | SpliceType::Text(..) => { - let start = self.operations.len() - values.len(); - for (offset, v) in values.iter().enumerate() { - let op = &self.operations[start + offset].1; - let value = (v.clone().into(), doc.ops().id_to_exid(op.id)); - obs.insert(doc, ex_obj.clone(), index + offset, value) - } - } - } - } - } - - Ok(()) - } - - fn finalize_op( - &mut self, - doc: &mut Automerge, - op_observer: Option<&mut Obs>, - obj: ObjId, - prop: Prop, - op: Op, - ) { - // TODO - id_to_exid should be a noop if not used - change type to Into? 
- if let Some(op_observer) = op_observer { - let ex_obj = doc.ops().id_to_exid(obj.0); - if op.insert { - let obj_type = doc.ops().object_type(&obj); - assert!(obj_type.unwrap().is_sequence()); - match (obj_type, prop) { - (Some(ObjType::List), Prop::Seq(index)) => { - let value = (op.value(), doc.ops().id_to_exid(op.id)); - op_observer.insert(doc, ex_obj, index, value) - } - (Some(ObjType::Text), Prop::Seq(index)) => { - // FIXME - if op_observer.text_as_seq() { - let value = (op.value(), doc.ops().id_to_exid(op.id)); - op_observer.insert(doc, ex_obj, index, value) - } else { - op_observer.splice_text(doc, ex_obj, index, op.to_str()) - } - } - _ => {} - } - } else if op.is_delete() { - op_observer.delete(doc, ex_obj, prop); - } else if let Some(value) = op.get_increment_value() { - op_observer.increment(doc, ex_obj, prop, (value, doc.ops().id_to_exid(op.id))); - } else { - let value = (op.value(), doc.ops().id_to_exid(op.id)); - op_observer.put(doc, ex_obj, prop, value, false); - } - } - self.operations.push((obj, op)); - } -} - -enum SpliceType<'a> { - List, - Text(&'a str, TextEncoding), -} - -impl<'a> SpliceType<'a> { - fn encoding(&self) -> ListEncoding { - match self { - SpliceType::List => ListEncoding::List, - SpliceType::Text(_, encoding) => ListEncoding::Text(*encoding), - } - } -} - -struct SpliceArgs<'a> { - obj: ObjId, - index: usize, - del: usize, - values: Vec, - splice_type: SpliceType<'a>, -} - -#[cfg(test)] -mod tests { - use crate::{transaction::Transactable, ReadDoc, ROOT}; - - use super::*; - - #[test] - fn map_rollback_doesnt_panic() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - - let a = tx.put_object(ROOT, "a", ObjType::Map).unwrap(); - tx.put(&a, "b", 1).unwrap(); - assert!(tx.get(&a, "b").unwrap().is_some()); - } -} diff --git a/rust/automerge/src/transaction/manual_transaction.rs b/rust/automerge/src/transaction/manual_transaction.rs deleted file mode 100644 index fa5f6340..00000000 --- 
a/rust/automerge/src/transaction/manual_transaction.rs +++ /dev/null @@ -1,348 +0,0 @@ -use std::ops::RangeBounds; - -use crate::exid::ExId; -use crate::op_observer::BranchableObserver; -use crate::{ - Automerge, ChangeHash, KeysAt, ObjType, OpObserver, Prop, ReadDoc, ScalarValue, Value, Values, -}; -use crate::{AutomergeError, Keys}; -use crate::{ListRange, ListRangeAt, MapRange, MapRangeAt}; - -use super::{observation, CommitOptions, Transactable, TransactionArgs, TransactionInner}; - -/// A transaction on a document. -/// Transactions group operations into a single change so that no other operations can happen -/// in-between. -/// -/// Created from [`Automerge::transaction`]. -/// -/// ## Drop -/// -/// This transaction should be manually committed or rolled back. If not done manually then it will -/// be rolled back when it is dropped. This is to prevent the document being in an unsafe -/// intermediate state. -/// This is consistent with `?` error handling. -#[derive(Debug)] -pub struct Transaction<'a, Obs: observation::Observation> { - // this is an option so that we can take it during commit and rollback to prevent it being - // rolled back during drop. 
- inner: Option, - // As with `inner` this is an `Option` so we can `take` it during `commit` - observation: Option, - doc: &'a mut Automerge, -} - -impl<'a, Obs: observation::Observation> Transaction<'a, Obs> { - pub(crate) fn new(doc: &'a mut Automerge, args: TransactionArgs, obs: Obs) -> Self { - Self { - inner: Some(TransactionInner::new(args)), - doc, - observation: Some(obs), - } - } -} - -impl<'a> Transaction<'a, observation::UnObserved> { - pub(crate) fn empty( - doc: &'a mut Automerge, - args: TransactionArgs, - opts: CommitOptions, - ) -> ChangeHash { - TransactionInner::empty(doc, args, opts.message, opts.time) - } -} - -impl<'a, Obs: OpObserver + BranchableObserver> Transaction<'a, observation::Observed> { - pub fn observer(&mut self) -> &mut Obs { - self.observation.as_mut().unwrap().observer() - } -} - -impl<'a, Obs: observation::Observation> Transaction<'a, Obs> { - /// Get the heads of the document before this transaction was started. - pub fn get_heads(&self) -> Vec { - self.doc.get_heads() - } - - /// Commit the operations performed in this transaction, returning the hashes corresponding to - /// the new heads. - pub fn commit(mut self) -> Obs::CommitResult { - let tx = self.inner.take().unwrap(); - let hash = tx.commit(self.doc, None, None); - let obs = self.observation.take().unwrap(); - obs.make_result(hash) - } - - /// Commit the operations in this transaction with some options. 
- /// - /// ``` - /// # use automerge::transaction::CommitOptions; - /// # use automerge::transaction::Transactable; - /// # use automerge::ROOT; - /// # use automerge::Automerge; - /// # use automerge::ObjType; - /// # use std::time::SystemTime; - /// let mut doc = Automerge::new(); - /// let mut tx = doc.transaction(); - /// tx.put_object(ROOT, "todos", ObjType::List).unwrap(); - /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as - /// i64; - /// tx.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); - /// ``` - pub fn commit_with(mut self, options: CommitOptions) -> Obs::CommitResult { - let tx = self.inner.take().unwrap(); - let hash = tx.commit(self.doc, options.message, options.time); - let obs = self.observation.take().unwrap(); - obs.make_result(hash) - } - - /// Undo the operations added in this transaction, returning the number of cancelled - /// operations. - pub fn rollback(mut self) -> usize { - self.inner.take().unwrap().rollback(self.doc) - } - - fn do_tx(&mut self, f: F) -> O - where - F: FnOnce(&mut TransactionInner, &mut Automerge, Option<&mut Obs::Obs>) -> O, - { - let tx = self.inner.as_mut().unwrap(); - if let Some(obs) = self.observation.as_mut() { - f(tx, self.doc, obs.observer()) - } else { - f(tx, self.doc, None) - } - } -} - -impl<'a, Obs: observation::Observation> ReadDoc for Transaction<'a, Obs> { - fn keys>(&self, obj: O) -> Keys<'_, '_> { - self.doc.keys(obj) - } - - fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_> { - self.doc.keys_at(obj, heads) - } - - fn map_range, R: RangeBounds>( - &self, - obj: O, - range: R, - ) -> MapRange<'_, R> { - self.doc.map_range(obj, range) - } - - fn map_range_at, R: RangeBounds>( - &self, - obj: O, - range: R, - heads: &[ChangeHash], - ) -> MapRangeAt<'_, R> { - self.doc.map_range_at(obj, range, heads) - } - - fn list_range, R: RangeBounds>( - &self, - obj: O, - range: R, - ) -> ListRange<'_, R> { - 
self.doc.list_range(obj, range) - } - - fn list_range_at, R: RangeBounds>( - &self, - obj: O, - range: R, - heads: &[ChangeHash], - ) -> ListRangeAt<'_, R> { - self.doc.list_range_at(obj, range, heads) - } - - fn values>(&self, obj: O) -> Values<'_> { - self.doc.values(obj) - } - - fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_> { - self.doc.values_at(obj, heads) - } - - fn length>(&self, obj: O) -> usize { - self.doc.length(obj) - } - - fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { - self.doc.length_at(obj, heads) - } - - fn object_type>(&self, obj: O) -> Result { - self.doc.object_type(obj) - } - - fn text>(&self, obj: O) -> Result { - self.doc.text(obj) - } - - fn text_at>( - &self, - obj: O, - heads: &[ChangeHash], - ) -> Result { - self.doc.text_at(obj, heads) - } - - fn get, P: Into>( - &self, - obj: O, - prop: P, - ) -> Result, ExId)>, AutomergeError> { - self.doc.get(obj, prop) - } - - fn get_at, P: Into>( - &self, - obj: O, - prop: P, - heads: &[ChangeHash], - ) -> Result, ExId)>, AutomergeError> { - self.doc.get_at(obj, prop, heads) - } - - fn get_all, P: Into>( - &self, - obj: O, - prop: P, - ) -> Result, ExId)>, AutomergeError> { - self.doc.get_all(obj, prop) - } - - fn get_all_at, P: Into>( - &self, - obj: O, - prop: P, - heads: &[ChangeHash], - ) -> Result, ExId)>, AutomergeError> { - self.doc.get_all_at(obj, prop, heads) - } - - fn parents>(&self, obj: O) -> Result, AutomergeError> { - self.doc.parents(obj) - } - - fn path_to_object>(&self, obj: O) -> Result, AutomergeError> { - self.doc.path_to_object(obj) - } - - fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec { - self.doc.get_missing_deps(heads) - } - - fn get_change_by_hash(&self, hash: &ChangeHash) -> Option<&crate::Change> { - self.doc.get_change_by_hash(hash) - } -} - -impl<'a, Obs: observation::Observation> Transactable for Transaction<'a, Obs> { - /// Get the number of pending operations in this transaction. 
- fn pending_ops(&self) -> usize { - self.inner.as_ref().unwrap().pending_ops() - } - - /// Set the value of property `P` to value `V` in object `obj`. - /// - /// # Errors - /// - /// This will return an error if - /// - The object does not exist - /// - The key is the wrong type for the object - /// - The key does not exist in the object - fn put, P: Into, V: Into>( - &mut self, - obj: O, - prop: P, - value: V, - ) -> Result<(), AutomergeError> { - self.do_tx(|tx, doc, obs| tx.put(doc, obs, obj.as_ref(), prop, value)) - } - - fn put_object, P: Into>( - &mut self, - obj: O, - prop: P, - value: ObjType, - ) -> Result { - self.do_tx(|tx, doc, obs| tx.put_object(doc, obs, obj.as_ref(), prop, value)) - } - - fn insert, V: Into>( - &mut self, - obj: O, - index: usize, - value: V, - ) -> Result<(), AutomergeError> { - self.do_tx(|tx, doc, obs| tx.insert(doc, obs, obj.as_ref(), index, value)) - } - - fn insert_object>( - &mut self, - obj: O, - index: usize, - value: ObjType, - ) -> Result { - self.do_tx(|tx, doc, obs| tx.insert_object(doc, obs, obj.as_ref(), index, value)) - } - - fn increment, P: Into>( - &mut self, - obj: O, - prop: P, - value: i64, - ) -> Result<(), AutomergeError> { - self.do_tx(|tx, doc, obs| tx.increment(doc, obs, obj.as_ref(), prop, value)) - } - - fn delete, P: Into>( - &mut self, - obj: O, - prop: P, - ) -> Result<(), AutomergeError> { - self.do_tx(|tx, doc, obs| tx.delete(doc, obs, obj.as_ref(), prop)) - } - - /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert - /// the new elements - fn splice, V: IntoIterator>( - &mut self, - obj: O, - pos: usize, - del: usize, - vals: V, - ) -> Result<(), AutomergeError> { - self.do_tx(|tx, doc, obs| tx.splice(doc, obs, obj.as_ref(), pos, del, vals)) - } - - fn splice_text>( - &mut self, - obj: O, - pos: usize, - del: usize, - text: &str, - ) -> Result<(), AutomergeError> { - self.do_tx(|tx, doc, obs| tx.splice_text(doc, obs, obj.as_ref(), pos, del, text)) - } - - fn base_heads(&self) -> Vec { - self.doc.get_heads() - } -} - -// If a transaction is not commited or rolled back manually then it can leave the document in an -// intermediate state. -// This defaults to rolling back the transaction to be compatible with `?` error returning before -// reaching a call to `commit`. -impl<'a, Obs: observation::Observation> Drop for Transaction<'a, Obs> { - fn drop(&mut self) { - if let Some(txn) = self.inner.take() { - txn.rollback(self.doc); - } - } -} diff --git a/rust/automerge/src/transaction/observation.rs b/rust/automerge/src/transaction/observation.rs deleted file mode 100644 index 53723711..00000000 --- a/rust/automerge/src/transaction/observation.rs +++ /dev/null @@ -1,80 +0,0 @@ -//! This module is essentially a type level Option. It is used in sitations where we know at -//! compile time whether an `OpObserver` is available to track changes in a transaction. 
-use crate::{op_observer::BranchableObserver, ChangeHash, OpObserver}; - -mod private { - use crate::op_observer::BranchableObserver; - - pub trait Sealed {} - impl Sealed for super::Observed {} - impl Sealed for super::UnObserved {} -} - -pub trait Observation: private::Sealed { - type Obs: OpObserver + BranchableObserver; - type CommitResult; - - fn observer(&mut self) -> Option<&mut Self::Obs>; - fn make_result(self, hash: Option) -> Self::CommitResult; - fn branch(&self) -> Self; - fn merge(&mut self, other: &Self); -} - -#[derive(Clone, Debug)] -pub struct Observed(Obs); - -impl Observed { - pub(crate) fn new(o: O) -> Self { - Self(o) - } - - pub(crate) fn observer(&mut self) -> &mut O { - &mut self.0 - } -} - -impl Observation for Observed { - type Obs = Obs; - type CommitResult = (Obs, Option); - fn observer(&mut self) -> Option<&mut Self::Obs> { - Some(&mut self.0) - } - - fn make_result(self, hash: Option) -> Self::CommitResult { - (self.0, hash) - } - - fn branch(&self) -> Self { - Self(self.0.branch()) - } - - fn merge(&mut self, other: &Self) { - self.0.merge(&other.0) - } -} - -#[derive(Clone, Default, Debug)] -pub struct UnObserved; -impl UnObserved { - pub fn new() -> Self { - Self - } -} - -impl Observation for UnObserved { - type Obs = (); - type CommitResult = Option; - fn observer(&mut self) -> Option<&mut Self::Obs> { - None - } - - fn make_result(self, hash: Option) -> Self::CommitResult { - hash - } - - fn branch(&self) -> Self { - Self - } - - fn merge(&mut self, _other: &Self) {} -} diff --git a/rust/automerge/src/transaction/transactable.rs b/rust/automerge/src/transaction/transactable.rs deleted file mode 100644 index 05c48c79..00000000 --- a/rust/automerge/src/transaction/transactable.rs +++ /dev/null @@ -1,93 +0,0 @@ -use crate::exid::ExId; -use crate::{AutomergeError, ChangeHash, ObjType, Prop, ReadDoc, ScalarValue}; - -/// A way of mutating a document within a single change. 
-pub trait Transactable: ReadDoc { - /// Get the number of pending operations in this transaction. - fn pending_ops(&self) -> usize; - - /// Set the value of property `P` to value `V` in object `obj`. - /// - /// # Errors - /// - /// This will return an error if - /// - The object does not exist - /// - The key is the wrong type for the object - /// - The key does not exist in the object - fn put, P: Into, V: Into>( - &mut self, - obj: O, - prop: P, - value: V, - ) -> Result<(), AutomergeError>; - - /// Set the value of property `P` to the new object `V` in object `obj`. - /// - /// # Returns - /// - /// The id of the object which was created. - /// - /// # Errors - /// - /// This will return an error if - /// - The object does not exist - /// - The key is the wrong type for the object - /// - The key does not exist in the object - fn put_object, P: Into>( - &mut self, - obj: O, - prop: P, - object: ObjType, - ) -> Result; - - /// Insert a value into a list at the given index. - fn insert, V: Into>( - &mut self, - obj: O, - index: usize, - value: V, - ) -> Result<(), AutomergeError>; - - /// Insert an object into a list at the given index. - fn insert_object>( - &mut self, - obj: O, - index: usize, - object: ObjType, - ) -> Result; - - /// Increment the counter at the prop in the object by `value`. - fn increment, P: Into>( - &mut self, - obj: O, - prop: P, - value: i64, - ) -> Result<(), AutomergeError>; - - /// Delete the value at prop in the object. - fn delete, P: Into>( - &mut self, - obj: O, - prop: P, - ) -> Result<(), AutomergeError>; - - fn splice, V: IntoIterator>( - &mut self, - obj: O, - pos: usize, - del: usize, - vals: V, - ) -> Result<(), AutomergeError>; - - /// Like [`Self::splice`] but for text. 
- fn splice_text>( - &mut self, - obj: O, - pos: usize, - del: usize, - text: &str, - ) -> Result<(), AutomergeError>; - - /// The heads this transaction will be based on - fn base_heads(&self) -> Vec; -} diff --git a/rust/automerge/src/types.rs b/rust/automerge/src/types.rs deleted file mode 100644 index 468986ec..00000000 --- a/rust/automerge/src/types.rs +++ /dev/null @@ -1,844 +0,0 @@ -use crate::error; -use crate::legacy as amp; -use serde::{Deserialize, Serialize}; -use std::borrow::Cow; -use std::cmp::Eq; -use std::cmp::Ordering; -use std::fmt; -use std::fmt::Display; -use std::str::FromStr; -use tinyvec::{ArrayVec, TinyVec}; -//use crate::indexed_cache::IndexedCache; - -mod opids; -pub(crate) use opids::OpIds; - -pub(crate) use crate::clock::Clock; -pub(crate) use crate::value::{Counter, ScalarValue, Value}; - -pub(crate) const HEAD: ElemId = ElemId(OpId(0, 0)); -pub(crate) const ROOT: OpId = OpId(0, 0); - -const ROOT_STR: &str = "_root"; -const HEAD_STR: &str = "_head"; - -/// An actor id is a sequence of bytes. By default we use a uuid which can be nicely stack -/// allocated. -/// -/// In the event that users want to use their own type of identifier that is longer than a uuid -/// then they will likely end up pushing it onto the heap which is still fine. -/// -// Note that change encoding relies on the Ord implementation for the ActorId being implemented in -// terms of the lexicographic ordering of the underlying bytes. 
Be aware of this if you are -// changing the ActorId implementation in ways which might affect the Ord implementation -#[derive(Eq, PartialEq, Hash, Clone, PartialOrd, Ord)] -#[cfg_attr(feature = "derive-arbitrary", derive(arbitrary::Arbitrary))] -pub struct ActorId(TinyVec<[u8; 16]>); - -impl fmt::Debug for ActorId { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_tuple("ActorID") - .field(&hex::encode(&self.0)) - .finish() - } -} - -impl ActorId { - pub fn random() -> ActorId { - ActorId(TinyVec::from(*uuid::Uuid::new_v4().as_bytes())) - } - - pub fn to_bytes(&self) -> &[u8] { - &self.0 - } - - pub fn to_hex_string(&self) -> String { - hex::encode(&self.0) - } -} - -impl TryFrom<&str> for ActorId { - type Error = error::InvalidActorId; - - fn try_from(s: &str) -> Result { - hex::decode(s) - .map(ActorId::from) - .map_err(|_| error::InvalidActorId(s.into())) - } -} - -impl TryFrom for ActorId { - type Error = error::InvalidActorId; - - fn try_from(s: String) -> Result { - hex::decode(&s) - .map(ActorId::from) - .map_err(|_| error::InvalidActorId(s)) - } -} - -impl AsRef<[u8]> for ActorId { - fn as_ref(&self) -> &[u8] { - &self.0 - } -} - -impl From for ActorId { - fn from(u: uuid::Uuid) -> Self { - ActorId(TinyVec::from(*u.as_bytes())) - } -} - -impl From<&[u8]> for ActorId { - fn from(b: &[u8]) -> Self { - ActorId(TinyVec::from(b)) - } -} - -impl From<&Vec> for ActorId { - fn from(b: &Vec) -> Self { - ActorId::from(b.as_slice()) - } -} - -impl From> for ActorId { - fn from(b: Vec) -> Self { - let inner = if let Ok(arr) = ArrayVec::try_from(b.as_slice()) { - TinyVec::Inline(arr) - } else { - TinyVec::Heap(b) - }; - ActorId(inner) - } -} - -impl From<[u8; N]> for ActorId { - fn from(array: [u8; N]) -> Self { - ActorId::from(&array) - } -} - -impl From<&[u8; N]> for ActorId { - fn from(slice: &[u8; N]) -> Self { - let inner = if let Ok(arr) = ArrayVec::try_from(slice.as_slice()) { - TinyVec::Inline(arr) - } else { - 
TinyVec::Heap(slice.to_vec()) - }; - ActorId(inner) - } -} - -impl FromStr for ActorId { - type Err = error::InvalidActorId; - - fn from_str(s: &str) -> Result { - ActorId::try_from(s) - } -} - -impl fmt::Display for ActorId { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.to_hex_string()) - } -} - -/// The type of an object -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq, Copy, Hash)] -#[serde(rename_all = "camelCase", untagged)] -pub enum ObjType { - /// A map - Map, - /// Retained for backwards compatibility, tables are identical to maps - Table, - /// A sequence of arbitrary values - List, - /// A sequence of characters - Text, -} - -impl ObjType { - pub fn is_sequence(&self) -> bool { - matches!(self, Self::List | Self::Text) - } -} - -impl From for ObjType { - fn from(other: amp::MapType) -> Self { - match other { - amp::MapType::Map => Self::Map, - amp::MapType::Table => Self::Table, - } - } -} - -impl From for ObjType { - fn from(other: amp::SequenceType) -> Self { - match other { - amp::SequenceType::List => Self::List, - amp::SequenceType::Text => Self::Text, - } - } -} - -impl fmt::Display for ObjType { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - ObjType::Map => write!(f, "map"), - ObjType::Table => write!(f, "table"), - ObjType::List => write!(f, "list"), - ObjType::Text => write!(f, "text"), - } - } -} - -#[derive(PartialEq, Debug, Clone)] -pub enum OpType { - Make(ObjType), - Delete, - Increment(i64), - Put(ScalarValue), -} - -impl OpType { - /// The index into the action array as specified in [1] - /// - /// [1]: https://alexjg.github.io/automerge-storage-docs/#action-array - pub(crate) fn action_index(&self) -> u64 { - match self { - Self::Make(ObjType::Map) => 0, - Self::Put(_) => 1, - Self::Make(ObjType::List) => 2, - Self::Delete => 3, - Self::Make(ObjType::Text) => 4, - Self::Increment(_) => 5, - Self::Make(ObjType::Table) => 6, - } - } - - pub(crate) fn 
validate_action_and_value( - action: u64, - value: &ScalarValue, - ) -> Result<(), error::InvalidOpType> { - match action { - 0..=4 => Ok(()), - 5 => match value { - ScalarValue::Int(_) | ScalarValue::Uint(_) => Ok(()), - _ => Err(error::InvalidOpType::NonNumericInc), - }, - 6 => Ok(()), - _ => Err(error::InvalidOpType::UnknownAction(action)), - } - } - - pub(crate) fn from_action_and_value(action: u64, value: ScalarValue) -> OpType { - match action { - 0 => Self::Make(ObjType::Map), - 1 => Self::Put(value), - 2 => Self::Make(ObjType::List), - 3 => Self::Delete, - 4 => Self::Make(ObjType::Text), - 5 => match value { - ScalarValue::Int(i) => Self::Increment(i), - ScalarValue::Uint(i) => Self::Increment(i as i64), - _ => unreachable!("validate_action_and_value returned NonNumericInc"), - }, - 6 => Self::Make(ObjType::Table), - _ => unreachable!("validate_action_and_value returned UnknownAction"), - } - } -} - -impl From for OpType { - fn from(v: ObjType) -> Self { - OpType::Make(v) - } -} - -impl From for OpType { - fn from(v: ScalarValue) -> Self { - OpType::Put(v) - } -} - -#[derive(Debug)] -pub(crate) enum Export { - Id(OpId), - Special(String), - Prop(usize), -} - -pub(crate) trait Exportable { - fn export(&self) -> Export; -} - -impl Exportable for ObjId { - fn export(&self) -> Export { - if self.0 == ROOT { - Export::Special(ROOT_STR.to_owned()) - } else { - Export::Id(self.0) - } - } -} - -impl Exportable for &ObjId { - fn export(&self) -> Export { - if self.0 == ROOT { - Export::Special(ROOT_STR.to_owned()) - } else { - Export::Id(self.0) - } - } -} - -impl Exportable for ElemId { - fn export(&self) -> Export { - if self == &HEAD { - Export::Special(HEAD_STR.to_owned()) - } else { - Export::Id(self.0) - } - } -} - -impl Exportable for OpId { - fn export(&self) -> Export { - Export::Id(*self) - } -} - -impl Exportable for Key { - fn export(&self) -> Export { - match self { - Key::Map(p) => Export::Prop(*p), - Key::Seq(e) => e.export(), - } - } -} - -impl From 
for OpId { - fn from(o: ObjId) -> Self { - o.0 - } -} - -impl From for ObjId { - fn from(o: OpId) -> Self { - ObjId(o) - } -} - -impl From for ElemId { - fn from(o: OpId) -> Self { - ElemId(o) - } -} - -impl From for Prop { - fn from(p: String) -> Self { - Prop::Map(p) - } -} - -impl From<&String> for Prop { - fn from(p: &String) -> Self { - Prop::Map(p.clone()) - } -} - -impl From<&str> for Prop { - fn from(p: &str) -> Self { - Prop::Map(p.to_owned()) - } -} - -impl From for Prop { - fn from(index: usize) -> Self { - Prop::Seq(index) - } -} - -impl From for Prop { - fn from(index: f64) -> Self { - Prop::Seq(index as usize) - } -} - -impl From for Key { - fn from(id: OpId) -> Self { - Key::Seq(ElemId(id)) - } -} - -impl From for Key { - fn from(e: ElemId) -> Self { - Key::Seq(e) - } -} - -impl From> for ElemId { - fn from(e: Option) -> Self { - e.unwrap_or(HEAD) - } -} - -impl From> for Key { - fn from(e: Option) -> Self { - Key::Seq(e.into()) - } -} - -#[derive(Debug, PartialEq, PartialOrd, Eq, Ord, Clone, Copy, Hash)] -pub(crate) enum Key { - Map(usize), - Seq(ElemId), -} - -/// A property of an object -/// -/// This is either a string representing a property in a map, or an integer -/// which is the index into a sequence -#[derive(Debug, PartialEq, PartialOrd, Eq, Ord, Clone)] -pub enum Prop { - /// A property in a map - Map(String), - /// An index into a sequence - Seq(usize), -} - -impl Prop { - pub(crate) fn to_index(&self) -> Option { - match self { - Prop::Map(_) => None, - Prop::Seq(n) => Some(*n), - } - } -} - -impl Display for Prop { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Prop::Map(s) => write!(f, "{}", s), - Prop::Seq(i) => write!(f, "{}", i), - } - } -} - -impl Key { - pub(crate) fn elemid(&self) -> Option { - match self { - Key::Map(_) => None, - Key::Seq(id) => Some(*id), - } - } -} - -#[derive(Debug, Clone, PartialOrd, Ord, Eq, PartialEq, Copy, Hash, Default)] -pub(crate) struct OpId(u32, u32); - -impl OpId { - 
pub(crate) fn new(counter: u64, actor: usize) -> Self { - Self(counter.try_into().unwrap(), actor.try_into().unwrap()) - } - - #[inline] - pub(crate) fn counter(&self) -> u64 { - self.0.into() - } - - #[inline] - pub(crate) fn actor(&self) -> usize { - self.1.try_into().unwrap() - } - - #[inline] - pub(crate) fn lamport_cmp(&self, other: &OpId, actors: &[ActorId]) -> Ordering { - self.0 - .cmp(&other.0) - .then_with(|| actors[self.1 as usize].cmp(&actors[other.1 as usize])) - } -} - -#[derive(Debug, Clone, Copy, PartialOrd, Eq, PartialEq, Ord, Hash, Default)] -pub(crate) struct ObjId(pub(crate) OpId); - -impl ObjId { - pub(crate) const fn root() -> Self { - ObjId(OpId(0, 0)) - } - - pub(crate) fn is_root(&self) -> bool { - self.0.counter() == 0 - } - - pub(crate) fn opid(&self) -> &OpId { - &self.0 - } -} - -/// How indexes into text sequeces are calculated -/// -/// Automerge text objects are internally sequences of utf8 characters. This -/// means that in environments (such as javascript) which use a different -/// encoding the indexes into the text sequence will be different. This enum -/// represents the different ways indexes can be calculated. 
-#[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub enum TextEncoding { - /// The indexes are calculated using the utf8 encoding - Utf8, - /// The indexes are calculated using the utf16 encoding - Utf16, -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub(crate) enum ListEncoding { - List, - Text(TextEncoding), -} - -impl Default for ListEncoding { - fn default() -> Self { - ListEncoding::List - } -} - -impl Default for TextEncoding { - fn default() -> Self { - TextEncoding::Utf8 - } -} - -impl ListEncoding { - pub(crate) fn new(obj: ObjType, text_encoding: TextEncoding) -> Self { - if obj == ObjType::Text { - ListEncoding::Text(text_encoding) - } else { - ListEncoding::List - } - } -} - -#[derive(Debug, Clone, Copy, PartialOrd, Eq, PartialEq, Ord, Hash, Default)] -pub(crate) struct ElemId(pub(crate) OpId); - -impl ElemId { - pub(crate) fn is_head(&self) -> bool { - *self == HEAD - } - - pub(crate) fn head() -> Self { - Self(OpId(0, 0)) - } -} - -#[derive(Debug, Clone, PartialEq)] -pub(crate) struct Op { - pub(crate) id: OpId, - pub(crate) action: OpType, - pub(crate) key: Key, - pub(crate) succ: OpIds, - pub(crate) pred: OpIds, - pub(crate) insert: bool, -} - -impl Op { - pub(crate) fn add_succ std::cmp::Ordering>(&mut self, op: &Op, cmp: F) { - self.succ.add(op.id, cmp); - if let OpType::Put(ScalarValue::Counter(Counter { - current, - increments, - .. - })) = &mut self.action - { - if let OpType::Increment(n) = &op.action { - *current += *n; - *increments += 1; - } - } - } - - pub(crate) fn remove_succ(&mut self, op: &Op) { - self.succ.retain(|id| id != &op.id); - if let OpType::Put(ScalarValue::Counter(Counter { - current, - increments, - .. 
- })) = &mut self.action - { - if let OpType::Increment(n) = &op.action { - *current -= *n; - *increments -= 1; - } - } - } - - pub(crate) fn width(&self, encoding: ListEncoding) -> usize { - match encoding { - ListEncoding::List => 1, - ListEncoding::Text(TextEncoding::Utf8) => self.to_str().chars().count(), - ListEncoding::Text(TextEncoding::Utf16) => self.to_str().encode_utf16().count(), - } - } - - pub(crate) fn to_str(&self) -> &str { - if let OpType::Put(ScalarValue::Str(s)) = &self.action { - s - } else { - "\u{fffc}" - } - } - - pub(crate) fn visible(&self) -> bool { - if self.is_inc() { - false - } else if self.is_counter() { - self.succ.len() <= self.incs() - } else { - self.succ.is_empty() - } - } - - pub(crate) fn incs(&self) -> usize { - if let OpType::Put(ScalarValue::Counter(Counter { increments, .. })) = &self.action { - *increments - } else { - 0 - } - } - - pub(crate) fn is_delete(&self) -> bool { - matches!(&self.action, OpType::Delete) - } - - pub(crate) fn is_inc(&self) -> bool { - matches!(&self.action, OpType::Increment(_)) - } - - pub(crate) fn is_counter(&self) -> bool { - matches!(&self.action, OpType::Put(ScalarValue::Counter(_))) - } - - pub(crate) fn is_noop(&self, action: &OpType) -> bool { - matches!((&self.action, action), (OpType::Put(n), OpType::Put(m)) if n == m) - } - - pub(crate) fn is_list_op(&self) -> bool { - matches!(&self.key, Key::Seq(_)) - } - - pub(crate) fn overwrites(&self, other: &Op) -> bool { - self.pred.iter().any(|i| i == &other.id) - } - - pub(crate) fn elemid(&self) -> Option { - self.elemid_or_key().elemid() - } - - pub(crate) fn elemid_or_key(&self) -> Key { - if self.insert { - Key::Seq(ElemId(self.id)) - } else { - self.key - } - } - - pub(crate) fn get_increment_value(&self) -> Option { - if let OpType::Increment(i) = self.action { - Some(i) - } else { - None - } - } - - pub(crate) fn value(&self) -> Value<'_> { - match &self.action { - OpType::Make(obj_type) => Value::Object(*obj_type), - 
OpType::Put(scalar) => Value::Scalar(Cow::Borrowed(scalar)), - _ => panic!("cant convert op into a value - {:?}", self), - } - } - - pub(crate) fn clone_value(&self) -> Value<'static> { - match &self.action { - OpType::Make(obj_type) => Value::Object(*obj_type), - OpType::Put(scalar) => Value::Scalar(Cow::Owned(scalar.clone())), - _ => panic!("cant convert op into a value - {:?}", self), - } - } - - #[allow(dead_code)] - pub(crate) fn dump(&self) -> String { - match &self.action { - OpType::Put(value) if self.insert => format!("i:{}", value), - OpType::Put(value) => format!("s:{}", value), - OpType::Make(obj) => format!("make{}", obj), - OpType::Increment(val) => format!("inc:{}", val), - OpType::Delete => "del".to_string(), - } - } -} - -#[derive(Debug, Clone)] -pub(crate) struct Peer {} - -/// The number of bytes in a change hash. -pub(crate) const HASH_SIZE: usize = 32; // 256 bits = 32 bytes - -/// The sha256 hash of a change. -#[derive(Eq, PartialEq, Hash, Clone, PartialOrd, Ord, Copy)] -pub struct ChangeHash(pub [u8; HASH_SIZE]); - -impl ChangeHash { - pub(crate) fn as_bytes(&self) -> &[u8] { - &self.0 - } - - pub(crate) fn checksum(&self) -> [u8; 4] { - [self.0[0], self.0[1], self.0[2], self.0[3]] - } -} - -impl AsRef<[u8]> for ChangeHash { - fn as_ref(&self) -> &[u8] { - &self.0 - } -} - -impl fmt::Debug for ChangeHash { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_tuple("ChangeHash") - .field(&hex::encode(self.0)) - .finish() - } -} - -impl fmt::Display for ChangeHash { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", hex::encode(self.0)) - } -} - -#[derive(thiserror::Error, Debug)] -pub enum ParseChangeHashError { - #[error(transparent)] - HexDecode(#[from] hex::FromHexError), - #[error( - "incorrect length, change hash should be {} bytes, got {actual}", - HASH_SIZE - )] - IncorrectLength { actual: usize }, -} - -impl FromStr for ChangeHash { - type Err = ParseChangeHashError; - - fn from_str(s: 
&str) -> Result { - let bytes = hex::decode(s)?; - if bytes.len() == HASH_SIZE { - Ok(ChangeHash(bytes.try_into().unwrap())) - } else { - Err(ParseChangeHashError::IncorrectLength { - actual: bytes.len(), - }) - } - } -} - -impl TryFrom<&[u8]> for ChangeHash { - type Error = error::InvalidChangeHashSlice; - - fn try_from(bytes: &[u8]) -> Result { - if bytes.len() != HASH_SIZE { - Err(error::InvalidChangeHashSlice(Vec::from(bytes))) - } else { - let mut array = [0; HASH_SIZE]; - array.copy_from_slice(bytes); - Ok(ChangeHash(array)) - } - } -} - -#[cfg(feature = "wasm")] -impl From for wasm_bindgen::JsValue { - fn from(prop: Prop) -> Self { - match prop { - Prop::Map(key) => key.into(), - Prop::Seq(index) => (index as f64).into(), - } - } -} - -#[cfg(test)] -pub(crate) mod gen { - use super::{ - ChangeHash, Counter, ElemId, Key, ObjType, Op, OpId, OpIds, OpType, ScalarValue, HASH_SIZE, - }; - use proptest::prelude::*; - - pub(crate) fn gen_hash() -> impl Strategy { - proptest::collection::vec(proptest::bits::u8::ANY, HASH_SIZE) - .prop_map(|b| ChangeHash::try_from(&b[..]).unwrap()) - } - - pub(crate) fn gen_scalar_value() -> impl Strategy { - prop_oneof![ - proptest::collection::vec(proptest::bits::u8::ANY, 0..200).prop_map(ScalarValue::Bytes), - "[a-z]{10,500}".prop_map(|s| ScalarValue::Str(s.into())), - any::().prop_map(ScalarValue::Int), - any::().prop_map(ScalarValue::Uint), - any::().prop_map(ScalarValue::F64), - any::().prop_map(|c| ScalarValue::Counter(Counter::from(c))), - any::().prop_map(ScalarValue::Timestamp), - any::().prop_map(ScalarValue::Boolean), - Just(ScalarValue::Null), - ] - } - - pub(crate) fn gen_objtype() -> impl Strategy { - prop_oneof![ - Just(ObjType::Map), - Just(ObjType::Table), - Just(ObjType::List), - Just(ObjType::Text), - ] - } - - pub(crate) fn gen_action() -> impl Strategy { - prop_oneof![ - Just(OpType::Delete), - any::().prop_map(OpType::Increment), - gen_scalar_value().prop_map(OpType::Put), - gen_objtype().prop_map(OpType::Make) 
- ] - } - - pub(crate) fn gen_key(key_indices: Vec) -> impl Strategy { - prop_oneof![ - proptest::sample::select(key_indices).prop_map(Key::Map), - Just(Key::Seq(ElemId(OpId::new(0, 0)))), - ] - } - - /// Generate an arbitrary op - /// - /// The generated op will have no preds or succs - /// - /// # Arguments - /// - /// * `id` - the OpId this op will be given - /// * `key_prop_indices` - The indices of props which will be used to generate keys of type - /// `Key::Map`. I.e. this is what would typically be in `OpSetMetadata::props - pub(crate) fn gen_op(id: OpId, key_prop_indices: Vec) -> impl Strategy { - (gen_key(key_prop_indices), any::(), gen_action()).prop_map( - move |(key, insert, action)| Op { - id, - key, - insert, - action, - succ: OpIds::empty(), - pred: OpIds::empty(), - }, - ) - } -} diff --git a/rust/automerge/src/types/opids.rs b/rust/automerge/src/types/opids.rs deleted file mode 100644 index a81ccb36..00000000 --- a/rust/automerge/src/types/opids.rs +++ /dev/null @@ -1,198 +0,0 @@ -use itertools::Itertools; - -use super::OpId; - -/// A wrapper around `Vec` which preserves the invariant that the ops are -/// in ascending order with respect to their counters and actor IDs. In order to -/// maintain this invariant you must provide a comparator function when adding -/// ops as the actor indices in an OpId are not sufficient to order the OpIds -#[derive(Debug, Clone, PartialEq, Default)] -pub(crate) struct OpIds(Vec); - -impl<'a> IntoIterator for &'a OpIds { - type Item = &'a OpId; - type IntoIter = std::slice::Iter<'a, OpId>; - - fn into_iter(self) -> Self::IntoIter { - self.0.iter() - } -} - -impl OpIds { - pub(crate) fn empty() -> Self { - Self(Vec::new()) - } - - pub(crate) fn new, F: Fn(&OpId, &OpId) -> std::cmp::Ordering>( - opids: I, - cmp: F, - ) -> Self { - let mut inner = opids.collect::>(); - inner.sort_by(cmp); - Self(inner) - } - - /// Create a new OpIds if `opids` are sorted with respect to `cmp` and contain no duplicates. 
- /// - /// Returns `Some(OpIds)` if `opids` is sorted and has no duplicates, otherwise returns `None` - pub(crate) fn new_if_sorted std::cmp::Ordering>( - opids: Vec, - cmp: F, - ) -> Option { - if are_sorted_and_unique(opids.iter(), cmp) { - Some(Self(opids)) - } else { - None - } - } - - /// Add an op to this set of OpIds. The `comparator` must provide a - /// consistent ordering between successive calls to `add`. - pub(crate) fn add std::cmp::Ordering>( - &mut self, - opid: OpId, - comparator: F, - ) { - use std::cmp::Ordering::*; - if self.is_empty() { - self.0.push(opid); - return; - } - let idx_and_elem = self - .0 - .iter() - .find_position(|an_opid| matches!(comparator(an_opid, &opid), Greater | Equal)); - if let Some((idx, an_opid)) = idx_and_elem { - if comparator(an_opid, &opid) == Equal { - // nothing to do - } else { - self.0.insert(idx, opid); - } - } else { - self.0.push(opid); - } - } - - pub(crate) fn retain bool>(&mut self, f: F) { - self.0.retain(f) - } - - pub(crate) fn is_empty(&self) -> bool { - self.0.is_empty() - } - - pub(crate) fn len(&self) -> usize { - self.0.len() - } - - pub(crate) fn iter(&self) -> std::slice::Iter<'_, OpId> { - self.0.iter() - } - - pub(crate) fn contains(&self, op: &OpId) -> bool { - self.0.contains(op) - } - - pub(crate) fn get(&self, idx: usize) -> Option<&OpId> { - self.0.get(idx) - } -} - -fn are_sorted_and_unique< - 'a, - I: Iterator, - F: FnMut(&OpId, &OpId) -> std::cmp::Ordering, ->( - mut opids: I, - mut f: F, -) -> bool { - use std::cmp::Ordering; - let mut last = match opids.next() { - Some(e) => e, - None => return true, - }; - - for next in opids { - if matches!(f(last, next), Ordering::Greater | Ordering::Equal) { - return false; - } - last = next; - } - true -} - -#[cfg(test)] -mod tests { - use super::{OpId, OpIds}; - use crate::ActorId; - use proptest::prelude::*; - - fn gen_opid(actors: Vec) -> impl Strategy { - (0..actors.len()).prop_flat_map(|actor_idx| { - (Just(actor_idx), 0..(u32::MAX as u64)) 
- .prop_map(|(actor_idx, counter)| OpId::new(counter, actor_idx)) - }) - } - - fn scenario(size: std::ops::Range) -> impl Strategy, Vec)> { - let actors = vec![ - "aaaa".try_into().unwrap(), - "cccc".try_into().unwrap(), - "bbbb".try_into().unwrap(), - ]; - proptest::collection::vec(gen_opid(actors.clone()), size) - .prop_map(move |opids| (actors.clone(), opids)) - } - - fn duplicate_unsorted_scenario() -> impl Strategy, Vec)> { - scenario(1..100).prop_map(|(actors, mut opids)| { - let mut sorted_opids = opids.clone(); - sorted_opids.sort_by(|left, right| cmp(&actors, left, right)); - sorted_opids.dedup(); - // Unwrap is okay due to the size we pass to `scenario()` - let last = *sorted_opids.last().unwrap(); - if sorted_opids == opids { - // Opids are sorted and deduplicated, just copy the last opid and insert it at the - // front - opids.insert(0, last); - } - (actors, opids) - }) - } - - proptest! { - #[test] - fn test_sorted_opids((actors, opids) in scenario(0..100)) { - let mut sorted_opids = OpIds::default(); - for opid in &opids { - sorted_opids.add(*opid, |left, right| cmp(&actors, left, right)); - } - let result = sorted_opids.into_iter().cloned().collect::>(); - let mut expected = opids; - expected.sort_by(|left, right| cmp(&actors, left, right)); - expected.dedup(); - assert_eq!(result, expected); - } - - #[test] - fn test_new_if_sorted((actors, opids) in duplicate_unsorted_scenario()) { - let mut expected = opids.clone(); - assert_eq!(OpIds::new_if_sorted(opids, |left, right| cmp(&actors, left, right)), None); - expected.sort_by(|left, right| cmp(&actors, left, right)); - expected.dedup(); - let result = OpIds::new_if_sorted(expected.clone(), |left, right| cmp(&actors, left, right)).unwrap().into_iter().cloned().collect::>(); - assert_eq!(result, expected) - } - } - - fn cmp(actors: &[ActorId], left: &OpId, right: &OpId) -> std::cmp::Ordering { - use std::cmp::Ordering; - match (left, right) { - (OpId(0, _), OpId(0, _)) => Ordering::Equal, - (OpId(0, _), 
OpId(_, _)) => Ordering::Less, - (OpId(_, _), OpId(0, _)) => Ordering::Greater, - (OpId(a, x), OpId(b, y)) if a == b => actors[*x as usize].cmp(&actors[*y as usize]), - (OpId(a, _), OpId(b, _)) => a.cmp(b), - } - } -} diff --git a/rust/automerge/src/values.rs b/rust/automerge/src/values.rs deleted file mode 100644 index 15ccb4cb..00000000 --- a/rust/automerge/src/values.rs +++ /dev/null @@ -1,57 +0,0 @@ -use crate::exid::ExId; -use crate::{Automerge, Value}; -use std::fmt; - -/// An iterator over the values in an object -/// -/// This is returned by the [`crate::ReadDoc::values`] and [`crate::ReadDoc::values_at`] methods -pub struct Values<'a> { - range: Box>, - doc: &'a Automerge, -} - -impl<'a> fmt::Debug for Values<'a> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Values").finish() - } -} - -pub(crate) trait ValueIter<'a> { - fn next_value(&mut self, doc: &'a Automerge) -> Option<(Value<'a>, ExId)>; -} - -pub(crate) struct NoValues {} - -impl<'a> ValueIter<'a> for NoValues { - fn next_value(&mut self, _doc: &'a Automerge) -> Option<(Value<'a>, ExId)> { - None - } -} - -impl<'a> Values<'a> { - pub(crate) fn new>(doc: &'a Automerge, range: Option) -> Self { - if let Some(range) = range { - Self { - range: Box::new(range), - doc, - } - } else { - Self::empty(doc) - } - } - - pub(crate) fn empty(doc: &'a Automerge) -> Self { - Self { - range: Box::new(NoValues {}), - doc, - } - } -} - -impl<'a> Iterator for Values<'a> { - type Item = (Value<'a>, ExId); - - fn next(&mut self) -> Option { - self.range.next_value(self.doc) - } -} diff --git a/rust/automerge/tests/fixtures/64bit_obj_id_change.automerge b/rust/automerge/tests/fixtures/64bit_obj_id_change.automerge deleted file mode 100644 index 700342a2..00000000 Binary files a/rust/automerge/tests/fixtures/64bit_obj_id_change.automerge and /dev/null differ diff --git a/rust/automerge/tests/fixtures/64bit_obj_id_doc.automerge b/rust/automerge/tests/fixtures/64bit_obj_id_doc.automerge 
deleted file mode 100644 index 6beb57fe..00000000 Binary files a/rust/automerge/tests/fixtures/64bit_obj_id_doc.automerge and /dev/null differ diff --git a/rust/automerge/tests/fixtures/counter_value_has_incorrect_meta.automerge b/rust/automerge/tests/fixtures/counter_value_has_incorrect_meta.automerge deleted file mode 100644 index 2290b446..00000000 Binary files a/rust/automerge/tests/fixtures/counter_value_has_incorrect_meta.automerge and /dev/null differ diff --git a/rust/automerge/tests/fixtures/counter_value_is_ok.automerge b/rust/automerge/tests/fixtures/counter_value_is_ok.automerge deleted file mode 100644 index fdc59896..00000000 Binary files a/rust/automerge/tests/fixtures/counter_value_is_ok.automerge and /dev/null differ diff --git a/rust/automerge/tests/fixtures/counter_value_is_overlong.automerge b/rust/automerge/tests/fixtures/counter_value_is_overlong.automerge deleted file mode 100644 index 831346f7..00000000 Binary files a/rust/automerge/tests/fixtures/counter_value_is_overlong.automerge and /dev/null differ diff --git a/rust/automerge/tests/fixtures/two_change_chunks.automerge b/rust/automerge/tests/fixtures/two_change_chunks.automerge deleted file mode 100644 index 1a84b363..00000000 Binary files a/rust/automerge/tests/fixtures/two_change_chunks.automerge and /dev/null differ diff --git a/rust/automerge/tests/fixtures/two_change_chunks_compressed.automerge b/rust/automerge/tests/fixtures/two_change_chunks_compressed.automerge deleted file mode 100644 index 9e3f305f..00000000 Binary files a/rust/automerge/tests/fixtures/two_change_chunks_compressed.automerge and /dev/null differ diff --git a/rust/automerge/tests/fixtures/two_change_chunks_out_of_order.automerge b/rust/automerge/tests/fixtures/two_change_chunks_out_of_order.automerge deleted file mode 100644 index 9ba0355f..00000000 Binary files a/rust/automerge/tests/fixtures/two_change_chunks_out_of_order.automerge and /dev/null differ diff --git 
a/rust/automerge/tests/fuzz-crashers/action-is-48.automerge b/rust/automerge/tests/fuzz-crashers/action-is-48.automerge deleted file mode 100644 index 16e6f719..00000000 Binary files a/rust/automerge/tests/fuzz-crashers/action-is-48.automerge and /dev/null differ diff --git a/rust/automerge/tests/fuzz-crashers/crash-da39a3ee5e6b4b0d3255bfef95601890afd80709 b/rust/automerge/tests/fuzz-crashers/crash-da39a3ee5e6b4b0d3255bfef95601890afd80709 deleted file mode 100644 index bcb12cdd..00000000 Binary files a/rust/automerge/tests/fuzz-crashers/crash-da39a3ee5e6b4b0d3255bfef95601890afd80709 and /dev/null differ diff --git a/rust/automerge/tests/fuzz-crashers/incorrect_max_op.automerge b/rust/automerge/tests/fuzz-crashers/incorrect_max_op.automerge deleted file mode 100644 index 05cc2c82..00000000 Binary files a/rust/automerge/tests/fuzz-crashers/incorrect_max_op.automerge and /dev/null differ diff --git a/rust/automerge/tests/fuzz-crashers/invalid_deflate_stream.automerge b/rust/automerge/tests/fuzz-crashers/invalid_deflate_stream.automerge deleted file mode 100644 index 21e869eb..00000000 Binary files a/rust/automerge/tests/fuzz-crashers/invalid_deflate_stream.automerge and /dev/null differ diff --git a/rust/automerge/tests/fuzz-crashers/missing_actor.automerge b/rust/automerge/tests/fuzz-crashers/missing_actor.automerge deleted file mode 100644 index cc8c61b1..00000000 Binary files a/rust/automerge/tests/fuzz-crashers/missing_actor.automerge and /dev/null differ diff --git a/rust/automerge/tests/fuzz-crashers/missing_deps.automerge b/rust/automerge/tests/fuzz-crashers/missing_deps.automerge deleted file mode 100644 index 8a57a0f4..00000000 Binary files a/rust/automerge/tests/fuzz-crashers/missing_deps.automerge and /dev/null differ diff --git a/rust/automerge/tests/fuzz-crashers/missing_deps_compressed.automerge b/rust/automerge/tests/fuzz-crashers/missing_deps_compressed.automerge deleted file mode 100644 index 2c7b123b..00000000 Binary files 
a/rust/automerge/tests/fuzz-crashers/missing_deps_compressed.automerge and /dev/null differ diff --git a/rust/automerge/tests/fuzz-crashers/missing_deps_subsequent.automerge b/rust/automerge/tests/fuzz-crashers/missing_deps_subsequent.automerge deleted file mode 100644 index 2fe439af..00000000 Binary files a/rust/automerge/tests/fuzz-crashers/missing_deps_subsequent.automerge and /dev/null differ diff --git a/rust/automerge/tests/fuzz-crashers/overflow_in_length.automerge b/rust/automerge/tests/fuzz-crashers/overflow_in_length.automerge deleted file mode 100644 index 45771f34..00000000 Binary files a/rust/automerge/tests/fuzz-crashers/overflow_in_length.automerge and /dev/null differ diff --git a/rust/automerge/tests/fuzz-crashers/too_many_deps.automerge b/rust/automerge/tests/fuzz-crashers/too_many_deps.automerge deleted file mode 100644 index 657ce993..00000000 Binary files a/rust/automerge/tests/fuzz-crashers/too_many_deps.automerge and /dev/null differ diff --git a/rust/automerge/tests/fuzz-crashers/too_many_ops.automerge b/rust/automerge/tests/fuzz-crashers/too_many_ops.automerge deleted file mode 100644 index 661258b0..00000000 Binary files a/rust/automerge/tests/fuzz-crashers/too_many_ops.automerge and /dev/null differ diff --git a/rust/automerge/tests/test.rs b/rust/automerge/tests/test.rs deleted file mode 100644 index 3be6725e..00000000 --- a/rust/automerge/tests/test.rs +++ /dev/null @@ -1,1499 +0,0 @@ -use automerge::transaction::Transactable; -use automerge::{ - ActorId, AutoCommit, Automerge, AutomergeError, Change, ExpandedChange, ObjType, ReadDoc, - ScalarValue, VecOpObserver, ROOT, -}; -use std::fs; - -// set up logging for all the tests -//use test_log::test; - -#[allow(unused_imports)] -use automerge_test::{ - assert_doc, assert_obj, list, map, mk_counter, new_doc, new_doc_with_actor, pretty_print, - realize, realize_obj, sorted_actors, RealizedObject, -}; -use pretty_assertions::assert_eq; - -#[test] -fn no_conflict_on_repeated_assignment() { - 
let mut doc = AutoCommit::new(); - doc.put(&automerge::ROOT, "foo", 1).unwrap(); - doc.put(&automerge::ROOT, "foo", 2).unwrap(); - assert_doc!( - &doc, - map! { - "foo" => { 2 }, - } - ); -} - -#[test] -fn repeated_map_assignment_which_resolves_conflict_not_ignored() { - let mut doc1 = new_doc(); - let mut doc2 = new_doc(); - doc1.put(&automerge::ROOT, "field", 123).unwrap(); - doc2.merge(&mut doc1).unwrap(); - doc2.put(&automerge::ROOT, "field", 456).unwrap(); - doc1.put(&automerge::ROOT, "field", 789).unwrap(); - doc1.merge(&mut doc2).unwrap(); - assert_eq!(doc1.get_all(&automerge::ROOT, "field").unwrap().len(), 2); - - doc1.put(&automerge::ROOT, "field", 123).unwrap(); - assert_doc!( - &doc1, - map! { - "field" => { 123 } - } - ); -} - -#[test] -fn repeated_list_assignment_which_resolves_conflict_not_ignored() { - let mut doc1 = new_doc(); - let mut doc2 = new_doc(); - let list_id = doc1 - .put_object(&automerge::ROOT, "list", ObjType::List) - .unwrap(); - doc1.insert(&list_id, 0, 123).unwrap(); - doc2.merge(&mut doc1).unwrap(); - doc2.put(&list_id, 0, 456).unwrap(); - doc1.merge(&mut doc2).unwrap(); - doc1.put(&list_id, 0, 789).unwrap(); - - assert_doc!( - &doc1, - map! { - "list" => { - list![ - { 789 }, - ] - } - } - ); -} - -#[test] -fn list_deletion() { - let mut doc = new_doc(); - let list_id = doc - .put_object(&automerge::ROOT, "list", ObjType::List) - .unwrap(); - doc.insert(&list_id, 0, 123).unwrap(); - doc.insert(&list_id, 1, 456).unwrap(); - doc.insert(&list_id, 2, 789).unwrap(); - doc.delete(&list_id, 1).unwrap(); - assert_doc!( - &doc, - map! 
{ - "list" => { list![ - { 123 }, - { 789 }, - ]} - } - ) -} - -#[test] -fn merge_concurrent_map_prop_updates() { - let mut doc1 = new_doc(); - let mut doc2 = new_doc(); - doc1.put(&automerge::ROOT, "foo", "bar").unwrap(); - doc2.put(&automerge::ROOT, "hello", "world").unwrap(); - doc1.merge(&mut doc2).unwrap(); - assert_eq!( - doc1.get(&automerge::ROOT, "foo").unwrap().unwrap().0, - "bar".into() - ); - assert_doc!( - &doc1, - map! { - "foo" => { "bar" }, - "hello" => { "world" }, - } - ); - doc2.merge(&mut doc1).unwrap(); - assert_doc!( - &doc2, - map! { - "foo" => { "bar" }, - "hello" => { "world" }, - } - ); - assert_eq!(realize(doc1.document()), realize(doc2.document())); -} - -#[test] -fn add_concurrent_increments_of_same_property() { - let mut doc1 = new_doc(); - let mut doc2 = new_doc(); - doc1.put(&automerge::ROOT, "counter", mk_counter(0)) - .unwrap(); - doc2.merge(&mut doc1).unwrap(); - doc1.increment(&automerge::ROOT, "counter", 1).unwrap(); - doc2.increment(&automerge::ROOT, "counter", 2).unwrap(); - doc1.merge(&mut doc2).unwrap(); - assert_doc!( - &doc1, - map! { - "counter" => { - mk_counter(3) - } - } - ); -} - -#[test] -fn add_increments_only_to_preceeded_values() { - let mut doc1 = new_doc(); - let mut doc2 = new_doc(); - - doc1.put(&automerge::ROOT, "counter", mk_counter(0)) - .unwrap(); - doc1.increment(&automerge::ROOT, "counter", 1).unwrap(); - - // create a counter in doc2 - doc2.put(&automerge::ROOT, "counter", mk_counter(0)) - .unwrap(); - doc2.increment(&automerge::ROOT, "counter", 3).unwrap(); - - // The two values should be conflicting rather than added - doc1.merge(&mut doc2).unwrap(); - - assert_doc!( - &doc1, - map! 
{ - "counter" => { - mk_counter(1), - mk_counter(3), - } - } - ); -} - -#[test] -fn concurrent_updates_of_same_field() { - let mut doc1 = new_doc(); - let mut doc2 = new_doc(); - doc1.put(&automerge::ROOT, "field", "one").unwrap(); - doc2.put(&automerge::ROOT, "field", "two").unwrap(); - - doc1.merge(&mut doc2).unwrap(); - - assert_doc!( - &doc1, - map! { - "field" => { - "one", - "two", - } - } - ); -} - -#[test] -fn concurrent_updates_of_same_list_element() { - let mut doc1 = new_doc(); - let mut doc2 = new_doc(); - let list_id = doc1 - .put_object(&automerge::ROOT, "birds", ObjType::List) - .unwrap(); - doc1.insert(&list_id, 0, "finch").unwrap(); - doc2.merge(&mut doc1).unwrap(); - doc1.put(&list_id, 0, "greenfinch").unwrap(); - doc2.put(&list_id, 0, "goldfinch").unwrap(); - - doc1.merge(&mut doc2).unwrap(); - - assert_doc!( - &doc1, - map! { - "birds" => { - list![{ - "greenfinch", - "goldfinch", - }] - } - } - ); -} - -#[test] -fn assignment_conflicts_of_different_types() { - let mut doc1 = new_doc(); - let mut doc2 = new_doc(); - let mut doc3 = new_doc(); - doc1.put(&automerge::ROOT, "field", "string").unwrap(); - doc2.put_object(&automerge::ROOT, "field", ObjType::List) - .unwrap(); - doc3.put_object(&automerge::ROOT, "field", ObjType::Map) - .unwrap(); - doc1.merge(&mut doc2).unwrap(); - doc1.merge(&mut doc3).unwrap(); - - assert_doc!( - &doc1, - map! { - "field" => { - "string", - list!{}, - map!{}, - } - } - ); -} - -#[test] -fn changes_within_conflicting_map_field() { - let mut doc1 = new_doc(); - let mut doc2 = new_doc(); - doc1.put(&automerge::ROOT, "field", "string").unwrap(); - let map_id = doc2 - .put_object(&automerge::ROOT, "field", ObjType::Map) - .unwrap(); - doc2.put(&map_id, "innerKey", 42).unwrap(); - doc1.merge(&mut doc2).unwrap(); - - assert_doc!( - &doc1, - map! 
{ - "field" => { - "string", - map!{ - "innerKey" => { - 42, - } - } - } - } - ); -} - -#[test] -fn changes_within_conflicting_list_element() { - let (actor1, actor2) = sorted_actors(); - let mut doc1 = new_doc_with_actor(actor1); - let mut doc2 = new_doc_with_actor(actor2); - let list_id = doc1 - .put_object(&automerge::ROOT, "list", ObjType::List) - .unwrap(); - doc1.insert(&list_id, 0, "hello").unwrap(); - doc2.merge(&mut doc1).unwrap(); - - let map_in_doc1 = doc1.put_object(&list_id, 0, ObjType::Map).unwrap(); - doc1.put(&map_in_doc1, "map1", true).unwrap(); - doc1.put(&map_in_doc1, "key", 1).unwrap(); - - let map_in_doc2 = doc2.put_object(&list_id, 0, ObjType::Map).unwrap(); - doc1.merge(&mut doc2).unwrap(); - doc2.put(&map_in_doc2, "map2", true).unwrap(); - doc2.put(&map_in_doc2, "key", 2).unwrap(); - - doc1.merge(&mut doc2).unwrap(); - - assert_doc!( - &doc1, - map! { - "list" => { - list![ - { - map!{ - "map2" => { true }, - "key" => { 2 }, - }, - map!{ - "key" => { 1 }, - "map1" => { true }, - } - } - ] - } - } - ); -} - -#[test] -fn concurrently_assigned_nested_maps_should_not_merge() { - let mut doc1 = new_doc(); - let mut doc2 = new_doc(); - - let doc1_map_id = doc1 - .put_object(&automerge::ROOT, "config", ObjType::Map) - .unwrap(); - doc1.put(&doc1_map_id, "background", "blue").unwrap(); - - let doc2_map_id = doc2 - .put_object(&automerge::ROOT, "config", ObjType::Map) - .unwrap(); - doc2.put(&doc2_map_id, "logo_url", "logo.png").unwrap(); - - doc1.merge(&mut doc2).unwrap(); - - assert_doc!( - &doc1, - map! 
{ - "config" => { - map!{ - "background" => {"blue"} - }, - map!{ - "logo_url" => {"logo.png"} - } - } - } - ); -} - -#[test] -fn concurrent_insertions_at_different_list_positions() { - let (actor1, actor2) = sorted_actors(); - let mut doc1 = new_doc_with_actor(actor1); - let mut doc2 = new_doc_with_actor(actor2); - assert!(doc1.get_actor() < doc2.get_actor()); - - let list_id = doc1 - .put_object(&automerge::ROOT, "list", ObjType::List) - .unwrap(); - - doc1.insert(&list_id, 0, "one").unwrap(); - doc1.insert(&list_id, 1, "three").unwrap(); - doc2.merge(&mut doc1).unwrap(); - doc1.splice(&list_id, 1, 0, vec!["two".into()]).unwrap(); - doc2.insert(&list_id, 2, "four").unwrap(); - - doc1.merge(&mut doc2).unwrap(); - - assert_doc!( - &doc1, - map! { - "list" => { - list![ - {"one"}, - {"two"}, - {"three"}, - {"four"}, - ] - } - } - ); -} - -#[test] -fn concurrent_insertions_at_same_list_position() { - let (actor1, actor2) = sorted_actors(); - let mut doc1 = new_doc_with_actor(actor1); - let mut doc2 = new_doc_with_actor(actor2); - assert!(doc1.get_actor() < doc2.get_actor()); - - let list_id = doc1 - .put_object(&automerge::ROOT, "birds", ObjType::List) - .unwrap(); - doc1.insert(&list_id, 0, "parakeet").unwrap(); - - doc2.merge(&mut doc1).unwrap(); - doc1.insert(&list_id, 1, "starling").unwrap(); - doc2.insert(&list_id, 1, "chaffinch").unwrap(); - doc1.merge(&mut doc2).unwrap(); - - assert_doc!( - &doc1, - map! { - "birds" => { - list![ - { - "parakeet", - }, - { - "chaffinch", - }, - { - "starling", - }, - ] - }, - } - ); -} - -#[test] -fn concurrent_assignment_and_deletion_of_a_map_entry() { - let mut doc1 = new_doc(); - let mut doc2 = new_doc(); - doc1.put(&automerge::ROOT, "bestBird", "robin").unwrap(); - doc2.merge(&mut doc1).unwrap(); - doc1.delete(&automerge::ROOT, "bestBird").unwrap(); - doc2.put(&automerge::ROOT, "bestBird", "magpie").unwrap(); - - doc1.merge(&mut doc2).unwrap(); - - assert_doc!( - &doc1, - map! 
{ - "bestBird" => { - "magpie", - } - } - ); -} - -#[test] -fn concurrent_assignment_and_deletion_of_list_entry() { - let mut doc1 = new_doc(); - let mut doc2 = new_doc(); - let list_id = doc1 - .put_object(&automerge::ROOT, "birds", ObjType::List) - .unwrap(); - doc1.insert(&list_id, 0, "blackbird").unwrap(); - doc1.insert(&list_id, 1, "thrush").unwrap(); - doc1.insert(&list_id, 2, "goldfinch").unwrap(); - doc2.merge(&mut doc1).unwrap(); - doc1.put(&list_id, 1, "starling").unwrap(); - doc2.delete(&list_id, 1).unwrap(); - - assert_doc!( - &doc2, - map! { - "birds" => {list![ - {"blackbird"}, - {"goldfinch"}, - ]} - } - ); - - assert_doc!( - &doc1, - map! { - "birds" => {list![ - { "blackbird" }, - { "starling" }, - { "goldfinch" }, - ]} - } - ); - - doc1.merge(&mut doc2).unwrap(); - - assert_doc!( - &doc1, - map! { - "birds" => {list![ - { "blackbird" }, - { "starling" }, - { "goldfinch" }, - ]} - } - ); -} - -#[test] -fn insertion_after_a_deleted_list_element() { - let mut doc1 = new_doc(); - let mut doc2 = new_doc(); - let list_id = doc1 - .put_object(&automerge::ROOT, "birds", ObjType::List) - .unwrap(); - - doc1.insert(&list_id, 0, "blackbird").unwrap(); - doc1.insert(&list_id, 1, "thrush").unwrap(); - doc1.insert(&list_id, 2, "goldfinch").unwrap(); - - doc2.merge(&mut doc1).unwrap(); - - doc1.splice(&list_id, 1, 2, Vec::new()).unwrap(); - - doc2.splice(&list_id, 2, 0, vec!["starling".into()]) - .unwrap(); - - doc1.merge(&mut doc2).unwrap(); - - assert_doc!( - &doc1, - map! { - "birds" => {list![ - { "blackbird" }, - { "starling" } - ]} - } - ); - - doc2.merge(&mut doc1).unwrap(); - assert_doc!( - &doc2, - map! 
{ - "birds" => {list![ - { "blackbird" }, - { "starling" } - ]} - } - ); -} - -#[test] -fn concurrent_deletion_of_same_list_element() { - let mut doc1 = new_doc(); - let mut doc2 = new_doc(); - let list_id = doc1 - .put_object(&automerge::ROOT, "birds", ObjType::List) - .unwrap(); - - doc1.insert(&list_id, 0, "albatross").unwrap(); - doc1.insert(&list_id, 1, "buzzard").unwrap(); - doc1.insert(&list_id, 2, "cormorant").unwrap(); - - doc2.merge(&mut doc1).unwrap(); - - doc1.delete(&list_id, 1).unwrap(); - - doc2.delete(&list_id, 1).unwrap(); - - doc1.merge(&mut doc2).unwrap(); - - assert_doc!( - &doc1, - map! { - "birds" => {list![ - { "albatross" }, - { "cormorant" } - ]} - } - ); - - doc2.merge(&mut doc1).unwrap(); - assert_doc!( - &doc2, - map! { - "birds" => {list![ - { "albatross" }, - { "cormorant" } - ]} - } - ); -} - -#[test] -fn concurrent_updates_at_different_levels() { - let mut doc1 = new_doc(); - let mut doc2 = new_doc(); - - let animals = doc1 - .put_object(&automerge::ROOT, "animals", ObjType::Map) - .unwrap(); - let birds = doc1.put_object(&animals, "birds", ObjType::Map).unwrap(); - doc1.put(&birds, "pink", "flamingo").unwrap(); - doc1.put(&birds, "black", "starling").unwrap(); - - let mammals = doc1.put_object(&animals, "mammals", ObjType::List).unwrap(); - doc1.insert(&mammals, 0, "badger").unwrap(); - - doc2.merge(&mut doc1).unwrap(); - - doc1.put(&birds, "brown", "sparrow").unwrap(); - - doc2.delete(&animals, "birds").unwrap(); - doc1.merge(&mut doc2).unwrap(); - - assert_obj!( - &doc1, - &automerge::ROOT, - "animals", - map! { - "mammals" => { - list![{ "badger" }], - } - } - ); - - assert_obj!( - doc2.document(), - &automerge::ROOT, - "animals", - map! 
{ - "mammals" => { - list![{ "badger" }], - } - } - ); -} - -#[test] -fn concurrent_updates_of_concurrently_deleted_objects() { - let mut doc1 = new_doc(); - let mut doc2 = new_doc(); - - let birds = doc1 - .put_object(&automerge::ROOT, "birds", ObjType::Map) - .unwrap(); - let blackbird = doc1.put_object(&birds, "blackbird", ObjType::Map).unwrap(); - doc1.put(&blackbird, "feathers", "black").unwrap(); - - doc2.merge(&mut doc1).unwrap(); - - doc1.delete(&birds, "blackbird").unwrap(); - - doc2.put(&blackbird, "beak", "orange").unwrap(); - - doc1.merge(&mut doc2).unwrap(); - - assert_doc!( - &doc1, - map! { - "birds" => { - map!{}, - } - } - ); -} - -#[test] -fn does_not_interleave_sequence_insertions_at_same_position() { - let (actor1, actor2) = sorted_actors(); - let mut doc1 = new_doc_with_actor(actor1); - let mut doc2 = new_doc_with_actor(actor2); - - let wisdom = doc1 - .put_object(&automerge::ROOT, "wisdom", ObjType::List) - .unwrap(); - doc2.merge(&mut doc1).unwrap(); - - doc1.splice( - &wisdom, - 0, - 0, - vec![ - "to".into(), - "be".into(), - "is".into(), - "to".into(), - "do".into(), - ], - ) - .unwrap(); - - doc2.splice( - &wisdom, - 0, - 0, - vec![ - "to".into(), - "do".into(), - "is".into(), - "to".into(), - "be".into(), - ], - ) - .unwrap(); - - doc1.merge(&mut doc2).unwrap(); - - assert_doc!( - &doc1, - map! { - "wisdom" => {list![ - {"to"}, - {"do"}, - {"is"}, - {"to"}, - {"be"}, - {"to"}, - {"be"}, - {"is"}, - {"to"}, - {"do"}, - ]} - } - ); -} - -#[test] -fn mutliple_insertions_at_same_list_position_with_insertion_by_greater_actor_id() { - let (actor1, actor2) = sorted_actors(); - assert!(actor2 > actor1); - let mut doc1 = new_doc_with_actor(actor1); - let mut doc2 = new_doc_with_actor(actor2); - - let list = doc1 - .put_object(&automerge::ROOT, "list", ObjType::List) - .unwrap(); - doc1.insert(&list, 0, "two").unwrap(); - doc2.merge(&mut doc1).unwrap(); - - doc2.insert(&list, 0, "one").unwrap(); - assert_doc!( - &doc2, - map! 
{ - "list" => { list![ - { "one" }, - { "two" }, - ]} - } - ); -} - -#[test] -fn mutliple_insertions_at_same_list_position_with_insertion_by_lesser_actor_id() { - let (actor2, actor1) = sorted_actors(); - assert!(actor2 < actor1); - let mut doc1 = new_doc_with_actor(actor1); - let mut doc2 = new_doc_with_actor(actor2); - - let list = doc1 - .put_object(&automerge::ROOT, "list", ObjType::List) - .unwrap(); - doc1.insert(&list, 0, "two").unwrap(); - doc2.merge(&mut doc1).unwrap(); - - doc2.insert(&list, 0, "one").unwrap(); - assert_doc!( - &doc2, - map! { - "list" => { list![ - { "one" }, - { "two" }, - ]} - } - ); -} - -#[test] -fn insertion_consistent_with_causality() { - let mut doc1 = new_doc(); - let mut doc2 = new_doc(); - - let list = doc1 - .put_object(&automerge::ROOT, "list", ObjType::List) - .unwrap(); - doc1.insert(&list, 0, "four").unwrap(); - doc2.merge(&mut doc1).unwrap(); - doc2.insert(&list, 0, "three").unwrap(); - doc1.merge(&mut doc2).unwrap(); - doc1.insert(&list, 0, "two").unwrap(); - doc2.merge(&mut doc1).unwrap(); - doc2.insert(&list, 0, "one").unwrap(); - - assert_doc!( - &doc2, - map! { - "list" => { list![ - {"one"}, - {"two"}, - {"three" }, - {"four"}, - ]} - } - ); -} - -#[test] -fn save_and_restore_empty() { - let mut doc = new_doc(); - let loaded = Automerge::load(&doc.save()).unwrap(); - - assert_doc!(&loaded, map! 
{}); -} - -#[test] -fn save_restore_complex() { - let mut doc1 = new_doc(); - let todos = doc1 - .put_object(&automerge::ROOT, "todos", ObjType::List) - .unwrap(); - - let first_todo = doc1.insert_object(&todos, 0, ObjType::Map).unwrap(); - doc1.put(&first_todo, "title", "water plants").unwrap(); - doc1.put(&first_todo, "done", false).unwrap(); - - let mut doc2 = new_doc(); - doc2.merge(&mut doc1).unwrap(); - doc2.put(&first_todo, "title", "weed plants").unwrap(); - - doc1.put(&first_todo, "title", "kill plants").unwrap(); - doc1.merge(&mut doc2).unwrap(); - - let reloaded = Automerge::load(&doc1.save()).unwrap(); - - assert_doc!( - &reloaded, - map! { - "todos" => {list![ - {map!{ - "title" => { - "weed plants", - "kill plants", - }, - "done" => {false}, - }} - ]} - } - ); -} - -#[test] -fn handle_repeated_out_of_order_changes() -> Result<(), automerge::AutomergeError> { - let mut doc1 = new_doc(); - let list = doc1.put_object(ROOT, "list", ObjType::List)?; - doc1.insert(&list, 0, "a")?; - let mut doc2 = doc1.fork(); - doc1.insert(&list, 1, "b")?; - doc1.commit(); - doc1.insert(&list, 2, "c")?; - doc1.commit(); - doc1.insert(&list, 3, "d")?; - doc1.commit(); - let changes = doc1 - .get_changes(&[]) - .unwrap() - .into_iter() - .cloned() - .collect::>(); - doc2.apply_changes(changes[2..].to_vec())?; - doc2.apply_changes(changes[2..].to_vec())?; - doc2.apply_changes(changes)?; - assert_eq!(doc1.save(), doc2.save()); - Ok(()) -} - -#[test] -fn save_restore_complex_transactional() { - let mut doc1 = Automerge::new(); - let first_todo = doc1 - .transact::<_, _, automerge::AutomergeError>(|d| { - let todos = d.put_object(&automerge::ROOT, "todos", ObjType::List)?; - let first_todo = d.insert_object(&todos, 0, ObjType::Map)?; - d.put(&first_todo, "title", "water plants")?; - d.put(&first_todo, "done", false)?; - Ok(first_todo) - }) - .unwrap() - .result; - - let mut doc2 = Automerge::new(); - doc2.merge(&mut doc1).unwrap(); - doc2.transact::<_, _, 
automerge::AutomergeError>(|tx| { - tx.put(&first_todo, "title", "weed plants")?; - Ok(()) - }) - .unwrap(); - - doc1.transact::<_, _, automerge::AutomergeError>(|tx| { - tx.put(&first_todo, "title", "kill plants")?; - Ok(()) - }) - .unwrap(); - doc1.merge(&mut doc2).unwrap(); - - let reloaded = Automerge::load(&doc1.save()).unwrap(); - - assert_doc!( - &reloaded, - map! { - "todos" => {list![ - {map!{ - "title" => { - "weed plants", - "kill plants", - }, - "done" => {false}, - }} - ]} - } - ); -} - -#[test] -fn list_counter_del() -> Result<(), automerge::AutomergeError> { - let mut v = vec![ActorId::random(), ActorId::random(), ActorId::random()]; - v.sort(); - let actor1 = v[0].clone(); - let actor2 = v[1].clone(); - let actor3 = v[2].clone(); - - let mut doc1 = new_doc_with_actor(actor1); - - let list = doc1.put_object(ROOT, "list", ObjType::List)?; - doc1.insert(&list, 0, "a")?; - doc1.insert(&list, 1, "b")?; - doc1.insert(&list, 2, "c")?; - - let mut doc2 = AutoCommit::load(&doc1.save())?; - doc2.set_actor(actor2); - - let mut doc3 = AutoCommit::load(&doc1.save())?; - doc3.set_actor(actor3); - - doc1.put(&list, 1, ScalarValue::counter(0))?; - doc2.put(&list, 1, ScalarValue::counter(10))?; - doc3.put(&list, 1, ScalarValue::counter(100))?; - - doc1.put(&list, 2, ScalarValue::counter(0))?; - doc2.put(&list, 2, ScalarValue::counter(10))?; - doc3.put(&list, 2, 100)?; - - doc1.increment(&list, 1, 1)?; - doc1.increment(&list, 2, 1)?; - - doc1.merge(&mut doc2).unwrap(); - doc1.merge(&mut doc3).unwrap(); - - assert_obj!( - doc1.document(), - &automerge::ROOT, - "list", - list![ - { - "a", - }, - { - ScalarValue::counter(1), - ScalarValue::counter(10), - ScalarValue::counter(100) - }, - { - ScalarValue::Int(100), - ScalarValue::counter(1), - ScalarValue::counter(10), - } - ] - ); - - doc1.increment(&list, 1, 1)?; - doc1.increment(&list, 2, 1)?; - - assert_obj!( - doc1.document(), - &automerge::ROOT, - "list", - list![ - { - "a", - }, - { - ScalarValue::counter(2), - 
ScalarValue::counter(11), - ScalarValue::counter(101) - }, - { - ScalarValue::counter(2), - ScalarValue::counter(11), - } - ] - ); - - doc1.delete(&list, 2)?; - - assert_eq!(doc1.length(&list), 2); - - let doc4 = AutoCommit::load(&doc1.save())?; - - assert_eq!(doc4.length(&list), 2); - - doc1.delete(&list, 1)?; - - assert_eq!(doc1.length(&list), 1); - - let doc5 = AutoCommit::load(&doc1.save())?; - - assert_eq!(doc5.length(&list), 1); - - Ok(()) -} - -#[test] -fn observe_counter_change_application() { - let mut doc = AutoCommit::new(); - doc.put(ROOT, "counter", ScalarValue::counter(1)).unwrap(); - doc.increment(ROOT, "counter", 2).unwrap(); - doc.increment(ROOT, "counter", 5).unwrap(); - let changes = doc.get_changes(&[]).unwrap().into_iter().cloned(); - - let mut doc = AutoCommit::new().with_observer(VecOpObserver::default()); - doc.apply_changes(changes).unwrap(); -} - -#[test] -fn increment_non_counter_map() { - let mut doc = AutoCommit::new(); - // can't increment nothing - assert!(matches!( - doc.increment(ROOT, "nothing", 2), - Err(AutomergeError::MissingCounter) - )); - - // can't increment a non-counter - doc.put(ROOT, "non-counter", "mystring").unwrap(); - assert!(matches!( - doc.increment(ROOT, "non-counter", 2), - Err(AutomergeError::MissingCounter) - )); - - // can increment a counter still - doc.put(ROOT, "counter", ScalarValue::counter(1)).unwrap(); - assert!(matches!(doc.increment(ROOT, "counter", 2), Ok(()))); - - // can increment a counter that is part of a conflict - let mut doc1 = AutoCommit::new(); - doc1.set_actor(ActorId::from([1])); - let mut doc2 = AutoCommit::new(); - doc2.set_actor(ActorId::from([2])); - - doc1.put(ROOT, "key", ScalarValue::counter(1)).unwrap(); - doc2.put(ROOT, "key", "mystring").unwrap(); - doc1.merge(&mut doc2).unwrap(); - - assert!(matches!(doc1.increment(ROOT, "key", 2), Ok(()))); -} - -#[test] -fn increment_non_counter_list() { - let mut doc = AutoCommit::new(); - let list = doc.put_object(ROOT, "list", 
ObjType::List).unwrap(); - - // can't increment a non-counter - doc.insert(&list, 0, "mystring").unwrap(); - assert!(matches!( - doc.increment(&list, 0, 2), - Err(AutomergeError::MissingCounter) - )); - - // can increment a counter - doc.insert(&list, 0, ScalarValue::counter(1)).unwrap(); - assert!(matches!(doc.increment(&list, 0, 2), Ok(()))); - - // can increment a counter that is part of a conflict - let mut doc1 = AutoCommit::new(); - doc1.set_actor(ActorId::from([1])); - let list = doc1.put_object(ROOT, "list", ObjType::List).unwrap(); - doc1.insert(&list, 0, ()).unwrap(); - let mut doc2 = doc1.fork(); - doc2.set_actor(ActorId::from([2])); - - doc1.put(&list, 0, ScalarValue::counter(1)).unwrap(); - doc2.put(&list, 0, "mystring").unwrap(); - doc1.merge(&mut doc2).unwrap(); - - assert!(matches!(doc1.increment(&list, 0, 2), Ok(()))); -} - -#[test] -fn test_local_inc_in_map() { - let mut v = vec![ActorId::random(), ActorId::random(), ActorId::random()]; - v.sort(); - let actor1 = v[0].clone(); - let actor2 = v[1].clone(); - let actor3 = v[2].clone(); - - let mut doc1 = new_doc_with_actor(actor1); - doc1.put(&automerge::ROOT, "hello", "world").unwrap(); - - let mut doc2 = AutoCommit::load(&doc1.save()).unwrap(); - doc2.set_actor(actor2); - - let mut doc3 = AutoCommit::load(&doc1.save()).unwrap(); - doc3.set_actor(actor3); - - doc1.put(ROOT, "cnt", 20_u64).unwrap(); - doc2.put(ROOT, "cnt", ScalarValue::counter(0)).unwrap(); - doc3.put(ROOT, "cnt", ScalarValue::counter(10)).unwrap(); - doc1.merge(&mut doc2).unwrap(); - doc1.merge(&mut doc3).unwrap(); - - assert_doc! {doc1.document(), map!{ - "cnt" => { - 20_u64, - ScalarValue::counter(0), - ScalarValue::counter(10), - }, - "hello" => {"world"}, - }}; - - doc1.increment(ROOT, "cnt", 5).unwrap(); - - assert_doc! 
{doc1.document(), map!{ - "cnt" => { - ScalarValue::counter(5), - ScalarValue::counter(15), - }, - "hello" => {"world"}, - }}; - let mut doc4 = AutoCommit::load(&doc1.save()).unwrap(); - assert_eq!(doc4.save(), doc1.save()); -} - -#[test] -fn test_merging_test_conflicts_then_saving_and_loading() { - let (actor1, actor2) = sorted_actors(); - - let mut doc1 = new_doc_with_actor(actor1); - let text = doc1.put_object(ROOT, "text", ObjType::Text).unwrap(); - doc1.splice_text(&text, 0, 0, "hello").unwrap(); - - let mut doc2 = AutoCommit::load(&doc1.save()).unwrap(); - doc2.set_actor(actor2); - - assert_doc! {&doc2, map!{ - "text" => { list![{"h"}, {"e"}, {"l"}, {"l"}, {"o"}]}, - }}; - - doc2.splice_text(&text, 4, 1, "").unwrap(); - doc2.splice_text(&text, 4, 0, "!").unwrap(); - doc2.splice_text(&text, 5, 0, " ").unwrap(); - doc2.splice_text(&text, 6, 0, "world").unwrap(); - - assert_doc!( - &doc2, - map! { - "text" => { list![{"h"}, {"e"}, {"l"}, {"l"}, {"!"}, {" "}, {"w"} , {"o"}, {"r"}, {"l"}, {"d"}]} - } - ); - - let doc3 = AutoCommit::load(&doc2.save()).unwrap(); - - assert_doc!( - &doc3, - map! { - "text" => { list![{"h"}, {"e"}, {"l"}, {"l"}, {"!"}, {" "}, {"w"} , {"o"}, {"r"}, {"l"}, {"d"}]} - } - ); -} - -/// Surfaces an error which occurs when loading a document with a change which only contains a -/// delete operation. In this case the delete operation doesn't appear in the encoded document -/// operations except as a succ, so the max_op was calculated incorectly. 
-#[test] -fn delete_only_change() { - let actor = automerge::ActorId::random(); - let mut doc1 = automerge::Automerge::new().with_actor(actor.clone()); - let list = doc1 - .transact::<_, _, automerge::AutomergeError>(|d| { - let l = d.put_object(&automerge::ROOT, "list", ObjType::List)?; - d.insert(&l, 0, 'a')?; - Ok(l) - }) - .unwrap() - .result; - - let mut doc2 = automerge::Automerge::load(&doc1.save()) - .unwrap() - .with_actor(actor.clone()); - doc2.transact::<_, _, automerge::AutomergeError>(|d| d.delete(&list, 0)) - .unwrap(); - - let mut doc3 = automerge::Automerge::load(&doc2.save()) - .unwrap() - .with_actor(actor.clone()); - doc3.transact(|d| d.insert(&list, 0, "b")).unwrap(); - - let doc4 = automerge::Automerge::load(&doc3.save()) - .unwrap() - .with_actor(actor); - - let changes = doc4.get_changes(&[]).unwrap(); - assert_eq!(changes.len(), 3); - let c = changes[2]; - assert_eq!(c.start_op().get(), 4); -} - -/// Expose an error where a document which contained a create operation without any subsequent -/// operations targeting the created object did not load the object correctly. -#[test] -fn save_and_reload_create_object() { - let actor = automerge::ActorId::random(); - let mut doc = automerge::Automerge::new().with_actor(actor); - - // Create a change containing an object but no other operations - let list = doc - .transact::<_, _, automerge::AutomergeError>(|d| { - d.put_object(&automerge::ROOT, "foo", ObjType::List) - }) - .unwrap() - .result; - - // Save and load the change - let mut doc2 = automerge::Automerge::load(&doc.save()).unwrap(); - doc2.transact::<_, _, automerge::AutomergeError>(|d| { - d.insert(&list, 0, 1_u64)?; - Ok(()) - }) - .unwrap(); - - assert_doc!(&doc2, map! {"foo" => { list! 
[{1_u64}]}}); - - let _doc3 = automerge::Automerge::load(&doc2.save()).unwrap(); -} - -#[test] -fn test_compressed_changes() { - let mut doc = new_doc(); - // crate::storage::DEFLATE_MIN_SIZE is 250, so this should trigger compression - doc.put(ROOT, "bytes", ScalarValue::Bytes(vec![10; 300])) - .unwrap(); - let mut change = doc.get_last_local_change().unwrap().clone(); - let uncompressed = change.raw_bytes().to_vec(); - assert!(uncompressed.len() > 256); - let compressed = change.bytes().to_vec(); - assert!(compressed.len() < uncompressed.len()); - - let reloaded = automerge::Change::try_from(&compressed[..]).unwrap(); - assert_eq!(change.raw_bytes(), reloaded.raw_bytes()); -} - -#[test] -fn test_compressed_doc_cols() { - // In this test, the keyCtr column is long enough for deflate compression to kick in, but the - // keyStr column is short. Thus, the deflate bit gets set for keyCtr but not for keyStr. - // When checking whether the columns appear in ascending order, we must ignore the deflate bit. - let mut doc = new_doc(); - let list = doc.put_object(ROOT, "list", ObjType::List).unwrap(); - let mut expected = Vec::new(); - for i in 0..200 { - doc.insert(&list, i, i as u64).unwrap(); - expected.push(i as u64); - } - let uncompressed = doc.save_nocompress(); - let compressed = doc.save(); - assert!(compressed.len() < uncompressed.len()); - let loaded = automerge::Automerge::load(&compressed).unwrap(); - assert_doc!( - &loaded, - map! 
{ - "list" => { expected} - } - ); -} - -#[test] -fn test_change_encoding_expanded_change_round_trip() { - let change_bytes: Vec = vec![ - 0x85, 0x6f, 0x4a, 0x83, // magic bytes - 0xb2, 0x98, 0x9e, 0xa9, // checksum - 1, 61, 0, 2, 0x12, 0x34, // chunkType: change, length, deps, actor '1234' - 1, 1, 252, 250, 220, 255, 5, // seq, startOp, time - 14, 73, 110, 105, 116, 105, 97, 108, 105, 122, 97, 116, 105, 111, - 110, // message: 'Initialization' - 0, 6, // actor list, column count - 0x15, 3, 0x34, 1, 0x42, 2, // keyStr, insert, action - 0x56, 2, 0x57, 1, 0x70, 2, // valLen, valRaw, predNum - 0x7f, 1, 0x78, // keyStr: 'x' - 1, // insert: false - 0x7f, 1, // action: set - 0x7f, 19, // valLen: 1 byte of type uint - 1, // valRaw: 1 - 0x7f, 0, // predNum: 0 - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, // 10 trailing bytes - ]; - let change = automerge::Change::try_from(&change_bytes[..]).unwrap(); - assert_eq!(change.raw_bytes(), change_bytes); - let expanded = automerge::ExpandedChange::from(&change); - let unexpanded: automerge::Change = expanded.try_into().unwrap(); - assert_eq!(unexpanded.raw_bytes(), change_bytes); -} - -#[test] -fn save_and_load_incremented_counter() { - let mut doc = AutoCommit::new(); - doc.put(ROOT, "counter", ScalarValue::counter(1)).unwrap(); - doc.commit(); - doc.increment(ROOT, "counter", 1).unwrap(); - doc.commit(); - let changes1: Vec = doc.get_changes(&[]).unwrap().into_iter().cloned().collect(); - let json: Vec<_> = changes1 - .iter() - .map(|c| serde_json::to_string(&c.decode()).unwrap()) - .collect(); - let changes2: Vec = json - .iter() - .map(|j| serde_json::from_str::(j).unwrap().into()) - .collect(); - - assert_eq!(changes1, changes2); -} - -#[test] -fn load_incremental_with_corrupted_tail() { - let mut doc = AutoCommit::new(); - doc.put(ROOT, "key", ScalarValue::Str("value".into())) - .unwrap(); - doc.commit(); - let mut bytes = doc.save(); - bytes.extend_from_slice(&[1, 2, 3, 4]); - let mut loaded = Automerge::new(); - let loaded_len = 
loaded.load_incremental(&bytes).unwrap(); - assert_eq!(loaded_len, 1); - assert_doc!( - &loaded, - map! { - "key" => { "value" }, - } - ); -} - -#[test] -fn load_doc_with_deleted_objects() { - // Reproduces an issue where a document with deleted objects failed to load - let mut doc = AutoCommit::new(); - doc.put_object(ROOT, "list", ObjType::List).unwrap(); - doc.put_object(ROOT, "text", ObjType::Text).unwrap(); - doc.put_object(ROOT, "map", ObjType::Map).unwrap(); - doc.put_object(ROOT, "table", ObjType::Table).unwrap(); - doc.delete(&ROOT, "list").unwrap(); - doc.delete(&ROOT, "text").unwrap(); - doc.delete(&ROOT, "map").unwrap(); - doc.delete(&ROOT, "table").unwrap(); - let saved = doc.save(); - Automerge::load(&saved).unwrap(); -} - -#[test] -fn insert_after_many_deletes() { - let mut doc = AutoCommit::new(); - let obj = doc.put_object(&ROOT, "object", ObjType::Map).unwrap(); - for i in 0..100 { - doc.put(&obj, i.to_string(), i).unwrap(); - doc.delete(&obj, i.to_string()).unwrap(); - } -} - -#[test] -fn simple_bad_saveload() { - let mut doc = Automerge::new(); - doc.transact::<_, _, AutomergeError>(|d| { - d.put(ROOT, "count", 0)?; - Ok(()) - }) - .unwrap(); - - doc.transact::<_, _, AutomergeError>(|_d| Ok(())).unwrap(); - - doc.transact::<_, _, AutomergeError>(|d| { - d.put(ROOT, "count", 0)?; - Ok(()) - }) - .unwrap(); - - let bytes = doc.save(); - Automerge::load(&bytes).unwrap(); -} - -#[test] -fn ops_on_wrong_objets() -> Result<(), AutomergeError> { - let mut doc = AutoCommit::new(); - let list = doc.put_object(&automerge::ROOT, "list", ObjType::List)?; - doc.insert(&list, 0, "a")?; - doc.insert(&list, 1, "b")?; - let e1 = doc.put(&list, "a", "AAA"); - assert_eq!(e1, Err(AutomergeError::InvalidOp(ObjType::List))); - let e2 = doc.splice_text(&list, 0, 0, "hello world"); - assert_eq!(e2, Err(AutomergeError::InvalidOp(ObjType::List))); - let map = doc.put_object(&automerge::ROOT, "map", ObjType::Map)?; - doc.put(&map, "a", "AAA")?; - doc.put(&map, "b", 
"BBB")?; - let e3 = doc.insert(&map, 0, "b"); - assert_eq!(e3, Err(AutomergeError::InvalidOp(ObjType::Map))); - let e4 = doc.splice_text(&map, 0, 0, "hello world"); - assert_eq!(e4, Err(AutomergeError::InvalidOp(ObjType::Map))); - let text = doc.put_object(&automerge::ROOT, "text", ObjType::Text)?; - doc.splice_text(&text, 0, 0, "hello world")?; - let e5 = doc.put(&text, "a", "AAA"); - assert_eq!(e5, Err(AutomergeError::InvalidOp(ObjType::Text))); - //let e6 = doc.insert(&text, 0, "b"); - //assert_eq!(e6, Err(AutomergeError::InvalidOp(ObjType::Text))); - Ok(()) -} - -#[test] -fn fuzz_crashers() { - let paths = fs::read_dir("./tests/fuzz-crashers").unwrap(); - - for path in paths { - // uncomment this line to figure out which fixture is crashing: - // println!("{:?}", path.as_ref().unwrap().path().display()); - let bytes = fs::read(path.as_ref().unwrap().path()); - let res = Automerge::load(&bytes.unwrap()); - assert!(res.is_err()); - } -} - -fn fixture(name: &str) -> Vec { - fs::read("./tests/fixtures/".to_owned() + name).unwrap() -} - -#[test] -fn overlong_leb() { - // the value metadata says "2", but the LEB is only 1-byte long and there's an extra 0 - assert!(Automerge::load(&fixture("counter_value_has_incorrect_meta.automerge")).is_err()); - // the LEB is overlong (using 2 bytes where one would have sufficed) - assert!(Automerge::load(&fixture("counter_value_is_overlong.automerge")).is_err()); - // the LEB is correct - assert!(Automerge::load(&fixture("counter_value_is_ok.automerge")).is_ok()); -} - -#[test] -fn load() { - fn check_fixture(name: &str) { - let doc = Automerge::load(&fixture(name)).unwrap(); - let map_id = doc.get(ROOT, "a").unwrap().unwrap().1; - assert_eq!(doc.get(map_id, "a").unwrap().unwrap().0, "b".into()); - } - - check_fixture("two_change_chunks.automerge"); - check_fixture("two_change_chunks_compressed.automerge"); - check_fixture("two_change_chunks_out_of_order.automerge"); -} - -#[test] -fn negative_64() { - let mut doc = 
Automerge::new(); - assert!(doc.transact(|d| { d.put(ROOT, "a", -64_i64) }).is_ok()) -} - -#[test] -fn obj_id_64bits() { - // this change has an opId of 2**42, which when cast to a 32-bit int gives 0. - // The file should either fail to load (a limit of ~4 billion ops per doc seems reasonable), or be handled correctly. - if let Ok(doc) = Automerge::load(&fixture("64bit_obj_id_change.automerge")) { - let map_id = doc.get(ROOT, "a").unwrap().unwrap().1; - assert!(map_id != ROOT) - } - - // this fixture is the same as the above, but as a document chunk. - if let Ok(doc) = Automerge::load(&fixture("64bit_obj_id_doc.automerge")) { - let map_id = doc.get(ROOT, "a").unwrap().unwrap().1; - assert!(map_id != ROOT) - } -} - -#[test] -fn bad_change_on_optree_node_boundary() { - let mut doc = Automerge::new(); - doc.transact::<_, _, AutomergeError>(|d| { - d.put(ROOT, "a", "z")?; - d.put(ROOT, "b", 0)?; - d.put(ROOT, "c", 0)?; - Ok(()) - }) - .unwrap(); - let iterations = 15_u64; - for i in 0_u64..iterations { - doc.transact::<_, _, AutomergeError>(|d| { - let s = "a".repeat(i as usize); - d.put(ROOT, "a", s)?; - d.put(ROOT, "b", i + 1)?; - d.put(ROOT, "c", i + 1)?; - Ok(()) - }) - .unwrap(); - } - let mut doc2 = Automerge::load(doc.save().as_slice()).unwrap(); - doc.transact::<_, _, AutomergeError>(|d| { - let i = iterations + 2; - let s = "a".repeat(i as usize); - d.put(ROOT, "a", s)?; - d.put(ROOT, "b", i)?; - d.put(ROOT, "c", i)?; - Ok(()) - }) - .unwrap(); - let change = doc.get_changes(&doc2.get_heads()).unwrap(); - doc2.apply_changes(change.into_iter().cloned().collect::>()) - .unwrap(); - Automerge::load(doc2.save().as_slice()).unwrap(); -} diff --git a/rust/edit-trace/Makefile b/rust/edit-trace/Makefile deleted file mode 100644 index e0e77643..00000000 --- a/rust/edit-trace/Makefile +++ /dev/null @@ -1,25 +0,0 @@ -.PHONY: rust -rust: - cargo run --release - -.PHONY: build-wasm -build-wasm: - cd ../automerge-wasm && yarn - cd ../automerge-wasm && yarn release - 
-.PHONY: wasm -wasm: build-wasm - node automerge-wasm.js - -.PHONY: build-js -build-js: build-wasm - cd ../automerge-js && yarn - cd ../automerge-js && yarn build - -.PHONY: js -js: build-js - node automerge-js.js - -.PHONY: baseline -baseline: - node baseline.js diff --git a/rust/edit-trace/README.md b/rust/edit-trace/README.md deleted file mode 100644 index aabe83dc..00000000 --- a/rust/edit-trace/README.md +++ /dev/null @@ -1,48 +0,0 @@ -# Edit trace benchmarks - -Try the different editing traces on different automerge implementations - -## Automerge Experiement - pure rust - -```sh -make rust -``` - -### Benchmarks - -There are some criterion benchmarks in the `benches` folder which can be run with `cargo bench` or `cargo criterion`. -For flamegraphing, `cargo flamegraph --bench main -- --bench "save" # or "load" or "replay" or nothing` can be useful. - -## Automerge Experiement - wasm api - -```sh -make wasm -``` - -## Automerge Experiment - JS wrapper - -```sh -make js -``` - -## Automerge 1.0 pure javascript - new fast backend - -This assumes automerge has been checked out in a directory along side this repo - -```sh -node automerge-1.0.js -``` - -## Automerge 1.0 with rust backend - -This assumes automerge has been checked out in a directory along side this repo - -```sh -node automerge-rs.js -``` - -## Baseline Test. 
Javascript Array with no CRDT info - -```sh -make baseline -``` diff --git a/scripts/ci/advisory b/scripts/ci/advisory index 6da4a578..07e8c72e 100755 --- a/scripts/ci/advisory +++ b/scripts/ci/advisory @@ -1,7 +1,6 @@ #!/usr/bin/env bash set -eoux pipefail -cd rust cargo deny --version cargo deny check advisories cargo deny check licenses diff --git a/scripts/ci/build-test b/scripts/ci/build-test index de592f7e..dbd89f5d 100755 --- a/scripts/ci/build-test +++ b/scripts/ci/build-test @@ -1,7 +1,6 @@ #!/usr/bin/env bash set -eoux pipefail -cd rust cargo build --workspace --all-features RUST_LOG=error cargo test --workspace --all-features diff --git a/scripts/ci/cmake-build b/scripts/ci/cmake-build deleted file mode 100755 index 25a69756..00000000 --- a/scripts/ci/cmake-build +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env bash -set -eoux pipefail - -# see https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself -THIS_SCRIPT="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" -# \note CMake's default build types are "Debug", "MinSizeRel", "Release" and -# "RelWithDebInfo" but custom ones can also be defined so we pass it verbatim. -BUILD_TYPE=$1; -LIB_TYPE=$2; -if [ "$(echo "${LIB_TYPE}" | tr '[:upper:]' '[:lower:]')" == "shared" ]; then - SHARED_TOGGLE="ON" -else - SHARED_TOGGLE="OFF" -fi -C_PROJECT=$THIS_SCRIPT/../../rust/automerge-c; -mkdir -p $C_PROJECT/build; -cd $C_PROJECT/build; -cmake --log-level=ERROR -B . -S .. -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DBUILD_SHARED_LIBS=$SHARED_TOGGLE; -cmake --build . 
--target automerge_test; diff --git a/scripts/ci/deno_tests b/scripts/ci/deno_tests deleted file mode 100755 index 9f297557..00000000 --- a/scripts/ci/deno_tests +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env bash -set -eou pipefail -# see https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself -THIS_SCRIPT="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" -WASM_PROJECT=$THIS_SCRIPT/../../rust/automerge-wasm; -JS_PROJECT=$THIS_SCRIPT/../../javascript; -E2E_PROJECT=$THIS_SCRIPT/../../javascript/e2e; - -echo "building wasm and js" -yarn --cwd $E2E_PROJECT install; -yarn --cwd $E2E_PROJECT e2e buildjs; -cp $WASM_PROJECT/index.d.ts $WASM_PROJECT/deno/; -sed -i '1i /// ' $WASM_PROJECT/deno/automerge_wasm.js; - -echo "Running Wasm Deno tests"; -deno test $WASM_PROJECT/deno-tests/deno.ts --allow-read; - -echo "Running JS Deno tests"; -ROOT_MODULE=$WASM_PROJECT/deno yarn --cwd $JS_PROJECT deno:build; -yarn --cwd $JS_PROJECT deno:test; - diff --git a/scripts/ci/docs b/scripts/ci/docs new file mode 100755 index 00000000..647880ce --- /dev/null +++ b/scripts/ci/docs @@ -0,0 +1,5 @@ +#!/usr/bin/env bash +set -eoux pipefail + +RUSTDOCFLAGS="-D rustdoc::broken-intra-doc-links -D warnings" \ +cargo doc --no-deps --workspace --document-private-items diff --git a/scripts/ci/fmt b/scripts/ci/fmt index 27235f92..d3d7e28c 100755 --- a/scripts/ci/fmt +++ b/scripts/ci/fmt @@ -1,5 +1,4 @@ #!/usr/bin/env bash set -eoux pipefail -cd rust cargo fmt -- --check diff --git a/scripts/ci/fmt_js b/scripts/ci/fmt_js deleted file mode 100755 index 8f387b6a..00000000 --- a/scripts/ci/fmt_js +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env bash -set -eoux pipefail - -# see https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself -THIS_SCRIPT="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" -yarn --cwd $THIS_SCRIPT/../../javascript prettier -c . 
- diff --git a/scripts/ci/js_tests b/scripts/ci/js_tests index 68205a33..6c4a16d4 100755 --- a/scripts/ci/js_tests +++ b/scripts/ci/js_tests @@ -1,15 +1,18 @@ -#!/usr/bin/env bash -set -eoux pipefail +THIS_SCRIPT=$(dirname "$0"); +WASM_PROJECT=$THIS_SCRIPT/../../automerge-wasm; +JS_PROJECT=$THIS_SCRIPT/../../automerge-js; + +yarn --cwd $WASM_PROJECT install; +# This will take care of running wasm-pack +yarn --cwd $WASM_PROJECT build; +# If the dependencies are already installed we delete automerge-wasm. This makes +# this script usable for iterative development. +#if [ -d $JS_PROJECT/node_modules/automerge-wasm ]; then +# rm -rf $JS_PROJECT/node_modules/automerge-wasm +#fi +# --check-files forces yarn to check if the local dep has changed +yarn --cwd $JS_PROJECT install --check-files; +yarn --cwd $JS_PROJECT test; + -# see https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself -THIS_SCRIPT="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" -WASM_PROJECT=$THIS_SCRIPT/../../rust/automerge-wasm; -JS_PROJECT=$THIS_SCRIPT/../../javascript; -E2E_PROJECT=$THIS_SCRIPT/../../javascript/e2e; -yarn --cwd $E2E_PROJECT install; -# This will build the automerge-wasm project, publish it to a local NPM -# repository, then run `yarn build` in the `javascript` directory with -# the local registry -yarn --cwd $E2E_PROJECT e2e buildjs; -yarn --cwd $JS_PROJECT test diff --git a/scripts/ci/lint b/scripts/ci/lint index 87a16765..1b29d909 100755 --- a/scripts/ci/lint +++ b/scripts/ci/lint @@ -1,11 +1,7 @@ #!/usr/bin/env bash set -eoux pipefail -# see https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself -THIS_SCRIPT="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" - -cd $THIS_SCRIPT/../../rust # Force clippy to consider all local sources # https://github.com/rust-lang/rust-clippy/issues/4612 find . 
-name "*.rs" -not -path "./target/*" -exec touch "{}" + -cargo clippy --all-targets --all-features -- -D warnings +cargo clippy --all-features --all-targets -- -D warnings diff --git a/scripts/ci/run b/scripts/ci/run index aebfe4c4..a0fa3616 100755 --- a/scripts/ci/run +++ b/scripts/ci/run @@ -2,12 +2,9 @@ set -eou pipefail ./scripts/ci/fmt -./scripts/ci/fmt_js ./scripts/ci/lint ./scripts/ci/build-test -./scripts/ci/rust-docs +./scripts/ci/docs ./scripts/ci/advisory ./scripts/ci/wasm_tests -./scripts/ci/deno_tests ./scripts/ci/js_tests -./scripts/ci/cmake-build Release static diff --git a/scripts/ci/rust-docs b/scripts/ci/rust-docs deleted file mode 100755 index 4be0ed9a..00000000 --- a/scripts/ci/rust-docs +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env bash -set -eoux pipefail - -# see https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself -THIS_SCRIPT="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" -cd $THIS_SCRIPT/../../rust -RUSTDOCFLAGS="-D rustdoc::broken-intra-doc-links -D warnings" \ -cargo doc --no-deps --workspace --document-private-items diff --git a/scripts/ci/wasm_tests b/scripts/ci/wasm_tests index fac344d8..778e1e1f 100755 --- a/scripts/ci/wasm_tests +++ b/scripts/ci/wasm_tests @@ -1,6 +1,5 @@ -# see https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself -THIS_SCRIPT="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" -WASM_PROJECT=$THIS_SCRIPT/../../rust/automerge-wasm; +THIS_SCRIPT=$(dirname "$0"); +WASM_PROJECT=$THIS_SCRIPT/../../automerge-wasm; yarn --cwd $WASM_PROJECT install; yarn --cwd $WASM_PROJECT build;