diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 4fc75fef..8519ac5e 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -2,10 +2,10 @@ name: CI on: push: branches: - - main + - main pull_request: branches: - - main + - main jobs: fmt: runs-on: ubuntu-latest @@ -14,7 +14,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.60.0 + toolchain: 1.67.0 default: true components: rustfmt - uses: Swatinem/rust-cache@v1 @@ -28,7 +28,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.60.0 + toolchain: 1.67.0 default: true components: clippy - uses: Swatinem/rust-cache@v1 @@ -42,7 +42,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.60.0 + toolchain: 1.67.0 default: true - uses: Swatinem/rust-cache@v1 - name: Build rust docs @@ -51,9 +51,6 @@ jobs: - name: Install doxygen run: sudo apt-get install -y doxygen shell: bash - - name: Build C docs - run: ./scripts/ci/cmake-docs - shell: bash cargo-deny: runs-on: ubuntu-latest @@ -67,23 +64,50 @@ jobs: - uses: actions/checkout@v2 - uses: EmbarkStudios/cargo-deny-action@v1 with: + arguments: '--manifest-path ./rust/Cargo.toml' command: check ${{ matrix.checks }} wasm_tests: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Install wasm-pack - run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh + - name: Install wasm-bindgen-cli + run: cargo install wasm-bindgen-cli wasm-opt + - name: Install wasm32 target + run: rustup target add wasm32-unknown-unknown - name: run tests run: ./scripts/ci/wasm_tests + deno_tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: denoland/setup-deno@v1 + with: + deno-version: v1.x + - name: Install wasm-bindgen-cli + run: cargo install wasm-bindgen-cli wasm-opt + - name: Install wasm32 target + run: rustup target add wasm32-unknown-unknown + - name: run tests + run: ./scripts/ci/deno_tests + + js_fmt: + 
runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: install + run: yarn global add prettier + - name: format + run: prettier -c javascript/.prettierrc javascript js_tests: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Install wasm-pack - run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh + - name: Install wasm-bindgen-cli + run: cargo install wasm-bindgen-cli wasm-opt + - name: Install wasm32 target + run: rustup target add wasm32-unknown-unknown - name: run tests run: ./scripts/ci/js_tests @@ -94,7 +118,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.60.0 + toolchain: nightly-2023-01-26 default: true - uses: Swatinem/rust-cache@v1 - name: Install CMocka @@ -103,6 +127,8 @@ jobs: uses: jwlawson/actions-setup-cmake@v1.12 with: cmake-version: latest + - name: Install rust-src + run: rustup component add rust-src - name: Build and test C bindings run: ./scripts/ci/cmake-build Release Static shell: bash @@ -112,9 +138,7 @@ jobs: strategy: matrix: toolchain: - - 1.60.0 - - nightly - continue-on-error: ${{ matrix.toolchain == 'nightly' }} + - 1.67.0 steps: - uses: actions/checkout@v2 - uses: actions-rs/toolchain@v1 @@ -133,7 +157,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.60.0 + toolchain: 1.67.0 default: true - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/build-test @@ -146,7 +170,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.60.0 + toolchain: 1.67.0 default: true - uses: Swatinem/rust-cache@v1 - run: ./scripts/ci/build-test diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 1f682628..b501d526 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -30,28 +30,16 @@ jobs: uses: actions-rs/cargo@v1 with: command: clean - args: --doc + args: --manifest-path ./rust/Cargo.toml --doc - name: Build Rust docs uses: actions-rs/cargo@v1 with: command: doc - 
args: --workspace --all-features --no-deps + args: --manifest-path ./rust/Cargo.toml --workspace --all-features --no-deps - name: Move Rust docs - run: mkdir -p docs && mv target/doc/* docs/. - shell: bash - - - name: Install doxygen - run: sudo apt-get install -y doxygen - shell: bash - - - name: Build C docs - run: ./scripts/ci/cmake-docs - shell: bash - - - name: Move C docs - run: mkdir -p docs/automerge-c && mv automerge-c/build/src/html/* docs/automerge-c/. + run: mkdir -p docs && mv rust/target/doc/* docs/. shell: bash - name: Configure root page diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml new file mode 100644 index 00000000..762671ff --- /dev/null +++ b/.github/workflows/release.yaml @@ -0,0 +1,214 @@ +name: Release +on: + push: + branches: + - main + +jobs: + check_if_wasm_version_upgraded: + name: Check if WASM version has been upgraded + runs-on: ubuntu-latest + outputs: + wasm_version: ${{ steps.version-updated.outputs.current-package-version }} + wasm_has_updated: ${{ steps.version-updated.outputs.has-updated }} + steps: + - uses: JiPaix/package-json-updated-action@v1.0.5 + id: version-updated + with: + path: rust/automerge-wasm/package.json + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + publish-wasm: + name: Publish WASM package + runs-on: ubuntu-latest + needs: + - check_if_wasm_version_upgraded + # We create release only if the version in the package.json has been upgraded + if: needs.check_if_wasm_version_upgraded.outputs.wasm_has_updated == 'true' + steps: + - uses: actions/setup-node@v3 + with: + node-version: '16.x' + registry-url: 'https://registry.npmjs.org' + - uses: denoland/setup-deno@v1 + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + ref: ${{ github.ref }} + - name: Get rid of local github workflows + run: rm -r .github/workflows + - name: Remove tmp_branch if it exists + run: git push origin :tmp_branch || true + - run: git checkout -b tmp_branch + - name: Install wasm-bindgen-cli + run: 
cargo install wasm-bindgen-cli wasm-opt + - name: Install wasm32 target + run: rustup target add wasm32-unknown-unknown + - name: run wasm js tests + id: wasm_js_tests + run: ./scripts/ci/wasm_tests + - name: run wasm deno tests + id: wasm_deno_tests + run: ./scripts/ci/deno_tests + - name: build release + id: build_release + run: | + npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm run release + - name: Collate deno release files + if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success' + run: | + mkdir $GITHUB_WORKSPACE/deno_wasm_dist + cp $GITHUB_WORKSPACE/rust/automerge-wasm/deno/* $GITHUB_WORKSPACE/deno_wasm_dist + cp $GITHUB_WORKSPACE/rust/automerge-wasm/index.d.ts $GITHUB_WORKSPACE/deno_wasm_dist + cp $GITHUB_WORKSPACE/rust/automerge-wasm/README.md $GITHUB_WORKSPACE/deno_wasm_dist + cp $GITHUB_WORKSPACE/rust/automerge-wasm/LICENSE $GITHUB_WORKSPACE/deno_wasm_dist + sed -i '1i /// ' $GITHUB_WORKSPACE/deno_wasm_dist/automerge_wasm.js + - name: Create npm release + if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success' + run: | + if [ "$(npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm show . version)" = "$VERSION" ]; then + echo "This version is already published" + exit 0 + fi + EXTRA_ARGS="--access public" + if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then + echo "Is pre-release version" + EXTRA_ARGS="$EXTRA_ARGS --tag next" + fi + if [ "$NODE_AUTH_TOKEN" = "" ]; then + echo "Can't publish on NPM, You need a NPM_TOKEN secret." 
+ false + fi + npm publish $GITHUB_WORKSPACE/rust/automerge-wasm $EXTRA_ARGS + env: + NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}} + VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }} + - name: Commit wasm deno release files + run: | + git config --global user.name "actions" + git config --global user.email actions@github.com + git add $GITHUB_WORKSPACE/deno_wasm_dist + git commit -am "Add deno release files" + git push origin tmp_branch + - name: Tag wasm release + if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success' + uses: softprops/action-gh-release@v1 + with: + name: Automerge Wasm v${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }} + tag_name: js/automerge-wasm-${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }} + target_commitish: tmp_branch + generate_release_notes: false + draft: false + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Remove tmp_branch + run: git push origin :tmp_branch + check_if_js_version_upgraded: + name: Check if JS version has been upgraded + runs-on: ubuntu-latest + outputs: + js_version: ${{ steps.version-updated.outputs.current-package-version }} + js_has_updated: ${{ steps.version-updated.outputs.has-updated }} + steps: + - uses: JiPaix/package-json-updated-action@v1.0.5 + id: version-updated + with: + path: javascript/package.json + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + publish-js: + name: Publish JS package + runs-on: ubuntu-latest + needs: + - check_if_js_version_upgraded + - check_if_wasm_version_upgraded + - publish-wasm + # We create release only if the version in the package.json has been upgraded and after the WASM release + if: | + (always() && ! 
cancelled()) && + (needs.publish-wasm.result == 'success' || needs.publish-wasm.result == 'skipped') && + needs.check_if_js_version_upgraded.outputs.js_has_updated == 'true' + steps: + - uses: actions/setup-node@v3 + with: + node-version: '16.x' + registry-url: 'https://registry.npmjs.org' + - uses: denoland/setup-deno@v1 + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + ref: ${{ github.ref }} + - name: Get rid of local github workflows + run: rm -r .github/workflows + - name: Remove js_tmp_branch if it exists + run: git push origin :js_tmp_branch || true + - run: git checkout -b js_tmp_branch + - name: check js formatting + run: | + yarn global add prettier + prettier -c javascript/.prettierrc javascript + - name: run js tests + id: js_tests + run: | + cargo install wasm-bindgen-cli wasm-opt + rustup target add wasm32-unknown-unknown + ./scripts/ci/js_tests + - name: build js release + id: build_release + run: | + npm --prefix $GITHUB_WORKSPACE/javascript run build + - name: build js deno release + id: build_deno_release + run: | + VERSION=$WASM_VERSION npm --prefix $GITHUB_WORKSPACE/javascript run deno:build + env: + WASM_VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }} + - name: run deno tests + id: deno_tests + run: | + npm --prefix $GITHUB_WORKSPACE/javascript run deno:test + - name: Collate deno release files + if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success' + run: | + mkdir $GITHUB_WORKSPACE/deno_js_dist + cp $GITHUB_WORKSPACE/javascript/deno_dist/* $GITHUB_WORKSPACE/deno_js_dist + - name: Create npm release + if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success' + run: | + if [ "$(npm --prefix $GITHUB_WORKSPACE/javascript show . 
version)" = "$VERSION" ]; then + echo "This version is already published" + exit 0 + fi + EXTRA_ARGS="--access public" + if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then + echo "Is pre-release version" + EXTRA_ARGS="$EXTRA_ARGS --tag next" + fi + if [ "$NODE_AUTH_TOKEN" = "" ]; then + echo "Can't publish on NPM, You need a NPM_TOKEN secret." + false + fi + npm publish $GITHUB_WORKSPACE/javascript $EXTRA_ARGS + env: + NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}} + VERSION: ${{ needs.check_if_js_version_upgraded.outputs.js_version }} + - name: Commit js deno release files + run: | + git config --global user.name "actions" + git config --global user.email actions@github.com + git add $GITHUB_WORKSPACE/deno_js_dist + git commit -am "Add deno js release files" + git push origin js_tmp_branch + - name: Tag JS release + if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success' + uses: softprops/action-gh-release@v1 + with: + name: Automerge v${{ needs.check_if_js_version_upgraded.outputs.js_version }} + tag_name: js/automerge-${{ needs.check_if_js_version_upgraded.outputs.js_version }} + target_commitish: js_tmp_branch + generate_release_notes: false + draft: false + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Remove js_tmp_branch + run: git push origin :js_tmp_branch diff --git a/.gitignore b/.gitignore index 4ca7b595..f77865d0 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,6 @@ -/target /.direnv perf.* /Cargo.lock build/ -automerge/proptest-regressions/ .vim/* +/target diff --git a/Makefile b/Makefile deleted file mode 100644 index a1f3fd62..00000000 --- a/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -.PHONY: rust -rust: - cd automerge && cargo test - -.PHONY: wasm -wasm: - cd automerge-wasm && yarn - cd automerge-wasm && yarn build - cd automerge-wasm && yarn test - cd automerge-wasm && yarn link - -.PHONY: js -js: wasm - cd automerge-js && yarn - cd automerge-js && yarn link 
"automerge-wasm" - cd automerge-js && yarn test - -.PHONY: clean -clean: - git clean -x -d -f diff --git a/README.md b/README.md index 64b0f9b7..ad174da4 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Automerge RS +# Automerge Automerge logo @@ -7,103 +7,141 @@ [![ci](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml/badge.svg)](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml) [![docs](https://github.com/automerge/automerge-rs/actions/workflows/docs.yaml/badge.svg)](https://github.com/automerge/automerge-rs/actions/workflows/docs.yaml) -This is a Rust library implementation of the [Automerge](https://github.com/automerge/automerge) file format and network protocol. Its focus is to support the creation of Automerge implementations in other languages, currently; WASM, JS and C. A `libautomerge` if you will. +Automerge is a library which provides fast implementations of several different +CRDTs, a compact compression format for these CRDTs, and a sync protocol for +efficiently transmitting those changes over the network. The objective of the +project is to support [local-first](https://www.inkandswitch.com/local-first/) applications in the same way that relational +databases support server applications - by providing mechanisms for persistence +which allow application developers to avoid thinking about hard distributed +computing problems. Automerge aims to be PostgreSQL for your local-first app. -The original [Automerge](https://github.com/automerge/automerge) project (written in JS from the ground up) is still very much maintained and recommended. Indeed it is because of the success of that project that the next stage of Automerge is being explored here. Hopefully Rust can offer a more performant and scalable Automerge, opening up even more use cases. +If you're looking for documentation on the JavaScript implementation take a look +at https://automerge.org/docs/hello/. 
There are other implementations in both +Rust and C, but they are earlier and don't have documentation yet. You can find +them in `rust/automerge` and `rust/automerge-c` if you are comfortable +reading the code and tests to figure out how to use them. + +If you're familiar with CRDTs and interested in the design of Automerge in +particular take a look at https://automerge.org/docs/how-it-works/backend/ + +Finally, if you want to talk to us about this project please [join the +Slack](https://join.slack.com/t/automerge/shared_invite/zt-e4p3760n-kKh7r3KRH1YwwNfiZM8ktw) ## Status -The project has 5 components: +This project is formed of a core Rust implementation which is exposed via FFI in +javascript+WASM, C, and soon other languages. Alex +([@alexjg](https://github.com/alexjg/)]) is working full time on maintaining +automerge, other members of Ink and Switch are also contributing time and there +are several other maintainers. The focus is currently on shipping the new JS +package. We expect to be iterating the API and adding new features over the next +six months so there will likely be several major version bumps in all packages +in that time. -1. [_automerge_](automerge) - The main Rust implementation of the library. -2. [_automerge-wasm_](automerge-wasm) - A JS/WASM interface to the underlying Rust library. This API is generally mature and in use in a handful of projects. -3. [_automerge-js_](automerge-js) - This is a Javascript library using the WASM interface to export the same public API of the primary Automerge project. Currently this project passes all of Automerge's tests but has not been used in any real project or packaged as an NPM. Alpha testers welcome. -4. [_automerge-c_](automerge-c) - This is a C library intended to be an FFI integration point for all other languages. It is currently a work in progress and not yet ready for any testing. -5. [_automerge-cli_](automerge-cli) - An experimental CLI wrapper around the Rust library. 
Currently not functional. +In general we try and respect semver. -## How? +### JavaScript -The magic of the architecture is built around the `OpTree`. This is a data structure -which supports efficiently inserting new operations and realising values of -existing operations. Most interactions with the `OpTree` are in the form of -implementations of `TreeQuery` - a trait which can be used to traverse the -`OpTree` and producing state of some kind. User facing operations are exposed on -an `Automerge` object, under the covers these operations typically instantiate -some `TreeQuery` and run it over the `OpTree`. +A stable release of the javascript package is currently available as +`@automerge/automerge@2.0.0` where. pre-release verisions of the `2.0.1` are +available as `2.0.1-alpha.n`. `2.0.1*` packages are also available for Deno at +https://deno.land/x/automerge -## Development +### Rust -Please feel free to open issues and pull requests. +The rust codebase is currently oriented around producing a performant backend +for the Javascript wrapper and as such the API for Rust code is low level and +not well documented. We will be returning to this over the next few months but +for now you will need to be comfortable reading the tests and asking questions +to figure out how to use it. If you are looking to build rust applications which +use automerge you may want to look into +[autosurgeon](https://github.com/alexjg/autosurgeon) -### Running CI +## Repository Organisation -The steps CI will run are all defined in `./scripts/ci`. Obviously CI will run -everything when you submit a PR, but if you want to run everything locally -before you push you can run `./scripts/ci/run` to run everything. +- `./rust` - the rust rust implementation and also the Rust components of + platform specific wrappers (e.g. 
`automerge-wasm` for the WASM API or + `automerge-c` for the C FFI bindings) +- `./javascript` - The javascript library which uses `automerge-wasm` + internally but presents a more idiomatic javascript interface +- `./scripts` - scripts which are useful to maintenance of the repository. + This includes the scripts which are run in CI. +- `./img` - static assets for use in `.md` files -### Running the JS tests +## Building -You will need to have [node](https://nodejs.org/en/), [yarn](https://yarnpkg.com/getting-started/install), [rust](https://rustup.rs/) and [wasm-pack](https://rustwasm.github.io/wasm-pack/installer/) installed. +To build this codebase you will need: -To build and test the rust library: +- `rust` +- `node` +- `yarn` +- `cmake` +- `cmocka` -```shell - $ cd automerge - $ cargo test +You will also need to install the following with `cargo install` + +- `wasm-bindgen-cli` +- `wasm-opt` +- `cargo-deny` + +And ensure you have added the `wasm32-unknown-unknown` target for rust cross-compilation. + +The various subprojects (the rust code, the wrapper projects) have their own +build instructions, but to run the tests that will be run in CI you can run +`./scripts/ci/run`. + +### For macOS + +These instructions worked to build locally on macOS 13.1 (arm64) as of +Nov 29th 2022. 
+ +```bash +# clone the repo +git clone https://github.com/automerge/automerge-rs +cd automerge-rs + +# install rustup +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh + +# install homebrew +/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" + +# install cmake, node, cmocka +brew install cmake node cmocka + +# install yarn +npm install --global yarn + +# install javascript dependencies +yarn --cwd ./javascript + +# install rust dependencies +cargo install wasm-bindgen-cli wasm-opt cargo-deny + +# get nightly rust to produce optimized automerge-c builds +rustup toolchain install nightly +rustup component add rust-src --toolchain nightly + +# add wasm target in addition to current architecture +rustup target add wasm32-unknown-unknown + +# Run ci script +./scripts/ci/run ``` -To build and test the wasm library: +If your build fails to find `cmocka.h` you may need to teach it about homebrew's +installation location: -```shell - ## setup - $ cd automerge-wasm - $ yarn - - ## building or testing - $ yarn build - $ yarn test - - ## without this the js library wont automatically use changes - $ yarn link - - ## cutting a release or doing benchmarking - $ yarn release +``` +export CPATH=/opt/homebrew/include +export LIBRARY_PATH=/opt/homebrew/lib +./scripts/ci/run ``` -To test the js library. This is where most of the tests reside. +## Contributing -```shell - ## setup - $ cd automerge-js - $ yarn - $ yarn link "automerge-wasm" - - ## testing - $ yarn test -``` - -And finally, to build and test the C bindings with CMake: - -```shell -## setup -$ cd automerge-c -$ mkdir -p build -$ cd build -$ cmake -S .. -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=OFF -## building and testing -$ cmake --build . --target test_automerge -``` - -To add debugging symbols, replace `Release` with `Debug`. -To build a shared library instead of a static one, replace `OFF` with `ON`. 
- -The C bindings can be built and tested on any platform for which CMake is -available but the steps for doing so vary across platforms and are too numerous -to list here. - -## Benchmarking - -The [`edit-trace`](edit-trace) folder has the main code for running the edit trace benchmarking. - -## The old Rust project -If you are looking for the origional `automerge-rs` project that can be used as a wasm backend to the javascript implementation, it can be found [here](https://github.com/automerge/automerge-rs/tree/automerge-1.0). +Please try and split your changes up into relatively independent commits which +change one subsystem at a time and add good commit messages which describe what +the change is and why you're making it (err on the side of longer commit +messages). `git blame` should give future maintainers a good idea of why +something is the way it is. diff --git a/TODO.md b/TODO.md deleted file mode 100644 index 646c0c20..00000000 --- a/TODO.md +++ /dev/null @@ -1,32 +0,0 @@ -### next steps: - 1. C API - 2. port rust command line tool - 3. fast load - -### ergonomics: - 1. value() -> () or something that into's a value - -### automerge: - 1. single pass (fast) load - 2. micro-patches / bare bones observation API / fully hydrated documents - -### future: - 1. handle columns with unknown data in and out - 2. branches with different indexes - -### Peritext - 1. add mark / remove mark -- type, start/end elemid (inclusive,exclusive) - 2. track any formatting ops that start or end on a character - 3. ops right before the character, ops right after that character - 4. query a single character - character, plus marks that start or end on that character - what is its current formatting, - what are the ops that include that in their span, - None = same as last time, Set( bold, italic ), - keep these on index - 5. op probably belongs with the start character - possible packed at the beginning or end of the list - -### maybe: - 1. tables - -### no: - 1. 
cursors diff --git a/automerge-c/.gitignore b/automerge-c/.gitignore deleted file mode 100644 index cb544af0..00000000 --- a/automerge-c/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -automerge -automerge.h -automerge.o diff --git a/automerge-c/CMakeLists.txt b/automerge-c/CMakeLists.txt deleted file mode 100644 index e5a7b1ca..00000000 --- a/automerge-c/CMakeLists.txt +++ /dev/null @@ -1,141 +0,0 @@ -cmake_minimum_required(VERSION 3.18 FATAL_ERROR) - -set(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake") - -# Parse the library name, project name and project version out of Cargo's TOML file. -set(CARGO_LIB_SECTION OFF) - -set(LIBRARY_NAME "") - -set(CARGO_PKG_SECTION OFF) - -set(CARGO_PKG_NAME "") - -set(CARGO_PKG_VERSION "") - -file(READ Cargo.toml TOML_STRING) - -string(REPLACE ";" "\\\\;" TOML_STRING "${TOML_STRING}") - -string(REPLACE "\n" ";" TOML_LINES "${TOML_STRING}") - -foreach(TOML_LINE IN ITEMS ${TOML_LINES}) - string(REGEX MATCH "^\\[(lib|package)\\]$" _ ${TOML_LINE}) - - if(CMAKE_MATCH_1 STREQUAL "lib") - set(CARGO_LIB_SECTION ON) - - set(CARGO_PKG_SECTION OFF) - elseif(CMAKE_MATCH_1 STREQUAL "package") - set(CARGO_LIB_SECTION OFF) - - set(CARGO_PKG_SECTION ON) - endif() - - string(REGEX MATCH "^name += +\"([^\"]+)\"$" _ ${TOML_LINE}) - - if(CMAKE_MATCH_1 AND (CARGO_LIB_SECTION AND NOT CARGO_PKG_SECTION)) - set(LIBRARY_NAME "${CMAKE_MATCH_1}") - elseif(CMAKE_MATCH_1 AND (NOT CARGO_LIB_SECTION AND CARGO_PKG_SECTION)) - set(CARGO_PKG_NAME "${CMAKE_MATCH_1}") - endif() - - string(REGEX MATCH "^version += +\"([^\"]+)\"$" _ ${TOML_LINE}) - - if(CMAKE_MATCH_1 AND CARGO_PKG_SECTION) - set(CARGO_PKG_VERSION "${CMAKE_MATCH_1}") - endif() - - if(LIBRARY_NAME AND (CARGO_PKG_NAME AND CARGO_PKG_VERSION)) - break() - endif() -endforeach() - -project(${CARGO_PKG_NAME} VERSION ${CARGO_PKG_VERSION} LANGUAGES C DESCRIPTION "C bindings for the Automerge Rust backend.") - -include(CTest) - -option(BUILD_SHARED_LIBS "Enable the choice of a shared or static library.") - 
-include(CMakePackageConfigHelpers) - -include(GNUInstallDirs) - -string(MAKE_C_IDENTIFIER ${PROJECT_NAME} SYMBOL_PREFIX) - -string(TOUPPER ${SYMBOL_PREFIX} SYMBOL_PREFIX) - -set(CARGO_TARGET_DIR "${CMAKE_CURRENT_BINARY_DIR}/Cargo/target") - -set(CBINDGEN_INCLUDEDIR "${CARGO_TARGET_DIR}/${CMAKE_INSTALL_INCLUDEDIR}") - -set(CBINDGEN_TARGET_DIR "${CBINDGEN_INCLUDEDIR}/${PROJECT_NAME}") - -add_subdirectory(src) - -# Generate and install the configuration header. -math(EXPR INTEGER_PROJECT_VERSION_MAJOR "${PROJECT_VERSION_MAJOR} * 100000") - -math(EXPR INTEGER_PROJECT_VERSION_MINOR "${PROJECT_VERSION_MINOR} * 100") - -math(EXPR INTEGER_PROJECT_VERSION_PATCH "${PROJECT_VERSION_PATCH}") - -math(EXPR INTEGER_PROJECT_VERSION "${INTEGER_PROJECT_VERSION_MAJOR} + ${INTEGER_PROJECT_VERSION_MINOR} + ${INTEGER_PROJECT_VERSION_PATCH}") - -configure_file( - ${CMAKE_MODULE_PATH}/config.h.in - config.h - @ONLY - NEWLINE_STYLE LF -) - -install( - FILES ${CMAKE_BINARY_DIR}/config.h - DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME} -) - -if(BUILD_TESTING) - add_subdirectory(test EXCLUDE_FROM_ALL) - - enable_testing() -endif() - -add_subdirectory(examples EXCLUDE_FROM_ALL) - -# Generate and install .cmake files -set(PROJECT_CONFIG_NAME "${PROJECT_NAME}-config") - -set(PROJECT_CONFIG_VERSION_NAME "${PROJECT_CONFIG_NAME}-version") - -write_basic_package_version_file( - ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_CONFIG_VERSION_NAME}.cmake - VERSION ${PROJECT_VERSION} - COMPATIBILITY ExactVersion -) - -# The namespace label starts with the title-cased library name. -string(SUBSTRING ${LIBRARY_NAME} 0 1 NS_FIRST) - -string(SUBSTRING ${LIBRARY_NAME} 1 -1 NS_REST) - -string(TOUPPER ${NS_FIRST} NS_FIRST) - -string(TOLOWER ${NS_REST} NS_REST) - -string(CONCAT NAMESPACE ${NS_FIRST} ${NS_REST} "::") - -# \note CMake doesn't automate the exporting of an imported library's targets -# so the package configuration script must do it. 
-configure_package_config_file( - ${CMAKE_MODULE_PATH}/${PROJECT_CONFIG_NAME}.cmake.in - ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_CONFIG_NAME}.cmake - INSTALL_DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME} -) - -install( - FILES - ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_CONFIG_NAME}.cmake - ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_CONFIG_VERSION_NAME}.cmake - DESTINATION - ${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME} -) diff --git a/automerge-c/README.md b/automerge-c/README.md deleted file mode 100644 index 1b0e618d..00000000 --- a/automerge-c/README.md +++ /dev/null @@ -1,97 +0,0 @@ - -## Methods we need to support - -### Basic management - - 1. `AMcreate()` - 1. `AMclone(doc)` - 1. `AMfree(doc)` - 1. `AMconfig(doc, key, val)` // set actor - 1. `actor = get_actor(doc)` - -### Transactions - - 1. `AMpendingOps(doc)` - 1. `AMcommit(doc, message, time)` - 1. `AMrollback(doc)` - -### Write - - 1. `AMset{Map|List}(doc, obj, prop, value)` - 1. `AMinsert(doc, obj, index, value)` - 1. `AMpush(doc, obj, value)` - 1. `AMdel{Map|List}(doc, obj, prop)` - 1. `AMinc{Map|List}(doc, obj, prop, value)` - 1. `AMspliceText(doc, obj, start, num_del, text)` - -### Read (the heads argument is optional and can be on an `at` variant) - - 1. `AMkeys(doc, obj, heads)` - 1. `AMlength(doc, obj, heads)` - 1. `AMlistRange(doc, obj, heads)` - 1. `AMmapRange(doc, obj, heads)` - 1. `AMvalues(doc, obj, heads)` - 1. `AMtext(doc, obj, heads)` - -### Sync - - 1. `AMgenerateSyncMessage(doc, state)` - 1. `AMreceiveSyncMessage(doc, state, message)` - 1. `AMinitSyncState()` - -### Save / Load - - 1. `AMload(data)` - 1. `AMloadIncremental(doc, data)` - 1. `AMsave(doc)` - 1. `AMsaveIncremental(doc)` - -### Low Level Access - - 1. `AMapplyChanges(doc, changes)` - 1. `AMgetChanges(doc, deps)` - 1. `AMgetChangesAdded(doc1, doc2)` - 1. `AMgetHeads(doc)` - 1. `AMgetLastLocalChange(doc)` - 1. `AMgetMissingDeps(doc, heads)` - -### Encode/Decode - - 1. `AMencodeChange(change)` - 1. 
`AMdecodeChange(change)` - 1. `AMencodeSyncMessage(change)` - 1. `AMdecodeSyncMessage(change)` - 1. `AMencodeSyncState(change)` - 1. `AMdecodeSyncState(change)` - -## Open Question - Memory management - -Most of these calls return one or more items of arbitrary length. Doing memory management in C is tricky. This is my proposed solution... - -### - - ``` - // returns 1 or zero opids - n = automerge_set(doc, "_root", "hello", datatype, value); - if (n) { - automerge_pop(doc, &obj, len); - } - - // returns n values - n = automerge_values(doc, "_root", "hello"); - for (i = 0; i -#include -#include - -#include - -static void abort_cb(AMresultStack**, uint8_t); - -/** - * \brief Based on https://automerge.github.io/docs/quickstart - */ -int main(int argc, char** argv) { - AMresultStack* stack = NULL; - AMdoc* const doc1 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, abort_cb).doc; - AMobjId const* const cards = AMpush(&stack, - AMmapPutObject(doc1, AM_ROOT, "cards", AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - abort_cb).obj_id; - AMobjId const* const card1 = AMpush(&stack, - AMlistPutObject(doc1, cards, SIZE_MAX, true, AM_OBJ_TYPE_MAP), - AM_VALUE_OBJ_ID, - abort_cb).obj_id; - AMfree(AMmapPutStr(doc1, card1, "title", "Rewrite everything in Clojure")); - AMfree(AMmapPutBool(doc1, card1, "done", false)); - AMobjId const* const card2 = AMpush(&stack, - AMlistPutObject(doc1, cards, SIZE_MAX, true, AM_OBJ_TYPE_MAP), - AM_VALUE_OBJ_ID, - abort_cb).obj_id; - AMfree(AMmapPutStr(doc1, card2, "title", "Rewrite everything in Haskell")); - AMfree(AMmapPutBool(doc1, card2, "done", false)); - AMfree(AMcommit(doc1, "Add card", NULL)); - - AMdoc* doc2 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, abort_cb).doc; - AMfree(AMmerge(doc2, doc1)); - - AMbyteSpan const binary = AMpush(&stack, AMsave(doc1), AM_VALUE_BYTES, abort_cb).bytes; - doc2 = AMpush(&stack, AMload(binary.src, binary.count), AM_VALUE_DOC, abort_cb).doc; - - AMfree(AMmapPutBool(doc1, card1, "done", true)); - 
AMfree(AMcommit(doc1, "Mark card as done", NULL)); - - AMfree(AMlistDelete(doc2, cards, 0)); - AMfree(AMcommit(doc2, "Delete card", NULL)); - - AMfree(AMmerge(doc1, doc2)); - - AMchanges changes = AMpush(&stack, AMgetChanges(doc1, NULL), AM_VALUE_CHANGES, abort_cb).changes; - AMchange const* change = NULL; - while ((change = AMchangesNext(&changes, 1)) != NULL) { - AMbyteSpan const change_hash = AMchangeHash(change); - AMchangeHashes const heads = AMpush(&stack, - AMchangeHashesInit(&change_hash, 1), - AM_VALUE_CHANGE_HASHES, - abort_cb).change_hashes; - printf("%s %ld\n", AMchangeMessage(change), AMobjSize(doc1, cards, &heads)); - } - AMfreeStack(&stack); -} - -static char const* discriminant_suffix(AMvalueVariant const); - -/** - * \brief Prints an error message to `stderr`, deallocates all results in the - * given stack and exits. - * - * \param[in,out] stack A pointer to a pointer to an `AMresultStack` struct. - * \param[in] discriminant An `AMvalueVariant` enum tag. - * \pre \p stack` != NULL`. - * \post `*stack == NULL`. 
- */ -static void abort_cb(AMresultStack** stack, uint8_t discriminant) { - static char buffer[512] = {0}; - - char const* suffix = NULL; - if (!stack) { - suffix = "Stack*"; - } - else if (!*stack) { - suffix = "Stack"; - } - else if (!(*stack)->result) { - suffix = ""; - } - if (suffix) { - fprintf(stderr, "Null `AMresult%s*`.", suffix); - AMfreeStack(stack); - exit(EXIT_FAILURE); - return; - } - AMstatus const status = AMresultStatus((*stack)->result); - switch (status) { - case AM_STATUS_ERROR: strcpy(buffer, "Error"); break; - case AM_STATUS_INVALID_RESULT: strcpy(buffer, "Invalid result"); break; - case AM_STATUS_OK: break; - default: sprintf(buffer, "Unknown `AMstatus` tag %d", status); - } - if (buffer[0]) { - fprintf(stderr, "%s; %s.", buffer, AMerrorMessage((*stack)->result)); - AMfreeStack(stack); - exit(EXIT_FAILURE); - return; - } - AMvalue const value = AMresultValue((*stack)->result); - fprintf(stderr, "Unexpected tag `AM_VALUE_%s` (%d); expected `AM_VALUE_%s`.", - discriminant_suffix(value.tag), - value.tag, - discriminant_suffix(discriminant)); - AMfreeStack(stack); - exit(EXIT_FAILURE); -} - -/** - * \brief Gets the suffix for a discriminant's corresponding string - * representation. - * - * \param[in] discriminant An `AMvalueVariant` enum tag. - * \return A UTF-8 string. 
- */ -static char const* discriminant_suffix(AMvalueVariant const discriminant) { - char const* suffix = NULL; - switch (discriminant) { - case AM_VALUE_ACTOR_ID: suffix = "ACTOR_ID"; break; - case AM_VALUE_BOOLEAN: suffix = "BOOLEAN"; break; - case AM_VALUE_BYTES: suffix = "BYTES"; break; - case AM_VALUE_CHANGE_HASHES: suffix = "CHANGE_HASHES"; break; - case AM_VALUE_CHANGES: suffix = "CHANGES"; break; - case AM_VALUE_COUNTER: suffix = "COUNTER"; break; - case AM_VALUE_DOC: suffix = "DOC"; break; - case AM_VALUE_F64: suffix = "F64"; break; - case AM_VALUE_INT: suffix = "INT"; break; - case AM_VALUE_LIST_ITEMS: suffix = "LIST_ITEMS"; break; - case AM_VALUE_MAP_ITEMS: suffix = "MAP_ITEMS"; break; - case AM_VALUE_NULL: suffix = "NULL"; break; - case AM_VALUE_OBJ_ID: suffix = "OBJ_ID"; break; - case AM_VALUE_OBJ_ITEMS: suffix = "OBJ_ITEMS"; break; - case AM_VALUE_STR: suffix = "STR"; break; - case AM_VALUE_STRS: suffix = "STRINGS"; break; - case AM_VALUE_SYNC_MESSAGE: suffix = "SYNC_MESSAGE"; break; - case AM_VALUE_SYNC_STATE: suffix = "SYNC_STATE"; break; - case AM_VALUE_TIMESTAMP: suffix = "TIMESTAMP"; break; - case AM_VALUE_UINT: suffix = "UINT"; break; - case AM_VALUE_VOID: suffix = "VOID"; break; - default: suffix = "..."; - } - return suffix; -} diff --git a/automerge-c/src/CMakeLists.txt b/automerge-c/src/CMakeLists.txt deleted file mode 100644 index e02c0a96..00000000 --- a/automerge-c/src/CMakeLists.txt +++ /dev/null @@ -1,250 +0,0 @@ -cmake_minimum_required(VERSION 3.18 FATAL_ERROR) - -find_program ( - CARGO_CMD - "cargo" - PATHS "$ENV{CARGO_HOME}/bin" - DOC "The Cargo command" -) - -if(NOT CARGO_CMD) - message(FATAL_ERROR "Cargo (Rust package manager) not found! 
Install it and/or set the CARGO_HOME environment variable.") -endif() - -string(TOLOWER "${CMAKE_BUILD_TYPE}" BUILD_TYPE_LOWER) - -if(BUILD_TYPE_LOWER STREQUAL debug) - set(CARGO_BUILD_TYPE "debug") - - set(CARGO_FLAG "") -else() - set(CARGO_BUILD_TYPE "release") - - set(CARGO_FLAG "--release") -endif() - -set(CARGO_FEATURES "") - -set(CARGO_CURRENT_BINARY_DIR "${CARGO_TARGET_DIR}/${CARGO_BUILD_TYPE}") - -set( - CARGO_OUTPUT - ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h - ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX} - ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX} -) - -if(WIN32) - # \note The basename of an import library output by Cargo is the filename - # of its corresponding shared library. - list(APPEND CARGO_OUTPUT ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}) -endif() - -add_custom_command( - OUTPUT - ${CARGO_OUTPUT} - COMMAND - # \note cbindgen won't regenerate its output header file after it's - # been removed but it will after its configuration file has been - # updated. 
- ${CMAKE_COMMAND} -DCONDITION=NOT_EXISTS -P ${CMAKE_SOURCE_DIR}/cmake/file_touch.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h ${CMAKE_SOURCE_DIR}/cbindgen.toml - COMMAND - ${CMAKE_COMMAND} -E env CARGO_TARGET_DIR=${CARGO_TARGET_DIR} CBINDGEN_TARGET_DIR=${CBINDGEN_TARGET_DIR} ${CARGO_CMD} build ${CARGO_FLAG} ${CARGO_FEATURES} - MAIN_DEPENDENCY - lib.rs - DEPENDS - actor_id.rs - byte_span.rs - change_hashes.rs - change.rs - changes.rs - doc.rs - doc/list.rs - doc/list/item.rs - doc/list/items.rs - doc/map.rs - doc/map/item.rs - doc/map/items.rs - doc/utils.rs - obj.rs - obj/item.rs - obj/items.rs - result.rs - result_stack.rs - strs.rs - sync.rs - sync/have.rs - sync/haves.rs - sync/message.rs - sync/state.rs - ${CMAKE_SOURCE_DIR}/build.rs - ${CMAKE_SOURCE_DIR}/Cargo.toml - ${CMAKE_SOURCE_DIR}/cbindgen.toml - WORKING_DIRECTORY - ${CMAKE_SOURCE_DIR} - COMMENT - "Producing the library artifacts with Cargo..." - VERBATIM -) - -add_custom_target( - ${LIBRARY_NAME}_artifacts ALL - DEPENDS ${CARGO_OUTPUT} -) - -# \note cbindgen's naming behavior isn't fully configurable and it ignores -# `const fn` calls (https://github.com/eqrion/cbindgen/issues/252). -add_custom_command( - TARGET ${LIBRARY_NAME}_artifacts - POST_BUILD - COMMAND - # Compensate for cbindgen's variant struct naming. - ${CMAKE_COMMAND} -DMATCH_REGEX=AM\([^_]+_[^_]+\)_Body -DREPLACE_EXPR=AM\\1 -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h - COMMAND - # Compensate for cbindgen's union tag enum type naming. - ${CMAKE_COMMAND} -DMATCH_REGEX=AM\([^_]+\)_Tag -DREPLACE_EXPR=AM\\1Variant -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h - COMMAND - # Compensate for cbindgen's translation of consecutive uppercase letters to "ScreamingSnakeCase". 
- ${CMAKE_COMMAND} -DMATCH_REGEX=A_M\([^_]+\)_ -DREPLACE_EXPR=AM_\\1_ -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h - COMMAND - # Compensate for cbindgen ignoring `std:mem::size_of()` calls. - ${CMAKE_COMMAND} -DMATCH_REGEX=USIZE_ -DREPLACE_EXPR=\+${CMAKE_SIZEOF_VOID_P} -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h - WORKING_DIRECTORY - ${CMAKE_SOURCE_DIR} - COMMENT - "Compensating for cbindgen deficits..." - VERBATIM -) - -if(BUILD_SHARED_LIBS) - if(WIN32) - set(LIBRARY_DESTINATION "${CMAKE_INSTALL_BINDIR}") - else() - set(LIBRARY_DESTINATION "${CMAKE_INSTALL_LIBDIR}") - endif() - - set(LIBRARY_DEFINE_SYMBOL "${SYMBOL_PREFIX}_EXPORTS") - - # \note The basename of an import library output by Cargo is the filename - # of its corresponding shared library. - set(LIBRARY_IMPLIB "${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}") - - set(LIBRARY_LOCATION "${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}") - - set(LIBRARY_NO_SONAME "${WIN32}") - - set(LIBRARY_SONAME "${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_${CMAKE_BUILD_TYPE}_POSTFIX}${CMAKE_SHARED_LIBRARY_SUFFIX}") - - set(LIBRARY_TYPE "SHARED") -else() - set(LIBRARY_DEFINE_SYMBOL "") - - set(LIBRARY_DESTINATION "${CMAKE_INSTALL_LIBDIR}") - - set(LIBRARY_IMPLIB "") - - set(LIBRARY_LOCATION "${CARGO_CURRENT_BINARY_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX}") - - set(LIBRARY_NO_SONAME "TRUE") - - set(LIBRARY_SONAME "") - - set(LIBRARY_TYPE "STATIC") -endif() - -add_library(${LIBRARY_NAME} ${LIBRARY_TYPE} IMPORTED GLOBAL) - -set_target_properties( - ${LIBRARY_NAME} - PROPERTIES - # \note Cargo writes a debug build into a nested directory instead of - # decorating its name. 
- DEBUG_POSTFIX "" - DEFINE_SYMBOL "${LIBRARY_DEFINE_SYMBOL}" - IMPORTED_IMPLIB "${LIBRARY_IMPLIB}" - IMPORTED_LOCATION "${LIBRARY_LOCATION}" - IMPORTED_NO_SONAME "${LIBRARY_NO_SONAME}" - IMPORTED_SONAME "${LIBRARY_SONAME}" - LINKER_LANGUAGE C - PUBLIC_HEADER "${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h" - SOVERSION "${PROJECT_VERSION_MAJOR}" - VERSION "${PROJECT_VERSION}" - # \note Cargo exports all of the symbols automatically. - WINDOWS_EXPORT_ALL_SYMBOLS "TRUE" -) - -target_compile_definitions(${LIBRARY_NAME} INTERFACE $) - -target_include_directories( - ${LIBRARY_NAME} - INTERFACE - "$" -) - -set(CMAKE_THREAD_PREFER_PTHREAD TRUE) - -set(THREADS_PREFER_PTHREAD_FLAG TRUE) - -find_package(Threads REQUIRED) - -set(LIBRARY_DEPENDENCIES Threads::Threads ${CMAKE_DL_LIBS}) - -if(WIN32) - list(APPEND LIBRARY_DEPENDENCIES Bcrypt userenv ws2_32) -else() - list(APPEND LIBRARY_DEPENDENCIES m) -endif() - -target_link_libraries(${LIBRARY_NAME} INTERFACE ${LIBRARY_DEPENDENCIES}) - -install( - FILES $ - TYPE LIB - # \note The basename of an import library output by Cargo is the filename - # of its corresponding shared library. 
- RENAME "${CMAKE_STATIC_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_${CMAKE_BUILD_TYPE}_POSTFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}" - OPTIONAL -) - -set(LIBRARY_FILE_NAME "${CMAKE_${LIBRARY_TYPE}_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_${CMAKE_BUILD_TYPE}_POSTFIX}${CMAKE_${LIBRARY_TYPE}_LIBRARY_SUFFIX}") - -install( - FILES $ - RENAME "${LIBRARY_FILE_NAME}" - DESTINATION ${LIBRARY_DESTINATION} -) - -install( - FILES $ - DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME} -) - -find_package(Doxygen OPTIONAL_COMPONENTS dot) - -if(DOXYGEN_FOUND) - set(DOXYGEN_ALIASES "installed_headerfile=\\headerfile ${LIBRARY_NAME}.h <${PROJECT_NAME}/${LIBRARY_NAME}.h>") - - set(DOXYGEN_GENERATE_LATEX YES) - - set(DOXYGEN_PDF_HYPERLINKS YES) - - set(DOXYGEN_PROJECT_LOGO "${CMAKE_SOURCE_DIR}/img/brandmark.png") - - set(DOXYGEN_SORT_BRIEF_DOCS YES) - - set(DOXYGEN_USE_MDFILE_AS_MAINPAGE "${CMAKE_SOURCE_DIR}/README.md") - - doxygen_add_docs( - ${LIBRARY_NAME}_docs - "${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h" - "${CMAKE_SOURCE_DIR}/README.md" - USE_STAMP_FILE - WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} - COMMENT "Producing documentation with Doxygen..." - ) - - # \note A Doxygen input file isn't a file-level dependency so the Doxygen - # command must instead depend upon a target that outputs the file or - # it will just output an error message when it can't be found. - add_dependencies(${LIBRARY_NAME}_docs ${LIBRARY_NAME}_artifacts) -endif() diff --git a/automerge-c/src/actor_id.rs b/automerge-c/src/actor_id.rs deleted file mode 100644 index e5f75856..00000000 --- a/automerge-c/src/actor_id.rs +++ /dev/null @@ -1,166 +0,0 @@ -use automerge as am; -use std::cell::RefCell; -use std::cmp::Ordering; -use std::ffi::{CStr, CString}; -use std::os::raw::c_char; -use std::str::FromStr; - -use crate::byte_span::AMbyteSpan; -use crate::result::{to_result, AMresult}; - -/// \struct AMactorId -/// \installed_headerfile -/// \brief An actor's unique identifier. 
-#[derive(Eq, PartialEq)] -pub struct AMactorId { - body: *const am::ActorId, - c_str: RefCell>, -} - -impl AMactorId { - pub fn new(actor_id: &am::ActorId) -> Self { - Self { - body: actor_id, - c_str: Default::default(), - } - } - - pub fn as_c_str(&self) -> *const c_char { - let mut c_str = self.c_str.borrow_mut(); - match c_str.as_mut() { - None => { - let hex_str = unsafe { (*self.body).to_hex_string() }; - c_str.insert(CString::new(hex_str).unwrap()).as_ptr() - } - Some(hex_str) => hex_str.as_ptr(), - } - } -} - -impl AsRef for AMactorId { - fn as_ref(&self) -> &am::ActorId { - unsafe { &*self.body } - } -} - -/// \memberof AMactorId -/// \brief Gets the value of an actor identifier as a sequence of bytes. -/// -/// \param[in] actor_id A pointer to an `AMactorId` struct. -/// \pre \p actor_id `!= NULL`. -/// \return An `AMbyteSpan` struct. -/// \internal -/// -/// # Safety -/// actor_id must be a valid pointer to an AMactorId -#[no_mangle] -pub unsafe extern "C" fn AMactorIdBytes(actor_id: *const AMactorId) -> AMbyteSpan { - match actor_id.as_ref() { - Some(actor_id) => actor_id.as_ref().into(), - None => AMbyteSpan::default(), - } -} - -/// \memberof AMactorId -/// \brief Compares two actor identifiers. -/// -/// \param[in] actor_id1 A pointer to an `AMactorId` struct. -/// \param[in] actor_id2 A pointer to an `AMactorId` struct. -/// \return `-1` if \p actor_id1 `<` \p actor_id2, `0` if -/// \p actor_id1 `==` \p actor_id2 and `1` if -/// \p actor_id1 `>` \p actor_id2. -/// \pre \p actor_id1 `!= NULL`. -/// \pre \p actor_id2 `!= NULL`. 
-/// \internal -/// -/// #Safety -/// actor_id1 must be a valid pointer to an AMactorId -/// actor_id2 must be a valid pointer to an AMactorId -#[no_mangle] -pub unsafe extern "C" fn AMactorIdCmp( - actor_id1: *const AMactorId, - actor_id2: *const AMactorId, -) -> isize { - match (actor_id1.as_ref(), actor_id2.as_ref()) { - (Some(actor_id1), Some(actor_id2)) => match actor_id1.as_ref().cmp(actor_id2.as_ref()) { - Ordering::Less => -1, - Ordering::Equal => 0, - Ordering::Greater => 1, - }, - (None, Some(_)) => -1, - (Some(_), None) => 1, - (None, None) => 0, - } -} - -/// \memberof AMactorId -/// \brief Allocates a new actor identifier and initializes it with a random -/// UUID. -/// -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMactorId` struct. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -#[no_mangle] -pub unsafe extern "C" fn AMactorIdInit() -> *mut AMresult { - to_result(Ok::(am::ActorId::random())) -} - -/// \memberof AMactorId -/// \brief Allocates a new actor identifier and initializes it from a sequence -/// of bytes. -/// -/// \param[in] src A pointer to a contiguous sequence of bytes. -/// \param[in] count The number of bytes to copy from \p src. -/// \pre `0 <` \p count `<= sizeof(`\p src`)`. -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMactorId` struct. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// src must be a byte array of size `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMactorIdInitBytes(src: *const u8, count: usize) -> *mut AMresult { - let slice = std::slice::from_raw_parts(src, count); - to_result(Ok::(am::ActorId::from( - slice, - ))) -} - -/// \memberof AMactorId -/// \brief Allocates a new actor identifier and initializes it from a -/// hexadecimal string. 
-/// -/// \param[in] hex_str A UTF-8 string. -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMactorId` struct. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// hex_str must be a null-terminated array of `c_char` -#[no_mangle] -pub unsafe extern "C" fn AMactorIdInitStr(hex_str: *const c_char) -> *mut AMresult { - to_result(am::ActorId::from_str( - CStr::from_ptr(hex_str).to_str().unwrap(), - )) -} - -/// \memberof AMactorId -/// \brief Gets the value of an actor identifier as a hexadecimal string. -/// -/// \param[in] actor_id A pointer to an `AMactorId` struct. -/// \pre \p actor_id `!= NULL`. -/// \return A UTF-8 string. -/// \internal -/// -/// # Safety -/// actor_id must be a valid pointer to an AMactorId -#[no_mangle] -pub unsafe extern "C" fn AMactorIdStr(actor_id: *const AMactorId) -> *const c_char { - match actor_id.as_ref() { - Some(actor_id) => actor_id.as_c_str(), - None => std::ptr::null::(), - } -} diff --git a/automerge-c/src/byte_span.rs b/automerge-c/src/byte_span.rs deleted file mode 100644 index a8e55065..00000000 --- a/automerge-c/src/byte_span.rs +++ /dev/null @@ -1,64 +0,0 @@ -use automerge as am; - -/// \struct AMbyteSpan -/// \installed_headerfile -/// \brief A view onto a contiguous sequence of bytes. -#[repr(C)] -#[derive(Eq, PartialEq)] -pub struct AMbyteSpan { - /// A pointer to an array of bytes. - /// \attention NEVER CALL `free()` ON \p src! - /// \warning \p src is only valid until the `AMfree()` function is called - /// on the `AMresult` struct that stores the array of bytes to - /// which it points. - pub src: *const u8, - /// The number of bytes in the array. 
- pub count: usize, -} - -impl Default for AMbyteSpan { - fn default() -> Self { - Self { - src: std::ptr::null(), - count: 0, - } - } -} - -impl From<&am::ActorId> for AMbyteSpan { - fn from(actor: &am::ActorId) -> Self { - let slice = actor.to_bytes(); - Self { - src: slice.as_ptr(), - count: slice.len(), - } - } -} - -impl From<&mut am::ActorId> for AMbyteSpan { - fn from(actor: &mut am::ActorId) -> Self { - let slice = actor.to_bytes(); - Self { - src: slice.as_ptr(), - count: slice.len(), - } - } -} - -impl From<&am::ChangeHash> for AMbyteSpan { - fn from(change_hash: &am::ChangeHash) -> Self { - Self { - src: change_hash.0.as_ptr(), - count: change_hash.0.len(), - } - } -} - -impl From<&[u8]> for AMbyteSpan { - fn from(slice: &[u8]) -> Self { - Self { - src: slice.as_ptr(), - count: slice.len(), - } - } -} diff --git a/automerge-c/src/change_hashes.rs b/automerge-c/src/change_hashes.rs deleted file mode 100644 index 87ae6c7f..00000000 --- a/automerge-c/src/change_hashes.rs +++ /dev/null @@ -1,399 +0,0 @@ -use automerge as am; -use std::cmp::Ordering; -use std::ffi::c_void; -use std::mem::size_of; - -use crate::byte_span::AMbyteSpan; -use crate::result::{to_result, AMresult}; - -#[repr(C)] -struct Detail { - len: usize, - offset: isize, - ptr: *const c_void, -} - -/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call -/// (https://github.com/eqrion/cbindgen/issues/252) but it will -/// propagate the name of a constant initialized from it so if the -/// constant's name is a symbolic representation of the value it can be -/// converted into a number by post-processing the header it generated. 
-pub const USIZE_USIZE_USIZE_: usize = size_of::(); - -impl Detail { - fn new(change_hashes: &[am::ChangeHash], offset: isize) -> Self { - Self { - len: change_hashes.len(), - offset, - ptr: change_hashes.as_ptr() as *const c_void, - } - } - - pub fn advance(&mut self, n: isize) { - if n == 0 { - return; - } - let len = self.len as isize; - self.offset = if self.offset < 0 { - // It's reversed. - let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); - if unclipped >= 0 { - // Clip it to the forward stop. - len - } else { - std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) - } - } else { - let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); - if unclipped < 0 { - // Clip it to the reverse stop. - -(len + 1) - } else { - std::cmp::max(0, std::cmp::min(unclipped, len)) - } - } - } - - pub fn get_index(&self) -> usize { - (self.offset - + if self.offset < 0 { - self.len as isize - } else { - 0 - }) as usize - } - - pub fn next(&mut self, n: isize) -> Option<&am::ChangeHash> { - if self.is_stopped() { - return None; - } - let slice: &[am::ChangeHash] = - unsafe { std::slice::from_raw_parts(self.ptr as *const am::ChangeHash, self.len) }; - let value = &slice[self.get_index()]; - self.advance(n); - Some(value) - } - - pub fn is_stopped(&self) -> bool { - let len = self.len as isize; - self.offset < -len || self.offset == len - } - - pub fn prev(&mut self, n: isize) -> Option<&am::ChangeHash> { - self.advance(-n); - if self.is_stopped() { - return None; - } - let slice: &[am::ChangeHash] = - unsafe { std::slice::from_raw_parts(self.ptr as *const am::ChangeHash, self.len) }; - Some(&slice[self.get_index()]) - } - - pub fn reversed(&self) -> Self { - Self { - len: self.len, - offset: -(self.offset + 1), - ptr: self.ptr, - } - } - - pub fn rewound(&self) -> Self { - Self { - len: self.len, - offset: if self.offset < 0 { -1 } else { 0 }, - ptr: self.ptr, - } - } -} - -impl From for [u8; USIZE_USIZE_USIZE_] { - fn from(detail: Detail) -> Self { - 
unsafe { - std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) - .try_into() - .unwrap() - } - } -} - -/// \struct AMchangeHashes -/// \installed_headerfile -/// \brief A random-access iterator over a sequence of change hashes. -#[repr(C)] -#[derive(Eq, PartialEq)] -pub struct AMchangeHashes { - /// An implementation detail that is intentionally opaque. - /// \warning Modifying \p detail will cause undefined behavior. - /// \note The actual size of \p detail will vary by platform, this is just - /// the one for the platform this documentation was built on. - detail: [u8; USIZE_USIZE_USIZE_], -} - -impl AMchangeHashes { - pub fn new(change_hashes: &[am::ChangeHash]) -> Self { - Self { - detail: Detail::new(change_hashes, 0).into(), - } - } - - pub fn advance(&mut self, n: isize) { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.advance(n); - } - - pub fn len(&self) -> usize { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - detail.len - } - - pub fn next(&mut self, n: isize) -> Option<&am::ChangeHash> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.next(n) - } - - pub fn prev(&mut self, n: isize) -> Option<&am::ChangeHash> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.prev(n) - } - - pub fn reversed(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.reversed().into(), - } - } - - pub fn rewound(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.rewound().into(), - } - } -} - -impl AsRef<[am::ChangeHash]> for AMchangeHashes { - fn as_ref(&self) -> &[am::ChangeHash] { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - unsafe { std::slice::from_raw_parts(detail.ptr as *const am::ChangeHash, detail.len) } - } -} - -impl Default for AMchangeHashes { - fn 
default() -> Self { - Self { - detail: [0; USIZE_USIZE_USIZE_], - } - } -} - -/// \memberof AMchangeHashes -/// \brief Advances an iterator over a sequence of change hashes by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction. -/// -/// \param[in,out] change_hashes A pointer to an `AMchangeHashes` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \pre \p change_hashes `!= NULL`. -/// \internal -/// -/// #Safety -/// change_hashes must be a valid pointer to an AMchangeHashes -#[no_mangle] -pub unsafe extern "C" fn AMchangeHashesAdvance(change_hashes: *mut AMchangeHashes, n: isize) { - if let Some(change_hashes) = change_hashes.as_mut() { - change_hashes.advance(n); - }; -} - -/// \memberof AMchangeHashes -/// \brief Compares the sequences of change hashes underlying a pair of -/// iterators. -/// -/// \param[in] change_hashes1 A pointer to an `AMchangeHashes` struct. -/// \param[in] change_hashes2 A pointer to an `AMchangeHashes` struct. -/// \return `-1` if \p change_hashes1 `<` \p change_hashes2, `0` if -/// \p change_hashes1 `==` \p change_hashes2 and `1` if -/// \p change_hashes1 `>` \p change_hashes2. -/// \pre \p change_hashes1 `!= NULL`. -/// \pre \p change_hashes2 `!= NULL`. 
-/// \internal -/// -/// #Safety -/// change_hashes1 must be a valid pointer to an AMchangeHashes -/// change_hashes2 must be a valid pointer to an AMchangeHashes -#[no_mangle] -pub unsafe extern "C" fn AMchangeHashesCmp( - change_hashes1: *const AMchangeHashes, - change_hashes2: *const AMchangeHashes, -) -> isize { - match (change_hashes1.as_ref(), change_hashes2.as_ref()) { - (Some(change_hashes1), Some(change_hashes2)) => { - match change_hashes1.as_ref().cmp(change_hashes2.as_ref()) { - Ordering::Less => -1, - Ordering::Equal => 0, - Ordering::Greater => 1, - } - } - (None, Some(_)) => -1, - (Some(_), None) => 1, - (None, None) => 0, - } -} - -/// \memberof AMchangeHashes -/// \brief Allocates an iterator over a sequence of change hashes and -/// initializes it from a sequence of byte spans. -/// -/// \param[in] src A pointer to an array of `AMbyteSpan` structs. -/// \param[in] count The number of `AMbyteSpan` structs to copy from \p src. -/// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` -/// struct. -/// \pre \p src `!= NULL`. -/// \pre `0 <` \p count `<= sizeof(`\p src`) / sizeof(AMbyteSpan)`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// src must be an AMbyteSpan array of size `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMchangeHashesInit(src: *const AMbyteSpan, count: usize) -> *mut AMresult { - let mut change_hashes = Vec::::new(); - for n in 0..count { - let byte_span = &*src.add(n); - let slice = std::slice::from_raw_parts(byte_span.src, byte_span.count); - match slice.try_into() { - Ok(change_hash) => { - change_hashes.push(change_hash); - } - Err(e) => { - return to_result(Err(e)); - } - } - } - to_result(Ok::, am::InvalidChangeHashSlice>( - change_hashes, - )) -} - -/// \memberof AMchangeHashes -/// \brief Gets the change hash at the current position of an iterator over a -/// sequence of change hashes and then advances it by at most \p |n| -/// positions where the sign of \p n is relative to the iterator's -/// direction. -/// -/// \param[in,out] change_hashes A pointer to an `AMchangeHashes` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return An `AMbyteSpan` struct with `.src == NULL` when \p change_hashes -/// was previously advanced past its forward/reverse limit. -/// \pre \p change_hashes `!= NULL`. -/// \internal -/// -/// #Safety -/// change_hashes must be a valid pointer to an AMchangeHashes -#[no_mangle] -pub unsafe extern "C" fn AMchangeHashesNext( - change_hashes: *mut AMchangeHashes, - n: isize, -) -> AMbyteSpan { - if let Some(change_hashes) = change_hashes.as_mut() { - if let Some(change_hash) = change_hashes.next(n) { - return change_hash.into(); - } - } - AMbyteSpan::default() -} - -/// \memberof AMchangeHashes -/// \brief Advances an iterator over a sequence of change hashes by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction and then gets the change hash at its new -/// position. -/// -/// \param[in,out] change_hashes A pointer to an `AMchangeHashes` struct. 
-/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return An `AMbyteSpan` struct with `.src == NULL` when \p change_hashes is -/// presently advanced past its forward/reverse limit. -/// \pre \p change_hashes `!= NULL`. -/// \internal -/// -/// #Safety -/// change_hashes must be a valid pointer to an AMchangeHashes -#[no_mangle] -pub unsafe extern "C" fn AMchangeHashesPrev( - change_hashes: *mut AMchangeHashes, - n: isize, -) -> AMbyteSpan { - if let Some(change_hashes) = change_hashes.as_mut() { - if let Some(change_hash) = change_hashes.prev(n) { - return change_hash.into(); - } - } - AMbyteSpan::default() -} - -/// \memberof AMchangeHashes -/// \brief Gets the size of the sequence of change hashes underlying an -/// iterator. -/// -/// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. -/// \return The count of values in \p change_hashes. -/// \pre \p change_hashes `!= NULL`. -/// \internal -/// -/// #Safety -/// change_hashes must be a valid pointer to an AMchangeHashes -#[no_mangle] -pub unsafe extern "C" fn AMchangeHashesSize(change_hashes: *const AMchangeHashes) -> usize { - if let Some(change_hashes) = change_hashes.as_ref() { - change_hashes.len() - } else { - 0 - } -} - -/// \memberof AMchangeHashes -/// \brief Creates an iterator over the same sequence of change hashes as the -/// given one but with the opposite position and direction. -/// -/// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. -/// \return An `AMchangeHashes` struct -/// \pre \p change_hashes `!= NULL`. 
-/// \internal -/// -/// #Safety -/// change_hashes must be a valid pointer to an AMchangeHashes -#[no_mangle] -pub unsafe extern "C" fn AMchangeHashesReversed( - change_hashes: *const AMchangeHashes, -) -> AMchangeHashes { - if let Some(change_hashes) = change_hashes.as_ref() { - change_hashes.reversed() - } else { - AMchangeHashes::default() - } -} - -/// \memberof AMchangeHashes -/// \brief Creates an iterator at the starting position over the same sequence -/// of change hashes as the given one. -/// -/// \param[in] change_hashes A pointer to an `AMchangeHashes` struct. -/// \return An `AMchangeHashes` struct -/// \pre \p change_hashes `!= NULL`. -/// \internal -/// -/// #Safety -/// change_hashes must be a valid pointer to an AMchangeHashes -#[no_mangle] -pub unsafe extern "C" fn AMchangeHashesRewound( - change_hashes: *const AMchangeHashes, -) -> AMchangeHashes { - if let Some(change_hashes) = change_hashes.as_ref() { - change_hashes.rewound() - } else { - AMchangeHashes::default() - } -} diff --git a/automerge-c/src/changes.rs b/automerge-c/src/changes.rs deleted file mode 100644 index e359cfb6..00000000 --- a/automerge-c/src/changes.rs +++ /dev/null @@ -1,398 +0,0 @@ -use automerge as am; -use std::collections::BTreeMap; -use std::ffi::c_void; -use std::mem::size_of; - -use crate::byte_span::AMbyteSpan; -use crate::change::AMchange; -use crate::result::{to_result, AMresult}; - -#[repr(C)] -struct Detail { - len: usize, - offset: isize, - ptr: *const c_void, - storage: *mut c_void, -} - -/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call -/// (https://github.com/eqrion/cbindgen/issues/252) but it will -/// propagate the name of a constant initialized from it so if the -/// constant's name is a symbolic representation of the value it can be -/// converted into a number by post-processing the header it generated. 
-pub const USIZE_USIZE_USIZE_USIZE_: usize = size_of::(); - -impl Detail { - fn new(changes: &[am::Change], offset: isize, storage: &mut BTreeMap) -> Self { - let storage: *mut BTreeMap = storage; - Self { - len: changes.len(), - offset, - ptr: changes.as_ptr() as *const c_void, - storage: storage as *mut c_void, - } - } - - pub fn advance(&mut self, n: isize) { - if n == 0 { - return; - } - let len = self.len as isize; - self.offset = if self.offset < 0 { - // It's reversed. - let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); - if unclipped >= 0 { - // Clip it to the forward stop. - len - } else { - std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) - } - } else { - let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); - if unclipped < 0 { - // Clip it to the reverse stop. - -(len + 1) - } else { - std::cmp::max(0, std::cmp::min(unclipped, len)) - } - } - } - - pub fn get_index(&self) -> usize { - (self.offset - + if self.offset < 0 { - self.len as isize - } else { - 0 - }) as usize - } - - pub fn next(&mut self, n: isize) -> Option<*const AMchange> { - if self.is_stopped() { - return None; - } - let slice: &mut [am::Change] = - unsafe { std::slice::from_raw_parts_mut(self.ptr as *mut am::Change, self.len) }; - let storage = unsafe { &mut *(self.storage as *mut BTreeMap) }; - let index = self.get_index(); - let value = match storage.get_mut(&index) { - Some(value) => value, - None => { - storage.insert(index, AMchange::new(&mut slice[index])); - storage.get_mut(&index).unwrap() - } - }; - self.advance(n); - Some(value) - } - - pub fn is_stopped(&self) -> bool { - let len = self.len as isize; - self.offset < -len || self.offset == len - } - - pub fn prev(&mut self, n: isize) -> Option<*const AMchange> { - self.advance(-n); - if self.is_stopped() { - return None; - } - let slice: &mut [am::Change] = - unsafe { std::slice::from_raw_parts_mut(self.ptr as *mut am::Change, self.len) }; - let storage = unsafe { &mut *(self.storage as *mut 
BTreeMap) }; - let index = self.get_index(); - Some(match storage.get_mut(&index) { - Some(value) => value, - None => { - storage.insert(index, AMchange::new(&mut slice[index])); - storage.get_mut(&index).unwrap() - } - }) - } - - pub fn reversed(&self) -> Self { - Self { - len: self.len, - offset: -(self.offset + 1), - ptr: self.ptr, - storage: self.storage, - } - } - - pub fn rewound(&self) -> Self { - Self { - len: self.len, - offset: if self.offset < 0 { -1 } else { 0 }, - ptr: self.ptr, - storage: self.storage, - } - } -} - -impl From for [u8; USIZE_USIZE_USIZE_USIZE_] { - fn from(detail: Detail) -> Self { - unsafe { - std::slice::from_raw_parts( - (&detail as *const Detail) as *const u8, - USIZE_USIZE_USIZE_USIZE_, - ) - .try_into() - .unwrap() - } - } -} - -/// \struct AMchanges -/// \installed_headerfile -/// \brief A random-access iterator over a sequence of changes. -#[repr(C)] -#[derive(Eq, PartialEq)] -pub struct AMchanges { - /// An implementation detail that is intentionally opaque. - /// \warning Modifying \p detail will cause undefined behavior. - /// \note The actual size of \p detail will vary by platform, this is just - /// the one for the platform this documentation was built on. 
- detail: [u8; USIZE_USIZE_USIZE_USIZE_], -} - -impl AMchanges { - pub fn new(changes: &[am::Change], storage: &mut BTreeMap) -> Self { - Self { - detail: Detail::new(changes, 0, &mut *storage).into(), - } - } - - pub fn advance(&mut self, n: isize) { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.advance(n); - } - - pub fn len(&self) -> usize { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - detail.len - } - - pub fn next(&mut self, n: isize) -> Option<*const AMchange> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.next(n) - } - - pub fn prev(&mut self, n: isize) -> Option<*const AMchange> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.prev(n) - } - - pub fn reversed(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.reversed().into(), - } - } - - pub fn rewound(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.rewound().into(), - } - } -} - -impl AsRef<[am::Change]> for AMchanges { - fn as_ref(&self) -> &[am::Change] { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - unsafe { std::slice::from_raw_parts(detail.ptr as *const am::Change, detail.len) } - } -} - -impl Default for AMchanges { - fn default() -> Self { - Self { - detail: [0; USIZE_USIZE_USIZE_USIZE_], - } - } -} - -/// \memberof AMchanges -/// \brief Advances an iterator over a sequence of changes by at most \p |n| -/// positions where the sign of \p n is relative to the iterator's -/// direction. -/// -/// \param[in,out] changes A pointer to an `AMchanges` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \pre \p changes `!= NULL`. 
-/// \internal -/// -/// #Safety -/// changes must be a valid pointer to an AMchanges -#[no_mangle] -pub unsafe extern "C" fn AMchangesAdvance(changes: *mut AMchanges, n: isize) { - if let Some(changes) = changes.as_mut() { - changes.advance(n); - }; -} - -/// \memberof AMchanges -/// \brief Tests the equality of two sequences of changes underlying a pair of -/// iterators. -/// -/// \param[in] changes1 A pointer to an `AMchanges` struct. -/// \param[in] changes2 A pointer to an `AMchanges` struct. -/// \return `true` if \p changes1 `==` \p changes2 and `false` otherwise. -/// \pre \p changes1 `!= NULL`. -/// \pre \p changes2 `!= NULL`. -/// \internal -/// -/// #Safety -/// changes1 must be a valid pointer to an AMchanges -/// changes2 must be a valid pointer to an AMchanges -#[no_mangle] -pub unsafe extern "C" fn AMchangesEqual( - changes1: *const AMchanges, - changes2: *const AMchanges, -) -> bool { - match (changes1.as_ref(), changes2.as_ref()) { - (Some(changes1), Some(changes2)) => changes1.as_ref() == changes2.as_ref(), - (None, Some(_)) | (Some(_), None) | (None, None) => false, - } -} - -/// \memberof AMchanges -/// \brief Allocates an iterator over a sequence of changes and initializes it -/// from a sequence of byte spans. -/// -/// \param[in] src A pointer to an array of `AMbyteSpan` structs. -/// \param[in] count The number of `AMbyteSpan` structs to copy from \p src. -/// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. -/// \pre \p src `!= NULL`. -/// \pre `0 <` \p count `<= sizeof(`\p src`) / sizeof(AMbyteSpan)`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// src must be an AMbyteSpan array of size `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMchangesInit(src: *const AMbyteSpan, count: usize) -> *mut AMresult { - let mut changes = Vec::::new(); - for n in 0..count { - let byte_span = &*src.add(n); - let slice = std::slice::from_raw_parts(byte_span.src, byte_span.count); - match slice.try_into() { - Ok(change) => { - changes.push(change); - } - Err(e) => { - return to_result(Err::, am::LoadChangeError>(e)); - } - } - } - to_result(Ok::, am::LoadChangeError>(changes)) -} - -/// \memberof AMchanges -/// \brief Gets the change at the current position of an iterator over a -/// sequence of changes and then advances it by at most \p |n| positions -/// where the sign of \p n is relative to the iterator's direction. -/// -/// \param[in,out] changes A pointer to an `AMchanges` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A pointer to an `AMchange` struct that's `NULL` when \p changes was -/// previously advanced past its forward/reverse limit. -/// \pre \p changes `!= NULL`. -/// \internal -/// -/// #Safety -/// changes must be a valid pointer to an AMchanges -#[no_mangle] -pub unsafe extern "C" fn AMchangesNext(changes: *mut AMchanges, n: isize) -> *const AMchange { - if let Some(changes) = changes.as_mut() { - if let Some(change) = changes.next(n) { - return change; - } - } - std::ptr::null() -} - -/// \memberof AMchanges -/// \brief Advances an iterator over a sequence of changes by at most \p |n| -/// positions where the sign of \p n is relative to the iterator's -/// direction and then gets the change at its new position. -/// -/// \param[in,out] changes A pointer to an `AMchanges` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. 
-/// \return A pointer to an `AMchange` struct that's `NULL` when \p changes is -/// presently advanced past its forward/reverse limit. -/// \pre \p changes `!= NULL`. -/// \internal -/// -/// #Safety -/// changes must be a valid pointer to an AMchanges -#[no_mangle] -pub unsafe extern "C" fn AMchangesPrev(changes: *mut AMchanges, n: isize) -> *const AMchange { - if let Some(changes) = changes.as_mut() { - if let Some(change) = changes.prev(n) { - return change; - } - } - std::ptr::null() -} - -/// \memberof AMchanges -/// \brief Gets the size of the sequence of changes underlying an iterator. -/// -/// \param[in] changes A pointer to an `AMchanges` struct. -/// \return The count of values in \p changes. -/// \pre \p changes `!= NULL`. -/// \internal -/// -/// #Safety -/// changes must be a valid pointer to an AMchanges -#[no_mangle] -pub unsafe extern "C" fn AMchangesSize(changes: *const AMchanges) -> usize { - if let Some(changes) = changes.as_ref() { - changes.len() - } else { - 0 - } -} - -/// \memberof AMchanges -/// \brief Creates an iterator over the same sequence of changes as the given -/// one but with the opposite position and direction. -/// -/// \param[in] changes A pointer to an `AMchanges` struct. -/// \return An `AMchanges` struct. -/// \pre \p changes `!= NULL`. -/// \internal -/// -/// #Safety -/// changes must be a valid pointer to an AMchanges -#[no_mangle] -pub unsafe extern "C" fn AMchangesReversed(changes: *const AMchanges) -> AMchanges { - if let Some(changes) = changes.as_ref() { - changes.reversed() - } else { - AMchanges::default() - } -} - -/// \memberof AMchanges -/// \brief Creates an iterator at the starting position over the same sequence -/// of changes as the given one. -/// -/// \param[in] changes A pointer to an `AMchanges` struct. -/// \return An `AMchanges` struct -/// \pre \p changes `!= NULL`. 
-/// \internal -/// -/// #Safety -/// changes must be a valid pointer to an AMchanges -#[no_mangle] -pub unsafe extern "C" fn AMchangesRewound(changes: *const AMchanges) -> AMchanges { - if let Some(changes) = changes.as_ref() { - changes.rewound() - } else { - AMchanges::default() - } -} diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs deleted file mode 100644 index beaf7347..00000000 --- a/automerge-c/src/doc.rs +++ /dev/null @@ -1,833 +0,0 @@ -use automerge as am; -use automerge::transaction::{CommitOptions, Transactable}; -use std::ops::{Deref, DerefMut}; -use std::os::raw::c_char; - -use crate::actor_id::AMactorId; -use crate::change_hashes::AMchangeHashes; -use crate::obj::AMobjId; -use crate::result::{to_result, AMresult, AMvalue}; -use crate::sync::{to_sync_message, AMsyncMessage, AMsyncState}; - -pub mod list; -pub mod map; -pub mod utils; - -use crate::changes::AMchanges; -use crate::doc::utils::to_str; -use crate::doc::utils::{to_actor_id, to_doc, to_doc_mut, to_obj_id}; - -macro_rules! to_changes { - ($handle:expr) => {{ - let handle = $handle.as_ref(); - match handle { - Some(b) => b, - None => return AMresult::err("Invalid AMchanges pointer").into(), - } - }}; -} - -macro_rules! to_index { - ($index:expr, $len:expr, $param_name:expr) => {{ - if $index > $len && $index != usize::MAX { - return AMresult::err(&format!("Invalid {} {}", $param_name, $index)).into(); - } - std::cmp::min($index, $len) - }}; -} - -macro_rules! to_sync_state_mut { - ($handle:expr) => {{ - let handle = $handle.as_mut(); - match handle { - Some(b) => b, - None => return AMresult::err("Invalid AMsyncState pointer").into(), - } - }}; -} - -/// \struct AMdoc -/// \installed_headerfile -/// \brief A JSON-like CRDT. 
-#[derive(Clone)] -pub struct AMdoc(am::AutoCommit); - -impl AMdoc { - pub fn new(auto_commit: am::AutoCommit) -> Self { - Self(auto_commit) - } -} - -impl AsRef for AMdoc { - fn as_ref(&self) -> &am::AutoCommit { - &self.0 - } -} - -impl Deref for AMdoc { - type Target = am::AutoCommit; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl DerefMut for AMdoc { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} - -/// \memberof AMdoc -/// \brief Applies a sequence of changes to a document. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] changes A pointer to an `AMchanges` struct. -/// \pre \p doc `!= NULL`. -/// \pre \p changes `!= NULL`. -/// \return A pointer to an `AMresult` struct containing a void. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// changes must be a valid pointer to an AMchanges. -#[no_mangle] -pub unsafe extern "C" fn AMapplyChanges( - doc: *mut AMdoc, - changes: *const AMchanges, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let changes = to_changes!(changes); - to_result(doc.apply_changes(changes.as_ref().to_vec())) -} - -/// \memberof AMdoc -/// \brief Allocates storage for a document and initializes it by duplicating -/// the given document. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMdoc` struct. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMclone(doc: *const AMdoc) -> *mut AMresult { - let doc = to_doc!(doc); - to_result(doc.as_ref().clone()) -} - -/// \memberof AMdoc -/// \brief Allocates a new document and initializes it with defaults. -/// -/// \param[in] actor_id A pointer to an `AMactorId` struct or `NULL` for a -/// random one. -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMdoc` struct. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// -/// # Safety -/// actor_id must be a valid pointer to an AMactorId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMcreate(actor_id: *const AMactorId) -> *mut AMresult { - to_result(match actor_id.as_ref() { - Some(actor_id) => am::AutoCommit::new().with_actor(actor_id.as_ref().clone()), - None => am::AutoCommit::new(), - }) -} - -/// \memberof AMdoc -/// \brief Commits the current operations on a document with an optional -/// message and/or time override as seconds since the epoch. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] message A UTF-8 string or `NULL`. -/// \param[in] time A pointer to a `time_t` value or `NULL`. -/// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` -/// with one element. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMcommit( - doc: *mut AMdoc, - message: *const c_char, - time: *const libc::time_t, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let mut options = CommitOptions::default(); - if !message.is_null() { - options.set_message(to_str(message)); - } - if let Some(time) = time.as_ref() { - options.set_time(*time); - } - to_result(doc.commit_with(options)) -} - -/// \memberof AMdoc -/// \brief Tests the equality of two documents after closing their respective -/// transactions. -/// -/// \param[in,out] doc1 An `AMdoc` struct. -/// \param[in,out] doc2 An `AMdoc` struct. -/// \return `true` if \p doc1 `==` \p doc2 and `false` otherwise. -/// \pre \p doc1 `!= NULL`. -/// \pre \p doc2 `!= NULL`. -/// \internal -/// -/// #Safety -/// doc1 must be a valid pointer to an AMdoc -/// doc2 must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMequal(doc1: *mut AMdoc, doc2: *mut AMdoc) -> bool { - match (doc1.as_mut(), doc2.as_mut()) { - (Some(doc1), Some(doc2)) => doc1.document().get_heads() == doc2.document().get_heads(), - (None, Some(_)) | (Some(_), None) | (None, None) => false, - } -} - -/// \memberof AMdoc -/// \brief Forks this document at the current or a historical point for use by -/// a different actor. -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] heads A pointer to an `AMchangeHashes` struct for a historical -/// point or `NULL` for the current point. -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMdoc` struct. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMfork(doc: *mut AMdoc, heads: *const AMchangeHashes) -> *mut AMresult { - let doc = to_doc_mut!(doc); - match heads.as_ref() { - None => to_result(doc.fork()), - Some(heads) => to_result(doc.fork_at(heads.as_ref())), - } -} - -/// \memberof AMdoc -/// \brief Generates a synchronization message for a peer based upon the given -/// synchronization state. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in,out] sync_state A pointer to an `AMsyncState` struct. -/// \return A pointer to an `AMresult` struct containing either a pointer to an -/// `AMsyncMessage` struct or a void. -/// \pre \p doc `!= NULL`. -/// \pre \p sync_state `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// sync_state must be a valid pointer to an AMsyncState -#[no_mangle] -pub unsafe extern "C" fn AMgenerateSyncMessage( - doc: *mut AMdoc, - sync_state: *mut AMsyncState, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let sync_state = to_sync_state_mut!(sync_state); - to_result(doc.generate_sync_message(sync_state.as_mut())) -} - -/// \memberof AMdoc -/// \brief Gets a document's actor identifier. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMactorId` struct. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMgetActorId(doc: *const AMdoc) -> *mut AMresult { - let doc = to_doc!(doc); - to_result(Ok::( - doc.get_actor().clone(), - )) -} - -/// \memberof AMdoc -/// \brief Gets the change added to a document by its respective hash. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] src A pointer to an array of bytes. -/// \param[in] count The number of bytes in \p src. -/// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. -/// \pre \p doc `!= NULL`. -/// \pre \p src `!= NULL`. -/// \pre \p count `>= AM_CHANGE_HASH_SIZE`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// src must be a byte array of size `>= automerge::types::HASH_SIZE` -#[no_mangle] -pub unsafe extern "C" fn AMgetChangeByHash( - doc: *mut AMdoc, - src: *const u8, - count: usize, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let slice = std::slice::from_raw_parts(src, count); - match slice.try_into() { - Ok(change_hash) => to_result(doc.get_change_by_hash(&change_hash)), - Err(e) => AMresult::err(&e.to_string()).into(), - } -} - -/// \memberof AMdoc -/// \brief Gets the changes added to a document by their respective hashes. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] have_deps A pointer to an `AMchangeHashes` struct or `NULL`. -/// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMgetChanges( - doc: *mut AMdoc, - have_deps: *const AMchangeHashes, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let empty_deps = Vec::::new(); - let have_deps = match have_deps.as_ref() { - Some(have_deps) => have_deps.as_ref(), - None => &empty_deps, - }; - to_result(doc.get_changes(have_deps)) -} - -/// \memberof AMdoc -/// \brief Gets the changes added to a second document that weren't added to -/// a first document. -/// -/// \param[in,out] doc1 An `AMdoc` struct. -/// \param[in,out] doc2 An `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing an `AMchanges` struct. -/// \pre \p doc1 `!= NULL`. -/// \pre \p doc2 `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc1 must be a valid pointer to an AMdoc -/// doc2 must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMgetChangesAdded(doc1: *mut AMdoc, doc2: *mut AMdoc) -> *mut AMresult { - let doc1 = to_doc_mut!(doc1); - let doc2 = to_doc_mut!(doc2); - to_result(doc1.get_changes_added(doc2)) -} - -/// \memberof AMdoc -/// \brief Gets the current heads of a document. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` -/// struct. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMgetHeads(doc: *mut AMdoc) -> *mut AMresult { - let doc = to_doc_mut!(doc); - to_result(Ok::, am::AutomergeError>( - doc.get_heads(), - )) -} - -/// \memberof AMdoc -/// \brief Gets the hashes of the changes in a document that aren't transitive -/// dependencies of the given hashes of changes. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] heads A pointer to an `AMchangeHashes` struct or `NULL`. -/// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` -/// struct. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMgetMissingDeps( - doc: *mut AMdoc, - heads: *const AMchangeHashes, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let empty_heads = Vec::::new(); - let heads = match heads.as_ref() { - Some(heads) => heads.as_ref(), - None => &empty_heads, - }; - to_result(doc.get_missing_deps(heads)) -} - -/// \memberof AMdoc -/// \brief Gets the last change made to a document. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing either an `AMchange` -/// struct or a void. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMgetLastLocalChange(doc: *mut AMdoc) -> *mut AMresult { - let doc = to_doc_mut!(doc); - to_result(doc.get_last_local_change()) -} - -/// \memberof AMdoc -/// \brief Gets the current or historical keys of a map object. 
-/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] heads A pointer to an `AMchangeHashes` struct for historical -/// keys or `NULL` for current keys. -/// \return A pointer to an `AMresult` struct containing an `AMstrs` struct. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMkeys( - doc: *const AMdoc, - obj_id: *const AMobjId, - heads: *const AMchangeHashes, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - match heads.as_ref() { - None => to_result(doc.keys(obj_id)), - Some(heads) => to_result(doc.keys_at(obj_id, heads.as_ref())), - } -} - -/// \memberof AMdoc -/// \brief Allocates storage for a document and initializes it with the compact -/// form of an incremental save. -/// -/// \param[in] src A pointer to an array of bytes. -/// \param[in] count The number of bytes in \p src to load. -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMdoc` struct. -/// \pre \p src `!= NULL`. -/// \pre `0 <` \p count `<= sizeof(`\p src`)`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// src must be a byte array of size `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMload(src: *const u8, count: usize) -> *mut AMresult { - let mut data = Vec::new(); - data.extend_from_slice(std::slice::from_raw_parts(src, count)); - to_result(am::AutoCommit::load(&data)) -} - -/// \memberof AMdoc -/// \brief Loads the compact form of an incremental save into a document. 
-/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] src A pointer to an array of bytes. -/// \param[in] count The number of bytes in \p src to load. -/// \return A pointer to an `AMresult` struct containing the number of -/// operations loaded from \p src. -/// \pre \p doc `!= NULL`. -/// \pre \p src `!= NULL`. -/// \pre `0 <` \p count `<= sizeof(`\p src`)`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// src must be a byte array of size `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMloadIncremental( - doc: *mut AMdoc, - src: *const u8, - count: usize, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let mut data = Vec::new(); - data.extend_from_slice(std::slice::from_raw_parts(src, count)); - to_result(doc.load_incremental(&data)) -} - -/// \memberof AMdoc -/// \brief Applies all of the changes in \p src which are not in \p dest to -/// \p dest. -/// -/// \param[in,out] dest A pointer to an `AMdoc` struct. -/// \param[in,out] src A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing an `AMchangeHashes` -/// struct. -/// \pre \p dest `!= NULL`. -/// \pre \p src `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// dest must be a valid pointer to an AMdoc -/// src must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMmerge(dest: *mut AMdoc, src: *mut AMdoc) -> *mut AMresult { - let dest = to_doc_mut!(dest); - to_result(dest.merge(to_doc_mut!(src))) -} - -/// \memberof AMdoc -/// \brief Gets the current or historical size of an object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. 
-/// \param[in] heads A pointer to an `AMchangeHashes` struct for historical -/// size or `NULL` for current size. -/// \return A 64-bit unsigned integer. -/// \pre \p doc `!= NULL`. -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMobjSize( - doc: *const AMdoc, - obj_id: *const AMobjId, - heads: *const AMchangeHashes, -) -> usize { - if let Some(doc) = doc.as_ref() { - let obj_id = to_obj_id!(obj_id); - match heads.as_ref() { - None => doc.length(obj_id), - Some(heads) => doc.length_at(obj_id, heads.as_ref()), - } - } else { - 0 - } -} - -/// \memberof AMdoc -/// \brief Gets the current or historical values of an object within its entire -/// range. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] heads A pointer to an `AMchangeHashes` struct for historical -/// items or `NULL` for current items. -/// \return A pointer to an `AMresult` struct containing an `AMobjItems` struct. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMobjValues( - doc: *const AMdoc, - obj_id: *const AMobjId, - heads: *const AMchangeHashes, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - match heads.as_ref() { - None => to_result(doc.values(obj_id)), - Some(heads) => to_result(doc.values_at(obj_id, heads.as_ref())), - } -} - -/// \memberof AMdoc -/// \brief Gets the number of pending operations added during a document's -/// current transaction. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \return The count of pending operations for \p doc. -/// \pre \p doc `!= NULL`. -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMpendingOps(doc: *const AMdoc) -> usize { - if let Some(doc) = doc.as_ref() { - doc.pending_ops() - } else { - 0 - } -} - -/// \memberof AMdoc -/// \brief Receives a synchronization message from a peer based upon a given -/// synchronization state. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in,out] sync_state A pointer to an `AMsyncState` struct. -/// \param[in] sync_message A pointer to an `AMsyncMessage` struct. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p sync_state `!= NULL`. -/// \pre \p sync_message `!= NULL`. 
-/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// sync_state must be a valid pointer to an AMsyncState -/// sync_message must be a valid pointer to an AMsyncMessage -#[no_mangle] -pub unsafe extern "C" fn AMreceiveSyncMessage( - doc: *mut AMdoc, - sync_state: *mut AMsyncState, - sync_message: *const AMsyncMessage, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let sync_state = to_sync_state_mut!(sync_state); - let sync_message = to_sync_message!(sync_message); - to_result(doc.receive_sync_message(sync_state.as_mut(), sync_message.as_ref().clone())) -} - -/// \memberof AMdoc -/// \brief Cancels the pending operations added during a document's current -/// transaction and gets the number of cancellations. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \return The count of pending operations for \p doc that were cancelled. -/// \pre \p doc `!= NULL`. -/// \internal -/// -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMrollback(doc: *mut AMdoc) -> usize { - if let Some(doc) = doc.as_mut() { - doc.rollback() - } else { - 0 - } -} - -/// \memberof AMdoc -/// \brief Saves the entirety of a document into a compact form. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \return A pointer to an `AMresult` struct containing an array of bytes as -/// an `AMbyteSpan` struct. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMsave(doc: *mut AMdoc) -> *mut AMresult { - let doc = to_doc_mut!(doc); - to_result(Ok(doc.save())) -} - -/// \memberof AMdoc -/// \brief Saves the changes to a document since its last save into a compact -/// form. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. 
-/// \return A pointer to an `AMresult` struct containing an array of bytes as -/// an `AMbyteSpan` struct. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -#[no_mangle] -pub unsafe extern "C" fn AMsaveIncremental(doc: *mut AMdoc) -> *mut AMresult { - let doc = to_doc_mut!(doc); - to_result(Ok(doc.save_incremental())) -} - -/// \memberof AMdoc -/// \brief Puts the actor identifier of a document. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] actor_id A pointer to an `AMactorId` struct. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p actor_id `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// actor_id must be a valid pointer to an AMactorId -#[no_mangle] -pub unsafe extern "C" fn AMsetActorId( - doc: *mut AMdoc, - actor_id: *const AMactorId, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let actor_id = to_actor_id!(actor_id); - doc.set_actor(actor_id.as_ref().clone()); - to_result(Ok(())) -} - -/// \memberof AMdoc -/// \brief Splices values into and/or removes values from the identified object -/// at a given position within it. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] pos A position in the object identified by \p obj_id or -/// `SIZE_MAX` to indicate one past its end. -/// \param[in] del The number of characters to delete or `SIZE_MAX` to indicate -/// all of them. -/// \param[in] src A pointer to an array of `AMvalue` structs. -/// \param[in] count The number of `AMvalue` structs in \p src to load. 
-/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id`)` or \p pos `== SIZE_MAX`. -/// \pre `0 <=` \p del `<= AMobjSize(`\p obj_id`)` or \p del `== SIZE_MAX`. -/// \pre `(`\p src `!= NULL and 1 <=` \p count `<= sizeof(`\p src`)/ -/// sizeof(AMvalue)) or `\p src `== NULL or `\p count `== 0`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// src must be an AMvalue array of size `>= count` or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMsplice( - doc: *mut AMdoc, - obj_id: *const AMobjId, - pos: usize, - del: usize, - src: *const AMvalue, - count: usize, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let len = doc.length(obj_id); - let pos = to_index!(pos, len, "pos"); - let del = to_index!(del, len, "del"); - let mut vals: Vec = vec![]; - if !(src.is_null() || count == 0) { - let c_vals = std::slice::from_raw_parts(src, count); - for c_val in c_vals { - match c_val.try_into() { - Ok(s) => { - vals.push(s); - } - Err(e) => { - return AMresult::err(&e.to_string()).into(); - } - } - } - } - to_result(doc.splice(obj_id, pos, del, vals)) -} - -/// \memberof AMdoc -/// \brief Splices characters into and/or removes characters from the -/// identified object at a given position within it. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] pos A position in the text object identified by \p obj_id or -/// `SIZE_MAX` to indicate one past its end. -/// \param[in] del The number of characters to delete or `SIZE_MAX` to indicate -/// all of them. -/// \param[in] text A UTF-8 string. 
-/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id`)` or \p pos `== SIZE_MAX`. -/// \pre `0 <=` \p del `<= AMobjSize(`\p obj_id`)` or \p del `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// text must be a null-terminated array of `c_char` or NULL. -#[no_mangle] -pub unsafe extern "C" fn AMspliceText( - doc: *mut AMdoc, - obj_id: *const AMobjId, - pos: usize, - del: usize, - text: *const c_char, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let len = doc.length(obj_id); - let pos = to_index!(pos, len, "pos"); - let del = to_index!(del, len, "del"); - to_result(doc.splice_text(obj_id, pos, del, &to_str(text))) -} - -/// \memberof AMdoc -/// \brief Gets the current or historical string represented by a text object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] heads A pointer to an `AMchangeHashes` struct for historical -/// keys or `NULL` for current keys. -/// \return A pointer to an `AMresult` struct containing a UTF-8 string. -/// \pre \p doc `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMtext( - doc: *const AMdoc, - obj_id: *const AMobjId, - heads: *const AMchangeHashes, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - match heads.as_ref() { - None => to_result(doc.text(obj_id)), - Some(heads) => to_result(doc.text_at(obj_id, heads.as_ref())), - } -} diff --git a/automerge-c/src/doc/list.rs b/automerge-c/src/doc/list.rs deleted file mode 100644 index c8b160cb..00000000 --- a/automerge-c/src/doc/list.rs +++ /dev/null @@ -1,604 +0,0 @@ -use automerge as am; -use automerge::transaction::Transactable; -use std::os::raw::c_char; - -use crate::change_hashes::AMchangeHashes; -use crate::doc::{to_doc, to_doc_mut, to_obj_id, to_str, AMdoc}; -use crate::obj::{AMobjId, AMobjType}; -use crate::result::{to_result, AMresult}; - -pub mod item; -pub mod items; - -macro_rules! adjust { - ($index:expr, $insert:expr, $len:expr) => {{ - // An empty object can only be inserted into. - let insert = $insert || $len == 0; - let end = if insert { $len } else { $len - 1 }; - if $index > end && $index != usize::MAX { - return AMresult::err(&format!("Invalid index {}", $index)).into(); - } - (std::cmp::min($index, end), insert) - }}; -} - -macro_rules! to_range { - ($begin:expr, $end:expr) => {{ - if $begin > $end { - return AMresult::err(&format!("Invalid range [{}-{})", $begin, $end)).into(); - }; - ($begin..$end) - }}; -} - -/// \memberof AMdoc -/// \brief Deletes an index in a list object. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index. 
-/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistDelete( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (index, _) = adjust!(index, false, doc.length(obj_id)); - to_result(doc.delete(obj_id, index)) -} - -/// \memberof AMdoc -/// \brief Gets the current or historical value at an index in a list object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index. -/// \param[in] heads A pointer to an `AMchangeHashes` struct for a historical -/// value or `NULL` for the current value. -/// \return A pointer to an `AMresult` struct that doesn't contain a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistGet( - doc: *const AMdoc, - obj_id: *const AMobjId, - index: usize, - heads: *const AMchangeHashes, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - let (index, _) = adjust!(index, false, doc.length(obj_id)); - match heads.as_ref() { - None => to_result(doc.get(obj_id, index)), - Some(heads) => to_result(doc.get_at(obj_id, index, heads.as_ref())), - } -} - -/// \memberof AMdoc -/// \brief Gets all of the historical values at an index in a list object until -/// its current one or a specific one. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index. -/// \param[in] heads A pointer to an `AMchangeHashes` struct for a historical -/// last value or `NULL` for the current last value. -/// \return A pointer to an `AMresult` struct containing an `AMobjItems` struct. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistGetAll( - doc: *const AMdoc, - obj_id: *const AMobjId, - index: usize, - heads: *const AMchangeHashes, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - let (index, _) = adjust!(index, false, doc.length(obj_id)); - match heads.as_ref() { - None => to_result(doc.get_all(obj_id, index)), - Some(heads) => to_result(doc.get_all_at(obj_id, index, heads.as_ref())), - } -} - -/// \memberof AMdoc -/// \brief Increments a counter at an index in a list object by the given -/// value. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistIncrement( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, - value: i64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (index, _) = adjust!(index, false, doc.length(obj_id)); - to_result(doc.increment(obj_id, index, value)) -} - -/// \memberof AMdoc -/// \brief Puts a boolean as the value at an index in a list object. 
-/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index if \p insert -/// `== false` or one past its last index if \p insert -/// `== true`. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. -/// \param[in] value A boolean. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistPutBool( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, - insert: bool, - value: bool, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (index, insert) = adjust!(index, insert, doc.length(obj_id)); - let value = am::ScalarValue::Boolean(value); - to_result(if insert { - doc.insert(obj_id, index, value) - } else { - doc.put(obj_id, index, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts a sequence of bytes as the value at an index in a list object. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index if \p insert -/// `== false` or one past its last index if \p insert -/// `== true`. -/// \param[in] insert A flag to insert \p src before \p index instead of -/// writing \p src over \p index. -/// \param[in] src A pointer to an array of bytes. 
-/// \param[in] count The number of bytes to copy from \p src. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \pre \p src `!= NULL`. -/// \pre `0 <` \p count `<= sizeof(`\p src`)`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// src must be a byte array of size `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMlistPutBytes( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, - insert: bool, - src: *const u8, - count: usize, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (index, insert) = adjust!(index, insert, doc.length(obj_id)); - let mut value = Vec::new(); - value.extend_from_slice(std::slice::from_raw_parts(src, count)); - to_result(if insert { - doc.insert(obj_id, index, value) - } else { - doc.put(obj_id, index, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts a CRDT counter as the value at an index in a list object. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index if \p insert -/// `== false` or one past its last index if \p insert -/// `== true`. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. 
-/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistPutCounter( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, - insert: bool, - value: i64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (index, insert) = adjust!(index, insert, doc.length(obj_id)); - let value = am::ScalarValue::Counter(value.into()); - to_result(if insert { - doc.insert(obj_id, index, value) - } else { - doc.put(obj_id, index, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts a float as the value at an index in a list object. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index if \p insert -/// `== false` or one past its last index if \p insert -/// `== true`. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. -/// \param[in] value A 64-bit float. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistPutF64( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, - insert: bool, - value: f64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (index, insert) = adjust!(index, insert, doc.length(obj_id)); - to_result(if insert { - doc.insert(obj_id, index, value) - } else { - doc.put(obj_id, index, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts a signed integer as the value at an index in a list object. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index if \p insert -/// `== false` or one past its last index if \p insert -/// `== true`. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistPutInt( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, - insert: bool, - value: i64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (index, insert) = adjust!(index, insert, doc.length(obj_id)); - to_result(if insert { - doc.insert(obj_id, index, value) - } else { - doc.put(obj_id, index, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts null as the value at an index in a list object. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index if \p insert -/// `== false` or one past its last index if \p insert -/// `== true`. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistPutNull( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, - insert: bool, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (index, insert) = adjust!(index, insert, doc.length(obj_id)); - to_result(if insert { - doc.insert(obj_id, index, ()) - } else { - doc.put(obj_id, index, ()) - }) -} - -/// \memberof AMdoc -/// \brief Puts an empty object as the value at an index in a list object. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index if \p insert -/// `== false` or one past its last index if \p insert -/// `== true`. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. -/// \param[in] obj_type An `AMobjIdType` enum tag. -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMobjId` struct. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistPutObject( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, - insert: bool, - obj_type: AMobjType, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (index, insert) = adjust!(index, insert, doc.length(obj_id)); - let object = obj_type.into(); - to_result(if insert { - doc.insert_object(obj_id, index, object) - } else { - doc.put_object(obj_id, index, object) - }) -} - -/// \memberof AMdoc -/// \brief Puts a UTF-8 string as the value at an index in a list object. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index if \p insert -/// `== false` or one past its last index if \p insert -/// `== true`. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. -/// \param[in] value A UTF-8 string. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \pre \p value `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// value must be a null-terminated array of `c_char` -#[no_mangle] -pub unsafe extern "C" fn AMlistPutStr( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, - insert: bool, - value: *const c_char, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (index, insert) = adjust!(index, insert, doc.length(obj_id)); - let value = to_str(value); - to_result(if insert { - doc.insert(obj_id, index, value) - } else { - doc.put(obj_id, index, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts a Lamport timestamp as the value at an index in a list object. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index if \p insert -/// `== false` or one past its last index if \p insert -/// `== true`. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistPutTimestamp( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, - insert: bool, - value: i64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (index, insert) = adjust!(index, insert, doc.length(obj_id)); - let value = am::ScalarValue::Timestamp(value); - to_result(if insert { - doc.insert(obj_id, index, value) - } else { - doc.put(obj_id, index, value) - }) -} - -/// \memberof AMdoc -/// \brief Puts an unsigned integer as the value at an index in a list object. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] index An index in the list object identified by \p obj_id or -/// `SIZE_MAX` to indicate its last index if \p insert -/// `== false` or one past its last index if \p insert -/// `== true`. -/// \param[in] insert A flag to insert \p value before \p index instead of -/// writing \p value over \p index. -/// \param[in] value A 64-bit unsigned integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre `0 <=` \p index `<= AMobjSize(`\p obj_id`)` or \p index `== SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistPutUint( - doc: *mut AMdoc, - obj_id: *const AMobjId, - index: usize, - insert: bool, - value: u64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let obj_id = to_obj_id!(obj_id); - let (index, insert) = adjust!(index, insert, doc.length(obj_id)); - to_result(if insert { - doc.insert(obj_id, index, value) - } else { - doc.put(obj_id, index, value) - }) -} - -/// \memberof AMdoc -/// \brief Gets the current or historical indices and values of the list object -/// within the given range. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] begin The first index in a range of indices. -/// \param[in] end At least one past the last index in a range of indices. -/// \param[in] heads A pointer to an `AMchangeHashes` struct for historical -/// indices and values or `NULL` for current indices and -/// values. -/// \return A pointer to an `AMresult` struct containing an `AMlistItems` -/// struct. -/// \pre \p doc `!= NULL`. -/// \pre \p begin `<=` \p end `<= SIZE_MAX`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMlistRange( - doc: *const AMdoc, - obj_id: *const AMobjId, - begin: usize, - end: usize, - heads: *const AMchangeHashes, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - let range = to_range!(begin, end); - match heads.as_ref() { - None => to_result(doc.list_range(obj_id, range)), - Some(heads) => to_result(doc.list_range_at(obj_id, range, heads.as_ref())), - } -} diff --git a/automerge-c/src/doc/list/item.rs b/automerge-c/src/doc/list/item.rs deleted file mode 100644 index fcd6281d..00000000 --- a/automerge-c/src/doc/list/item.rs +++ /dev/null @@ -1,100 +0,0 @@ -use automerge as am; -use std::cell::RefCell; -use std::ffi::CString; - -use crate::obj::AMobjId; -use crate::result::AMvalue; - -/// \struct AMlistItem -/// \installed_headerfile -/// \brief An item in a list object. -#[repr(C)] -pub struct AMlistItem { - /// The index of an item in a list object. - index: usize, - /// The object identifier of an item in a list object. - obj_id: AMobjId, - /// The value of an item in a list object. 
- value: (am::Value<'static>, RefCell>), -} - -impl AMlistItem { - pub fn new(index: usize, value: am::Value<'static>, obj_id: am::ObjId) -> Self { - Self { - index, - obj_id: AMobjId::new(obj_id), - value: (value, Default::default()), - } - } -} - -impl PartialEq for AMlistItem { - fn eq(&self, other: &Self) -> bool { - self.index == other.index && self.obj_id == other.obj_id && self.value.0 == other.value.0 - } -} - -/* -impl From<&AMlistItem> for (usize, am::Value<'static>, am::ObjId) { - fn from(list_item: &AMlistItem) -> Self { - (list_item.index, list_item.value.0.clone(), list_item.obj_id.as_ref().clone()) - } -} -*/ - -/// \memberof AMlistItem -/// \brief Gets the index of an item in a list object. -/// -/// \param[in] list_item A pointer to an `AMlistItem` struct. -/// \return A 64-bit unsigned integer. -/// \pre \p list_item `!= NULL`. -/// \internal -/// -/// # Safety -/// list_item must be a valid pointer to an AMlistItem -#[no_mangle] -pub unsafe extern "C" fn AMlistItemIndex(list_item: *const AMlistItem) -> usize { - if let Some(list_item) = list_item.as_ref() { - list_item.index - } else { - usize::MAX - } -} - -/// \memberof AMlistItem -/// \brief Gets the object identifier of an item in a list object. -/// -/// \param[in] list_item A pointer to an `AMlistItem` struct. -/// \return A pointer to an `AMobjId` struct. -/// \pre \p list_item `!= NULL`. -/// \internal -/// -/// # Safety -/// list_item must be a valid pointer to an AMlistItem -#[no_mangle] -pub unsafe extern "C" fn AMlistItemObjId(list_item: *const AMlistItem) -> *const AMobjId { - if let Some(list_item) = list_item.as_ref() { - &list_item.obj_id - } else { - std::ptr::null() - } -} - -/// \memberof AMlistItem -/// \brief Gets the value of an item in a list object. -/// -/// \param[in] list_item A pointer to an `AMlistItem` struct. -/// \return An `AMvalue` struct. -/// \pre \p list_item `!= NULL`. 
-/// \internal -/// -/// # Safety -/// list_item must be a valid pointer to an AMlistItem -#[no_mangle] -pub unsafe extern "C" fn AMlistItemValue<'a>(list_item: *const AMlistItem) -> AMvalue<'a> { - if let Some(list_item) = list_item.as_ref() { - (&list_item.value.0, &list_item.value.1).into() - } else { - AMvalue::Void - } -} diff --git a/automerge-c/src/doc/list/items.rs b/automerge-c/src/doc/list/items.rs deleted file mode 100644 index aa676c4a..00000000 --- a/automerge-c/src/doc/list/items.rs +++ /dev/null @@ -1,348 +0,0 @@ -use std::ffi::c_void; -use std::mem::size_of; - -use crate::doc::list::item::AMlistItem; - -#[repr(C)] -struct Detail { - len: usize, - offset: isize, - ptr: *const c_void, -} - -/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call -/// (https://github.com/eqrion/cbindgen/issues/252) but it will -/// propagate the name of a constant initialized from it so if the -/// constant's name is a symbolic representation of the value it can be -/// converted into a number by post-processing the header it generated. -pub const USIZE_USIZE_USIZE_: usize = size_of::(); - -impl Detail { - fn new(list_items: &[AMlistItem], offset: isize) -> Self { - Self { - len: list_items.len(), - offset, - ptr: list_items.as_ptr() as *const c_void, - } - } - - pub fn advance(&mut self, n: isize) { - if n == 0 { - return; - } - let len = self.len as isize; - self.offset = if self.offset < 0 { - // It's reversed. - let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); - if unclipped >= 0 { - // Clip it to the forward stop. - len - } else { - std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) - } - } else { - let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); - if unclipped < 0 { - // Clip it to the reverse stop. 
- -(len + 1) - } else { - std::cmp::max(0, std::cmp::min(unclipped, len)) - } - } - } - - pub fn get_index(&self) -> usize { - (self.offset - + if self.offset < 0 { - self.len as isize - } else { - 0 - }) as usize - } - - pub fn next(&mut self, n: isize) -> Option<&AMlistItem> { - if self.is_stopped() { - return None; - } - let slice: &[AMlistItem] = - unsafe { std::slice::from_raw_parts(self.ptr as *const AMlistItem, self.len) }; - let value = &slice[self.get_index()]; - self.advance(n); - Some(value) - } - - pub fn is_stopped(&self) -> bool { - let len = self.len as isize; - self.offset < -len || self.offset == len - } - - pub fn prev(&mut self, n: isize) -> Option<&AMlistItem> { - self.advance(-n); - if self.is_stopped() { - return None; - } - let slice: &[AMlistItem] = - unsafe { std::slice::from_raw_parts(self.ptr as *const AMlistItem, self.len) }; - Some(&slice[self.get_index()]) - } - - pub fn reversed(&self) -> Self { - Self { - len: self.len, - offset: -(self.offset + 1), - ptr: self.ptr, - } - } - - pub fn rewound(&self) -> Self { - Self { - len: self.len, - offset: if self.offset < 0 { -1 } else { 0 }, - ptr: self.ptr, - } - } -} - -impl From for [u8; USIZE_USIZE_USIZE_] { - fn from(detail: Detail) -> Self { - unsafe { - std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) - .try_into() - .unwrap() - } - } -} - -/// \struct AMlistItems -/// \installed_headerfile -/// \brief A random-access iterator over a sequence of list object items. -#[repr(C)] -#[derive(Eq, PartialEq)] -pub struct AMlistItems { - /// An implementation detail that is intentionally opaque. - /// \warning Modifying \p detail will cause undefined behavior. - /// \note The actual size of \p detail will vary by platform, this is just - /// the one for the platform this documentation was built on. 
- detail: [u8; USIZE_USIZE_USIZE_], -} - -impl AMlistItems { - pub fn new(list_items: &[AMlistItem]) -> Self { - Self { - detail: Detail::new(list_items, 0).into(), - } - } - - pub fn advance(&mut self, n: isize) { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.advance(n); - } - - pub fn len(&self) -> usize { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - detail.len - } - - pub fn next(&mut self, n: isize) -> Option<&AMlistItem> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.next(n) - } - - pub fn prev(&mut self, n: isize) -> Option<&AMlistItem> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.prev(n) - } - - pub fn reversed(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.reversed().into(), - } - } - - pub fn rewound(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.rewound().into(), - } - } -} - -impl AsRef<[AMlistItem]> for AMlistItems { - fn as_ref(&self) -> &[AMlistItem] { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - unsafe { std::slice::from_raw_parts(detail.ptr as *const AMlistItem, detail.len) } - } -} - -impl Default for AMlistItems { - fn default() -> Self { - Self { - detail: [0; USIZE_USIZE_USIZE_], - } - } -} - -/// \memberof AMlistItems -/// \brief Advances an iterator over a sequence of list object items by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction. -/// -/// \param[in,out] list_items A pointer to an `AMlistItems` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \pre \p list_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// list_items must be a valid pointer to an AMlistItems -#[no_mangle] -pub unsafe extern "C" fn AMlistItemsAdvance(list_items: *mut AMlistItems, n: isize) { - if let Some(list_items) = list_items.as_mut() { - list_items.advance(n); - }; -} - -/// \memberof AMlistItems -/// \brief Tests the equality of two sequences of list object items underlying -/// a pair of iterators. -/// -/// \param[in] list_items1 A pointer to an `AMlistItems` struct. -/// \param[in] list_items2 A pointer to an `AMlistItems` struct. -/// \return `true` if \p list_items1 `==` \p list_items2 and `false` otherwise. -/// \pre \p list_items1 `!= NULL`. -/// \pre \p list_items2 `!= NULL`. -/// \internal -/// -/// #Safety -/// list_items1 must be a valid pointer to an AMlistItems -/// list_items2 must be a valid pointer to an AMlistItems -#[no_mangle] -pub unsafe extern "C" fn AMlistItemsEqual( - list_items1: *const AMlistItems, - list_items2: *const AMlistItems, -) -> bool { - match (list_items1.as_ref(), list_items2.as_ref()) { - (Some(list_items1), Some(list_items2)) => list_items1.as_ref() == list_items2.as_ref(), - (None, Some(_)) | (Some(_), None) | (None, None) => false, - } -} - -/// \memberof AMlistItems -/// \brief Gets the list object item at the current position of an iterator -/// over a sequence of list object items and then advances it by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction. -/// -/// \param[in,out] list_items A pointer to an `AMlistItems` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A pointer to an `AMlistItem` struct that's `NULL` when -/// \p list_items was previously advanced past its forward/reverse -/// limit. -/// \pre \p list_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// list_items must be a valid pointer to an AMlistItems -#[no_mangle] -pub unsafe extern "C" fn AMlistItemsNext( - list_items: *mut AMlistItems, - n: isize, -) -> *const AMlistItem { - if let Some(list_items) = list_items.as_mut() { - if let Some(list_item) = list_items.next(n) { - return list_item; - } - } - std::ptr::null() -} - -/// \memberof AMlistItems -/// \brief Advances an iterator over a sequence of list object items by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction and then gets the list object item at its new -/// position. -/// -/// \param[in,out] list_items A pointer to an `AMlistItems` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A pointer to an `AMlistItem` struct that's `NULL` when -/// \p list_items is presently advanced past its forward/reverse limit. -/// \pre \p list_items `!= NULL`. -/// \internal -/// -/// #Safety -/// list_items must be a valid pointer to an AMlistItems -#[no_mangle] -pub unsafe extern "C" fn AMlistItemsPrev( - list_items: *mut AMlistItems, - n: isize, -) -> *const AMlistItem { - if let Some(list_items) = list_items.as_mut() { - if let Some(list_item) = list_items.prev(n) { - return list_item; - } - } - std::ptr::null() -} - -/// \memberof AMlistItems -/// \brief Gets the size of the sequence of list object items underlying an -/// iterator. -/// -/// \param[in] list_items A pointer to an `AMlistItems` struct. -/// \return The count of values in \p list_items. -/// \pre \p list_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// list_items must be a valid pointer to an AMlistItems -#[no_mangle] -pub unsafe extern "C" fn AMlistItemsSize(list_items: *const AMlistItems) -> usize { - if let Some(list_items) = list_items.as_ref() { - list_items.len() - } else { - 0 - } -} - -/// \memberof AMlistItems -/// \brief Creates an iterator over the same sequence of list object items as -/// the given one but with the opposite position and direction. -/// -/// \param[in] list_items A pointer to an `AMlistItems` struct. -/// \return An `AMlistItems` struct -/// \pre \p list_items `!= NULL`. -/// \internal -/// -/// #Safety -/// list_items must be a valid pointer to an AMlistItems -#[no_mangle] -pub unsafe extern "C" fn AMlistItemsReversed(list_items: *const AMlistItems) -> AMlistItems { - if let Some(list_items) = list_items.as_ref() { - list_items.reversed() - } else { - AMlistItems::default() - } -} - -/// \memberof AMlistItems -/// \brief Creates an iterator at the starting position over the same sequence -/// of list object items as the given one. -/// -/// \param[in] list_items A pointer to an `AMlistItems` struct. -/// \return An `AMlistItems` struct -/// \pre \p list_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// list_items must be a valid pointer to an AMlistItems -#[no_mangle] -pub unsafe extern "C" fn AMlistItemsRewound(list_items: *const AMlistItems) -> AMlistItems { - if let Some(list_items) = list_items.as_ref() { - list_items.rewound() - } else { - AMlistItems::default() - } -} diff --git a/automerge-c/src/doc/map.rs b/automerge-c/src/doc/map.rs deleted file mode 100644 index 4b2b6cc2..00000000 --- a/automerge-c/src/doc/map.rs +++ /dev/null @@ -1,506 +0,0 @@ -use automerge as am; -use automerge::transaction::Transactable; -use std::os::raw::c_char; - -use crate::change_hashes::AMchangeHashes; -use crate::doc::utils::to_str; -use crate::doc::{to_doc, to_doc_mut, to_obj_id, AMdoc}; -use crate::obj::{AMobjId, AMobjType}; -use crate::result::{to_result, AMresult}; - -pub mod item; -pub mod items; - -/// \memberof AMdoc -/// \brief Deletes a key in a map object. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used -#[no_mangle] -pub unsafe extern "C" fn AMmapDelete( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - to_result(doc.delete(to_obj_id!(obj_id), to_str(key))) -} - -/// \memberof AMdoc -/// \brief Gets the current or historical value for a key in a map object. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. 
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by -/// \p obj_id. -/// \param[in] heads A pointer to an `AMchangeHashes` struct for a historical -/// value or `NULL` for the current value. -/// \return A pointer to an `AMresult` struct that doesn't contain a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMmapGet( - doc: *const AMdoc, - obj_id: *const AMobjId, - key: *const c_char, - heads: *const AMchangeHashes, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - match heads.as_ref() { - None => to_result(doc.get(obj_id, to_str(key))), - Some(heads) => to_result(doc.get_at(obj_id, to_str(key), heads.as_ref())), - } -} - -/// \memberof AMdoc -/// \brief Gets all of the historical values for a key in a map object until -/// its current one or a specific one. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by -/// \p obj_id. -/// \param[in] heads A pointer to an `AMchangeHashes` struct for a historical -/// last value or `NULL` for the current last value. -/// \return A pointer to an `AMresult` struct containing an `AMobjItems` struct. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMmapGetAll( - doc: *const AMdoc, - obj_id: *const AMobjId, - key: *const c_char, - heads: *const AMchangeHashes, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - match heads.as_ref() { - None => to_result(doc.get_all(obj_id, to_str(key))), - Some(heads) => to_result(doc.get_all_at(obj_id, to_str(key), heads.as_ref())), - } -} - -/// \memberof AMdoc -/// \brief Increments a counter for a key in a map object by the given value. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used -#[no_mangle] -pub unsafe extern "C" fn AMmapIncrement( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, - value: i64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - to_result(doc.increment(to_obj_id!(obj_id), to_str(key), value)) -} - -/// \memberof AMdoc -/// \brief Puts a boolean as the value of a key in a map object. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. 
-/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \param[in] value A boolean. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used -#[no_mangle] -pub unsafe extern "C" fn AMmapPutBool( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, - value: bool, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) -} - -/// \memberof AMdoc -/// \brief Puts a sequence of bytes as the value of a key in a map object. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \param[in] src A pointer to an array of bytes. -/// \param[in] count The number of bytes to copy from \p src. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \pre \p src `!= NULL`. -/// \pre `0 <` \p count `<= sizeof(`\p src`)`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used -/// src must be a byte array of size `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMmapPutBytes( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, - src: *const u8, - count: usize, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - let mut vec = Vec::new(); - vec.extend_from_slice(std::slice::from_raw_parts(src, count)); - to_result(doc.put(to_obj_id!(obj_id), to_str(key), vec)) -} - -/// \memberof AMdoc -/// \brief Puts a CRDT counter as the value of a key in a map object. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used -#[no_mangle] -pub unsafe extern "C" fn AMmapPutCounter( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, - value: i64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - to_result(doc.put( - to_obj_id!(obj_id), - to_str(key), - am::ScalarValue::Counter(value.into()), - )) -} - -/// \memberof AMdoc -/// \brief Puts null as the value of a key in a map object. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. 
-/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used -#[no_mangle] -pub unsafe extern "C" fn AMmapPutNull( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - to_result(doc.put(to_obj_id!(obj_id), to_str(key), ())) -} - -/// \memberof AMdoc -/// \brief Puts an empty object as the value of a key in a map object. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \param[in] obj_type An `AMobjIdType` enum tag. -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMobjId` struct. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used -#[no_mangle] -pub unsafe extern "C" fn AMmapPutObject( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, - obj_type: AMobjType, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - to_result(doc.put_object(to_obj_id!(obj_id), to_str(key), obj_type.into())) -} - -/// \memberof AMdoc -/// \brief Puts a float as the value of a key in a map object. 
-/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \param[in] value A 64-bit float. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used -#[no_mangle] -pub unsafe extern "C" fn AMmapPutF64( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, - value: f64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) -} - -/// \memberof AMdoc -/// \brief Puts a signed integer as the value of a key in a map object. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \param[in] value A 64-bit signed integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used -#[no_mangle] -pub unsafe extern "C" fn AMmapPutInt( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, - value: i64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) -} - -/// \memberof AMdoc -/// \brief Puts a UTF-8 string as the value of a key in a map object. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \param[in] value A UTF-8 string. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \pre \p value `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used -/// value must be a null-terminated array of `c_char` -#[no_mangle] -pub unsafe extern "C" fn AMmapPutStr( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, - value: *const c_char, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - to_result(doc.put(to_obj_id!(obj_id), to_str(key), to_str(value))) -} - -/// \memberof AMdoc -/// \brief Puts a Lamport timestamp as the value of a key in a map object. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \param[in] value A 64-bit signed integer. 
-/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used -#[no_mangle] -pub unsafe extern "C" fn AMmapPutTimestamp( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, - value: i64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - to_result(doc.put( - to_obj_id!(obj_id), - to_str(key), - am::ScalarValue::Timestamp(value), - )) -} - -/// \memberof AMdoc -/// \brief Puts an unsigned integer as the value of a key in a map object. -/// -/// \param[in,out] doc A pointer to an `AMdoc` struct. -/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] key A UTF-8 string key for the map object identified by \p obj_id. -/// \param[in] value A 64-bit unsigned integer. -/// \return A pointer to an `AMresult` struct containing a void. -/// \pre \p doc `!= NULL`. -/// \pre \p key `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// key must be a c string of the map key to be used -#[no_mangle] -pub unsafe extern "C" fn AMmapPutUint( - doc: *mut AMdoc, - obj_id: *const AMobjId, - key: *const c_char, - value: u64, -) -> *mut AMresult { - let doc = to_doc_mut!(doc); - to_result(doc.put(to_obj_id!(obj_id), to_str(key), value)) -} - -/// \memberof AMdoc -/// \brief Gets the current or historical keys and values of the map object -/// within the given range. -/// -/// \param[in] doc A pointer to an `AMdoc` struct. 
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. -/// \param[in] begin The first key in a subrange or `NULL` to indicate the -/// absolute first key. -/// \param[in] end The key one past the last key in a subrange or `NULL` to -/// indicate one past the absolute last key. -/// \param[in] heads A pointer to an `AMchangeHashes` struct for historical -/// keys and values or `NULL` for current keys and values. -/// \return A pointer to an `AMresult` struct containing an `AMmapItems` -/// struct. -/// \pre \p doc `!= NULL`. -/// \pre `strcmp(`\p begin, \p end`) != 1` if \p begin `!= NULL` and \p end `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. -/// \internal -/// # Safety -/// doc must be a valid pointer to an AMdoc -/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() -/// heads must be a valid pointer to an AMchangeHashes or std::ptr::null() -#[no_mangle] -pub unsafe extern "C" fn AMmapRange( - doc: *const AMdoc, - obj_id: *const AMobjId, - begin: *const c_char, - end: *const c_char, - heads: *const AMchangeHashes, -) -> *mut AMresult { - let doc = to_doc!(doc); - let obj_id = to_obj_id!(obj_id); - match (begin.as_ref(), end.as_ref()) { - (Some(_), Some(_)) => { - let (begin, end) = (to_str(begin), to_str(end)); - if begin > end { - return AMresult::err(&format!("Invalid range [{}-{})", begin, end)).into(); - }; - let bounds = begin..end; - if let Some(heads) = heads.as_ref() { - to_result(doc.map_range_at(obj_id, bounds, heads.as_ref())) - } else { - to_result(doc.map_range(obj_id, bounds)) - } - } - (Some(_), None) => { - let bounds = to_str(begin)..; - if let Some(heads) = heads.as_ref() { - to_result(doc.map_range_at(obj_id, bounds, heads.as_ref())) - } else { - to_result(doc.map_range(obj_id, bounds)) - } - } - (None, Some(_)) => { - let bounds = ..to_str(end); - if let Some(heads) = heads.as_ref() { - to_result(doc.map_range_at(obj_id, 
bounds, heads.as_ref())) - } else { - to_result(doc.map_range(obj_id, bounds)) - } - } - (None, None) => { - let bounds = ..; - if let Some(heads) = heads.as_ref() { - to_result(doc.map_range_at(obj_id, bounds, heads.as_ref())) - } else { - to_result(doc.map_range(obj_id, bounds)) - } - } - } -} diff --git a/automerge-c/src/doc/map/item.rs b/automerge-c/src/doc/map/item.rs deleted file mode 100644 index 0d10f3c3..00000000 --- a/automerge-c/src/doc/map/item.rs +++ /dev/null @@ -1,101 +0,0 @@ -use automerge as am; -use std::cell::RefCell; -use std::ffi::CString; -use std::os::raw::c_char; - -use crate::obj::AMobjId; -use crate::result::AMvalue; - -/// \struct AMmapItem -/// \installed_headerfile -/// \brief An item in a map object. -#[repr(C)] -pub struct AMmapItem { - /// The key of an item in a map object. - key: CString, - /// The object identifier of an item in a map object. - obj_id: AMobjId, - /// The value of an item in a map object. - value: (am::Value<'static>, RefCell>), -} - -impl AMmapItem { - pub fn new(key: &'static str, value: am::Value<'static>, obj_id: am::ObjId) -> Self { - Self { - key: CString::new(key).unwrap(), - obj_id: AMobjId::new(obj_id), - value: (value, Default::default()), - } - } -} - -impl PartialEq for AMmapItem { - fn eq(&self, other: &Self) -> bool { - self.key == other.key && self.obj_id == other.obj_id && self.value.0 == other.value.0 - } -} - -/* -impl From<&AMmapItem> for (String, am::Value<'static>, am::ObjId) { - fn from(map_item: &AMmapItem) -> Self { - (map_item.key.into_string().unwrap(), map_item.value.0.clone(), map_item.obj_id.as_ref().clone()) - } -} -*/ - -/// \memberof AMmapItem -/// \brief Gets the key of an item in a map object. -/// -/// \param[in] map_item A pointer to an `AMmapItem` struct. -/// \return A 64-bit unsigned integer. -/// \pre \p map_item `!= NULL`. 
-/// \internal -/// -/// # Safety -/// map_item must be a valid pointer to an AMmapItem -#[no_mangle] -pub unsafe extern "C" fn AMmapItemKey(map_item: *const AMmapItem) -> *const c_char { - if let Some(map_item) = map_item.as_ref() { - map_item.key.as_ptr() - } else { - std::ptr::null() - } -} - -/// \memberof AMmapItem -/// \brief Gets the object identifier of an item in a map object. -/// -/// \param[in] map_item A pointer to an `AMmapItem` struct. -/// \return A pointer to an `AMobjId` struct. -/// \pre \p map_item `!= NULL`. -/// \internal -/// -/// # Safety -/// map_item must be a valid pointer to an AMmapItem -#[no_mangle] -pub unsafe extern "C" fn AMmapItemObjId(map_item: *const AMmapItem) -> *const AMobjId { - if let Some(map_item) = map_item.as_ref() { - &map_item.obj_id - } else { - std::ptr::null() - } -} - -/// \memberof AMmapItem -/// \brief Gets the value of an item in a map object. -/// -/// \param[in] map_item A pointer to an `AMmapItem` struct. -/// \return An `AMvalue` struct. -/// \pre \p map_item `!= NULL`. 
-/// \internal -/// -/// # Safety -/// map_item must be a valid pointer to an AMmapItem -#[no_mangle] -pub unsafe extern "C" fn AMmapItemValue<'a>(map_item: *const AMmapItem) -> AMvalue<'a> { - if let Some(map_item) = map_item.as_ref() { - (&map_item.value.0, &map_item.value.1).into() - } else { - AMvalue::Void - } -} diff --git a/automerge-c/src/doc/map/items.rs b/automerge-c/src/doc/map/items.rs deleted file mode 100644 index b1f046b1..00000000 --- a/automerge-c/src/doc/map/items.rs +++ /dev/null @@ -1,340 +0,0 @@ -use std::ffi::c_void; -use std::mem::size_of; - -use crate::doc::map::item::AMmapItem; - -#[repr(C)] -struct Detail { - len: usize, - offset: isize, - ptr: *const c_void, -} - -/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call -/// (https://github.com/eqrion/cbindgen/issues/252) but it will -/// propagate the name of a constant initialized from it so if the -/// constant's name is a symbolic representation of the value it can be -/// converted into a number by post-processing the header it generated. -pub const USIZE_USIZE_USIZE_: usize = size_of::(); - -impl Detail { - fn new(map_items: &[AMmapItem], offset: isize) -> Self { - Self { - len: map_items.len(), - offset, - ptr: map_items.as_ptr() as *const c_void, - } - } - - pub fn advance(&mut self, n: isize) { - if n == 0 { - return; - } - let len = self.len as isize; - self.offset = if self.offset < 0 { - // It's reversed. - let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); - if unclipped >= 0 { - // Clip it to the forward stop. - len - } else { - std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) - } - } else { - let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); - if unclipped < 0 { - // Clip it to the reverse stop. 
- -(len + 1) - } else { - std::cmp::max(0, std::cmp::min(unclipped, len)) - } - } - } - - pub fn get_index(&self) -> usize { - (self.offset - + if self.offset < 0 { - self.len as isize - } else { - 0 - }) as usize - } - - pub fn next(&mut self, n: isize) -> Option<&AMmapItem> { - if self.is_stopped() { - return None; - } - let slice: &[AMmapItem] = - unsafe { std::slice::from_raw_parts(self.ptr as *const AMmapItem, self.len) }; - let value = &slice[self.get_index()]; - self.advance(n); - Some(value) - } - - pub fn is_stopped(&self) -> bool { - let len = self.len as isize; - self.offset < -len || self.offset == len - } - - pub fn prev(&mut self, n: isize) -> Option<&AMmapItem> { - self.advance(-n); - if self.is_stopped() { - return None; - } - let slice: &[AMmapItem] = - unsafe { std::slice::from_raw_parts(self.ptr as *const AMmapItem, self.len) }; - Some(&slice[self.get_index()]) - } - - pub fn reversed(&self) -> Self { - Self { - len: self.len, - offset: -(self.offset + 1), - ptr: self.ptr, - } - } - - pub fn rewound(&self) -> Self { - Self { - len: self.len, - offset: if self.offset < 0 { -1 } else { 0 }, - ptr: self.ptr, - } - } -} - -impl From for [u8; USIZE_USIZE_USIZE_] { - fn from(detail: Detail) -> Self { - unsafe { - std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) - .try_into() - .unwrap() - } - } -} - -/// \struct AMmapItems -/// \installed_headerfile -/// \brief A random-access iterator over a sequence of map object items. -#[repr(C)] -#[derive(Eq, PartialEq)] -pub struct AMmapItems { - /// An implementation detail that is intentionally opaque. - /// \warning Modifying \p detail will cause undefined behavior. - /// \note The actual size of \p detail will vary by platform, this is just - /// the one for the platform this documentation was built on. 
- detail: [u8; USIZE_USIZE_USIZE_], -} - -impl AMmapItems { - pub fn new(map_items: &[AMmapItem]) -> Self { - Self { - detail: Detail::new(map_items, 0).into(), - } - } - - pub fn advance(&mut self, n: isize) { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.advance(n); - } - - pub fn len(&self) -> usize { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - detail.len - } - - pub fn next(&mut self, n: isize) -> Option<&AMmapItem> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.next(n) - } - - pub fn prev(&mut self, n: isize) -> Option<&AMmapItem> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.prev(n) - } - - pub fn reversed(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.reversed().into(), - } - } - - pub fn rewound(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.rewound().into(), - } - } -} - -impl AsRef<[AMmapItem]> for AMmapItems { - fn as_ref(&self) -> &[AMmapItem] { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - unsafe { std::slice::from_raw_parts(detail.ptr as *const AMmapItem, detail.len) } - } -} - -impl Default for AMmapItems { - fn default() -> Self { - Self { - detail: [0; USIZE_USIZE_USIZE_], - } - } -} - -/// \memberof AMmapItems -/// \brief Advances an iterator over a sequence of map object items by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction. -/// -/// \param[in,out] map_items A pointer to an `AMmapItems` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \pre \p map_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// map_items must be a valid pointer to an AMmapItems -#[no_mangle] -pub unsafe extern "C" fn AMmapItemsAdvance(map_items: *mut AMmapItems, n: isize) { - if let Some(map_items) = map_items.as_mut() { - map_items.advance(n); - }; -} - -/// \memberof AMmapItems -/// \brief Tests the equality of two sequences of map object items underlying -/// a pair of iterators. -/// -/// \param[in] map_items1 A pointer to an `AMmapItems` struct. -/// \param[in] map_items2 A pointer to an `AMmapItems` struct. -/// \return `true` if \p map_items1 `==` \p map_items2 and `false` otherwise. -/// \pre \p map_items1 `!= NULL`. -/// \pre \p map_items2 `!= NULL`. -/// \internal -/// -/// #Safety -/// map_items1 must be a valid pointer to an AMmapItems -/// map_items2 must be a valid pointer to an AMmapItems -#[no_mangle] -pub unsafe extern "C" fn AMmapItemsEqual( - map_items1: *const AMmapItems, - map_items2: *const AMmapItems, -) -> bool { - match (map_items1.as_ref(), map_items2.as_ref()) { - (Some(map_items1), Some(map_items2)) => map_items1.as_ref() == map_items2.as_ref(), - (None, Some(_)) | (Some(_), None) | (None, None) => false, - } -} - -/// \memberof AMmapItems -/// \brief Gets the map object item at the current position of an iterator -/// over a sequence of map object items and then advances it by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction. -/// -/// \param[in,out] map_items A pointer to an `AMmapItems` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A pointer to an `AMmapItem` struct that's `NULL` when \p map_items -/// was previously advanced past its forward/reverse limit. -/// \pre \p map_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// map_items must be a valid pointer to an AMmapItems -#[no_mangle] -pub unsafe extern "C" fn AMmapItemsNext(map_items: *mut AMmapItems, n: isize) -> *const AMmapItem { - if let Some(map_items) = map_items.as_mut() { - if let Some(map_item) = map_items.next(n) { - return map_item; - } - } - std::ptr::null() -} - -/// \memberof AMmapItems -/// \brief Advances an iterator over a sequence of map object items by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction and then gets the map object item at its new -/// position. -/// -/// \param[in,out] map_items A pointer to an `AMmapItems` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A pointer to an `AMmapItem` struct that's `NULL` when \p map_items -/// is presently advanced past its forward/reverse limit. -/// \pre \p map_items `!= NULL`. -/// \internal -/// -/// #Safety -/// map_items must be a valid pointer to an AMmapItems -#[no_mangle] -pub unsafe extern "C" fn AMmapItemsPrev(map_items: *mut AMmapItems, n: isize) -> *const AMmapItem { - if let Some(map_items) = map_items.as_mut() { - if let Some(map_item) = map_items.prev(n) { - return map_item; - } - } - std::ptr::null() -} - -/// \memberof AMmapItems -/// \brief Gets the size of the sequence of map object items underlying an -/// iterator. -/// -/// \param[in] map_items A pointer to an `AMmapItems` struct. -/// \return The count of values in \p map_items. -/// \pre \p map_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// map_items must be a valid pointer to an AMmapItems -#[no_mangle] -pub unsafe extern "C" fn AMmapItemsSize(map_items: *const AMmapItems) -> usize { - if let Some(map_items) = map_items.as_ref() { - map_items.len() - } else { - 0 - } -} - -/// \memberof AMmapItems -/// \brief Creates an iterator over the same sequence of map object items as -/// the given one but with the opposite position and direction. -/// -/// \param[in] map_items A pointer to an `AMmapItems` struct. -/// \return An `AMmapItems` struct -/// \pre \p map_items `!= NULL`. -/// \internal -/// -/// #Safety -/// map_items must be a valid pointer to an AMmapItems -#[no_mangle] -pub unsafe extern "C" fn AMmapItemsReversed(map_items: *const AMmapItems) -> AMmapItems { - if let Some(map_items) = map_items.as_ref() { - map_items.reversed() - } else { - AMmapItems::default() - } -} - -/// \memberof AMmapItems -/// \brief Creates an iterator at the starting position over the same sequence of map object items as the given one. -/// -/// \param[in] map_items A pointer to an `AMmapItems` struct. -/// \return An `AMmapItems` struct -/// \pre \p map_items `!= NULL`. -/// \internal -/// -/// #Safety -/// map_items must be a valid pointer to an AMmapItems -#[no_mangle] -pub unsafe extern "C" fn AMmapItemsRewound(map_items: *const AMmapItems) -> AMmapItems { - if let Some(map_items) = map_items.as_ref() { - map_items.rewound() - } else { - AMmapItems::default() - } -} diff --git a/automerge-c/src/doc/utils.rs b/automerge-c/src/doc/utils.rs deleted file mode 100644 index b3a975e5..00000000 --- a/automerge-c/src/doc/utils.rs +++ /dev/null @@ -1,57 +0,0 @@ -use std::ffi::CStr; -use std::os::raw::c_char; - -macro_rules! to_actor_id { - ($handle:expr) => {{ - let handle = $handle.as_ref(); - match handle { - Some(b) => b, - None => return AMresult::err("Invalid AMactorId pointer").into(), - } - }}; -} - -pub(crate) use to_actor_id; - -macro_rules! 
to_doc { - ($handle:expr) => {{ - let handle = $handle.as_ref(); - match handle { - Some(b) => b, - None => return AMresult::err("Invalid AMdoc pointer").into(), - } - }}; -} - -pub(crate) use to_doc; - -macro_rules! to_doc_mut { - ($handle:expr) => {{ - let handle = $handle.as_mut(); - match handle { - Some(b) => b, - None => return AMresult::err("Invalid AMdoc pointer").into(), - } - }}; -} - -pub(crate) use to_doc_mut; - -macro_rules! to_obj_id { - ($handle:expr) => {{ - match $handle.as_ref() { - Some(obj_id) => obj_id, - None => &automerge::ROOT, - } - }}; -} - -pub(crate) use to_obj_id; - -pub(crate) unsafe fn to_str(c: *const c_char) -> String { - if !c.is_null() { - CStr::from_ptr(c).to_string_lossy().to_string() - } else { - String::default() - } -} diff --git a/automerge-c/src/lib.rs b/automerge-c/src/lib.rs deleted file mode 100644 index 6418bd33..00000000 --- a/automerge-c/src/lib.rs +++ /dev/null @@ -1,11 +0,0 @@ -mod actor_id; -mod byte_span; -mod change; -mod change_hashes; -mod changes; -mod doc; -mod obj; -mod result; -mod result_stack; -mod strs; -mod sync; diff --git a/automerge-c/src/obj/item.rs b/automerge-c/src/obj/item.rs deleted file mode 100644 index 84bc0fd1..00000000 --- a/automerge-c/src/obj/item.rs +++ /dev/null @@ -1,76 +0,0 @@ -use automerge as am; -use std::cell::RefCell; -use std::ffi::CString; - -use crate::obj::AMobjId; -use crate::result::AMvalue; - -/// \struct AMobjItem -/// \installed_headerfile -/// \brief An item in an object. -#[repr(C)] -pub struct AMobjItem { - /// The object identifier of an item in an object. - obj_id: AMobjId, - /// The value of an item in an object. 
- value: (am::Value<'static>, RefCell>), -} - -impl AMobjItem { - pub fn new(value: am::Value<'static>, obj_id: am::ObjId) -> Self { - Self { - obj_id: AMobjId::new(obj_id), - value: (value, Default::default()), - } - } -} - -impl PartialEq for AMobjItem { - fn eq(&self, other: &Self) -> bool { - self.obj_id == other.obj_id && self.value.0 == other.value.0 - } -} - -impl From<&AMobjItem> for (am::Value<'static>, am::ObjId) { - fn from(obj_item: &AMobjItem) -> Self { - (obj_item.value.0.clone(), obj_item.obj_id.as_ref().clone()) - } -} - -/// \memberof AMobjItem -/// \brief Gets the object identifier of an item in an object. -/// -/// \param[in] obj_item A pointer to an `AMobjItem` struct. -/// \return A pointer to an `AMobjId` struct. -/// \pre \p obj_item `!= NULL`. -/// \internal -/// -/// # Safety -/// obj_item must be a valid pointer to an AMobjItem -#[no_mangle] -pub unsafe extern "C" fn AMobjItemObjId(obj_item: *const AMobjItem) -> *const AMobjId { - if let Some(obj_item) = obj_item.as_ref() { - &obj_item.obj_id - } else { - std::ptr::null() - } -} - -/// \memberof AMobjItem -/// \brief Gets the value of an item in an object. -/// -/// \param[in] obj_item A pointer to an `AMobjItem` struct. -/// \return An `AMvalue` struct. -/// \pre \p obj_item `!= NULL`. 
-/// \internal -/// -/// # Safety -/// obj_item must be a valid pointer to an AMobjItem -#[no_mangle] -pub unsafe extern "C" fn AMobjItemValue<'a>(obj_item: *const AMobjItem) -> AMvalue<'a> { - if let Some(obj_item) = obj_item.as_ref() { - (&obj_item.value.0, &obj_item.value.1).into() - } else { - AMvalue::Void - } -} diff --git a/automerge-c/src/obj/items.rs b/automerge-c/src/obj/items.rs deleted file mode 100644 index fbb1d641..00000000 --- a/automerge-c/src/obj/items.rs +++ /dev/null @@ -1,341 +0,0 @@ -use std::ffi::c_void; -use std::mem::size_of; - -use crate::obj::item::AMobjItem; - -#[repr(C)] -struct Detail { - len: usize, - offset: isize, - ptr: *const c_void, -} - -/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call -/// (https://github.com/eqrion/cbindgen/issues/252) but it will -/// propagate the name of a constant initialized from it so if the -/// constant's name is a symbolic representation of the value it can be -/// converted into a number by post-processing the header it generated. -pub const USIZE_USIZE_USIZE_: usize = size_of::(); - -impl Detail { - fn new(obj_items: &[AMobjItem], offset: isize) -> Self { - Self { - len: obj_items.len(), - offset, - ptr: obj_items.as_ptr() as *const c_void, - } - } - - pub fn advance(&mut self, n: isize) { - if n == 0 { - return; - } - let len = self.len as isize; - self.offset = if self.offset < 0 { - // It's reversed. - let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); - if unclipped >= 0 { - // Clip it to the forward stop. - len - } else { - std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) - } - } else { - let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); - if unclipped < 0 { - // Clip it to the reverse stop. 
- -(len + 1) - } else { - std::cmp::max(0, std::cmp::min(unclipped, len)) - } - } - } - - pub fn get_index(&self) -> usize { - (self.offset - + if self.offset < 0 { - self.len as isize - } else { - 0 - }) as usize - } - - pub fn next(&mut self, n: isize) -> Option<&AMobjItem> { - if self.is_stopped() { - return None; - } - let slice: &[AMobjItem] = - unsafe { std::slice::from_raw_parts(self.ptr as *const AMobjItem, self.len) }; - let value = &slice[self.get_index()]; - self.advance(n); - Some(value) - } - - pub fn is_stopped(&self) -> bool { - let len = self.len as isize; - self.offset < -len || self.offset == len - } - - pub fn prev(&mut self, n: isize) -> Option<&AMobjItem> { - self.advance(-n); - if self.is_stopped() { - return None; - } - let slice: &[AMobjItem] = - unsafe { std::slice::from_raw_parts(self.ptr as *const AMobjItem, self.len) }; - Some(&slice[self.get_index()]) - } - - pub fn reversed(&self) -> Self { - Self { - len: self.len, - offset: -(self.offset + 1), - ptr: self.ptr, - } - } - - pub fn rewound(&self) -> Self { - Self { - len: self.len, - offset: if self.offset < 0 { -1 } else { 0 }, - ptr: self.ptr, - } - } -} - -impl From for [u8; USIZE_USIZE_USIZE_] { - fn from(detail: Detail) -> Self { - unsafe { - std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) - .try_into() - .unwrap() - } - } -} - -/// \struct AMobjItems -/// \installed_headerfile -/// \brief A random-access iterator over a sequence of object items. -#[repr(C)] -#[derive(Eq, PartialEq)] -pub struct AMobjItems { - /// An implementation detail that is intentionally opaque. - /// \warning Modifying \p detail will cause undefined behavior. - /// \note The actual size of \p detail will vary by platform, this is just - /// the one for the platform this documentation was built on. 
- detail: [u8; USIZE_USIZE_USIZE_], -} - -impl AMobjItems { - pub fn new(obj_items: &[AMobjItem]) -> Self { - Self { - detail: Detail::new(obj_items, 0).into(), - } - } - - pub fn advance(&mut self, n: isize) { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.advance(n); - } - - pub fn len(&self) -> usize { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - detail.len - } - - pub fn next(&mut self, n: isize) -> Option<&AMobjItem> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.next(n) - } - - pub fn prev(&mut self, n: isize) -> Option<&AMobjItem> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.prev(n) - } - - pub fn reversed(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.reversed().into(), - } - } - - pub fn rewound(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.rewound().into(), - } - } -} - -impl AsRef<[AMobjItem]> for AMobjItems { - fn as_ref(&self) -> &[AMobjItem] { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - unsafe { std::slice::from_raw_parts(detail.ptr as *const AMobjItem, detail.len) } - } -} - -impl Default for AMobjItems { - fn default() -> Self { - Self { - detail: [0; USIZE_USIZE_USIZE_], - } - } -} - -/// \memberof AMobjItems -/// \brief Advances an iterator over a sequence of object items by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction. -/// -/// \param[in,out] obj_items A pointer to an `AMobjItems` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \pre \p obj_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// obj_items must be a valid pointer to an AMobjItems -#[no_mangle] -pub unsafe extern "C" fn AMobjItemsAdvance(obj_items: *mut AMobjItems, n: isize) { - if let Some(obj_items) = obj_items.as_mut() { - obj_items.advance(n); - }; -} - -/// \memberof AMobjItems -/// \brief Tests the equality of two sequences of object items underlying a -/// pair of iterators. -/// -/// \param[in] obj_items1 A pointer to an `AMobjItems` struct. -/// \param[in] obj_items2 A pointer to an `AMobjItems` struct. -/// \return `true` if \p obj_items1 `==` \p obj_items2 and `false` otherwise. -/// \pre \p obj_items1 `!= NULL`. -/// \pre \p obj_items2 `!= NULL`. -/// \internal -/// -/// #Safety -/// obj_items1 must be a valid pointer to an AMobjItems -/// obj_items2 must be a valid pointer to an AMobjItems -#[no_mangle] -pub unsafe extern "C" fn AMobjItemsEqual( - obj_items1: *const AMobjItems, - obj_items2: *const AMobjItems, -) -> bool { - match (obj_items1.as_ref(), obj_items2.as_ref()) { - (Some(obj_items1), Some(obj_items2)) => obj_items1.as_ref() == obj_items2.as_ref(), - (None, Some(_)) | (Some(_), None) | (None, None) => false, - } -} - -/// \memberof AMobjItems -/// \brief Gets the object item at the current position of an iterator over a -/// sequence of object items and then advances it by at most \p |n| -/// positions where the sign of \p n is relative to the iterator's -/// direction. -/// -/// \param[in,out] obj_items A pointer to an `AMobjItems` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A pointer to an `AMobjItem` struct that's `NULL` when \p obj_items -/// was previously advanced past its forward/reverse limit. -/// \pre \p obj_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// obj_items must be a valid pointer to an AMobjItems -#[no_mangle] -pub unsafe extern "C" fn AMobjItemsNext(obj_items: *mut AMobjItems, n: isize) -> *const AMobjItem { - if let Some(obj_items) = obj_items.as_mut() { - if let Some(obj_item) = obj_items.next(n) { - return obj_item; - } - } - std::ptr::null() -} - -/// \memberof AMobjItems -/// \brief Advances an iterator over a sequence of object items by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction and then gets the object item at its new -/// position. -/// -/// \param[in,out] obj_items A pointer to an `AMobjItems` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A pointer to an `AMobjItem` struct that's `NULL` when \p obj_items -/// is presently advanced past its forward/reverse limit. -/// \pre \p obj_items `!= NULL`. -/// \internal -/// -/// #Safety -/// obj_items must be a valid pointer to an AMobjItems -#[no_mangle] -pub unsafe extern "C" fn AMobjItemsPrev(obj_items: *mut AMobjItems, n: isize) -> *const AMobjItem { - if let Some(obj_items) = obj_items.as_mut() { - if let Some(obj_item) = obj_items.prev(n) { - return obj_item; - } - } - std::ptr::null() -} - -/// \memberof AMobjItems -/// \brief Gets the size of the sequence of object items underlying an -/// iterator. -/// -/// \param[in] obj_items A pointer to an `AMobjItems` struct. -/// \return The count of values in \p obj_items. -/// \pre \p obj_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// obj_items must be a valid pointer to an AMobjItems -#[no_mangle] -pub unsafe extern "C" fn AMobjItemsSize(obj_items: *const AMobjItems) -> usize { - if let Some(obj_items) = obj_items.as_ref() { - obj_items.len() - } else { - 0 - } -} - -/// \memberof AMobjItems -/// \brief Creates an iterator over the same sequence of object items as the -/// given one but with the opposite position and direction. -/// -/// \param[in] obj_items A pointer to an `AMobjItems` struct. -/// \return An `AMobjItems` struct -/// \pre \p obj_items `!= NULL`. -/// \internal -/// -/// #Safety -/// obj_items must be a valid pointer to an AMobjItems -#[no_mangle] -pub unsafe extern "C" fn AMobjItemsReversed(obj_items: *const AMobjItems) -> AMobjItems { - if let Some(obj_items) = obj_items.as_ref() { - obj_items.reversed() - } else { - AMobjItems::default() - } -} - -/// \memberof AMobjItems -/// \brief Creates an iterator at the starting position over the same sequence -/// of object items as the given one. -/// -/// \param[in] obj_items A pointer to an `AMobjItems` struct. -/// \return An `AMobjItems` struct -/// \pre \p obj_items `!= NULL`. 
-/// \internal -/// -/// #Safety -/// obj_items must be a valid pointer to an AMobjItems -#[no_mangle] -pub unsafe extern "C" fn AMobjItemsRewound(obj_items: *const AMobjItems) -> AMobjItems { - if let Some(obj_items) = obj_items.as_ref() { - obj_items.rewound() - } else { - AMobjItems::default() - } -} diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs deleted file mode 100644 index 67b14b1d..00000000 --- a/automerge-c/src/result.rs +++ /dev/null @@ -1,914 +0,0 @@ -use automerge as am; -use libc::strcmp; -use smol_str::SmolStr; -use std::any::type_name; -use std::cell::RefCell; -use std::collections::BTreeMap; -use std::ffi::CString; -use std::ops::{Range, RangeFrom, RangeFull, RangeTo}; -use std::os::raw::c_char; - -use crate::actor_id::AMactorId; -use crate::byte_span::AMbyteSpan; -use crate::change::AMchange; -use crate::change_hashes::AMchangeHashes; -use crate::changes::AMchanges; -use crate::doc::list::{item::AMlistItem, items::AMlistItems}; -use crate::doc::map::{item::AMmapItem, items::AMmapItems}; -use crate::doc::utils::to_str; -use crate::doc::AMdoc; -use crate::obj::item::AMobjItem; -use crate::obj::items::AMobjItems; -use crate::obj::AMobjId; -use crate::strs::AMstrs; -use crate::sync::{AMsyncMessage, AMsyncState}; - -/// \struct AMvalue -/// \installed_headerfile -/// \brief A discriminated union of value type variants for a result. -/// -/// \enum AMvalueVariant -/// \brief A value type discriminant. -/// -/// \var AMvalue::actor_id -/// An actor identifier as a pointer to an `AMactorId` struct. -/// -/// \var AMvalue::boolean -/// A boolean. -/// -/// \var AMvalue::bytes -/// A sequence of bytes as an `AMbyteSpan` struct. -/// -/// \var AMvalue::change_hashes -/// A sequence of change hashes as an `AMchangeHashes` struct. -/// -/// \var AMvalue::changes -/// A sequence of changes as an `AMchanges` struct. -/// -/// \var AMvalue::counter -/// A CRDT counter. 
-/// -/// \var AMvalue::doc -/// A document as a pointer to an `AMdoc` struct. -/// -/// \var AMvalue::f64 -/// A 64-bit float. -/// -/// \var AMvalue::int_ -/// A 64-bit signed integer. -/// -/// \var AMvalue::list_items -/// A sequence of list object items as an `AMlistItems` struct. -/// -/// \var AMvalue::map_items -/// A sequence of map object items as an `AMmapItems` struct. -/// -/// \var AMvalue::obj_id -/// An object identifier as a pointer to an `AMobjId` struct. -/// -/// \var AMvalue::obj_items -/// A sequence of object items as an `AMobjItems` struct. -/// -/// \var AMvalue::str -/// A UTF-8 string. -/// -/// \var AMvalue::strs -/// A sequence of UTF-8 strings as an `AMstrs` struct. -/// -/// \var AMvalue::sync_message -/// A synchronization message as a pointer to an `AMsyncMessage` struct. -/// -/// \var AMvalue::sync_state -/// A synchronization state as a pointer to an `AMsyncState` struct. -/// -/// \var AMvalue::tag -/// The variant discriminator. -/// -/// \var AMvalue::timestamp -/// A Lamport timestamp. -/// -/// \var AMvalue::uint -/// A 64-bit unsigned integer. -/// -/// \var AMvalue::unknown -/// A value of unknown type as an `AMunknownValue` struct. -#[repr(u8)] -pub enum AMvalue<'a> { - /// A void variant. - /// \note This tag is unalphabetized so that a zeroed struct will have it. - Void, - /// An actor identifier variant. - ActorId(&'a AMactorId), - /// A boolean variant. - Boolean(bool), - /// A byte array variant. - Bytes(AMbyteSpan), - /// A change hashes variant. - ChangeHashes(AMchangeHashes), - /// A changes variant. - Changes(AMchanges), - /// A CRDT counter variant. - Counter(i64), - /// A document variant. - Doc(*mut AMdoc), - /// A 64-bit float variant. - F64(f64), - /// A 64-bit signed integer variant. - Int(i64), - /// A list items variant. - ListItems(AMlistItems), - /// A map items variant. - MapItems(AMmapItems), - /// A null variant. - Null, - /// An object identifier variant. 
- ObjId(&'a AMobjId), - /// An object items variant. - ObjItems(AMobjItems), - /// A UTF-8 string variant. - Str(*const libc::c_char), - /// A UTF-8 strings variant. - Strs(AMstrs), - /// A synchronization message variant. - SyncMessage(&'a AMsyncMessage), - /// A synchronization state variant. - SyncState(&'a mut AMsyncState), - /// A Lamport timestamp variant. - Timestamp(i64), - /// A 64-bit unsigned integer variant. - Uint(u64), - /// An unknown type of scalar value variant. - Unknown(AMunknownValue), -} - -impl<'a> PartialEq for AMvalue<'a> { - fn eq(&self, other: &Self) -> bool { - use AMvalue::*; - - match (self, other) { - (ActorId(lhs), ActorId(rhs)) => *lhs == *rhs, - (Boolean(lhs), Boolean(rhs)) => lhs == rhs, - (Bytes(lhs), Bytes(rhs)) => lhs == rhs, - (ChangeHashes(lhs), ChangeHashes(rhs)) => lhs == rhs, - (Changes(lhs), Changes(rhs)) => lhs == rhs, - (Counter(lhs), Counter(rhs)) => lhs == rhs, - (Doc(lhs), Doc(rhs)) => *lhs == *rhs, - (F64(lhs), F64(rhs)) => lhs == rhs, - (Int(lhs), Int(rhs)) => lhs == rhs, - (ListItems(lhs), ListItems(rhs)) => lhs == rhs, - (MapItems(lhs), MapItems(rhs)) => lhs == rhs, - (ObjId(lhs), ObjId(rhs)) => *lhs == *rhs, - (ObjItems(lhs), ObjItems(rhs)) => lhs == rhs, - (Str(lhs), Str(rhs)) => unsafe { strcmp(*lhs, *rhs) == 0 }, - (Strs(lhs), Strs(rhs)) => lhs == rhs, - (SyncMessage(lhs), SyncMessage(rhs)) => *lhs == *rhs, - (SyncState(lhs), SyncState(rhs)) => *lhs == *rhs, - (Timestamp(lhs), Timestamp(rhs)) => lhs == rhs, - (Uint(lhs), Uint(rhs)) => lhs == rhs, - (Unknown(lhs), Unknown(rhs)) => lhs == rhs, - (Null, Null) | (Void, Void) => true, - _ => false, - } - } -} - -impl From<(&am::Value<'_>, &RefCell>)> for AMvalue<'_> { - fn from((value, c_str): (&am::Value<'_>, &RefCell>)) -> Self { - match value { - am::Value::Scalar(scalar) => match scalar.as_ref() { - am::ScalarValue::Boolean(flag) => AMvalue::Boolean(*flag), - am::ScalarValue::Bytes(bytes) => AMvalue::Bytes(bytes.as_slice().into()), - 
am::ScalarValue::Counter(counter) => AMvalue::Counter(counter.into()), - am::ScalarValue::F64(float) => AMvalue::F64(*float), - am::ScalarValue::Int(int) => AMvalue::Int(*int), - am::ScalarValue::Null => AMvalue::Null, - am::ScalarValue::Str(smol_str) => { - let mut c_str = c_str.borrow_mut(); - AMvalue::Str(match c_str.as_mut() { - None => { - let value_str = CString::new(smol_str.to_string()).unwrap(); - c_str.insert(value_str).as_ptr() - } - Some(value_str) => value_str.as_ptr(), - }) - } - am::ScalarValue::Timestamp(timestamp) => AMvalue::Timestamp(*timestamp), - am::ScalarValue::Uint(uint) => AMvalue::Uint(*uint), - am::ScalarValue::Unknown { bytes, type_code } => AMvalue::Unknown(AMunknownValue { - bytes: bytes.as_slice().into(), - type_code: *type_code, - }), - }, - // \todo Confirm that an object variant should be ignored - // when there's no object ID variant. - am::Value::Object(_) => AMvalue::Void, - } - } -} - -impl From<&AMvalue<'_>> for u8 { - fn from(value: &AMvalue) -> Self { - use AMvalue::*; - - // \warning These numbers must correspond to the order in which the - // variants of an AMvalue are declared within it. - match value { - ActorId(_) => 1, - Boolean(_) => 2, - Bytes(_) => 3, - ChangeHashes(_) => 4, - Changes(_) => 5, - Counter(_) => 6, - Doc(_) => 7, - F64(_) => 8, - Int(_) => 9, - ListItems(_) => 10, - MapItems(_) => 11, - Null => 12, - ObjId(_) => 13, - ObjItems(_) => 14, - Str(_) => 15, - Strs(_) => 16, - SyncMessage(_) => 17, - SyncState(_) => 18, - Timestamp(_) => 19, - Uint(_) => 20, - Unknown(..) 
=> 21, - Void => 0, - } - } -} - -impl TryFrom<&AMvalue<'_>> for am::ScalarValue { - type Error = am::AutomergeError; - - fn try_from(c_value: &AMvalue) -> Result { - use am::AutomergeError::InvalidValueType; - use AMvalue::*; - - let expected = type_name::().to_string(); - match c_value { - Boolean(b) => Ok(am::ScalarValue::Boolean(*b)), - Bytes(span) => { - let slice = unsafe { std::slice::from_raw_parts(span.src, span.count) }; - Ok(am::ScalarValue::Bytes(slice.to_vec())) - } - Counter(c) => Ok(am::ScalarValue::Counter(c.into())), - F64(f) => Ok(am::ScalarValue::F64(*f)), - Int(i) => Ok(am::ScalarValue::Int(*i)), - Str(c_str) => { - let smol_str = unsafe { SmolStr::new(to_str(*c_str)) }; - Ok(am::ScalarValue::Str(smol_str)) - } - Timestamp(t) => Ok(am::ScalarValue::Timestamp(*t)), - Uint(u) => Ok(am::ScalarValue::Uint(*u)), - Null => Ok(am::ScalarValue::Null), - Unknown(AMunknownValue { bytes, type_code }) => { - let slice = unsafe { std::slice::from_raw_parts(bytes.src, bytes.count) }; - Ok(am::ScalarValue::Unknown { - bytes: slice.to_vec(), - type_code: *type_code, - }) - } - ActorId(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - ChangeHashes(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - Changes(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - Doc(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - ListItems(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - MapItems(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - ObjId(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - ObjItems(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - Strs(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - SyncMessage(_) => 
Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - SyncState(_) => Err(InvalidValueType { - expected, - unexpected: type_name::().to_string(), - }), - Void => Err(InvalidValueType { - expected, - unexpected: type_name::<()>().to_string(), - }), - } - } -} - -/// \memberof AMvalue -/// \brief Tests the equality of two values. -/// -/// \param[in] value1 A pointer to an `AMvalue` struct. -/// \param[in] value2 A pointer to an `AMvalue` struct. -/// \return `true` if \p value1 `==` \p value2 and `false` otherwise. -/// \pre \p value1 `!= NULL`. -/// \pre \p value2 `!= NULL`. -/// \internal -/// -/// #Safety -/// value1 must be a valid AMvalue pointer -/// value2 must be a valid AMvalue pointer -#[no_mangle] -pub unsafe extern "C" fn AMvalueEqual(value1: *const AMvalue, value2: *const AMvalue) -> bool { - match (value1.as_ref(), value2.as_ref()) { - (Some(value1), Some(value2)) => *value1 == *value2, - (None, Some(_)) | (Some(_), None) | (None, None) => false, - } -} - -/// \struct AMresult -/// \installed_headerfile -/// \brief A discriminated union of result variants. 
-pub enum AMresult { - ActorId(am::ActorId, Option), - ChangeHashes(Vec), - Changes(Vec, Option>), - Doc(Box), - Error(CString), - ListItems(Vec), - MapItems(Vec), - ObjId(AMobjId), - ObjItems(Vec), - String(CString), - Strings(Vec), - SyncMessage(AMsyncMessage), - SyncState(Box), - Value(am::Value<'static>, RefCell>), - Void, -} - -impl AMresult { - pub(crate) fn err(s: &str) -> Self { - AMresult::Error(CString::new(s).unwrap()) - } -} - -impl From for AMresult { - fn from(auto_commit: am::AutoCommit) -> Self { - AMresult::Doc(Box::new(AMdoc::new(auto_commit))) - } -} - -impl From for AMresult { - fn from(change_hash: am::ChangeHash) -> Self { - AMresult::ChangeHashes(vec![change_hash]) - } -} - -impl From> for AMresult { - fn from(keys: am::Keys<'_, '_>) -> Self { - let cstrings: Vec = keys.map(|s| CString::new(s).unwrap()).collect(); - AMresult::Strings(cstrings) - } -} - -impl From> for AMresult { - fn from(keys: am::KeysAt<'_, '_>) -> Self { - let cstrings: Vec = keys.map(|s| CString::new(s).unwrap()).collect(); - AMresult::Strings(cstrings) - } -} - -impl From>> for AMresult { - fn from(list_range: am::ListRange<'static, Range>) -> Self { - AMresult::ListItems( - list_range - .map(|(i, v, o)| AMlistItem::new(i, v.clone(), o)) - .collect(), - ) - } -} - -impl From>> for AMresult { - fn from(list_range: am::ListRangeAt<'static, Range>) -> Self { - AMresult::ListItems( - list_range - .map(|(i, v, o)| AMlistItem::new(i, v.clone(), o)) - .collect(), - ) - } -} - -impl From>> for AMresult { - fn from(map_range: am::MapRange<'static, Range>) -> Self { - let map_items: Vec = map_range - .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) - .collect(); - AMresult::MapItems(map_items) - } -} - -impl From>> for AMresult { - fn from(map_range: am::MapRangeAt<'static, Range>) -> Self { - let map_items: Vec = map_range - .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) - .collect(); - 
AMresult::MapItems(map_items) - } -} - -impl From>> for AMresult { - fn from(map_range: am::MapRange<'static, RangeFrom>) -> Self { - let map_items: Vec = map_range - .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) - .collect(); - AMresult::MapItems(map_items) - } -} - -impl From>> for AMresult { - fn from(map_range: am::MapRangeAt<'static, RangeFrom>) -> Self { - let map_items: Vec = map_range - .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) - .collect(); - AMresult::MapItems(map_items) - } -} - -impl From> for AMresult { - fn from(map_range: am::MapRange<'static, RangeFull>) -> Self { - let map_items: Vec = map_range - .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) - .collect(); - AMresult::MapItems(map_items) - } -} - -impl From> for AMresult { - fn from(map_range: am::MapRangeAt<'static, RangeFull>) -> Self { - let map_items: Vec = map_range - .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) - .collect(); - AMresult::MapItems(map_items) - } -} - -impl From>> for AMresult { - fn from(map_range: am::MapRange<'static, RangeTo>) -> Self { - let map_items: Vec = map_range - .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) - .collect(); - AMresult::MapItems(map_items) - } -} - -impl From>> for AMresult { - fn from(map_range: am::MapRangeAt<'static, RangeTo>) -> Self { - let map_items: Vec = map_range - .map(|(k, v, o): (&'_ str, am::Value<'_>, am::ObjId)| AMmapItem::new(k, v.clone(), o)) - .collect(); - AMresult::MapItems(map_items) - } -} - -impl From for AMresult { - fn from(state: am::sync::State) -> Self { - AMresult::SyncState(Box::new(AMsyncState::new(state))) - } -} - -impl From> for AMresult { - fn from(pairs: am::Values<'static>) -> Self { - AMresult::ObjItems(pairs.map(|(v, o)| AMobjItem::new(v.clone(), o)).collect()) - } -} - -impl From, am::ObjId)>, 
am::AutomergeError>> for AMresult { - fn from(maybe: Result, am::ObjId)>, am::AutomergeError>) -> Self { - match maybe { - Ok(pairs) => AMresult::ObjItems( - pairs - .into_iter() - .map(|(v, o)| AMobjItem::new(v, o)) - .collect(), - ), - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -impl From for *mut AMresult { - fn from(b: AMresult) -> Self { - Box::into_raw(Box::new(b)) - } -} - -impl From> for AMresult { - fn from(maybe: Option<&am::Change>) -> Self { - match maybe { - Some(change) => AMresult::Changes(vec![change.clone()], None), - None => AMresult::Void, - } - } -} - -impl From> for AMresult { - fn from(maybe: Option) -> Self { - match maybe { - Some(message) => AMresult::SyncMessage(AMsyncMessage::new(message)), - None => AMresult::Void, - } - } -} - -impl From> for AMresult { - fn from(maybe: Result<(), am::AutomergeError>) -> Self { - match maybe { - Ok(()) => AMresult::Void, - Err(e) => AMresult::err(&e.to_string()), - } - } -} -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - Ok(actor_id) => AMresult::ActorId(actor_id, None), - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - Ok(actor_id) => AMresult::ActorId(actor_id, None), - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - Ok(auto_commit) => AMresult::Doc(Box::new(AMdoc::new(auto_commit))), - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - Ok(change) => AMresult::Changes(vec![change], None), - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - Ok(obj_id) => AMresult::ObjId(AMobjId::new(obj_id)), - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - 
Ok(message) => AMresult::SyncMessage(AMsyncMessage::new(message)), - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - Ok(state) => AMresult::SyncState(Box::new(AMsyncState::new(state))), - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -impl From, am::AutomergeError>> for AMresult { - fn from(maybe: Result, am::AutomergeError>) -> Self { - match maybe { - Ok(value) => AMresult::Value(value, Default::default()), - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -impl From, am::ObjId)>, am::AutomergeError>> for AMresult { - fn from(maybe: Result, am::ObjId)>, am::AutomergeError>) -> Self { - match maybe { - Ok(Some((value, obj_id))) => match value { - am::Value::Object(_) => AMresult::ObjId(AMobjId::new(obj_id)), - _ => AMresult::Value(value, Default::default()), - }, - Ok(None) => AMresult::Void, - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - Ok(string) => AMresult::String(CString::new(string).unwrap()), - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -impl From> for AMresult { - fn from(maybe: Result) -> Self { - match maybe { - Ok(size) => AMresult::Value(am::Value::uint(size as u64), Default::default()), - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -impl From, am::AutomergeError>> for AMresult { - fn from(maybe: Result, am::AutomergeError>) -> Self { - match maybe { - Ok(changes) => AMresult::Changes(changes, None), - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -impl From, am::LoadChangeError>> for AMresult { - fn from(maybe: Result, am::LoadChangeError>) -> Self { - match maybe { - Ok(changes) => AMresult::Changes(changes, None), - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -impl From, am::AutomergeError>> for AMresult { - fn from(maybe: Result, am::AutomergeError>) -> Self { - match maybe { - Ok(changes) => { - let changes: Vec = - 
changes.iter().map(|&change| change.clone()).collect(); - AMresult::Changes(changes, None) - } - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -impl From, am::AutomergeError>> for AMresult { - fn from(maybe: Result, am::AutomergeError>) -> Self { - match maybe { - Ok(change_hashes) => AMresult::ChangeHashes(change_hashes), - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -impl From, am::InvalidChangeHashSlice>> for AMresult { - fn from(maybe: Result, am::InvalidChangeHashSlice>) -> Self { - match maybe { - Ok(change_hashes) => AMresult::ChangeHashes(change_hashes), - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -impl From, am::AutomergeError>> for AMresult { - fn from(maybe: Result, am::AutomergeError>) -> Self { - match maybe { - Ok(bytes) => AMresult::Value(am::Value::bytes(bytes), Default::default()), - Err(e) => AMresult::err(&e.to_string()), - } - } -} - -impl From> for AMresult { - fn from(changes: Vec<&am::Change>) -> Self { - let changes: Vec = changes.iter().map(|&change| change.clone()).collect(); - AMresult::Changes(changes, None) - } -} - -impl From> for AMresult { - fn from(change_hashes: Vec) -> Self { - AMresult::ChangeHashes(change_hashes) - } -} - -impl From> for AMresult { - fn from(bytes: Vec) -> Self { - AMresult::Value(am::Value::bytes(bytes), Default::default()) - } -} - -pub fn to_result>(r: R) -> *mut AMresult { - (r.into()).into() -} - -/// \ingroup enumerations -/// \enum AMstatus -/// \brief The status of an API call. -#[derive(Debug)] -#[repr(u8)] -pub enum AMstatus { - /// Success. - /// \note This tag is unalphabetized so that `0` indicates success. - Ok, - /// Failure due to an error. - Error, - /// Failure due to an invalid result. - InvalidResult, -} - -/// \memberof AMresult -/// \brief Gets a result's error message string. -/// -/// \param[in] result A pointer to an `AMresult` struct. -/// \return A UTF-8 string value or `NULL`. -/// \pre \p result `!= NULL`. 
-/// \internal -/// -/// # Safety -/// result must be a valid pointer to an AMresult -#[no_mangle] -pub unsafe extern "C" fn AMerrorMessage(result: *const AMresult) -> *const c_char { - match result.as_ref() { - Some(AMresult::Error(s)) => s.as_ptr(), - _ => std::ptr::null::(), - } -} - -/// \memberof AMresult -/// \brief Deallocates the storage for a result. -/// -/// \param[in,out] result A pointer to an `AMresult` struct. -/// \pre \p result `!= NULL`. -/// \internal -/// -/// # Safety -/// result must be a valid pointer to an AMresult -#[no_mangle] -pub unsafe extern "C" fn AMfree(result: *mut AMresult) { - if !result.is_null() { - let result: AMresult = *Box::from_raw(result); - drop(result) - } -} - -/// \memberof AMresult -/// \brief Gets the size of a result's value. -/// -/// \param[in] result A pointer to an `AMresult` struct. -/// \return The count of values in \p result. -/// \pre \p result `!= NULL`. -/// \internal -/// -/// # Safety -/// result must be a valid pointer to an AMresult -#[no_mangle] -pub unsafe extern "C" fn AMresultSize(result: *const AMresult) -> usize { - if let Some(result) = result.as_ref() { - use AMresult::*; - - match result { - Error(_) | Void => 0, - ActorId(_, _) - | Doc(_) - | ObjId(_) - | String(_) - | SyncMessage(_) - | SyncState(_) - | Value(_, _) => 1, - ChangeHashes(change_hashes) => change_hashes.len(), - Changes(changes, _) => changes.len(), - ListItems(list_items) => list_items.len(), - MapItems(map_items) => map_items.len(), - ObjItems(obj_items) => obj_items.len(), - Strings(cstrings) => cstrings.len(), - } - } else { - 0 - } -} - -/// \memberof AMresult -/// \brief Gets the status code of a result. -/// -/// \param[in] result A pointer to an `AMresult` struct. -/// \return An `AMstatus` enum tag. -/// \pre \p result `!= NULL`. 
-/// \internal -/// -/// # Safety -/// result must be a valid pointer to an AMresult -#[no_mangle] -pub unsafe extern "C" fn AMresultStatus(result: *const AMresult) -> AMstatus { - match result.as_ref() { - Some(AMresult::Error(_)) => AMstatus::Error, - None => AMstatus::InvalidResult, - _ => AMstatus::Ok, - } -} - -/// \memberof AMresult -/// \brief Gets a result's value. -/// -/// \param[in] result A pointer to an `AMresult` struct. -/// \return An `AMvalue` struct. -/// \pre \p result `!= NULL`. -/// \internal -/// -/// # Safety -/// result must be a valid pointer to an AMresult -#[no_mangle] -pub unsafe extern "C" fn AMresultValue<'a>(result: *mut AMresult) -> AMvalue<'a> { - let mut content = AMvalue::Void; - if let Some(result) = result.as_mut() { - match result { - AMresult::ActorId(actor_id, c_actor_id) => match c_actor_id { - None => { - content = AMvalue::ActorId(&*c_actor_id.insert(AMactorId::new(&*actor_id))); - } - Some(c_actor_id) => { - content = AMvalue::ActorId(&*c_actor_id); - } - }, - AMresult::ChangeHashes(change_hashes) => { - content = AMvalue::ChangeHashes(AMchangeHashes::new(change_hashes)); - } - AMresult::Changes(changes, storage) => { - content = AMvalue::Changes(AMchanges::new( - changes, - storage.get_or_insert(BTreeMap::new()), - )); - } - AMresult::Doc(doc) => content = AMvalue::Doc(&mut **doc), - AMresult::Error(_) => {} - AMresult::ListItems(list_items) => { - content = AMvalue::ListItems(AMlistItems::new(list_items)); - } - AMresult::MapItems(map_items) => { - content = AMvalue::MapItems(AMmapItems::new(map_items)); - } - AMresult::ObjId(obj_id) => { - content = AMvalue::ObjId(obj_id); - } - AMresult::ObjItems(obj_items) => { - content = AMvalue::ObjItems(AMobjItems::new(obj_items)); - } - AMresult::String(cstring) => content = AMvalue::Str(cstring.as_ptr()), - AMresult::Strings(cstrings) => { - content = AMvalue::Strs(AMstrs::new(cstrings)); - } - AMresult::SyncMessage(sync_message) => { - content = 
AMvalue::SyncMessage(sync_message); - } - AMresult::SyncState(sync_state) => { - content = AMvalue::SyncState(&mut *sync_state); - } - AMresult::Value(value, value_str) => { - content = (&*value, &*value_str).into(); - } - AMresult::Void => {} - } - }; - content -} - -/// \struct AMunknownValue -/// \installed_headerfile -/// \brief A value (typically for a `set` operation) whose type is unknown. -/// -#[derive(Eq, PartialEq)] -#[repr(C)] -pub struct AMunknownValue { - /// The value's raw bytes. - bytes: AMbyteSpan, - /// The value's encoded type identifier. - type_code: u8, -} diff --git a/automerge-c/src/result_stack.rs b/automerge-c/src/result_stack.rs deleted file mode 100644 index cfb9c7d2..00000000 --- a/automerge-c/src/result_stack.rs +++ /dev/null @@ -1,156 +0,0 @@ -use crate::result::{AMfree, AMresult, AMresultStatus, AMresultValue, AMstatus, AMvalue}; - -/// \struct AMresultStack -/// \installed_headerfile -/// \brief A node in a singly-linked list of result pointers. -/// -/// \note Using this data structure is purely optional because its only purpose -/// is to make memory management tolerable for direct usage of this API -/// in C, C++ and Objective-C. -#[repr(C)] -pub struct AMresultStack { - /// A result to be deallocated. - pub result: *mut AMresult, - /// The next node in the singly-linked list or `NULL`. - pub next: *mut AMresultStack, -} - -impl AMresultStack { - pub fn new(result: *mut AMresult, next: *mut AMresultStack) -> Self { - Self { result, next } - } -} - -/// \memberof AMresultStack -/// \brief Deallocates the storage for a stack of results. -/// -/// \param[in,out] stack A pointer to a pointer to an `AMresultStack` struct. -/// \return The number of `AMresult` structs freed. -/// \pre \p stack `!= NULL`. -/// \post `*stack == NULL`. -/// \note Calling this function is purely optional because its only purpose is -/// to make memory management tolerable for direct usage of this API in -/// C, C++ and Objective-C. 
-/// \internal -/// -/// # Safety -/// stack must be a valid AMresultStack pointer pointer -#[no_mangle] -pub unsafe extern "C" fn AMfreeStack(stack: *mut *mut AMresultStack) -> usize { - if stack.is_null() { - return 0; - } - let mut count: usize = 0; - while !(*stack).is_null() { - AMfree(AMpop(stack)); - count += 1; - } - count -} - -/// \memberof AMresultStack -/// \brief Gets the topmost result from the stack after removing it. -/// -/// \param[in,out] stack A pointer to a pointer to an `AMresultStack` struct. -/// \return A pointer to an `AMresult` struct or `NULL`. -/// \pre \p stack `!= NULL`. -/// \post `*stack == NULL`. -/// \note Calling this function is purely optional because its only purpose is -/// to make memory management tolerable for direct usage of this API in -/// C, C++ and Objective-C. -/// \internal -/// -/// # Safety -/// stack must be a valid AMresultStack pointer pointer -#[no_mangle] -pub unsafe extern "C" fn AMpop(stack: *mut *mut AMresultStack) -> *mut AMresult { - if stack.is_null() || (*stack).is_null() { - return std::ptr::null_mut(); - } - let top = Box::from_raw(*stack); - *stack = top.next; - let result = top.result; - drop(top); - result -} - -/// \memberof AMresultStack -/// \brief The prototype of a function to be called when a value matching the -/// given discriminant cannot be extracted from the result at the top of -/// the given stack. -/// -/// \note Implementing this function is purely optional because its only purpose -/// is to make memory management tolerable for direct usage of this API -/// in C, C++ and Objective-C. -pub type AMpushCallback = - Option ()>; - -/// \memberof AMresultStack -/// \brief Pushes the given result onto the given stack and then either extracts -/// a value matching the given discriminant from that result or, -/// failing that, calls the given function and gets a void value instead. -/// -/// \param[in,out] stack A pointer to a pointer to an `AMresultStack` struct. 
-/// \param[in] result A pointer to an `AMresult` struct. -/// \param[in] discriminant An `AMvalue` variant's corresponding enum tag. -/// \param[in] callback A pointer to a function with the same signature as -/// `AMpushCallback()` or `NULL`. -/// \return An `AMvalue` struct. -/// \pre \p stack `!= NULL`. -/// \pre \p result `!= NULL`. -/// \warning If \p stack `== NULL` then \p result is deallocated in order to -/// prevent a memory leak. -/// \note Calling this function is purely optional because its only purpose is -/// to make memory management tolerable for direct usage of this API in -/// C, C++ and Objective-C. -/// \internal -/// -/// # Safety -/// stack must be a valid AMresultStack pointer pointer -/// result must be a valid AMresult pointer -#[no_mangle] -pub unsafe extern "C" fn AMpush<'a>( - stack: *mut *mut AMresultStack, - result: *mut AMresult, - discriminant: u8, - callback: AMpushCallback, -) -> AMvalue<'a> { - if stack.is_null() { - // There's no stack to push the result onto so it has to be freed in - // order to prevent a memory leak. - AMfree(result); - if let Some(callback) = callback { - callback(stack, discriminant); - } - return AMvalue::Void; - } else if result.is_null() { - if let Some(callback) = callback { - callback(stack, discriminant); - } - return AMvalue::Void; - } - // Always push the result onto the stack, even if it's wrong, so that the - // given callback can retrieve it. - let node = Box::new(AMresultStack::new(result, *stack)); - let top = Box::into_raw(node); - *stack = top; - // Test that the result contains a value. - match AMresultStatus(result) { - AMstatus::Ok => {} - _ => { - if let Some(callback) = callback { - callback(stack, discriminant); - } - return AMvalue::Void; - } - } - // Test that the result's value matches the given discriminant. 
- let value = AMresultValue(result); - if discriminant != u8::from(&value) { - if let Some(callback) = callback { - callback(stack, discriminant); - } - return AMvalue::Void; - } - value -} diff --git a/automerge-c/src/strs.rs b/automerge-c/src/strs.rs deleted file mode 100644 index a823ecaf..00000000 --- a/automerge-c/src/strs.rs +++ /dev/null @@ -1,344 +0,0 @@ -use std::cmp::Ordering; -use std::ffi::{c_void, CString}; -use std::mem::size_of; -use std::os::raw::c_char; - -#[repr(C)] -struct Detail { - len: usize, - offset: isize, - ptr: *const c_void, -} - -/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call -/// (https://github.com/eqrion/cbindgen/issues/252) but it will -/// propagate the name of a constant initialized from it so if the -/// constant's name is a symbolic representation of the value it can be -/// converted into a number by post-processing the header it generated. -pub const USIZE_USIZE_USIZE_: usize = size_of::(); - -impl Detail { - fn new(c_strings: &[CString], offset: isize) -> Self { - Self { - len: c_strings.len(), - offset, - ptr: c_strings.as_ptr() as *const c_void, - } - } - - pub fn advance(&mut self, n: isize) { - if n == 0 { - return; - } - let len = self.len as isize; - self.offset = if self.offset < 0 { - // It's reversed. - let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); - if unclipped >= 0 { - // Clip it to the forward stop. - len - } else { - std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) - } - } else { - let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); - if unclipped < 0 { - // Clip it to the reverse stop. 
- -(len + 1) - } else { - std::cmp::max(0, std::cmp::min(unclipped, len)) - } - } - } - - pub fn get_index(&self) -> usize { - (self.offset - + if self.offset < 0 { - self.len as isize - } else { - 0 - }) as usize - } - - pub fn next(&mut self, n: isize) -> Option<*const c_char> { - if self.is_stopped() { - return None; - } - let slice: &[CString] = - unsafe { std::slice::from_raw_parts(self.ptr as *const CString, self.len) }; - let value = slice[self.get_index()].as_ptr(); - self.advance(n); - Some(value) - } - - pub fn is_stopped(&self) -> bool { - let len = self.len as isize; - self.offset < -len || self.offset == len - } - - pub fn prev(&mut self, n: isize) -> Option<*const c_char> { - self.advance(-n); - if self.is_stopped() { - return None; - } - let slice: &[CString] = - unsafe { std::slice::from_raw_parts(self.ptr as *const CString, self.len) }; - Some(slice[self.get_index()].as_ptr()) - } - - pub fn reversed(&self) -> Self { - Self { - len: self.len, - offset: -(self.offset + 1), - ptr: self.ptr, - } - } - - pub fn rewound(&self) -> Self { - Self { - len: self.len, - offset: if self.offset < 0 { -1 } else { 0 }, - ptr: self.ptr, - } - } -} - -impl From for [u8; USIZE_USIZE_USIZE_] { - fn from(detail: Detail) -> Self { - unsafe { - std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) - .try_into() - .unwrap() - } - } -} - -/// \struct AMstrs -/// \installed_headerfile -/// \brief A random-access iterator over a sequence of UTF-8 strings. -#[repr(C)] -#[derive(Eq, PartialEq)] -pub struct AMstrs { - /// An implementation detail that is intentionally opaque. - /// \warning Modifying \p detail will cause undefined behavior. - /// \note The actual size of \p detail will vary by platform, this is just - /// the one for the platform this documentation was built on. 
- detail: [u8; USIZE_USIZE_USIZE_], -} - -impl AMstrs { - pub fn new(c_strings: &[CString]) -> Self { - Self { - detail: Detail::new(c_strings, 0).into(), - } - } - - pub fn advance(&mut self, n: isize) { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.advance(n); - } - - pub fn len(&self) -> usize { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - detail.len - } - - pub fn next(&mut self, n: isize) -> Option<*const c_char> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.next(n) - } - - pub fn prev(&mut self, n: isize) -> Option<*const c_char> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.prev(n) - } - - pub fn reversed(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.reversed().into(), - } - } - - pub fn rewound(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.rewound().into(), - } - } -} - -impl AsRef<[CString]> for AMstrs { - fn as_ref(&self) -> &[CString] { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - unsafe { std::slice::from_raw_parts(detail.ptr as *const CString, detail.len) } - } -} - -impl Default for AMstrs { - fn default() -> Self { - Self { - detail: [0; USIZE_USIZE_USIZE_], - } - } -} - -/// \memberof AMstrs -/// \brief Advances an iterator over a sequence of UTF-8 strings by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction. -/// -/// \param[in,out] strs A pointer to an `AMstrs` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \pre \p strs `!= NULL`. 
-/// \internal -/// -/// #Safety -/// strs must be a valid pointer to an AMstrs -#[no_mangle] -pub unsafe extern "C" fn AMstrsAdvance(strs: *mut AMstrs, n: isize) { - if let Some(strs) = strs.as_mut() { - strs.advance(n); - }; -} - -/// \memberof AMstrs -/// \brief Compares the sequences of UTF-8 strings underlying a pair of -/// iterators. -/// -/// \param[in] strs1 A pointer to an `AMstrs` struct. -/// \param[in] strs2 A pointer to an `AMstrs` struct. -/// \return `-1` if \p strs1 `<` \p strs2, `0` if -/// \p strs1 `==` \p strs2 and `1` if -/// \p strs1 `>` \p strs2. -/// \pre \p strs1 `!= NULL`. -/// \pre \p strs2 `!= NULL`. -/// \internal -/// -/// #Safety -/// strs1 must be a valid pointer to an AMstrs -/// strs2 must be a valid pointer to an AMstrs -#[no_mangle] -pub unsafe extern "C" fn AMstrsCmp(strs1: *const AMstrs, strs2: *const AMstrs) -> isize { - match (strs1.as_ref(), strs2.as_ref()) { - (Some(strs1), Some(strs2)) => match strs1.as_ref().cmp(strs2.as_ref()) { - Ordering::Less => -1, - Ordering::Equal => 0, - Ordering::Greater => 1, - }, - (None, Some(_)) => -1, - (Some(_), None) => 1, - (None, None) => 0, - } -} - -/// \memberof AMstrs -/// \brief Gets the key at the current position of an iterator over a sequence -/// of UTF-8 strings and then advances it by at most \p |n| positions -/// where the sign of \p n is relative to the iterator's direction. -/// -/// \param[in,out] strs A pointer to an `AMstrs` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A UTF-8 string that's `NULL` when \p strs was previously advanced -/// past its forward/reverse limit. -/// \pre \p strs `!= NULL`. 
-/// \internal -/// -/// #Safety -/// strs must be a valid pointer to an AMstrs -#[no_mangle] -pub unsafe extern "C" fn AMstrsNext(strs: *mut AMstrs, n: isize) -> *const c_char { - if let Some(strs) = strs.as_mut() { - if let Some(key) = strs.next(n) { - return key; - } - } - std::ptr::null() -} - -/// \memberof AMstrs -/// \brief Advances an iterator over a sequence of UTF-8 strings by at most -/// \p |n| positions where the sign of \p n is relative to the -/// iterator's direction and then gets the key at its new position. -/// -/// \param[in,out] strs A pointer to an `AMstrs` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A UTF-8 string that's `NULL` when \p strs is presently advanced -/// past its forward/reverse limit. -/// \pre \p strs `!= NULL`. -/// \internal -/// -/// #Safety -/// strs must be a valid pointer to an AMstrs -#[no_mangle] -pub unsafe extern "C" fn AMstrsPrev(strs: *mut AMstrs, n: isize) -> *const c_char { - if let Some(strs) = strs.as_mut() { - if let Some(key) = strs.prev(n) { - return key; - } - } - std::ptr::null() -} - -/// \memberof AMstrs -/// \brief Gets the size of the sequence of UTF-8 strings underlying an -/// iterator. -/// -/// \param[in] strs A pointer to an `AMstrs` struct. -/// \return The count of values in \p strs. -/// \pre \p strs `!= NULL`. -/// \internal -/// -/// #Safety -/// strs must be a valid pointer to an AMstrs -#[no_mangle] -pub unsafe extern "C" fn AMstrsSize(strs: *const AMstrs) -> usize { - if let Some(strs) = strs.as_ref() { - strs.len() - } else { - 0 - } -} - -/// \memberof AMstrs -/// \brief Creates an iterator over the same sequence of UTF-8 strings as the -/// given one but with the opposite position and direction. -/// -/// \param[in] strs A pointer to an `AMstrs` struct. -/// \return An `AMstrs` struct. -/// \pre \p strs `!= NULL`. 
-/// \internal -/// -/// #Safety -/// strs must be a valid pointer to an AMstrs -#[no_mangle] -pub unsafe extern "C" fn AMstrsReversed(strs: *const AMstrs) -> AMstrs { - if let Some(strs) = strs.as_ref() { - strs.reversed() - } else { - AMstrs::default() - } -} - -/// \memberof AMstrs -/// \brief Creates an iterator at the starting position over the same sequence -/// of UTF-8 strings as the given one. -/// -/// \param[in] strs A pointer to an `AMstrs` struct. -/// \return An `AMstrs` struct -/// \pre \p strs `!= NULL`. -/// \internal -/// -/// #Safety -/// strs must be a valid pointer to an AMstrs -#[no_mangle] -pub unsafe extern "C" fn AMstrsRewound(strs: *const AMstrs) -> AMstrs { - if let Some(strs) = strs.as_ref() { - strs.rewound() - } else { - AMstrs::default() - } -} diff --git a/automerge-c/src/sync/haves.rs b/automerge-c/src/sync/haves.rs deleted file mode 100644 index d359a4dc..00000000 --- a/automerge-c/src/sync/haves.rs +++ /dev/null @@ -1,378 +0,0 @@ -use automerge as am; -use std::collections::BTreeMap; -use std::ffi::c_void; -use std::mem::size_of; - -use crate::sync::have::AMsyncHave; - -#[repr(C)] -struct Detail { - len: usize, - offset: isize, - ptr: *const c_void, - storage: *mut c_void, -} - -/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call -/// (https://github.com/eqrion/cbindgen/issues/252) but it will -/// propagate the name of a constant initialized from it so if the -/// constant's name is a symbolic representation of the value it can be -/// converted into a number by post-processing the header it generated. 
-pub const USIZE_USIZE_USIZE_USIZE_: usize = size_of::(); - -impl Detail { - fn new( - haves: &[am::sync::Have], - offset: isize, - storage: &mut BTreeMap, - ) -> Self { - let storage: *mut BTreeMap = storage; - Self { - len: haves.len(), - offset, - ptr: haves.as_ptr() as *const c_void, - storage: storage as *mut c_void, - } - } - - pub fn advance(&mut self, n: isize) { - if n == 0 { - return; - } - let len = self.len as isize; - self.offset = if self.offset < 0 { - // It's reversed. - let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); - if unclipped >= 0 { - // Clip it to the forward stop. - len - } else { - std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) - } - } else { - let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); - if unclipped < 0 { - // Clip it to the reverse stop. - -(len + 1) - } else { - std::cmp::max(0, std::cmp::min(unclipped, len)) - } - } - } - - pub fn get_index(&self) -> usize { - (self.offset - + if self.offset < 0 { - self.len as isize - } else { - 0 - }) as usize - } - - pub fn next(&mut self, n: isize) -> Option<*const AMsyncHave> { - if self.is_stopped() { - return None; - } - let slice: &[am::sync::Have] = - unsafe { std::slice::from_raw_parts(self.ptr as *const am::sync::Have, self.len) }; - let storage = unsafe { &mut *(self.storage as *mut BTreeMap) }; - let index = self.get_index(); - let value = match storage.get_mut(&index) { - Some(value) => value, - None => { - storage.insert(index, AMsyncHave::new(&slice[index])); - storage.get_mut(&index).unwrap() - } - }; - self.advance(n); - Some(value) - } - - pub fn is_stopped(&self) -> bool { - let len = self.len as isize; - self.offset < -len || self.offset == len - } - - pub fn prev(&mut self, n: isize) -> Option<*const AMsyncHave> { - self.advance(-n); - if self.is_stopped() { - return None; - } - let slice: &[am::sync::Have] = - unsafe { std::slice::from_raw_parts(self.ptr as *const am::sync::Have, self.len) }; - let storage = unsafe { &mut 
*(self.storage as *mut BTreeMap) }; - let index = self.get_index(); - Some(match storage.get_mut(&index) { - Some(value) => value, - None => { - storage.insert(index, AMsyncHave::new(&slice[index])); - storage.get_mut(&index).unwrap() - } - }) - } - - pub fn reversed(&self) -> Self { - Self { - len: self.len, - offset: -(self.offset + 1), - ptr: self.ptr, - storage: self.storage, - } - } - - pub fn rewound(&self) -> Self { - Self { - len: self.len, - offset: if self.offset < 0 { -1 } else { 0 }, - ptr: self.ptr, - storage: self.storage, - } - } -} - -impl From for [u8; USIZE_USIZE_USIZE_USIZE_] { - fn from(detail: Detail) -> Self { - unsafe { - std::slice::from_raw_parts( - (&detail as *const Detail) as *const u8, - USIZE_USIZE_USIZE_USIZE_, - ) - .try_into() - .unwrap() - } - } -} - -/// \struct AMsyncHaves -/// \installed_headerfile -/// \brief A random-access iterator over a sequence of synchronization haves. -#[repr(C)] -#[derive(Eq, PartialEq)] -pub struct AMsyncHaves { - /// An implementation detail that is intentionally opaque. - /// \warning Modifying \p detail will cause undefined behavior. - /// \note The actual size of \p detail will vary by platform, this is just - /// the one for the platform this documentation was built on. 
- detail: [u8; USIZE_USIZE_USIZE_USIZE_], -} - -impl AMsyncHaves { - pub fn new(haves: &[am::sync::Have], storage: &mut BTreeMap) -> Self { - Self { - detail: Detail::new(haves, 0, storage).into(), - } - } - - pub fn advance(&mut self, n: isize) { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.advance(n); - } - - pub fn len(&self) -> usize { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - detail.len - } - - pub fn next(&mut self, n: isize) -> Option<*const AMsyncHave> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.next(n) - } - - pub fn prev(&mut self, n: isize) -> Option<*const AMsyncHave> { - let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; - detail.prev(n) - } - - pub fn reversed(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.reversed().into(), - } - } - - pub fn rewound(&self) -> Self { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - Self { - detail: detail.rewound().into(), - } - } -} - -impl AsRef<[am::sync::Have]> for AMsyncHaves { - fn as_ref(&self) -> &[am::sync::Have] { - let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; - unsafe { std::slice::from_raw_parts(detail.ptr as *const am::sync::Have, detail.len) } - } -} - -impl Default for AMsyncHaves { - fn default() -> Self { - Self { - detail: [0; USIZE_USIZE_USIZE_USIZE_], - } - } -} - -/// \memberof AMsyncHaves -/// \brief Advances an iterator over a sequence of synchronization haves by at -/// most \p |n| positions where the sign of \p n is relative to the -/// iterator's direction. -/// -/// \param[in,out] sync_haves A pointer to an `AMsyncHaves` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \pre \p sync_haves `!= NULL`. 
-/// \internal -/// -/// #Safety -/// sync_haves must be a valid pointer to an AMsyncHaves -#[no_mangle] -pub unsafe extern "C" fn AMsyncHavesAdvance(sync_haves: *mut AMsyncHaves, n: isize) { - if let Some(sync_haves) = sync_haves.as_mut() { - sync_haves.advance(n); - }; -} - -/// \memberof AMsyncHaves -/// \brief Tests the equality of two sequences of synchronization haves -/// underlying a pair of iterators. -/// -/// \param[in] sync_haves1 A pointer to an `AMsyncHaves` struct. -/// \param[in] sync_haves2 A pointer to an `AMsyncHaves` struct. -/// \return `true` if \p sync_haves1 `==` \p sync_haves2 and `false` otherwise. -/// \pre \p sync_haves1 `!= NULL`. -/// \pre \p sync_haves2 `!= NULL`. -/// \internal -/// -/// #Safety -/// sync_haves1 must be a valid pointer to an AMsyncHaves -/// sync_haves2 must be a valid pointer to an AMsyncHaves -#[no_mangle] -pub unsafe extern "C" fn AMsyncHavesEqual( - sync_haves1: *const AMsyncHaves, - sync_haves2: *const AMsyncHaves, -) -> bool { - match (sync_haves1.as_ref(), sync_haves2.as_ref()) { - (Some(sync_haves1), Some(sync_haves2)) => sync_haves1.as_ref() == sync_haves2.as_ref(), - (None, Some(_)) | (Some(_), None) | (None, None) => false, - } -} - -/// \memberof AMsyncHaves -/// \brief Gets the synchronization have at the current position of an iterator -/// over a sequence of synchronization haves and then advances it by at -/// most \p |n| positions where the sign of \p n is relative to the -/// iterator's direction. -/// -/// \param[in,out] sync_haves A pointer to an `AMsyncHaves` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A pointer to an `AMsyncHave` struct that's `NULL` when -/// \p sync_haves was previously advanced past its forward/reverse -/// limit. -/// \pre \p sync_haves `!= NULL`. 
-/// \internal -/// -/// #Safety -/// sync_haves must be a valid pointer to an AMsyncHaves -#[no_mangle] -pub unsafe extern "C" fn AMsyncHavesNext( - sync_haves: *mut AMsyncHaves, - n: isize, -) -> *const AMsyncHave { - if let Some(sync_haves) = sync_haves.as_mut() { - if let Some(sync_have) = sync_haves.next(n) { - return sync_have; - } - } - std::ptr::null() -} - -/// \memberof AMsyncHaves -/// \brief Advances an iterator over a sequence of synchronization haves by at -/// most \p |n| positions where the sign of \p n is relative to the -/// iterator's direction and then gets the synchronization have at its -/// new position. -/// -/// \param[in,out] sync_haves A pointer to an `AMsyncHaves` struct. -/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum -/// number of positions to advance. -/// \return A pointer to an `AMsyncHave` struct that's `NULL` when -/// \p sync_haves is presently advanced past its forward/reverse limit. -/// \pre \p sync_haves `!= NULL`. -/// \internal -/// -/// #Safety -/// sync_haves must be a valid pointer to an AMsyncHaves -#[no_mangle] -pub unsafe extern "C" fn AMsyncHavesPrev( - sync_haves: *mut AMsyncHaves, - n: isize, -) -> *const AMsyncHave { - if let Some(sync_haves) = sync_haves.as_mut() { - if let Some(sync_have) = sync_haves.prev(n) { - return sync_have; - } - } - std::ptr::null() -} - -/// \memberof AMsyncHaves -/// \brief Gets the size of the sequence of synchronization haves underlying an -/// iterator. -/// -/// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. -/// \return The count of values in \p sync_haves. -/// \pre \p sync_haves `!= NULL`. 
-/// \internal -/// -/// #Safety -/// sync_haves must be a valid pointer to an AMsyncHaves -#[no_mangle] -pub unsafe extern "C" fn AMsyncHavesSize(sync_haves: *const AMsyncHaves) -> usize { - if let Some(sync_haves) = sync_haves.as_ref() { - sync_haves.len() - } else { - 0 - } -} - -/// \memberof AMsyncHaves -/// \brief Creates an iterator over the same sequence of synchronization haves -/// as the given one but with the opposite position and direction. -/// -/// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. -/// \return An `AMsyncHaves` struct -/// \pre \p sync_haves `!= NULL`. -/// \internal -/// -/// #Safety -/// sync_haves must be a valid pointer to an AMsyncHaves -#[no_mangle] -pub unsafe extern "C" fn AMsyncHavesReversed(sync_haves: *const AMsyncHaves) -> AMsyncHaves { - if let Some(sync_haves) = sync_haves.as_ref() { - sync_haves.reversed() - } else { - AMsyncHaves::default() - } -} - -/// \memberof AMsyncHaves -/// \brief Creates an iterator at the starting position over the same sequence -/// of synchronization haves as the given one. -/// -/// \param[in] sync_haves A pointer to an `AMsyncHaves` struct. -/// \return An `AMsyncHaves` struct -/// \pre \p sync_haves `!= NULL`. 
-/// \internal -/// -/// #Safety -/// sync_haves must be a valid pointer to an AMsyncHaves -#[no_mangle] -pub unsafe extern "C" fn AMsyncHavesRewound(sync_haves: *const AMsyncHaves) -> AMsyncHaves { - if let Some(sync_haves) = sync_haves.as_ref() { - sync_haves.rewound() - } else { - AMsyncHaves::default() - } -} diff --git a/automerge-c/test/CMakeLists.txt b/automerge-c/test/CMakeLists.txt deleted file mode 100644 index 704a27da..00000000 --- a/automerge-c/test/CMakeLists.txt +++ /dev/null @@ -1,57 +0,0 @@ -cmake_minimum_required(VERSION 3.18 FATAL_ERROR) - -find_package(cmocka REQUIRED) - -add_executable( - test_${LIBRARY_NAME} - actor_id_tests.c - doc_tests.c - group_state.c - list_tests.c - macro_utils.c - main.c - map_tests.c - stack_utils.c - str_utils.c - ported_wasm/basic_tests.c - ported_wasm/suite.c - ported_wasm/sync_tests.c -) - -set_target_properties(test_${LIBRARY_NAME} PROPERTIES LINKER_LANGUAGE C) - -# \note An imported library's INTERFACE_INCLUDE_DIRECTORIES property can't -# contain a non-existent path so its build-time include directory -# must be specified for all of its dependent targets instead. -target_include_directories( - test_${LIBRARY_NAME} - PRIVATE "$" -) - -target_link_libraries(test_${LIBRARY_NAME} PRIVATE cmocka ${LIBRARY_NAME}) - -add_dependencies(test_${LIBRARY_NAME} ${LIBRARY_NAME}_artifacts) - -if(BUILD_SHARED_LIBS AND WIN32) - add_custom_command( - TARGET test_${LIBRARY_NAME} - POST_BUILD - COMMAND ${CMAKE_COMMAND} -E copy_if_different - ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_${CMAKE_BUILD_TYPE}_POSTFIX}${CMAKE_SHARED_LIBRARY_SUFFIX} - ${CMAKE_CURRENT_BINARY_DIR} - COMMENT "Copying the DLL built by Cargo into the test directory..." 
- VERBATIM - ) -endif() - -add_test(NAME test_${LIBRARY_NAME} COMMAND test_${LIBRARY_NAME}) - -add_custom_command( - TARGET test_${LIBRARY_NAME} - POST_BUILD - COMMAND - ${CMAKE_CTEST_COMMAND} --config $ --output-on-failure - COMMENT - "Running the test(s)..." - VERBATIM -) diff --git a/automerge-c/test/actor_id_tests.c b/automerge-c/test/actor_id_tests.c deleted file mode 100644 index 71b0f800..00000000 --- a/automerge-c/test/actor_id_tests.c +++ /dev/null @@ -1,105 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include - -/* third-party */ -#include - -/* local */ -#include -#include "str_utils.h" - -typedef struct { - uint8_t* src; - char const* str; - size_t count; -} GroupState; - -static int group_setup(void** state) { - GroupState* group_state = test_calloc(1, sizeof(GroupState)); - group_state->str = "000102030405060708090a0b0c0d0e0f"; - group_state->count = strlen(group_state->str) / 2; - group_state->src = test_malloc(group_state->count); - hex_to_bytes(group_state->str, group_state->src, group_state->count); - *state = group_state; - return 0; -} - -static int group_teardown(void** state) { - GroupState* group_state = *state; - test_free(group_state->src); - test_free(group_state); - return 0; -} - -static void test_AMactorIdInit() { - AMresult* prior_result = NULL; - AMbyteSpan prior_bytes; - char const* prior_str = NULL; - AMresult* result = NULL; - for (size_t i = 0; i != 11; ++i) { - result = AMactorIdInit(); - if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(result)); - } - assert_int_equal(AMresultSize(result), 1); - AMvalue const value = AMresultValue(result); - assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); - AMbyteSpan const bytes = AMactorIdBytes(value.actor_id); - char const* const str = AMactorIdStr(value.actor_id); - if (prior_result) { - size_t const min_count = fmax(bytes.count, prior_bytes.count); - assert_memory_not_equal(bytes.src, prior_bytes.src, min_count); - 
assert_string_not_equal(str, prior_str); - AMfree(prior_result); - } - prior_result = result; - prior_bytes = bytes; - prior_str = str; - } - AMfree(result); -} - -static void test_AMactorIdInitBytes(void **state) { - GroupState* group_state = *state; - AMresult* const result = AMactorIdInitBytes(group_state->src, group_state->count); - if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(result)); - } - assert_int_equal(AMresultSize(result), 1); - AMvalue const value = AMresultValue(result); - assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); - AMbyteSpan const bytes = AMactorIdBytes(value.actor_id); - assert_int_equal(bytes.count, group_state->count); - assert_memory_equal(bytes.src, group_state->src, bytes.count); - AMfree(result); -} - -static void test_AMactorIdInitStr(void **state) { - GroupState* group_state = *state; - AMresult* const result = AMactorIdInitStr(group_state->str); - if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(result)); - } - assert_int_equal(AMresultSize(result), 1); - AMvalue const value = AMresultValue(result); - assert_int_equal(value.tag, AM_VALUE_ACTOR_ID); - char const* const str = AMactorIdStr(value.actor_id); - assert_int_equal(strlen(str), group_state->count * 2); - assert_string_equal(str, group_state->str); - AMfree(result); -} - -int run_actor_id_tests(void) { - const struct CMUnitTest tests[] = { - cmocka_unit_test(test_AMactorIdInit), - cmocka_unit_test(test_AMactorIdInitBytes), - cmocka_unit_test(test_AMactorIdInitStr), - }; - - return cmocka_run_group_tests(tests, group_setup, group_teardown); -} diff --git a/automerge-c/test/doc_tests.c b/automerge-c/test/doc_tests.c deleted file mode 100644 index d8059641..00000000 --- a/automerge-c/test/doc_tests.c +++ /dev/null @@ -1,202 +0,0 @@ -#include -#include -#include -#include -#include - -/* third-party */ -#include - -/* local */ -#include -#include "group_state.h" -#include "stack_utils.h" -#include "str_utils.h" - 
-typedef struct { - GroupState* group_state; - char const* actor_id_str; - uint8_t* actor_id_bytes; - size_t actor_id_size; -} TestState; - -static int setup(void** state) { - TestState* test_state = test_calloc(1, sizeof(TestState)); - group_setup((void**)&test_state->group_state); - test_state->actor_id_str = "000102030405060708090a0b0c0d0e0f"; - test_state->actor_id_size = strlen(test_state->actor_id_str) / 2; - test_state->actor_id_bytes = test_malloc(test_state->actor_id_size); - hex_to_bytes(test_state->actor_id_str, test_state->actor_id_bytes, test_state->actor_id_size); - *state = test_state; - return 0; -} - -static int teardown(void** state) { - TestState* test_state = *state; - group_teardown((void**)&test_state->group_state); - test_free(test_state->actor_id_bytes); - test_free(test_state); - return 0; -} - -static void test_AMkeys_empty() { - AMresultStack* stack = NULL; - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMstrs forward = AMpush(&stack, - AMkeys(doc, AM_ROOT, NULL), - AM_VALUE_STRS, - cmocka_cb).strs; - assert_int_equal(AMstrsSize(&forward), 0); - AMstrs reverse = AMstrsReversed(&forward); - assert_int_equal(AMstrsSize(&reverse), 0); - assert_null(AMstrsNext(&forward, 1)); - assert_null(AMstrsPrev(&forward, 1)); - assert_null(AMstrsNext(&reverse, 1)); - assert_null(AMstrsPrev(&reverse, 1)); - AMfreeStack(&stack); -} - -static void test_AMkeys_list() { - AMresultStack* stack = NULL; - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMlistPutInt(doc, AM_ROOT, 0, true, 1)); - AMfree(AMlistPutInt(doc, AM_ROOT, 1, true, 2)); - AMfree(AMlistPutInt(doc, AM_ROOT, 2, true, 3)); - AMstrs forward = AMpush(&stack, - AMkeys(doc, AM_ROOT, NULL), - AM_VALUE_STRS, - cmocka_cb).strs; - assert_int_equal(AMstrsSize(&forward), 3); - AMstrs reverse = AMstrsReversed(&forward); - assert_int_equal(AMstrsSize(&reverse), 3); - /* Forward iterator forward. 
*/ - char const* str = AMstrsNext(&forward, 1); - assert_ptr_equal(strstr(str, "1@"), str); - str = AMstrsNext(&forward, 1); - assert_ptr_equal(strstr(str, "2@"), str); - str = AMstrsNext(&forward, 1); - assert_ptr_equal(strstr(str, "3@"), str); - assert_null(AMstrsNext(&forward, 1)); - /* Forward iterator reverse. */ - str = AMstrsPrev(&forward, 1); - assert_ptr_equal(strstr(str, "3@"), str); - str = AMstrsPrev(&forward, 1); - assert_ptr_equal(strstr(str, "2@"), str); - str = AMstrsPrev(&forward, 1); - assert_ptr_equal(strstr(str, "1@"), str); - assert_null(AMstrsPrev(&forward, 1)); - /* Reverse iterator forward. */ - str = AMstrsNext(&reverse, 1); - assert_ptr_equal(strstr(str, "3@"), str); - str = AMstrsNext(&reverse, 1); - assert_ptr_equal(strstr(str, "2@"), str); - str = AMstrsNext(&reverse, 1); - assert_ptr_equal(strstr(str, "1@"), str); - /* Reverse iterator reverse. */ - assert_null(AMstrsNext(&reverse, 1)); - str = AMstrsPrev(&reverse, 1); - assert_ptr_equal(strstr(str, "1@"), str); - str = AMstrsPrev(&reverse, 1); - assert_ptr_equal(strstr(str, "2@"), str); - str = AMstrsPrev(&reverse, 1); - assert_ptr_equal(strstr(str, "3@"), str); - assert_null(AMstrsPrev(&reverse, 1)); - AMfreeStack(&stack); -} - -static void test_AMkeys_map() { - AMresultStack* stack = NULL; - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMmapPutInt(doc, AM_ROOT, "one", 1)); - AMfree(AMmapPutInt(doc, AM_ROOT, "two", 2)); - AMfree(AMmapPutInt(doc, AM_ROOT, "three", 3)); - AMstrs forward = AMpush(&stack, - AMkeys(doc, AM_ROOT, NULL), - AM_VALUE_STRS, - cmocka_cb).strs; - assert_int_equal(AMstrsSize(&forward), 3); - AMstrs reverse = AMstrsReversed(&forward); - assert_int_equal(AMstrsSize(&reverse), 3); - /* Forward iterator forward. 
*/ - assert_string_equal(AMstrsNext(&forward, 1), "one"); - assert_string_equal(AMstrsNext(&forward, 1), "three"); - assert_string_equal(AMstrsNext(&forward, 1), "two"); - assert_null(AMstrsNext(&forward, 1)); - /* Forward iterator reverse. */ - assert_string_equal(AMstrsPrev(&forward, 1), "two"); - assert_string_equal(AMstrsPrev(&forward, 1), "three"); - assert_string_equal(AMstrsPrev(&forward, 1), "one"); - assert_null(AMstrsPrev(&forward, 1)); - /* Reverse iterator forward. */ - assert_string_equal(AMstrsNext(&reverse, 1), "two"); - assert_string_equal(AMstrsNext(&reverse, 1), "three"); - assert_string_equal(AMstrsNext(&reverse, 1), "one"); - assert_null(AMstrsNext(&reverse, 1)); - /* Reverse iterator reverse. */ - assert_string_equal(AMstrsPrev(&reverse, 1), "one"); - assert_string_equal(AMstrsPrev(&reverse, 1), "three"); - assert_string_equal(AMstrsPrev(&reverse, 1), "two"); - assert_null(AMstrsPrev(&reverse, 1)); - AMfreeStack(&stack); -} - -static void test_AMputActor_bytes(void **state) { - TestState* test_state = *state; - AMactorId const* actor_id = AMpush(&test_state->group_state->stack, - AMactorIdInitBytes( - test_state->actor_id_bytes, - test_state->actor_id_size), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; - AMfree(AMsetActorId(test_state->group_state->doc, actor_id)); - actor_id = AMpush(&test_state->group_state->stack, - AMgetActorId(test_state->group_state->doc), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; - AMbyteSpan const bytes = AMactorIdBytes(actor_id); - assert_int_equal(bytes.count, test_state->actor_id_size); - assert_memory_equal(bytes.src, test_state->actor_id_bytes, bytes.count); -} - -static void test_AMputActor_str(void **state) { - TestState* test_state = *state; - AMactorId const* actor_id = AMpush(&test_state->group_state->stack, - AMactorIdInitStr(test_state->actor_id_str), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; - AMfree(AMsetActorId(test_state->group_state->doc, actor_id)); - actor_id = 
AMpush(&test_state->group_state->stack, - AMgetActorId(test_state->group_state->doc), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; - char const* const str = AMactorIdStr(actor_id); - assert_int_equal(strlen(str), test_state->actor_id_size * 2); - assert_string_equal(str, test_state->actor_id_str); -} - -static void test_AMspliceText() { - AMresultStack* stack = NULL; - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMspliceText(doc, AM_ROOT, 0, 0, "one + ")); - AMfree(AMspliceText(doc, AM_ROOT, 4, 2, "two = ")); - AMfree(AMspliceText(doc, AM_ROOT, 8, 2, "three")); - char const* const text = AMpush(&stack, - AMtext(doc, AM_ROOT, NULL), - AM_VALUE_STR, - cmocka_cb).str; - assert_string_equal(text, "one two three"); - AMfreeStack(&stack); -} - -int run_doc_tests(void) { - const struct CMUnitTest tests[] = { - cmocka_unit_test(test_AMkeys_empty), - cmocka_unit_test(test_AMkeys_list), - cmocka_unit_test(test_AMkeys_map), - cmocka_unit_test_setup_teardown(test_AMputActor_bytes, setup, teardown), - cmocka_unit_test_setup_teardown(test_AMputActor_str, setup, teardown), - cmocka_unit_test(test_AMspliceText), - }; - - return cmocka_run_group_tests(tests, NULL, NULL); -} diff --git a/automerge-c/test/group_state.c b/automerge-c/test/group_state.c deleted file mode 100644 index 0ee14317..00000000 --- a/automerge-c/test/group_state.c +++ /dev/null @@ -1,27 +0,0 @@ -#include -#include -#include - -/* third-party */ -#include - -/* local */ -#include "group_state.h" -#include "stack_utils.h" - -int group_setup(void** state) { - GroupState* group_state = test_calloc(1, sizeof(GroupState)); - group_state->doc = AMpush(&group_state->stack, - AMcreate(NULL), - AM_VALUE_DOC, - cmocka_cb).doc; - *state = group_state; - return 0; -} - -int group_teardown(void** state) { - GroupState* group_state = *state; - AMfreeStack(&group_state->stack); - test_free(group_state); - return 0; -} diff --git a/automerge-c/test/group_state.h 
b/automerge-c/test/group_state.h deleted file mode 100644 index a71d9dc9..00000000 --- a/automerge-c/test/group_state.h +++ /dev/null @@ -1,16 +0,0 @@ -#ifndef GROUP_STATE_H -#define GROUP_STATE_H - -/* local */ -#include - -typedef struct { - AMresultStack* stack; - AMdoc* doc; -} GroupState; - -int group_setup(void** state); - -int group_teardown(void** state); - -#endif /* GROUP_STATE_H */ diff --git a/automerge-c/test/list_tests.c b/automerge-c/test/list_tests.c deleted file mode 100644 index db1dc086..00000000 --- a/automerge-c/test/list_tests.c +++ /dev/null @@ -1,379 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include - -/* third-party */ -#include - -/* local */ -#include -#include "group_state.h" -#include "macro_utils.h" -#include "stack_utils.h" - -static void test_AMlistIncrement(void** state) { - GroupState* group_state = *state; - AMfree(AMlistPutCounter(group_state->doc, AM_ROOT, 0, true, 0)); - assert_int_equal(AMpush(&group_state->stack, - AMlistGet(group_state->doc, AM_ROOT, 0, NULL), - AM_VALUE_COUNTER, - cmocka_cb).counter, 0); - AMfree(AMpop(&group_state->stack)); - AMfree(AMlistIncrement(group_state->doc, AM_ROOT, 0, 3)); - assert_int_equal(AMpush(&group_state->stack, - AMlistGet(group_state->doc, AM_ROOT, 0, NULL), - AM_VALUE_COUNTER, - cmocka_cb).counter, 3); - AMfree(AMpop(&group_state->stack)); -} - -#define test_AMlistPut(suffix, mode) test_AMlistPut ## suffix ## _ ## mode - -#define static_void_test_AMlistPut(suffix, mode, member, scalar_value) \ -static void test_AMlistPut ## suffix ## _ ## mode(void **state) { \ - GroupState* group_state = *state; \ - AMfree(AMlistPut ## suffix(group_state->doc, \ - AM_ROOT, \ - 0, \ - !strcmp(#mode, "insert"), \ - scalar_value)); \ - assert_true(AMpush( \ - &group_state->stack, \ - AMlistGet(group_state->doc, AM_ROOT, 0, NULL), \ - AMvalue_discriminant(#suffix), \ - cmocka_cb).member == scalar_value); \ - AMfree(AMpop(&group_state->stack)); \ -} - -#define 
test_AMlistPutBytes(mode) test_AMlistPutBytes ## _ ## mode - -#define static_void_test_AMlistPutBytes(mode, bytes_value) \ -static void test_AMlistPutBytes_ ## mode(void **state) { \ - static size_t const BYTES_SIZE = sizeof(bytes_value) / sizeof(uint8_t); \ - \ - GroupState* group_state = *state; \ - AMfree(AMlistPutBytes(group_state->doc, \ - AM_ROOT, \ - 0, \ - !strcmp(#mode, "insert"), \ - bytes_value, \ - BYTES_SIZE)); \ - AMbyteSpan const bytes = AMpush( \ - &group_state->stack, \ - AMlistGet(group_state->doc, AM_ROOT, 0, NULL), \ - AM_VALUE_BYTES, \ - cmocka_cb).bytes; \ - assert_int_equal(bytes.count, BYTES_SIZE); \ - assert_memory_equal(bytes.src, bytes_value, BYTES_SIZE); \ - AMfree(AMpop(&group_state->stack)); \ -} - -#define test_AMlistPutNull(mode) test_AMlistPutNull_ ## mode - -#define static_void_test_AMlistPutNull(mode) \ -static void test_AMlistPutNull_ ## mode(void **state) { \ - GroupState* group_state = *state; \ - AMfree(AMlistPutNull(group_state->doc, \ - AM_ROOT, \ - 0, \ - !strcmp(#mode, "insert"))); \ - AMresult* const result = AMlistGet(group_state->doc, AM_ROOT, 0, NULL); \ - if (AMresultStatus(result) != AM_STATUS_OK) { \ - fail_msg("%s", AMerrorMessage(result)); \ - } \ - assert_int_equal(AMresultSize(result), 1); \ - assert_int_equal(AMresultValue(result).tag, AM_VALUE_NULL); \ - AMfree(result); \ -} - -#define test_AMlistPutObject(label, mode) test_AMlistPutObject_ ## label ## _ ## mode - -#define static_void_test_AMlistPutObject(label, mode) \ -static void test_AMlistPutObject_ ## label ## _ ## mode(void **state) { \ - GroupState* group_state = *state; \ - AMobjId const* const obj_id = AMpush( \ - &group_state->stack, \ - AMlistPutObject(group_state->doc, \ - AM_ROOT, \ - 0, \ - !strcmp(#mode, "insert"), \ - AMobjType_tag(#label)), \ - AM_VALUE_OBJ_ID, \ - cmocka_cb).obj_id; \ - assert_non_null(obj_id); \ - assert_int_equal(AMobjSize(group_state->doc, obj_id, NULL), 0); \ - AMfree(AMpop(&group_state->stack)); \ -} - -#define 
test_AMlistPutStr(mode) test_AMlistPutStr ## _ ## mode - -#define static_void_test_AMlistPutStr(mode, str_value) \ -static void test_AMlistPutStr_ ## mode(void **state) { \ - GroupState* group_state = *state; \ - AMfree(AMlistPutStr(group_state->doc, \ - AM_ROOT, \ - 0, \ - !strcmp(#mode, "insert"), \ - str_value)); \ - assert_string_equal(AMpush( \ - &group_state->stack, \ - AMlistGet(group_state->doc, AM_ROOT, 0, NULL), \ - AM_VALUE_STR, \ - cmocka_cb).str, str_value); \ - AMfree(AMpop(&group_state->stack)); \ -} - -static_void_test_AMlistPut(Bool, insert, boolean, true) - -static_void_test_AMlistPut(Bool, update, boolean, true) - -static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX}; - -static_void_test_AMlistPutBytes(insert, BYTES_VALUE) - -static_void_test_AMlistPutBytes(update, BYTES_VALUE) - -static_void_test_AMlistPut(Counter, insert, counter, INT64_MAX) - -static_void_test_AMlistPut(Counter, update, counter, INT64_MAX) - -static_void_test_AMlistPut(F64, insert, f64, DBL_MAX) - -static_void_test_AMlistPut(F64, update, f64, DBL_MAX) - -static_void_test_AMlistPut(Int, insert, int_, INT64_MAX) - -static_void_test_AMlistPut(Int, update, int_, INT64_MAX) - -static_void_test_AMlistPutNull(insert) - -static_void_test_AMlistPutNull(update) - -static_void_test_AMlistPutObject(List, insert) - -static_void_test_AMlistPutObject(List, update) - -static_void_test_AMlistPutObject(Map, insert) - -static_void_test_AMlistPutObject(Map, update) - -static_void_test_AMlistPutObject(Text, insert) - -static_void_test_AMlistPutObject(Text, update) - -static_void_test_AMlistPutStr(insert, "Hello, world!") - -static_void_test_AMlistPutStr(update, "Hello, world!") - -static_void_test_AMlistPut(Timestamp, insert, timestamp, INT64_MAX) - -static_void_test_AMlistPut(Timestamp, update, timestamp, INT64_MAX) - -static_void_test_AMlistPut(Uint, insert, uint, UINT64_MAX) - -static_void_test_AMlistPut(Uint, update, uint, UINT64_MAX) - -static void 
test_insert_at_index(void** state) { - AMresultStack* stack = *state; - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - - AMobjId const* const list = AMpush( - &stack, - AMlistPutObject(doc, AM_ROOT, 0, true, AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - /* Insert both at the same index. */ - AMfree(AMlistPutUint(doc, list, 0, true, 0)); - AMfree(AMlistPutUint(doc, list, 0, true, 1)); - - assert_int_equal(AMobjSize(doc, list, NULL), 2); - AMstrs const keys = AMpush(&stack, - AMkeys(doc, list, NULL), - AM_VALUE_STRS, - cmocka_cb).strs; - assert_int_equal(AMstrsSize(&keys), 2); - AMlistItems const range = AMpush(&stack, - AMlistRange(doc, list, 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - assert_int_equal(AMlistItemsSize(&range), 2); -} - -static void test_get_list_values(void** state) { - AMresultStack* stack = *state; - AMdoc* const doc1 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMobjId const* const list = AMpush( - &stack, - AMmapPutObject(doc1, AM_ROOT, "list", AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - - /* Insert elements. 
*/ - AMfree(AMlistPutStr(doc1, list, 0, true, "First")); - AMfree(AMlistPutStr(doc1, list, 0, true, "Second")); - AMfree(AMlistPutStr(doc1, list, 0, true, "Third")); - AMfree(AMlistPutStr(doc1, list, 0, true, "Fourth")); - AMfree(AMlistPutStr(doc1, list, 0, true, "Fifth")); - AMfree(AMlistPutStr(doc1, list, 0, true, "Sixth")); - AMfree(AMlistPutStr(doc1, list, 0, true, "Seventh")); - AMfree(AMlistPutStr(doc1, list, 0, true, "Eighth")); - AMfree(AMcommit(doc1, NULL, NULL)); - - AMchangeHashes const v1 = AMpush(&stack, - AMgetHeads(doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMdoc* const doc2 = AMpush(&stack, - AMfork(doc1, NULL), - AM_VALUE_DOC, - cmocka_cb).doc; - - AMfree(AMlistPutStr(doc1, list, 2, false, "Third V2")); - AMfree(AMcommit(doc1, NULL, NULL)); - - AMfree(AMlistPutStr(doc2, list, 2, false, "Third V3")); - AMfree(AMcommit(doc2, NULL, NULL)); - - AMfree(AMmerge(doc1, doc2)); - - AMlistItems range = AMpush(&stack, - AMlistRange(doc1, list, 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - assert_int_equal(AMlistItemsSize(&range), 8); - - AMlistItem const* list_item = NULL; - while ((list_item = AMlistItemsNext(&range, 1)) != NULL) { - AMvalue const val1 = AMlistItemValue(list_item); - AMresult* result = AMlistGet(doc1, list, AMlistItemIndex(list_item), NULL); - AMvalue const val2 = AMresultValue(result); - assert_true(AMvalueEqual(&val1, &val2)); - assert_non_null(AMlistItemObjId(list_item)); - AMfree(result); - } - - range = AMpush(&stack, - AMlistRange(doc1, list, 3, 6, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - AMlistItems range_back = AMlistItemsReversed(&range); - assert_int_equal(AMlistItemsSize(&range), 3); - assert_int_equal(AMlistItemIndex(AMlistItemsNext(&range, 1)), 3); - assert_int_equal(AMlistItemIndex(AMlistItemsNext(&range_back, 1)), 5); - - range = AMlistItemsRewound(&range); - while ((list_item = AMlistItemsNext(&range, 1)) != NULL) { - AMvalue const val1 = AMlistItemValue(list_item); 
- AMresult* result = AMlistGet(doc1, list, AMlistItemIndex(list_item), NULL); - AMvalue const val2 = AMresultValue(result); - assert_true(AMvalueEqual(&val1, &val2)); - assert_non_null(AMlistItemObjId(list_item)); - AMfree(result); - } - - range = AMpush(&stack, - AMlistRange(doc1, list, 0, SIZE_MAX, &v1), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - assert_int_equal(AMlistItemsSize(&range), 8); - while ((list_item = AMlistItemsNext(&range, 1)) != NULL) { - AMvalue const val1 = AMlistItemValue(list_item); - AMresult* result = AMlistGet(doc1, list, AMlistItemIndex(list_item), &v1); - AMvalue const val2 = AMresultValue(result); - assert_true(AMvalueEqual(&val1, &val2)); - assert_non_null(AMlistItemObjId(list_item)); - AMfree(result); - } - - range = AMpush(&stack, - AMlistRange(doc1, list, 3, 6, &v1), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - range_back = AMlistItemsReversed(&range); - assert_int_equal(AMlistItemsSize(&range), 3); - assert_int_equal(AMlistItemIndex(AMlistItemsNext(&range, 1)), 3); - assert_int_equal(AMlistItemIndex(AMlistItemsNext(&range_back, 1)), 5); - - range = AMlistItemsRewound(&range); - while ((list_item = AMlistItemsNext(&range, 1)) != NULL) { - AMvalue const val1 = AMlistItemValue(list_item); - AMresult* result = AMlistGet(doc1, list, AMlistItemIndex(list_item), &v1); - AMvalue const val2 = AMresultValue(result); - assert_true(AMvalueEqual(&val1, &val2)); - assert_non_null(AMlistItemObjId(list_item)); - AMfree(result); - } - - range = AMpush(&stack, - AMlistRange(doc1, list, 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - AMobjItems values = AMpush(&stack, - AMobjValues(doc1, list, NULL), - AM_VALUE_OBJ_ITEMS, - cmocka_cb).obj_items; - assert_int_equal(AMlistItemsSize(&range), AMobjItemsSize(&values)); - AMobjItem const* value = NULL; - while ((list_item = AMlistItemsNext(&range, 1)) != NULL && - (value = AMobjItemsNext(&values, 1)) != NULL) { - AMvalue const val1 = AMlistItemValue(list_item); - AMvalue 
const val2 = AMobjItemValue(value); - assert_true(AMvalueEqual(&val1, &val2)); - assert_true(AMobjIdEqual(AMlistItemObjId(list_item), AMobjItemObjId(value))); - } - - range = AMpush(&stack, - AMlistRange(doc1, list, 0, SIZE_MAX, &v1), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - values = AMpush(&stack, - AMobjValues(doc1, list, &v1), - AM_VALUE_OBJ_ITEMS, - cmocka_cb).obj_items; - assert_int_equal(AMlistItemsSize(&range), AMobjItemsSize(&values)); - while ((list_item = AMlistItemsNext(&range, 1)) != NULL && - (value = AMobjItemsNext(&values, 1)) != NULL) { - AMvalue const val1 = AMlistItemValue(list_item); - AMvalue const val2 = AMobjItemValue(value); - assert_true(AMvalueEqual(&val1, &val2)); - assert_true(AMobjIdEqual(AMlistItemObjId(list_item), AMobjItemObjId(value))); - } -} - -int run_list_tests(void) { - const struct CMUnitTest tests[] = { - cmocka_unit_test(test_AMlistIncrement), - cmocka_unit_test(test_AMlistPut(Bool, insert)), - cmocka_unit_test(test_AMlistPut(Bool, update)), - cmocka_unit_test(test_AMlistPutBytes(insert)), - cmocka_unit_test(test_AMlistPutBytes(update)), - cmocka_unit_test(test_AMlistPut(Counter, insert)), - cmocka_unit_test(test_AMlistPut(Counter, update)), - cmocka_unit_test(test_AMlistPut(F64, insert)), - cmocka_unit_test(test_AMlistPut(F64, update)), - cmocka_unit_test(test_AMlistPut(Int, insert)), - cmocka_unit_test(test_AMlistPut(Int, update)), - cmocka_unit_test(test_AMlistPutNull(insert)), - cmocka_unit_test(test_AMlistPutNull(update)), - cmocka_unit_test(test_AMlistPutObject(List, insert)), - cmocka_unit_test(test_AMlistPutObject(List, update)), - cmocka_unit_test(test_AMlistPutObject(Map, insert)), - cmocka_unit_test(test_AMlistPutObject(Map, update)), - cmocka_unit_test(test_AMlistPutObject(Text, insert)), - cmocka_unit_test(test_AMlistPutObject(Text, update)), - cmocka_unit_test(test_AMlistPutStr(insert)), - cmocka_unit_test(test_AMlistPutStr(update)), - cmocka_unit_test(test_AMlistPut(Timestamp, insert)), - 
cmocka_unit_test(test_AMlistPut(Timestamp, update)), - cmocka_unit_test(test_AMlistPut(Uint, insert)), - cmocka_unit_test(test_AMlistPut(Uint, update)), - cmocka_unit_test_setup_teardown(test_insert_at_index, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_get_list_values, setup_stack, teardown_stack), - }; - - return cmocka_run_group_tests(tests, group_setup, group_teardown); -} diff --git a/automerge-c/test/macro_utils.c b/automerge-c/test/macro_utils.c deleted file mode 100644 index 35c55b85..00000000 --- a/automerge-c/test/macro_utils.c +++ /dev/null @@ -1,24 +0,0 @@ -#include - -/* local */ -#include "macro_utils.h" - -AMvalueVariant AMvalue_discriminant(char const* suffix) { - if (!strcmp(suffix, "Bool")) return AM_VALUE_BOOLEAN; - else if (!strcmp(suffix, "Bytes")) return AM_VALUE_BYTES; - else if (!strcmp(suffix, "Counter")) return AM_VALUE_COUNTER; - else if (!strcmp(suffix, "F64")) return AM_VALUE_F64; - else if (!strcmp(suffix, "Int")) return AM_VALUE_INT; - else if (!strcmp(suffix, "Null")) return AM_VALUE_NULL; - else if (!strcmp(suffix, "Str")) return AM_VALUE_STR; - else if (!strcmp(suffix, "Timestamp")) return AM_VALUE_TIMESTAMP; - else if (!strcmp(suffix, "Uint")) return AM_VALUE_UINT; - else return AM_VALUE_VOID; -} - -AMobjType AMobjType_tag(char const* obj_type_label) { - if (!strcmp(obj_type_label, "List")) return AM_OBJ_TYPE_LIST; - else if (!strcmp(obj_type_label, "Map")) return AM_OBJ_TYPE_MAP; - else if (!strcmp(obj_type_label, "Text")) return AM_OBJ_TYPE_TEXT; - else return 0; -} diff --git a/automerge-c/test/macro_utils.h b/automerge-c/test/macro_utils.h deleted file mode 100644 index 62e262ce..00000000 --- a/automerge-c/test/macro_utils.h +++ /dev/null @@ -1,24 +0,0 @@ -#ifndef MACRO_UTILS_H -#define MACRO_UTILS_H - -/* local */ -#include - -/** - * \brief Gets the result value discriminant corresponding to a function name - * suffix. - * - * \param[in] suffix A string. - * \return An `AMvalue` struct discriminant. 
- */ -AMvalueVariant AMvalue_discriminant(char const* suffix); - -/** - * \brief Gets the object type tag corresponding to an object type label. - * - * \param[in] obj_type_label A string. - * \return An `AMobjType` enum tag. - */ -AMobjType AMobjType_tag(char const* obj_type_label); - -#endif /* MACRO_UTILS_H */ diff --git a/automerge-c/test/map_tests.c b/automerge-c/test/map_tests.c deleted file mode 100644 index 85f4ea93..00000000 --- a/automerge-c/test/map_tests.c +++ /dev/null @@ -1,1164 +0,0 @@ -#include -#include -#include -#include -#include -#include - -/* third-party */ -#include - -/* local */ -#include -#include "group_state.h" -#include "macro_utils.h" -#include "stack_utils.h" - -static void test_AMmapIncrement(void** state) { - GroupState* group_state = *state; - AMfree(AMmapPutCounter(group_state->doc, AM_ROOT, "Counter", 0)); - assert_int_equal(AMpush(&group_state->stack, - AMmapGet(group_state->doc, AM_ROOT, "Counter", NULL), - AM_VALUE_COUNTER, - cmocka_cb).counter, 0); - AMfree(AMpop(&group_state->stack)); - AMfree(AMmapIncrement(group_state->doc, AM_ROOT, "Counter", 3)); - assert_int_equal(AMpush(&group_state->stack, - AMmapGet(group_state->doc, AM_ROOT, "Counter", NULL), - AM_VALUE_COUNTER, - cmocka_cb).counter, 3); - AMfree(AMpop(&group_state->stack)); -} - -#define test_AMmapPut(suffix) test_AMmapPut ## suffix - -#define static_void_test_AMmapPut(suffix, member, scalar_value) \ -static void test_AMmapPut ## suffix(void **state) { \ - GroupState* group_state = *state; \ - AMfree(AMmapPut ## suffix(group_state->doc, \ - AM_ROOT, \ - #suffix, \ - scalar_value)); \ - assert_true(AMpush( \ - &group_state->stack, \ - AMmapGet(group_state->doc, AM_ROOT, #suffix, NULL), \ - AMvalue_discriminant(#suffix), \ - cmocka_cb).member == scalar_value); \ - AMfree(AMpop(&group_state->stack)); \ -} - -static void test_AMmapPutBytes(void **state) { - static char const* const KEY = "Bytes"; - static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, 
INT8_MAX}; - static size_t const BYTES_SIZE = sizeof(BYTES_VALUE) / sizeof(uint8_t); - - GroupState* group_state = *state; - AMfree(AMmapPutBytes(group_state->doc, - AM_ROOT, - KEY, - BYTES_VALUE, - BYTES_SIZE)); - AMbyteSpan const bytes = AMpush(&group_state->stack, - AMmapGet(group_state->doc, AM_ROOT, KEY, NULL), - AM_VALUE_BYTES, - cmocka_cb).bytes; - assert_int_equal(bytes.count, BYTES_SIZE); - assert_memory_equal(bytes.src, BYTES_VALUE, BYTES_SIZE); - AMfree(AMpop(&group_state->stack)); -} - -static void test_AMmapPutNull(void **state) { - static char const* const KEY = "Null"; - - GroupState* group_state = *state; - AMfree(AMmapPutNull(group_state->doc, AM_ROOT, KEY)); - AMresult* const result = AMmapGet(group_state->doc, AM_ROOT, KEY, NULL); - if (AMresultStatus(result) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage(result)); - } - assert_int_equal(AMresultSize(result), 1); - assert_int_equal(AMresultValue(result).tag, AM_VALUE_NULL); - AMfree(result); -} - -#define test_AMmapPutObject(label) test_AMmapPutObject_ ## label - -#define static_void_test_AMmapPutObject(label) \ -static void test_AMmapPutObject_ ## label(void **state) { \ - GroupState* group_state = *state; \ - AMobjId const* const obj_id = AMpush( \ - &group_state->stack, \ - AMmapPutObject(group_state->doc, \ - AM_ROOT, \ - #label, \ - AMobjType_tag(#label)), \ - AM_VALUE_OBJ_ID, \ - cmocka_cb).obj_id; \ - assert_non_null(obj_id); \ - assert_int_equal(AMobjSize(group_state->doc, obj_id, NULL), 0); \ - AMfree(AMpop(&group_state->stack)); \ -} - -static void test_AMmapPutStr(void **state) { - static char const* const KEY = "Str"; - static char const* const STR_VALUE = "Hello, world!"; - - GroupState* group_state = *state; - AMfree(AMmapPutStr(group_state->doc, AM_ROOT, KEY, STR_VALUE)); - assert_string_equal(AMpush(&group_state->stack, - AMmapGet(group_state->doc, AM_ROOT, KEY, NULL), - AM_VALUE_STR, - cmocka_cb).str, STR_VALUE); - AMfree(AMpop(&group_state->stack)); -} - 
-static_void_test_AMmapPut(Bool, boolean, true) - -static_void_test_AMmapPut(Counter, counter, INT64_MAX) - -static_void_test_AMmapPut(F64, f64, DBL_MAX) - -static_void_test_AMmapPut(Int, int_, INT64_MAX) - -static_void_test_AMmapPutObject(List) - -static_void_test_AMmapPutObject(Map) - -static_void_test_AMmapPutObject(Text) - -static_void_test_AMmapPut(Timestamp, timestamp, INT64_MAX) - -static_void_test_AMmapPut(Uint, uint, UINT64_MAX) - -static void test_range_iter_map(void** state) { - AMresultStack* stack = *state; - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMmapPutUint(doc, AM_ROOT, "a", 3)); - AMfree(AMmapPutUint(doc, AM_ROOT, "b", 4)); - AMfree(AMmapPutUint(doc, AM_ROOT, "c", 5)); - AMfree(AMmapPutUint(doc, AM_ROOT, "d", 6)); - AMfree(AMcommit(doc, NULL, NULL)); - AMfree(AMmapPutUint(doc, AM_ROOT, "a", 7)); - AMfree(AMcommit(doc, NULL, NULL)); - AMfree(AMmapPutUint(doc, AM_ROOT, "a", 8)); - AMfree(AMmapPutUint(doc, AM_ROOT, "d", 9)); - AMfree(AMcommit(doc, NULL, NULL)); - AMactorId const* const actor_id = AMpush(&stack, - AMgetActorId(doc), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; - AMmapItems map_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - assert_int_equal(AMmapItemsSize(&map_items), 4); - - /* ["b"-"d") */ - AMmapItems range = AMpush(&stack, - AMmapRange(doc, AM_ROOT, "b", "d", NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - /* First */ - AMmapItem const* next = AMmapItemsNext(&range, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "b"); - AMvalue next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 4); - AMobjId const* next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* 
Second */ - next = AMmapItemsNext(&range, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "c"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 5); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Third */ - assert_null(AMmapItemsNext(&range, 1)); - - /* ["b"-) */ - range = AMpush(&stack, - AMmapRange(doc, AM_ROOT, "b", NULL, NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - /* First */ - next = AMmapItemsNext(&range, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "b"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 4); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Second */ - next = AMmapItemsNext(&range, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "c"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 5); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Third */ - next = AMmapItemsNext(&range, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "d"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 9); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 7); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 
0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Fourth */ - assert_null(AMmapItemsNext(&range, 1)); - - /* [-"d") */ - range = AMpush(&stack, - AMmapRange(doc, AM_ROOT, NULL, "d", NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - /* First */ - next = AMmapItemsNext(&range, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "a"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 8); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 6); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Second */ - next = AMmapItemsNext(&range, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "b"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 4); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Third */ - next = AMmapItemsNext(&range, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "c"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 5); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Fourth */ - assert_null(AMmapItemsNext(&range, 1)); - - /* ["a"-) */ - range = AMpush(&stack, - AMmapRange(doc, AM_ROOT, "a", NULL, NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - /* First */ - next = AMmapItemsNext(&range, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "a"); - next_value = AMmapItemValue(next); - 
assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 8); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 6); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Second */ - next = AMmapItemsNext(&range, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "b"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 4); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Third */ - next = AMmapItemsNext(&range, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "c"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 5); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Fourth */ - next = AMmapItemsNext(&range, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "d"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_UINT); - assert_int_equal(next_value.uint, 9); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 7); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Fifth */ - assert_null(AMmapItemsNext(&range, 1)); -} - -static void test_map_range_back_and_forth_single(void** state) { - AMresultStack* stack = *state; - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMactorId const* const actor_id = 
AMpush(&stack, - AMgetActorId(doc), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; - - AMfree(AMmapPutStr(doc, AM_ROOT, "1", "a")); - AMfree(AMmapPutStr(doc, AM_ROOT, "2", "b")); - AMfree(AMmapPutStr(doc, AM_ROOT, "3", "c")); - - /* Forward, back, back. */ - AMmapItems range_all = AMpush(&stack, - AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - /* First */ - AMmapItem const* next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); - AMvalue next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "a"); - AMobjId const* next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Third */ - AMmapItems range_back_all = AMmapItemsReversed(&range_all); - range_back_all = AMmapItemsRewound(&range_back_all); - AMmapItem const* next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); - AMvalue next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "c"); - AMobjId const* next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - /* Second */ - next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "2"); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "b"); - next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); 
- assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - - /* Forward, back, forward. */ - range_all = AMmapItemsRewound(&range_all); - range_back_all = AMmapItemsRewound(&range_back_all); - /* First */ - next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "a"); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Third */ - next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "c"); - next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - /* Second */ - next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "2"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "b"); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - - /* Forward, forward, forward. 
*/ - range_all = AMmapItemsRewound(&range_all); - /* First */ - next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "a"); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Second */ - next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "2"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "b"); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Third */ - next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "3"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "c"); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Forward stop */ - assert_null(AMmapItemsNext(&range_all, 1)); - - /* Back, back, back. 
*/ - range_back_all = AMmapItemsRewound(&range_back_all); - /* Third */ - next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "c"); - next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - /* Second */ - next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "2"); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "b"); - next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - /* First */ - next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "1"); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "a"); - next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - /* Back stop */ - assert_null(AMmapItemsNext(&range_back_all, 1)); -} - -static void test_map_range_back_and_forth_double(void** state) { - AMresultStack* stack = *state; - AMdoc* const doc1 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMactorId const* const actor_id1= AMpush(&stack, - 
AMactorIdInitBytes("\0", 1), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; - AMfree(AMsetActorId(doc1, actor_id1)); - - AMfree(AMmapPutStr(doc1, AM_ROOT, "1", "a")); - AMfree(AMmapPutStr(doc1, AM_ROOT, "2", "b")); - AMfree(AMmapPutStr(doc1, AM_ROOT, "3", "c")); - - /* The second actor should win all conflicts here. */ - AMdoc* const doc2 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMactorId const* const actor_id2 = AMpush(&stack, - AMactorIdInitBytes("\1", 1), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; - AMfree(AMsetActorId(doc2, actor_id2)); - AMfree(AMmapPutStr(doc2, AM_ROOT, "1", "aa")); - AMfree(AMmapPutStr(doc2, AM_ROOT, "2", "bb")); - AMfree(AMmapPutStr(doc2, AM_ROOT, "3", "cc")); - - AMfree(AMmerge(doc1, doc2)); - - /* Forward, back, back. */ - AMmapItems range_all = AMpush(&stack, - AMmapRange(doc1, AM_ROOT, NULL, NULL, NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - /* First */ - AMmapItem const* next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); - AMvalue next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "aa"); - AMobjId const* next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - /* Third */ - AMmapItems range_back_all = AMmapItemsReversed(&range_all); - range_back_all = AMmapItemsRewound(&range_back_all); - AMmapItem const* next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); - AMvalue next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "cc"); - AMobjId const* next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - 
assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - /* Second */ - next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "2"); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "bb"); - next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - - /* Forward, back, forward. */ - range_all = AMmapItemsRewound(&range_all); - range_back_all = AMmapItemsRewound(&range_back_all); - /* First */ - next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "aa"); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - /* Third */ - next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "cc"); - next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - /* Second */ - next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "2"); - next_value = AMmapItemValue(next); - 
assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "bb"); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - - /* Forward, forward, forward. */ - range_all = AMmapItemsRewound(&range_all); - /* First */ - next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "aa"); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - /* Second */ - next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "2"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "bb"); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - /* Third */ - next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "3"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "cc"); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - /* Forward stop */ - assert_null(AMmapItemsNext(&range_all, 1)); - - /* Back, back, back. 
*/ - range_back_all = AMmapItemsRewound(&range_back_all); - /* Third */ - next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "cc"); - next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - /* Second */ - next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "2"); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "bb"); - next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - /* First */ - next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "1"); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "aa"); - next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - /* Back stop */ - assert_null(AMmapItemsNext(&range_back_all, 1)); -} - -static void test_map_range_at_back_and_forth_single(void** state) { - AMresultStack* stack = *state; - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMactorId const* const actor_id = AMpush(&stack, - 
AMgetActorId(doc), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; - - AMfree(AMmapPutStr(doc, AM_ROOT, "1", "a")); - AMfree(AMmapPutStr(doc, AM_ROOT, "2", "b")); - AMfree(AMmapPutStr(doc, AM_ROOT, "3", "c")); - - AMchangeHashes const heads = AMpush(&stack, - AMgetHeads(doc), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - - /* Forward, back, back. */ - AMmapItems range_all = AMpush(&stack, - AMmapRange(doc, AM_ROOT, NULL, NULL, &heads), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - /* First */ - AMmapItem const* next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); - AMvalue next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "a"); - AMobjId const* next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Third */ - AMmapItems range_back_all = AMmapItemsReversed(&range_all); - range_back_all = AMmapItemsRewound(&range_back_all); - AMmapItem const* next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); - AMvalue next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "c"); - AMobjId const* next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - /* Second */ - next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "2"); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, 
"b"); - next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - - /* Forward, back, forward. */ - range_all = AMmapItemsRewound(&range_all); - range_back_all = AMmapItemsRewound(&range_back_all); - /* First */ - next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "a"); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Third */ - next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "c"); - next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - /* Second */ - next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "2"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "b"); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - - /* Forward, forward, forward. 
*/ - range_all = AMmapItemsRewound(&range_all); - /* First */ - next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "a"); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Second */ - next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "2"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "b"); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Third */ - next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "3"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "c"); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 0); - /* Forward stop */ - assert_null(AMmapItemsNext(&range_all, 1)); - - /* Back, back, back. 
*/ - range_back_all = AMmapItemsRewound(&range_back_all); - /* Third */ - next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "c"); - next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - /* Second */ - next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "2"); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "b"); - next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - /* First */ - next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "1"); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "a"); - next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); - /* Back stop */ - assert_null(AMmapItemsNext(&range_back_all, 1)); -} - -static void test_map_range_at_back_and_forth_double(void** state) { - AMresultStack* stack = *state; - AMdoc* const doc1 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMactorId const* const actor_id1= AMpush(&stack, - 
AMactorIdInitBytes("\0", 1), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; - AMfree(AMsetActorId(doc1, actor_id1)); - - AMfree(AMmapPutStr(doc1, AM_ROOT, "1", "a")); - AMfree(AMmapPutStr(doc1, AM_ROOT, "2", "b")); - AMfree(AMmapPutStr(doc1, AM_ROOT, "3", "c")); - - /* The second actor should win all conflicts here. */ - AMdoc* const doc2 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMactorId const* const actor_id2= AMpush(&stack, - AMactorIdInitBytes("\1", 1), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id; - AMfree(AMsetActorId(doc2, actor_id2)); - AMfree(AMmapPutStr(doc2, AM_ROOT, "1", "aa")); - AMfree(AMmapPutStr(doc2, AM_ROOT, "2", "bb")); - AMfree(AMmapPutStr(doc2, AM_ROOT, "3", "cc")); - - AMfree(AMmerge(doc1, doc2)); - AMchangeHashes const heads = AMpush(&stack, - AMgetHeads(doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - - /* Forward, back, back. */ - AMmapItems range_all = AMpush(&stack, - AMmapRange(doc1, AM_ROOT, NULL, NULL, &heads), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - /* First */ - AMmapItem const* next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); - AMvalue next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "aa"); - AMobjId const* next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - /* Third */ - AMmapItems range_back_all = AMmapItemsReversed(&range_all); - range_back_all = AMmapItemsRewound(&range_back_all); - AMmapItem const* next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); - AMvalue next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "cc"); - 
AMobjId const* next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - /* Second */ - next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "2"); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "bb"); - next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - - /* Forward, back, forward. */ - range_all = AMmapItemsRewound(&range_all); - range_back_all = AMmapItemsRewound(&range_back_all); - /* First */ - next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "aa"); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - /* Third */ - next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "cc"); - next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - /* Second */ - next = AMmapItemsNext(&range_all, 
1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "2"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "bb"); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - - /* Forward, forward, forward. */ - range_all = AMmapItemsRewound(&range_all); - /* First */ - next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "1"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "aa"); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - /* Second */ - next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "2"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "bb"); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - /* Third */ - next = AMmapItemsNext(&range_all, 1); - assert_non_null(next); - assert_string_equal(AMmapItemKey(next), "3"); - next_value = AMmapItemValue(next); - assert_int_equal(next_value.tag, AM_VALUE_STR); - assert_string_equal(next_value.str, "cc"); - next_obj_id = AMmapItemObjId(next); - assert_int_equal(AMobjIdCounter(next_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_obj_id), 1); - /* Forward stop */ - assert_null(AMmapItemsNext(&range_all, 
1)); - - /* Back, back, back. */ - range_back_all = AMmapItemsRewound(&range_back_all); - /* Third */ - next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "3"); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "cc"); - next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - /* Second */ - next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "2"); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "bb"); - next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - /* First */ - next_back = AMmapItemsNext(&range_back_all, 1); - assert_non_null(next_back); - assert_string_equal(AMmapItemKey(next_back), "1"); - next_back_value = AMmapItemValue(next_back); - assert_int_equal(next_back_value.tag, AM_VALUE_STR); - assert_string_equal(next_back_value.str, "aa"); - next_back_obj_id = AMmapItemObjId(next_back); - assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); - assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); - assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); - /* Back stop */ - assert_null(AMmapItemsNext(&range_back_all, 1)); -} - -static void test_get_range_values(void** state) { - AMresultStack* stack = *state; - AMdoc* const doc1 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMmapPutStr(doc1, AM_ROOT, "aa", 
"aaa")); - AMfree(AMmapPutStr(doc1, AM_ROOT, "bb", "bbb")); - AMfree(AMmapPutStr(doc1, AM_ROOT, "cc", "ccc")); - AMfree(AMmapPutStr(doc1, AM_ROOT, "dd", "ddd")); - AMfree(AMcommit(doc1, NULL, NULL)); - - AMchangeHashes const v1 = AMpush(&stack, - AMgetHeads(doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMdoc* const doc2 = AMpush(&stack, AMfork(doc1, NULL), AM_VALUE_DOC, cmocka_cb).doc; - - AMfree(AMmapPutStr(doc1, AM_ROOT, "cc", "ccc V2")); - AMfree(AMcommit(doc1, NULL, NULL)); - - AMfree(AMmapPutStr(doc2, AM_ROOT, "cc", "ccc V3")); - AMfree(AMcommit(doc2, NULL, NULL)); - - AMfree(AMmerge(doc1, doc2)); - - AMmapItems range = AMpush(&stack, - AMmapRange(doc1, AM_ROOT, "b", "d", NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMmapItems range_back = AMmapItemsReversed(&range); - assert_int_equal(AMmapItemsSize(&range), 2); - - AMmapItem const* map_item = NULL; - while ((map_item = AMmapItemsNext(&range, 1)) != NULL) { - AMvalue const val1 = AMmapItemValue(map_item); - AMresult* result = AMmapGet(doc1, AM_ROOT, AMmapItemKey(map_item), NULL); - AMvalue const val2 = AMresultValue(result); - assert_true(AMvalueEqual(&val1, &val2)); - assert_non_null(AMmapItemObjId(map_item)); - AMfree(result); - } - - assert_int_equal(AMmapItemsSize(&range_back), 2); - - while ((map_item = AMmapItemsNext(&range_back, 1)) != NULL) { - AMvalue const val1 = AMmapItemValue(map_item); - AMresult* result = AMmapGet(doc1, AM_ROOT, AMmapItemKey(map_item), NULL); - AMvalue const val2 = AMresultValue(result); - assert_true(AMvalueEqual(&val1, &val2)); - assert_non_null(AMmapItemObjId(map_item)); - AMfree(result); - } - - range = AMpush(&stack, - AMmapRange(doc1, AM_ROOT, "b", "d", &v1), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - range_back = AMmapItemsReversed(&range); - assert_int_equal(AMmapItemsSize(&range), 2); - - while ((map_item = AMmapItemsNext(&range, 1)) != NULL) { - AMvalue const val1 = AMmapItemValue(map_item); - AMresult* result = AMmapGet(doc1, AM_ROOT, 
AMmapItemKey(map_item), &v1); - AMvalue const val2 = AMresultValue(result); - assert_true(AMvalueEqual(&val1, &val2)); - assert_non_null(AMmapItemObjId(map_item)); - AMfree(result); - } - - assert_int_equal(AMmapItemsSize(&range_back), 2); - - while ((map_item = AMmapItemsNext(&range_back, 1)) != NULL) { - AMvalue const val1 = AMmapItemValue(map_item); - AMresult* result = AMmapGet(doc1, AM_ROOT, AMmapItemKey(map_item), &v1); - AMvalue const val2 = AMresultValue(result); - assert_true(AMvalueEqual(&val1, &val2)); - assert_non_null(AMmapItemObjId(map_item)); - AMfree(result); - } - - range = AMpush(&stack, - AMmapRange(doc1, AM_ROOT, NULL, NULL, NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMobjItems values = AMpush(&stack, - AMobjValues(doc1, AM_ROOT, NULL), - AM_VALUE_OBJ_ITEMS, - cmocka_cb).obj_items; - assert_int_equal(AMmapItemsSize(&range), AMobjItemsSize(&values)); - AMobjItem const* value = NULL; - while ((map_item = AMmapItemsNext(&range, 1)) != NULL && - (value = AMobjItemsNext(&values, 1)) != NULL) { - AMvalue const val1 = AMmapItemValue(map_item); - AMvalue const val2 = AMobjItemValue(value); - assert_true(AMvalueEqual(&val1, &val2)); - assert_true(AMobjIdEqual(AMmapItemObjId(map_item), AMobjItemObjId(value))); - } - - range = AMpush(&stack, - AMmapRange(doc1, AM_ROOT, NULL, NULL, &v1), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - values = AMpush(&stack, - AMobjValues(doc1, AM_ROOT, &v1), - AM_VALUE_OBJ_ITEMS, - cmocka_cb).obj_items; - assert_int_equal(AMmapItemsSize(&range), AMobjItemsSize(&values)); - while ((map_item = AMmapItemsNext(&range, 1)) != NULL && - (value = AMobjItemsNext(&values, 1)) != NULL) { - AMvalue const val1 = AMmapItemValue(map_item); - AMvalue const val2 = AMobjItemValue(value); - assert_true(AMvalueEqual(&val1, &val2)); - assert_true(AMobjIdEqual(AMmapItemObjId(map_item), AMobjItemObjId(value))); - } -} - -int run_map_tests(void) { - const struct CMUnitTest tests[] = { - cmocka_unit_test(test_AMmapIncrement), - 
cmocka_unit_test(test_AMmapPut(Bool)), - cmocka_unit_test(test_AMmapPutBytes), - cmocka_unit_test(test_AMmapPut(Counter)), - cmocka_unit_test(test_AMmapPut(F64)), - cmocka_unit_test(test_AMmapPut(Int)), - cmocka_unit_test(test_AMmapPutNull), - cmocka_unit_test(test_AMmapPutObject(List)), - cmocka_unit_test(test_AMmapPutObject(Map)), - cmocka_unit_test(test_AMmapPutObject(Text)), - cmocka_unit_test(test_AMmapPutStr), - cmocka_unit_test(test_AMmapPut(Timestamp)), - cmocka_unit_test(test_AMmapPut(Uint)), - cmocka_unit_test_setup_teardown(test_range_iter_map, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_map_range_back_and_forth_single, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_map_range_back_and_forth_double, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_map_range_at_back_and_forth_single, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_map_range_at_back_and_forth_double, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_get_range_values, setup_stack, teardown_stack), - }; - - return cmocka_run_group_tests(tests, group_setup, group_teardown); -} diff --git a/automerge-c/test/ported_wasm/basic_tests.c b/automerge-c/test/ported_wasm/basic_tests.c deleted file mode 100644 index 147b140d..00000000 --- a/automerge-c/test/ported_wasm/basic_tests.c +++ /dev/null @@ -1,1755 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include - -/* third-party */ -#include - -/* local */ -#include -#include "../stack_utils.h" - -/** - * \brief default import init() should return a promise - */ -static void test_default_import_init_should_return_a_promise(void** state); - -/** - * \brief should create, clone and free - */ -static void test_create_clone_and_free(void** state) { - AMresultStack* stack = *state; - /* const doc1 = create() */ - AMdoc* const doc1 = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - /* const doc2 = doc1.clone() */ - 
AMdoc* const doc2 = AMpush(&stack, AMclone(doc1), AM_VALUE_DOC, cmocka_cb).doc; -} - -/** - * \brief should be able to start and commit - */ -static void test_start_and_commit(void** state) { - AMresultStack* stack = *state; - /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - /* doc.commit() */ - AMpush(&stack, AMcommit(doc, NULL, NULL), AM_VALUE_CHANGE_HASHES, cmocka_cb); -} - -/** - * \brief getting a nonexistent prop does not throw an error - */ -static void test_getting_a_nonexistent_prop_does_not_throw_an_error(void** state) { - AMresultStack* stack = *state; - /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - /* const root = "_root" */ - /* const result = doc.getWithType(root, "hello") */ - /* assert.deepEqual(result, undefined) */ - AMpush(&stack, - AMmapGet(doc, AM_ROOT, "hello", NULL), - AM_VALUE_VOID, - cmocka_cb); -} - -/** - * \brief should be able to set and get a simple value - */ -static void test_should_be_able_to_set_and_get_a_simple_value(void** state) { - AMresultStack* stack = *state; - /* const doc: Automerge = create("aabbcc") */ - AMdoc* const doc = AMpush(&stack, - AMcreate(AMpush(&stack, - AMactorIdInitStr("aabbcc"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; - /* const root = "_root" */ - /* let result */ - /* */ - /* doc.put(root, "hello", "world") */ - AMfree(AMmapPutStr(doc, AM_ROOT, "hello", "world")); - /* doc.put(root, "number1", 5, "uint") */ - AMfree(AMmapPutUint(doc, AM_ROOT, "number1", 5)); - /* doc.put(root, "number2", 5) */ - AMfree(AMmapPutInt(doc, AM_ROOT, "number2", 5)); - /* doc.put(root, "number3", 5.5) */ - AMfree(AMmapPutF64(doc, AM_ROOT, "number3", 5.5)); - /* doc.put(root, "number4", 5.5, "f64") */ - AMfree(AMmapPutF64(doc, AM_ROOT, "number4", 5.5)); - /* doc.put(root, "number5", 5.5, "int") */ - AMfree(AMmapPutInt(doc, AM_ROOT, "number5", 5.5)); - /* 
doc.put(root, "bool", true) */ - AMfree(AMmapPutBool(doc, AM_ROOT, "bool", true)); - /* doc.put(root, "time1", 1000, "timestamp") */ - AMfree(AMmapPutTimestamp(doc, AM_ROOT, "time1", 1000)); - /* doc.put(root, "time2", new Date(1001)) */ - AMfree(AMmapPutTimestamp(doc, AM_ROOT, "time2", 1001)); - /* doc.putObject(root, "list", []); */ - AMfree(AMmapPutObject(doc, AM_ROOT, "list", AM_OBJ_TYPE_LIST)); - /* doc.put(root, "null", null) */ - AMfree(AMmapPutNull(doc, AM_ROOT, "null")); - /* */ - /* result = doc.getWithType(root, "hello") */ - /* assert.deepEqual(result, ["str", "world"]) */ - /* assert.deepEqual(doc.get("/", "hello"), "world") */ - assert_string_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "hello", NULL), - AM_VALUE_STR, - cmocka_cb).str, "world"); - /* assert.deepEqual(doc.get("/", "hello"), "world") */ - /* */ - /* result = doc.getWithType(root, "number1") */ - /* assert.deepEqual(result, ["uint", 5]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "number1", NULL), - AM_VALUE_UINT, - cmocka_cb).uint, 5); - /* assert.deepEqual(doc.get("/", "number1"), 5) */ - /* */ - /* result = doc.getWithType(root, "number2") */ - /* assert.deepEqual(result, ["int", 5]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "number2", NULL), - AM_VALUE_INT, - cmocka_cb).int_, 5); - /* */ - /* result = doc.getWithType(root, "number3") */ - /* assert.deepEqual(result, ["f64", 5.5]) */ - assert_float_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "number3", NULL), - AM_VALUE_F64, - cmocka_cb).f64, 5.5, DBL_EPSILON); - /* */ - /* result = doc.getWithType(root, "number4") */ - /* assert.deepEqual(result, ["f64", 5.5]) */ - assert_float_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "number4", NULL), - AM_VALUE_F64, - cmocka_cb).f64, 5.5, DBL_EPSILON); - /* */ - /* result = doc.getWithType(root, "number5") */ - /* assert.deepEqual(result, ["int", 5]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "number5", NULL), - AM_VALUE_INT, - 
cmocka_cb).int_, 5); - /* */ - /* result = doc.getWithType(root, "bool") */ - /* assert.deepEqual(result, ["boolean", true]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "bool", NULL), - AM_VALUE_BOOLEAN, - cmocka_cb).boolean, true); - /* */ - /* doc.put(root, "bool", false, "boolean") */ - AMfree(AMmapPutBool(doc, AM_ROOT, "bool", false)); - /* */ - /* result = doc.getWithType(root, "bool") */ - /* assert.deepEqual(result, ["boolean", false]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "bool", NULL), - AM_VALUE_BOOLEAN, - cmocka_cb).boolean, false); - /* */ - /* result = doc.getWithType(root, "time1") */ - /* assert.deepEqual(result, ["timestamp", new Date(1000)]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "time1", NULL), - AM_VALUE_TIMESTAMP, - cmocka_cb).timestamp, 1000); - /* */ - /* result = doc.getWithType(root, "time2") */ - /* assert.deepEqual(result, ["timestamp", new Date(1001)]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "time2", NULL), - AM_VALUE_TIMESTAMP, - cmocka_cb).timestamp, 1001); - /* */ - /* result = doc.getWithType(root, "list") */ - /* assert.deepEqual(result, ["list", "10@aabbcc"]); */ - AMobjId const* const list = AMpush(&stack, - AMmapGet(doc, AM_ROOT, "list", NULL), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - assert_int_equal(AMobjIdCounter(list), 10); - assert_string_equal(AMactorIdStr(AMobjIdActorId(list)), "aabbcc"); - /* */ - /* result = doc.getWithType(root, "null") */ - /* assert.deepEqual(result, ["null", null]); */ - AMpush(&stack, - AMmapGet(doc, AM_ROOT, "null", NULL), - AM_VALUE_NULL, - cmocka_cb); -} - -/** - * \brief should be able to use bytes - */ -static void test_should_be_able_to_use_bytes(void** state) { - AMresultStack* stack = *state; - /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - /* doc.put("_root", "data1", new Uint8Array([10, 11, 12])); */ - static uint8_t const DATA1[] = {10, 11, 
12}; - AMfree(AMmapPutBytes(doc, AM_ROOT, "data1", DATA1, sizeof(DATA1))); - /* doc.put("_root", "data2", new Uint8Array([13, 14, 15]), "bytes"); */ - static uint8_t const DATA2[] = {13, 14, 15}; - AMfree(AMmapPutBytes(doc, AM_ROOT, "data2", DATA2, sizeof(DATA2))); - /* const value1 = doc.getWithType("_root", "data1") */ - AMbyteSpan const value1 = AMpush(&stack, - AMmapGet(doc, AM_ROOT, "data1", NULL), - AM_VALUE_BYTES, - cmocka_cb).bytes; - /* assert.deepEqual(value1, ["bytes", new Uint8Array([10, 11, 12])]); */ - assert_int_equal(value1.count, sizeof(DATA1)); - assert_memory_equal(value1.src, DATA1, sizeof(DATA1)); - /* const value2 = doc.getWithType("_root", "data2") */ - AMbyteSpan const value2 = AMpush(&stack, - AMmapGet(doc, AM_ROOT, "data2", NULL), - AM_VALUE_BYTES, - cmocka_cb).bytes; - /* assert.deepEqual(value2, ["bytes", new Uint8Array([13, 14, 15])]); */ - assert_int_equal(value2.count, sizeof(DATA2)); - assert_memory_equal(value2.src, DATA2, sizeof(DATA2)); -} - -/** - * \brief should be able to make subobjects - */ -static void test_should_be_able_to_make_subobjects(void** state) { - AMresultStack* stack = *state; - /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - /* const root = "_root" */ - /* let result */ - /* */ - /* const submap = doc.putObject(root, "submap", {}) */ - AMobjId const* const submap = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, "submap", AM_OBJ_TYPE_MAP), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - /* doc.put(submap, "number", 6, "uint") */ - AMfree(AMmapPutUint(doc, submap, "number", 6)); - /* assert.strictEqual(doc.pendingOps(), 2) */ - assert_int_equal(AMpendingOps(doc), 2); - /* */ - /* result = doc.getWithType(root, "submap") */ - /* assert.deepEqual(result, ["map", submap]) */ - assert_true(AMobjIdEqual(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "submap", NULL), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id, - submap)); - /* */ - /* result = doc.getWithType(submap, 
"number") */ - /* assert.deepEqual(result, ["uint", 6]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, submap, "number", NULL), - AM_VALUE_UINT, - cmocka_cb).uint, - 6); -} - -/** - * \brief should be able to make lists - */ -static void test_should_be_able_to_make_lists(void** state) { - AMresultStack* stack = *state; - /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - /* const root = "_root" */ - /* */ - /* const sublist = doc.putObject(root, "numbers", []) */ - AMobjId const* const sublist = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, "numbers", AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - /* doc.insert(sublist, 0, "a"); */ - AMfree(AMlistPutStr(doc, sublist, 0, true, "a")); - /* doc.insert(sublist, 1, "b"); */ - AMfree(AMlistPutStr(doc, sublist, 1, true, "b")); - /* doc.insert(sublist, 2, "c"); */ - AMfree(AMlistPutStr(doc, sublist, 2, true, "c")); - /* doc.insert(sublist, 0, "z"); */ - AMfree(AMlistPutStr(doc, sublist, 0, true, "z")); - /* */ - /* assert.deepEqual(doc.getWithType(sublist, 0), ["str", "z"]) */ - assert_string_equal(AMpush(&stack, - AMlistGet(doc, sublist, 0, NULL), - AM_VALUE_STR, - cmocka_cb).str, "z"); - /* assert.deepEqual(doc.getWithType(sublist, 1), ["str", "a"]) */ - assert_string_equal(AMpush(&stack, - AMlistGet(doc, sublist, 1, NULL), - AM_VALUE_STR, - cmocka_cb).str, "a"); - /* assert.deepEqual(doc.getWithType(sublist, 2), ["str", "b"]) */ - assert_string_equal(AMpush(&stack, - AMlistGet(doc, sublist, 2, NULL), - AM_VALUE_STR, - cmocka_cb).str, "b"); - /* assert.deepEqual(doc.getWithType(sublist, 3), ["str", "c"]) */ - assert_string_equal(AMpush(&stack, - AMlistGet(doc, sublist, 3, NULL), - AM_VALUE_STR, - cmocka_cb).str, "c"); - /* assert.deepEqual(doc.length(sublist), 4) */ - assert_int_equal(AMobjSize(doc, sublist, NULL), 4); - /* */ - /* doc.put(sublist, 2, "b v2"); */ - AMfree(AMlistPutStr(doc, sublist, 2, false, "b v2")); - /* */ - /* 
assert.deepEqual(doc.getWithType(sublist, 2), ["str", "b v2"]) */ - assert_string_equal(AMpush(&stack, - AMlistGet(doc, sublist, 2, NULL), - AM_VALUE_STR, - cmocka_cb).str, "b v2"); - /* assert.deepEqual(doc.length(sublist), 4) */ - assert_int_equal(AMobjSize(doc, sublist, NULL), 4); -} - -/** - * \brief lists have insert, set, splice, and push ops - */ -static void test_lists_have_insert_set_splice_and_push_ops(void** state) { - AMresultStack* stack = *state; - /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - /* const root = "_root" */ - /* */ - /* const sublist = doc.putObject(root, "letters", []) */ - AMobjId const* const sublist = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, "letters", AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - /* doc.insert(sublist, 0, "a"); */ - AMfree(AMlistPutStr(doc, sublist, 0, true, "a")); - /* doc.insert(sublist, 0, "b"); */ - AMfree(AMlistPutStr(doc, sublist, 0, true, "b")); - /* assert.deepEqual(doc.materialize(), { letters: ["b", "a"] }) */ - AMmapItems doc_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMmapItem const* doc_item = AMmapItemsNext(&doc_items, 1); - assert_string_equal(AMmapItemKey(doc_item), "letters"); - { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "b"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "a"); - assert_null(AMlistItemsNext(&list_items, 1)); - } - /* doc.push(sublist, "c"); */ - AMfree(AMlistPutStr(doc, sublist, SIZE_MAX, true, "c")); - /* const heads = doc.getHeads() */ - AMchangeHashes const heads = AMpush(&stack, - AMgetHeads(doc), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - /* assert.deepEqual(doc.materialize(), { 
letters: ["b", "a", "c"] }) */ - doc_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - doc_item = AMmapItemsNext(&doc_items, 1); - assert_string_equal(AMmapItemKey(doc_item), "letters"); - { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "b"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "a"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "c"); - assert_null(AMlistItemsNext(&list_items, 1)); - } - /* doc.push(sublist, 3, "timestamp"); */ - AMfree(AMlistPutTimestamp(doc, sublist, SIZE_MAX, true, 3)); - /* assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c", new Date(3)] })*/ - doc_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - doc_item = AMmapItemsNext(&doc_items, 1); - assert_string_equal(AMmapItemKey(doc_item), "letters"); - { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "b"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "a"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "c"); - assert_int_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).timestamp, - 3); - assert_null(AMlistItemsNext(&list_items, 1)); - } - /* doc.splice(sublist, 1, 1, ["d", "e", "f"]); */ - static AMvalue const DATA[] = {{.str_tag = AM_VALUE_STR, .str = "d"}, - {.str_tag = AM_VALUE_STR, .str = "e"}, - {.str_tag = AM_VALUE_STR, .str = "f"}}; - AMfree(AMsplice(doc, sublist, 1, 1, DATA, sizeof(DATA)/sizeof(AMvalue))); - /* 
assert.deepEqual(doc.materialize(), { letters: ["b", "d", "e", "f", "c", new Date(3)] })*/ - doc_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - doc_item = AMmapItemsNext(&doc_items, 1); - assert_string_equal(AMmapItemKey(doc_item), "letters"); - { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "b"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "d"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "e"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "f"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "c"); - assert_int_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).timestamp, - 3); - assert_null(AMlistItemsNext(&list_items, 1)); - } - /* doc.put(sublist, 0, "z"); */ - AMfree(AMlistPutStr(doc, sublist, 0, false, "z")); - /* assert.deepEqual(doc.materialize(), { letters: ["z", "d", "e", "f", "c", new Date(3)] })*/ - doc_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - doc_item = AMmapItemsNext(&doc_items, 1); - assert_string_equal(AMmapItemKey(doc_item), "letters"); - { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "z"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "d"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "e"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "f"); - 
assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "c"); - assert_int_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).timestamp, - 3); - assert_null(AMlistItemsNext(&list_items, 1)); - } - /* assert.deepEqual(doc.materialize(sublist), ["z", "d", "e", "f", "c", new Date(3)])*/ - AMlistItems sublist_items = AMpush( - &stack, - AMlistRange(doc, sublist, 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - assert_string_equal(AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str, - "z"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str, - "d"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str, - "e"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str, - "f"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).str, - "c"); - assert_int_equal(AMlistItemValue(AMlistItemsNext(&sublist_items, 1)).timestamp, - 3); - assert_null(AMlistItemsNext(&sublist_items, 1)); - /* assert.deepEqual(doc.length(sublist), 6) */ - assert_int_equal(AMobjSize(doc, sublist, NULL), 6); - /* assert.deepEqual(doc.materialize("/", heads), { letters: ["b", "a", "c"] })*/ - doc_items = AMpush(&stack, - AMmapRange(doc, AM_ROOT, NULL, NULL, &heads), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - doc_item = AMmapItemsNext(&doc_items, 1); - assert_string_equal(AMmapItemKey(doc_item), "letters"); - { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, &heads), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "b"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "a"); - assert_string_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).str, - "c"); - assert_null(AMlistItemsNext(&list_items, 1)); - } -} - -/** - * \brief should be able to delete non-existent props - */ -static void 
test_should_be_able_to_delete_non_existent_props(void** state) { - AMresultStack* stack = *state; - /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - /* */ - /* doc.put("_root", "foo", "bar") */ - AMfree(AMmapPutStr(doc, AM_ROOT, "foo", "bar")); - /* doc.put("_root", "bip", "bap") */ - AMfree(AMmapPutStr(doc, AM_ROOT, "bip", "bap")); - /* const hash1 = doc.commit() */ - AMchangeHashes const hash1 = AMpush(&stack, - AMcommit(doc, NULL, NULL), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - /* */ - /* assert.deepEqual(doc.keys("_root"), ["bip", "foo"]) */ - AMstrs keys = AMpush(&stack, - AMkeys(doc, AM_ROOT, NULL), - AM_VALUE_STRS, - cmocka_cb).strs; - assert_string_equal(AMstrsNext(&keys, 1), "bip"); - assert_string_equal(AMstrsNext(&keys, 1), "foo"); - /* */ - /* doc.delete("_root", "foo") */ - AMfree(AMmapDelete(doc, AM_ROOT, "foo")); - /* doc.delete("_root", "baz") */ - AMfree(AMmapDelete(doc, AM_ROOT, "baz")); - /* const hash2 = doc.commit() */ - AMchangeHashes const hash2 = AMpush(&stack, - AMcommit(doc, NULL, NULL), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - /* */ - /* assert.deepEqual(doc.keys("_root"), ["bip"]) */ - keys = AMpush(&stack, - AMkeys(doc, AM_ROOT, NULL), - AM_VALUE_STRS, - cmocka_cb).strs; - assert_string_equal(AMstrsNext(&keys, 1), "bip"); - /* assert.deepEqual(doc.keys("_root", [hash1]), ["bip", "foo"]) */ - keys = AMpush(&stack, - AMkeys(doc, AM_ROOT, &hash1), - AM_VALUE_STRS, - cmocka_cb).strs; - assert_string_equal(AMstrsNext(&keys, 1), "bip"); - assert_string_equal(AMstrsNext(&keys, 1), "foo"); - /* assert.deepEqual(doc.keys("_root", [hash2]), ["bip"]) */ - keys = AMpush(&stack, - AMkeys(doc, AM_ROOT, &hash2), - AM_VALUE_STRS, - cmocka_cb).strs; - assert_string_equal(AMstrsNext(&keys, 1), "bip"); -} - -/** - * \brief should be able to del - */ -static void test_should_be_able_to_del(void **state) { - AMresultStack* stack = *state; - /* const doc = create() */ 
- AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - /* const root = "_root" */ - /* */ - /* doc.put(root, "xxx", "xxx"); */ - AMfree(AMmapPutStr(doc, AM_ROOT, "xxx", "xxx")); - /* assert.deepEqual(doc.getWithType(root, "xxx"), ["str", "xxx"]) */ - assert_string_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "xxx", NULL), - AM_VALUE_STR, - cmocka_cb).str, "xxx"); - /* doc.delete(root, "xxx"); */ - AMfree(AMmapDelete(doc, AM_ROOT, "xxx")); - /* assert.deepEqual(doc.getWithType(root, "xxx"), undefined) */ - AMpush(&stack, - AMmapGet(doc, AM_ROOT, "xxx", NULL), - AM_VALUE_VOID, - cmocka_cb); -} - -/** - * \brief should be able to use counters - */ -static void test_should_be_able_to_use_counters(void** state) { - AMresultStack* stack = *state; - /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - /* const root = "_root" */ - /* */ - /* doc.put(root, "counter", 10, "counter"); */ - AMfree(AMmapPutCounter(doc, AM_ROOT, "counter", 10)); - /* assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 10]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "counter", NULL), - AM_VALUE_COUNTER, - cmocka_cb).counter, 10); - /* doc.increment(root, "counter", 10); */ - AMfree(AMmapIncrement(doc, AM_ROOT, "counter", 10)); - /* assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 20]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "counter", NULL), - AM_VALUE_COUNTER, - cmocka_cb).counter, 20); - /* doc.increment(root, "counter", -5); */ - AMfree(AMmapIncrement(doc, AM_ROOT, "counter", -5)); - /* assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 15]) */ - assert_int_equal(AMpush(&stack, - AMmapGet(doc, AM_ROOT, "counter", NULL), - AM_VALUE_COUNTER, - cmocka_cb).counter, 15); -} - -/** - * \brief should be able to splice text - */ -static void test_should_be_able_to_splice_text(void** state) { - AMresultStack* stack = *state; - /* const doc 
= create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - /* const root = "_root"; */ - /* */ - /* const text = doc.putObject(root, "text", ""); */ - AMobjId const* const text = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, "text", AM_OBJ_TYPE_TEXT), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - /* doc.splice(text, 0, 0, "hello ") */ - AMfree(AMspliceText(doc, text, 0, 0, "hello ")); - /* doc.splice(text, 6, 0, ["w", "o", "r", "l", "d"]) */ - static AMvalue const WORLD[] = {{.str_tag = AM_VALUE_STR, .str = "w"}, - {.str_tag = AM_VALUE_STR, .str = "o"}, - {.str_tag = AM_VALUE_STR, .str = "r"}, - {.str_tag = AM_VALUE_STR, .str = "l"}, - {.str_tag = AM_VALUE_STR, .str = "d"}}; - AMfree(AMsplice(doc, text, 6, 0, WORLD, sizeof(WORLD)/sizeof(AMvalue))); - /* doc.splice(text, 11, 0, ["!", "?"]) */ - static AMvalue const INTERROBANG[] = {{.str_tag = AM_VALUE_STR, .str = "!"}, - {.str_tag = AM_VALUE_STR, .str = "?"}}; - AMfree(AMsplice(doc, text, 11, 0, INTERROBANG, sizeof(INTERROBANG)/sizeof(AMvalue))); - /* assert.deepEqual(doc.getWithType(text, 0), ["str", "h"]) */ - assert_string_equal(AMpush(&stack, - AMlistGet(doc, text, 0, NULL), - AM_VALUE_STR, - cmocka_cb).str, "h"); - /* assert.deepEqual(doc.getWithType(text, 1), ["str", "e"]) */ - assert_string_equal(AMpush(&stack, - AMlistGet(doc, text, 1, NULL), - AM_VALUE_STR, - cmocka_cb).str, "e"); - /* assert.deepEqual(doc.getWithType(text, 9), ["str", "l"]) */ - assert_string_equal(AMpush(&stack, - AMlistGet(doc, text, 9, NULL), - AM_VALUE_STR, - cmocka_cb).str, "l"); - /* assert.deepEqual(doc.getWithType(text, 10), ["str", "d"]) */ - assert_string_equal(AMpush(&stack, - AMlistGet(doc, text, 10, NULL), - AM_VALUE_STR, - cmocka_cb).str, "d"); - /* assert.deepEqual(doc.getWithType(text, 11), ["str", "!"]) */ - assert_string_equal(AMpush(&stack, - AMlistGet(doc, text, 11, NULL), - AM_VALUE_STR, - cmocka_cb).str, "!"); - /* assert.deepEqual(doc.getWithType(text, 12), ["str", "?"]) */ - 
assert_string_equal(AMpush(&stack, - AMlistGet(doc, text, 12, NULL), - AM_VALUE_STR, - cmocka_cb).str, "?"); -} - -/** - * \brief should be able to insert objects into text - */ -static void test_should_be_able_to_insert_objects_into_text(void** state) { - AMresultStack* stack = *state; - /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - /* const text = doc.putObject("/", "text", "Hello world"); */ - AMobjId const* const text = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, "text", AM_OBJ_TYPE_TEXT), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - AMfree(AMspliceText(doc, text, 0, 0, "Hello world")); - /* const obj = doc.insertObject(text, 6, { hello: "world" }); */ - AMobjId const* const obj = AMpush( - &stack, - AMlistPutObject(doc, text, 6, true, AM_OBJ_TYPE_MAP), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - AMfree(AMmapPutStr(doc, obj, "hello", "world")); - /* assert.deepEqual(doc.text(text), "Hello \ufffcworld"); */ - assert_string_equal(AMpush(&stack, - AMtext(doc, text, NULL), - AM_VALUE_STR, - cmocka_cb).str, "Hello \ufffcworld"); - /* assert.deepEqual(doc.getWithType(text, 6), ["map", obj]); */ - assert_true(AMobjIdEqual(AMpush(&stack, - AMlistGet(doc, text, 6, NULL), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id, obj)); - /* assert.deepEqual(doc.getWithType(obj, "hello"), ["str", "world"]); */ - assert_string_equal(AMpush(&stack, - AMmapGet(doc, obj, "hello", NULL), - AM_VALUE_STR, - cmocka_cb).str, "world"); -} - -/** - * \brief should be able save all or incrementally - */ -static void test_should_be_able_to_save_all_or_incrementally(void** state) { - AMresultStack* stack = *state; - /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - /* */ - /* doc.put("_root", "foo", 1) */ - AMfree(AMmapPutInt(doc, AM_ROOT, "foo", 1)); - /* */ - /* const save1 = doc.save() */ - AMbyteSpan const save1 = AMpush(&stack, - AMsave(doc), - AM_VALUE_BYTES, - 
cmocka_cb).bytes; - /* */ - /* doc.put("_root", "bar", 2) */ - AMfree(AMmapPutInt(doc, AM_ROOT, "bar", 2)); - /* */ - /* const saveMidway = doc.clone().save(); */ - AMbyteSpan const saveMidway = AMpush(&stack, - AMsave( - AMpush(&stack, - AMclone(doc), - AM_VALUE_DOC, - cmocka_cb).doc), - AM_VALUE_BYTES, - cmocka_cb).bytes; - /* */ - /* const save2 = doc.saveIncremental(); */ - AMbyteSpan const save2 = AMpush(&stack, - AMsaveIncremental(doc), - AM_VALUE_BYTES, - cmocka_cb).bytes; - /* */ - /* doc.put("_root", "baz", 3); */ - AMfree(AMmapPutInt(doc, AM_ROOT, "baz", 3)); - /* */ - /* const save3 = doc.saveIncremental(); */ - AMbyteSpan const save3 = AMpush(&stack, - AMsaveIncremental(doc), - AM_VALUE_BYTES, - cmocka_cb).bytes; - /* */ - /* const saveA = doc.save(); */ - AMbyteSpan const saveA = AMpush(&stack, - AMsave(doc), - AM_VALUE_BYTES, - cmocka_cb).bytes; - /* const saveB = new Uint8Array([...save1, ...save2, ...save3]); */ - size_t const saveB_count = save1.count + save2.count + save3.count; - uint8_t* const saveB_src = test_malloc(saveB_count); - memcpy(saveB_src, save1.src, save1.count); - memcpy(saveB_src + save1.count, save2.src, save2.count); - memcpy(saveB_src + save1.count + save2.count, save3.src, save3.count); - /* */ - /* assert.notDeepEqual(saveA, saveB); */ - assert_memory_not_equal(saveA.src, saveB_src, saveA.count); - /* */ - /* const docA = load(saveA); */ - AMdoc* const docA = AMpush(&stack, - AMload(saveA.src, saveA.count), - AM_VALUE_DOC, - cmocka_cb).doc; - /* const docB = load(saveB); */ - AMdoc* const docB = AMpush(&stack, - AMload(saveB_src, saveB_count), - AM_VALUE_DOC, - cmocka_cb).doc; - test_free(saveB_src); - /* const docC = load(saveMidway) */ - AMdoc* const docC = AMpush(&stack, - AMload(saveMidway.src, saveMidway.count), - AM_VALUE_DOC, - cmocka_cb).doc; - /* docC.loadIncremental(save3) */ - AMfree(AMloadIncremental(docC, save3.src, save3.count)); - /* */ - /* assert.deepEqual(docA.keys("_root"), docB.keys("_root")); */ - AMstrs 
const keysA = AMpush(&stack, - AMkeys(docA, AM_ROOT, NULL), - AM_VALUE_STRS, - cmocka_cb).strs; - AMstrs const keysB = AMpush(&stack, - AMkeys(docB, AM_ROOT, NULL), - AM_VALUE_STRS, - cmocka_cb).strs; - assert_int_equal(AMstrsCmp(&keysA, &keysB), 0); - /* assert.deepEqual(docA.save(), docB.save()); */ - AMbyteSpan const save = AMpush(&stack, - AMsave(docA), - AM_VALUE_BYTES, - cmocka_cb).bytes; - assert_memory_equal(save.src, - AMpush(&stack, - AMsave(docB), - AM_VALUE_BYTES, - cmocka_cb).bytes.src, - save.count); - /* assert.deepEqual(docA.save(), docC.save()); */ - assert_memory_equal(save.src, - AMpush(&stack, - AMsave(docC), - AM_VALUE_BYTES, - cmocka_cb).bytes.src, - save.count); -} - -/** - * \brief should be able to splice text #2 - */ -static void test_should_be_able_to_splice_text_2(void** state) { - AMresultStack* stack = *state; - /* const doc = create() */ - AMdoc* const doc = AMpush(&stack, AMcreate(NULL), AM_VALUE_DOC, cmocka_cb).doc; - /* const text = doc.putObject("_root", "text", ""); */ - AMobjId const* const text = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, "text", AM_OBJ_TYPE_TEXT), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - /* doc.splice(text, 0, 0, "hello world"); */ - AMfree(AMspliceText(doc, text, 0, 0, "hello world")); - /* const hash1 = doc.commit(); */ - AMchangeHashes const hash1 = AMpush(&stack, - AMcommit(doc, NULL, NULL), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - /* doc.splice(text, 6, 0, "big bad "); */ - AMfree(AMspliceText(doc, text, 6, 0, "big bad ")); - /* const hash2 = doc.commit(); */ - AMchangeHashes const hash2 = AMpush(&stack, - AMcommit(doc, NULL, NULL), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - /* assert.strictEqual(doc.text(text), "hello big bad world") */ - assert_string_equal(AMpush(&stack, - AMtext(doc, text, NULL), - AM_VALUE_STR, - cmocka_cb).str, "hello big bad world"); - /* assert.strictEqual(doc.length(text), 19) */ - assert_int_equal(AMobjSize(doc, text, NULL), 19); - /* 
assert.strictEqual(doc.text(text, [hash1]), "hello world") */ - assert_string_equal(AMpush(&stack, - AMtext(doc, text, &hash1), - AM_VALUE_STR, - cmocka_cb).str, "hello world"); - /* assert.strictEqual(doc.length(text, [hash1]), 11) */ - assert_int_equal(AMobjSize(doc, text, &hash1), 11); - /* assert.strictEqual(doc.text(text, [hash2]), "hello big bad world") */ - assert_string_equal(AMpush(&stack, - AMtext(doc, text, &hash2), - AM_VALUE_STR, - cmocka_cb).str, "hello big bad world"); - /* assert.strictEqual(doc.length(text, [hash2]), 19) */ - assert_int_equal(AMobjSize(doc, text, &hash2), 19); -} - -/** - * \brief local inc increments all visible counters in a map - */ -static void test_local_inc_increments_all_visible_counters_in_a_map(void** state) { - AMresultStack* stack = *state; - /* const doc1 = create("aaaa") */ - AMdoc* const doc1 = AMpush(&stack, - AMcreate(AMpush(&stack, - AMactorIdInitStr("aaaa"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; - /* doc1.put("_root", "hello", "world") */ - AMfree(AMmapPutStr(doc1, AM_ROOT, "hello", "world")); - /* const doc2 = load(doc1.save(), "bbbb"); */ - AMbyteSpan const save = AMpush(&stack, - AMsave(doc1), - AM_VALUE_BYTES, - cmocka_cb).bytes; - AMdoc* const doc2 = AMpush(&stack, - AMload(save.src, save.count), - AM_VALUE_DOC, - cmocka_cb).doc; - AMfree(AMsetActorId(doc2, AMpush(&stack, - AMactorIdInitStr("bbbb"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - /* const doc3 = load(doc1.save(), "cccc"); */ - AMdoc* const doc3 = AMpush(&stack, - AMload(save.src, save.count), - AM_VALUE_DOC, - cmocka_cb).doc; - AMfree(AMsetActorId(doc3, AMpush(&stack, - AMactorIdInitStr("cccc"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - /* let heads = doc1.getHeads() */ - AMchangeHashes const heads1 = AMpush(&stack, - AMgetHeads(doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - /* doc1.put("_root", "cnt", 20) */ - AMfree(AMmapPutInt(doc1, AM_ROOT, "cnt", 20)); - /* doc2.put("_root", 
"cnt", 0, "counter") */ - AMfree(AMmapPutCounter(doc2, AM_ROOT, "cnt", 0)); - /* doc3.put("_root", "cnt", 10, "counter") */ - AMfree(AMmapPutCounter(doc3, AM_ROOT, "cnt", 10)); - /* doc1.applyChanges(doc2.getChanges(heads)) */ - AMchanges const changes2 = AMpush(&stack, - AMgetChanges(doc2, &heads1), - AM_VALUE_CHANGES, - cmocka_cb).changes; - AMfree(AMapplyChanges(doc1, &changes2)); - /* doc1.applyChanges(doc3.getChanges(heads)) */ - AMchanges const changes3 = AMpush(&stack, - AMgetChanges(doc3, &heads1), - AM_VALUE_CHANGES, - cmocka_cb).changes; - AMfree(AMapplyChanges(doc1, &changes3)); - /* let result = doc1.getAll("_root", "cnt") */ - AMobjItems result = AMpush(&stack, - AMmapGetAll(doc1, AM_ROOT, "cnt", NULL), - AM_VALUE_OBJ_ITEMS, - cmocka_cb).obj_items; - /* assert.deepEqual(result, [ - ['int', 20, '2@aaaa'], - ['counter', 0, '2@bbbb'], - ['counter', 10, '2@cccc'], - ]) */ - AMobjItem const* result_item = AMobjItemsNext(&result, 1); - assert_int_equal(AMobjItemValue(result_item).int_, 20); - assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 2); - assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), - "aaaa"); - result_item = AMobjItemsNext(&result, 1); - assert_int_equal(AMobjItemValue(result_item).counter, 0); - assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 2); - assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), - "bbbb"); - result_item = AMobjItemsNext(&result, 1); - assert_int_equal(AMobjItemValue(result_item).counter, 10); - assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 2); - assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), - "cccc"); - /* doc1.increment("_root", "cnt", 5) */ - AMfree(AMmapIncrement(doc1, AM_ROOT, "cnt", 5)); - /* result = doc1.getAll("_root", "cnt") */ - result = AMpush(&stack, - AMmapGetAll(doc1, AM_ROOT, "cnt", NULL), - AM_VALUE_OBJ_ITEMS, - cmocka_cb).obj_items; - /* assert.deepEqual(result, [ - 
['counter', 5, '2@bbbb'], - ['counter', 15, '2@cccc'], - ]) */ - result_item = AMobjItemsNext(&result, 1); - assert_int_equal(AMobjItemValue(result_item).counter, 5); - assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 2); - assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), - "bbbb"); - result_item = AMobjItemsNext(&result, 1); - assert_int_equal(AMobjItemValue(result_item).counter, 15); - assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 2); - assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), - "cccc"); - /* */ - /* const save1 = doc1.save() */ - AMbyteSpan const save1 = AMpush(&stack, - AMsave(doc1), - AM_VALUE_BYTES, - cmocka_cb).bytes; - /* const doc4 = load(save1) */ - AMdoc* const doc4 = AMpush(&stack, - AMload(save1.src, save1.count), - AM_VALUE_DOC, - cmocka_cb).doc; - /* assert.deepEqual(doc4.save(), save1); */ - assert_memory_equal(AMpush(&stack, - AMsave(doc4), - AM_VALUE_BYTES, - cmocka_cb).bytes.src, - save1.src, - save1.count); -} - -/** - * \brief local inc increments all visible counters in a sequence - */ -static void test_local_inc_increments_all_visible_counters_in_a_sequence(void** state) { - AMresultStack* stack = *state; - /* const doc1 = create("aaaa") */ - AMdoc* const doc1 = AMpush(&stack, - AMcreate(AMpush(&stack, - AMactorIdInitStr("aaaa"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; - /* const seq = doc1.putObject("_root", "seq", []) */ - AMobjId const* const seq = AMpush( - &stack, - AMmapPutObject(doc1, AM_ROOT, "seq", AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - /* doc1.insert(seq, 0, "hello") */ - AMfree(AMlistPutStr(doc1, seq, 0, true, "hello")); - /* const doc2 = load(doc1.save(), "bbbb"); */ - AMbyteSpan const save1 = AMpush(&stack, - AMsave(doc1), - AM_VALUE_BYTES, - cmocka_cb).bytes; - AMdoc* const doc2 = AMpush(&stack, - AMload(save1.src, save1.count), - AM_VALUE_DOC, - cmocka_cb).doc; - 
AMfree(AMsetActorId(doc2, AMpush(&stack, - AMactorIdInitStr("bbbb"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - /* const doc3 = load(doc1.save(), "cccc"); */ - AMdoc* const doc3 = AMpush(&stack, - AMload(save1.src, save1.count), - AM_VALUE_DOC, - cmocka_cb).doc; - AMfree(AMsetActorId(doc3, AMpush(&stack, - AMactorIdInitStr("cccc"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - /* let heads = doc1.getHeads() */ - AMchangeHashes const heads1 = AMpush(&stack, - AMgetHeads(doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - /* doc1.put(seq, 0, 20) */ - AMfree(AMlistPutInt(doc1, seq, 0, false, 20)); - /* doc2.put(seq, 0, 0, "counter") */ - AMfree(AMlistPutCounter(doc2, seq, 0, false, 0)); - /* doc3.put(seq, 0, 10, "counter") */ - AMfree(AMlistPutCounter(doc3, seq, 0, false, 10)); - /* doc1.applyChanges(doc2.getChanges(heads)) */ - AMchanges const changes2 = AMpush(&stack, - AMgetChanges(doc2, &heads1), - AM_VALUE_CHANGES, - cmocka_cb).changes; - AMfree(AMapplyChanges(doc1, &changes2)); - /* doc1.applyChanges(doc3.getChanges(heads)) */ - AMchanges const changes3 = AMpush(&stack, - AMgetChanges(doc3, &heads1), - AM_VALUE_CHANGES, - cmocka_cb).changes; - AMfree(AMapplyChanges(doc1, &changes3)); - /* let result = doc1.getAll(seq, 0) */ - AMobjItems result = AMpush(&stack, - AMlistGetAll(doc1, seq, 0, NULL), - AM_VALUE_OBJ_ITEMS, - cmocka_cb).obj_items; - /* assert.deepEqual(result, [ - ['int', 20, '3@aaaa'], - ['counter', 0, '3@bbbb'], - ['counter', 10, '3@cccc'], - ]) */ - AMobjItem const* result_item = AMobjItemsNext(&result, 1); - assert_int_equal(AMobjItemValue(result_item).int_, 20); - assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 3); - assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), - "aaaa"); - result_item = AMobjItemsNext(&result, 1); - assert_int_equal(AMobjItemValue(result_item).counter, 0); - assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 3); - 
assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), - "bbbb"); - result_item = AMobjItemsNext(&result, 1); - assert_int_equal(AMobjItemValue(result_item).counter, 10); - assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 3); - assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), - "cccc"); - /* doc1.increment(seq, 0, 5) */ - AMfree(AMlistIncrement(doc1, seq, 0, 5)); - /* result = doc1.getAll(seq, 0) */ - result = AMpush(&stack, - AMlistGetAll(doc1, seq, 0, NULL), - AM_VALUE_OBJ_ITEMS, - cmocka_cb).obj_items; - /* assert.deepEqual(result, [ - ['counter', 5, '3@bbbb'], - ['counter', 15, '3@cccc'], - ]) */ - result_item = AMobjItemsNext(&result, 1); - assert_int_equal(AMobjItemValue(result_item).counter, 5); - assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 3); - assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), - "bbbb"); - result_item = AMobjItemsNext(&result, 1); - assert_int_equal(AMobjItemValue(result_item).counter, 15); - assert_int_equal(AMobjIdCounter(AMobjItemObjId(result_item)), 3); - assert_string_equal(AMactorIdStr(AMobjIdActorId(AMobjItemObjId(result_item))), - "cccc"); - /* */ - /* const save = doc1.save() */ - AMbyteSpan const save = AMpush(&stack, - AMsave(doc1), - AM_VALUE_BYTES, - cmocka_cb).bytes; - /* const doc4 = load(save) */ - AMdoc* const doc4 = AMpush(&stack, - AMload(save.src, save.count), - AM_VALUE_DOC, - cmocka_cb).doc; - /* assert.deepEqual(doc4.save(), save); */ - assert_memory_equal(AMpush(&stack, - AMsave(doc4), - AM_VALUE_BYTES, - cmocka_cb).bytes.src, - save.src, - save.count); -} - -/** - * \brief paths can be used instead of objids - */ -static void test_paths_can_be_used_instead_of_objids(void** state); - -/** - * \brief should be able to fetch changes by hash - */ -static void test_should_be_able_to_fetch_changes_by_hash(void** state) { - AMresultStack* stack = *state; - /* const doc1 = create("aaaa") */ - AMdoc* const doc1 
= AMpush(&stack, - AMcreate(AMpush(&stack, - AMactorIdInitStr("aaaa"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; - /* const doc2 = create("bbbb") */ - AMdoc* const doc2 = AMpush(&stack, - AMcreate(AMpush(&stack, - AMactorIdInitStr("bbbb"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; - /* doc1.put("/", "a", "b") */ - AMfree(AMmapPutStr(doc1, AM_ROOT, "a", "b")); - /* doc2.put("/", "b", "c") */ - AMfree(AMmapPutStr(doc2, AM_ROOT, "b", "c")); - /* const head1 = doc1.getHeads() */ - AMchangeHashes head1 = AMpush(&stack, - AMgetHeads(doc1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - /* const head2 = doc2.getHeads() */ - AMchangeHashes head2 = AMpush(&stack, - AMgetHeads(doc2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - /* const change1 = doc1.getChangeByHash(head1[0]) - if (change1 === null) { throw new RangeError("change1 should not be null") }*/ - AMbyteSpan const change_hash1 = AMchangeHashesNext(&head1, 1); - AMchanges change1 = AMpush( - &stack, - AMgetChangeByHash(doc1, change_hash1.src, change_hash1.count), - AM_VALUE_CHANGES, - cmocka_cb).changes; - /* const change2 = doc1.getChangeByHash(head2[0]) - assert.deepEqual(change2, null) */ - AMbyteSpan const change_hash2 = AMchangeHashesNext(&head2, 1); - AMpush(&stack, - AMgetChangeByHash(doc1, change_hash2.src, change_hash2.count), - AM_VALUE_VOID, - cmocka_cb); - /* assert.deepEqual(decodeChange(change1).hash, head1[0]) */ - assert_memory_equal(AMchangeHash(AMchangesNext(&change1, 1)).src, - change_hash1.src, - change_hash1.count); -} - -/** - * \brief recursive sets are possible - */ -static void test_recursive_sets_are_possible(void** state) { - AMresultStack* stack = *state; - /* const doc = create("aaaa") */ - AMdoc* const doc = AMpush(&stack, - AMcreate(AMpush(&stack, - AMactorIdInitStr("aaaa"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; - /* const l1 = doc.putObject("_root", 
"list", [{ foo: "bar" }, [1, 2, 3]])*/ - AMobjId const* const l1 = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, "list", AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - { - AMobjId const* const map = AMpush( - &stack, - AMlistPutObject(doc, l1, 0, true, AM_OBJ_TYPE_MAP), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - AMfree(AMmapPutStr(doc, map, "foo", "bar")); - AMobjId const* const list = AMpush( - &stack, - AMlistPutObject(doc, l1, SIZE_MAX, true, AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - for (int value = 1; value != 4; ++value) { - AMfree(AMlistPutInt(doc, list, SIZE_MAX, true, value)); - } - } - /* const l2 = doc.insertObject(l1, 0, { zip: ["a", "b"] }) */ - AMobjId const* const l2 = AMpush( - &stack, - AMlistPutObject(doc, l1, 0, true, AM_OBJ_TYPE_MAP), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - { - AMobjId const* const list = AMpush( - &stack, - AMmapPutObject(doc, l2, "zip", AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - AMfree(AMlistPutStr(doc, list, SIZE_MAX, true, "a")); - AMfree(AMlistPutStr(doc, list, SIZE_MAX, true, "b")); - } - /* const l3 = doc.putObject("_root", "info1", "hello world") // 'text' object*/ - AMobjId const* const l3 = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, "info1", AM_OBJ_TYPE_TEXT), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - AMfree(AMspliceText(doc, l3, 0, 0, "hello world")); - /* doc.put("_root", "info2", "hello world") // 'str' */ - AMfree(AMmapPutStr(doc, AM_ROOT, "info2", "hello world")); - /* const l4 = doc.putObject("_root", "info3", "hello world") */ - AMobjId const* const l4 = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, "info3", AM_OBJ_TYPE_TEXT), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - AMfree(AMspliceText(doc, l4, 0, 0, "hello world")); - /* assert.deepEqual(doc.materialize(), { - "list": [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]], - "info1": "hello world", - "info2": "hello world", - "info3": "hello world", - }) */ - AMmapItems doc_items = AMpush(&stack, - 
AMmapRange(doc, AM_ROOT, NULL, NULL, NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMmapItem const* doc_item = AMmapItemsNext(&doc_items, 1); - assert_string_equal(AMmapItemKey(doc_item), "info1"); - assert_string_equal(AMpush(&stack, - AMtext(doc, AMmapItemObjId(doc_item), NULL), - AM_VALUE_STR, - cmocka_cb).str, "hello world"); - doc_item = AMmapItemsNext(&doc_items, 1); - assert_string_equal(AMmapItemKey(doc_item), "info2"); - assert_string_equal(AMmapItemValue(doc_item).str, "hello world"); - doc_item = AMmapItemsNext(&doc_items, 1); - assert_string_equal(AMmapItemKey(doc_item), "info3"); - assert_string_equal(AMpush(&stack, - AMtext(doc, AMmapItemObjId(doc_item), NULL), - AM_VALUE_STR, - cmocka_cb).str, "hello world"); - doc_item = AMmapItemsNext(&doc_items, 1); - assert_string_equal(AMmapItemKey(doc_item), "list"); - { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMmapItemObjId(doc_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - AMlistItem const* list_item = AMlistItemsNext(&list_items, 1); - { - AMmapItems map_items = AMpush( - &stack, - AMmapRange(doc, AMlistItemObjId(list_item), NULL, NULL, NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMmapItem const* map_item = AMmapItemsNext(&map_items, 1); - assert_string_equal(AMmapItemKey(map_item), "zip"); - { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMmapItemObjId(map_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - assert_string_equal(AMlistItemValue( - AMlistItemsNext(&list_items, 1)).str, - "a"); - assert_string_equal(AMlistItemValue( - AMlistItemsNext(&list_items, 1)).str, - "b"); - } - } - list_item = AMlistItemsNext(&list_items, 1); - { - AMmapItems map_items = AMpush( - &stack, - AMmapRange(doc, AMlistItemObjId(list_item), NULL, NULL, NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMmapItem const* map_item = AMmapItemsNext(&map_items, 1); - assert_string_equal(AMmapItemKey(map_item), 
"foo"); - assert_string_equal(AMmapItemValue(map_item).str, "bar"); - } - list_item = AMlistItemsNext(&list_items, 1); - { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMlistItemObjId(list_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - assert_int_equal(AMlistItemValue( - AMlistItemsNext(&list_items, 1)).int_, - 1); - assert_int_equal(AMlistItemValue( - AMlistItemsNext(&list_items, 1)).int_, - 2); - assert_int_equal(AMlistItemValue( - AMlistItemsNext(&list_items, 1)).int_, - 3); - } - } - /* assert.deepEqual(doc.materialize(l2), { zip: ["a", "b"] }) */ - AMmapItems map_items = AMpush( - &stack, - AMmapRange(doc, l2, NULL, NULL, NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMmapItem const* map_item = AMmapItemsNext(&map_items, 1); - assert_string_equal(AMmapItemKey(map_item), "zip"); - { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMmapItemObjId(map_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - assert_string_equal(AMlistItemValue( - AMlistItemsNext(&list_items, 1)).str, - "a"); - assert_string_equal(AMlistItemValue( - AMlistItemsNext(&list_items, 1)).str, - "b"); - } - /* assert.deepEqual(doc.materialize(l1), [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]])*/ - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, l1, 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - AMlistItem const* list_item = AMlistItemsNext(&list_items, 1); - { - AMmapItems map_items = AMpush( - &stack, - AMmapRange(doc, AMlistItemObjId(list_item), NULL, NULL, NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMmapItem const* map_item = AMmapItemsNext(&map_items, 1); - assert_string_equal(AMmapItemKey(map_item), "zip"); - { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMmapItemObjId(map_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - assert_string_equal( - AMlistItemValue(AMlistItemsNext(&list_items, 
1)).str, "a"); - assert_string_equal(AMlistItemValue( - AMlistItemsNext(&list_items, 1)).str, "b"); - } - } - list_item = AMlistItemsNext(&list_items, 1); - { - AMmapItems map_items = AMpush( - &stack, - AMmapRange(doc, AMlistItemObjId(list_item), NULL, NULL, NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMmapItem const* map_item = AMmapItemsNext(&map_items, 1); - assert_string_equal(AMmapItemKey(map_item), "foo"); - assert_string_equal(AMmapItemValue(map_item).str, "bar"); - } - list_item = AMlistItemsNext(&list_items, 1); - { - AMlistItems list_items = AMpush( - &stack, - AMlistRange(doc, AMlistItemObjId(list_item), 0, SIZE_MAX, NULL), - AM_VALUE_LIST_ITEMS, - cmocka_cb).list_items; - assert_int_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).int_, - 1); - assert_int_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).int_, - 2); - assert_int_equal(AMlistItemValue(AMlistItemsNext(&list_items, 1)).int_, - 3); - } - /* assert.deepEqual(doc.materialize(l4), "hello world") */ - assert_string_equal(AMpush(&stack, - AMtext(doc, l4, NULL), - AM_VALUE_STR, - cmocka_cb).str, "hello world"); -} - -/** - * \brief only returns an object id when objects are created - */ -static void test_only_returns_an_object_id_when_objects_are_created(void** state) { - AMresultStack* stack = *state; - /* const doc = create("aaaa") */ - AMdoc* const doc = AMpush(&stack, - AMcreate(AMpush(&stack, - AMactorIdInitStr("aaaa"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; - /* const r1 = doc.put("_root", "foo", "bar") - assert.deepEqual(r1, null); */ - AMpush(&stack, - AMmapPutStr(doc, AM_ROOT, "foo", "bar"), - AM_VALUE_VOID, - cmocka_cb); - /* const r2 = doc.putObject("_root", "list", []) */ - AMobjId const* const r2 = AMpush( - &stack, - AMmapPutObject(doc, AM_ROOT, "list", AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - /* const r3 = doc.put("_root", "counter", 10, "counter") - assert.deepEqual(r3, null); */ - AMpush(&stack, - 
AMmapPutCounter(doc, AM_ROOT, "counter", 10), - AM_VALUE_VOID, - cmocka_cb); - /* const r4 = doc.increment("_root", "counter", 1) - assert.deepEqual(r4, null); */ - AMpush(&stack, - AMmapIncrement(doc, AM_ROOT, "counter", 1), - AM_VALUE_VOID, - cmocka_cb); - /* const r5 = doc.delete("_root", "counter") - assert.deepEqual(r5, null); */ - AMpush(&stack, - AMmapDelete(doc, AM_ROOT, "counter"), - AM_VALUE_VOID, - cmocka_cb); - /* const r6 = doc.insert(r2, 0, 10); - assert.deepEqual(r6, null); */ - AMpush(&stack, - AMlistPutInt(doc, r2, 0, true, 10), - AM_VALUE_VOID, - cmocka_cb); - /* const r7 = doc.insertObject(r2, 0, {}); */ - AMobjId const* const r7 = AMpush( - &stack, - AMlistPutObject(doc, r2, 0, true, AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - /* const r8 = doc.splice(r2, 1, 0, ["a", "b", "c"]); */ - AMvalue const STRS[] = {{.str_tag = AM_VALUE_STR, .str = "a", - .str_tag = AM_VALUE_STR, .str = "b", - .str_tag = AM_VALUE_STR, .str = "c"}}; - AMpush(&stack, - AMsplice(doc, r2, 1, 0, STRS, sizeof(STRS)/sizeof(AMvalue)), - AM_VALUE_VOID, - cmocka_cb); - /* assert.deepEqual(r2, "2@aaaa"); */ - assert_int_equal(AMobjIdCounter(r2), 2); - assert_string_equal(AMactorIdStr(AMobjIdActorId(r2)), "aaaa"); - /* assert.deepEqual(r7, "7@aaaa"); */ - assert_int_equal(AMobjIdCounter(r7), 7); - assert_string_equal(AMactorIdStr(AMobjIdActorId(r7)), "aaaa"); -} - -/** - * \brief objects without properties are preserved - */ -static void test_objects_without_properties_are_preserved(void** state) { - AMresultStack* stack = *state; - /* const doc1 = create("aaaa") */ - AMdoc* const doc1 = AMpush(&stack, - AMcreate(AMpush(&stack, - AMactorIdInitStr("aaaa"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; - /* const a = doc1.putObject("_root", "a", {}); */ - AMobjId const* const a = AMpush( - &stack, - AMmapPutObject(doc1, AM_ROOT, "a", AM_OBJ_TYPE_MAP), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - /* const b = doc1.putObject("_root", "b", 
{}); */ - AMobjId const* const b = AMpush( - &stack, - AMmapPutObject(doc1, AM_ROOT, "b", AM_OBJ_TYPE_MAP), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - /* const c = doc1.putObject("_root", "c", {}); */ - AMobjId const* const c = AMpush( - &stack, - AMmapPutObject(doc1, AM_ROOT, "c", AM_OBJ_TYPE_MAP), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - /* const d = doc1.put(c, "d", "dd"); */ - AMfree(AMmapPutStr(doc1, c, "d", "dd")); - /* const saved = doc1.save(); */ - AMbyteSpan const saved = AMpush(&stack, - AMsave(doc1), - AM_VALUE_BYTES, - cmocka_cb).bytes; - /* const doc2 = load(saved); */ - AMdoc* const doc2 = AMpush(&stack, - AMload(saved.src, saved.count), - AM_VALUE_DOC, - cmocka_cb).doc; - /* assert.deepEqual(doc2.getWithType("_root", "a"), ["map", a]) */ - AMmapItems doc_items = AMpush(&stack, - AMmapRange(doc2, AM_ROOT, NULL, NULL, NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - assert_true(AMobjIdEqual(AMmapItemObjId(AMmapItemsNext(&doc_items, 1)), a)); - /* assert.deepEqual(doc2.keys(a), []) */ - AMstrs keys = AMpush(&stack, - AMkeys(doc1, a, NULL), - AM_VALUE_STRS, - cmocka_cb).strs; - assert_int_equal(AMstrsSize(&keys), 0); - /* assert.deepEqual(doc2.getWithType("_root", "b"), ["map", b]) */ - assert_true(AMobjIdEqual(AMmapItemObjId(AMmapItemsNext(&doc_items, 1)), b)); - /* assert.deepEqual(doc2.keys(b), []) */ - keys = AMpush(&stack, AMkeys(doc1, b, NULL), AM_VALUE_STRS, cmocka_cb).strs; - assert_int_equal(AMstrsSize(&keys), 0); - /* assert.deepEqual(doc2.getWithType("_root", "c"), ["map", c]) */ - assert_true(AMobjIdEqual(AMmapItemObjId(AMmapItemsNext(&doc_items, 1)), c)); - /* assert.deepEqual(doc2.keys(c), ["d"]) */ - keys = AMpush(&stack, AMkeys(doc1, c, NULL), AM_VALUE_STRS, cmocka_cb).strs; - assert_string_equal(AMstrsNext(&keys, 1), "d"); - /* assert.deepEqual(doc2.getWithType(c, "d"), ["str", "dd"]) */ - AMobjItems obj_items = AMpush(&stack, - AMobjValues(doc1, c, NULL), - AM_VALUE_OBJ_ITEMS, - cmocka_cb).obj_items; - 
assert_string_equal(AMobjItemValue(AMobjItemsNext(&obj_items, 1)).str, "dd"); -} - -/** - * \brief should allow you to forkAt a heads - */ -static void test_should_allow_you_to_forkAt_a_heads(void** state) { - AMresultStack* stack = *state; - /* const A = create("aaaaaa") */ - AMdoc* const A = AMpush(&stack, - AMcreate(AMpush(&stack, - AMactorIdInitStr("aaaaaa"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; - /* A.put("/", "key1", "val1"); */ - AMfree(AMmapPutStr(A, AM_ROOT, "key1", "val1")); - /* A.put("/", "key2", "val2"); */ - AMfree(AMmapPutStr(A, AM_ROOT, "key2", "val2")); - /* const heads1 = A.getHeads(); */ - AMchangeHashes const heads1 = AMpush(&stack, - AMgetHeads(A), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - /* const B = A.fork("bbbbbb") */ - AMdoc* const B = AMpush(&stack, AMfork(A, NULL), AM_VALUE_DOC, cmocka_cb).doc; - AMfree(AMsetActorId(B, AMpush(&stack, - AMactorIdInitStr("bbbbbb"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - /* A.put("/", "key3", "val3"); */ - AMfree(AMmapPutStr(A, AM_ROOT, "key3", "val3")); - /* B.put("/", "key4", "val4"); */ - AMfree(AMmapPutStr(B, AM_ROOT, "key4", "val4")); - /* A.merge(B) */ - AMfree(AMmerge(A, B)); - /* const heads2 = A.getHeads(); */ - AMchangeHashes const heads2 = AMpush(&stack, - AMgetHeads(A), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - /* A.put("/", "key5", "val5"); */ - AMfree(AMmapPutStr(A, AM_ROOT, "key5", "val5")); - /* assert.deepEqual(A.forkAt(heads1).materialize("/"), A.materialize("/", heads1))*/ - AMmapItems AforkAt1_items = AMpush( - &stack, - AMmapRange( - AMpush(&stack, AMfork(A, &heads1), AM_VALUE_DOC, cmocka_cb).doc, - AM_ROOT, NULL, NULL, NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMmapItems A1_items = AMpush(&stack, - AMmapRange(A, AM_ROOT, NULL, NULL, &heads1), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - assert_true(AMmapItemsEqual(&AforkAt1_items, &A1_items)); - /* 
assert.deepEqual(A.forkAt(heads2).materialize("/"), A.materialize("/", heads2))*/ - AMmapItems AforkAt2_items = AMpush( - &stack, - AMmapRange( - AMpush(&stack, AMfork(A, &heads2), AM_VALUE_DOC, cmocka_cb).doc, - AM_ROOT, NULL, NULL, NULL), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - AMmapItems A2_items = AMpush(&stack, - AMmapRange(A, AM_ROOT, NULL, NULL, &heads2), - AM_VALUE_MAP_ITEMS, - cmocka_cb).map_items; - assert_true(AMmapItemsEqual(&AforkAt2_items, &A2_items)); -} - -/** - * \brief should handle merging text conflicts then saving & loading - */ -static void test_should_handle_merging_text_conflicts_then_saving_and_loading(void** state) { - AMresultStack* stack = *state; - /* const A = create("aabbcc") */ - AMdoc* const A = AMpush(&stack, - AMcreate(AMpush(&stack, - AMactorIdInitStr("aabbcc"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; - /* const At = A.putObject('_root', 'text', "") */ - AMobjId const* const At = AMpush( - &stack, - AMmapPutObject(A, AM_ROOT, "text", AM_OBJ_TYPE_TEXT), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - /* A.splice(At, 0, 0, 'hello') */ - AMfree(AMspliceText(A, At, 0, 0, "hello")); - /* */ - /* const B = A.fork() */ - AMdoc* const B = AMpush(&stack, AMfork(A, NULL), AM_VALUE_DOC, cmocka_cb).doc; - /* */ - /* assert.deepEqual(B.getWithType("_root", "text"), ["text", At]) */ - assert_string_equal(AMpush(&stack, - AMtext(B, - AMpush(&stack, - AMmapGet(B, AM_ROOT, "text", NULL), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id, - NULL), - AM_VALUE_STR, - cmocka_cb).str, - AMpush(&stack, - AMtext(A, At, NULL), - AM_VALUE_STR, - cmocka_cb).str); - /* */ - /* B.splice(At, 4, 1) */ - AMfree(AMspliceText(B, At, 4, 1, NULL)); - /* B.splice(At, 4, 0, '!') */ - AMfree(AMspliceText(B, At, 4, 0, "!")); - /* B.splice(At, 5, 0, ' ') */ - AMfree(AMspliceText(B, At, 5, 0, " ")); - /* B.splice(At, 6, 0, 'world') */ - AMfree(AMspliceText(B, At, 6, 0, "world")); - /* */ - /* A.merge(B) */ - AMfree(AMmerge(A, B)); - /* */ - 
/* const binary = A.save() */ - AMbyteSpan const binary = AMpush(&stack, - AMsave(A), - AM_VALUE_BYTES, - cmocka_cb).bytes; - /* */ - /* const C = load(binary) */ - AMdoc* const C = AMpush(&stack, - AMload(binary.src, binary.count), - AM_VALUE_DOC, - cmocka_cb).doc; - /* */ - /* assert.deepEqual(C.getWithType('_root', 'text'), ['text', '1@aabbcc'])*/ - AMobjId const* const C_text = AMpush(&stack, - AMmapGet(C, AM_ROOT, "text", NULL), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; - assert_int_equal(AMobjIdCounter(C_text), 1); - assert_string_equal(AMactorIdStr(AMobjIdActorId(C_text)), "aabbcc"); - /* assert.deepEqual(C.text(At), 'hell! world') */ - assert_string_equal(AMpush(&stack, - AMtext(C, At, NULL), - AM_VALUE_STR, - cmocka_cb).str, "hell! world"); -} - -int run_ported_wasm_basic_tests(void) { - const struct CMUnitTest tests[] = { - cmocka_unit_test_setup_teardown(test_create_clone_and_free, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_start_and_commit, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_getting_a_nonexistent_prop_does_not_throw_an_error, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_set_and_get_a_simple_value, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_use_bytes, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_make_subobjects, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_make_lists, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_lists_have_insert_set_splice_and_push_ops, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_delete_non_existent_props, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_del, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_use_counters, setup_stack, teardown_stack), - 
cmocka_unit_test_setup_teardown(test_should_be_able_to_splice_text, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_insert_objects_into_text, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_save_all_or_incrementally, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_splice_text_2, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_local_inc_increments_all_visible_counters_in_a_map, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_local_inc_increments_all_visible_counters_in_a_sequence, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_be_able_to_fetch_changes_by_hash, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_recursive_sets_are_possible, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_only_returns_an_object_id_when_objects_are_created, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_objects_without_properties_are_preserved, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_allow_you_to_forkAt_a_heads, setup_stack, teardown_stack), - cmocka_unit_test_setup_teardown(test_should_handle_merging_text_conflicts_then_saving_and_loading, setup_stack, teardown_stack) - }; - - return cmocka_run_group_tests(tests, NULL, NULL); -} diff --git a/automerge-c/test/stack_utils.c b/automerge-c/test/stack_utils.c deleted file mode 100644 index 8eb8b72d..00000000 --- a/automerge-c/test/stack_utils.c +++ /dev/null @@ -1,30 +0,0 @@ -#include -#include -#include - -/* third-party */ -#include - -/* local */ -#include "stack_utils.h" - -void cmocka_cb(AMresultStack** stack, uint8_t discriminant) { - assert_non_null(stack); - assert_non_null(*stack); - assert_non_null((*stack)->result); - if (AMresultStatus((*stack)->result) != AM_STATUS_OK) { - fail_msg("%s", AMerrorMessage((*stack)->result)); - } - 
assert_int_equal(AMresultValue((*stack)->result).tag, discriminant); -} - -int setup_stack(void** state) { - *state = NULL; - return 0; -} - -int teardown_stack(void** state) { - AMresultStack* stack = *state; - AMfreeStack(&stack); - return 0; -} diff --git a/automerge-c/test/stack_utils.h b/automerge-c/test/stack_utils.h deleted file mode 100644 index 473feebc..00000000 --- a/automerge-c/test/stack_utils.h +++ /dev/null @@ -1,38 +0,0 @@ -#ifndef STACK_UTILS_H -#define STACK_UTILS_H - -#include - -/* local */ -#include - -/** - * \brief Reports an error through a cmocka assertion. - * - * \param[in,out] stack A pointer to a pointer to an `AMresultStack` struct. - * \param[in] discriminant An `AMvalueVariant` enum tag. - * \pre \p stack` != NULL`. - */ -void cmocka_cb(AMresultStack** stack, uint8_t discriminant); - -/** - * \brief Allocates a result stack for storing the results allocated during one - * or more test cases. - * - * \param[in,out] state A pointer to a pointer to an `AMresultStack` struct. - * \pre \p state` != NULL`. - * \warning The `AMresultStack` struct returned through \p state must be - * deallocated with `teardown_stack()` in order to prevent memory leaks. - */ -int setup_stack(void** state); - -/** - * \brief Deallocates a result stack after deallocating any results that were - * stored in it by one or more test cases. - * - * \param[in] state A pointer to a pointer to an `AMresultStack` struct. - * \pre \p state` != NULL`. - */ -int teardown_stack(void** state); - -#endif /* STACK_UTILS_H */ diff --git a/automerge-c/test/str_utils.h b/automerge-c/test/str_utils.h deleted file mode 100644 index b9985683..00000000 --- a/automerge-c/test/str_utils.h +++ /dev/null @@ -1,14 +0,0 @@ -#ifndef STR_UTILS_H -#define STR_UTILS_H - -/** - * \brief Converts a hexadecimal string into a sequence of bytes. - * - * \param[in] hex_str A string. - * \param[in] src A pointer to a contiguous sequence of bytes. 
- * \param[in] count The number of bytes to copy to \p src. - * \pre \p count `<=` length of \p src. - */ -void hex_to_bytes(char const* hex_str, uint8_t* src, size_t const count); - -#endif /* STR_UTILS_H */ diff --git a/automerge-cli/Cargo.lock b/automerge-cli/Cargo.lock deleted file mode 100644 index a330ee89..00000000 --- a/automerge-cli/Cargo.lock +++ /dev/null @@ -1,857 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -version = 3 - -[[package]] -name = "adler" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" - -[[package]] -name = "ansi_term" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" -dependencies = [ - "winapi", -] - -[[package]] -name = "anyhow" -version = "1.0.55" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "159bb86af3a200e19a068f4224eae4c8bb2d0fa054c7e5d1cacd5cef95e684cd" - -[[package]] -name = "atty" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -dependencies = [ - "hermit-abi", - "libc", - "winapi", -] - -[[package]] -name = "autocfg" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" - -[[package]] -name = "automerge" -version = "0.1.0" -dependencies = [ - "flate2", - "fxhash", - "hex", - "itertools", - "js-sys", - "leb128", - "nonzero_ext", - "rand", - "serde", - "sha2", - "smol_str", - "thiserror", - "tinyvec", - "tracing", - "unicode-segmentation", - "uuid", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "automerge-cli" -version = "0.1.0" -dependencies = [ - "anyhow", - "atty", - "automerge", 
- "clap", - "colored_json", - "combine", - "duct", - "maplit", - "serde_json", - "thiserror", - "tracing-subscriber", -] - -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - -[[package]] -name = "block-buffer" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324" -dependencies = [ - "generic-array", -] - -[[package]] -name = "bumpalo" -version = "3.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899" - -[[package]] -name = "byteorder" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" - -[[package]] -name = "bytes" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "clap" -version = "3.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ced1892c55c910c1219e98d6fc8d71f6bddba7905866ce740066d8bfea859312" -dependencies = [ - "atty", - "bitflags", - "clap_derive", - "indexmap", - "lazy_static", - "os_str_bytes", - "strsim", - "termcolor", - "textwrap", -] - -[[package]] -name = "clap_derive" -version = "3.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da95d038ede1a964ce99f49cbe27a7fb538d1da595e4b4f70b8c8f338d17bf16" -dependencies = [ - "heck", - "proc-macro-error", - "proc-macro2", - "quote", - "syn", -] - 
-[[package]] -name = "colored_json" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd32eb54d016e203b7c2600e3a7802c75843a92e38ccc4869aefeca21771a64" -dependencies = [ - "ansi_term", - "atty", - "libc", - "serde", - "serde_json", -] - -[[package]] -name = "combine" -version = "4.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50b727aacc797f9fc28e355d21f34709ac4fc9adecfe470ad07b8f4464f53062" -dependencies = [ - "bytes", - "memchr", -] - -[[package]] -name = "cpufeatures" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469" -dependencies = [ - "libc", -] - -[[package]] -name = "crc32fast" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "crypto-common" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57952ca27b5e3606ff4dd79b0020231aaf9d6aa76dc05fd30137538c50bd3ce8" -dependencies = [ - "generic-array", - "typenum", -] - -[[package]] -name = "digest" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506" -dependencies = [ - "block-buffer", - "crypto-common", -] - -[[package]] -name = "duct" -version = "0.13.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fc6a0a59ed0888e0041cf708e66357b7ae1a82f1c67247e1f93b5e0818f7d8d" -dependencies = [ - "libc", - "once_cell", - "os_pipe", - "shared_child", -] - -[[package]] -name = "either" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" - -[[package]] -name = "flate2" 
-version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f" -dependencies = [ - "cfg-if", - "crc32fast", - "libc", - "miniz_oxide", -] - -[[package]] -name = "fxhash" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" -dependencies = [ - "byteorder", -] - -[[package]] -name = "generic-array" -version = "0.14.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd48d33ec7f05fbfa152300fdad764757cbded343c1aa1cff2fbaf4134851803" -dependencies = [ - "typenum", - "version_check", -] - -[[package]] -name = "getrandom" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d39cd93900197114fa1fcb7ae84ca742095eed9442088988ae74fa744e930e77" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "wasi", - "wasm-bindgen", -] - -[[package]] -name = "hashbrown" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" - -[[package]] -name = "heck" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" - -[[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - -[[package]] -name = "indexmap" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"282a6247722caba404c065016bbfa522806e51714c34f5dfc3e4a3a46fcb4223" -dependencies = [ - "autocfg", - "hashbrown", -] - -[[package]] -name = "itertools" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" - -[[package]] -name = "js-sys" -version = "0.3.56" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a38fc24e30fd564ce974c02bf1d337caddff65be6cc4735a1f7eab22a7440f04" -dependencies = [ - "wasm-bindgen", -] - -[[package]] -name = "lazy_static" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" - -[[package]] -name = "leb128" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" - -[[package]] -name = "libc" -version = "0.2.119" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bf2e165bb3457c8e098ea76f3e3bc9db55f87aa90d52d0e6be741470916aaa4" - -[[package]] -name = "log" -version = "0.4.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "maplit" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" - -[[package]] -name = "memchr" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" - 
-[[package]] -name = "miniz_oxide" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" -dependencies = [ - "adler", - "autocfg", -] - -[[package]] -name = "nonzero_ext" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44a1290799eababa63ea60af0cbc3f03363e328e58f32fb0294798ed3e85f444" - -[[package]] -name = "once_cell" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5" - -[[package]] -name = "os_pipe" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb233f06c2307e1f5ce2ecad9f8121cffbbee2c95428f44ea85222e460d0d213" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "os_str_bytes" -version = "6.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e22443d1643a904602595ba1cd8f7d896afe56d26712531c5ff73a15b2fbf64" -dependencies = [ - "memchr", -] - -[[package]] -name = "pin-project-lite" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c" - -[[package]] -name = "ppv-lite86" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" - -[[package]] -name = "proc-macro-error" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" -dependencies = [ - "proc-macro-error-attr", - "proc-macro2", - "quote", - "syn", - "version_check", -] - -[[package]] -name = "proc-macro-error-attr" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" -dependencies = [ - "proc-macro2", - "quote", - "version_check", -] - -[[package]] -name = "proc-macro2" -version = "1.0.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029" -dependencies = [ - "unicode-xid", -] - -[[package]] -name = "quote" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "libc", - "rand_chacha", - "rand_core", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core", -] - -[[package]] -name = "rand_core" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" -dependencies = [ - "getrandom", -] - -[[package]] -name = "ryu" -version = "1.0.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" - -[[package]] -name = "serde" -version = "1.0.136" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.136" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" 
-dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "serde_json" -version = "1.0.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95" -dependencies = [ - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "sha2" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55deaec60f81eefe3cce0dc50bda92d6d8e88f2a27df7c5033b42afeb1ed2676" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - -[[package]] -name = "sharded-slab" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" -dependencies = [ - "lazy_static", -] - -[[package]] -name = "shared_child" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6be9f7d5565b1483af3e72975e2dee33879b3b86bd48c0929fccf6585d79e65a" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "smallvec" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" - -[[package]] -name = "smol_str" -version = "0.1.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61d15c83e300cce35b7c8cd39ff567c1ef42dde6d4a1a38dbdbf9a59902261bd" -dependencies = [ - "serde", -] - -[[package]] -name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - -[[package]] -name = "syn" -version = "1.0.86" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b" -dependencies = [ - "proc-macro2", - "quote", - "unicode-xid", -] - -[[package]] -name = "termcolor" -version = "1.1.3" -source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "textwrap" -version = "0.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb" - -[[package]] -name = "thiserror" -version = "1.0.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "1.0.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "thread_local" -version = "1.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" -dependencies = [ - "once_cell", -] - -[[package]] -name = "tinyvec" -version = "1.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c1c1d5a42b6245520c249549ec267180beaffcc0615401ac8e31853d4b6d8d2" -dependencies = [ - "tinyvec_macros", -] - -[[package]] -name = "tinyvec_macros" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" - -[[package]] -name = "tracing" -version = "0.1.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6c650a8ef0cd2dd93736f033d21cbd1224c5a967aa0c258d00fcf7dafef9b9f" -dependencies = [ - "cfg-if", - "log", - "pin-project-lite", - "tracing-attributes", - "tracing-core", -] - -[[package]] -name = "tracing-attributes" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "8276d9a4a3a558d7b7ad5303ad50b53d58264641b82914b7ada36bd762e7a716" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "tracing-core" -version = "0.1.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03cfcb51380632a72d3111cb8d3447a8d908e577d31beeac006f836383d29a23" -dependencies = [ - "lazy_static", - "valuable", -] - -[[package]] -name = "tracing-log" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6923477a48e41c1951f1999ef8bb5a3023eb723ceadafe78ffb65dc366761e3" -dependencies = [ - "lazy_static", - "log", - "tracing-core", -] - -[[package]] -name = "tracing-subscriber" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e0ab7bdc962035a87fba73f3acca9b8a8d0034c2e6f60b84aeaaddddc155dce" -dependencies = [ - "ansi_term", - "sharded-slab", - "smallvec", - "thread_local", - "tracing-core", - "tracing-log", -] - -[[package]] -name = "typenum" -version = "1.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" - -[[package]] -name = "unicode-segmentation" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" - -[[package]] -name = "unicode-xid" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" - -[[package]] -name = "uuid" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" -dependencies = [ - "getrandom", - "serde", -] - -[[package]] -name = "valuable" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" - -[[package]] -name = "version_check" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" - -[[package]] -name = "wasi" -version = "0.10.2+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" - -[[package]] -name = "wasm-bindgen" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25f1af7423d8588a3d840681122e72e6a24ddbcb3f0ec385cac0d12d24256c06" -dependencies = [ - "cfg-if", - "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b21c0df030f5a177f3cba22e9bc4322695ec43e7257d865302900290bcdedca" -dependencies = [ - "bumpalo", - "lazy_static", - "log", - "proc-macro2", - "quote", - "syn", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f4203d69e40a52ee523b2529a773d5ffc1dc0071801c87b3d270b471b80ed01" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa8a30d46208db204854cadbb5d4baf5fcf8071ba5bf48190c3e59937962ebc" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "wasm-bindgen-backend", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d958d035c4438e28c70e4321a2911302f10135ce78a9c7834c0cab4123d06a2" - -[[package]] -name = "web-sys" -version = "0.3.56" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c060b319f29dd25724f09a2ba1418f142f539b2be99fbf4d2d5a8f7330afb8eb" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-util" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" -dependencies = [ - "winapi", -] - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/automerge-js/README.md b/automerge-js/README.md deleted file mode 100644 index 707c51bb..00000000 --- a/automerge-js/README.md +++ /dev/null @@ -1,25 +0,0 @@ -## Automerge JS - -This is a reimplementation of Automerge as a JavaScript wrapper around the "automerge-wasm". - -This package is in alpha and feedback in welcome. - -The primary differences between using this package and "automerge" are as follows: - -1. The low level api needs to plugged in via the use function. The only current implementation of "automerge-wasm" but another could used in theory. - -```javascript -import * as Automerge from "automerge-js"; -import * as wasm_api from "automerge-wasm"; - -// browsers require an async wasm load - see automerge-wasm docs -Automerge.use(wasm_api); -``` - -2. There is no front-end back-end split, and no patch format or patch observer. 
These concepts don't make sense with the wasm implementation. - -3. The basic `Doc` object is now a Proxy object and will behave differently in a repl environment. - -4. The 'Text' class is currently very slow and needs to be re-worked. - -Beyond this please refer to the Automerge [README](http://github.com/automerge/automerge/) for further information. diff --git a/automerge-js/config/cjs.json b/automerge-js/config/cjs.json deleted file mode 100644 index d7f8c63f..00000000 --- a/automerge-js/config/cjs.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "outDir": "../dist/cjs" - } -} diff --git a/automerge-js/config/mjs.json b/automerge-js/config/mjs.json deleted file mode 100644 index 8f964400..00000000 --- a/automerge-js/config/mjs.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "target": "es6", - "module": "es6", - "outDir": "../dist/mjs" - } -} diff --git a/automerge-js/examples/webpack/src/index.js b/automerge-js/examples/webpack/src/index.js deleted file mode 100644 index 876c1940..00000000 --- a/automerge-js/examples/webpack/src/index.js +++ /dev/null @@ -1,22 +0,0 @@ -import * as Automerge from "automerge-js" -import init from "automerge-wasm" - -// hello world code that will run correctly on web or node - -init().then((api) => { - Automerge.use(api) - let doc = Automerge.init() - doc = Automerge.change(doc, (d) => d.hello = "from automerge-js") - const result = JSON.stringify(doc) - - if (typeof document !== 'undefined') { - // browser - const element = document.createElement('div'); - element.innerHTML = JSON.stringify(result) - document.body.appendChild(element); - } else { - // server - console.log("node:", result) - } -}) - diff --git a/automerge-js/index.d.ts b/automerge-js/index.d.ts deleted file mode 100644 index a18505c2..00000000 --- a/automerge-js/index.d.ts +++ /dev/null @@ -1,113 +0,0 @@ -import { API as LowLevelApi } from "automerge-types"; -import { 
Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, MaterializeValue } from "automerge-types"; -import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-types"; - -export { API as LowLevelApi } from "automerge-types"; -export { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue } from "automerge-types"; -export { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-types"; - -export type ChangeOptions = { - message?: string; - time?: number; -}; - -export class Int { - value: number; - constructor(value: number); -} - -export class Uint { - value: number; - constructor(value: number); -} - -export class Float64 { - value: number; - constructor(value: number); -} - -export class Counter { - value: number; - constructor(value?: number); - valueOf(): number; - toString(): string; - toJSON(): number; -} - -export class Text { - elems: AutomergeValue[]; - constructor(text?: string | string[]); - get length(): number; - get(index: number): AutomergeValue | undefined; - [index: number]: AutomergeValue | undefined; - [Symbol.iterator](): { - next(): { - done: boolean; - value: AutomergeValue; - } | { - done: boolean; - value?: undefined; - }; - }; - toString(): string; - toSpans(): AutomergeValue[]; - toJSON(): string; - set(index: number, value: AutomergeValue): void; - insertAt(index: number, ...values: AutomergeValue[]): void; - deleteAt(index: number, numDelete?: number): void; - map(callback: (e: AutomergeValue) => T): void; -} - -export type Doc = { - readonly [P in keyof T]: T[P]; -}; - -export type ChangeFn = (doc: T) => void; - -export interface State { - change: DecodedChange; - snapshot: T; -} - -export type ScalarValue = string | number | null | boolean | Date | Counter | Uint8Array; - -export type AutomergeValue = ScalarValue | {[key: string]: AutomergeValue;} | Array; - -type Conflicts = { - [key: string]: AutomergeValue; -}; - -export function use(api: 
LowLevelApi): void; -export function getBackend(doc: Doc) : Automerge; -export function init(actor?: ActorId): Doc; -export function clone(doc: Doc): Doc; -export function free(doc: Doc): void; -export function from(initialState: T | Doc, actor?: ActorId): Doc; -export function change(doc: Doc, options: string | ChangeOptions | ChangeFn, callback?: ChangeFn): Doc; -export function emptyChange(doc: Doc, options: ChangeOptions): unknown; -export function load(data: Uint8Array, actor?: ActorId): Doc; -export function save(doc: Doc): Uint8Array; -export function merge(local: Doc, remote: Doc): Doc; -export function getActorId(doc: Doc): ActorId; -export function getConflicts(doc: Doc, prop: Prop): Conflicts | undefined; -export function getLastLocalChange(doc: Doc): Change | undefined; -export function getObjectId(doc: Doc): ObjID; -export function getChanges(oldState: Doc, newState: Doc): Change[]; -export function getAllChanges(doc: Doc): Change[]; -export function applyChanges(doc: Doc, changes: Change[]): [Doc]; -export function getHistory(doc: Doc): State[]; -export function equals(val1: Doc, val2: Doc): boolean; -export function encodeSyncState(state: SyncState): Uint8Array; -export function decodeSyncState(state: Uint8Array): SyncState; -export function generateSyncMessage(doc: Doc, inState: SyncState): [SyncState, SyncMessage | null]; -export function receiveSyncMessage(doc: Doc, inState: SyncState, message: SyncMessage): [Doc, SyncState, null]; -export function initSyncState(): SyncState; -export function encodeChange(change: DecodedChange): Change; -export function decodeChange(data: Change): DecodedChange; -export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; -export function decodeSyncMessage(message: SyncMessage): DecodedSyncMessage; -export function getMissingDeps(doc: Doc, heads: Heads): Heads; -export function getHeads(doc: Doc): Heads; -export function dump(doc: Doc): void; -export function toJS(doc: Doc): MaterializeValue; 
-export function uuid(): string; diff --git a/automerge-js/package.json b/automerge-js/package.json deleted file mode 100644 index 228d94b8..00000000 --- a/automerge-js/package.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "name": "automerge-js", - "collaborators": [ - "Orion Henry ", - "Martin Kleppmann" - ], - "version": "0.1.12", - "description": "Reimplementation of `automerge` on top of the automerge-wasm backend", - "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-js", - "repository": "github:automerge/automerge-rs", - "files": [ - "README.md", - "LICENSE", - "package.json", - "index.d.ts", - "dist/cjs/constants.js", - "dist/cjs/types.js", - "dist/cjs/numbers.js", - "dist/cjs/index.js", - "dist/cjs/uuid.js", - "dist/cjs/counter.js", - "dist/cjs/low_level.js", - "dist/cjs/text.js", - "dist/cjs/proxies.js", - "dist/mjs/constants.js", - "dist/mjs/types.js", - "dist/mjs/numbers.js", - "dist/mjs/index.js", - "dist/mjs/uuid.js", - "dist/mjs/counter.js", - "dist/mjs/low_level.js", - "dist/mjs/text.js", - "dist/mjs/proxies.js" - ], - "types": "index.d.ts", - "module": "./dist/mjs/index.js", - "main": "./dist/cjs/index.js", - "license": "MIT", - "scripts": { - "lint": "eslint src", - "build": "tsc -p config/mjs.json && tsc -p config/cjs.json", - "test": "ts-mocha test/*.ts" - }, - "devDependencies": { - "@types/expect": "^24.3.0", - "@types/mocha": "^9.1.1", - "@types/uuid": "^8.3.4", - "@typescript-eslint/eslint-plugin": "^5.25.0", - "@typescript-eslint/parser": "^5.25.0", - "automerge-wasm": "^0.1.6", - "eslint": "^8.15.0", - "fast-sha256": "^1.3.0", - "mocha": "^10.0.0", - "pako": "^2.0.4", - "ts-mocha": "^10.0.0", - "typescript": "^4.6.4" - }, - "dependencies": { - "automerge-types": "0.1.5", - "uuid": "^8.3" - } -} diff --git a/automerge-js/src/constants.ts b/automerge-js/src/constants.ts deleted file mode 100644 index e37835d1..00000000 --- a/automerge-js/src/constants.ts +++ /dev/null @@ -1,23 +0,0 @@ -// Properties of the document root 
object -//const OPTIONS = Symbol('_options') // object containing options passed to init() -//const CACHE = Symbol('_cache') // map from objectId to immutable object -export const STATE = Symbol.for('_am_state') // object containing metadata about current state (e.g. sequence numbers) -export const HEADS = Symbol.for('_am_heads') // object containing metadata about current state (e.g. sequence numbers) -export const TRACE = Symbol.for('_am_trace') // object containing metadata about current state (e.g. sequence numbers) -export const OBJECT_ID = Symbol.for('_am_objectId') // object containing metadata about current state (e.g. sequence numbers) -export const READ_ONLY = Symbol.for('_am_readOnly') // object containing metadata about current state (e.g. sequence numbers) -export const FROZEN = Symbol.for('_am_frozen') // object containing metadata about current state (e.g. sequence numbers) - -export const UINT = Symbol.for('_am_uint') -export const INT = Symbol.for('_am_int') -export const F64 = Symbol.for('_am_f64') -export const COUNTER = Symbol.for('_am_counter') -export const TEXT = Symbol.for('_am_text') - -// Properties of all Automerge objects -//const OBJECT_ID = Symbol('_objectId') // the object ID of the current object (string) -//const CONFLICTS = Symbol('_conflicts') // map or list (depending on object type) of conflicts -//const CHANGE = Symbol('_change') // the context object on proxy objects used in change callback -//const ELEM_IDS = Symbol('_elemIds') // list containing the element ID of each list element - - diff --git a/automerge-js/src/index.ts b/automerge-js/src/index.ts deleted file mode 100644 index 109b093c..00000000 --- a/automerge-js/src/index.ts +++ /dev/null @@ -1,397 +0,0 @@ - -export { uuid } from './uuid' - -import { rootProxy, listProxy, textProxy, mapProxy } from "./proxies" -import { STATE, HEADS, TRACE, OBJECT_ID, READ_ONLY, FROZEN } from "./constants" - -import { AutomergeValue, Counter } from "./types" -export { AutomergeValue, 
Text, Counter, Int, Uint, Float64 } from "./types" - -import { API } from "automerge-types"; -import { ApiHandler, UseApi } from "./low_level" - -import { Actor as ActorId, Prop, ObjID, Change, DecodedChange, Heads, Automerge, MaterializeValue } from "automerge-types" -import { JsSyncState as SyncState, SyncMessage, DecodedSyncMessage } from "automerge-types" - -export type ChangeOptions = { message?: string, time?: number } - -export type Doc = { readonly [P in keyof T]: Doc } - -export type ChangeFn = (doc: T) => void - -export interface State { - change: DecodedChange - snapshot: T -} - -export function use(api: API) { - UseApi(api) -} - -export function getBackend(doc: Doc) : Automerge { - return _state(doc) -} - -function _state(doc: Doc) : Automerge { - const state = Reflect.get(doc,STATE) - if (state == undefined) { - throw new RangeError("must be the document root") - } - return state -} - -function _frozen(doc: Doc) : boolean { - return Reflect.get(doc,FROZEN) === true -} - -function _heads(doc: Doc) : Heads | undefined { - return Reflect.get(doc,HEADS) -} - -function _trace(doc: Doc) : string | undefined { - return Reflect.get(doc,TRACE) -} - -function _set_heads(doc: Doc, heads: Heads) { - Reflect.set(doc,HEADS,heads) - Reflect.set(doc,TRACE,(new Error()).stack) -} - -function _clear_heads(doc: Doc) { - Reflect.set(doc,HEADS,undefined) - Reflect.set(doc,TRACE,undefined) -} - -function _obj(doc: Doc) : ObjID { - return Reflect.get(doc,OBJECT_ID) -} - -function _readonly(doc: Doc) : boolean { - return Reflect.get(doc,READ_ONLY) === true -} - -export function init(actor?: ActorId) : Doc{ - if (typeof actor !== "string") { - actor = undefined - } - const state = ApiHandler.create(actor) - return rootProxy(state, true); -} - -export function clone(doc: Doc) : Doc { - const state = _state(doc).clone() - return rootProxy(state, true); -} - -export function free(doc: Doc) { - return _state(doc).free() -} - -export function from(initialState: T | Doc, actor?: 
ActorId): Doc { - return change(init(actor), (d) => Object.assign(d, initialState)) -} - -export function change(doc: Doc, options: string | ChangeOptions | ChangeFn, callback?: ChangeFn): Doc { - if (typeof options === 'function') { - return _change(doc, {}, options) - } else if (typeof callback === 'function') { - if (typeof options === "string") { - options = { message: options } - } - return _change(doc, options, callback) - } else { - throw RangeError("Invalid args for change") - } -} - -function _change(doc: Doc, options: ChangeOptions, callback: ChangeFn): Doc { - - - if (typeof callback !== "function") { - throw new RangeError("invalid change function"); - } - - if (doc === undefined || _state(doc) === undefined || _obj(doc) !== "_root") { - throw new RangeError("must be the document root"); - } - if (_frozen(doc) === true) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (!!_heads(doc) === true) { - throw new RangeError("Attempting to change an out of date document - set at: " + _trace(doc)); - } - if (_readonly(doc) === false) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const state = _state(doc) - const heads = state.getHeads() - try { - _set_heads(doc,heads) - Reflect.set(doc,FROZEN,true) - const root : T = rootProxy(state); - callback(root) - if (state.pendingOps() === 0) { - Reflect.set(doc,FROZEN,false) - _clear_heads(doc) - return doc - } else { - state.commit(options.message, options.time) - return rootProxy(state, true); - } - } catch (e) { - //console.log("ERROR: ",e) - Reflect.set(doc,FROZEN,false) - _clear_heads(doc) - state.rollback() - throw e - } -} - -export function emptyChange(doc: Doc, options: ChangeOptions) { - if (options === undefined) { - options = {} - } - if (typeof options === "string") { - options = { message: options } - } - - if (doc === undefined || _state(doc) === undefined || _obj(doc) !== "_root") { - throw new RangeError("must be the document root"); - } 
- if (_frozen(doc) === true) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (_readonly(doc) === false) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - - const state = _state(doc) - state.commit(options.message, options.time) - return rootProxy(state, true); -} - -export function load(data: Uint8Array, actor?: ActorId) : Doc { - const state = ApiHandler.load(data, actor) - return rootProxy(state, true); -} - -export function save(doc: Doc) : Uint8Array { - const state = _state(doc) - return state.save() -} - -export function merge(local: Doc, remote: Doc) : Doc { - if (!!_heads(local) === true) { - throw new RangeError("Attempting to change an out of date document - set at: " + _trace(local)); - } - const localState = _state(local) - const heads = localState.getHeads() - const remoteState = _state(remote) - const changes = localState.getChangesAdded(remoteState) - localState.applyChanges(changes) - _set_heads(local,heads) - return rootProxy(localState, true) -} - -export function getActorId(doc: Doc) : ActorId { - const state = _state(doc) - return state.getActorId() -} - -type Conflicts = { [key: string]: AutomergeValue } - -function conflictAt(context : Automerge, objectId: ObjID, prop: Prop) : Conflicts | undefined { - const values = context.getAll(objectId, prop) - if (values.length <= 1) { - return - } - const result : Conflicts = {} - for (const fullVal of values) { - switch (fullVal[0]) { - case "map": - result[fullVal[1]] = mapProxy(context, fullVal[1], [ prop ], true) - break; - case "list": - result[fullVal[1]] = listProxy(context, fullVal[1], [ prop ], true) - break; - case "text": - result[fullVal[1]] = textProxy(context, fullVal[1], [ prop ], true) - break; - //case "table": - //case "cursor": - case "str": - case "uint": - case "int": - case "f64": - case "boolean": - case "bytes": - case "null": - result[fullVal[2]] = fullVal[1] - break; - case "counter": - result[fullVal[2]] = new 
Counter(fullVal[1]) - break; - case "timestamp": - result[fullVal[2]] = new Date(fullVal[1]) - break; - default: - throw RangeError(`datatype ${fullVal[0]} unimplemented`) - } - } - return result -} - -export function getConflicts(doc: Doc, prop: Prop) : Conflicts | undefined { - const state = _state(doc) - const objectId = _obj(doc) - return conflictAt(state, objectId, prop) -} - -export function getLastLocalChange(doc: Doc) : Change | undefined { - const state = _state(doc) - return state.getLastLocalChange() || undefined -} - -export function getObjectId(doc: Doc) : ObjID { - return _obj(doc) -} - -export function getChanges(oldState: Doc, newState: Doc) : Change[] { - const o = _state(oldState) - const n = _state(newState) - const heads = _heads(oldState) - return n.getChanges(heads || o.getHeads()) -} - -export function getAllChanges(doc: Doc) : Change[] { - const state = _state(doc) - return state.getChanges([]) -} - -export function applyChanges(doc: Doc, changes: Change[]) : [Doc] { - if (doc === undefined || _obj(doc) !== "_root") { - throw new RangeError("must be the document root"); - } - if (_frozen(doc) === true) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (_readonly(doc) === false) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const state = _state(doc) - const heads = state.getHeads() - state.applyChanges(changes) - _set_heads(doc,heads) - return [rootProxy(state, true)]; -} - -export function getHistory(doc: Doc) : State[] { - const history = getAllChanges(doc) - return history.map((change, index) => ({ - get change () { - return decodeChange(change) - }, - get snapshot () { - const [state] = applyChanges(init(), history.slice(0, index + 1)) - return state - } - }) - ) -} - -// FIXME : no tests -export function equals(val1: unknown, val2: unknown) : boolean { - if (!isObject(val1) || !isObject(val2)) return val1 === val2 - const keys1 = Object.keys(val1).sort(), keys2 = 
Object.keys(val2).sort() - if (keys1.length !== keys2.length) return false - for (let i = 0; i < keys1.length; i++) { - if (keys1[i] !== keys2[i]) return false - if (!equals(val1[keys1[i]], val2[keys2[i]])) return false - } - return true -} - -export function encodeSyncState(state: SyncState) : Uint8Array { - return ApiHandler.encodeSyncState(ApiHandler.importSyncState(state)) -} - -export function decodeSyncState(state: Uint8Array) : SyncState { - return ApiHandler.exportSyncState(ApiHandler.decodeSyncState(state)) -} - -export function generateSyncMessage(doc: Doc, inState: SyncState) : [ SyncState, SyncMessage | null ] { - const state = _state(doc) - const syncState = ApiHandler.importSyncState(inState) - const message = state.generateSyncMessage(syncState) - const outState = ApiHandler.exportSyncState(syncState) - return [ outState, message ] -} - -export function receiveSyncMessage(doc: Doc, inState: SyncState, message: SyncMessage) : [ Doc, SyncState, null ] { - const syncState = ApiHandler.importSyncState(inState) - if (doc === undefined || _obj(doc) !== "_root") { - throw new RangeError("must be the document root"); - } - if (_frozen(doc) === true) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (!!_heads(doc) === true) { - throw new RangeError("Attempting to change an out of date document - set at: " + _trace(doc)); - } - if (_readonly(doc) === false) { - throw new RangeError("Calls to Automerge.change cannot be nested") - } - const state = _state(doc) - const heads = state.getHeads() - state.receiveSyncMessage(syncState, message) - _set_heads(doc,heads) - const outState = ApiHandler.exportSyncState(syncState) - return [rootProxy(state, true), outState, null]; -} - -export function initSyncState() : SyncState { - return ApiHandler.exportSyncState(ApiHandler.initSyncState()) -} - -export function encodeChange(change: DecodedChange) : Change { - return ApiHandler.encodeChange(change) -} - -export function 
decodeChange(data: Change) : DecodedChange { - return ApiHandler.decodeChange(data) -} - -export function encodeSyncMessage(message: DecodedSyncMessage) : SyncMessage { - return ApiHandler.encodeSyncMessage(message) -} - -export function decodeSyncMessage(message: SyncMessage) : DecodedSyncMessage { - return ApiHandler.decodeSyncMessage(message) -} - -export function getMissingDeps(doc: Doc, heads: Heads) : Heads { - const state = _state(doc) - return state.getMissingDeps(heads) -} - -export function getHeads(doc: Doc) : Heads { - const state = _state(doc) - return _heads(doc) || state.getHeads() -} - -export function dump(doc: Doc) { - const state = _state(doc) - state.dump() -} - -// FIXME - return T? -export function toJS(doc: Doc) : MaterializeValue { - const state = _state(doc) - const heads = _heads(doc) - return state.materialize("_root", heads) -} - - -function isObject(obj: unknown) : obj is Record { - return typeof obj === 'object' && obj !== null -} diff --git a/automerge-js/src/low_level.ts b/automerge-js/src/low_level.ts deleted file mode 100644 index cf0695d9..00000000 --- a/automerge-js/src/low_level.ts +++ /dev/null @@ -1,25 +0,0 @@ - -import { Automerge, Change, DecodedChange, Actor, SyncState, SyncMessage, JsSyncState, DecodedSyncMessage } from "automerge-types" -import { API } from "automerge-types" - -export function UseApi(api: API) { - for (const k in api) { - ApiHandler[k] = api[k] - } -} - -/* eslint-disable */ -export const ApiHandler : API = { - create(actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called") }, - load(data: Uint8Array, actor?: Actor): Automerge { throw new RangeError("Automerge.use() not called") }, - encodeChange(change: DecodedChange): Change { throw new RangeError("Automerge.use() not called") }, - decodeChange(change: Change): DecodedChange { throw new RangeError("Automerge.use() not called") }, - initSyncState(): SyncState { throw new RangeError("Automerge.use() not called") }, - 
encodeSyncMessage(message: DecodedSyncMessage): SyncMessage { throw new RangeError("Automerge.use() not called") }, - decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage { throw new RangeError("Automerge.use() not called") }, - encodeSyncState(state: SyncState): Uint8Array { throw new RangeError("Automerge.use() not called") }, - decodeSyncState(data: Uint8Array): SyncState { throw new RangeError("Automerge.use() not called") }, - exportSyncState(state: SyncState): JsSyncState { throw new RangeError("Automerge.use() not called") }, - importSyncState(state: JsSyncState): SyncState { throw new RangeError("Automerge.use() not called") }, -} -/* eslint-enable */ diff --git a/automerge-js/src/proxies.ts b/automerge-js/src/proxies.ts deleted file mode 100644 index f202b116..00000000 --- a/automerge-js/src/proxies.ts +++ /dev/null @@ -1,644 +0,0 @@ - -import { Automerge, Heads, ObjID } from "automerge-types" -import { Prop } from "automerge-types" -import { AutomergeValue, ScalarValue, MapValue, ListValue, TextValue } from "./types" -import { Int, Uint, Float64 } from "./numbers" -import { Counter, getWriteableCounter } from "./counter" -import { Text } from "./text" -import { STATE, HEADS, TRACE, FROZEN, OBJECT_ID, READ_ONLY, COUNTER, INT, UINT, F64, TEXT } from "./constants" - -function parseListIndex(key) { - if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10) - if (typeof key !== 'number') { - // throw new TypeError('A list index must be a number, but you passed ' + JSON.stringify(key)) - return key - } - if (key < 0 || isNaN(key) || key === Infinity || key === -Infinity) { - throw new RangeError('A list index must be positive, but you passed ' + key) - } - return key -} - -function valueAt(target, prop: Prop) : AutomergeValue | undefined { - const { context, objectId, path, readonly, heads} = target - const value = context.getWithType(objectId, prop, heads) - if (value === null) { - return - } - const datatype = value[0] - const val = 
value[1] - switch (datatype) { - case undefined: return; - case "map": return mapProxy(context, val, [ ... path, prop ], readonly, heads); - case "list": return listProxy(context, val, [ ... path, prop ], readonly, heads); - case "text": return textProxy(context, val, [ ... path, prop ], readonly, heads); - //case "table": - //case "cursor": - case "str": return val; - case "uint": return val; - case "int": return val; - case "f64": return val; - case "boolean": return val; - case "null": return null; - case "bytes": return val; - case "timestamp": return val; - case "counter": { - if (readonly) { - return new Counter(val); - } else { - return getWriteableCounter(val, context, path, objectId, prop) - } - } - default: - throw RangeError(`datatype ${datatype} unimplemented`) - } -} - -function import_value(value) { - switch (typeof value) { - case 'object': - if (value == null) { - return [ null, "null"] - } else if (value[UINT]) { - return [ value.value, "uint" ] - } else if (value[INT]) { - return [ value.value, "int" ] - } else if (value[F64]) { - return [ value.value, "f64" ] - } else if (value[COUNTER]) { - return [ value.value, "counter" ] - } else if (value[TEXT]) { - return [ value, "text" ] - } else if (value instanceof Date) { - return [ value.getTime(), "timestamp" ] - } else if (value instanceof Uint8Array) { - return [ value, "bytes" ] - } else if (value instanceof Array) { - return [ value, "list" ] - } else if (Object.getPrototypeOf(value) === Object.getPrototypeOf({})) { - return [ value, "map" ] - } else if (value[OBJECT_ID]) { - throw new RangeError('Cannot create a reference to an existing document object') - } else { - throw new RangeError(`Cannot assign unknown object: ${value}`) - } - break; - case 'boolean': - return [ value, "boolean" ] - case 'number': - if (Number.isInteger(value)) { - return [ value, "int" ] - } else { - return [ value, "f64" ] - } - break; - case 'string': - return [ value ] - break; - default: - throw new 
RangeError(`Unsupported type of value: ${typeof value}`) - } -} - -const MapHandler = { - get (target, key) : AutomergeValue { - const { context, objectId, readonly, frozen, heads, cache } = target - if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] } - if (key === OBJECT_ID) return objectId - if (key === READ_ONLY) return readonly - if (key === FROZEN) return frozen - if (key === HEADS) return heads - if (key === TRACE) return target.trace - if (key === STATE) return context; - if (!cache[key]) { - cache[key] = valueAt(target, key) - } - return cache[key] - }, - - set (target, key, val) { - const { context, objectId, path, readonly, frozen} = target - target.cache = {} // reset cache on set - if (val && val[OBJECT_ID]) { - throw new RangeError('Cannot create a reference to an existing document object') - } - if (key === FROZEN) { - target.frozen = val - return true - } - if (key === HEADS) { - target.heads = val - return true - } - if (key === TRACE) { - target.trace = val - return true - } - const [ value, datatype ] = import_value(val) - if (frozen) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (readonly) { - throw new RangeError(`Object property "${key}" cannot be modified`) - } - switch (datatype) { - case "list": { - const list = context.putObject(objectId, key, []) - const proxyList = listProxy(context, list, [ ... path, key ], readonly ); - for (let i = 0; i < value.length; i++) { - proxyList[i] = value[i] - } - break - } - case "text": { - const text = context.putObject(objectId, key, "", "text") - const proxyText = textProxy(context, text, [ ... path, key ], readonly ); - for (let i = 0; i < value.length; i++) { - proxyText[i] = value.get(i) - } - break - } - case "map": { - const map = context.putObject(objectId, key, {}) - const proxyMap = mapProxy(context, map, [ ... 
path, key ], readonly ); - for (const key in value) { - proxyMap[key] = value[key] - } - break; - } - default: - context.put(objectId, key, value, datatype) - } - return true - }, - - deleteProperty (target, key) { - const { context, objectId, readonly } = target - target.cache = {} // reset cache on delete - if (readonly) { - throw new RangeError(`Object property "${key}" cannot be modified`) - } - context.delete(objectId, key) - return true - }, - - has (target, key) { - const value = this.get(target, key) - return value !== undefined - }, - - getOwnPropertyDescriptor (target, key) { - // const { context, objectId } = target - const value = this.get(target, key) - if (typeof value !== 'undefined') { - return { - configurable: true, enumerable: true, value - } - } - }, - - ownKeys (target) { - const { context, objectId, heads} = target - // FIXME - this is a tmp workaround until fix the dupe key bug in keys() - let keys = context.keys(objectId, heads) - return [...new Set(keys)] - }, -} - - -const ListHandler = { - get (target, index) { - const {context, objectId, readonly, frozen, heads } = target - index = parseListIndex(index) - if (index === Symbol.hasInstance) { return (instance) => { return Array.isArray(instance) } } - if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] } - if (index === OBJECT_ID) return objectId - if (index === READ_ONLY) return readonly - if (index === FROZEN) return frozen - if (index === HEADS) return heads - if (index === TRACE) return target.trace - if (index === STATE) return context; - if (index === 'length') return context.length(objectId, heads); - if (index === Symbol.iterator) { - let i = 0; - return function *() { - // FIXME - ugly - let value = valueAt(target, i) - while (value !== undefined) { - yield value - i += 1 - value = valueAt(target, i) - } - } - } - if (typeof index === 'number') { - return valueAt(target, index) - } else { - return listMethods(target)[index] - } - }, - - set (target, index, val) { 
- const {context, objectId, path, readonly, frozen } = target - index = parseListIndex(index) - if (val && val[OBJECT_ID]) { - throw new RangeError('Cannot create a reference to an existing document object') - } - if (index === FROZEN) { - target.frozen = val - return true - } - if (index === HEADS) { - target.heads = val - return true - } - if (index === TRACE) { - target.trace = val - return true - } - if (typeof index == "string") { - throw new RangeError('list index must be a number') - } - const [ value, datatype] = import_value(val) - if (frozen) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (readonly) { - throw new RangeError(`Object property "${index}" cannot be modified`) - } - switch (datatype) { - case "list": { - let list - if (index >= context.length(objectId)) { - list = context.insertObject(objectId, index, []) - } else { - list = context.putObject(objectId, index, []) - } - const proxyList = listProxy(context, list, [ ... path, index ], readonly); - proxyList.splice(0,0,...value) - break; - } - case "text": { - let text - if (index >= context.length(objectId)) { - text = context.insertObject(objectId, index, "", "text") - } else { - text = context.putObject(objectId, index, "", "text") - } - const proxyText = textProxy(context, text, [ ... path, index ], readonly); - proxyText.splice(0,0,...value) - break; - } - case "map": { - let map - if (index >= context.length(objectId)) { - map = context.insertObject(objectId, index, {}) - } else { - map = context.putObject(objectId, index, {}) - } - const proxyMap = mapProxy(context, map, [ ... 
path, index ], readonly); - for (const key in value) { - proxyMap[key] = value[key] - } - break; - } - default: - if (index >= context.length(objectId)) { - context.insert(objectId, index, value, datatype) - } else { - context.put(objectId, index, value, datatype) - } - } - return true - }, - - deleteProperty (target, index) { - const {context, objectId} = target - index = parseListIndex(index) - if (context.get(objectId, index)[0] == "counter") { - throw new TypeError('Unsupported operation: deleting a counter from a list') - } - context.delete(objectId, index) - return true - }, - - has (target, index) { - const {context, objectId, heads} = target - index = parseListIndex(index) - if (typeof index === 'number') { - return index < context.length(objectId, heads) - } - return index === 'length' - }, - - getOwnPropertyDescriptor (target, index) { - const {context, objectId, heads} = target - - if (index === 'length') return {writable: true, value: context.length(objectId, heads) } - if (index === OBJECT_ID) return {configurable: false, enumerable: false, value: objectId} - - index = parseListIndex(index) - - const value = valueAt(target, index) - return { configurable: true, enumerable: true, value } - }, - - getPrototypeOf(target) { return Object.getPrototypeOf(target) }, - ownKeys (/*target*/) : string[] { - const keys : string[] = [] - // uncommenting this causes assert.deepEqual() to fail when comparing to a pojo array - // but not uncommenting it causes for (i in list) {} to not enumerate values properly - //const {context, objectId, heads } = target - //for (let i = 0; i < target.context.length(objectId, heads); i++) { keys.push(i.toString()) } - keys.push("length"); - return keys - } -} - -const TextHandler = Object.assign({}, ListHandler, { - get (target, index) { - // FIXME this is a one line change from ListHandler.get() - const {context, objectId, readonly, frozen, heads } = target - index = parseListIndex(index) - if (index === Symbol.toStringTag) { 
return target[Symbol.toStringTag] } - if (index === Symbol.hasInstance) { return (instance) => { return Array.isArray(instance) } } - if (index === OBJECT_ID) return objectId - if (index === READ_ONLY) return readonly - if (index === FROZEN) return frozen - if (index === HEADS) return heads - if (index === TRACE) return target.trace - if (index === STATE) return context; - if (index === 'length') return context.length(objectId, heads); - if (index === Symbol.iterator) { - let i = 0; - return function *() { - let value = valueAt(target, i) - while (value !== undefined) { - yield value - i += 1 - value = valueAt(target, i) - } - } - } - if (typeof index === 'number') { - return valueAt(target, index) - } else { - return textMethods(target)[index] || listMethods(target)[index] - } - }, - getPrototypeOf(/*target*/) { - return Object.getPrototypeOf(new Text()) - }, -}) - -export function mapProxy(context: Automerge, objectId: ObjID, path?: Prop[], readonly?: boolean, heads?: Heads) : MapValue { - return new Proxy({context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}, MapHandler) -} - -export function listProxy(context: Automerge, objectId: ObjID, path?: Prop[], readonly?: boolean, heads?: Heads) : ListValue { - const target = [] - Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) - return new Proxy(target, ListHandler) -} - -export function textProxy(context: Automerge, objectId: ObjID, path?: Prop[], readonly?: boolean, heads?: Heads) : TextValue { - const target = [] - Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}) - return new Proxy(target, TextHandler) -} - -export function rootProxy(context: Automerge, readonly?: boolean) : T { - /* eslint-disable-next-line */ - return mapProxy(context, "_root", [], !!readonly) -} - -function listMethods(target) { - const {context, objectId, path, readonly, frozen, heads} = target - const methods 
= { - deleteAt(index, numDelete) { - if (typeof numDelete === 'number') { - context.splice(objectId, index, numDelete) - } else { - context.delete(objectId, index) - } - return this - }, - - fill(val: ScalarValue, start: number, end: number) { - // FIXME needs tests - const [value, datatype] = import_value(val) - start = parseListIndex(start || 0) - end = parseListIndex(end || context.length(objectId)) - for (let i = start; i < end; i++) { - context.put(objectId, i, value, datatype) - } - return this - }, - - indexOf(o, start = 0) { - const length = context.length(objectId) - for (let i = start; i < length; i++) { - const value = context.getWithType(objectId, i, heads) - if (value && value[1] === o[OBJECT_ID] || value[1] === o) { - return i - } - } - return -1 - }, - - insertAt(index, ...values) { - this.splice(index, 0, ...values) - return this - }, - - pop() { - const length = context.length(objectId) - if (length == 0) { - return undefined - } - const last = valueAt(target, length - 1) - context.delete(objectId, length - 1) - return last - }, - - push(...values) { - const len = context.length(objectId) - this.splice(len, 0, ...values) - return context.length(objectId) - }, - - shift() { - if (context.length(objectId) == 0) return - const first = valueAt(target, 0) - context.delete(objectId, 0) - return first - }, - - splice(index, del, ...vals) { - index = parseListIndex(index) - del = parseListIndex(del) - for (const val of vals) { - if (val && val[OBJECT_ID]) { - throw new RangeError('Cannot create a reference to an existing document object') - } - } - if (frozen) { - throw new RangeError("Attempting to use an outdated Automerge document") - } - if (readonly) { - throw new RangeError("Sequence object cannot be modified outside of a change block") - } - const result : AutomergeValue[] = [] - for (let i = 0; i < del; i++) { - const value = valueAt(target, index) - if (value !== undefined) { - result.push(value) - } - context.delete(objectId, index) - } - const 
values = vals.map((val) => import_value(val)) - for (const [value,datatype] of values) { - switch (datatype) { - case "list": { - const list = context.insertObject(objectId, index, []) - const proxyList = listProxy(context, list, [ ... path, index ], readonly); - proxyList.splice(0,0,...value) - break; - } - case "text": { - const text = context.insertObject(objectId, index, "", "text") - const proxyText = textProxy(context, text, [ ... path, index ], readonly); - proxyText.splice(0,0,...value) - break; - } - case "map": { - const map = context.insertObject(objectId, index, {}) - const proxyMap = mapProxy(context, map, [ ... path, index ], readonly); - for (const key in value) { - proxyMap[key] = value[key] - } - break; - } - default: - context.insert(objectId, index, value, datatype) - } - index += 1 - } - return result - }, - - unshift(...values) { - this.splice(0, 0, ...values) - return context.length(objectId) - }, - - entries() { - const i = 0; - const iterator = { - next: () => { - const value = valueAt(target, i) - if (value === undefined) { - return { value: undefined, done: true } - } else { - return { value: [ i, value ], done: false } - } - } - } - return iterator - }, - - keys() { - let i = 0; - const len = context.length(objectId, heads) - const iterator = { - next: () => { - let value : undefined | number = undefined - if (i < len) { value = i; i++ } - return { value, done: true } - } - } - return iterator - }, - - values() { - const i = 0; - const iterator = { - next: () => { - const value = valueAt(target, i) - if (value === undefined) { - return { value: undefined, done: true } - } else { - return { value, done: false } - } - } - } - return iterator - } - } - - // Read-only methods that can delegate to the JavaScript built-in implementations - // FIXME - super slow - for (const method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes', - 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight', - 'slice', 'some', 
'toLocaleString', 'toString']) { - methods[method] = (...args) => { - const list : AutomergeValue = [] - let value - do { - value = valueAt(target, list.length) - if (value !== undefined) { - list.push(value) - } - } while (value !== undefined) - - return list[method](...args) - } - } - - return methods -} - -function textMethods(target) { - const {context, objectId, heads } = target - const methods = { - set (index: number, value) { - return this[index] = value - }, - get (index: number) : AutomergeValue { - return this[index] - }, - toString () : string { - return context.text(objectId, heads).replace(//g,'') - }, - toSpans () : AutomergeValue[] { - const spans : AutomergeValue[] = [] - let chars = '' - const length = context.length(objectId) - for (let i = 0; i < length; i++) { - const value = this[i] - if (typeof value === 'string') { - chars += value - } else { - if (chars.length > 0) { - spans.push(chars) - chars = '' - } - spans.push(value) - } - } - if (chars.length > 0) { - spans.push(chars) - } - return spans - }, - toJSON () : string { - return this.toString() - }, - indexOf(o, start = 0) { - const text = context.text(objectId) - return text.indexOf(o,start) - } - } - return methods -} - diff --git a/automerge-js/src/text.ts b/automerge-js/src/text.ts deleted file mode 100644 index d93cd061..00000000 --- a/automerge-js/src/text.ts +++ /dev/null @@ -1,136 +0,0 @@ -import { Value } from "automerge-types" -import { TEXT } from "./constants" - -export class Text { - elems: Value[] - - constructor (text?: string | string[]) { - //const instance = Object.create(Text.prototype) - if (typeof text === 'string') { - this.elems = [...text] - } else if (Array.isArray(text)) { - this.elems = text - } else if (text === undefined) { - this.elems = [] - } else { - throw new TypeError(`Unsupported initial value for Text: ${text}`) - } - Reflect.defineProperty(this, TEXT, { value: true }) - } - - get length () : number { - return this.elems.length - } - - get (index: 
number) : Value | undefined { - return this.elems[index] - } - - /** - * Iterates over the text elements character by character, including any - * inline objects. - */ - [Symbol.iterator] () { - const elems = this.elems - let index = -1 - return { - next () { - index += 1 - if (index < elems.length) { - return {done: false, value: elems[index]} - } else { - return {done: true} - } - } - } - } - - /** - * Returns the content of the Text object as a simple string, ignoring any - * non-character elements. - */ - toString() : string { - // Concatting to a string is faster than creating an array and then - // .join()ing for small (<100KB) arrays. - // https://jsperf.com/join-vs-loop-w-type-test - let str = '' - for (const elem of this.elems) { - if (typeof elem === 'string') str += elem - } - return str - } - - /** - * Returns the content of the Text object as a sequence of strings, - * interleaved with non-character elements. - * - * For example, the value ['a', 'b', {x: 3}, 'c', 'd'] has spans: - * => ['ab', {x: 3}, 'cd'] - */ - toSpans() : Value[] { - const spans : Value[] = [] - let chars = '' - for (const elem of this.elems) { - if (typeof elem === 'string') { - chars += elem - } else { - if (chars.length > 0) { - spans.push(chars) - chars = '' - } - spans.push(elem) - } - } - if (chars.length > 0) { - spans.push(chars) - } - return spans - } - - /** - * Returns the content of the Text object as a simple string, so that the - * JSON serialization of an Automerge document represents text nicely. - */ - toJSON() : string { - return this.toString() - } - - /** - * Updates the list item at position `index` to a new value `value`. - */ - set (index: number, value: Value) { - this.elems[index] = value - } - - /** - * Inserts new list items `values` starting at position `index`. - */ - insertAt(index: number, ...values: Value[]) { - this.elems.splice(index, 0, ... values) - } - - /** - * Deletes `numDelete` list items starting at position `index`. 
- * if `numDelete` is not given, one item is deleted. - */ - deleteAt(index: number, numDelete = 1) { - this.elems.splice(index, numDelete) - } - - map(callback: (e: Value) => T) { - this.elems.map(callback) - } - - -} - -// Read-only methods that can delegate to the JavaScript built-in array -for (const method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes', - 'indexOf', 'join', 'lastIndexOf', 'reduce', 'reduceRight', - 'slice', 'some', 'toLocaleString']) { - Text.prototype[method] = function (...args) { - const array = [...this] - return array[method](...args) - } -} - diff --git a/automerge-js/src/types.ts b/automerge-js/src/types.ts deleted file mode 100644 index e75a3854..00000000 --- a/automerge-js/src/types.ts +++ /dev/null @@ -1,12 +0,0 @@ - -export { Text } from "./text" -export { Counter } from "./counter" -export { Int, Uint, Float64 } from "./numbers" - -import { Counter } from "./counter" - -export type AutomergeValue = ScalarValue | { [key: string]: AutomergeValue } | Array -export type MapValue = { [key: string]: AutomergeValue } -export type ListValue = Array -export type TextValue = Array -export type ScalarValue = string | number | null | boolean | Date | Counter | Uint8Array diff --git a/automerge-js/src/uuid.ts b/automerge-js/src/uuid.ts deleted file mode 100644 index 5ddb5ae6..00000000 --- a/automerge-js/src/uuid.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { v4 } from 'uuid' - -function defaultFactory() { - return v4().replace(/-/g, '') -} - -let factory = defaultFactory - -interface UUIDFactory extends Function { - setFactory(f: typeof factory): void; - reset(): void; -} - -export const uuid : UUIDFactory = () => { - return factory() -} - -uuid.setFactory = newFactory => { factory = newFactory } - -uuid.reset = () => { factory = defaultFactory } - diff --git a/automerge-js/test/basic_test.ts b/automerge-js/test/basic_test.ts deleted file mode 100644 index d2e98939..00000000 --- a/automerge-js/test/basic_test.ts +++ 
/dev/null @@ -1,178 +0,0 @@ -import * as tt from "automerge-types" -import * as assert from 'assert' -import * as util from 'util' -import * as Automerge from '../src' -import * as AutomergeWASM from "automerge-wasm" - -Automerge.use(AutomergeWASM) - -describe('Automerge', () => { - describe('basics', () => { - it('should init clone and free', () => { - let doc1 = Automerge.init() - let doc2 = Automerge.clone(doc1); - }) - - it('handle basic set and read on root object', () => { - let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, (d) => { - d.hello = "world" - d.big = "little" - d.zip = "zop" - d.app = "dap" - assert.deepEqual(d, { hello: "world", big: "little", zip: "zop", app: "dap" }) - }) - assert.deepEqual(doc2, { hello: "world", big: "little", zip: "zop", app: "dap" }) - }) - - it('handle basic sets over many changes', () => { - let doc1 = Automerge.init() - let timestamp = new Date(); - let counter = new Automerge.Counter(100); - let bytes = new Uint8Array([10,11,12]); - let doc2 = Automerge.change(doc1, (d) => { - d.hello = "world" - }) - let doc3 = Automerge.change(doc2, (d) => { - d.counter1 = counter - }) - let doc4 = Automerge.change(doc3, (d) => { - d.timestamp1 = timestamp - }) - let doc5 = Automerge.change(doc4, (d) => { - d.app = null - }) - let doc6 = Automerge.change(doc5, (d) => { - d.bytes1 = bytes - }) - let doc7 = Automerge.change(doc6, (d) => { - d.uint = new Automerge.Uint(1) - d.int = new Automerge.Int(-1) - d.float64 = new Automerge.Float64(5.5) - d.number1 = 100 - d.number2 = -45.67 - d.true = true - d.false = false - }) - - assert.deepEqual(doc7, { hello: "world", true: true, false: false, int: -1, uint: 1, float64: 5.5, number1: 100, number2: -45.67, counter1: counter, timestamp1: timestamp, bytes1: bytes, app: null }) - - let changes = Automerge.getAllChanges(doc7) - let t1 = Automerge.init() - ;let [t2] = Automerge.applyChanges(t1, changes) - assert.deepEqual(doc7,t2) - }) - - it('handle overwrites to values', () => { - 
let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, (d) => { - d.hello = "world1" - }) - let doc3 = Automerge.change(doc2, (d) => { - d.hello = "world2" - }) - let doc4 = Automerge.change(doc3, (d) => { - d.hello = "world3" - }) - let doc5 = Automerge.change(doc4, (d) => { - d.hello = "world4" - }) - assert.deepEqual(doc5, { hello: "world4" } ) - }) - - it('handle set with object value', () => { - let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, (d) => { - d.subobj = { hello: "world", subsubobj: { zip: "zop" } } - }) - assert.deepEqual(doc2, { subobj: { hello: "world", subsubobj: { zip: "zop" } } }) - }) - - it('handle simple list creation', () => { - let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, (d) => d.list = []) - assert.deepEqual(doc2, { list: []}) - }) - - it('handle simple lists', () => { - let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, (d) => { - d.list = [ 1, 2, 3 ] - }) - assert.deepEqual(doc2.list.length, 3) - assert.deepEqual(doc2.list[0], 1) - assert.deepEqual(doc2.list[1], 2) - assert.deepEqual(doc2.list[2], 3) - assert.deepEqual(doc2, { list: [1,2,3] }) - // assert.deepStrictEqual(Automerge.toJS(doc2), { list: [1,2,3] }) - - let doc3 = Automerge.change(doc2, (d) => { - d.list[1] = "a" - }) - - assert.deepEqual(doc3.list.length, 3) - assert.deepEqual(doc3.list[0], 1) - assert.deepEqual(doc3.list[1], "a") - assert.deepEqual(doc3.list[2], 3) - assert.deepEqual(doc3, { list: [1,"a",3] }) - }) - it('handle simple lists', () => { - let doc1 = Automerge.init() - let doc2 = Automerge.change(doc1, (d) => { - d.list = [ 1, 2, 3 ] - }) - let changes = Automerge.getChanges(doc1, doc2) - let docB1 = Automerge.init() - ;let [docB2] = Automerge.applyChanges(docB1, changes) - assert.deepEqual(docB2, doc2); - }) - it('handle text', () => { - let doc1 = Automerge.init() - let tmp = new Automerge.Text("hello") - let doc2 = Automerge.change(doc1, (d) => { - d.list = new Automerge.Text("hello") - 
d.list.insertAt(2,"Z") - }) - let changes = Automerge.getChanges(doc1, doc2) - let docB1 = Automerge.init() - ;let [docB2] = Automerge.applyChanges(docB1, changes) - assert.deepEqual(docB2, doc2); - }) - - it('have many list methods', () => { - let doc1 = Automerge.from({ list: [1,2,3] }) - assert.deepEqual(doc1, { list: [1,2,3] }); - let doc2 = Automerge.change(doc1, (d) => { - d.list.splice(1,1,9,10) - }) - assert.deepEqual(doc2, { list: [1,9,10,3] }); - let doc3 = Automerge.change(doc2, (d) => { - d.list.push(11,12) - }) - assert.deepEqual(doc3, { list: [1,9,10,3,11,12] }); - let doc4 = Automerge.change(doc3, (d) => { - d.list.unshift(2,2) - }) - assert.deepEqual(doc4, { list: [2,2,1,9,10,3,11,12] }); - let doc5 = Automerge.change(doc4, (d) => { - d.list.shift() - }) - assert.deepEqual(doc5, { list: [2,1,9,10,3,11,12] }); - let doc6 = Automerge.change(doc5, (d) => { - d.list.insertAt(3,100,101) - }) - assert.deepEqual(doc6, { list: [2,1,9,100,101,10,3,11,12] }); - }) - - it('allows access to the backend', () => { - let doc = Automerge.init() - assert.deepEqual(Object.keys(Automerge.getBackend(doc)), ["ptr"]) - }) - - it('lists and text have indexof', () => { - let doc = Automerge.from({ list: [0,1,2,3,4,5,6], text: new Automerge.Text("hello world") }) - console.log(doc.list.indexOf(5)) - console.log(doc.text.indexOf("world")) - }) - }) -}) diff --git a/automerge-js/test/columnar_test.ts b/automerge-js/test/columnar_test.ts deleted file mode 100644 index fc01741b..00000000 --- a/automerge-js/test/columnar_test.ts +++ /dev/null @@ -1,100 +0,0 @@ -import * as assert from 'assert' -import { checkEncoded } from './helpers' -import * as Automerge from '../src' -import { encodeChange, decodeChange } from '../src' -import * as AutomergeWASM from "automerge-wasm" - -Automerge.use(AutomergeWASM) - -describe('change encoding', () => { - it('should encode text edits', () => { - /* - const change1 = {actor: 'aaaa', seq: 1, startOp: 1, time: 9, message: '', deps: [], ops: [ - 
{action: 'makeText', obj: '_root', key: 'text', insert: false, pred: []}, - {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'h', pred: []}, - {action: 'del', obj: '1@aaaa', elemId: '2@aaaa', insert: false, pred: ['2@aaaa']}, - {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'H', pred: []}, - {action: 'set', obj: '1@aaaa', elemId: '4@aaaa', insert: true, value: 'i', pred: []} - ]} - */ - const change1 = {actor: 'aaaa', seq: 1, startOp: 1, time: 9, message: null, deps: [], ops: [ - {action: 'makeText', obj: '_root', key: 'text', pred: []}, - {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'h', pred: []}, - {action: 'del', obj: '1@aaaa', elemId: '2@aaaa', pred: ['2@aaaa']}, - {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'H', pred: []}, - {action: 'set', obj: '1@aaaa', elemId: '4@aaaa', insert: true, value: 'i', pred: []} - ]} - checkEncoded(encodeChange(change1), [ - 0x85, 0x6f, 0x4a, 0x83, // magic bytes - 0xe2, 0xbd, 0xfb, 0xf5, // checksum - 1, 94, 0, 2, 0xaa, 0xaa, // chunkType: change, length, deps, actor 'aaaa' - 1, 1, 9, 0, 0, // seq, startOp, time, message, actor list - 12, 0x01, 4, 0x02, 4, // column count, objActor, objCtr - 0x11, 8, 0x13, 7, 0x15, 8, // keyActor, keyCtr, keyStr - 0x34, 4, 0x42, 6, // insert, action - 0x56, 6, 0x57, 3, // valLen, valRaw - 0x70, 6, 0x71, 2, 0x73, 2, // predNum, predActor, predCtr - 0, 1, 4, 0, // objActor column: null, 0, 0, 0, 0 - 0, 1, 4, 1, // objCtr column: null, 1, 1, 1, 1 - 0, 2, 0x7f, 0, 0, 1, 0x7f, 0, // keyActor column: null, null, 0, null, 0 - 0, 1, 0x7c, 0, 2, 0x7e, 4, // keyCtr column: null, 0, 2, 0, 4 - 0x7f, 4, 0x74, 0x65, 0x78, 0x74, 0, 4, // keyStr column: 'text', null, null, null, null - 1, 1, 1, 2, // insert column: false, true, false, true, true - 0x7d, 4, 1, 3, 2, 1, // action column: makeText, set, del, set, set - 0x7d, 0, 0x16, 0, 2, 0x16, // valLen column: 0, 0x16, 0, 0x16, 0x16 - 0x68, 0x48, 0x69, // valRaw column: 
'h', 'H', 'i' - 2, 0, 0x7f, 1, 2, 0, // predNum column: 0, 0, 1, 0, 0 - 0x7f, 0, // predActor column: 0 - 0x7f, 2 // predCtr column: 2 - ]) - const decoded = decodeChange(encodeChange(change1)) - assert.deepStrictEqual(decoded, Object.assign({hash: decoded.hash}, change1)) - }) - - // FIXME - skipping this b/c it was never implemented in the rust impl and isnt trivial -/* - it.skip('should require strict ordering of preds', () => { - const change = new Uint8Array([ - 133, 111, 74, 131, 31, 229, 112, 44, 1, 105, 1, 58, 30, 190, 100, 253, 180, 180, 66, 49, 126, - 81, 142, 10, 3, 35, 140, 189, 231, 34, 145, 57, 66, 23, 224, 149, 64, 97, 88, 140, 168, 194, - 229, 4, 244, 209, 58, 138, 67, 140, 1, 152, 236, 250, 2, 0, 1, 4, 55, 234, 66, 242, 8, 21, 11, - 52, 1, 66, 2, 86, 3, 87, 10, 112, 2, 113, 3, 115, 4, 127, 9, 99, 111, 109, 109, 111, 110, 86, - 97, 114, 1, 127, 1, 127, 166, 1, 52, 48, 57, 49, 52, 57, 52, 53, 56, 50, 127, 2, 126, 0, 1, - 126, 139, 1, 0 - ]) - assert.throws(() => { decodeChange(change) }, /operation IDs are not in ascending order/) - }) -*/ - - describe('with trailing bytes', () => { - let change = new Uint8Array([ - 0x85, 0x6f, 0x4a, 0x83, // magic bytes - 0xb2, 0x98, 0x9e, 0xa9, // checksum - 1, 61, 0, 2, 0x12, 0x34, // chunkType: change, length, deps, actor '1234' - 1, 1, 252, 250, 220, 255, 5, // seq, startOp, time - 14, 73, 110, 105, 116, 105, 97, 108, 105, 122, 97, 116, 105, 111, 110, // message: 'Initialization' - 0, 6, // actor list, column count - 0x15, 3, 0x34, 1, 0x42, 2, // keyStr, insert, action - 0x56, 2, 0x57, 1, 0x70, 2, // valLen, valRaw, predNum - 0x7f, 1, 0x78, // keyStr: 'x' - 1, // insert: false - 0x7f, 1, // action: set - 0x7f, 19, // valLen: 1 byte of type uint - 1, // valRaw: 1 - 0x7f, 0, // predNum: 0 - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 // 10 trailing bytes - ]) - - it('should allow decoding and re-encoding', () => { - // NOTE: This calls the JavaScript encoding and decoding functions, even when the WebAssembly - // backend is 
loaded. Should the wasm backend export its own functions for testing? - checkEncoded(change, encodeChange(decodeChange(change))) - }) - - it('should be preserved in document encoding', () => { - const [doc] = Automerge.applyChanges(Automerge.init(), [change]) - const [reconstructed] = Automerge.getAllChanges(Automerge.load(Automerge.save(doc))) - checkEncoded(change, reconstructed) - }) - }) -}) diff --git a/automerge-js/test/legacy_tests.ts b/automerge-js/test/legacy_tests.ts deleted file mode 100644 index 50cecbc4..00000000 --- a/automerge-js/test/legacy_tests.ts +++ /dev/null @@ -1,1421 +0,0 @@ -import * as assert from 'assert' -import * as Automerge from '../src' -import { assertEqualsOneOf } from './helpers' -import { decodeChange } from './legacy/columnar' -import * as AutomergeWASM from "automerge-wasm" - -Automerge.use(AutomergeWASM) - -const UUID_PATTERN = /^[0-9a-f]{32}$/ -const OPID_PATTERN = /^[0-9]+@[0-9a-f]{32}$/ - -// CORE FEATURES -// -// TODO - Cursors -// TODO - Tables -// TODO - on-pass load() & reconstruct change from opset -// TODO - micro-patches (needed for fully hydrated object in js) -// TODO - valueAt(heads) / GC -// -// AUTOMERGE UNSUPPORTED -// -// TODO - patchCallback - - -describe('Automerge', () => { - describe('initialization ', () => { - it('should initially be an empty map', () => { - const doc = Automerge.init() - assert.deepStrictEqual(doc, {}) - }) - - it('should allow instantiating from an existing object', () => { - const initialState = { birds: { wrens: 3, magpies: 4 } } - const doc = Automerge.from(initialState) - assert.deepStrictEqual(doc, initialState) - }) - - it('should allow merging of an object initialized with `from`', () => { - let doc1 = Automerge.from({ cards: [] }) - let doc2 = Automerge.merge(Automerge.init(), doc1) - assert.deepStrictEqual(doc2, { cards: [] }) - }) - - it('should allow passing an actorId when instantiating from an existing object', () => { - const actorId = '1234' - let doc = Automerge.from({ 
foo: 1 }, actorId) - assert.strictEqual(Automerge.getActorId(doc), '1234') - }) - - it('accepts an empty object as initial state', () => { - const doc = Automerge.from({}) - assert.deepStrictEqual(doc, {}) - }) - - it('accepts an array as initial state, but converts it to an object', () => { - const doc = Automerge.from(['a', 'b', 'c']) - assert.deepStrictEqual(doc, { '0': 'a', '1': 'b', '2': 'c' }) - }) - - it('accepts strings as initial values, but treats them as an array of characters', () => { - const doc = Automerge.from('abc') - assert.deepStrictEqual(doc, { '0': 'a', '1': 'b', '2': 'c' }) - }) - - it('ignores numbers provided as initial values', () => { - const doc = Automerge.from(123) - assert.deepStrictEqual(doc, {}) - }) - - it('ignores booleans provided as initial values', () => { - const doc1 = Automerge.from(false) - assert.deepStrictEqual(doc1, {}) - const doc2 = Automerge.from(true) - assert.deepStrictEqual(doc2, {}) - }) - }) - - describe('sequential use', () => { - let s1, s2 - beforeEach(() => { - s1 = Automerge.init() - }) - - it('should not mutate objects', () => { - s2 = Automerge.change(s1, doc => doc.foo = 'bar') - assert.strictEqual(s1.foo, undefined) - assert.strictEqual(s2.foo, 'bar') - }) - - it('changes should be retrievable', () => { - const change1 = Automerge.getLastLocalChange(s1) - s2 = Automerge.change(s1, doc => doc.foo = 'bar') - const change2 = Automerge.getLastLocalChange(s2) - assert.strictEqual(change1, undefined) - const change = decodeChange(change2) - assert.deepStrictEqual(change, { - actor: change.actor, deps: [], seq: 1, startOp: 1, - hash: change.hash, message: '', time: change.time, - ops: [{obj: '_root', key: 'foo', action: 'set', insert: false, value: 'bar', pred: []}] - }) - }) - - it('should not register any conflicts on repeated assignment', () => { - assert.strictEqual(Automerge.getConflicts(s1, 'foo'), undefined) - s1 = Automerge.change(s1, 'change', doc => doc.foo = 'one') - 
assert.strictEqual(Automerge.getConflicts(s1, 'foo'), undefined) - s1 = Automerge.change(s1, 'change', doc => doc.foo = 'two') - assert.strictEqual(Automerge.getConflicts(s1, 'foo'), undefined) - }) - - describe('changes', () => { - it('should group several changes', () => { - s2 = Automerge.change(s1, 'change message', doc => { - doc.first = 'one' - assert.strictEqual(doc.first, 'one') - doc.second = 'two' - assert.deepStrictEqual(doc, { - first: 'one', second: 'two' - }) - }) - assert.deepStrictEqual(s1, {}) - assert.deepStrictEqual(s2, {first: 'one', second: 'two'}) - }) - - it('should freeze objects if desired', () => { - s1 = Automerge.init({freeze: true}) - s2 = Automerge.change(s1, doc => doc.foo = 'bar') - try { - s2.foo = 'lemon' - } catch (e) { } - assert.strictEqual(s2.foo, 'bar') - - let deleted = false - try { - deleted = delete s2.foo - } catch (e) { } - assert.strictEqual(s2.foo, 'bar') - assert.strictEqual(deleted, false) - - Automerge.change(s2, () => { - try { - s2.foo = 'lemon' - } catch (e) { } - assert.strictEqual(s2.foo, 'bar') - }) - - assert.throws(() => { Object.assign(s2, {x: 4}) }) - assert.strictEqual(s2.x, undefined) - }) - - it('should allow repeated reading and writing of values', () => { - s2 = Automerge.change(s1, 'change message', doc => { - doc.value = 'a' - assert.strictEqual(doc.value, 'a') - doc.value = 'b' - doc.value = 'c' - assert.strictEqual(doc.value, 'c') - }) - assert.deepStrictEqual(s1, {}) - assert.deepStrictEqual(s2, {value: 'c'}) - }) - - it('should not record conflicts when writing the same field several times within one change', () => { - s1 = Automerge.change(s1, 'change message', doc => { - doc.value = 'a' - doc.value = 'b' - doc.value = 'c' - }) - assert.strictEqual(s1.value, 'c') - assert.strictEqual(Automerge.getConflicts(s1, 'value'), undefined) - }) - - it('should return the unchanged state object if nothing changed', () => { - s2 = Automerge.change(s1, () => {}) - assert.strictEqual(s2, s1) - }) - - 
it('should ignore field updates that write the existing value', () => { - s1 = Automerge.change(s1, doc => doc.field = 123) - s2 = Automerge.change(s1, doc => doc.field = 123) - assert.strictEqual(s2, s1) - }) - - it('should not ignore field updates that resolve a conflict', () => { - s2 = Automerge.merge(Automerge.init(), s1) - s1 = Automerge.change(s1, doc => doc.field = 123) - s2 = Automerge.change(s2, doc => doc.field = 321) - s1 = Automerge.merge(s1, s2) - assert.strictEqual(Object.keys(Automerge.getConflicts(s1, 'field')).length, 2) - const resolved = Automerge.change(s1, doc => doc.field = s1.field) - assert.notStrictEqual(resolved, s1) - assert.deepStrictEqual(resolved, {field: s1.field}) - assert.strictEqual(Automerge.getConflicts(resolved, 'field'), undefined) - }) - - it('should ignore list element updates that write the existing value', () => { - s1 = Automerge.change(s1, doc => doc.list = [123]) - s2 = Automerge.change(s1, doc => doc.list[0] = 123) - assert.strictEqual(s2, s1) - }) - - it('should not ignore list element updates that resolve a conflict', () => { - s1 = Automerge.change(s1, doc => doc.list = [1]) - s2 = Automerge.merge(Automerge.init(), s1) - s1 = Automerge.change(s1, doc => doc.list[0] = 123) - s2 = Automerge.change(s2, doc => doc.list[0] = 321) - s1 = Automerge.merge(s1, s2) - assert.deepStrictEqual(Automerge.getConflicts(s1.list, 0), { - [`3@${Automerge.getActorId(s1)}`]: 123, - [`3@${Automerge.getActorId(s2)}`]: 321 - }) - const resolved = Automerge.change(s1, doc => doc.list[0] = s1.list[0]) - assert.deepStrictEqual(resolved, s1) - assert.notStrictEqual(resolved, s1) - assert.strictEqual(Automerge.getConflicts(resolved.list, 0), undefined) - }) - - it('should sanity-check arguments', () => { - s1 = Automerge.change(s1, doc => doc.nested = {}) - assert.throws(() => { Automerge.change({}, doc => doc.foo = 'bar') }, /must be the document root/) - assert.throws(() => { Automerge.change(s1.nested, doc => doc.foo = 'bar') }, /must be the 
document root/) - }) - - it('should not allow nested change blocks', () => { - assert.throws(() => { - Automerge.change(s1, doc1 => { - Automerge.change(doc1, doc2 => { - doc2.foo = 'bar' - }) - }) - }, /Calls to Automerge.change cannot be nested/) - assert.throws(() => { - s1 = Automerge.change(s1, doc1 => { - s2 = Automerge.change(s1, doc2 => doc2.two = 2) - doc1.one = 1 - }) - }, /Attempting to use an outdated Automerge document/) - }) - - it('should not allow the same base document to be used for multiple changes', () => { - assert.throws(() => { - Automerge.change(s1, doc => doc.one = 1) - Automerge.change(s1, doc => doc.two = 2) - }, /Attempting to use an outdated Automerge document/) - }) - - it('should allow a document to be cloned', () => { - s1 = Automerge.change(s1, doc => doc.zero = 0) - s2 = Automerge.clone(s1) - s1 = Automerge.change(s1, doc => doc.one = 1) - s2 = Automerge.change(s2, doc => doc.two = 2) - assert.deepStrictEqual(s1, {zero: 0, one: 1}) - assert.deepStrictEqual(s2, {zero: 0, two: 2}) - Automerge.free(s1) - Automerge.free(s2) - }) - - it('should work with Object.assign merges', () => { - s1 = Automerge.change(s1, doc1 => { - doc1.stuff = {foo: 'bar', baz: 'blur'} - }) - s1 = Automerge.change(s1, doc1 => { - doc1.stuff = Object.assign({}, doc1.stuff, {baz: 'updated!'}) - }) - assert.deepStrictEqual(s1, {stuff: {foo: 'bar', baz: 'updated!'}}) - }) - - it('should support Date objects in maps', () => { - const now = new Date() - s1 = Automerge.change(s1, doc => doc.now = now) - let changes = Automerge.getAllChanges(s1) - ;[s2] = Automerge.applyChanges(Automerge.init(), changes) - assert.strictEqual(s2.now instanceof Date, true) - assert.strictEqual(s2.now.getTime(), now.getTime()) - }) - - it('should support Date objects in lists', () => { - const now = new Date() - s1 = Automerge.change(s1, doc => doc.list = [now]) - let changes = Automerge.getAllChanges(s1) - ;[s2] = Automerge.applyChanges(Automerge.init(), changes) - 
assert.strictEqual(s2.list[0] instanceof Date, true) - assert.strictEqual(s2.list[0].getTime(), now.getTime()) - }) - - /* - it.skip('should call patchCallback if supplied', () => { - const callbacks = [], actor = Automerge.getActorId(s1) - const s2 = Automerge.change(s1, { - patchCallback: (patch, before, after, local) => callbacks.push({patch, before, after, local}) - }, doc => { - doc.birds = ['Goldfinch'] - }) - assert.strictEqual(callbacks.length, 1) - assert.deepStrictEqual(callbacks[0].patch, { - actor, seq: 1, maxOp: 2, deps: [], clock: {[actor]: 1}, pendingChanges: 0, - diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: { - objectId: `1@${actor}`, type: 'list', edits: [ - {action: 'insert', index: 0, elemId: `2@${actor}`, opId: `2@${actor}`, value: {'type': 'value', value: 'Goldfinch'}} - ] - }}}} - }) - assert.strictEqual(callbacks[0].before, s1) - assert.strictEqual(callbacks[0].after, s2) - assert.strictEqual(callbacks[0].local, true) - }) - */ - - /* - it.skip('should call a patchCallback set up on document initialisation', () => { - const callbacks = [] - s1 = Automerge.init({ - patchCallback: (patch, before, after, local) => callbacks.push({patch, before, after, local}) - }) - const s2 = Automerge.change(s1, doc => doc.bird = 'Goldfinch') - const actor = Automerge.getActorId(s1) - assert.strictEqual(callbacks.length, 1) - assert.deepStrictEqual(callbacks[0].patch, { - actor, seq: 1, maxOp: 1, deps: [], clock: {[actor]: 1}, pendingChanges: 0, - diffs: {objectId: '_root', type: 'map', props: {bird: {[`1@${actor}`]: {type: 'value', value: 'Goldfinch'}}}} - }) - assert.strictEqual(callbacks[0].before, s1) - assert.strictEqual(callbacks[0].after, s2) - assert.strictEqual(callbacks[0].local, true) - }) - */ - }) - - describe('emptyChange()', () => { - it('should append an empty change to the history', () => { - s1 = Automerge.change(s1, 'first change', doc => doc.field = 123) - s2 = Automerge.emptyChange(s1, 'empty change') - 
assert.notStrictEqual(s2, s1) - assert.deepStrictEqual(s2, s1) - assert.deepStrictEqual(Automerge.getHistory(s2).map(state => state.change.message), ['first change', 'empty change']) - }) - - it('should reference dependencies', () => { - s1 = Automerge.change(s1, doc => doc.field = 123) - s2 = Automerge.merge(Automerge.init(), s1) - s2 = Automerge.change(s2, doc => doc.other = 'hello') - s1 = Automerge.emptyChange(Automerge.merge(s1, s2)) - const history = Automerge.getHistory(s1) - const emptyChange = history[2].change - assert.deepStrictEqual(emptyChange.deps, [history[0].change.hash, history[1].change.hash].sort()) - assert.deepStrictEqual(emptyChange.ops, []) - }) - }) - - describe('root object', () => { - it('should handle single-property assignment', () => { - s1 = Automerge.change(s1, 'set bar', doc => doc.foo = 'bar') - s1 = Automerge.change(s1, 'set zap', doc => doc.zip = 'zap') - assert.strictEqual(s1.foo, 'bar') - assert.strictEqual(s1.zip, 'zap') - assert.deepStrictEqual(s1, {foo: 'bar', zip: 'zap'}) - }) - - it('should allow floating-point values', () => { - s1 = Automerge.change(s1, doc => doc.number = 1589032171.1) - assert.strictEqual(s1.number, 1589032171.1) - }) - - it('should handle multi-property assignment', () => { - s1 = Automerge.change(s1, 'multi-assign', doc => { - Object.assign(doc, {foo: 'bar', answer: 42}) - }) - assert.strictEqual(s1.foo, 'bar') - assert.strictEqual(s1.answer, 42) - assert.deepStrictEqual(s1, {foo: 'bar', answer: 42}) - }) - - it('should handle root property deletion', () => { - s1 = Automerge.change(s1, 'set foo', doc => { doc.foo = 'bar'; doc.something = null }) - s1 = Automerge.change(s1, 'del foo', doc => { delete doc.foo }) - assert.strictEqual(s1.foo, undefined) - assert.strictEqual(s1.something, null) - assert.deepStrictEqual(s1, {something: null}) - }) - - it('should follow JS delete behavior', () => { - s1 = Automerge.change(s1, 'set foo', doc => { doc.foo = 'bar' }) - let deleted - s1 = Automerge.change(s1, 
'del foo', doc => { - deleted = delete doc.foo - }) - assert.strictEqual(deleted, true) - let deleted2 - assert.doesNotThrow(() => { - s1 = Automerge.change(s1, 'del baz', doc => { - deleted2 = delete doc.baz - }) - }) - assert.strictEqual(deleted2, true) - }) - - it('should allow the type of a property to be changed', () => { - s1 = Automerge.change(s1, 'set number', doc => doc.prop = 123) - assert.strictEqual(s1.prop, 123) - s1 = Automerge.change(s1, 'set string', doc => doc.prop = '123') - assert.strictEqual(s1.prop, '123') - s1 = Automerge.change(s1, 'set null', doc => doc.prop = null) - assert.strictEqual(s1.prop, null) - s1 = Automerge.change(s1, 'set bool', doc => doc.prop = true) - assert.strictEqual(s1.prop, true) - }) - - it('should require property names to be valid', () => { - assert.throws(() => { - Automerge.change(s1, 'foo', doc => doc[''] = 'x') - }, /must not be an empty string/) - }) - - it('should not allow assignment of unsupported datatypes', () => { - Automerge.change(s1, doc => { - assert.throws(() => { doc.foo = undefined }, /Unsupported type of value: undefined/) - assert.throws(() => { doc.foo = {prop: undefined} }, /Unsupported type of value: undefined/) - assert.throws(() => { doc.foo = () => {} }, /Unsupported type of value: function/) - assert.throws(() => { doc.foo = Symbol('foo') }, /Unsupported type of value: symbol/) - }) - }) - }) - - describe('nested maps', () => { - it('should assign an objectId to nested maps', () => { - s1 = Automerge.change(s1, doc => { doc.nested = {} }) - let id = Automerge.getObjectId(s1.nested) - assert.strictEqual(OPID_PATTERN.test(Automerge.getObjectId(s1.nested)), true) - assert.notEqual(Automerge.getObjectId(s1.nested), '_root') - }) - - it('should handle assignment of a nested property', () => { - s1 = Automerge.change(s1, 'first change', doc => { - doc.nested = {} - doc.nested.foo = 'bar' - }) - s1 = Automerge.change(s1, 'second change', doc => { - doc.nested.one = 1 - }) - 
assert.deepStrictEqual(s1, {nested: {foo: 'bar', one: 1}}) - assert.deepStrictEqual(s1.nested, {foo: 'bar', one: 1}) - assert.strictEqual(s1.nested.foo, 'bar') - assert.strictEqual(s1.nested.one, 1) - }) - - it('should handle assignment of an object literal', () => { - s1 = Automerge.change(s1, doc => { - doc.textStyle = {bold: false, fontSize: 12} - }) - assert.deepStrictEqual(s1, {textStyle: {bold: false, fontSize: 12}}) - assert.deepStrictEqual(s1.textStyle, {bold: false, fontSize: 12}) - assert.strictEqual(s1.textStyle.bold, false) - assert.strictEqual(s1.textStyle.fontSize, 12) - }) - - it('should handle assignment of multiple nested properties', () => { - s1 = Automerge.change(s1, doc => { - doc.textStyle = {bold: false, fontSize: 12} - Object.assign(doc.textStyle, {typeface: 'Optima', fontSize: 14}) - }) - assert.strictEqual(s1.textStyle.typeface, 'Optima') - assert.strictEqual(s1.textStyle.bold, false) - assert.strictEqual(s1.textStyle.fontSize, 14) - assert.deepStrictEqual(s1.textStyle, {typeface: 'Optima', bold: false, fontSize: 14}) - }) - - it('should handle arbitrary-depth nesting', () => { - s1 = Automerge.change(s1, doc => { - doc.a = {b: {c: {d: {e: {f: {g: 'h'}}}}}} - }) - s1 = Automerge.change(s1, doc => { - doc.a.b.c.d.e.f.i = 'j' - }) - assert.deepStrictEqual(s1, {a: { b: { c: { d: { e: { f: { g: 'h', i: 'j'}}}}}}}) - assert.strictEqual(s1.a.b.c.d.e.f.g, 'h') - assert.strictEqual(s1.a.b.c.d.e.f.i, 'j') - }) - - it('should allow an old object to be replaced with a new one', () => { - s1 = Automerge.change(s1, 'change 1', doc => { - doc.myPet = {species: 'dog', legs: 4, breed: 'dachshund'} - }) - s2 = Automerge.change(s1, 'change 2', doc => { - doc.myPet = {species: 'koi', variety: '紅白', colors: {red: true, white: true, black: false}} - }) - assert.deepStrictEqual(s1.myPet, { - species: 'dog', legs: 4, breed: 'dachshund' - }) - assert.strictEqual(s1.myPet.breed, 'dachshund') - assert.deepStrictEqual(s2.myPet, { - species: 'koi', variety: '紅白', - 
colors: {red: true, white: true, black: false} - }) - assert.strictEqual(s2.myPet.breed, undefined) - assert.strictEqual(s2.myPet.variety, '紅白') - }) - - it('should allow fields to be changed between primitive and nested map', () => { - s1 = Automerge.change(s1, doc => doc.color = '#ff7f00') - assert.strictEqual(s1.color, '#ff7f00') - s1 = Automerge.change(s1, doc => doc.color = {red: 255, green: 127, blue: 0}) - assert.deepStrictEqual(s1.color, {red: 255, green: 127, blue: 0}) - s1 = Automerge.change(s1, doc => doc.color = '#ff7f00') - assert.strictEqual(s1.color, '#ff7f00') - }) - - it('should not allow several references to the same map object', () => { - s1 = Automerge.change(s1, doc => doc.object = {}) - assert.throws(() => { - Automerge.change(s1, doc => { doc.x = doc.object }) - }, /Cannot create a reference to an existing document object/) - assert.throws(() => { - Automerge.change(s1, doc => { doc.x = s1.object }) - }, /Cannot create a reference to an existing document object/) - assert.throws(() => { - Automerge.change(s1, doc => { doc.x = {}; doc.y = doc.x }) - }, /Cannot create a reference to an existing document object/) - }) - - it('should not allow object-copying idioms', () => { - s1 = Automerge.change(s1, doc => { - doc.items = [{id: 'id1', name: 'one'}, {id: 'id2', name: 'two'}] - }) - // People who have previously worked with immutable state in JavaScript may be tempted - // to use idioms like this, which don't work well with Automerge -- see e.g. 
- // https://github.com/automerge/automerge/issues/260 - assert.throws(() => { - Automerge.change(s1, doc => { - doc.items = [...doc.items, {id: 'id3', name: 'three'}] - }) - }, /Cannot create a reference to an existing document object/) - }) - - it('should handle deletion of properties within a map', () => { - s1 = Automerge.change(s1, 'set style', doc => { - doc.textStyle = {typeface: 'Optima', bold: false, fontSize: 12} - }) - s1 = Automerge.change(s1, 'non-bold', doc => delete doc.textStyle.bold) - assert.strictEqual(s1.textStyle.bold, undefined) - assert.deepStrictEqual(s1.textStyle, {typeface: 'Optima', fontSize: 12}) - }) - - it('should handle deletion of references to a map', () => { - s1 = Automerge.change(s1, 'make rich text doc', doc => { - Object.assign(doc, {title: 'Hello', textStyle: {typeface: 'Optima', fontSize: 12}}) - }) - s1 = Automerge.change(s1, doc => delete doc.textStyle) - assert.strictEqual(s1.textStyle, undefined) - assert.deepStrictEqual(s1, {title: 'Hello'}) - }) - - it('should validate field names', () => { - s1 = Automerge.change(s1, doc => doc.nested = {}) - assert.throws(() => { Automerge.change(s1, doc => doc.nested[''] = 'x') }, /must not be an empty string/) - assert.throws(() => { Automerge.change(s1, doc => doc.nested = {'': 'x'}) }, /must not be an empty string/) - }) - }) - - describe('lists', () => { - it('should allow elements to be inserted', () => { - s1 = Automerge.change(s1, doc => doc.noodles = []) - s1 = Automerge.change(s1, doc => doc.noodles.insertAt(0, 'udon', 'soba')) - s1 = Automerge.change(s1, doc => doc.noodles.insertAt(1, 'ramen')) - assert.deepStrictEqual(s1, {noodles: ['udon', 'ramen', 'soba']}) - assert.deepStrictEqual(s1.noodles, ['udon', 'ramen', 'soba']) - assert.strictEqual(s1.noodles[0], 'udon') - assert.strictEqual(s1.noodles[1], 'ramen') - assert.strictEqual(s1.noodles[2], 'soba') - assert.strictEqual(s1.noodles.length, 3) - }) - - it('should handle assignment of a list literal', () => { - s1 = 
Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba']) - assert.deepStrictEqual(s1, {noodles: ['udon', 'ramen', 'soba']}) - assert.deepStrictEqual(s1.noodles, ['udon', 'ramen', 'soba']) - assert.strictEqual(s1.noodles[0], 'udon') - assert.strictEqual(s1.noodles[1], 'ramen') - assert.strictEqual(s1.noodles[2], 'soba') - assert.strictEqual(s1.noodles[3], undefined) - assert.strictEqual(s1.noodles.length, 3) - }) - - it('should only allow numeric indexes', () => { - s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba']) - s1 = Automerge.change(s1, doc => doc.noodles[1] = 'Ramen!') - assert.strictEqual(s1.noodles[1], 'Ramen!') - s1 = Automerge.change(s1, doc => doc.noodles['1'] = 'RAMEN!!!') - assert.strictEqual(s1.noodles[1], 'RAMEN!!!') - assert.throws(() => { Automerge.change(s1, doc => doc.noodles.favourite = 'udon') }, /list index must be a number/) - assert.throws(() => { Automerge.change(s1, doc => doc.noodles[''] = 'udon') }, /list index must be a number/) - assert.throws(() => { Automerge.change(s1, doc => doc.noodles['1e6'] = 'udon') }, /list index must be a number/) - }) - - it('should handle deletion of list elements', () => { - s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba']) - s1 = Automerge.change(s1, doc => delete doc.noodles[1]) - assert.deepStrictEqual(s1.noodles, ['udon', 'soba']) - s1 = Automerge.change(s1, doc => doc.noodles.deleteAt(1)) - assert.deepStrictEqual(s1.noodles, ['udon']) - assert.strictEqual(s1.noodles[0], 'udon') - assert.strictEqual(s1.noodles[1], undefined) - assert.strictEqual(s1.noodles[2], undefined) - assert.strictEqual(s1.noodles.length, 1) - }) - - it('should handle assignment of individual list indexes', () => { - s1 = Automerge.change(s1, doc => doc.japaneseFood = ['udon', 'ramen', 'soba']) - s1 = Automerge.change(s1, doc => doc.japaneseFood[1] = 'sushi') - assert.deepStrictEqual(s1.japaneseFood, ['udon', 'sushi', 'soba']) - assert.strictEqual(s1.japaneseFood[0], 
'udon') - assert.strictEqual(s1.japaneseFood[1], 'sushi') - assert.strictEqual(s1.japaneseFood[2], 'soba') - assert.strictEqual(s1.japaneseFood[3], undefined) - assert.strictEqual(s1.japaneseFood.length, 3) - }) - - it('concurrent edits insert in reverse actorid order if counters equal', () => { - s1 = Automerge.init('aaaa') - s2 = Automerge.init('bbbb') - s1 = Automerge.change(s1, doc => doc.list = []) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.list.splice(0, 0, "2@aaaa")) - s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "2@bbbb")) - s2 = Automerge.merge(s2, s1) - assert.deepStrictEqual(Automerge.toJS(s2).list, ["2@bbbb", "2@aaaa"]) - }) - - it('concurrent edits insert in reverse counter order if different', () => { - s1 = Automerge.init('aaaa') - s2 = Automerge.init('bbbb') - s1 = Automerge.change(s1, doc => doc.list = []) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.list.splice(0, 0, "2@aaaa")) - s2 = Automerge.change(s2, doc => doc.foo = "2@bbbb") - s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "3@bbbb")) - s2 = Automerge.merge(s2, s1) - assert.deepStrictEqual(s2.list, ["3@bbbb", "2@aaaa"]) - }) - - it('should treat out-by-one assignment as insertion', () => { - s1 = Automerge.change(s1, doc => doc.japaneseFood = ['udon']) - s1 = Automerge.change(s1, doc => doc.japaneseFood[1] = 'sushi') - assert.deepStrictEqual(s1.japaneseFood, ['udon', 'sushi']) - assert.strictEqual(s1.japaneseFood[0], 'udon') - assert.strictEqual(s1.japaneseFood[1], 'sushi') - assert.strictEqual(s1.japaneseFood[2], undefined) - assert.strictEqual(s1.japaneseFood.length, 2) - }) - - it('should not allow out-of-range assignment', () => { - s1 = Automerge.change(s1, doc => doc.japaneseFood = ['udon']) - assert.throws(() => { Automerge.change(s1, doc => doc.japaneseFood[4] = 'ramen') }, /is out of bounds/) - }) - - it('should allow bulk assignment of multiple list indexes', () => { - s1 = Automerge.change(s1, doc => doc.noodles 
= ['udon', 'ramen', 'soba']) - s1 = Automerge.change(s1, doc => Object.assign(doc.noodles, {0: 'うどん', 2: 'そば'})) - assert.deepStrictEqual(s1.noodles, ['うどん', 'ramen', 'そば']) - assert.strictEqual(s1.noodles[0], 'うどん') - assert.strictEqual(s1.noodles[1], 'ramen') - assert.strictEqual(s1.noodles[2], 'そば') - assert.strictEqual(s1.noodles.length, 3) - }) - - it('should handle nested objects', () => { - s1 = Automerge.change(s1, doc => doc.noodles = [{type: 'ramen', dishes: ['tonkotsu', 'shoyu']}]) - s1 = Automerge.change(s1, doc => doc.noodles.push({type: 'udon', dishes: ['tempura udon']})) - s1 = Automerge.change(s1, doc => doc.noodles[0].dishes.push('miso')) - assert.deepStrictEqual(s1, {noodles: [ - {type: 'ramen', dishes: ['tonkotsu', 'shoyu', 'miso']}, - {type: 'udon', dishes: ['tempura udon']} - ]}) - assert.deepStrictEqual(s1.noodles[0], { - type: 'ramen', dishes: ['tonkotsu', 'shoyu', 'miso'] - }) - assert.deepStrictEqual(s1.noodles[1], { - type: 'udon', dishes: ['tempura udon'] - }) - }) - - it('should handle nested lists', () => { - s1 = Automerge.change(s1, doc => doc.noodleMatrix = [['ramen', 'tonkotsu', 'shoyu']]) - s1 = Automerge.change(s1, doc => doc.noodleMatrix.push(['udon', 'tempura udon'])) - s1 = Automerge.change(s1, doc => doc.noodleMatrix[0].push('miso')) - assert.deepStrictEqual(s1.noodleMatrix, [['ramen', 'tonkotsu', 'shoyu', 'miso'], ['udon', 'tempura udon']]) - assert.deepStrictEqual(s1.noodleMatrix[0], ['ramen', 'tonkotsu', 'shoyu', 'miso']) - assert.deepStrictEqual(s1.noodleMatrix[1], ['udon', 'tempura udon']) - }) - - it('should handle deep nesting', () => { - s1 = Automerge.change(s1, doc => doc.nesting = { - maps: { m1: { m2: { foo: "bar", baz: {} }, m2a: { } } }, - lists: [ [ 1, 2, 3 ], [ [ 3, 4, 5, [6]], 7 ] ], - mapsinlists: [ { foo: "bar" }, [ { bar: "baz" } ] ], - listsinmaps: { foo: [1, 2, 3], bar: [ [ { baz: "123" } ] ] } - }) - s1 = Automerge.change(s1, doc => { - doc.nesting.maps.m1a = "123" - doc.nesting.maps.m1.m2.baz.xxx = 
"123" - delete doc.nesting.maps.m1.m2a - doc.nesting.lists.shift() - doc.nesting.lists[0][0].pop() - doc.nesting.lists[0][0].push(100) - doc.nesting.mapsinlists[0].foo = "baz" - doc.nesting.mapsinlists[1][0].foo = "bar" - delete doc.nesting.mapsinlists[1] - doc.nesting.listsinmaps.foo.push(4) - doc.nesting.listsinmaps.bar[0][0].baz = "456" - delete doc.nesting.listsinmaps.bar - }) - assert.deepStrictEqual(s1, { nesting: { - maps: { m1: { m2: { foo: "bar", baz: { xxx: "123" } } }, m1a: "123" }, - lists: [ [ [ 3, 4, 5, 100 ], 7 ] ], - mapsinlists: [ { foo: "baz" } ], - listsinmaps: { foo: [1, 2, 3, 4] } - }}) - }) - - it('should handle replacement of the entire list', () => { - s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'soba', 'ramen']) - s1 = Automerge.change(s1, doc => doc.japaneseNoodles = doc.noodles.slice()) - s1 = Automerge.change(s1, doc => doc.noodles = ['wonton', 'pho']) - assert.deepStrictEqual(s1, { - noodles: ['wonton', 'pho'], - japaneseNoodles: ['udon', 'soba', 'ramen'] - }) - assert.deepStrictEqual(s1.noodles, ['wonton', 'pho']) - assert.strictEqual(s1.noodles[0], 'wonton') - assert.strictEqual(s1.noodles[1], 'pho') - assert.strictEqual(s1.noodles[2], undefined) - assert.strictEqual(s1.noodles.length, 2) - }) - - it('should allow assignment to change the type of a list element', () => { - s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'soba', 'ramen']) - assert.deepStrictEqual(s1.noodles, ['udon', 'soba', 'ramen']) - s1 = Automerge.change(s1, doc => doc.noodles[1] = {type: 'soba', options: ['hot', 'cold']}) - assert.deepStrictEqual(s1.noodles, ['udon', {type: 'soba', options: ['hot', 'cold']}, 'ramen']) - s1 = Automerge.change(s1, doc => doc.noodles[1] = ['hot soba', 'cold soba']) - assert.deepStrictEqual(s1.noodles, ['udon', ['hot soba', 'cold soba'], 'ramen']) - s1 = Automerge.change(s1, doc => doc.noodles[1] = 'soba is the best') - assert.deepStrictEqual(s1.noodles, ['udon', 'soba is the best', 'ramen']) - }) - - it('should 
allow list creation and assignment in the same change callback', () => { - s1 = Automerge.change(Automerge.init(), doc => { - doc.letters = ['a', 'b', 'c'] - doc.letters[1] = 'd' - }) - assert.strictEqual(s1.letters[1], 'd') - }) - - it('should allow adding and removing list elements in the same change callback', () => { - s1 = Automerge.change(Automerge.init(), doc => doc.noodles = []) - s1 = Automerge.change(s1, doc => { - doc.noodles.push('udon') - doc.noodles.deleteAt(0) - }) - assert.deepStrictEqual(s1, {noodles: []}) - // do the add-remove cycle twice, test for #151 (https://github.com/automerge/automerge/issues/151) - s1 = Automerge.change(s1, doc => { - doc.noodles.push('soba') - doc.noodles.deleteAt(0) - }) - assert.deepStrictEqual(s1, {noodles: []}) - }) - - it('should handle arbitrary-depth nesting', () => { - s1 = Automerge.change(s1, doc => doc.maze = [[[[[[[['noodles', ['here']]]]]]]]]) - s1 = Automerge.change(s1, doc => doc.maze[0][0][0][0][0][0][0][1].unshift('found')) - assert.deepStrictEqual(s1.maze, [[[[[[[['noodles', ['found', 'here']]]]]]]]]) - assert.deepStrictEqual(s1.maze[0][0][0][0][0][0][0][1][1], 'here') - s2 = Automerge.load(Automerge.save(s1)) - assert.deepStrictEqual(s1,s2) - }) - - it('should not allow several references to the same list object', () => { - s1 = Automerge.change(s1, doc => doc.list = []) - assert.throws(() => { - Automerge.change(s1, doc => { doc.x = doc.list }) - }, /Cannot create a reference to an existing document object/) - assert.throws(() => { - Automerge.change(s1, doc => { doc.x = s1.list }) - }, /Cannot create a reference to an existing document object/) - assert.throws(() => { - Automerge.change(s1, doc => { doc.x = []; doc.y = doc.x }) - }, /Cannot create a reference to an existing document object/) - }) - }) - - describe('counters', () => { - // counter - it('should allow deleting counters from maps', () => { - const s1 = Automerge.change(Automerge.init(), doc => doc.birds = {wrens: new 
Automerge.Counter(1)}) - const s2 = Automerge.change(s1, doc => doc.birds.wrens.increment(2)) - const s3 = Automerge.change(s2, doc => delete doc.birds.wrens) - assert.deepStrictEqual(s2, {birds: {wrens: new Automerge.Counter(3)}}) - assert.deepStrictEqual(s3, {birds: {}}) - }) - - // counter - /* - it('should not allow deleting counters from lists', () => { - const s1 = Automerge.change(Automerge.init(), doc => doc.recordings = [new Automerge.Counter(1)]) - const s2 = Automerge.change(s1, doc => doc.recordings[0].increment(2)) - assert.deepStrictEqual(s2, {recordings: [new Automerge.Counter(3)]}) - assert.throws(() => { Automerge.change(s2, doc => doc.recordings.deleteAt(0)) }, /Unsupported operation/) - }) - */ - }) - }) - - describe('concurrent use', () => { - let s1, s2, s3, s4 - beforeEach(() => { - s1 = Automerge.init() - s2 = Automerge.init() - s3 = Automerge.init() - s4 = Automerge.init() - }) - - it('should merge concurrent updates of different properties', () => { - s1 = Automerge.change(s1, doc => doc.foo = 'bar') - s2 = Automerge.change(s2, doc => doc.hello = 'world') - s3 = Automerge.merge(s1, s2) - assert.strictEqual(s3.foo, 'bar') - assert.strictEqual(s3.hello, 'world') - assert.deepStrictEqual(s3, {foo: 'bar', hello: 'world'}) - assert.strictEqual(Automerge.getConflicts(s3, 'foo'), undefined) - assert.strictEqual(Automerge.getConflicts(s3, 'hello'), undefined) - s4 = Automerge.load(Automerge.save(s3)) - assert.deepEqual(s3,s4) - }) - - it('should add concurrent increments of the same property', () => { - s1 = Automerge.change(s1, doc => doc.counter = new Automerge.Counter()) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.counter.increment()) - s2 = Automerge.change(s2, doc => doc.counter.increment(2)) - s3 = Automerge.merge(s1, s2) - assert.strictEqual(s1.counter.value, 1) - assert.strictEqual(s2.counter.value, 2) - assert.strictEqual(s3.counter.value, 3) - assert.strictEqual(Automerge.getConflicts(s3, 'counter'), undefined) - 
s4 = Automerge.load(Automerge.save(s3)) - assert.deepEqual(s3,s4) - }) - - it('should add increments only to the values they precede', () => { - s1 = Automerge.change(s1, doc => doc.counter = new Automerge.Counter(0)) - s1 = Automerge.change(s1, doc => doc.counter.increment()) - s2 = Automerge.change(s2, doc => doc.counter = new Automerge.Counter(100)) - s2 = Automerge.change(s2, doc => doc.counter.increment(3)) - s3 = Automerge.merge(s1, s2) - if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) { - assert.deepStrictEqual(s3, {counter: new Automerge.Counter(1)}) - } else { - assert.deepStrictEqual(s3, {counter: new Automerge.Counter(103)}) - } - assert.deepStrictEqual(Automerge.getConflicts(s3, 'counter'), { - [`1@${Automerge.getActorId(s1)}`]: new Automerge.Counter(1), - [`1@${Automerge.getActorId(s2)}`]: new Automerge.Counter(103) - }) - s4 = Automerge.load(Automerge.save(s3)) - assert.deepEqual(s3,s4) - }) - - it('should detect concurrent updates of the same field', () => { - s1 = Automerge.change(s1, doc => doc.field = 'one') - s2 = Automerge.change(s2, doc => doc.field = 'two') - s3 = Automerge.merge(s1, s2) - if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) { - assert.deepStrictEqual(s3, {field: 'one'}) - } else { - assert.deepStrictEqual(s3, {field: 'two'}) - } - assert.deepStrictEqual(Automerge.getConflicts(s3, 'field'), { - [`1@${Automerge.getActorId(s1)}`]: 'one', - [`1@${Automerge.getActorId(s2)}`]: 'two' - }) - }) - - it('should detect concurrent updates of the same list element', () => { - s1 = Automerge.change(s1, doc => doc.birds = ['finch']) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.birds[0] = 'greenfinch') - s2 = Automerge.change(s2, doc => doc.birds[0] = 'goldfinch') - s3 = Automerge.merge(s1, s2) - if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) { - assert.deepStrictEqual(s3.birds, ['greenfinch']) - } else { - assert.deepStrictEqual(s3.birds, ['goldfinch']) - } - 
assert.deepStrictEqual(Automerge.getConflicts(s3.birds, 0), { - [`3@${Automerge.getActorId(s1)}`]: 'greenfinch', - [`3@${Automerge.getActorId(s2)}`]: 'goldfinch' - }) - }) - - it('should handle assignment conflicts of different types', () => { - s1 = Automerge.change(s1, doc => doc.field = 'string') - s2 = Automerge.change(s2, doc => doc.field = ['list']) - s3 = Automerge.change(s3, doc => doc.field = {thing: 'map'}) - s1 = Automerge.merge(Automerge.merge(s1, s2), s3) - assertEqualsOneOf(s1.field, 'string', ['list'], {thing: 'map'}) - assert.deepStrictEqual(Automerge.getConflicts(s1, 'field'), { - [`1@${Automerge.getActorId(s1)}`]: 'string', - [`1@${Automerge.getActorId(s2)}`]: ['list'], - [`1@${Automerge.getActorId(s3)}`]: {thing: 'map'} - }) - }) - - it('should handle changes within a conflicting map field', () => { - s1 = Automerge.change(s1, doc => doc.field = 'string') - s2 = Automerge.change(s2, doc => doc.field = {}) - s2 = Automerge.change(s2, doc => doc.field.innerKey = 42) - s3 = Automerge.merge(s1, s2) - assertEqualsOneOf(s3.field, 'string', {innerKey: 42}) - assert.deepStrictEqual(Automerge.getConflicts(s3, 'field'), { - [`1@${Automerge.getActorId(s1)}`]: 'string', - [`1@${Automerge.getActorId(s2)}`]: {innerKey: 42} - }) - }) - - it('should handle changes within a conflicting list element', () => { - s1 = Automerge.change(s1, doc => doc.list = ['hello']) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.list[0] = {map1: true}) - s1 = Automerge.change(s1, doc => doc.list[0].key = 1) - s2 = Automerge.change(s2, doc => doc.list[0] = {map2: true}) - s2 = Automerge.change(s2, doc => doc.list[0].key = 2) - s3 = Automerge.merge(s1, s2) - if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) { - assert.deepStrictEqual(s3.list, [{map1: true, key: 1}]) - } else { - assert.deepStrictEqual(s3.list, [{map2: true, key: 2}]) - } - assert.deepStrictEqual(Automerge.getConflicts(s3.list, 0), { - [`3@${Automerge.getActorId(s1)}`]: {map1: true, key: 
1}, - [`3@${Automerge.getActorId(s2)}`]: {map2: true, key: 2} - }) - }) - - it('should not merge concurrently assigned nested maps', () => { - s1 = Automerge.change(s1, doc => doc.config = {background: 'blue'}) - s2 = Automerge.change(s2, doc => doc.config = {logo_url: 'logo.png'}) - s3 = Automerge.merge(s1, s2) - assertEqualsOneOf(s3.config, {background: 'blue'}, {logo_url: 'logo.png'}) - assert.deepStrictEqual(Automerge.getConflicts(s3, 'config'), { - [`1@${Automerge.getActorId(s1)}`]: {background: 'blue'}, - [`1@${Automerge.getActorId(s2)}`]: {logo_url: 'logo.png'} - }) - }) - - it('should clear conflicts after assigning a new value', () => { - s1 = Automerge.change(s1, doc => doc.field = 'one') - s2 = Automerge.change(s2, doc => doc.field = 'two') - s3 = Automerge.merge(s1, s2) - s3 = Automerge.change(s3, doc => doc.field = 'three') - assert.deepStrictEqual(s3, {field: 'three'}) - assert.strictEqual(Automerge.getConflicts(s3, 'field'), undefined) - s2 = Automerge.merge(s2, s3) - assert.deepStrictEqual(s2, {field: 'three'}) - assert.strictEqual(Automerge.getConflicts(s2, 'field'), undefined) - }) - - it('should handle concurrent insertions at different list positions', () => { - s1 = Automerge.change(s1, doc => doc.list = ['one', 'three']) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.list.splice(1, 0, 'two')) - s2 = Automerge.change(s2, doc => doc.list.push('four')) - s3 = Automerge.merge(s1, s2) - assert.deepStrictEqual(s3, {list: ['one', 'two', 'three', 'four']}) - assert.strictEqual(Automerge.getConflicts(s3, 'list'), undefined) - }) - - it('should handle concurrent insertions at the same list position', () => { - s1 = Automerge.change(s1, doc => doc.birds = ['parakeet']) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.birds.push('starling')) - s2 = Automerge.change(s2, doc => doc.birds.push('chaffinch')) - s3 = Automerge.merge(s1, s2) - assertEqualsOneOf(s3.birds, ['parakeet', 'starling', 'chaffinch'], 
['parakeet', 'chaffinch', 'starling']) - s2 = Automerge.merge(s2, s3) - assert.deepStrictEqual(s2, s3) - }) - - it('should handle concurrent assignment and deletion of a map entry', () => { - // Add-wins semantics - s1 = Automerge.change(s1, doc => doc.bestBird = 'robin') - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => delete doc.bestBird) - s2 = Automerge.change(s2, doc => doc.bestBird = 'magpie') - s3 = Automerge.merge(s1, s2) - assert.deepStrictEqual(s1, {}) - assert.deepStrictEqual(s2, {bestBird: 'magpie'}) - assert.deepStrictEqual(s3, {bestBird: 'magpie'}) - assert.strictEqual(Automerge.getConflicts(s3, 'bestBird'), undefined) - }) - - it('should handle concurrent assignment and deletion of a list element', () => { - // Concurrent assignment ressurects a deleted list element. Perhaps a little - // surprising, but consistent with add-wins semantics of maps (see test above) - s1 = Automerge.change(s1, doc => doc.birds = ['blackbird', 'thrush', 'goldfinch']) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.birds[1] = 'starling') - s2 = Automerge.change(s2, doc => doc.birds.splice(1, 1)) - s3 = Automerge.merge(s1, s2) - assert.deepStrictEqual(s1.birds, ['blackbird', 'starling', 'goldfinch']) - assert.deepStrictEqual(s2.birds, ['blackbird', 'goldfinch']) - assert.deepStrictEqual(s3.birds, ['blackbird', 'starling', 'goldfinch']) - s4 = Automerge.load(Automerge.save(s3)) - assert.deepStrictEqual(s3, s4); - }) - - it('should handle insertion after a deleted list element', () => { - s1 = Automerge.change(s1, doc => doc.birds = ['blackbird', 'thrush', 'goldfinch']) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.birds.splice(1, 2)) - s2 = Automerge.change(s2, doc => doc.birds.splice(2, 0, 'starling')) - s3 = Automerge.merge(s1, s2) - assert.deepStrictEqual(s3, {birds: ['blackbird', 'starling']}) - assert.deepStrictEqual(Automerge.merge(s2, s3), {birds: ['blackbird', 'starling']}) - }) - - it('should handle 
concurrent deletion of the same element', () => { - s1 = Automerge.change(s1, doc => doc.birds = ['albatross', 'buzzard', 'cormorant']) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.birds.deleteAt(1)) // buzzard - s2 = Automerge.change(s2, doc => doc.birds.deleteAt(1)) // buzzard - s3 = Automerge.merge(s1, s2) - assert.deepStrictEqual(s3.birds, ['albatross', 'cormorant']) - }) - - it('should handle concurrent deletion of different elements', () => { - s1 = Automerge.change(s1, doc => doc.birds = ['albatross', 'buzzard', 'cormorant']) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.birds.deleteAt(0)) // albatross - s2 = Automerge.change(s2, doc => doc.birds.deleteAt(1)) // buzzard - s3 = Automerge.merge(s1, s2) - assert.deepStrictEqual(s3.birds, ['cormorant']) - }) - - it('should handle concurrent updates at different levels of the tree', () => { - // A delete higher up in the tree overrides an update in a subtree - s1 = Automerge.change(s1, doc => doc.animals = {birds: {pink: 'flamingo', black: 'starling'}, mammals: ['badger']}) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.animals.birds.brown = 'sparrow') - s2 = Automerge.change(s2, doc => delete doc.animals.birds) - s3 = Automerge.merge(s1, s2) - assert.deepStrictEqual(s1.animals, { - birds: { - pink: 'flamingo', brown: 'sparrow', black: 'starling' - }, - mammals: ['badger'] - }) - assert.deepStrictEqual(s2.animals, {mammals: ['badger']}) - assert.deepStrictEqual(s3.animals, {mammals: ['badger']}) - }) - - it('should handle updates of concurrently deleted objects', () => { - s1 = Automerge.change(s1, doc => doc.birds = {blackbird: {feathers: 'black'}}) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => delete doc.birds.blackbird) - s2 = Automerge.change(s2, doc => doc.birds.blackbird.beak = 'orange') - s3 = Automerge.merge(s1, s2) - assert.deepStrictEqual(s1, {birds: {}}) - }) - - it('should not interleave sequence insertions at 
the same position', () => { - s1 = Automerge.change(s1, doc => doc.wisdom = []) - s2 = Automerge.merge(s2, s1) - s1 = Automerge.change(s1, doc => doc.wisdom.push('to', 'be', 'is', 'to', 'do')) - s2 = Automerge.change(s2, doc => doc.wisdom.push('to', 'do', 'is', 'to', 'be')) - s3 = Automerge.merge(s1, s2) - assertEqualsOneOf(s3.wisdom, - ['to', 'be', 'is', 'to', 'do', 'to', 'do', 'is', 'to', 'be'], - ['to', 'do', 'is', 'to', 'be', 'to', 'be', 'is', 'to', 'do']) - // In case you're wondering: http://quoteinvestigator.com/2013/09/16/do-be-do/ - }) - - describe('multiple insertions at the same list position', () => { - it('should handle insertion by greater actor ID', () => { - s1 = Automerge.init('aaaa') - s2 = Automerge.init('bbbb') - s1 = Automerge.change(s1, doc => doc.list = ['two']) - s2 = Automerge.merge(s2, s1) - s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, 'one')) - assert.deepStrictEqual(s2.list, ['one', 'two']) - }) - - it('should handle insertion by lesser actor ID', () => { - s1 = Automerge.init('bbbb') - s2 = Automerge.init('aaaa') - s1 = Automerge.change(s1, doc => doc.list = ['two']) - s2 = Automerge.merge(s2, s1) - s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, 'one')) - assert.deepStrictEqual(s2.list, ['one', 'two']) - }) - - it('should handle insertion regardless of actor ID', () => { - s1 = Automerge.change(s1, doc => doc.list = ['two']) - s2 = Automerge.merge(s2, s1) - s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, 'one')) - assert.deepStrictEqual(s2.list, ['one', 'two']) - }) - - it('should make insertion order consistent with causality', () => { - s1 = Automerge.change(s1, doc => doc.list = ['four']) - s2 = Automerge.merge(s2, s1) - s2 = Automerge.change(s2, doc => doc.list.unshift('three')) - s1 = Automerge.merge(s1, s2) - s1 = Automerge.change(s1, doc => doc.list.unshift('two')) - s2 = Automerge.merge(s2, s1) - s2 = Automerge.change(s2, doc => doc.list.unshift('one')) - assert.deepStrictEqual(s2.list, ['one', 'two', 
'three', 'four']) - }) - }) - }) - - describe('saving and loading', () => { - it('should save and restore an empty document', () => { - let s = Automerge.load(Automerge.save(Automerge.init())) - assert.deepStrictEqual(s, {}) - }) - - it('should generate a new random actor ID', () => { - let s1 = Automerge.init() - let s2 = Automerge.load(Automerge.save(s1)) - assert.strictEqual(UUID_PATTERN.test(Automerge.getActorId(s1).toString()), true) - assert.strictEqual(UUID_PATTERN.test(Automerge.getActorId(s2).toString()), true) - assert.notEqual(Automerge.getActorId(s1), Automerge.getActorId(s2)) - }) - - it('should allow a custom actor ID to be set', () => { - let s = Automerge.load(Automerge.save(Automerge.init()), '333333') - assert.strictEqual(Automerge.getActorId(s), '333333') - }) - - it('should reconstitute complex datatypes', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc.todos = [{title: 'water plants', done: false}]) - let s2 = Automerge.load(Automerge.save(s1)) - assert.deepStrictEqual(s2, {todos: [{title: 'water plants', done: false}]}) - }) - - it('should save and load maps with @ symbols in the keys', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc["123@4567"] = "hello") - let s2 = Automerge.load(Automerge.save(s1)) - assert.deepStrictEqual(s2, { "123@4567": "hello" }) - }) - - it('should reconstitute conflicts', () => { - let s1 = Automerge.change(Automerge.init('111111'), doc => doc.x = 3) - let s2 = Automerge.change(Automerge.init('222222'), doc => doc.x = 5) - s1 = Automerge.merge(s1, s2) - let s3 = Automerge.load(Automerge.save(s1)) - assert.strictEqual(s1.x, 5) - assert.strictEqual(s3.x, 5) - assert.deepStrictEqual(Automerge.getConflicts(s1, 'x'), {'1@111111': 3, '1@222222': 5}) - assert.deepStrictEqual(Automerge.getConflicts(s3, 'x'), {'1@111111': 3, '1@222222': 5}) - }) - - it('should reconstitute element ID counters', () => { - const s1 = Automerge.init('01234567') - const s2 = Automerge.change(s1, doc => doc.list = 
['a']) - const listId = Automerge.getObjectId(s2.list) - const changes12 = Automerge.getAllChanges(s2).map(decodeChange) - assert.deepStrictEqual(changes12, [{ - hash: changes12[0].hash, actor: '01234567', seq: 1, startOp: 1, - time: changes12[0].time, message: '', deps: [], ops: [ - {obj: '_root', action: 'makeList', key: 'list', insert: false, pred: []}, - {obj: listId, action: 'set', elemId: '_head', insert: true, value: 'a', pred: []} - ] - }]) - const s3 = Automerge.change(s2, doc => doc.list.deleteAt(0)) - const s4 = Automerge.load(Automerge.save(s3), '01234567') - const s5 = Automerge.change(s4, doc => doc.list.push('b')) - const changes45 = Automerge.getAllChanges(s5).map(decodeChange) - assert.deepStrictEqual(s5, {list: ['b']}) - assert.deepStrictEqual(changes45[2], { - hash: changes45[2].hash, actor: '01234567', seq: 3, startOp: 4, - time: changes45[2].time, message: '', deps: [changes45[1].hash], ops: [ - {obj: listId, action: 'set', elemId: '_head', insert: true, value: 'b', pred: []} - ] - }) - }) - - it('should allow a reloaded list to be mutated', () => { - let doc = Automerge.change(Automerge.init(), doc => doc.foo = []) - doc = Automerge.load(Automerge.save(doc)) - doc = Automerge.change(doc, 'add', doc => doc.foo.push(1)) - doc = Automerge.load(Automerge.save(doc)) - assert.deepStrictEqual(doc.foo, [1]) - }) - - it('should reload a document containing deflated columns', () => { - // In this test, the keyCtr column is long enough for deflate compression to kick in, but the - // keyStr column is short. Thus, the deflate bit gets set for keyCtr but not for keyStr. - // When checking whether the columns appear in ascending order, we must ignore the deflate bit. 
- let doc = Automerge.change(Automerge.init(), doc => { - doc.list = [] - for (let i = 0; i < 200; i++) doc.list.insertAt(Math.floor(Math.random() * i), 'a') - }) - Automerge.load(Automerge.save(doc)) - let expected = [] - for (let i = 0; i < 200; i++) expected.push('a') - assert.deepStrictEqual(doc, {list: expected}) - }) - - /* - it.skip('should call patchCallback if supplied', () => { - const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch']) - const s2 = Automerge.change(s1, doc => doc.birds.push('Chaffinch')) - const callbacks = [], actor = Automerge.getActorId(s1) - const reloaded = Automerge.load(Automerge.save(s2), { - patchCallback(patch, before, after, local) { - callbacks.push({patch, before, after, local}) - } - }) - assert.strictEqual(callbacks.length, 1) - assert.deepStrictEqual(callbacks[0].patch, { - maxOp: 3, deps: [decodeChange(Automerge.getAllChanges(s2)[1]).hash], clock: {[actor]: 2}, pendingChanges: 0, - diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: { - objectId: `1@${actor}`, type: 'list', edits: [ - {action: 'multi-insert', index: 0, elemId: `2@${actor}`, values: ['Goldfinch', 'Chaffinch']} - ] - }}}} - }) - assert.deepStrictEqual(callbacks[0].before, {}) - assert.strictEqual(callbacks[0].after, reloaded) - assert.strictEqual(callbacks[0].local, false) - }) - */ - }) - - describe('history API', () => { - it('should return an empty history for an empty document', () => { - assert.deepStrictEqual(Automerge.getHistory(Automerge.init()), []) - }) - - it('should make past document states accessible', () => { - let s = Automerge.init() - s = Automerge.change(s, doc => doc.config = {background: 'blue'}) - s = Automerge.change(s, doc => doc.birds = ['mallard']) - s = Automerge.change(s, doc => doc.birds.unshift('oystercatcher')) - assert.deepStrictEqual(Automerge.getHistory(s).map(state => state.snapshot), [ - {config: {background: 'blue'}}, - {config: {background: 'blue'}, birds: ['mallard']}, - 
{config: {background: 'blue'}, birds: ['oystercatcher', 'mallard']} - ]) - }) - - it('should make change messages accessible', () => { - let s = Automerge.init() - s = Automerge.change(s, 'Empty Bookshelf', doc => doc.books = []) - s = Automerge.change(s, 'Add Orwell', doc => doc.books.push('Nineteen Eighty-Four')) - s = Automerge.change(s, 'Add Huxley', doc => doc.books.push('Brave New World')) - assert.deepStrictEqual(s.books, ['Nineteen Eighty-Four', 'Brave New World']) - assert.deepStrictEqual(Automerge.getHistory(s).map(state => state.change.message), - ['Empty Bookshelf', 'Add Orwell', 'Add Huxley']) - }) - }) - - describe('changes API', () => { - it('should return an empty list on an empty document', () => { - let changes = Automerge.getAllChanges(Automerge.init()) - assert.deepStrictEqual(changes, []) - }) - - it('should return an empty list when nothing changed', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch']) - assert.deepStrictEqual(Automerge.getChanges(s1, s1), []) - }) - - it('should do nothing when applying an empty list of changes', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch']) - assert.deepStrictEqual(Automerge.applyChanges(s1, [])[0], s1) - }) - - it('should return all changes when compared to an empty document', () => { - let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch']) - let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch')) - let changes = Automerge.getChanges(Automerge.init(), s2) - assert.strictEqual(changes.length, 2) - }) - - it('should allow a document copy to be reconstructed from scratch', () => { - let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch']) - let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch')) - let changes = Automerge.getAllChanges(s2) - let [s3] = Automerge.applyChanges(Automerge.init(), changes) - 
assert.deepStrictEqual(s3.birds, ['Chaffinch', 'Bullfinch']) - }) - - it('should return changes since the last given version', () => { - let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch']) - let changes1 = Automerge.getAllChanges(s1) - let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch')) - let changes2 = Automerge.getChanges(s1, s2) - assert.strictEqual(changes1.length, 1) // Add Chaffinch - assert.strictEqual(changes2.length, 1) // Add Bullfinch - }) - - it('should incrementally apply changes since the last given version', () => { - let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch']) - let changes1 = Automerge.getAllChanges(s1) - let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch')) - let changes2 = Automerge.getChanges(s1, s2) - let [s3] = Automerge.applyChanges(Automerge.init(), changes1) - let [s4] = Automerge.applyChanges(s3, changes2) - assert.deepStrictEqual(s3.birds, ['Chaffinch']) - assert.deepStrictEqual(s4.birds, ['Chaffinch', 'Bullfinch']) - }) - - it('should handle updates to a list element', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch', 'Bullfinch']) - let s2 = Automerge.change(s1, doc => doc.birds[0] = 'Goldfinch') - let [s3] = Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2)) - assert.deepStrictEqual(s3.birds, ['Goldfinch', 'Bullfinch']) - assert.strictEqual(Automerge.getConflicts(s3.birds, 0), undefined) - }) - - // TEXT - it('should handle updates to a text object', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text('ab')) - let s2 = Automerge.change(s1, doc => doc.text.set(0, 'A')) - let [s3] = Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2)) - assert.deepStrictEqual([...s3.text], ['A', 'b']) - }) - - /* - it.skip('should report missing dependencies', () => { - let s1 = 
Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch']) - let s2 = Automerge.merge(Automerge.init(), s1) - s2 = Automerge.change(s2, doc => doc.birds.push('Bullfinch')) - let changes = Automerge.getAllChanges(s2) - let [s3, patch] = Automerge.applyChanges(Automerge.init(), [changes[1]]) - assert.deepStrictEqual(s3, {}) - assert.deepStrictEqual(Automerge.Backend.getMissingDeps(Automerge.Frontend.getBackendState(s3)), - decodeChange(changes[1]).deps) - assert.strictEqual(patch.pendingChanges, 1) - ;[s3, patch] = Automerge.applyChanges(s3, [changes[0]]) - assert.deepStrictEqual(s3.birds, ['Chaffinch', 'Bullfinch']) - assert.deepStrictEqual(Automerge.Backend.getMissingDeps(Automerge.Frontend.getBackendState(s3)), []) - assert.strictEqual(patch.pendingChanges, 0) - }) - */ - - it('should report missing dependencies with out-of-order applyChanges', () => { - let s0 = Automerge.init() - let s1 = Automerge.change(s0, doc => doc.test = ['a']) - let changes01 = Automerge.getAllChanges(s1) - let s2 = Automerge.change(s1, doc => doc.test = ['b']) - let changes12 = Automerge.getChanges(s1, s2) - let s3 = Automerge.change(s2, doc => doc.test = ['c']) - let changes23 = Automerge.getChanges(s2, s3) - let s4 = Automerge.init() - let [s5] = Automerge.applyChanges(s4, changes23) - let [s6] = Automerge.applyChanges(s5, changes12) -// assert.deepStrictEqual(Automerge.Backend.getMissingDeps(Automerge.Frontend.getBackendState(s6)), [decodeChange(changes01[0]).hash]) - assert.deepStrictEqual(Automerge.getMissingDeps(s6), [decodeChange(changes01[0]).hash]) - }) - - /* - it.skip('should call patchCallback if supplied when applying changes', () => { - const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch']) - const callbacks = [], actor = Automerge.getActorId(s1) - const before = Automerge.init() - const [after, patch] = Automerge.applyChanges(before, Automerge.getAllChanges(s1), { - patchCallback(patch, before, after, local) { - callbacks.push({patch, 
before, after, local}) - } - }) - assert.strictEqual(callbacks.length, 1) - assert.deepStrictEqual(callbacks[0].patch, { - maxOp: 2, deps: [decodeChange(Automerge.getAllChanges(s1)[0]).hash], clock: {[actor]: 1}, pendingChanges: 0, - diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: { - objectId: `1@${actor}`, type: 'list', edits: [ - {action: 'insert', index: 0, elemId: `2@${actor}`, opId: `2@${actor}`, value: {type: 'value', value: 'Goldfinch'}} - ] - }}}} - }) - assert.strictEqual(callbacks[0].patch, patch) - assert.strictEqual(callbacks[0].before, before) - assert.strictEqual(callbacks[0].after, after) - assert.strictEqual(callbacks[0].local, false) - }) - */ - - /* - it.skip('should merge multiple applied changes into one patch', () => { - const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch']) - const s2 = Automerge.change(s1, doc => doc.birds.push('Chaffinch')) - const patches = [], actor = Automerge.getActorId(s2) - Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2), - {patchCallback: p => patches.push(p)}) - assert.deepStrictEqual(patches, [{ - maxOp: 3, deps: [decodeChange(Automerge.getAllChanges(s2)[1]).hash], clock: {[actor]: 2}, pendingChanges: 0, - diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: { - objectId: `1@${actor}`, type: 'list', edits: [ - {action: 'multi-insert', index: 0, elemId: `2@${actor}`, values: ['Goldfinch', 'Chaffinch']} - ] - }}}} - }]) - }) - */ - - /* - it.skip('should call a patchCallback registered on doc initialisation', () => { - const s1 = Automerge.change(Automerge.init(), doc => doc.bird = 'Goldfinch') - const patches = [], actor = Automerge.getActorId(s1) - const before = Automerge.init({patchCallback: p => patches.push(p)}) - Automerge.applyChanges(before, Automerge.getAllChanges(s1)) - assert.deepStrictEqual(patches, [{ - maxOp: 1, deps: [decodeChange(Automerge.getAllChanges(s1)[0]).hash], clock: {[actor]: 1}, pendingChanges: 0, - 
diffs: {objectId: '_root', type: 'map', props: {bird: {[`1@${actor}`]: {type: 'value', value: 'Goldfinch'}}}} - }]) - }) - */ - }) -}) diff --git a/automerge-js/test/text_test.ts b/automerge-js/test/text_test.ts deleted file mode 100644 index e55287ce..00000000 --- a/automerge-js/test/text_test.ts +++ /dev/null @@ -1,701 +0,0 @@ -import * as assert from 'assert' -import * as Automerge from '../src' -import { assertEqualsOneOf } from './helpers' -import * as AutomergeWASM from "automerge-wasm" - -Automerge.use(AutomergeWASM) - -function attributeStateToAttributes(accumulatedAttributes) { - const attributes = {} - Object.entries(accumulatedAttributes).forEach(([key, values]) => { - if (values.length && values[0] !== null) { - attributes[key] = values[0] - } - }) - return attributes -} - -function isEquivalent(a, b) { - const aProps = Object.getOwnPropertyNames(a) - const bProps = Object.getOwnPropertyNames(b) - - if (aProps.length != bProps.length) { - return false - } - - for (let i = 0; i < aProps.length; i++) { - const propName = aProps[i] - if (a[propName] !== b[propName]) { - return false - } - } - - return true -} - -function isControlMarker(pseudoCharacter) { - return typeof pseudoCharacter === 'object' && pseudoCharacter.attributes -} - -function opFrom(text, attributes) { - let op = { insert: text } - if (Object.keys(attributes).length > 0) { - op.attributes = attributes - } - return op -} - -function accumulateAttributes(span, accumulatedAttributes) { - Object.entries(span).forEach(([key, value]) => { - if (!accumulatedAttributes[key]) { - accumulatedAttributes[key] = [] - } - if (value === null) { - if (accumulatedAttributes[key].length === 0 || accumulatedAttributes[key] === null) { - accumulatedAttributes[key].unshift(null) - } else { - accumulatedAttributes[key].shift() - } - } else { - if (accumulatedAttributes[key][0] === null) { - accumulatedAttributes[key].shift() - } else { - accumulatedAttributes[key].unshift(value) - } - } - }) - return 
accumulatedAttributes -} - -function automergeTextToDeltaDoc(text) { - let ops = [] - let controlState = {} - let currentString = "" - let attributes = {} - text.toSpans().forEach((span) => { - if (isControlMarker(span)) { - controlState = accumulateAttributes(span.attributes, controlState) - } else { - let next = attributeStateToAttributes(controlState) - - // if the next span has the same calculated attributes as the current span - // don't bother outputting it as a separate span, just let it ride - if (typeof span === 'string' && isEquivalent(next, attributes)) { - currentString = currentString + span - return - } - - if (currentString) { - ops.push(opFrom(currentString, attributes)) - } - - // If we've got a string, we might be able to concatenate it to another - // same-attributed-string, so remember it and go to the next iteration. - if (typeof span === 'string') { - currentString = span - attributes = next - } else { - // otherwise we have an embed "character" and should output it immediately. - // embeds are always one-"character" in length. 
- ops.push(opFrom(span, next)) - currentString = '' - attributes = {} - } - } - }) - - // at the end, flush any accumulated string out - if (currentString) { - ops.push(opFrom(currentString, attributes)) - } - - return ops -} - -function inverseAttributes(attributes) { - let invertedAttributes = {} - Object.keys(attributes).forEach((key) => { - invertedAttributes[key] = null - }) - return invertedAttributes -} - -function applyDeleteOp(text, offset, op) { - let length = op.delete - while (length > 0) { - if (isControlMarker(text.get(offset))) { - offset += 1 - } else { - // we need to not delete control characters, but we do delete embed characters - text.deleteAt(offset, 1) - length -= 1 - } - } - return [text, offset] -} - -function applyRetainOp(text, offset, op) { - let length = op.retain - - if (op.attributes) { - text.insertAt(offset, { attributes: op.attributes }) - offset += 1 - } - - while (length > 0) { - const char = text.get(offset) - offset += 1 - if (!isControlMarker(char)) { - length -= 1 - } - } - - if (op.attributes) { - text.insertAt(offset, { attributes: inverseAttributes(op.attributes) }) - offset += 1 - } - - return [text, offset] -} - - -function applyInsertOp(text, offset, op) { - let originalOffset = offset - - if (typeof op.insert === 'string') { - text.insertAt(offset, ...op.insert.split('')) - offset += op.insert.length - } else { - // we have an embed or something similar - text.insertAt(offset, op.insert) - offset += 1 - } - - if (op.attributes) { - text.insertAt(originalOffset, { attributes: op.attributes }) - offset += 1 - } - if (op.attributes) { - text.insertAt(offset, { attributes: inverseAttributes(op.attributes) }) - offset += 1 - } - return [text, offset] -} - -// XXX: uhhhhh, why can't I pass in text? 
-function applyDeltaDocToAutomergeText(delta, doc) { - let offset = 0 - - delta.forEach(op => { - if (op.retain) { - [, offset] = applyRetainOp(doc.text, offset, op) - } else if (op.delete) { - [, offset] = applyDeleteOp(doc.text, offset, op) - } else if (op.insert) { - [, offset] = applyInsertOp(doc.text, offset, op) - } - }) -} - -describe('Automerge.Text', () => { - let s1, s2 - beforeEach(() => { - s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text()) - s2 = Automerge.merge(Automerge.init(), s1) - }) - - it('should support insertion', () => { - s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a')) - assert.strictEqual(s1.text.length, 1) - assert.strictEqual(s1.text.get(0), 'a') - assert.strictEqual(s1.text.toString(), 'a') - //assert.strictEqual(s1.text.getElemId(0), `2@${Automerge.getActorId(s1)}`) - }) - - it('should support deletion', () => { - s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', 'b', 'c')) - s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 1)) - assert.strictEqual(s1.text.length, 2) - assert.strictEqual(s1.text.get(0), 'a') - assert.strictEqual(s1.text.get(1), 'c') - assert.strictEqual(s1.text.toString(), 'ac') - }) - - it("should support implicit and explicit deletion", () => { - s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a", "b", "c")) - s1 = Automerge.change(s1, doc => doc.text.deleteAt(1)) - s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 0)) - assert.strictEqual(s1.text.length, 2) - assert.strictEqual(s1.text.get(0), "a") - assert.strictEqual(s1.text.get(1), "c") - assert.strictEqual(s1.text.toString(), "ac") - }) - - it('should handle concurrent insertion', () => { - s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', 'b', 'c')) - s2 = Automerge.change(s2, doc => doc.text.insertAt(0, 'x', 'y', 'z')) - s1 = Automerge.merge(s1, s2) - assert.strictEqual(s1.text.length, 6) - assertEqualsOneOf(s1.text.toString(), 'abcxyz', 'xyzabc') - assertEqualsOneOf(s1.text.join(''), 
'abcxyz', 'xyzabc') - }) - - it('should handle text and other ops in the same change', () => { - s1 = Automerge.change(s1, doc => { - doc.foo = 'bar' - doc.text.insertAt(0, 'a') - }) - assert.strictEqual(s1.foo, 'bar') - assert.strictEqual(s1.text.toString(), 'a') - assert.strictEqual(s1.text.join(''), 'a') - }) - - it('should serialize to JSON as a simple string', () => { - s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', '"', 'b')) - assert.strictEqual(JSON.stringify(s1), '{"text":"a\\"b"}') - }) - - it('should allow modification before an object is assigned to a document', () => { - s1 = Automerge.change(Automerge.init(), doc => { - const text = new Automerge.Text() - text.insertAt(0, 'a', 'b', 'c', 'd') - text.deleteAt(2) - doc.text = text - assert.strictEqual(doc.text.toString(), 'abd') - assert.strictEqual(doc.text.join(''), 'abd') - }) - assert.strictEqual(s1.text.toString(), 'abd') - assert.strictEqual(s1.text.join(''), 'abd') - }) - - it('should allow modification after an object is assigned to a document', () => { - s1 = Automerge.change(Automerge.init(), doc => { - const text = new Automerge.Text() - doc.text = text - doc.text.insertAt(0, 'a', 'b', 'c', 'd') - doc.text.deleteAt(2) - assert.strictEqual(doc.text.toString(), 'abd') - assert.strictEqual(doc.text.join(''), 'abd') - }) - assert.strictEqual(s1.text.join(''), 'abd') - }) - - it('should not allow modification outside of a change callback', () => { - assert.throws(() => s1.text.insertAt(0, 'a'), /object cannot be modified outside of a change block/) - }) - - describe('with initial value', () => { - it('should accept a string as initial value', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text('init')) - assert.strictEqual(s1.text.length, 4) - assert.strictEqual(s1.text.get(0), 'i') - assert.strictEqual(s1.text.get(1), 'n') - assert.strictEqual(s1.text.get(2), 'i') - assert.strictEqual(s1.text.get(3), 't') - assert.strictEqual(s1.text.toString(), 
'init') - }) - - it('should accept an array as initial value', () => { - let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text(['i', 'n', 'i', 't'])) - assert.strictEqual(s1.text.length, 4) - assert.strictEqual(s1.text.get(0), 'i') - assert.strictEqual(s1.text.get(1), 'n') - assert.strictEqual(s1.text.get(2), 'i') - assert.strictEqual(s1.text.get(3), 't') - assert.strictEqual(s1.text.toString(), 'init') - }) - - it('should initialize text in Automerge.from()', () => { - let s1 = Automerge.from({text: new Automerge.Text('init')}) - assert.strictEqual(s1.text.length, 4) - assert.strictEqual(s1.text.get(0), 'i') - assert.strictEqual(s1.text.get(1), 'n') - assert.strictEqual(s1.text.get(2), 'i') - assert.strictEqual(s1.text.get(3), 't') - assert.strictEqual(s1.text.toString(), 'init') - }) - - it('should encode the initial value as a change', () => { - const s1 = Automerge.from({text: new Automerge.Text('init')}) - const changes = Automerge.getAllChanges(s1) - assert.strictEqual(changes.length, 1) - const [s2] = Automerge.applyChanges(Automerge.init(), changes) - assert.strictEqual(s2.text instanceof Automerge.Text, true) - assert.strictEqual(s2.text.toString(), 'init') - assert.strictEqual(s2.text.join(''), 'init') - }) - - it('should allow immediate access to the value', () => { - Automerge.change(Automerge.init(), doc => { - const text = new Automerge.Text('init') - assert.strictEqual(text.length, 4) - assert.strictEqual(text.get(0), 'i') - assert.strictEqual(text.toString(), 'init') - doc.text = text - assert.strictEqual(doc.text.length, 4) - assert.strictEqual(doc.text.get(0), 'i') - assert.strictEqual(doc.text.toString(), 'init') - }) - }) - - it('should allow pre-assignment modification of the initial value', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - const text = new Automerge.Text('init') - text.deleteAt(3) - assert.strictEqual(text.join(''), 'ini') - doc.text = text - assert.strictEqual(doc.text.join(''), 
'ini') - assert.strictEqual(doc.text.toString(), 'ini') - }) - assert.strictEqual(s1.text.toString(), 'ini') - assert.strictEqual(s1.text.join(''), 'ini') - }) - - it('should allow post-assignment modification of the initial value', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - const text = new Automerge.Text('init') - doc.text = text - doc.text.deleteAt(0) - doc.text.insertAt(0, 'I') - assert.strictEqual(doc.text.join(''), 'Init') - assert.strictEqual(doc.text.toString(), 'Init') - }) - assert.strictEqual(s1.text.join(''), 'Init') - assert.strictEqual(s1.text.toString(), 'Init') - }) - }) - - describe('non-textual control characters', () => { - let s1 - beforeEach(() => { - s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text() - doc.text.insertAt(0, 'a') - doc.text.insertAt(1, { attribute: 'bold' }) - }) - }) - - it('should allow fetching non-textual characters', () => { - assert.deepEqual(s1.text.get(1), { attribute: 'bold' }) - //assert.strictEqual(s1.text.getElemId(1), `3@${Automerge.getActorId(s1)}`) - }) - - it('should include control characters in string length', () => { - assert.strictEqual(s1.text.length, 2) - assert.strictEqual(s1.text.get(0), 'a') - }) - - it('should exclude control characters from toString()', () => { - assert.strictEqual(s1.text.toString(), 'a') - }) - - it('should allow control characters to be updated', () => { - const s2 = Automerge.change(s1, doc => doc.text.get(1).attribute = 'italic') - const s3 = Automerge.load(Automerge.save(s2)) - assert.strictEqual(s1.text.get(1).attribute, 'bold') - assert.strictEqual(s2.text.get(1).attribute, 'italic') - assert.strictEqual(s3.text.get(1).attribute, 'italic') - }) - - describe('spans interface to Text', () => { - it('should return a simple string as a single span', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('hello world') - }) - assert.deepEqual(s1.text.toSpans(), ['hello world']) - }) - 
it('should return an empty string as an empty array', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text() - }) - assert.deepEqual(s1.text.toSpans(), []) - }) - it('should split a span at a control character', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('hello world') - doc.text.insertAt(5, { attributes: { bold: true } }) - }) - assert.deepEqual(s1.text.toSpans(), - ['hello', { attributes: { bold: true } }, ' world']) - }) - it('should allow consecutive control characters', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('hello world') - doc.text.insertAt(5, { attributes: { bold: true } }) - doc.text.insertAt(6, { attributes: { italic: true } }) - }) - assert.deepEqual(s1.text.toSpans(), - ['hello', - { attributes: { bold: true } }, - { attributes: { italic: true } }, - ' world' - ]) - }) - it('should allow non-consecutive control characters', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('hello world') - doc.text.insertAt(5, { attributes: { bold: true } }) - doc.text.insertAt(12, { attributes: { italic: true } }) - }) - assert.deepEqual(s1.text.toSpans(), - ['hello', - { attributes: { bold: true } }, - ' world', - { attributes: { italic: true } } - ]) - }) - - it('should be convertable into a Quill delta', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('Gandalf the Grey') - doc.text.insertAt(0, { attributes: { bold: true } }) - doc.text.insertAt(7 + 1, { attributes: { bold: null } }) - doc.text.insertAt(12 + 2, { attributes: { color: '#cccccc' } }) - }) - - let deltaDoc = automergeTextToDeltaDoc(s1.text) - - // From https://quilljs.com/docs/delta/ - let expectedDoc = [ - { insert: 'Gandalf', attributes: { bold: true } }, - { insert: ' the ' }, - { insert: 'Grey', attributes: { color: '#cccccc' } } - ] - - 
assert.deepEqual(deltaDoc, expectedDoc) - }) - - it('should support embeds', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('') - doc.text.insertAt(0, { attributes: { link: 'https://quilljs.com' } }) - doc.text.insertAt(1, { - image: 'https://quilljs.com/assets/images/icon.png' - }) - doc.text.insertAt(2, { attributes: { link: null } }) - }) - - let deltaDoc = automergeTextToDeltaDoc(s1.text) - - // From https://quilljs.com/docs/delta/ - let expectedDoc = [{ - // An image link - insert: { - image: 'https://quilljs.com/assets/images/icon.png' - }, - attributes: { - link: 'https://quilljs.com' - } - }] - - assert.deepEqual(deltaDoc, expectedDoc) - }) - - it('should handle concurrent overlapping spans', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('Gandalf the Grey') - }) - - let s2 = Automerge.merge(Automerge.init(), s1) - - let s3 = Automerge.change(s1, doc => { - doc.text.insertAt(8, { attributes: { bold: true } }) - doc.text.insertAt(16 + 1, { attributes: { bold: null } }) - }) - - let s4 = Automerge.change(s2, doc => { - doc.text.insertAt(0, { attributes: { bold: true } }) - doc.text.insertAt(11 + 1, { attributes: { bold: null } }) - }) - - let merged = Automerge.merge(s3, s4) - - let deltaDoc = automergeTextToDeltaDoc(merged.text) - - // From https://quilljs.com/docs/delta/ - let expectedDoc = [ - { insert: 'Gandalf the Grey', attributes: { bold: true } }, - ] - - assert.deepEqual(deltaDoc, expectedDoc) - }) - - it('should handle debolding spans', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('Gandalf the Grey') - }) - - let s2 = Automerge.merge(Automerge.init(), s1) - - let s3 = Automerge.change(s1, doc => { - doc.text.insertAt(0, { attributes: { bold: true } }) - doc.text.insertAt(16 + 1, { attributes: { bold: null } }) - }) - - let s4 = Automerge.change(s2, doc => { - doc.text.insertAt(8, { attributes: { bold: null 
} }) - doc.text.insertAt(11 + 1, { attributes: { bold: true } }) - }) - - - let merged = Automerge.merge(s3, s4) - - let deltaDoc = automergeTextToDeltaDoc(merged.text) - - // From https://quilljs.com/docs/delta/ - let expectedDoc = [ - { insert: 'Gandalf ', attributes: { bold: true } }, - { insert: 'the' }, - { insert: ' Grey', attributes: { bold: true } }, - ] - - assert.deepEqual(deltaDoc, expectedDoc) - }) - - // xxx: how would this work for colors? - it('should handle destyling across destyled spans', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('Gandalf the Grey') - }) - - let s2 = Automerge.merge(Automerge.init(), s1) - - let s3 = Automerge.change(s1, doc => { - doc.text.insertAt(0, { attributes: { bold: true } }) - doc.text.insertAt(16 + 1, { attributes: { bold: null } }) - }) - - let s4 = Automerge.change(s2, doc => { - doc.text.insertAt(8, { attributes: { bold: null } }) - doc.text.insertAt(11 + 1, { attributes: { bold: true } }) - }) - - let merged = Automerge.merge(s3, s4) - - let final = Automerge.change(merged, doc => { - doc.text.insertAt(3 + 1, { attributes: { bold: null } }) - doc.text.insertAt(doc.text.length, { attributes: { bold: true } }) - }) - - let deltaDoc = automergeTextToDeltaDoc(final.text) - - // From https://quilljs.com/docs/delta/ - let expectedDoc = [ - { insert: 'Gan', attributes: { bold: true } }, - { insert: 'dalf the Grey' }, - ] - - assert.deepEqual(deltaDoc, expectedDoc) - }) - - it('should apply an insert', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('Hello world') - }) - - const delta = [ - { retain: 6 }, - { insert: 'reader' }, - { delete: 5 } - ] - - let s2 = Automerge.change(s1, doc => { - applyDeltaDocToAutomergeText(delta, doc) - }) - - //assert.strictEqual(s2.text.join(''), 'Hello reader') - assert.strictEqual(s2.text.toString(), 'Hello reader') - }) - - it('should apply an insert with control characters', () => { - let 
s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('Hello world') - }) - - const delta = [ - { retain: 6 }, - { insert: 'reader', attributes: { bold: true } }, - { delete: 5 }, - { insert: '!' } - ] - - let s2 = Automerge.change(s1, doc => { - applyDeltaDocToAutomergeText(delta, doc) - }) - - assert.strictEqual(s2.text.toString(), 'Hello reader!') - assert.deepEqual(s2.text.toSpans(), [ - "Hello ", - { attributes: { bold: true } }, - "reader", - { attributes: { bold: null } }, - "!" - ]) - }) - - it('should account for control characters in retain/delete lengths', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('Hello world') - doc.text.insertAt(4, { attributes: { color: '#ccc' } }) - doc.text.insertAt(10, { attributes: { color: '#f00' } }) - }) - - const delta = [ - { retain: 6 }, - { insert: 'reader', attributes: { bold: true } }, - { delete: 5 }, - { insert: '!' } - ] - - let s2 = Automerge.change(s1, doc => { - applyDeltaDocToAutomergeText(delta, doc) - }) - - assert.strictEqual(s2.text.toString(), 'Hello reader!') - assert.deepEqual(s2.text.toSpans(), [ - "Hell", - { attributes: { color: '#ccc'} }, - "o ", - { attributes: { bold: true } }, - "reader", - { attributes: { bold: null } }, - { attributes: { color: '#f00'} }, - "!" 
- ]) - }) - - it('should support embeds', () => { - let s1 = Automerge.change(Automerge.init(), doc => { - doc.text = new Automerge.Text('') - }) - - let deltaDoc = [{ - // An image link - insert: { - image: 'https://quilljs.com/assets/images/icon.png' - }, - attributes: { - link: 'https://quilljs.com' - } - }] - - let s2 = Automerge.change(s1, doc => { - applyDeltaDocToAutomergeText(deltaDoc, doc) - }) - - assert.deepEqual(s2.text.toSpans(), [ - { attributes: { link: 'https://quilljs.com' } }, - { image: 'https://quilljs.com/assets/images/icon.png'}, - { attributes: { link: null } }, - ]) - }) - }) - }) - - it('should support unicode when creating text', () => { - s1 = Automerge.from({ - text: new Automerge.Text('🐦') - }) - assert.strictEqual(s1.text.get(0), '🐦') - }) -}) diff --git a/automerge-js/test/uuid_test.ts b/automerge-js/test/uuid_test.ts deleted file mode 100644 index 1bed4f49..00000000 --- a/automerge-js/test/uuid_test.ts +++ /dev/null @@ -1,35 +0,0 @@ -import * as assert from 'assert' -import * as Automerge from '../src' -import * as AutomergeWASM from "automerge-wasm" - -Automerge.use(AutomergeWASM) - -const uuid = Automerge.uuid - -describe('uuid', () => { - afterEach(() => { - uuid.reset() - }) - - describe('default implementation', () => { - it('generates unique values', () => { - assert.notEqual(uuid(), uuid()) - }) - }) - - describe('custom implementation', () => { - let counter - - function customUuid() { - return `custom-uuid-${counter++}` - } - - before(() => uuid.setFactory(customUuid)) - beforeEach(() => counter = 0) - - it('invokes the custom factory', () => { - assert.equal(uuid(), 'custom-uuid-0') - assert.equal(uuid(), 'custom-uuid-1') - }) - }) -}) diff --git a/automerge-js/tsconfig.json b/automerge-js/tsconfig.json deleted file mode 100644 index 01500ed5..00000000 --- a/automerge-js/tsconfig.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "compilerOptions": { - "target": "es2016", - "sourceMap": false, - "declaration": false, - 
"resolveJsonModule": true, - "module": "commonjs", - "moduleResolution": "node", - "noImplicitAny": false, - "allowSyntheticDefaultImports": true, - "forceConsistentCasingInFileNames": true, - "strict": true, - "noFallthroughCasesInSwitch": true, - "skipLibCheck": true, - "outDir": "./dist" - }, - "include": [ "src/**/*" ], - "exclude": [ - "./dist/**/*", - "./node_modules" - ] -} diff --git a/automerge-wasm/.eslintrc.cjs b/automerge-wasm/.eslintrc.cjs deleted file mode 100644 index 80e08d55..00000000 --- a/automerge-wasm/.eslintrc.cjs +++ /dev/null @@ -1,11 +0,0 @@ -module.exports = { - root: true, - parser: '@typescript-eslint/parser', - plugins: [ - '@typescript-eslint', - ], - extends: [ - 'eslint:recommended', - 'plugin:@typescript-eslint/recommended', - ], -}; diff --git a/automerge-wasm/examples/webpack/webpack.config.js b/automerge-wasm/examples/webpack/webpack.config.js deleted file mode 100644 index 3ab0e798..00000000 --- a/automerge-wasm/examples/webpack/webpack.config.js +++ /dev/null @@ -1,35 +0,0 @@ -const path = require('path'); -const nodeExternals = require('webpack-node-externals'); - -// the most basic webpack config for node or web targets for automerge-wasm - -const serverConfig = { - // basic setup for bundling a node package - target: 'node', - externals: [nodeExternals()], - externalsPresets: { node: true }, - - entry: './src/index.js', - output: { - filename: 'node.js', - path: path.resolve(__dirname, 'dist'), - }, - mode: "development", // or production -}; - -const clientConfig = { - target: 'web', - entry: './src/index.js', - output: { - filename: 'main.js', - path: path.resolve(__dirname, 'public'), - }, - mode: "development", // or production - performance: { // we dont want the wasm blob to generate warnings - hints: false, - maxEntrypointSize: 512000, - maxAssetSize: 512000 - } -}; - -module.exports = [serverConfig, clientConfig]; diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts deleted file mode 100644 index 
aef38dbe..00000000 --- a/automerge-wasm/index.d.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Automerge as VanillaAutomerge } from "automerge-types" - -export * from "automerge-types" -export { default } from "automerge-types" - -export class Automerge extends VanillaAutomerge { - // experimental api can go here - applyPatches(obj: any): any; - - // override old methods that return automerge - clone(actor?: string): Automerge; - fork(actor?: string): Automerge; - forkAt(heads: Heads, actor?: string): Automerge; -} - -export function create(actor?: Actor): Automerge; -export function load(data: Uint8Array, actor?: Actor): Automerge; diff --git a/automerge-wasm/nodejs-index.js b/automerge-wasm/nodejs-index.js deleted file mode 100644 index 4a42f201..00000000 --- a/automerge-wasm/nodejs-index.js +++ /dev/null @@ -1,5 +0,0 @@ -let wasm = require("./bindgen") -module.exports = wasm -module.exports.load = module.exports.loadDoc -delete module.exports.loadDoc -module.exports.init = () => (new Promise((resolve,reject) => { resolve(module.exports) })) diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json deleted file mode 100644 index 0410dd52..00000000 --- a/automerge-wasm/package.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "collaborators": [ - "Orion Henry ", - "Alex Good ", - "Martin Kleppmann" - ], - "name": "automerge-wasm", - "description": "wasm-bindgen bindings to the automerge rust implementation", - "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", - "repository": "github:automerge/automerge-rs", - "version": "0.1.6", - "license": "MIT", - "files": [ - "README.md", - "LICENSE", - "package.json", - "index.d.ts", - "nodejs/index.js", - "nodejs/bindgen.js", - "nodejs/bindgen_bg.wasm", - "web/index.js", - "web/bindgen.js", - "web/bindgen_bg.wasm" - ], - "types": "index.d.ts", - "module": "./web/index.js", - "main": "./nodejs/index.js", - "scripts": { - "lint": "eslint test/*.ts", - "build": "cross-env PROFILE=dev 
TARGET=nodejs FEATURES='' yarn target", - "release": "cross-env PROFILE=release yarn buildall", - "buildall": "cross-env TARGET=nodejs yarn target && cross-env TARGET=web yarn target", - "target": "rimraf ./$TARGET && wasm-pack build --target $TARGET --$PROFILE --out-name bindgen -d $TARGET -- $FEATURES && cp $TARGET-index.js $TARGET/index.js", - "test": "ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" - }, - "devDependencies": { - "@types/expect": "^24.3.0", - "@types/jest": "^27.4.0", - "@types/mocha": "^9.1.0", - "@types/node": "^17.0.13", - "@types/uuid": "^8.3.4", - "@typescript-eslint/eslint-plugin": "^5.25.0", - "@typescript-eslint/parser": "^5.25.0", - "cross-env": "^7.0.3", - "eslint": "^8.16.0", - "fast-sha256": "^1.3.0", - "mocha": "^9.1.3", - "pako": "^2.0.4", - "rimraf": "^3.0.2", - "ts-mocha": "^9.0.2", - "typescript": "^4.6.4" - }, - "dependencies": { - "automerge-types": "0.1.5" - } -} diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs deleted file mode 100644 index 1f67e6ec..00000000 --- a/automerge-wasm/src/interop.rs +++ /dev/null @@ -1,603 +0,0 @@ -use crate::AutoCommit; -use automerge as am; -use automerge::transaction::Transactable; -use automerge::{Change, ChangeHash, Prop}; -use js_sys::{Array, Function, Object, Reflect, Uint8Array}; -use std::collections::{BTreeSet, HashSet}; -use std::fmt::Display; -use wasm_bindgen::prelude::*; -use wasm_bindgen::JsCast; - -use crate::{observer::Patch, ObjId, ScalarValue, Value}; - -pub(crate) struct JS(pub(crate) JsValue); -pub(crate) struct AR(pub(crate) Array); - -impl From for JsValue { - fn from(ar: AR) -> Self { - ar.0.into() - } -} - -impl From for JsValue { - fn from(js: JS) -> Self { - js.0 - } -} - -impl From for JS { - fn from(state: am::sync::State) -> Self { - let shared_heads: JS = state.shared_heads.into(); - let last_sent_heads: JS = state.last_sent_heads.into(); - let their_heads: JS = state.their_heads.into(); - let their_need: JS = 
state.their_need.into(); - let sent_hashes: JS = state.sent_hashes.into(); - let their_have = if let Some(have) = &state.their_have { - JsValue::from(AR::from(have.as_slice()).0) - } else { - JsValue::null() - }; - let result: JsValue = Object::new().into(); - // we can unwrap here b/c we made the object and know its not frozen - Reflect::set(&result, &"sharedHeads".into(), &shared_heads.0).unwrap(); - Reflect::set(&result, &"lastSentHeads".into(), &last_sent_heads.0).unwrap(); - Reflect::set(&result, &"theirHeads".into(), &their_heads.0).unwrap(); - Reflect::set(&result, &"theirNeed".into(), &their_need.0).unwrap(); - Reflect::set(&result, &"theirHave".into(), &their_have).unwrap(); - Reflect::set(&result, &"sentHashes".into(), &sent_hashes.0).unwrap(); - JS(result) - } -} - -impl From> for JS { - fn from(heads: Vec) -> Self { - let heads: Array = heads - .iter() - .map(|h| JsValue::from_str(&h.to_string())) - .collect(); - JS(heads.into()) - } -} - -impl From> for JS { - fn from(heads: HashSet) -> Self { - let result: JsValue = Object::new().into(); - for key in &heads { - Reflect::set(&result, &key.to_string().into(), &true.into()).unwrap(); - } - JS(result) - } -} - -impl From> for JS { - fn from(heads: BTreeSet) -> Self { - let result: JsValue = Object::new().into(); - for key in &heads { - Reflect::set(&result, &key.to_string().into(), &true.into()).unwrap(); - } - JS(result) - } -} - -impl From>> for JS { - fn from(heads: Option>) -> Self { - if let Some(v) = heads { - let v: Array = v - .iter() - .map(|h| JsValue::from_str(&h.to_string())) - .collect(); - JS(v.into()) - } else { - JS(JsValue::null()) - } - } -} - -impl TryFrom for HashSet { - type Error = JsValue; - - fn try_from(value: JS) -> Result { - let mut result = HashSet::new(); - for key in Reflect::own_keys(&value.0)?.iter() { - if let Some(true) = Reflect::get(&value.0, &key)?.as_bool() { - result.insert(serde_wasm_bindgen::from_value(key).map_err(to_js_err)?); - } - } - Ok(result) - } -} - -impl 
TryFrom for BTreeSet { - type Error = JsValue; - - fn try_from(value: JS) -> Result { - let mut result = BTreeSet::new(); - for key in Reflect::own_keys(&value.0)?.iter() { - if let Some(true) = Reflect::get(&value.0, &key)?.as_bool() { - result.insert(serde_wasm_bindgen::from_value(key).map_err(to_js_err)?); - } - } - Ok(result) - } -} - -impl TryFrom for Vec { - type Error = JsValue; - - fn try_from(value: JS) -> Result { - let value = value.0.dyn_into::()?; - let value: Result, _> = - value.iter().map(serde_wasm_bindgen::from_value).collect(); - let value = value.map_err(to_js_err)?; - Ok(value) - } -} - -impl From for Option> { - fn from(value: JS) -> Self { - let value = value.0.dyn_into::().ok()?; - let value: Result, _> = - value.iter().map(serde_wasm_bindgen::from_value).collect(); - let value = value.ok()?; - Some(value) - } -} - -impl TryFrom for Vec { - type Error = JsValue; - - fn try_from(value: JS) -> Result { - let value = value.0.dyn_into::()?; - let changes: Result, _> = value.iter().map(|j| j.dyn_into()).collect(); - let changes = changes?; - let changes = changes.iter().try_fold(Vec::new(), |mut acc, arr| { - match automerge::Change::try_from(arr.to_vec().as_slice()) { - Ok(c) => acc.push(c), - Err(e) => return Err(to_js_err(e)), - } - Ok(acc) - })?; - Ok(changes) - } -} - -impl TryFrom for am::sync::State { - type Error = JsValue; - - fn try_from(value: JS) -> Result { - let value = value.0; - let shared_heads = js_get(&value, "sharedHeads")?.try_into()?; - let last_sent_heads = js_get(&value, "lastSentHeads")?.try_into()?; - let their_heads = js_get(&value, "theirHeads")?.into(); - let their_need = js_get(&value, "theirNeed")?.into(); - let their_have = js_get(&value, "theirHave")?.try_into()?; - let sent_hashes = js_get(&value, "sentHashes")?.try_into()?; - Ok(am::sync::State { - shared_heads, - last_sent_heads, - their_heads, - their_need, - their_have, - sent_hashes, - }) - } -} - -impl TryFrom for Option> { - type Error = JsValue; - - fn 
try_from(value: JS) -> Result { - if value.0.is_null() { - Ok(None) - } else { - Ok(Some(value.try_into()?)) - } - } -} - -impl TryFrom for Vec { - type Error = JsValue; - - fn try_from(value: JS) -> Result { - let value = value.0.dyn_into::()?; - let have: Result, JsValue> = value - .iter() - .map(|s| { - let last_sync = js_get(&s, "lastSync")?.try_into()?; - let bloom = js_get(&s, "bloom")?.try_into()?; - Ok(am::sync::Have { last_sync, bloom }) - }) - .collect(); - let have = have?; - Ok(have) - } -} - -impl TryFrom for am::sync::BloomFilter { - type Error = JsValue; - - fn try_from(value: JS) -> Result { - let value: Uint8Array = value.0.dyn_into()?; - let value = value.to_vec(); - let value = value.as_slice().try_into().map_err(to_js_err)?; - Ok(value) - } -} - -impl From<&[ChangeHash]> for AR { - fn from(value: &[ChangeHash]) -> Self { - AR(value - .iter() - .map(|h| JsValue::from_str(&hex::encode(&h.0))) - .collect()) - } -} - -impl From<&[Change]> for AR { - fn from(value: &[Change]) -> Self { - let changes: Array = value - .iter() - .map(|c| Uint8Array::from(c.raw_bytes())) - .collect(); - AR(changes) - } -} - -impl From<&[am::sync::Have]> for AR { - fn from(value: &[am::sync::Have]) -> Self { - AR(value - .iter() - .map(|have| { - let last_sync: Array = have - .last_sync - .iter() - .map(|h| JsValue::from_str(&hex::encode(&h.0))) - .collect(); - // FIXME - the clone and the unwrap here shouldnt be needed - look at into_bytes() - let bloom = Uint8Array::from(have.bloom.to_bytes().as_slice()); - let obj: JsValue = Object::new().into(); - // we can unwrap here b/c we created the object and know its not frozen - Reflect::set(&obj, &"lastSync".into(), &last_sync.into()).unwrap(); - Reflect::set(&obj, &"bloom".into(), &bloom.into()).unwrap(); - obj - }) - .collect()) - } -} - -pub(crate) fn to_js_err(err: T) -> JsValue { - js_sys::Error::new(&std::format!("{}", err)).into() -} - -pub(crate) fn js_get>(obj: J, prop: &str) -> Result { - 
Ok(JS(Reflect::get(&obj.into(), &prop.into())?)) -} - -pub(crate) fn js_set>(obj: &JsValue, prop: &str, val: V) -> Result { - Reflect::set(obj, &prop.into(), &val.into()) -} - -pub(crate) fn to_prop(p: JsValue) -> Result { - if let Some(s) = p.as_string() { - Ok(Prop::Map(s)) - } else if let Some(n) = p.as_f64() { - Ok(Prop::Seq(n as usize)) - } else { - Err(to_js_err("prop must me a string or number")) - } -} - -pub(crate) fn to_objtype( - value: &JsValue, - datatype: &Option, -) -> Option<(am::ObjType, Vec<(Prop, JsValue)>)> { - match datatype.as_deref() { - Some("map") => { - let map = value.clone().dyn_into::().ok()?; - // FIXME unwrap - let map = js_sys::Object::keys(&map) - .iter() - .zip(js_sys::Object::values(&map).iter()) - .map(|(key, val)| (key.as_string().unwrap().into(), val)) - .collect(); - Some((am::ObjType::Map, map)) - } - Some("list") => { - let list = value.clone().dyn_into::().ok()?; - let list = list - .iter() - .enumerate() - .map(|(i, e)| (i.into(), e)) - .collect(); - Some((am::ObjType::List, list)) - } - Some("text") => { - let text = value.as_string()?; - let text = text - .chars() - .enumerate() - .map(|(i, ch)| (i.into(), ch.to_string().into())) - .collect(); - Some((am::ObjType::Text, text)) - } - Some(_) => None, - None => { - if let Ok(list) = value.clone().dyn_into::() { - let list = list - .iter() - .enumerate() - .map(|(i, e)| (i.into(), e)) - .collect(); - Some((am::ObjType::List, list)) - } else if let Ok(map) = value.clone().dyn_into::() { - // FIXME unwrap - let map = js_sys::Object::keys(&map) - .iter() - .zip(js_sys::Object::values(&map).iter()) - .map(|(key, val)| (key.as_string().unwrap().into(), val)) - .collect(); - Some((am::ObjType::Map, map)) - } else if let Some(text) = value.as_string() { - let text = text - .chars() - .enumerate() - .map(|(i, ch)| (i.into(), ch.to_string().into())) - .collect(); - Some((am::ObjType::Text, text)) - } else { - None - } - } - } -} - -pub(crate) fn get_heads(heads: Option) -> Option> { 
- let heads = heads?; - let heads: Result, _> = - heads.iter().map(serde_wasm_bindgen::from_value).collect(); - heads.ok() -} - -pub(crate) fn map_to_js(doc: &AutoCommit, obj: &ObjId) -> JsValue { - let keys = doc.keys(obj); - let map = Object::new(); - for k in keys { - let val = doc.get(obj, &k); - match val { - Ok(Some((Value::Object(o), exid))) - if o == am::ObjType::Map || o == am::ObjType::Table => - { - Reflect::set(&map, &k.into(), &map_to_js(doc, &exid)).unwrap(); - } - Ok(Some((Value::Object(o), exid))) if o == am::ObjType::List => { - Reflect::set(&map, &k.into(), &list_to_js(doc, &exid)).unwrap(); - } - Ok(Some((Value::Object(o), exid))) if o == am::ObjType::Text => { - Reflect::set(&map, &k.into(), &doc.text(&exid).unwrap().into()).unwrap(); - } - Ok(Some((Value::Scalar(v), _))) => { - Reflect::set(&map, &k.into(), &ScalarValue(v).into()).unwrap(); - } - _ => (), - }; - } - map.into() -} - -pub(crate) fn map_to_js_at(doc: &AutoCommit, obj: &ObjId, heads: &[ChangeHash]) -> JsValue { - let keys = doc.keys(obj); - let map = Object::new(); - for k in keys { - let val = doc.get_at(obj, &k, heads); - match val { - Ok(Some((Value::Object(o), exid))) - if o == am::ObjType::Map || o == am::ObjType::Table => - { - Reflect::set(&map, &k.into(), &map_to_js_at(doc, &exid, heads)).unwrap(); - } - Ok(Some((Value::Object(o), exid))) if o == am::ObjType::List => { - Reflect::set(&map, &k.into(), &list_to_js_at(doc, &exid, heads)).unwrap(); - } - Ok(Some((Value::Object(o), exid))) if o == am::ObjType::Text => { - Reflect::set(&map, &k.into(), &doc.text_at(&exid, heads).unwrap().into()).unwrap(); - } - Ok(Some((Value::Scalar(v), _))) => { - Reflect::set(&map, &k.into(), &ScalarValue(v).into()).unwrap(); - } - _ => (), - }; - } - map.into() -} - -pub(crate) fn list_to_js(doc: &AutoCommit, obj: &ObjId) -> JsValue { - let len = doc.length(obj); - let array = Array::new(); - for i in 0..len { - let val = doc.get(obj, i as usize); - match val { - Ok(Some((Value::Object(o), 
exid))) - if o == am::ObjType::Map || o == am::ObjType::Table => - { - array.push(&map_to_js(doc, &exid)); - } - Ok(Some((Value::Object(o), exid))) if o == am::ObjType::List => { - array.push(&list_to_js(doc, &exid)); - } - Ok(Some((Value::Object(o), exid))) if o == am::ObjType::Text => { - array.push(&doc.text(&exid).unwrap().into()); - } - Ok(Some((Value::Scalar(v), _))) => { - array.push(&ScalarValue(v).into()); - } - _ => (), - }; - } - array.into() -} - -pub(crate) fn list_to_js_at(doc: &AutoCommit, obj: &ObjId, heads: &[ChangeHash]) -> JsValue { - let len = doc.length(obj); - let array = Array::new(); - for i in 0..len { - let val = doc.get_at(obj, i as usize, heads); - match val { - Ok(Some((Value::Object(o), exid))) - if o == am::ObjType::Map || o == am::ObjType::Table => - { - array.push(&map_to_js_at(doc, &exid, heads)); - } - Ok(Some((Value::Object(o), exid))) if o == am::ObjType::List => { - array.push(&list_to_js_at(doc, &exid, heads)); - } - Ok(Some((Value::Object(o), exid))) if o == am::ObjType::Text => { - array.push(&doc.text_at(exid, heads).unwrap().into()); - } - Ok(Some((Value::Scalar(v), _))) => { - array.push(&ScalarValue(v).into()); - } - _ => (), - }; - } - array.into() -} - -/* -pub(crate) fn export_values<'a, V: Iterator>>(val: V) -> Array { - val.map(|v| export_value(&v)).collect() -} -*/ - -pub(crate) fn export_value(val: &Value<'_>) -> JsValue { - match val { - Value::Object(o) if o == &am::ObjType::Map || o == &am::ObjType::Table => { - Object::new().into() - } - Value::Object(_) => Array::new().into(), - Value::Scalar(v) => ScalarValue(v.clone()).into(), - } -} - -pub(crate) fn apply_patch(obj: JsValue, patch: &Patch) -> Result { - apply_patch2(obj, patch, 0) -} - -pub(crate) fn apply_patch2(obj: JsValue, patch: &Patch, depth: usize) -> Result { - match (js_to_map_seq(&obj)?, patch.path().get(depth)) { - (JsObj::Map(o), Some(Prop::Map(key))) => { - let sub_obj = Reflect::get(&obj, &key.into())?; - let new_value = apply_patch2(sub_obj, 
patch, depth + 1)?; - let result = - Reflect::construct(&o.constructor(), &Array::new())?.dyn_into::()?; - let result = Object::assign(&result, &o).into(); - Reflect::set(&result, &key.into(), &new_value)?; - Ok(result) - } - (JsObj::Seq(a), Some(Prop::Seq(index))) => { - let index = JsValue::from_f64(*index as f64); - let sub_obj = Reflect::get(&obj, &index)?; - let new_value = apply_patch2(sub_obj, patch, depth + 1)?; - let result = Reflect::construct(&a.constructor(), &a)?; - //web_sys::console::log_2(&format!("NEW VAL {}: ", tmpi).into(), &new_value); - Reflect::set(&result, &index, &new_value)?; - Ok(result) - } - (JsObj::Map(o), None) => { - let result = - Reflect::construct(&o.constructor(), &Array::new())?.dyn_into::()?; - let result = Object::assign(&result, &o); - match patch { - Patch::PutMap { key, value, .. } => { - let result = result.into(); - Reflect::set(&result, &key.into(), &export_value(value))?; - Ok(result) - } - Patch::DeleteMap { key, .. } => { - Reflect::delete_property(&result, &key.into())?; - Ok(result.into()) - } - Patch::Increment { prop, value, .. } => { - let result = result.into(); - if let Prop::Map(key) = prop { - let key = key.into(); - let old_val = Reflect::get(&o, &key)?; - if let Some(old) = old_val.as_f64() { - Reflect::set(&result, &key, &JsValue::from(old + *value as f64))?; - Ok(result) - } else { - Err(to_js_err("cant increment a non number value")) - } - } else { - Err(to_js_err("cant increment an index on a map")) - } - } - Patch::Insert { .. } => Err(to_js_err("cannot insert into map")), - Patch::DeleteSeq { .. } => Err(to_js_err("cannot splice a map")), - Patch::PutSeq { .. } => Err(to_js_err("cannot array index a map")), - } - } - (JsObj::Seq(a), None) => { - match patch { - Patch::PutSeq { index, value, .. } => { - let result = Reflect::construct(&a.constructor(), &a)?; - Reflect::set(&result, &(*index as f64).into(), &export_value(value))?; - Ok(result) - } - Patch::DeleteSeq { index, .. 
} => { - let result = &a.dyn_into::()?; - let mut f = |_, i, _| i != *index as u32; - let result = result.filter(&mut f); - - Ok(result.into()) - } - Patch::Insert { index, values, .. } => { - let from = Reflect::get(&a.constructor().into(), &"from".into())? - .dyn_into::()?; - let result = from.call1(&JsValue::undefined(), &a)?.dyn_into::()?; - // TODO: should be one function call - for (i, v) in values.iter().enumerate() { - result.splice(*index as u32 + i as u32, 0, &export_value(v)); - } - Ok(result.into()) - } - Patch::Increment { prop, value, .. } => { - let result = Reflect::construct(&a.constructor(), &a)?; - if let Prop::Seq(index) = prop { - let index = (*index as f64).into(); - let old_val = Reflect::get(&a, &index)?; - if let Some(old) = old_val.as_f64() { - Reflect::set(&result, &index, &JsValue::from(old + *value as f64))?; - Ok(result) - } else { - Err(to_js_err("cant increment a non number value")) - } - } else { - Err(to_js_err("cant increment a key on a seq")) - } - } - Patch::DeleteMap { .. } => Err(to_js_err("cannot delete from a seq")), - Patch::PutMap { .. 
} => Err(to_js_err("cannot set key in seq")), - } - } - (_, _) => Err(to_js_err(format!( - "object/patch missmatch {:?} depth={:?}", - patch, depth - ))), - } -} - -#[derive(Debug)] -enum JsObj { - Map(Object), - Seq(Array), -} - -fn js_to_map_seq(value: &JsValue) -> Result { - if let Ok(array) = value.clone().dyn_into::() { - Ok(JsObj::Seq(array)) - } else if let Ok(obj) = value.clone().dyn_into::() { - Ok(JsObj::Map(obj)) - } else { - Err(to_js_err("obj is not Object or Array")) - } -} diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs deleted file mode 100644 index c039d171..00000000 --- a/automerge-wasm/src/lib.rs +++ /dev/null @@ -1,834 +0,0 @@ -#![doc( - html_logo_url = "https://raw.githubusercontent.com/automerge/automerge-rs/main/img/brandmark.svg", - html_favicon_url = "https:///raw.githubusercontent.com/automerge/automerge-rs/main/img/favicon.ico" -)] -#![warn( - missing_debug_implementations, - // missing_docs, // TODO: add documentation! - rust_2021_compatibility, - rust_2018_idioms, - unreachable_pub, - bad_style, - const_err, - dead_code, - improper_ctypes, - non_shorthand_field_patterns, - no_mangle_generic_items, - overflowing_literals, - path_statements, - patterns_in_fns_without_body, - private_in_public, - unconditional_recursion, - unused, - unused_allocation, - unused_comparisons, - unused_parens, - while_true -)] -#![allow(clippy::unused_unit)] -use am::transaction::CommitOptions; -use am::transaction::Transactable; -use automerge as am; -use automerge::{Change, ObjId, Prop, Value, ROOT}; -use js_sys::{Array, Object, Uint8Array}; -use serde::Serialize; -use std::convert::TryInto; -use wasm_bindgen::prelude::*; -use wasm_bindgen::JsCast; - -mod interop; -mod observer; -mod sync; -mod value; - -use observer::Observer; - -use interop::{ - apply_patch, get_heads, js_get, js_set, list_to_js, list_to_js_at, map_to_js, map_to_js_at, - to_js_err, to_objtype, to_prop, AR, JS, -}; -use sync::SyncState; -use value::{datatype, 
ScalarValue}; - -#[allow(unused_macros)] -macro_rules! log { - ( $( $t:tt )* ) => { - web_sys::console::log_1(&format!( $( $t )* ).into()); - }; -} - -type AutoCommit = am::AutoCommitWithObs; - -#[cfg(feature = "wee_alloc")] -#[global_allocator] -static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; - -#[wasm_bindgen] -#[derive(Debug)] -pub struct Automerge { - doc: AutoCommit, -} - -#[wasm_bindgen] -impl Automerge { - pub fn new(actor: Option) -> Result { - let mut doc = AutoCommit::default(); - if let Some(a) = actor { - let a = automerge::ActorId::from(hex::decode(a).map_err(to_js_err)?.to_vec()); - doc.set_actor(a); - } - Ok(Automerge { doc }) - } - - #[allow(clippy::should_implement_trait)] - pub fn clone(&mut self, actor: Option) -> Result { - let mut automerge = Automerge { - doc: self.doc.clone(), - }; - if let Some(s) = actor { - let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); - automerge.doc.set_actor(actor); - } - Ok(automerge) - } - - pub fn fork(&mut self, actor: Option) -> Result { - let mut automerge = Automerge { - doc: self.doc.fork(), - }; - if let Some(s) = actor { - let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); - automerge.doc.set_actor(actor); - } - Ok(automerge) - } - - #[wasm_bindgen(js_name = forkAt)] - pub fn fork_at(&mut self, heads: JsValue, actor: Option) -> Result { - let deps: Vec<_> = JS(heads).try_into()?; - let mut automerge = Automerge { - doc: self.doc.fork_at(&deps)?, - }; - if let Some(s) = actor { - let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); - automerge.doc.set_actor(actor); - } - Ok(automerge) - } - - pub fn free(self) {} - - #[wasm_bindgen(js_name = pendingOps)] - pub fn pending_ops(&self) -> JsValue { - (self.doc.pending_ops() as u32).into() - } - - pub fn commit(&mut self, message: Option, time: Option) -> JsValue { - let mut commit_opts = CommitOptions::default(); - if let Some(message) = message { - 
commit_opts.set_message(message); - } - if let Some(time) = time { - commit_opts.set_time(time as i64); - } - let hash = self.doc.commit_with(commit_opts); - JsValue::from_str(&hex::encode(&hash.0)) - } - - pub fn merge(&mut self, other: &mut Automerge) -> Result { - let heads = self.doc.merge(&mut other.doc)?; - let heads: Array = heads - .iter() - .map(|h| JsValue::from_str(&hex::encode(&h.0))) - .collect(); - Ok(heads) - } - - pub fn rollback(&mut self) -> f64 { - self.doc.rollback() as f64 - } - - pub fn keys(&self, obj: JsValue, heads: Option) -> Result { - let obj = self.import(obj)?; - let result = if let Some(heads) = get_heads(heads) { - self.doc - .keys_at(&obj, &heads) - .map(|s| JsValue::from_str(&s)) - .collect() - } else { - self.doc.keys(&obj).map(|s| JsValue::from_str(&s)).collect() - }; - Ok(result) - } - - pub fn text(&self, obj: JsValue, heads: Option) -> Result { - let obj = self.import(obj)?; - if let Some(heads) = get_heads(heads) { - Ok(self.doc.text_at(&obj, &heads)?) - } else { - Ok(self.doc.text(&obj)?) 
- } - } - - pub fn splice( - &mut self, - obj: JsValue, - start: f64, - delete_count: f64, - text: JsValue, - ) -> Result<(), JsValue> { - let obj = self.import(obj)?; - let start = start as usize; - let delete_count = delete_count as usize; - let mut vals = vec![]; - if let Some(t) = text.as_string() { - self.doc.splice_text(&obj, start, delete_count, &t)?; - } else { - if let Ok(array) = text.dyn_into::() { - for i in array.iter() { - let value = self - .import_scalar(&i, &None) - .ok_or_else(|| to_js_err("expected scalar"))?; - vals.push(value); - } - } - self.doc - .splice(&obj, start, delete_count, vals.into_iter())?; - } - Ok(()) - } - - pub fn push(&mut self, obj: JsValue, value: JsValue, datatype: JsValue) -> Result<(), JsValue> { - let obj = self.import(obj)?; - let value = self - .import_scalar(&value, &datatype.as_string()) - .ok_or_else(|| to_js_err("invalid scalar value"))?; - let index = self.doc.length(&obj); - self.doc.insert(&obj, index, value)?; - Ok(()) - } - - #[wasm_bindgen(js_name = pushObject)] - pub fn push_object(&mut self, obj: JsValue, value: JsValue) -> Result, JsValue> { - let obj = self.import(obj)?; - let (value, subvals) = - to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?; - let index = self.doc.length(&obj); - let opid = self.doc.insert_object(&obj, index, value)?; - self.subset(&opid, subvals)?; - Ok(opid.to_string().into()) - } - - pub fn insert( - &mut self, - obj: JsValue, - index: f64, - value: JsValue, - datatype: JsValue, - ) -> Result<(), JsValue> { - let obj = self.import(obj)?; - let index = index as f64; - let value = self - .import_scalar(&value, &datatype.as_string()) - .ok_or_else(|| to_js_err("expected scalar value"))?; - self.doc.insert(&obj, index as usize, value)?; - Ok(()) - } - - #[wasm_bindgen(js_name = insertObject)] - pub fn insert_object( - &mut self, - obj: JsValue, - index: f64, - value: JsValue, - ) -> Result, JsValue> { - let obj = self.import(obj)?; - let index = index as f64; - 
let (value, subvals) = - to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?; - let opid = self.doc.insert_object(&obj, index as usize, value)?; - self.subset(&opid, subvals)?; - Ok(opid.to_string().into()) - } - - pub fn put( - &mut self, - obj: JsValue, - prop: JsValue, - value: JsValue, - datatype: JsValue, - ) -> Result<(), JsValue> { - let obj = self.import(obj)?; - let prop = self.import_prop(prop)?; - let value = self - .import_scalar(&value, &datatype.as_string()) - .ok_or_else(|| to_js_err("expected scalar value"))?; - self.doc.put(&obj, prop, value)?; - Ok(()) - } - - #[wasm_bindgen(js_name = putObject)] - pub fn put_object( - &mut self, - obj: JsValue, - prop: JsValue, - value: JsValue, - ) -> Result { - let obj = self.import(obj)?; - let prop = self.import_prop(prop)?; - let (value, subvals) = - to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?; - let opid = self.doc.put_object(&obj, prop, value)?; - self.subset(&opid, subvals)?; - Ok(opid.to_string().into()) - } - - fn subset(&mut self, obj: &am::ObjId, vals: Vec<(am::Prop, JsValue)>) -> Result<(), JsValue> { - for (p, v) in vals { - let (value, subvals) = self.import_value(&v, None)?; - //let opid = self.0.set(id, p, value)?; - let opid = match (p, value) { - (Prop::Map(s), Value::Object(objtype)) => { - Some(self.doc.put_object(obj, s, objtype)?) - } - (Prop::Map(s), Value::Scalar(scalar)) => { - self.doc.put(obj, s, scalar.into_owned())?; - None - } - (Prop::Seq(i), Value::Object(objtype)) => { - Some(self.doc.insert_object(obj, i, objtype)?) 
- } - (Prop::Seq(i), Value::Scalar(scalar)) => { - self.doc.insert(obj, i, scalar.into_owned())?; - None - } - }; - if let Some(opid) = opid { - self.subset(&opid, subvals)?; - } - } - Ok(()) - } - - pub fn increment( - &mut self, - obj: JsValue, - prop: JsValue, - value: JsValue, - ) -> Result<(), JsValue> { - let obj = self.import(obj)?; - let prop = self.import_prop(prop)?; - let value: f64 = value - .as_f64() - .ok_or_else(|| to_js_err("increment needs a numeric value"))?; - self.doc.increment(&obj, prop, value as i64)?; - Ok(()) - } - - #[wasm_bindgen(js_name = get)] - pub fn get( - &self, - obj: JsValue, - prop: JsValue, - heads: Option, - ) -> Result { - let obj = self.import(obj)?; - let prop = to_prop(prop); - let heads = get_heads(heads); - if let Ok(prop) = prop { - let value = if let Some(h) = heads { - self.doc.get_at(&obj, prop, &h)? - } else { - self.doc.get(&obj, prop)? - }; - match value { - Some((Value::Object(_), obj_id)) => Ok(obj_id.to_string().into()), - Some((Value::Scalar(value), _)) => Ok(ScalarValue(value).into()), - None => Ok(JsValue::undefined()), - } - } else { - Ok(JsValue::undefined()) - } - } - - #[wasm_bindgen(js_name = getWithType)] - pub fn get_with_type( - &self, - obj: JsValue, - prop: JsValue, - heads: Option, - ) -> Result { - let obj = self.import(obj)?; - let result = Array::new(); - let prop = to_prop(prop); - let heads = get_heads(heads); - if let Ok(prop) = prop { - let value = if let Some(h) = heads { - self.doc.get_at(&obj, prop, &h)? - } else { - self.doc.get(&obj, prop)? 
- }; - match value { - Some((Value::Object(obj_type), obj_id)) => { - result.push(&obj_type.to_string().into()); - result.push(&obj_id.to_string().into()); - Ok(result.into()) - } - Some((Value::Scalar(value), _)) => { - result.push(&datatype(&value).into()); - result.push(&ScalarValue(value).into()); - Ok(result.into()) - } - None => Ok(JsValue::null()), - } - } else { - Ok(JsValue::null()) - } - } - - #[wasm_bindgen(js_name = getAll)] - pub fn get_all( - &self, - obj: JsValue, - arg: JsValue, - heads: Option, - ) -> Result { - let obj = self.import(obj)?; - let result = Array::new(); - let prop = to_prop(arg); - if let Ok(prop) = prop { - let values = if let Some(heads) = get_heads(heads) { - self.doc.get_all_at(&obj, prop, &heads) - } else { - self.doc.get_all(&obj, prop) - } - .map_err(to_js_err)?; - for value in values { - match value { - (Value::Object(obj_type), obj_id) => { - let sub = Array::new(); - sub.push(&obj_type.to_string().into()); - sub.push(&obj_id.to_string().into()); - result.push(&sub.into()); - } - (Value::Scalar(value), id) => { - let sub = Array::new(); - sub.push(&datatype(&value).into()); - sub.push(&ScalarValue(value).into()); - sub.push(&id.to_string().into()); - result.push(&sub.into()); - } - } - } - } - Ok(result) - } - - #[wasm_bindgen(js_name = enablePatches)] - pub fn enable_patches(&mut self, enable: JsValue) -> Result<(), JsValue> { - let enable = enable - .as_bool() - .ok_or_else(|| to_js_err("must pass a bool to enable_patches"))?; - self.doc.observer().enable(enable); - Ok(()) - } - - #[wasm_bindgen(js_name = applyPatches)] - pub fn apply_patches(&mut self, mut object: JsValue) -> Result { - let patches = self.doc.observer().take_patches(); - for p in patches { - object = apply_patch(object, &p)?; - } - Ok(object) - } - - #[wasm_bindgen(js_name = popPatches)] - pub fn pop_patches(&mut self) -> Result { - // transactions send out observer updates as they occur, not waiting for them to be - // committed. 
- // If we pop the patches then we won't be able to revert them. - - let patches = self.doc.observer().take_patches(); - let result = Array::new(); - for p in patches { - result.push(&p.try_into()?); - } - Ok(result) - } - - pub fn length(&self, obj: JsValue, heads: Option) -> Result { - let obj = self.import(obj)?; - if let Some(heads) = get_heads(heads) { - Ok(self.doc.length_at(&obj, &heads) as f64) - } else { - Ok(self.doc.length(&obj) as f64) - } - } - - pub fn delete(&mut self, obj: JsValue, prop: JsValue) -> Result<(), JsValue> { - let obj = self.import(obj)?; - let prop = to_prop(prop)?; - self.doc.delete(&obj, prop).map_err(to_js_err)?; - Ok(()) - } - - pub fn save(&mut self) -> Uint8Array { - Uint8Array::from(self.doc.save().as_slice()) - } - - #[wasm_bindgen(js_name = saveIncremental)] - pub fn save_incremental(&mut self) -> Uint8Array { - let bytes = self.doc.save_incremental(); - Uint8Array::from(bytes.as_slice()) - } - - #[wasm_bindgen(js_name = loadIncremental)] - pub fn load_incremental(&mut self, data: Uint8Array) -> Result { - let data = data.to_vec(); - let len = self.doc.load_incremental(&data).map_err(to_js_err)?; - Ok(len as f64) - } - - #[wasm_bindgen(js_name = applyChanges)] - pub fn apply_changes(&mut self, changes: JsValue) -> Result<(), JsValue> { - let changes: Vec<_> = JS(changes).try_into()?; - self.doc.apply_changes(changes).map_err(to_js_err)?; - Ok(()) - } - - #[wasm_bindgen(js_name = getChanges)] - pub fn get_changes(&mut self, have_deps: JsValue) -> Result { - let deps: Vec<_> = JS(have_deps).try_into()?; - let changes = self.doc.get_changes(&deps)?; - let changes: Array = changes - .iter() - .map(|c| Uint8Array::from(c.raw_bytes())) - .collect(); - Ok(changes) - } - - #[wasm_bindgen(js_name = getChangeByHash)] - pub fn get_change_by_hash(&mut self, hash: JsValue) -> Result { - self.doc.ensure_transaction_closed(); - let hash = serde_wasm_bindgen::from_value(hash).map_err(to_js_err)?; - let change = 
self.doc.get_change_by_hash(&hash); - if let Some(c) = change { - Ok(Uint8Array::from(c.raw_bytes()).into()) - } else { - Ok(JsValue::null()) - } - } - - #[wasm_bindgen(js_name = getChangesAdded)] - pub fn get_changes_added(&mut self, other: &mut Automerge) -> Result { - let changes = self.doc.get_changes_added(&mut other.doc); - let changes: Array = changes - .iter() - .map(|c| Uint8Array::from(c.raw_bytes())) - .collect(); - Ok(changes) - } - - #[wasm_bindgen(js_name = getHeads)] - pub fn get_heads(&mut self) -> Array { - let heads = self.doc.get_heads(); - let heads: Array = heads - .iter() - .map(|h| JsValue::from_str(&hex::encode(&h.0))) - .collect(); - heads - } - - #[wasm_bindgen(js_name = getActorId)] - pub fn get_actor_id(&self) -> String { - let actor = self.doc.get_actor(); - actor.to_string() - } - - #[wasm_bindgen(js_name = getLastLocalChange)] - pub fn get_last_local_change(&mut self) -> Result { - if let Some(change) = self.doc.get_last_local_change() { - Ok(Uint8Array::from(change.raw_bytes()).into()) - } else { - Ok(JsValue::null()) - } - } - - pub fn dump(&mut self) { - self.doc.dump() - } - - #[wasm_bindgen(js_name = getMissingDeps)] - pub fn get_missing_deps(&mut self, heads: Option) -> Result { - let heads = get_heads(heads).unwrap_or_default(); - let deps = self.doc.get_missing_deps(&heads); - let deps: Array = deps - .iter() - .map(|h| JsValue::from_str(&hex::encode(&h.0))) - .collect(); - Ok(deps) - } - - #[wasm_bindgen(js_name = receiveSyncMessage)] - pub fn receive_sync_message( - &mut self, - state: &mut SyncState, - message: Uint8Array, - ) -> Result<(), JsValue> { - let message = message.to_vec(); - let message = am::sync::Message::decode(message.as_slice()).map_err(to_js_err)?; - self.doc - .receive_sync_message(&mut state.0, message) - .map_err(to_js_err)?; - Ok(()) - } - - #[wasm_bindgen(js_name = generateSyncMessage)] - pub fn generate_sync_message(&mut self, state: &mut SyncState) -> Result { - if let Some(message) = 
self.doc.generate_sync_message(&mut state.0) { - Ok(Uint8Array::from(message.encode().as_slice()).into()) - } else { - Ok(JsValue::null()) - } - } - - #[wasm_bindgen(js_name = toJS)] - pub fn to_js(&self) -> JsValue { - map_to_js(&self.doc, &ROOT) - } - - pub fn materialize(&self, obj: JsValue, heads: Option) -> Result { - let obj = self.import(obj).unwrap_or(ROOT); - let heads = get_heads(heads); - if let Some(heads) = heads { - match self.doc.object_type(&obj) { - Some(am::ObjType::Map) => Ok(map_to_js_at(&self.doc, &obj, heads.as_slice())), - Some(am::ObjType::List) => Ok(list_to_js_at(&self.doc, &obj, heads.as_slice())), - Some(am::ObjType::Text) => Ok(self.doc.text_at(&obj, heads.as_slice())?.into()), - Some(am::ObjType::Table) => Ok(map_to_js_at(&self.doc, &obj, heads.as_slice())), - None => Err(to_js_err(format!("invalid obj {}", obj))), - } - } else { - match self.doc.object_type(&obj) { - Some(am::ObjType::Map) => Ok(map_to_js(&self.doc, &obj)), - Some(am::ObjType::List) => Ok(list_to_js(&self.doc, &obj)), - Some(am::ObjType::Text) => Ok(self.doc.text(&obj)?.into()), - Some(am::ObjType::Table) => Ok(map_to_js(&self.doc, &obj)), - None => Err(to_js_err(format!("invalid obj {}", obj))), - } - } - } - - fn import(&self, id: JsValue) -> Result { - if let Some(s) = id.as_string() { - if let Some(post) = s.strip_prefix('/') { - let mut obj = ROOT; - let mut is_map = true; - let parts = post.split('/'); - for prop in parts { - if prop.is_empty() { - break; - } - let val = if is_map { - self.doc.get(obj, prop)? - } else { - self.doc.get(obj, am::Prop::Seq(prop.parse().unwrap()))? 
- }; - match val { - Some((am::Value::Object(am::ObjType::Map), id)) => { - is_map = true; - obj = id; - } - Some((am::Value::Object(am::ObjType::Table), id)) => { - is_map = true; - obj = id; - } - Some((am::Value::Object(_), id)) => { - is_map = false; - obj = id; - } - None => return Err(to_js_err(format!("invalid path '{}'", s))), - _ => return Err(to_js_err(format!("path '{}' is not an object", s))), - }; - } - Ok(obj) - } else { - Ok(self.doc.import(&s)?) - } - } else { - Err(to_js_err("invalid objid")) - } - } - - fn import_prop(&self, prop: JsValue) -> Result { - if let Some(s) = prop.as_string() { - Ok(s.into()) - } else if let Some(n) = prop.as_f64() { - Ok((n as usize).into()) - } else { - Err(to_js_err(format!("invalid prop {:?}", prop))) - } - } - - fn import_scalar(&self, value: &JsValue, datatype: &Option) -> Option { - match datatype.as_deref() { - Some("boolean") => value.as_bool().map(am::ScalarValue::Boolean), - Some("int") => value.as_f64().map(|v| am::ScalarValue::Int(v as i64)), - Some("uint") => value.as_f64().map(|v| am::ScalarValue::Uint(v as u64)), - Some("str") => value.as_string().map(|v| am::ScalarValue::Str(v.into())), - Some("f64") => value.as_f64().map(am::ScalarValue::F64), - Some("bytes") => Some(am::ScalarValue::Bytes( - value.clone().dyn_into::().unwrap().to_vec(), - )), - Some("counter") => value.as_f64().map(|v| am::ScalarValue::counter(v as i64)), - Some("timestamp") => { - if let Some(v) = value.as_f64() { - Some(am::ScalarValue::Timestamp(v as i64)) - } else if let Ok(d) = value.clone().dyn_into::() { - Some(am::ScalarValue::Timestamp(d.get_time() as i64)) - } else { - None - } - } - Some("null") => Some(am::ScalarValue::Null), - Some(_) => None, - None => { - if value.is_null() { - Some(am::ScalarValue::Null) - } else if let Some(b) = value.as_bool() { - Some(am::ScalarValue::Boolean(b)) - } else if let Some(s) = value.as_string() { - Some(am::ScalarValue::Str(s.into())) - } else if let Some(n) = value.as_f64() { - if 
(n.round() - n).abs() < f64::EPSILON { - Some(am::ScalarValue::Int(n as i64)) - } else { - Some(am::ScalarValue::F64(n)) - } - } else if let Ok(d) = value.clone().dyn_into::() { - Some(am::ScalarValue::Timestamp(d.get_time() as i64)) - } else if let Ok(o) = &value.clone().dyn_into::() { - Some(am::ScalarValue::Bytes(o.to_vec())) - } else { - None - } - } - } - } - - fn import_value( - &self, - value: &JsValue, - datatype: Option, - ) -> Result<(Value<'static>, Vec<(Prop, JsValue)>), JsValue> { - match self.import_scalar(value, &datatype) { - Some(val) => Ok((val.into(), vec![])), - None => { - if let Some((o, subvals)) = to_objtype(value, &datatype) { - Ok((o.into(), subvals)) - } else { - web_sys::console::log_2(&"Invalid value".into(), value); - Err(to_js_err("invalid value")) - } - } - } - } -} - -#[wasm_bindgen(js_name = create)] -pub fn init(actor: Option) -> Result { - console_error_panic_hook::set_once(); - Automerge::new(actor) -} - -#[wasm_bindgen(js_name = loadDoc)] -pub fn load(data: Uint8Array, actor: Option) -> Result { - let data = data.to_vec(); - let mut doc = AutoCommit::load(&data).map_err(to_js_err)?; - if let Some(s) = actor { - let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec()); - doc.set_actor(actor); - } - Ok(Automerge { doc }) -} - -#[wasm_bindgen(js_name = encodeChange)] -pub fn encode_change(change: JsValue) -> Result { - // Alex: Technically we should be using serde_wasm_bindgen::from_value instead of into_serde. - // Unfortunately serde_wasm_bindgen::from_value fails for some inscrutable reason, so instead - // we use into_serde (sorry to future me). 
- #[allow(deprecated)] - let change: am::ExpandedChange = change.into_serde().map_err(to_js_err)?; - let change: Change = change.into(); - Ok(Uint8Array::from(change.raw_bytes())) -} - -#[wasm_bindgen(js_name = decodeChange)] -pub fn decode_change(change: Uint8Array) -> Result { - let change = Change::from_bytes(change.to_vec()).map_err(to_js_err)?; - let change: am::ExpandedChange = change.decode(); - let serializer = serde_wasm_bindgen::Serializer::json_compatible(); - change.serialize(&serializer).map_err(to_js_err) -} - -#[wasm_bindgen(js_name = initSyncState)] -pub fn init_sync_state() -> SyncState { - SyncState(am::sync::State::new()) -} - -// this is needed to be compatible with the automerge-js api -#[wasm_bindgen(js_name = importSyncState)] -pub fn import_sync_state(state: JsValue) -> Result { - Ok(SyncState(JS(state).try_into()?)) -} - -// this is needed to be compatible with the automerge-js api -#[wasm_bindgen(js_name = exportSyncState)] -pub fn export_sync_state(state: SyncState) -> JsValue { - JS::from(state.0).into() -} - -#[wasm_bindgen(js_name = encodeSyncMessage)] -pub fn encode_sync_message(message: JsValue) -> Result { - let heads = js_get(&message, "heads")?.try_into()?; - let need = js_get(&message, "need")?.try_into()?; - let changes = js_get(&message, "changes")?.try_into()?; - let have = js_get(&message, "have")?.try_into()?; - Ok(Uint8Array::from( - am::sync::Message { - heads, - need, - have, - changes, - } - .encode() - .as_slice(), - )) -} - -#[wasm_bindgen(js_name = decodeSyncMessage)] -pub fn decode_sync_message(msg: Uint8Array) -> Result { - let data = msg.to_vec(); - let msg = am::sync::Message::decode(&data).map_err(to_js_err)?; - let heads = AR::from(msg.heads.as_slice()); - let need = AR::from(msg.need.as_slice()); - let changes = AR::from(msg.changes.as_slice()); - let have = AR::from(msg.have.as_slice()); - let obj = Object::new().into(); - js_set(&obj, "heads", heads)?; - js_set(&obj, "need", need)?; - js_set(&obj, "have", 
have)?; - js_set(&obj, "changes", changes)?; - Ok(obj) -} - -#[wasm_bindgen(js_name = encodeSyncState)] -pub fn encode_sync_state(state: SyncState) -> Result { - let state = state.0; - Ok(Uint8Array::from(state.encode().as_slice())) -} - -#[wasm_bindgen(js_name = decodeSyncState)] -pub fn decode_sync_state(data: Uint8Array) -> Result { - SyncState::decode(data) -} diff --git a/automerge-wasm/src/observer.rs b/automerge-wasm/src/observer.rs deleted file mode 100644 index c7adadc8..00000000 --- a/automerge-wasm/src/observer.rs +++ /dev/null @@ -1,302 +0,0 @@ -#![allow(dead_code)] - -use crate::interop::{export_value, js_set}; -use automerge::{ObjId, OpObserver, Parents, Prop, Value}; -use js_sys::{Array, Object}; -use wasm_bindgen::prelude::*; - -#[derive(Debug, Clone, Default)] -pub(crate) struct Observer { - enabled: bool, - patches: Vec, -} - -impl Observer { - pub(crate) fn take_patches(&mut self) -> Vec { - std::mem::take(&mut self.patches) - } - pub(crate) fn enable(&mut self, enable: bool) { - if self.enabled && !enable { - self.patches.truncate(0) - } - self.enabled = enable; - } -} - -#[derive(Debug, Clone)] -pub(crate) enum Patch { - PutMap { - obj: ObjId, - path: Vec, - key: String, - value: Value<'static>, - conflict: bool, - }, - PutSeq { - obj: ObjId, - path: Vec, - index: usize, - value: Value<'static>, - conflict: bool, - }, - Insert { - obj: ObjId, - path: Vec, - index: usize, - values: Vec>, - }, - Increment { - obj: ObjId, - path: Vec, - prop: Prop, - value: i64, - }, - DeleteMap { - obj: ObjId, - path: Vec, - key: String, - }, - DeleteSeq { - obj: ObjId, - path: Vec, - index: usize, - length: usize, - }, -} - -impl OpObserver for Observer { - fn insert( - &mut self, - mut parents: Parents<'_>, - obj: ObjId, - index: usize, - tagged_value: (Value<'_>, ObjId), - ) { - if self.enabled { - if let Some(Patch::Insert { - obj: tail_obj, - index: tail_index, - values, - .. 
- }) = self.patches.last_mut() - { - if tail_obj == &obj && *tail_index + values.len() == index { - values.push(tagged_value.0.to_owned()); - return; - } - } - let path = parents.path().into_iter().map(|p| p.1).collect(); - let value = tagged_value.0.to_owned(); - let patch = Patch::Insert { - path, - obj, - index, - values: vec![value], - }; - self.patches.push(patch); - } - } - - fn put( - &mut self, - mut parents: Parents<'_>, - obj: ObjId, - prop: Prop, - tagged_value: (Value<'_>, ObjId), - conflict: bool, - ) { - if self.enabled { - let path = parents.path().into_iter().map(|p| p.1).collect(); - let value = tagged_value.0.to_owned(); - let patch = match prop { - Prop::Map(key) => Patch::PutMap { - path, - obj, - key, - value, - conflict, - }, - Prop::Seq(index) => Patch::PutSeq { - path, - obj, - index, - value, - conflict, - }, - }; - self.patches.push(patch); - } - } - - fn increment( - &mut self, - mut parents: Parents<'_>, - obj: ObjId, - prop: Prop, - tagged_value: (i64, ObjId), - ) { - if self.enabled { - let path = parents.path().into_iter().map(|p| p.1).collect(); - let value = tagged_value.0; - self.patches.push(Patch::Increment { - path, - obj, - prop, - value, - }) - } - } - - fn delete(&mut self, mut parents: Parents<'_>, obj: ObjId, prop: Prop) { - if self.enabled { - let path = parents.path().into_iter().map(|p| p.1).collect(); - let patch = match prop { - Prop::Map(key) => Patch::DeleteMap { path, obj, key }, - Prop::Seq(index) => Patch::DeleteSeq { - path, - obj, - index, - length: 1, - }, - }; - self.patches.push(patch) - } - } - - fn merge(&mut self, other: &Self) { - self.patches.extend_from_slice(other.patches.as_slice()) - } - - fn branch(&self) -> Self { - Observer { - patches: vec![], - enabled: self.enabled, - } - } -} - -fn prop_to_js(p: &Prop) -> JsValue { - match p { - Prop::Map(key) => JsValue::from_str(key), - Prop::Seq(index) => JsValue::from_f64(*index as f64), - } -} - -fn export_path(path: &[Prop], end: &Prop) -> Array { - let 
result = Array::new(); - for p in path { - result.push(&prop_to_js(p)); - } - result.push(&prop_to_js(end)); - result -} - -impl Patch { - pub(crate) fn path(&self) -> &[Prop] { - match &self { - Self::PutMap { path, .. } => path.as_slice(), - Self::PutSeq { path, .. } => path.as_slice(), - Self::Increment { path, .. } => path.as_slice(), - Self::Insert { path, .. } => path.as_slice(), - Self::DeleteMap { path, .. } => path.as_slice(), - Self::DeleteSeq { path, .. } => path.as_slice(), - } - } -} - -impl TryFrom for JsValue { - type Error = JsValue; - - fn try_from(p: Patch) -> Result { - let result = Object::new(); - match p { - Patch::PutMap { - path, - key, - value, - conflict, - .. - } => { - js_set(&result, "action", "put")?; - js_set( - &result, - "path", - export_path(path.as_slice(), &Prop::Map(key)), - )?; - js_set(&result, "value", export_value(&value))?; - js_set(&result, "conflict", &JsValue::from_bool(conflict))?; - Ok(result.into()) - } - Patch::PutSeq { - path, - index, - value, - conflict, - .. - } => { - js_set(&result, "action", "put")?; - js_set( - &result, - "path", - export_path(path.as_slice(), &Prop::Seq(index)), - )?; - js_set(&result, "value", export_value(&value))?; - js_set(&result, "conflict", &JsValue::from_bool(conflict))?; - Ok(result.into()) - } - Patch::Insert { - path, - index, - values, - .. - } => { - js_set(&result, "action", "splice")?; - js_set( - &result, - "path", - export_path(path.as_slice(), &Prop::Seq(index)), - )?; - js_set( - &result, - "values", - values.iter().map(export_value).collect::(), - )?; - Ok(result.into()) - } - Patch::Increment { - path, prop, value, .. - } => { - js_set(&result, "action", "inc")?; - js_set(&result, "path", export_path(path.as_slice(), &prop))?; - js_set(&result, "value", &JsValue::from_f64(value as f64))?; - Ok(result.into()) - } - Patch::DeleteMap { path, key, .. 
} => { - js_set(&result, "action", "del")?; - js_set( - &result, - "path", - export_path(path.as_slice(), &Prop::Map(key)), - )?; - Ok(result.into()) - } - Patch::DeleteSeq { - path, - index, - length, - .. - } => { - js_set(&result, "action", "del")?; - js_set( - &result, - "path", - export_path(path.as_slice(), &Prop::Seq(index)), - )?; - if length > 1 { - js_set(&result, "length", length)?; - } - Ok(result.into()) - } - } - } -} diff --git a/automerge-wasm/src/value.rs b/automerge-wasm/src/value.rs deleted file mode 100644 index 98ea5f1b..00000000 --- a/automerge-wasm/src/value.rs +++ /dev/null @@ -1,40 +0,0 @@ -use std::borrow::Cow; - -use automerge as am; -use js_sys::Uint8Array; -use wasm_bindgen::prelude::*; - -#[derive(Debug)] -pub struct ScalarValue<'a>(pub(crate) Cow<'a, am::ScalarValue>); - -impl<'a> From> for JsValue { - fn from(val: ScalarValue<'a>) -> Self { - match &*val.0 { - am::ScalarValue::Bytes(v) => Uint8Array::from(v.as_slice()).into(), - am::ScalarValue::Str(v) => v.to_string().into(), - am::ScalarValue::Int(v) => (*v as f64).into(), - am::ScalarValue::Uint(v) => (*v as f64).into(), - am::ScalarValue::F64(v) => (*v).into(), - am::ScalarValue::Counter(v) => (f64::from(v)).into(), - am::ScalarValue::Timestamp(v) => js_sys::Date::new(&(*v as f64).into()).into(), - am::ScalarValue::Boolean(v) => (*v).into(), - am::ScalarValue::Null => JsValue::null(), - am::ScalarValue::Unknown { bytes, .. 
} => Uint8Array::from(bytes.as_slice()).into(), - } - } -} - -pub(crate) fn datatype(s: &am::ScalarValue) -> String { - match s { - am::ScalarValue::Bytes(_) => "bytes".into(), - am::ScalarValue::Str(_) => "str".into(), - am::ScalarValue::Int(_) => "int".into(), - am::ScalarValue::Uint(_) => "uint".into(), - am::ScalarValue::F64(_) => "f64".into(), - am::ScalarValue::Counter(_) => "counter".into(), - am::ScalarValue::Timestamp(_) => "timestamp".into(), - am::ScalarValue::Boolean(_) => "boolean".into(), - am::ScalarValue::Null => "null".into(), - am::ScalarValue::Unknown { type_code, .. } => format!("unknown{}", type_code), - } -} diff --git a/automerge-wasm/test/apply.ts b/automerge-wasm/test/apply.ts deleted file mode 100644 index 18b53758..00000000 --- a/automerge-wasm/test/apply.ts +++ /dev/null @@ -1,100 +0,0 @@ - -import { describe, it } from 'mocha'; -//@ts-ignore -import assert from 'assert' -//@ts-ignore -import init, { create, load } from '..' - -describe('Automerge', () => { - describe('Patch Apply', () => { - it('apply nested sets on maps', () => { - let start : any = { hello: { mellow: { yellow: "world", x: 1 }, y : 2 } } - let doc1 = create() - doc1.putObject("/", "hello", start.hello); - let mat = doc1.materialize("/") - let doc2 = create() - doc2.enablePatches(true) - doc2.merge(doc1) - - let base = doc2.applyPatches({}) - assert.deepEqual(mat, start) - assert.deepEqual(base, start) - - doc2.delete("/hello/mellow", "yellow"); - delete start.hello.mellow.yellow; - base = doc2.applyPatches(base) - mat = doc2.materialize("/") - - assert.deepEqual(mat, start) - assert.deepEqual(base, start) - }) - - it('apply patches on lists', () => { - //let start = { list: [1,2,3,4,5,6] } - let start = { list: [1,2,3,4] } - let doc1 = create() - doc1.putObject("/", "list", start.list); - let mat = doc1.materialize("/") - let doc2 = create() - doc2.enablePatches(true) - doc2.merge(doc1) - mat = doc1.materialize("/") - let base = doc2.applyPatches({}) - 
assert.deepEqual(mat, start) - assert.deepEqual(base, start) - - doc2.delete("/list", 3); - start.list.splice(3,1) - base = doc2.applyPatches(base) - - assert.deepEqual(base, start) - }) - - it('apply patches on lists of lists of lists', () => { - let start = { list: - [ - [ - [ 1, 2, 3, 4, 5, 6], - [ 7, 8, 9,10,11,12], - ], - [ - [ 7, 8, 9,10,11,12], - [ 1, 2, 3, 4, 5, 6], - ] - ] - } - let doc1 = create() - doc1.enablePatches(true) - doc1.putObject("/", "list", start.list); - let mat = doc1.materialize("/") - let base = doc1.applyPatches({}) - assert.deepEqual(mat, start) - - doc1.delete("/list/0/1", 3) - start.list[0][1].splice(3,1) - - doc1.delete("/list/0", 0) - start.list[0].splice(0,1) - - mat = doc1.materialize("/") - base = doc1.applyPatches(base) - assert.deepEqual(mat, start) - assert.deepEqual(base, start) - }) - - it('large inserts should make one splice patch', () => { - let doc1 = create() - doc1.enablePatches(true) - doc1.putObject("/", "list", "abc"); - let patches = doc1.popPatches() - assert.deepEqual( patches, [ - { action: 'put', conflict: false, path: [ 'list' ], value: [] }, - { action: 'splice', path: [ 'list', 0 ], values: [ 'a', 'b', 'c' ] }]) - }) - }) -}) - -// FIXME: handle conflicts correctly on apply -// TODO: squash puts -// TODO: merge deletes -// TODO: elide `conflict: false` diff --git a/automerge-wasm/types/LICENSE b/automerge-wasm/types/LICENSE deleted file mode 100644 index 63b21502..00000000 --- a/automerge-wasm/types/LICENSE +++ /dev/null @@ -1,10 +0,0 @@ -MIT License - -Copyright 2022, Ink & Switch LLC - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following 
conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - diff --git a/automerge-wasm/types/package.json b/automerge-wasm/types/package.json deleted file mode 100644 index 7b6852ae..00000000 --- a/automerge-wasm/types/package.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "collaborators": [ - "Orion Henry " - ], - "name": "automerge-types", - "description": "typescript types for low level automerge api", - "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", - "repository": "github:automerge/automerge-rs", - "version": "0.1.5", - "license": "MIT", - "files": [ - "LICENSE", - "package.json", - "index.d.ts" - ], - "types": "index.d.ts", - "main": "" -} diff --git a/automerge-wasm/web-index.js b/automerge-wasm/web-index.js deleted file mode 100644 index 9bbe47df..00000000 --- a/automerge-wasm/web-index.js +++ /dev/null @@ -1,49 +0,0 @@ -export { - loadDoc as load, - create, - encodeChange, - decodeChange, - initSyncState, - encodeSyncMessage, - decodeSyncMessage, - encodeSyncState, - decodeSyncState, - exportSyncState, - importSyncState, -} from "./bindgen.js" -import { - loadDoc as load, - create, - encodeChange, - decodeChange, - initSyncState, - encodeSyncMessage, - decodeSyncMessage, - encodeSyncState, - decodeSyncState, - exportSyncState, - importSyncState, -} from "./bindgen.js" - -let api = { - load, - create, - encodeChange, - decodeChange, - initSyncState, - encodeSyncMessage, - decodeSyncMessage, 
- encodeSyncState, - decodeSyncState, - exportSyncState, - importSyncState -} - -import wasm_init from "./bindgen.js" - -export function init() { - return new Promise((resolve,reject) => wasm_init().then(() => { - resolve({ ... api, load, create }) - })) -} - diff --git a/automerge/src/clocks.rs b/automerge/src/clocks.rs deleted file mode 100644 index 60fc5c71..00000000 --- a/automerge/src/clocks.rs +++ /dev/null @@ -1,44 +0,0 @@ -use crate::{ - clock::{Clock, ClockData}, - Change, ChangeHash, -}; -use std::collections::HashMap; - -pub(crate) struct Clocks(HashMap); - -#[derive(Debug, thiserror::Error)] -#[error("attempted to derive a clock for a change with dependencies we don't have")] -pub struct MissingDep(ChangeHash); - -impl Clocks { - pub(crate) fn new() -> Self { - Self(HashMap::new()) - } - - pub(crate) fn add_change( - &mut self, - change: &Change, - actor_index: usize, - ) -> Result<(), MissingDep> { - let mut clock = Clock::new(); - for hash in change.deps() { - let c = self.0.get(hash).ok_or(MissingDep(*hash))?; - clock.merge(c); - } - clock.include( - actor_index, - ClockData { - max_op: change.max_op(), - seq: change.seq(), - }, - ); - self.0.insert(change.hash(), clock); - Ok(()) - } -} - -impl From for HashMap { - fn from(c: Clocks) -> Self { - c.0 - } -} diff --git a/automerge/src/exid.rs b/automerge/src/exid.rs deleted file mode 100644 index 2c174e28..00000000 --- a/automerge/src/exid.rs +++ /dev/null @@ -1,82 +0,0 @@ -use crate::ActorId; -use serde::Serialize; -use serde::Serializer; -use std::cmp::{Ord, Ordering}; -use std::fmt; -use std::hash::{Hash, Hasher}; - -#[derive(Debug, Clone)] -pub enum ExId { - Root, - Id(u64, ActorId, usize), -} - -impl PartialEq for ExId { - fn eq(&self, other: &Self) -> bool { - match (self, other) { - (ExId::Root, ExId::Root) => true, - (ExId::Id(ctr1, actor1, _), ExId::Id(ctr2, actor2, _)) - if ctr1 == ctr2 && actor1 == actor2 => - { - true - } - _ => false, - } - } -} - -impl Eq for ExId {} - -impl fmt::Display 
for ExId { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - ExId::Root => write!(f, "_root"), - ExId::Id(ctr, actor, _) => write!(f, "{}@{}", ctr, actor), - } - } -} - -impl Hash for ExId { - fn hash(&self, state: &mut H) { - match self { - ExId::Root => 0.hash(state), - ExId::Id(ctr, actor, _) => { - ctr.hash(state); - actor.hash(state); - } - } - } -} - -impl Ord for ExId { - fn cmp(&self, other: &Self) -> Ordering { - match (self, other) { - (ExId::Root, ExId::Root) => Ordering::Equal, - (ExId::Root, _) => Ordering::Less, - (_, ExId::Root) => Ordering::Greater, - (ExId::Id(c1, a1, _), ExId::Id(c2, a2, _)) if c1 == c2 => a2.cmp(a1), - (ExId::Id(c1, _, _), ExId::Id(c2, _, _)) => c1.cmp(c2), - } - } -} - -impl PartialOrd for ExId { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Serialize for ExId { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_str(self.to_string().as_str()) - } -} - -impl AsRef for ExId { - fn as_ref(&self) -> &ExId { - self - } -} diff --git a/automerge/src/lib.rs b/automerge/src/lib.rs deleted file mode 100644 index df33e096..00000000 --- a/automerge/src/lib.rs +++ /dev/null @@ -1,112 +0,0 @@ -#![doc( - html_logo_url = "https://raw.githubusercontent.com/automerge/automerge-rs/main/img/brandmark.svg", - html_favicon_url = "https:///raw.githubusercontent.com/automerge/automerge-rs/main/img/favicon.ico" -)] -#![warn( - missing_debug_implementations, - // missing_docs, // TODO: add documentation! - rust_2018_idioms, - unreachable_pub, - bad_style, - const_err, - dead_code, - improper_ctypes, - non_shorthand_field_patterns, - no_mangle_generic_items, - overflowing_literals, - path_statements, - patterns_in_fns_without_body, - private_in_public, - unconditional_recursion, - unused, - unused_allocation, - unused_comparisons, - unused_parens, - while_true -)] - -#[doc(hidden)] -#[macro_export] -macro_rules! 
log { - ( $( $t:tt )* ) => { - { - use $crate::__log; - __log!( $( $t )* ); - } - } - } - -#[cfg(all(feature = "wasm", target_family = "wasm"))] -#[doc(hidden)] -#[macro_export] -macro_rules! __log { - ( $( $t:tt )* ) => { - web_sys::console::log_1(&format!( $( $t )* ).into()); - } - } - -#[cfg(not(all(feature = "wasm", target_family = "wasm")))] -#[doc(hidden)] -#[macro_export] -macro_rules! __log { - ( $( $t:tt )* ) => { - println!( $( $t )* ); - } - } - -mod autocommit; -mod automerge; -mod autoserde; -mod change; -mod clock; -mod clocks; -mod columnar; -mod convert; -mod error; -mod exid; -mod indexed_cache; -mod keys; -mod keys_at; -mod legacy; -mod list_range; -mod list_range_at; -mod map_range; -mod map_range_at; -mod op_observer; -mod op_set; -mod op_tree; -mod parents; -mod query; -mod storage; -pub mod sync; -pub mod transaction; -mod types; -mod value; -mod values; -#[cfg(feature = "optree-visualisation")] -mod visualisation; - -pub use crate::automerge::Automerge; -pub use autocommit::{AutoCommit, AutoCommitWithObs}; -pub use autoserde::AutoSerde; -pub use change::{Change, LoadError as LoadChangeError}; -pub use error::AutomergeError; -pub use error::InvalidActorId; -pub use error::InvalidChangeHashSlice; -pub use exid::ExId as ObjId; -pub use keys::Keys; -pub use keys_at::KeysAt; -pub use legacy::Change as ExpandedChange; -pub use list_range::ListRange; -pub use list_range_at::ListRangeAt; -pub use map_range::MapRange; -pub use map_range_at::MapRangeAt; -pub use op_observer::OpObserver; -pub use op_observer::Patch; -pub use op_observer::VecOpObserver; -pub use parents::Parents; -pub use types::{ActorId, ChangeHash, ObjType, OpType, Prop}; -pub use value::{ScalarValue, Value}; -pub use values::Values; - -pub const ROOT: ObjId = ObjId::Root; diff --git a/automerge/src/op_observer.rs b/automerge/src/op_observer.rs deleted file mode 100644 index db3fdf92..00000000 --- a/automerge/src/op_observer.rs +++ /dev/null @@ -1,236 +0,0 @@ -use crate::exid::ExId; 
-use crate::Parents; -use crate::Prop; -use crate::Value; - -/// An observer of operations applied to the document. -pub trait OpObserver: Default + Clone { - /// A new value has been inserted into the given object. - /// - /// - `parents`: A parents iterator that can be used to collect path information - /// - `objid`: the object that has been inserted into. - /// - `index`: the index the new value has been inserted at. - /// - `tagged_value`: the value that has been inserted and the id of the operation that did the - /// insert. - fn insert( - &mut self, - parents: Parents<'_>, - objid: ExId, - index: usize, - tagged_value: (Value<'_>, ExId), - ); - - /// A new value has been put into the given object. - /// - /// - `parents`: A parents iterator that can be used to collect path information - /// - `objid`: the object that has been put into. - /// - `prop`: the prop that the value as been put at. - /// - `tagged_value`: the value that has been put into the object and the id of the operation - /// that did the put. - /// - `conflict`: whether this put conflicts with other operations. - fn put( - &mut self, - parents: Parents<'_>, - objid: ExId, - prop: Prop, - tagged_value: (Value<'_>, ExId), - conflict: bool, - ); - - /// A counter has been incremented. - /// - /// - `parents`: A parents iterator that can be used to collect path information - /// - `objid`: the object that contains the counter. - /// - `prop`: they prop that the chounter is at. - /// - `tagged_value`: the amount the counter has been incremented by, and the the id of the - /// increment operation. - fn increment( - &mut self, - parents: Parents<'_>, - objid: ExId, - prop: Prop, - tagged_value: (i64, ExId), - ); - - /// A value has beeen deleted. - /// - /// - `parents`: A parents iterator that can be used to collect path information - /// - `objid`: the object that has been deleted in. - /// - `prop`: the prop of the value that has been deleted. 
- fn delete(&mut self, parents: Parents<'_>, objid: ExId, prop: Prop); - - /// Branch of a new op_observer later to be merged - /// - /// Called by AutoCommit when creating a new transaction. Observer branch - /// will be merged on `commit()` or thrown away on `rollback()` - /// - fn branch(&self) -> Self { - Self::default() - } - - /// Merge observed information from a transaction. - /// - /// Called by AutoCommit on `commit()` - /// - /// - `other`: Another Op Observer of the same type - fn merge(&mut self, other: &Self); -} - -impl OpObserver for () { - fn insert( - &mut self, - _parents: Parents<'_>, - _objid: ExId, - _index: usize, - _tagged_value: (Value<'_>, ExId), - ) { - } - - fn put( - &mut self, - _parents: Parents<'_>, - _objid: ExId, - _prop: Prop, - _tagged_value: (Value<'_>, ExId), - _conflict: bool, - ) { - } - - fn increment( - &mut self, - _parents: Parents<'_>, - _objid: ExId, - _prop: Prop, - _tagged_value: (i64, ExId), - ) { - } - - fn delete(&mut self, _parents: Parents<'_>, _objid: ExId, _prop: Prop) {} - - fn merge(&mut self, _other: &Self) {} -} - -/// Capture operations into a [`Vec`] and store them as patches. -#[derive(Default, Debug, Clone)] -pub struct VecOpObserver { - patches: Vec, -} - -impl VecOpObserver { - /// Take the current list of patches, leaving the internal list empty and ready for new - /// patches. 
- pub fn take_patches(&mut self) -> Vec { - std::mem::take(&mut self.patches) - } -} - -impl OpObserver for VecOpObserver { - fn insert( - &mut self, - mut parents: Parents<'_>, - obj: ExId, - index: usize, - (value, id): (Value<'_>, ExId), - ) { - let path = parents.path(); - self.patches.push(Patch::Insert { - obj, - path, - index, - value: (value.into_owned(), id), - }); - } - - fn put( - &mut self, - mut parents: Parents<'_>, - obj: ExId, - prop: Prop, - (value, id): (Value<'_>, ExId), - conflict: bool, - ) { - let path = parents.path(); - self.patches.push(Patch::Put { - obj, - path, - prop, - value: (value.into_owned(), id), - conflict, - }); - } - - fn increment( - &mut self, - mut parents: Parents<'_>, - obj: ExId, - prop: Prop, - tagged_value: (i64, ExId), - ) { - let path = parents.path(); - self.patches.push(Patch::Increment { - obj, - path, - prop, - value: tagged_value, - }); - } - - fn delete(&mut self, mut parents: Parents<'_>, obj: ExId, prop: Prop) { - let path = parents.path(); - self.patches.push(Patch::Delete { obj, path, prop }) - } - - fn merge(&mut self, other: &Self) { - self.patches.extend_from_slice(other.patches.as_slice()) - } -} - -/// A notification to the application that something has changed in a document. -#[derive(Debug, Clone, PartialEq)] -pub enum Patch { - /// Associating a new value with a prop in a map, or an existing list element - Put { - /// path to the object - path: Vec<(ExId, Prop)>, - /// The object that was put into. - obj: ExId, - /// The prop that the new value was put at. - prop: Prop, - /// The value that was put, and the id of the operation that put it there. - value: (Value<'static>, ExId), - /// Whether this put conflicts with another. - conflict: bool, - }, - /// Inserting a new element into a list/text - Insert { - /// path to the object - path: Vec<(ExId, Prop)>, - /// The object that was inserted into. - obj: ExId, - /// The index that the new value was inserted at. 
- index: usize, - /// The value that was inserted, and the id of the operation that inserted it there. - value: (Value<'static>, ExId), - }, - /// Incrementing a counter. - Increment { - /// path to the object - path: Vec<(ExId, Prop)>, - /// The object that was incremented in. - obj: ExId, - /// The prop that was incremented. - prop: Prop, - /// The amount that the counter was incremented by, and the id of the operation that - /// did the increment. - value: (i64, ExId), - }, - /// Deleting an element from a list/text - Delete { - /// path to the object - path: Vec<(ExId, Prop)>, - /// The object that was deleted from. - obj: ExId, - /// The prop that was deleted. - prop: Prop, - }, -} diff --git a/automerge/src/op_tree.rs b/automerge/src/op_tree.rs deleted file mode 100644 index 6cd5bdf9..00000000 --- a/automerge/src/op_tree.rs +++ /dev/null @@ -1,809 +0,0 @@ -use std::{ - cmp::{min, Ordering}, - fmt::Debug, - mem, - ops::RangeBounds, -}; - -pub(crate) use crate::op_set::OpSetMetadata; -use crate::{ - clock::Clock, - query::{self, Index, QueryResult, ReplaceArgs, TreeQuery}, -}; -use crate::{ - types::{ObjId, Op, OpId}, - ObjType, -}; -use std::collections::HashSet; - -pub(crate) const B: usize = 16; - -mod iter; -pub(crate) use iter::OpTreeIter; - -#[derive(Debug, Clone, PartialEq)] -pub(crate) struct OpTree { - pub(crate) internal: OpTreeInternal, - pub(crate) objtype: ObjType, - /// The id of the parent object, root has no parent. 
- pub(crate) parent: Option, -} - -impl OpTree { - pub(crate) fn new() -> Self { - Self { - internal: Default::default(), - objtype: ObjType::Map, - parent: None, - } - } - - pub(crate) fn iter(&self) -> OpTreeIter<'_> { - self.internal.iter() - } - - pub(crate) fn len(&self) -> usize { - self.internal.len() - } -} - -#[derive(Clone, Debug)] -pub(crate) struct OpTreeInternal { - pub(crate) root_node: Option, -} - -#[derive(Clone, Debug)] -pub(crate) struct OpTreeNode { - pub(crate) children: Vec, - pub(crate) elements: Vec, - pub(crate) index: Index, - length: usize, -} - -impl OpTreeInternal { - /// Construct a new, empty, sequence. - pub(crate) fn new() -> Self { - Self { root_node: None } - } - - /// Get the length of the sequence. - pub(crate) fn len(&self) -> usize { - self.root_node.as_ref().map_or(0, |n| n.len()) - } - - pub(crate) fn keys(&self) -> Option> { - self.root_node.as_ref().map(query::Keys::new) - } - - pub(crate) fn keys_at(&self, clock: Clock) -> Option> { - self.root_node - .as_ref() - .map(|root| query::KeysAt::new(root, clock)) - } - - pub(crate) fn map_range<'a, R: RangeBounds>( - &'a self, - range: R, - meta: &'a OpSetMetadata, - ) -> Option> { - self.root_node - .as_ref() - .map(|node| query::MapRange::new(range, node, meta)) - } - - pub(crate) fn map_range_at<'a, R: RangeBounds>( - &'a self, - range: R, - meta: &'a OpSetMetadata, - clock: Clock, - ) -> Option> { - self.root_node - .as_ref() - .map(|node| query::MapRangeAt::new(range, node, meta, clock)) - } - - pub(crate) fn list_range>( - &self, - range: R, - ) -> Option> { - self.root_node - .as_ref() - .map(|node| query::ListRange::new(range, node)) - } - - pub(crate) fn list_range_at>( - &self, - range: R, - clock: Clock, - ) -> Option> { - self.root_node - .as_ref() - .map(|node| query::ListRangeAt::new(range, clock, node)) - } - - pub(crate) fn search<'a, 'b: 'a, Q>(&'b self, mut query: Q, m: &OpSetMetadata) -> Q - where - Q: TreeQuery<'a>, - { - self.root_node - .as_ref() - 
.map(|root| match query.query_node_with_metadata(root, m) { - QueryResult::Descend => root.search(&mut query, m, None), - QueryResult::Skip(skip) => root.search(&mut query, m, Some(skip)), - _ => true, - }); - query - } - - /// Create an iterator through the sequence. - pub(crate) fn iter(&self) -> OpTreeIter<'_> { - iter::OpTreeIter::new(self) - } - - /// Insert the `element` into the sequence at `index`. - /// - /// # Panics - /// - /// Panics if `index > len`. - pub(crate) fn insert(&mut self, index: usize, element: Op) { - assert!( - index <= self.len(), - "tried to insert at {} but len is {}", - index, - self.len() - ); - - let old_len = self.len(); - if let Some(root) = self.root_node.as_mut() { - #[cfg(debug_assertions)] - root.check(); - - if root.is_full() { - let original_len = root.len(); - let new_root = OpTreeNode::new(); - - // move new_root to root position - let old_root = mem::replace(root, new_root); - - root.length += old_root.len(); - root.index = old_root.index.clone(); - root.children.push(old_root); - root.split_child(0); - - assert_eq!(original_len, root.len()); - - // after splitting the root has one element and two children, find which child the - // index is in - let first_child_len = root.children[0].len(); - let (child, insertion_index) = if first_child_len < index { - (&mut root.children[1], index - (first_child_len + 1)) - } else { - (&mut root.children[0], index) - }; - root.length += 1; - root.index.insert(&element); - child.insert_into_non_full_node(insertion_index, element) - } else { - root.insert_into_non_full_node(index, element) - } - } else { - let mut root = OpTreeNode::new(); - root.insert_into_non_full_node(index, element); - self.root_node = Some(root) - } - assert_eq!(self.len(), old_len + 1, "{:#?}", self); - } - - /// Get the `element` at `index` in the sequence. 
- pub(crate) fn get(&self, index: usize) -> Option<&Op> { - self.root_node.as_ref().and_then(|n| n.get(index)) - } - - // this replaces get_mut() because it allows the indexes to update correctly - pub(crate) fn update(&mut self, index: usize, f: F) - where - F: FnMut(&mut Op), - { - if self.len() > index { - self.root_node.as_mut().unwrap().update(index, f); - } - } - - /// Removes the element at `index` from the sequence. - /// - /// # Panics - /// - /// Panics if `index` is out of bounds. - pub(crate) fn remove(&mut self, index: usize) -> Op { - if let Some(root) = self.root_node.as_mut() { - #[cfg(debug_assertions)] - let len = root.check(); - let old = root.remove(index); - - if root.elements.is_empty() { - if root.is_leaf() { - self.root_node = None; - } else { - self.root_node = Some(root.children.remove(0)); - } - } - - #[cfg(debug_assertions)] - debug_assert_eq!(len, self.root_node.as_ref().map_or(0, |r| r.check()) + 1); - old - } else { - panic!("remove from empty tree") - } - } -} - -impl OpTreeNode { - fn new() -> Self { - Self { - elements: Vec::new(), - children: Vec::new(), - index: Default::default(), - length: 0, - } - } - - pub(crate) fn search<'a, 'b: 'a, Q>( - &'b self, - query: &mut Q, - m: &OpSetMetadata, - skip: Option, - ) -> bool - where - Q: TreeQuery<'a>, - { - if self.is_leaf() { - let skip = skip.unwrap_or(0); - for e in self.elements.iter().skip(skip) { - if query.query_element_with_metadata(e, m) == QueryResult::Finish { - return true; - } - } - false - } else { - let mut skip = skip.unwrap_or(0); - for (child_index, child) in self.children.iter().enumerate() { - match skip.cmp(&child.len()) { - Ordering::Greater => { - // not in this child at all - // take off the number of elements in the child as well as the next element - skip -= child.len() + 1; - } - Ordering::Equal => { - // just try the element - skip -= child.len(); - if let Some(e) = self.elements.get(child_index) { - if query.query_element_with_metadata(e, m) == 
QueryResult::Finish { - return true; - } - } - } - Ordering::Less => { - // descend and try find it - match query.query_node_with_metadata(child, m) { - QueryResult::Descend => { - // search in the child node, passing in the number of items left to - // skip - if child.search(query, m, Some(skip)) { - return true; - } - } - QueryResult::Finish => return true, - QueryResult::Next => (), - QueryResult::Skip(_) => panic!("had skip from non-root node"), - } - if let Some(e) = self.elements.get(child_index) { - if query.query_element_with_metadata(e, m) == QueryResult::Finish { - return true; - } - } - // reset the skip to zero so we continue iterating normally - skip = 0; - } - } - } - false - } - } - - pub(crate) fn len(&self) -> usize { - self.length - } - - fn reindex(&mut self) { - let mut index = Index::new(); - for c in &self.children { - index.merge(&c.index); - } - for e in &self.elements { - index.insert(e); - } - self.index = index - } - - fn is_leaf(&self) -> bool { - self.children.is_empty() - } - - fn is_full(&self) -> bool { - self.elements.len() >= 2 * B - 1 - } - - /// Returns the child index and the given index adjusted for the cumulative index before that - /// child. 
- fn find_child_index(&self, index: usize) -> (usize, usize) { - let mut cumulative_len = 0; - for (child_index, child) in self.children.iter().enumerate() { - if cumulative_len + child.len() >= index { - return (child_index, index - cumulative_len); - } else { - cumulative_len += child.len() + 1; - } - } - panic!("index {} not found in node with len {}", index, self.len()) - } - - fn insert_into_non_full_node(&mut self, index: usize, element: Op) { - assert!(!self.is_full()); - - self.index.insert(&element); - - if self.is_leaf() { - self.length += 1; - self.elements.insert(index, element); - } else { - let (child_index, sub_index) = self.find_child_index(index); - let child = &mut self.children[child_index]; - - if child.is_full() { - self.split_child(child_index); - - // child structure has changed so we need to find the index again - let (child_index, sub_index) = self.find_child_index(index); - let child = &mut self.children[child_index]; - child.insert_into_non_full_node(sub_index, element); - } else { - child.insert_into_non_full_node(sub_index, element); - } - self.length += 1; - } - } - - // A utility function to split the child `full_child_index` of this node - // Note that `full_child_index` must be full when this function is called. - fn split_child(&mut self, full_child_index: usize) { - let original_len_self = self.len(); - - let full_child = &mut self.children[full_child_index]; - - // Create a new node which is going to store (B-1) keys - // of the full child. 
- let mut successor_sibling = OpTreeNode::new(); - - let original_len = full_child.len(); - assert!(full_child.is_full()); - - successor_sibling.elements = full_child.elements.split_off(B); - - if !full_child.is_leaf() { - successor_sibling.children = full_child.children.split_off(B); - } - - let middle = full_child.elements.pop().unwrap(); - - full_child.length = - full_child.elements.len() + full_child.children.iter().map(|c| c.len()).sum::(); - - successor_sibling.length = successor_sibling.elements.len() - + successor_sibling - .children - .iter() - .map(|c| c.len()) - .sum::(); - - let z_len = successor_sibling.len(); - - let full_child_len = full_child.len(); - - full_child.reindex(); - successor_sibling.reindex(); - - self.children - .insert(full_child_index + 1, successor_sibling); - - self.elements.insert(full_child_index, middle); - - assert_eq!(full_child_len + z_len + 1, original_len, "{:#?}", self); - - assert_eq!(original_len_self, self.len()); - } - - fn remove_from_leaf(&mut self, index: usize) -> Op { - self.length -= 1; - self.elements.remove(index) - } - - fn remove_element_from_non_leaf(&mut self, index: usize, element_index: usize) -> Op { - self.length -= 1; - if self.children[element_index].elements.len() >= B { - let total_index = self.cumulative_index(element_index); - // recursively delete index - 1 in predecessor_node - let predecessor = self.children[element_index].remove(index - 1 - total_index); - // replace element with that one - mem::replace(&mut self.elements[element_index], predecessor) - } else if self.children[element_index + 1].elements.len() >= B { - // recursively delete index + 1 in successor_node - let total_index = self.cumulative_index(element_index + 1); - let successor = self.children[element_index + 1].remove(index + 1 - total_index); - // replace element with that one - mem::replace(&mut self.elements[element_index], successor) - } else { - let middle_element = self.elements.remove(element_index); - let 
successor_child = self.children.remove(element_index + 1); - self.children[element_index].merge(middle_element, successor_child); - - let total_index = self.cumulative_index(element_index); - self.children[element_index].remove(index - total_index) - } - } - - fn cumulative_index(&self, child_index: usize) -> usize { - self.children[0..child_index] - .iter() - .map(|c| c.len() + 1) - .sum() - } - - fn remove_from_internal_child(&mut self, index: usize, mut child_index: usize) -> Op { - if self.children[child_index].elements.len() < B - && if child_index > 0 { - self.children[child_index - 1].elements.len() < B - } else { - true - } - && if child_index + 1 < self.children.len() { - self.children[child_index + 1].elements.len() < B - } else { - true - } - { - // if the child and its immediate siblings have B-1 elements merge the child - // with one sibling, moving an element from this node into the new merged node - // to be the median - - if child_index > 0 { - let middle = self.elements.remove(child_index - 1); - - // use the predessor sibling - let successor = self.children.remove(child_index); - child_index -= 1; - - self.children[child_index].merge(middle, successor); - } else { - let middle = self.elements.remove(child_index); - - // use the sucessor sibling - let successor = self.children.remove(child_index + 1); - - self.children[child_index].merge(middle, successor); - } - } else if self.children[child_index].elements.len() < B { - if child_index > 0 - && self - .children - .get(child_index - 1) - .map_or(false, |c| c.elements.len() >= B) - { - let last_element = self.children[child_index - 1].elements.pop().unwrap(); - assert!(!self.children[child_index - 1].elements.is_empty()); - self.children[child_index - 1].length -= 1; - self.children[child_index - 1].index.remove(&last_element); - - let parent_element = - mem::replace(&mut self.elements[child_index - 1], last_element); - - self.children[child_index].index.insert(&parent_element); - 
self.children[child_index] - .elements - .insert(0, parent_element); - self.children[child_index].length += 1; - - if let Some(last_child) = self.children[child_index - 1].children.pop() { - self.children[child_index - 1].length -= last_child.len(); - self.children[child_index - 1].reindex(); - self.children[child_index].length += last_child.len(); - self.children[child_index].children.insert(0, last_child); - self.children[child_index].reindex(); - } - } else if self - .children - .get(child_index + 1) - .map_or(false, |c| c.elements.len() >= B) - { - let first_element = self.children[child_index + 1].elements.remove(0); - self.children[child_index + 1].index.remove(&first_element); - self.children[child_index + 1].length -= 1; - - assert!(!self.children[child_index + 1].elements.is_empty()); - - let parent_element = mem::replace(&mut self.elements[child_index], first_element); - - self.children[child_index].length += 1; - self.children[child_index].index.insert(&parent_element); - self.children[child_index].elements.push(parent_element); - - if !self.children[child_index + 1].is_leaf() { - let first_child = self.children[child_index + 1].children.remove(0); - self.children[child_index + 1].length -= first_child.len(); - self.children[child_index + 1].reindex(); - self.children[child_index].length += first_child.len(); - - self.children[child_index].children.push(first_child); - self.children[child_index].reindex(); - } - } - } - self.length -= 1; - let total_index = self.cumulative_index(child_index); - self.children[child_index].remove(index - total_index) - } - - fn check(&self) -> usize { - let l = self.elements.len() + self.children.iter().map(|c| c.check()).sum::(); - assert_eq!(self.len(), l, "{:#?}", self); - - l - } - - pub(crate) fn remove(&mut self, index: usize) -> Op { - let original_len = self.len(); - if self.is_leaf() { - let v = self.remove_from_leaf(index); - self.index.remove(&v); - assert_eq!(original_len, self.len() + 1); - 
debug_assert_eq!(self.check(), self.len()); - v - } else { - let mut total_index = 0; - for (child_index, child) in self.children.iter().enumerate() { - match (total_index + child.len()).cmp(&index) { - Ordering::Less => { - // should be later on in the loop - total_index += child.len() + 1; - continue; - } - Ordering::Equal => { - let v = self.remove_element_from_non_leaf( - index, - min(child_index, self.elements.len() - 1), - ); - self.index.remove(&v); - assert_eq!(original_len, self.len() + 1); - debug_assert_eq!(self.check(), self.len()); - return v; - } - Ordering::Greater => { - let v = self.remove_from_internal_child(index, child_index); - self.index.remove(&v); - assert_eq!(original_len, self.len() + 1); - debug_assert_eq!(self.check(), self.len()); - return v; - } - } - } - panic!( - "index not found to remove {} {} {} {}", - index, - total_index, - self.len(), - self.check() - ); - } - } - - fn merge(&mut self, middle: Op, successor_sibling: OpTreeNode) { - self.index.insert(&middle); - self.index.merge(&successor_sibling.index); - self.elements.push(middle); - self.elements.extend(successor_sibling.elements); - self.children.extend(successor_sibling.children); - self.length += successor_sibling.length + 1; - assert!(self.is_full()); - } - - /// Update the operation at the given index using the provided function. - /// - /// This handles updating the indices after the update. 
- pub(crate) fn update(&mut self, index: usize, f: F) -> ReplaceArgs - where - F: FnOnce(&mut Op), - { - if self.is_leaf() { - let new_element = self.elements.get_mut(index).unwrap(); - let old_id = new_element.id; - let old_visible = new_element.visible(); - f(new_element); - let replace_args = ReplaceArgs { - old_id, - new_id: new_element.id, - old_visible, - new_visible: new_element.visible(), - new_key: new_element.elemid_or_key(), - }; - self.index.replace(&replace_args); - replace_args - } else { - let mut cumulative_len = 0; - let len = self.len(); - for (child_index, child) in self.children.iter_mut().enumerate() { - match (cumulative_len + child.len()).cmp(&index) { - Ordering::Less => { - cumulative_len += child.len() + 1; - } - Ordering::Equal => { - let new_element = self.elements.get_mut(child_index).unwrap(); - let old_id = new_element.id; - let old_visible = new_element.visible(); - f(new_element); - let replace_args = ReplaceArgs { - old_id, - new_id: new_element.id, - old_visible, - new_visible: new_element.visible(), - new_key: new_element.elemid_or_key(), - }; - self.index.replace(&replace_args); - return replace_args; - } - Ordering::Greater => { - let replace_args = child.update(index - cumulative_len, f); - self.index.replace(&replace_args); - return replace_args; - } - } - } - panic!("Invalid index to set: {} but len was {}", index, len) - } - } - - pub(crate) fn last(&self) -> &Op { - if self.is_leaf() { - // node is never empty so this is safe - self.elements.last().unwrap() - } else { - // if not a leaf then there is always at least one child - self.children.last().unwrap().last() - } - } - - pub(crate) fn get(&self, index: usize) -> Option<&Op> { - if self.is_leaf() { - return self.elements.get(index); - } else { - let mut cumulative_len = 0; - for (child_index, child) in self.children.iter().enumerate() { - match (cumulative_len + child.len()).cmp(&index) { - Ordering::Less => { - cumulative_len += child.len() + 1; - } - Ordering::Equal 
=> return self.elements.get(child_index), - Ordering::Greater => { - return child.get(index - cumulative_len); - } - } - } - } - None - } -} - -impl Default for OpTreeInternal { - fn default() -> Self { - Self::new() - } -} - -impl PartialEq for OpTreeInternal { - fn eq(&self, other: &Self) -> bool { - self.len() == other.len() && self.iter().zip(other.iter()).all(|(a, b)| a == b) - } -} - -impl<'a> IntoIterator for &'a OpTreeInternal { - type Item = &'a Op; - - type IntoIter = Iter<'a>; - - fn into_iter(self) -> Self::IntoIter { - Iter { - inner: self, - index: 0, - } - } -} - -pub(crate) struct Iter<'a> { - inner: &'a OpTreeInternal, - index: usize, -} - -impl<'a> Iterator for Iter<'a> { - type Item = &'a Op; - - fn next(&mut self) -> Option { - self.index += 1; - self.inner.get(self.index - 1) - } - - fn nth(&mut self, n: usize) -> Option { - self.index += n + 1; - self.inner.get(self.index - 1) - } -} - -#[derive(Debug, Clone, PartialEq)] -struct CounterData { - pos: usize, - val: i64, - succ: HashSet, - op: Op, -} - -#[cfg(test)] -mod tests { - use crate::legacy as amp; - use crate::types::{Op, OpId}; - - use super::*; - - fn op() -> Op { - let zero = OpId(0, 0); - Op { - id: zero, - action: amp::OpType::Put(0.into()), - key: zero.into(), - succ: Default::default(), - pred: Default::default(), - insert: false, - } - } - - #[test] - fn insert() { - let mut t: OpTree = OpTree::new(); - - t.internal.insert(0, op()); - t.internal.insert(1, op()); - t.internal.insert(0, op()); - t.internal.insert(0, op()); - t.internal.insert(0, op()); - t.internal.insert(3, op()); - t.internal.insert(4, op()); - } - - #[test] - fn insert_book() { - let mut t: OpTree = OpTree::new(); - - for i in 0..100 { - t.internal.insert(i % 2, op()); - } - } - - #[test] - fn insert_book_vec() { - let mut t: OpTree = OpTree::new(); - let mut v = Vec::new(); - - for i in 0..100 { - t.internal.insert(i % 3, op()); - v.insert(i % 3, op()); - - assert_eq!(v, t.internal.iter().cloned().collect::>()) 
- } - } -} diff --git a/automerge/src/parents.rs b/automerge/src/parents.rs deleted file mode 100644 index 83e9b1c2..00000000 --- a/automerge/src/parents.rs +++ /dev/null @@ -1,35 +0,0 @@ -use crate::op_set::OpSet; -use crate::types::ObjId; -use crate::{exid::ExId, Prop}; - -#[derive(Debug)] -pub struct Parents<'a> { - pub(crate) obj: ObjId, - pub(crate) ops: &'a OpSet, -} - -impl<'a> Parents<'a> { - pub fn path(&mut self) -> Vec<(ExId, Prop)> { - let mut path = self.collect::>(); - path.reverse(); - path - } -} - -impl<'a> Iterator for Parents<'a> { - type Item = (ExId, Prop); - - fn next(&mut self) -> Option { - if self.obj.is_root() { - None - } else if let Some((obj, key)) = self.ops.parent_object(&self.obj) { - self.obj = obj; - Some(( - self.ops.id_to_exid(self.obj.0), - self.ops.export_key(self.obj, key), - )) - } else { - None - } - } -} diff --git a/automerge/src/query/elem_id_pos.rs b/automerge/src/query/elem_id_pos.rs deleted file mode 100644 index 809b6061..00000000 --- a/automerge/src/query/elem_id_pos.rs +++ /dev/null @@ -1,56 +0,0 @@ -use crate::{ - op_tree::OpTreeNode, - types::{ElemId, Key}, -}; - -use super::{QueryResult, TreeQuery}; - -/// Lookup the index in the list that this elemid occupies. 
-pub(crate) struct ElemIdPos { - elemid: ElemId, - pos: usize, - found: bool, -} - -impl ElemIdPos { - pub(crate) fn new(elemid: ElemId) -> Self { - Self { - elemid, - pos: 0, - found: false, - } - } - - pub(crate) fn index(&self) -> Option { - if self.found { - Some(self.pos) - } else { - None - } - } -} - -impl<'a> TreeQuery<'a> for ElemIdPos { - fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { - // if index has our element then we can continue - if child.index.has_visible(&Key::Seq(self.elemid)) { - // element is in this node somewhere - QueryResult::Descend - } else { - // not in this node, try the next one - self.pos += child.index.visible_len(); - QueryResult::Next - } - } - - fn query_element(&mut self, element: &crate::types::Op) -> QueryResult { - if element.elemid() == Some(self.elemid) { - // this is it - self.found = true; - return QueryResult::Finish; - } else if element.visible() { - self.pos += 1; - } - QueryResult::Next - } -} diff --git a/automerge/src/query/len.rs b/automerge/src/query/len.rs deleted file mode 100644 index 697d0430..00000000 --- a/automerge/src/query/len.rs +++ /dev/null @@ -1,21 +0,0 @@ -use crate::op_tree::OpTreeNode; -use crate::query::{QueryResult, TreeQuery}; -use std::fmt::Debug; - -#[derive(Debug, Clone, PartialEq)] -pub(crate) struct Len { - pub(crate) len: usize, -} - -impl Len { - pub(crate) fn new() -> Self { - Len { len: 0 } - } -} - -impl<'a> TreeQuery<'a> for Len { - fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { - self.len = child.index.visible_len(); - QueryResult::Finish - } -} diff --git a/automerge/src/query/seek_op.rs b/automerge/src/query/seek_op.rs deleted file mode 100644 index 023c431a..00000000 --- a/automerge/src/query/seek_op.rs +++ /dev/null @@ -1,145 +0,0 @@ -use crate::op_tree::{OpSetMetadata, OpTreeNode}; -use crate::query::{binary_search_by, QueryResult, TreeQuery}; -use crate::types::{Key, Op, HEAD}; -use std::cmp::Ordering; -use std::fmt::Debug; - -#[derive(Debug, 
Clone, PartialEq)] -pub(crate) struct SeekOp<'a> { - /// the op we are looking for - op: &'a Op, - /// The position to insert at - pub(crate) pos: usize, - /// The indices of ops that this op overwrites - pub(crate) succ: Vec, - /// whether a position has been found - found: bool, - /// The found start position of the key if there is one yet (for map objects). - start: Option, -} - -impl<'a> SeekOp<'a> { - pub(crate) fn new(op: &'a Op) -> Self { - SeekOp { - op, - succ: vec![], - pos: 0, - found: false, - start: None, - } - } - - fn lesser_insert(&self, op: &Op, m: &OpSetMetadata) -> bool { - op.insert && m.lamport_cmp(op.id, self.op.id) == Ordering::Less - } - - fn greater_opid(&self, op: &Op, m: &OpSetMetadata) -> bool { - m.lamport_cmp(op.id, self.op.id) == Ordering::Greater - } - - fn is_target_insert(&self, op: &Op) -> bool { - op.insert && op.elemid() == self.op.key.elemid() - } -} - -impl<'a> TreeQuery<'a> for SeekOp<'a> { - fn query_node_with_metadata(&mut self, child: &OpTreeNode, m: &OpSetMetadata) -> QueryResult { - if self.found { - return QueryResult::Descend; - } - match self.op.key { - Key::Seq(HEAD) => { - while self.pos < child.len() { - let op = child.get(self.pos).unwrap(); - if op.insert && m.lamport_cmp(op.id, self.op.id) == Ordering::Less { - break; - } - self.pos += 1; - } - QueryResult::Finish - } - Key::Seq(e) => { - if child.index.ops.contains(&e.0) { - QueryResult::Descend - } else { - self.pos += child.len(); - QueryResult::Next - } - } - Key::Map(_) => { - if let Some(start) = self.start { - if self.pos + child.len() >= start { - // skip empty nodes - if child.index.visible_len() == 0 { - self.pos += child.len(); - QueryResult::Next - } else { - QueryResult::Descend - } - } else { - self.pos += child.len(); - QueryResult::Next - } - } else { - // in the root node find the first op position for the key - let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.op.key)); - self.start = Some(start); - self.pos = start; - 
QueryResult::Skip(start) - } - } - } - } - - fn query_element_with_metadata(&mut self, e: &Op, m: &OpSetMetadata) -> QueryResult { - match self.op.key { - Key::Map(_) => { - // don't bother looking at things past our key - if e.key != self.op.key { - return QueryResult::Finish; - } - - if self.op.overwrites(e) { - self.succ.push(self.pos); - } - - if m.lamport_cmp(e.id, self.op.id) == Ordering::Greater { - return QueryResult::Finish; - } - - self.pos += 1; - QueryResult::Next - } - Key::Seq(_) => { - if !self.found { - if self.is_target_insert(e) { - self.found = true; - if self.op.overwrites(e) { - self.succ.push(self.pos); - } - } - self.pos += 1; - QueryResult::Next - } else { - // we have already found the target - if self.op.overwrites(e) { - self.succ.push(self.pos); - } - if self.op.insert { - if self.lesser_insert(e, m) { - QueryResult::Finish - } else { - self.pos += 1; - QueryResult::Next - } - } else if e.insert || self.greater_opid(e, m) { - QueryResult::Finish - } else { - self.pos += 1; - QueryResult::Next - } - } - } - } - } -} diff --git a/automerge/src/storage/parse/leb128.rs b/automerge/src/storage/parse/leb128.rs deleted file mode 100644 index 800253c9..00000000 --- a/automerge/src/storage/parse/leb128.rs +++ /dev/null @@ -1,118 +0,0 @@ -use core::mem::size_of; -use std::num::NonZeroU64; - -use super::{take1, Input, ParseError, ParseResult}; - -#[derive(PartialEq, thiserror::Error, Debug, Clone)] -pub(crate) enum Error { - #[error("leb128 was too large for the destination type")] - Leb128TooLarge, - #[error("leb128 was zero when it was expected to be nonzero")] - UnexpectedZero, -} - -macro_rules! 
impl_leb { - ($parser_name: ident, $ty: ty) => { - #[allow(dead_code)] - pub(crate) fn $parser_name<'a, E>(input: Input<'a>) -> ParseResult<'a, $ty, E> - where - E: From, - { - let mut res = 0; - let mut shift = 0; - - let mut input = input; - let mut pos = 0; - loop { - let (i, byte) = take1(input)?; - input = i; - if (byte & 0x80) == 0 { - res |= (byte as $ty) << shift; - return Ok((input, res)); - } else if pos == leb128_size::<$ty>() - 1 { - return Err(ParseError::Error(Error::Leb128TooLarge.into())); - } else { - res |= ((byte & 0x7F) as $ty) << shift; - } - pos += 1; - shift += 7; - } - } - }; -} - -impl_leb!(leb128_u64, u64); -impl_leb!(leb128_u32, u32); -impl_leb!(leb128_i64, i64); -impl_leb!(leb128_i32, i32); - -/// Parse a LEB128 encoded u64 from the input, throwing an error if it is `0` -pub(crate) fn nonzero_leb128_u64(input: Input<'_>) -> ParseResult<'_, NonZeroU64, E> -where - E: From, -{ - let (input, num) = leb128_u64(input)?; - let result = - NonZeroU64::new(num).ok_or_else(|| ParseError::Error(Error::UnexpectedZero.into()))?; - Ok((input, result)) -} - -/// Maximum LEB128-encoded size of an integer type -const fn leb128_size() -> usize { - let bits = size_of::() * 8; - (bits + 6) / 7 // equivalent to ceil(bits/7) w/o floats -} - -#[cfg(test)] -mod tests { - use super::super::Needed; - use super::*; - use std::{convert::TryFrom, num::NonZeroUsize}; - - const NEED_ONE: Needed = Needed::Size(unsafe { NonZeroUsize::new_unchecked(1) }); - - #[test] - fn leb_128_unsigned() { - let one = &[0b00000001_u8]; - let one_two_nine = &[0b10000001, 0b00000001]; - let one_and_more = &[0b00000001, 0b00000011]; - - let scenarios: Vec<(&'static [u8], ParseResult<'_, u64, Error>)> = vec![ - (one, Ok((Input::with_position(one, 1), 1))), - (&[0b10000001_u8], Err(ParseError::Incomplete(NEED_ONE))), - ( - one_two_nine, - Ok((Input::with_position(one_two_nine, 2), 129)), - ), - (one_and_more, Ok((Input::with_position(one_and_more, 1), 1))), - ( - &[129, 129, 129, 129, 129, 
129, 129, 129, 129, 129, 129, 129], - Err(ParseError::Error(Error::Leb128TooLarge)), - ), - ]; - for (index, (input, expected)) in scenarios.clone().into_iter().enumerate() { - let result = leb128_u64(Input::new(input)); - if result != expected { - panic!( - "Scenario {} failed for u64: expected {:?} got {:?}", - index + 1, - expected, - result - ); - } - } - - for (index, (input, expected)) in scenarios.into_iter().enumerate() { - let u32_expected = expected.map(|(i, e)| (i, u32::try_from(e).unwrap())); - let result = leb128_u32(Input::new(input)); - if result != u32_expected { - panic!( - "Scenario {} failed for u32: expected {:?} got {:?}", - index + 1, - u32_expected, - result - ); - } - } - } -} diff --git a/automerge/src/sync.rs b/automerge/src/sync.rs deleted file mode 100644 index dcbb625f..00000000 --- a/automerge/src/sync.rs +++ /dev/null @@ -1,438 +0,0 @@ -use itertools::Itertools; -use std::collections::{HashMap, HashSet}; - -use crate::{ - storage::{parse, Change as StoredChange, ReadChangeOpError}, - Automerge, AutomergeError, Change, ChangeHash, OpObserver, -}; - -mod bloom; -mod state; - -pub use bloom::BloomFilter; -pub use state::DecodeError as DecodeStateError; -pub use state::{Have, State}; - -const MESSAGE_TYPE_SYNC: u8 = 0x42; // first byte of a sync message, for identification - -impl Automerge { - pub fn generate_sync_message(&self, sync_state: &mut State) -> Option { - let our_heads = self.get_heads(); - - let our_need = self.get_missing_deps(sync_state.their_heads.as_ref().unwrap_or(&vec![])); - - let their_heads_set = if let Some(ref heads) = sync_state.their_heads { - heads.iter().collect::>() - } else { - HashSet::new() - }; - let our_have = if our_need.iter().all(|hash| their_heads_set.contains(hash)) { - vec![self.make_bloom_filter(sync_state.shared_heads.clone())] - } else { - Vec::new() - }; - - if let Some(ref their_have) = sync_state.their_have { - if let Some(first_have) = their_have.first().as_ref() { - if !first_have - 
.last_sync - .iter() - .all(|hash| self.get_change_by_hash(hash).is_some()) - { - let reset_msg = Message { - heads: our_heads, - need: Vec::new(), - have: vec![Have::default()], - changes: Vec::new(), - }; - return Some(reset_msg); - } - } - } - - let changes_to_send = if let (Some(their_have), Some(their_need)) = ( - sync_state.their_have.as_ref(), - sync_state.their_need.as_ref(), - ) { - self.get_changes_to_send(their_have, their_need) - .expect("Should have only used hashes that are in the document") - } else { - Vec::new() - }; - - let heads_unchanged = sync_state.last_sent_heads == our_heads; - - let heads_equal = if let Some(their_heads) = sync_state.their_heads.as_ref() { - their_heads == &our_heads - } else { - false - }; - - if heads_unchanged && heads_equal && changes_to_send.is_empty() { - return None; - } - - // deduplicate the changes to send with those we have already sent and clone it now - let changes_to_send = changes_to_send - .into_iter() - .filter_map(|change| { - if !sync_state.sent_hashes.contains(&change.hash()) { - Some(change.clone()) - } else { - None - } - }) - .collect::>(); - - sync_state.last_sent_heads = our_heads.clone(); - sync_state - .sent_hashes - .extend(changes_to_send.iter().map(|c| c.hash())); - - let sync_message = Message { - heads: our_heads, - have: our_have, - need: our_need, - changes: changes_to_send, - }; - - Some(sync_message) - } - - pub fn receive_sync_message( - &mut self, - sync_state: &mut State, - message: Message, - ) -> Result<(), AutomergeError> { - self.receive_sync_message_with::<()>(sync_state, message, None) - } - - pub fn receive_sync_message_with( - &mut self, - sync_state: &mut State, - message: Message, - op_observer: Option<&mut Obs>, - ) -> Result<(), AutomergeError> { - let before_heads = self.get_heads(); - - let Message { - heads: message_heads, - changes: message_changes, - need: message_need, - have: message_have, - } = message; - - let changes_is_empty = message_changes.is_empty(); - if 
!changes_is_empty { - self.apply_changes_with(message_changes, op_observer)?; - sync_state.shared_heads = advance_heads( - &before_heads.iter().collect(), - &self.get_heads().into_iter().collect(), - &sync_state.shared_heads, - ); - } - - // trim down the sent hashes to those that we know they haven't seen - self.filter_changes(&message_heads, &mut sync_state.sent_hashes)?; - - if changes_is_empty && message_heads == before_heads { - sync_state.last_sent_heads = message_heads.clone(); - } - - let known_heads = message_heads - .iter() - .filter(|head| self.get_change_by_hash(head).is_some()) - .collect::>(); - if known_heads.len() == message_heads.len() { - sync_state.shared_heads = message_heads.clone(); - // If the remote peer has lost all its data, reset our state to perform a full resync - if message_heads.is_empty() { - sync_state.last_sent_heads = Default::default(); - sync_state.sent_hashes = Default::default(); - } - } else { - sync_state.shared_heads = sync_state - .shared_heads - .iter() - .chain(known_heads) - .copied() - .unique() - .sorted() - .collect::>(); - } - - sync_state.their_have = Some(message_have); - sync_state.their_heads = Some(message_heads); - sync_state.their_need = Some(message_need); - - Ok(()) - } - - fn make_bloom_filter(&self, last_sync: Vec) -> Have { - let new_changes = self - .get_changes(&last_sync) - .expect("Should have only used hashes that are in the document"); - let hashes = new_changes.iter().map(|change| change.hash()); - Have { - last_sync, - bloom: BloomFilter::from_hashes(hashes), - } - } - - fn get_changes_to_send( - &self, - have: &[Have], - need: &[ChangeHash], - ) -> Result, AutomergeError> { - if have.is_empty() { - Ok(need - .iter() - .filter_map(|hash| self.get_change_by_hash(hash)) - .collect()) - } else { - let mut last_sync_hashes = HashSet::new(); - let mut bloom_filters = Vec::with_capacity(have.len()); - - for h in have { - let Have { last_sync, bloom } = h; - last_sync_hashes.extend(last_sync); - 
bloom_filters.push(bloom); - } - let last_sync_hashes = last_sync_hashes.into_iter().copied().collect::>(); - - let changes = self.get_changes(&last_sync_hashes)?; - - let mut change_hashes = HashSet::with_capacity(changes.len()); - let mut dependents: HashMap> = HashMap::new(); - let mut hashes_to_send = HashSet::new(); - - for change in &changes { - change_hashes.insert(change.hash()); - - for dep in change.deps() { - dependents.entry(*dep).or_default().push(change.hash()); - } - - if bloom_filters - .iter() - .all(|bloom| !bloom.contains_hash(&change.hash())) - { - hashes_to_send.insert(change.hash()); - } - } - - let mut stack = hashes_to_send.iter().copied().collect::>(); - while let Some(hash) = stack.pop() { - if let Some(deps) = dependents.get(&hash) { - for dep in deps { - if hashes_to_send.insert(*dep) { - stack.push(*dep); - } - } - } - } - - let mut changes_to_send = Vec::new(); - for hash in need { - hashes_to_send.insert(*hash); - if !change_hashes.contains(hash) { - let change = self.get_change_by_hash(hash); - if let Some(change) = change { - changes_to_send.push(change); - } - } - } - - for change in changes { - if hashes_to_send.contains(&change.hash()) { - changes_to_send.push(change); - } - } - Ok(changes_to_send) - } - } -} - -#[derive(Debug, thiserror::Error)] -pub enum ReadMessageError { - #[error("expected {expected_one_of:?} but found {found}")] - WrongType { expected_one_of: Vec, found: u8 }, - #[error("{0}")] - Parse(String), - #[error(transparent)] - ReadChangeOps(#[from] ReadChangeOpError), - #[error("not enough input")] - NotEnoughInput, -} - -impl From for ReadMessageError { - fn from(e: parse::leb128::Error) -> Self { - ReadMessageError::Parse(e.to_string()) - } -} - -impl From for ReadMessageError { - fn from(e: bloom::ParseError) -> Self { - ReadMessageError::Parse(e.to_string()) - } -} - -impl From for ReadMessageError { - fn from(e: crate::storage::change::ParseError) -> Self { - ReadMessageError::Parse(format!("error parsing 
changes: {}", e)) - } -} - -impl From for parse::ParseError { - fn from(e: ReadMessageError) -> Self { - parse::ParseError::Error(e) - } -} - -impl From> for ReadMessageError { - fn from(p: parse::ParseError) -> Self { - match p { - parse::ParseError::Error(e) => e, - parse::ParseError::Incomplete(..) => Self::NotEnoughInput, - } - } -} - -/// The sync message to be sent. -#[derive(Clone, Debug, PartialEq)] -pub struct Message { - /// The heads of the sender. - pub heads: Vec, - /// The hashes of any changes that are being explicitly requested from the recipient. - pub need: Vec, - /// A summary of the changes that the sender already has. - pub have: Vec, - /// The changes for the recipient to apply. - pub changes: Vec, -} - -fn parse_have(input: parse::Input<'_>) -> parse::ParseResult<'_, Have, ReadMessageError> { - let (i, last_sync) = parse::length_prefixed(parse::change_hash)(input)?; - let (i, bloom_bytes) = parse::length_prefixed_bytes(i)?; - let (_, bloom) = BloomFilter::parse(parse::Input::new(bloom_bytes)).map_err(|e| e.lift())?; - Ok((i, Have { last_sync, bloom })) -} - -impl Message { - pub fn decode(input: &[u8]) -> Result { - let input = parse::Input::new(input); - match Self::parse(input) { - Ok((_, msg)) => Ok(msg), - Err(parse::ParseError::Error(e)) => Err(e), - Err(parse::ParseError::Incomplete(_)) => Err(ReadMessageError::NotEnoughInput), - } - } - - pub(crate) fn parse(input: parse::Input<'_>) -> parse::ParseResult<'_, Self, ReadMessageError> { - let (i, message_type) = parse::take1(input)?; - if message_type != MESSAGE_TYPE_SYNC { - return Err(parse::ParseError::Error(ReadMessageError::WrongType { - expected_one_of: vec![MESSAGE_TYPE_SYNC], - found: message_type, - })); - } - - let (i, heads) = parse::length_prefixed(parse::change_hash)(i)?; - let (i, need) = parse::length_prefixed(parse::change_hash)(i)?; - let (i, have) = parse::length_prefixed(parse_have)(i)?; - - let change_parser = |i| { - let (i, bytes) = parse::length_prefixed_bytes(i)?; 
- let (_, change) = - StoredChange::parse(parse::Input::new(bytes)).map_err(|e| e.lift())?; - Ok((i, change)) - }; - let (i, stored_changes) = parse::length_prefixed(change_parser)(i)?; - let changes_len = stored_changes.len(); - let changes: Vec = stored_changes - .into_iter() - .try_fold::<_, _, Result<_, ReadMessageError>>( - Vec::with_capacity(changes_len), - |mut acc, stored| { - let change = Change::new_from_unverified(stored.into_owned(), None) - .map_err(ReadMessageError::ReadChangeOps)?; - acc.push(change); - Ok(acc) - }, - )?; - - Ok(( - i, - Message { - heads, - need, - have, - changes, - }, - )) - } - - pub fn encode(mut self) -> Vec { - let mut buf = vec![MESSAGE_TYPE_SYNC]; - - encode_hashes(&mut buf, &self.heads); - encode_hashes(&mut buf, &self.need); - encode_many(&mut buf, self.have.iter(), |buf, h| { - encode_hashes(buf, &h.last_sync); - leb128::write::unsigned(buf, h.bloom.to_bytes().len() as u64).unwrap(); - buf.extend(h.bloom.to_bytes()); - }); - - encode_many(&mut buf, self.changes.iter_mut(), |buf, change| { - leb128::write::unsigned(buf, change.raw_bytes().len() as u64).unwrap(); - buf.extend(change.bytes().as_ref()) - }); - - buf - } -} - -fn encode_many<'a, I, It, F>(out: &mut Vec, data: I, f: F) -where - I: Iterator + ExactSizeIterator + 'a, - F: Fn(&mut Vec, It), -{ - leb128::write::unsigned(out, data.len() as u64).unwrap(); - for datum in data { - f(out, datum) - } -} - -fn encode_hashes(buf: &mut Vec, hashes: &[ChangeHash]) { - debug_assert!( - hashes.windows(2).all(|h| h[0] <= h[1]), - "hashes were not sorted" - ); - encode_many(buf, hashes.iter(), |buf, hash| buf.extend(hash.as_bytes())) -} - -fn advance_heads( - my_old_heads: &HashSet<&ChangeHash>, - my_new_heads: &HashSet, - our_old_shared_heads: &[ChangeHash], -) -> Vec { - let new_heads = my_new_heads - .iter() - .filter(|head| !my_old_heads.contains(head)) - .copied() - .collect::>(); - - let common_heads = our_old_shared_heads - .iter() - .filter(|head| 
my_new_heads.contains(head)) - .copied() - .collect::>(); - - let mut advanced_heads = HashSet::with_capacity(new_heads.len() + common_heads.len()); - for head in new_heads.into_iter().chain(common_heads) { - advanced_heads.insert(head); - } - let mut advanced_heads = advanced_heads.into_iter().collect::>(); - advanced_heads.sort(); - advanced_heads -} diff --git a/automerge/src/transaction/inner.rs b/automerge/src/transaction/inner.rs deleted file mode 100644 index aff82a99..00000000 --- a/automerge/src/transaction/inner.rs +++ /dev/null @@ -1,462 +0,0 @@ -use std::num::NonZeroU64; - -use crate::automerge::Actor; -use crate::exid::ExId; -use crate::query::{self, OpIdSearch}; -use crate::storage::Change as StoredChange; -use crate::types::{Key, ObjId, OpId}; -use crate::{op_tree::OpSetMetadata, types::Op, Automerge, Change, ChangeHash, OpObserver, Prop}; -use crate::{AutomergeError, ObjType, OpType, ScalarValue}; - -#[derive(Debug, Clone)] -pub(crate) struct TransactionInner { - pub(crate) actor: usize, - pub(crate) seq: u64, - pub(crate) start_op: NonZeroU64, - pub(crate) time: i64, - pub(crate) message: Option, - pub(crate) deps: Vec, - pub(crate) operations: Vec<(ObjId, Prop, Op)>, -} - -impl TransactionInner { - pub(crate) fn pending_ops(&self) -> usize { - self.operations.len() - } - - /// Commit the operations performed in this transaction, returning the hashes corresponding to - /// the new heads. 
- #[tracing::instrument(skip(self, doc))] - pub(crate) fn commit( - mut self, - doc: &mut Automerge, - message: Option, - time: Option, - ) -> ChangeHash { - if message.is_some() { - self.message = message; - } - - if let Some(t) = time { - self.time = t; - } - - let num_ops = self.pending_ops(); - let change = self.export(&doc.ops.m); - let hash = change.hash(); - #[cfg(not(debug_assertions))] - tracing::trace!(commit=?hash, deps=?change.deps(), "committing transaction"); - #[cfg(debug_assertions)] - { - let ops = change.iter_ops().collect::>(); - tracing::trace!(commit=?hash, ?ops, deps=?change.deps(), "committing transaction"); - } - doc.update_history(change, num_ops); - debug_assert_eq!(doc.get_heads(), vec![hash]); - hash - } - - #[tracing::instrument(skip(self, metadata))] - pub(crate) fn export(self, metadata: &OpSetMetadata) -> Change { - use crate::storage::{change::PredOutOfOrder, convert::op_as_actor_id}; - - let actor = metadata.actors.get(self.actor).clone(); - let ops = self.operations.iter().map(|o| (&o.0, &o.2)); - //let (ops, other_actors) = encode_change_ops(ops, actor.clone(), actors, props); - let deps = self.deps.clone(); - let stored = match StoredChange::builder() - .with_actor(actor) - .with_seq(self.seq) - .with_start_op(self.start_op) - .with_message(self.message.clone()) - .with_dependencies(deps) - .with_timestamp(self.time) - .build( - ops.into_iter() - .map(|(obj, op)| op_as_actor_id(obj, op, metadata)), - ) { - Ok(s) => s, - Err(PredOutOfOrder) => { - // SAFETY: types::Op::preds is `types::OpIds` which ensures ops are always sorted - panic!("preds out of order"); - } - }; - #[cfg(debug_assertions)] - { - let realized_ops = self.operations.iter().collect::>(); - tracing::trace!(?stored, ops=?realized_ops, "committing change"); - } - #[cfg(not(debug_assertions))] - tracing::trace!(?stored, "committing change"); - Change::new(stored) - } - - /// Undo the operations added in this transaction, returning the number of cancelled - /// 
operations. - pub(crate) fn rollback(self, doc: &mut Automerge) -> usize { - let num = self.pending_ops(); - // remove in reverse order so sets are removed before makes etc... - for (obj, _prop, op) in self.operations.into_iter().rev() { - for pred_id in &op.pred { - if let Some(p) = doc.ops.search(&obj, OpIdSearch::new(*pred_id)).index() { - doc.ops.replace(&obj, p, |o| o.remove_succ(&op)); - } - } - if let Some(pos) = doc.ops.search(&obj, OpIdSearch::new(op.id)).index() { - doc.ops.remove(&obj, pos); - } - } - - // remove the actor from the cache so that it doesn't end up in the saved document - if doc.states.get(&self.actor).is_none() && doc.ops.m.actors.len() > 0 { - let actor = doc.ops.m.actors.remove_last(); - doc.actor = Actor::Unused(actor); - } - - num - } - - /// Set the value of property `P` to value `V` in object `obj`. - /// - /// # Returns - /// - /// The opid of the operation which was created, or None if this operation doesn't change the - /// document - /// - /// # Errors - /// - /// This will return an error if - /// - The object does not exist - /// - The key is the wrong type for the object - /// - The key does not exist in the object - pub(crate) fn put, V: Into, Obs: OpObserver>( - &mut self, - doc: &mut Automerge, - op_observer: &mut Obs, - ex_obj: &ExId, - prop: P, - value: V, - ) -> Result<(), AutomergeError> { - let obj = doc.exid_to_obj(ex_obj)?; - let value = value.into(); - let prop = prop.into(); - self.local_op(doc, op_observer, obj, prop, value.into())?; - Ok(()) - } - - /// Set the value of property `P` to value `V` in object `obj`. 
- /// - /// # Returns - /// - /// The opid of the operation which was created, or None if this operation doesn't change the - /// document - /// - /// # Errors - /// - /// This will return an error if - /// - The object does not exist - /// - The key is the wrong type for the object - /// - The key does not exist in the object - pub(crate) fn put_object, Obs: OpObserver>( - &mut self, - doc: &mut Automerge, - op_observer: &mut Obs, - ex_obj: &ExId, - prop: P, - value: ObjType, - ) -> Result { - let obj = doc.exid_to_obj(ex_obj)?; - let prop = prop.into(); - let id = self - .local_op(doc, op_observer, obj, prop, value.into())? - .unwrap(); - let id = doc.id_to_exid(id); - Ok(id) - } - - fn next_id(&mut self) -> OpId { - OpId(self.start_op.get() + self.pending_ops() as u64, self.actor) - } - - #[allow(clippy::too_many_arguments)] - fn insert_local_op( - &mut self, - doc: &mut Automerge, - op_observer: &mut Obs, - prop: Prop, - op: Op, - pos: usize, - obj: ObjId, - succ_pos: &[usize], - ) { - doc.ops.add_succ(&obj, succ_pos.iter().copied(), &op); - - if !op.is_delete() { - doc.ops.insert(pos, &obj, op.clone()); - } - - self.finalize_op(doc, op_observer, obj, prop, op); - } - - pub(crate) fn insert, Obs: OpObserver>( - &mut self, - doc: &mut Automerge, - op_observer: &mut Obs, - ex_obj: &ExId, - index: usize, - value: V, - ) -> Result<(), AutomergeError> { - let obj = doc.exid_to_obj(ex_obj)?; - let value = value.into(); - tracing::trace!(obj=?obj, value=?value, "inserting value"); - self.do_insert(doc, op_observer, obj, index, value.into())?; - Ok(()) - } - - pub(crate) fn insert_object( - &mut self, - doc: &mut Automerge, - op_observer: &mut Obs, - ex_obj: &ExId, - index: usize, - value: ObjType, - ) -> Result { - let obj = doc.exid_to_obj(ex_obj)?; - let id = self.do_insert(doc, op_observer, obj, index, value.into())?; - let id = doc.id_to_exid(id); - Ok(id) - } - - fn do_insert( - &mut self, - doc: &mut Automerge, - op_observer: &mut Obs, - obj: ObjId, - index: 
usize, - action: OpType, - ) -> Result { - let id = self.next_id(); - - let query = doc.ops.search(&obj, query::InsertNth::new(index)); - - let key = query.key()?; - - let op = Op { - id, - action, - key, - succ: Default::default(), - pred: Default::default(), - insert: true, - }; - - doc.ops.insert(query.pos(), &obj, op.clone()); - - self.finalize_op(doc, op_observer, obj, Prop::Seq(index), op); - - Ok(id) - } - - pub(crate) fn local_op( - &mut self, - doc: &mut Automerge, - op_observer: &mut Obs, - obj: ObjId, - prop: Prop, - action: OpType, - ) -> Result, AutomergeError> { - match prop { - Prop::Map(s) => self.local_map_op(doc, op_observer, obj, s, action), - Prop::Seq(n) => self.local_list_op(doc, op_observer, obj, n, action), - } - } - - fn local_map_op( - &mut self, - doc: &mut Automerge, - op_observer: &mut Obs, - obj: ObjId, - prop: String, - action: OpType, - ) -> Result, AutomergeError> { - if prop.is_empty() { - return Err(AutomergeError::EmptyStringKey); - } - - let id = self.next_id(); - let prop_index = doc.ops.m.props.cache(prop.clone()); - let query = doc.ops.search(&obj, query::Prop::new(prop_index)); - - // no key present to delete - if query.ops.is_empty() && action == OpType::Delete { - return Ok(None); - } - - if query.ops.len() == 1 && query.ops[0].is_noop(&action) { - return Ok(None); - } - - // increment operations are only valid against counter values. - // if there are multiple values (from conflicts) then we just need one of them to be a counter. 
- if matches!(action, OpType::Increment(_)) && query.ops.iter().all(|op| !op.is_counter()) { - return Err(AutomergeError::MissingCounter); - } - - let pred = doc.ops.m.sorted_opids(query.ops.iter().map(|o| o.id)); - - let op = Op { - id, - action, - key: Key::Map(prop_index), - succ: Default::default(), - pred, - insert: false, - }; - - let pos = query.pos; - let ops_pos = query.ops_pos; - self.insert_local_op(doc, op_observer, Prop::Map(prop), op, pos, obj, &ops_pos); - - Ok(Some(id)) - } - - fn local_list_op( - &mut self, - doc: &mut Automerge, - op_observer: &mut Obs, - obj: ObjId, - index: usize, - action: OpType, - ) -> Result, AutomergeError> { - let query = doc.ops.search(&obj, query::Nth::new(index)); - - let id = self.next_id(); - let pred = doc.ops.m.sorted_opids(query.ops.iter().map(|o| o.id)); - let key = query.key()?; - - if query.ops.len() == 1 && query.ops[0].is_noop(&action) { - return Ok(None); - } - - // increment operations are only valid against counter values. - // if there are multiple values (from conflicts) then we just need one of them to be a counter. 
- if matches!(action, OpType::Increment(_)) && query.ops.iter().all(|op| !op.is_counter()) { - return Err(AutomergeError::MissingCounter); - } - - let op = Op { - id, - action, - key, - succ: Default::default(), - pred, - insert: false, - }; - - let pos = query.pos; - let ops_pos = query.ops_pos; - self.insert_local_op(doc, op_observer, Prop::Seq(index), op, pos, obj, &ops_pos); - - Ok(Some(id)) - } - - pub(crate) fn increment, Obs: OpObserver>( - &mut self, - doc: &mut Automerge, - op_observer: &mut Obs, - obj: &ExId, - prop: P, - value: i64, - ) -> Result<(), AutomergeError> { - let obj = doc.exid_to_obj(obj)?; - self.local_op(doc, op_observer, obj, prop.into(), OpType::Increment(value))?; - Ok(()) - } - - pub(crate) fn delete, Obs: OpObserver>( - &mut self, - doc: &mut Automerge, - op_observer: &mut Obs, - ex_obj: &ExId, - prop: P, - ) -> Result<(), AutomergeError> { - let obj = doc.exid_to_obj(ex_obj)?; - let prop = prop.into(); - self.local_op(doc, op_observer, obj, prop, OpType::Delete)?; - Ok(()) - } - - /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert - /// the new elements - pub(crate) fn splice( - &mut self, - doc: &mut Automerge, - op_observer: &mut Obs, - ex_obj: &ExId, - mut pos: usize, - del: usize, - vals: impl IntoIterator, - ) -> Result<(), AutomergeError> { - let obj = doc.exid_to_obj(ex_obj)?; - for _ in 0..del { - // del() - self.local_op(doc, op_observer, obj, pos.into(), OpType::Delete)?; - } - for v in vals { - // insert() - self.do_insert(doc, op_observer, obj, pos, v.clone().into())?; - pos += 1; - } - Ok(()) - } - - fn finalize_op( - &mut self, - doc: &mut Automerge, - op_observer: &mut Obs, - obj: ObjId, - prop: Prop, - op: Op, - ) { - // TODO - id_to_exid should be a noop if not used - change type to Into? 
- let ex_obj = doc.ops.id_to_exid(obj.0); - let parents = doc.ops.parents(obj); - if op.insert { - let value = (op.value(), doc.ops.id_to_exid(op.id)); - match prop { - Prop::Map(_) => panic!("insert into a map"), - Prop::Seq(index) => op_observer.insert(parents, ex_obj, index, value), - } - } else if op.is_delete() { - op_observer.delete(parents, ex_obj, prop.clone()); - } else if let Some(value) = op.get_increment_value() { - op_observer.increment( - parents, - ex_obj, - prop.clone(), - (value, doc.ops.id_to_exid(op.id)), - ); - } else { - let value = (op.value(), doc.ops.id_to_exid(op.id)); - op_observer.put(parents, ex_obj, prop.clone(), value, false); - } - self.operations.push((obj, prop, op)); - } -} - -#[cfg(test)] -mod tests { - use crate::{transaction::Transactable, ROOT}; - - use super::*; - - #[test] - fn map_rollback_doesnt_panic() { - let mut doc = Automerge::new(); - let mut tx = doc.transaction(); - - let a = tx.put_object(ROOT, "a", ObjType::Map).unwrap(); - tx.put(&a, "b", 1).unwrap(); - assert!(tx.get(&a, "b").unwrap().is_some()); - } -} diff --git a/automerge/src/transaction/transactable.rs b/automerge/src/transaction/transactable.rs deleted file mode 100644 index 0c7f6c45..00000000 --- a/automerge/src/transaction/transactable.rs +++ /dev/null @@ -1,200 +0,0 @@ -use std::ops::RangeBounds; - -use crate::exid::ExId; -use crate::{ - AutomergeError, ChangeHash, Keys, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, - ObjType, Parents, Prop, ScalarValue, Value, Values, -}; - -/// A way of mutating a document within a single change. -pub trait Transactable { - /// Get the number of pending operations in this transaction. - fn pending_ops(&self) -> usize; - - /// Set the value of property `P` to value `V` in object `obj`. 
- /// - /// # Errors - /// - /// This will return an error if - /// - The object does not exist - /// - The key is the wrong type for the object - /// - The key does not exist in the object - fn put, P: Into, V: Into>( - &mut self, - obj: O, - prop: P, - value: V, - ) -> Result<(), AutomergeError>; - - /// Set the value of property `P` to the new object `V` in object `obj`. - /// - /// # Returns - /// - /// The id of the object which was created. - /// - /// # Errors - /// - /// This will return an error if - /// - The object does not exist - /// - The key is the wrong type for the object - /// - The key does not exist in the object - fn put_object, P: Into>( - &mut self, - obj: O, - prop: P, - object: ObjType, - ) -> Result; - - /// Insert a value into a list at the given index. - fn insert, V: Into>( - &mut self, - obj: O, - index: usize, - value: V, - ) -> Result<(), AutomergeError>; - - /// Insert an object into a list at the given index. - fn insert_object>( - &mut self, - obj: O, - index: usize, - object: ObjType, - ) -> Result; - - /// Increment the counter at the prop in the object by `value`. - fn increment, P: Into>( - &mut self, - obj: O, - prop: P, - value: i64, - ) -> Result<(), AutomergeError>; - - /// Delete the value at prop in the object. - fn delete, P: Into>( - &mut self, - obj: O, - prop: P, - ) -> Result<(), AutomergeError>; - - fn splice, V: IntoIterator>( - &mut self, - obj: O, - pos: usize, - del: usize, - vals: V, - ) -> Result<(), AutomergeError>; - - /// Like [`Self::splice`] but for text. - fn splice_text>( - &mut self, - obj: O, - pos: usize, - del: usize, - text: &str, - ) -> Result<(), AutomergeError> { - let vals = text.chars().map(|c| c.into()); - self.splice(obj, pos, del, vals) - } - - /// Get the keys of the given object, it should be a map. - fn keys>(&self, obj: O) -> Keys<'_, '_>; - - /// Get the keys of the given object at a point in history. 
- fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_>; - - fn map_range, R: RangeBounds>( - &self, - obj: O, - range: R, - ) -> MapRange<'_, R>; - - fn map_range_at, R: RangeBounds>( - &self, - obj: O, - range: R, - heads: &[ChangeHash], - ) -> MapRangeAt<'_, R>; - - fn list_range, R: RangeBounds>( - &self, - obj: O, - range: R, - ) -> ListRange<'_, R>; - - fn list_range_at, R: RangeBounds>( - &self, - obj: O, - range: R, - heads: &[ChangeHash], - ) -> ListRangeAt<'_, R>; - - fn values>(&self, obj: O) -> Values<'_>; - - fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_>; - - /// Get the length of the given object. - fn length>(&self, obj: O) -> usize; - - /// Get the length of the given object at a point in history. - fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize; - - /// Get type for object - fn object_type>(&self, obj: O) -> Option; - - /// Get the string that this text object represents. - fn text>(&self, obj: O) -> Result; - - /// Get the string that this text object represents at a point in history. - fn text_at>( - &self, - obj: O, - heads: &[ChangeHash], - ) -> Result; - - /// Get the value at this prop in the object. - fn get, P: Into>( - &self, - obj: O, - prop: P, - ) -> Result, ExId)>, AutomergeError>; - - /// Get the value at this prop in the object at a point in history. - fn get_at, P: Into>( - &self, - obj: O, - prop: P, - heads: &[ChangeHash], - ) -> Result, ExId)>, AutomergeError>; - - fn get_all, P: Into>( - &self, - obj: O, - prop: P, - ) -> Result, ExId)>, AutomergeError>; - - fn get_all_at, P: Into>( - &self, - obj: O, - prop: P, - heads: &[ChangeHash], - ) -> Result, ExId)>, AutomergeError>; - - /// Get the parents of an object in the document tree. - /// - /// ### Errors - /// - /// Returns an error when the id given is not the id of an object in this document. - /// This function does not get the parents of scalar values contained within objects. 
- /// - /// ### Experimental - /// - /// This function may in future be changed to allow getting the parents from the id of a scalar - /// value. - fn parents>(&self, obj: O) -> Result, AutomergeError>; - - fn path_to_object>(&self, obj: O) -> Result, AutomergeError> { - let mut path = self.parents(obj.as_ref().clone())?.collect::>(); - path.reverse(); - Ok(path) - } -} diff --git a/edit-trace/automerge-rs.js b/edit-trace/automerge-rs.js deleted file mode 100644 index 342f5268..00000000 --- a/edit-trace/automerge-rs.js +++ /dev/null @@ -1,31 +0,0 @@ - -// this assumes that the automerge-rs folder is checked out along side this repo -// and someone has run - -// # cd automerge-rs/automerge-backend-wasm -// # yarn release - -const { edits, finalText } = require('./editing-trace') -const Automerge = require('../../automerge') -const path = require('path') -const wasmBackend = require(path.resolve("../../automerge-rs/automerge-backend-wasm")) -Automerge.setDefaultBackend(wasmBackend) - -const start = new Date() -let state = Automerge.from({text: new Automerge.Text()}) - -state = Automerge.change(state, doc => { - for (let i = 0; i < edits.length; i++) { - if (i % 10000 === 0) { - console.log(`Processed ${i} edits in ${new Date() - start} ms`) - } - if (edits[i][1] > 0) doc.text.deleteAt(edits[i][0], edits[i][1]) - if (edits[i].length > 2) doc.text.insertAt(edits[i][0], ...edits[i].slice(2)) - } -}) - -console.log(`Done in ${new Date() - start} ms`) - -if (state.text.join('') !== finalText) { - throw new RangeError('ERROR: final text did not match expectation') -} diff --git a/flake.lock b/flake.lock index b2070c2d..a052776b 100644 --- a/flake.lock +++ b/flake.lock @@ -2,11 +2,11 @@ "nodes": { "flake-utils": { "locked": { - "lastModified": 1642700792, - "narHash": "sha256-XqHrk7hFb+zBvRg6Ghl+AZDq03ov6OshJLiSWOoX5es=", + "lastModified": 1667395993, + "narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=", "owner": "numtide", "repo": "flake-utils", - "rev": 
"846b2ae0fc4cc943637d3d1def4454213e203cba", + "rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f", "type": "github" }, "original": { @@ -17,11 +17,11 @@ }, "flake-utils_2": { "locked": { - "lastModified": 1637014545, - "narHash": "sha256-26IZAc5yzlD9FlDT54io1oqG/bBoyka+FJk5guaX4x4=", + "lastModified": 1659877975, + "narHash": "sha256-zllb8aq3YO3h8B/U0/J1WBgAL8EX5yWf5pMj3G0NAmc=", "owner": "numtide", "repo": "flake-utils", - "rev": "bba5dcc8e0b20ab664967ad83d24d64cb64ec4f4", + "rev": "c0e246b9b83f637f4681389ecabcb2681b4f3af0", "type": "github" }, "original": { @@ -32,11 +32,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1643805626, - "narHash": "sha256-AXLDVMG+UaAGsGSpOtQHPIKB+IZ0KSd9WS77aanGzgc=", + "lastModified": 1669542132, + "narHash": "sha256-DRlg++NJAwPh8io3ExBJdNW7Djs3plVI5jgYQ+iXAZQ=", "owner": "nixos", "repo": "nixpkgs", - "rev": "554d2d8aa25b6e583575459c297ec23750adb6cb", + "rev": "a115bb9bd56831941be3776c8a94005867f316a7", "type": "github" }, "original": { @@ -48,11 +48,11 @@ }, "nixpkgs_2": { "locked": { - "lastModified": 1637453606, - "narHash": "sha256-Gy6cwUswft9xqsjWxFYEnx/63/qzaFUwatcbV5GF/GQ=", + "lastModified": 1665296151, + "narHash": "sha256-uOB0oxqxN9K7XGF1hcnY+PQnlQJ+3bP2vCn/+Ru/bbc=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "8afc4e543663ca0a6a4f496262cd05233737e732", + "rev": "14ccaaedd95a488dd7ae142757884d8e125b3363", "type": "github" }, "original": { @@ -75,11 +75,11 @@ "nixpkgs": "nixpkgs_2" }, "locked": { - "lastModified": 1643941258, - "narHash": "sha256-uHyEuICSu8qQp6adPTqV33ajiwoF0sCh+Iazaz5r7fo=", + "lastModified": 1669775522, + "narHash": "sha256-6xxGArBqssX38DdHpDoPcPvB/e79uXyQBwpBcaO/BwY=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "674156c4c2f46dd6a6846466cb8f9fee84c211ca", + "rev": "3158e47f6b85a288d12948aeb9a048e0ed4434d6", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 938225b7..37835738 100644 --- a/flake.nix +++ b/flake.nix @@ -3,63 +3,67 @@ inputs = { nixpkgs.url = 
"github:nixos/nixpkgs/nixos-unstable"; - flake-utils = { - url = "github:numtide/flake-utils"; - inputs.nixpkgs.follows = "nixpkgs"; - }; + flake-utils.url = "github:numtide/flake-utils"; rust-overlay.url = "github:oxalica/rust-overlay"; }; - outputs = { self, nixpkgs, flake-utils, rust-overlay }: + outputs = { + self, + nixpkgs, + flake-utils, + rust-overlay, + }: flake-utils.lib.eachDefaultSystem - (system: - let - pkgs = import nixpkgs { - overlays = [ rust-overlay.overlay ]; - inherit system; - }; - lib = pkgs.lib; - rust = pkgs.rust-bin.stable.latest.default; - cargoNix = pkgs.callPackage ./Cargo.nix { - inherit pkgs; - release = true; - }; - debugCargoNix = pkgs.callPackage ./Cargo.nix { - inherit pkgs; - release = false; - }; - in - { - devShell = pkgs.mkShell { - buildInputs = with pkgs; - [ - (rust.override { - extensions = [ "rust-src" ]; - targets = [ "wasm32-unknown-unknown" ]; - }) - cargo-edit - cargo-watch - cargo-criterion - cargo-fuzz - cargo-flamegraph - cargo-deny - crate2nix - wasm-pack - pkgconfig - openssl - gnuplot + (system: let + pkgs = import nixpkgs { + overlays = [rust-overlay.overlays.default]; + inherit system; + }; + rust = pkgs.rust-bin.stable.latest.default; + in { + formatter = pkgs.alejandra; - nodejs - yarn + packages = { + deadnix = pkgs.runCommand "deadnix" {} '' + ${pkgs.deadnix}/bin/deadnix --fail ${./.} + mkdir $out + ''; + }; - # c deps - cmake - cmocka - doxygen + checks = { + inherit (self.packages.${system}) deadnix; + }; - rnix-lsp - nixpkgs-fmt - ]; - }; - }); + devShells.default = pkgs.mkShell { + buildInputs = with pkgs; [ + (rust.override { + extensions = ["rust-src"]; + targets = ["wasm32-unknown-unknown"]; + }) + cargo-edit + cargo-watch + cargo-criterion + cargo-fuzz + cargo-flamegraph + cargo-deny + crate2nix + wasm-pack + pkgconfig + openssl + gnuplot + + nodejs + yarn + deno + + # c deps + cmake + cmocka + doxygen + + rnix-lsp + nixpkgs-fmt + ]; + }; + }); } diff --git a/javascript/.denoifyrc.json 
b/javascript/.denoifyrc.json new file mode 100644 index 00000000..9453a31f --- /dev/null +++ b/javascript/.denoifyrc.json @@ -0,0 +1,3 @@ +{ + "replacer": "scripts/denoify-replacer.mjs" +} diff --git a/automerge-js/.eslintignore b/javascript/.eslintignore similarity index 100% rename from automerge-js/.eslintignore rename to javascript/.eslintignore diff --git a/javascript/.eslintrc.cjs b/javascript/.eslintrc.cjs new file mode 100644 index 00000000..88776271 --- /dev/null +++ b/javascript/.eslintrc.cjs @@ -0,0 +1,15 @@ +module.exports = { + root: true, + parser: "@typescript-eslint/parser", + plugins: ["@typescript-eslint"], + extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"], + rules: { + "@typescript-eslint/no-unused-vars": [ + "error", + { + argsIgnorePattern: "^_", + varsIgnorePattern: "^_", + }, + ], + }, +} diff --git a/automerge-js/.gitignore b/javascript/.gitignore similarity index 57% rename from automerge-js/.gitignore rename to javascript/.gitignore index 05065cf0..f98d9db2 100644 --- a/automerge-js/.gitignore +++ b/javascript/.gitignore @@ -1,3 +1,6 @@ /node_modules /yarn.lock dist +docs/ +.vim +deno_dist/ diff --git a/javascript/.prettierignore b/javascript/.prettierignore new file mode 100644 index 00000000..6ab2f796 --- /dev/null +++ b/javascript/.prettierignore @@ -0,0 +1,4 @@ +e2e/verdacciodb +dist +docs +deno_dist diff --git a/javascript/.prettierrc b/javascript/.prettierrc new file mode 100644 index 00000000..18b9c97f --- /dev/null +++ b/javascript/.prettierrc @@ -0,0 +1,4 @@ +{ + "semi": false, + "arrowParens": "avoid" +} diff --git a/javascript/HACKING.md b/javascript/HACKING.md new file mode 100644 index 00000000..b7e92eef --- /dev/null +++ b/javascript/HACKING.md @@ -0,0 +1,39 @@ +## Architecture + +The `@automerge/automerge` package is a set of +[`Proxy`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Proxy) +objects which provide an idiomatic javascript interface built on top of the 
+lower level `@automerge/automerge-wasm` package (which is in turn built from the
+Rust codebase and can be found in `~/automerge-wasm`). I.e. the responsibility
+of this codebase is
+
+- To map from the javascript data model to the underlying `set`, `make`,
+  `insert`, and `delete` operations of Automerge.
+- To expose a more convenient interface to functions in `automerge-wasm` which
+  generate messages to send over the network or compressed file formats to store
+  on disk
+
+## Building and testing
+
+Much of the functionality of this package depends on the
+`@automerge/automerge-wasm` package and frequently you will be working on both
+of them at the same time. It would be frustrating to have to push
+`automerge-wasm` to NPM every time you want to test a change but I (Alex) also
+don't trust `yarn link` to do the right thing here. Therefore, the `./e2e`
+folder contains a little yarn package which spins up a local NPM registry. See
+`./e2e/README` for details. In brief though:
+
+To build `automerge-wasm` and install it in the local `node_modules`
+
+```bash
+cd e2e && yarn install && yarn run e2e buildjs
+```
+
+Now that you've done this you can run the tests
+
+```bash
+yarn test
+```
+
+If you make changes to the `automerge-wasm` package you will need to re-run
+`yarn e2e buildjs`
diff --git a/automerge-js/LICENSE b/javascript/LICENSE
similarity index 100%
rename from automerge-js/LICENSE
rename to javascript/LICENSE
diff --git a/javascript/README.md b/javascript/README.md
new file mode 100644
index 00000000..af8306ac
--- /dev/null
+++ b/javascript/README.md
@@ -0,0 +1,109 @@
+## Automerge
+
+Automerge is a library of data structures for building collaborative
+applications; this package is the javascript implementation.
+
+Detailed documentation is available at [automerge.org](http://automerge.org/)
+but see the following for a short getting started guide.
+
+## Quickstart
+
+First, install the library.
+ +``` +yarn add @automerge/automerge +``` + +If you're writing a `node` application, you can skip straight to [Make some +data](#make-some-data). If you're in a browser you need a bundler + +### Bundler setup + +`@automerge/automerge` is a wrapper around a core library which is written in +rust, compiled to WebAssembly and distributed as a separate package called +`@automerge/automerge-wasm`. Browsers don't currently support WebAssembly +modules taking part in ESM module imports, so you must use a bundler to import +`@automerge/automerge` in the browser. There are a lot of bundlers out there, we +have examples for common bundlers in the `examples` folder. Here is a short +example using Webpack 5. + +Assuming a standard setup of a new webpack project, you'll need to enable the +`asyncWebAssembly` experiment. In a typical webpack project that means adding +something like this to `webpack.config.js` + +```javascript +module.exports = { + ... + experiments: { asyncWebAssembly: true }, + performance: { // we dont want the wasm blob to generate warnings + hints: false, + maxEntrypointSize: 512000, + maxAssetSize: 512000 + } +}; +``` + +### Make some data + +Automerge allows to separate threads of execution to make changes to some data +and always be able to merge their changes later. 
+ +```javascript +import * as automerge from "@automerge/automerge" +import * as assert from "assert" + +let doc1 = automerge.from({ + tasks: [ + { description: "feed fish", done: false }, + { description: "water plants", done: false }, + ], +}) + +// Create a new thread of execution +let doc2 = automerge.clone(doc1) + +// Now we concurrently make changes to doc1 and doc2 + +// Complete a task in doc2 +doc2 = automerge.change(doc2, d => { + d.tasks[0].done = true +}) + +// Add a task in doc1 +doc1 = automerge.change(doc1, d => { + d.tasks.push({ + description: "water fish", + done: false, + }) +}) + +// Merge changes from both docs +doc1 = automerge.merge(doc1, doc2) +doc2 = automerge.merge(doc2, doc1) + +// Both docs are merged and identical +assert.deepEqual(doc1, { + tasks: [ + { description: "feed fish", done: true }, + { description: "water plants", done: false }, + { description: "water fish", done: false }, + ], +}) + +assert.deepEqual(doc2, { + tasks: [ + { description: "feed fish", done: true }, + { description: "water plants", done: false }, + { description: "water fish", done: false }, + ], +}) +``` + +## Development + +See [HACKING.md](./HACKING.md) + +## Meta + +Copyright 2017–present, the Automerge contributors. Released under the terms of the +MIT license (see `LICENSE`). 
diff --git a/javascript/config/cjs.json b/javascript/config/cjs.json new file mode 100644 index 00000000..0b135067 --- /dev/null +++ b/javascript/config/cjs.json @@ -0,0 +1,12 @@ +{ + "extends": "../tsconfig.json", + "exclude": [ + "../dist/**/*", + "../node_modules", + "../test/**/*", + "../src/**/*.deno.ts" + ], + "compilerOptions": { + "outDir": "../dist/cjs" + } +} diff --git a/javascript/config/declonly.json b/javascript/config/declonly.json new file mode 100644 index 00000000..7c1df687 --- /dev/null +++ b/javascript/config/declonly.json @@ -0,0 +1,13 @@ +{ + "extends": "../tsconfig.json", + "exclude": [ + "../dist/**/*", + "../node_modules", + "../test/**/*", + "../src/**/*.deno.ts" + ], + "emitDeclarationOnly": true, + "compilerOptions": { + "outDir": "../dist" + } +} diff --git a/javascript/config/mjs.json b/javascript/config/mjs.json new file mode 100644 index 00000000..ecf3ce36 --- /dev/null +++ b/javascript/config/mjs.json @@ -0,0 +1,14 @@ +{ + "extends": "../tsconfig.json", + "exclude": [ + "../dist/**/*", + "../node_modules", + "../test/**/*", + "../src/**/*.deno.ts" + ], + "compilerOptions": { + "target": "es6", + "module": "es6", + "outDir": "../dist/mjs" + } +} diff --git a/javascript/deno-tests/deno.ts b/javascript/deno-tests/deno.ts new file mode 100644 index 00000000..fc0a4dad --- /dev/null +++ b/javascript/deno-tests/deno.ts @@ -0,0 +1,10 @@ +import * as Automerge from "../deno_dist/index.ts" + +Deno.test("It should create, clone and free", () => { + let doc1 = Automerge.init() + let doc2 = Automerge.clone(doc1) + + // this is only needed if weakrefs are not supported + Automerge.free(doc1) + Automerge.free(doc2) +}) diff --git a/javascript/e2e/.gitignore b/javascript/e2e/.gitignore new file mode 100644 index 00000000..3021843a --- /dev/null +++ b/javascript/e2e/.gitignore @@ -0,0 +1,3 @@ +node_modules/ +verdacciodb/ +htpasswd diff --git a/javascript/e2e/README.md b/javascript/e2e/README.md new file mode 100644 index 00000000..9dcee471 --- 
/dev/null
+++ b/javascript/e2e/README.md
@@ -0,0 +1,70 @@
+# End to end testing for javascript packaging
+
+The network of packages and bundlers we rely on to get the `automerge` package
+working is a little complex. We have the `automerge-wasm` package, which the
+`automerge` package depends upon, which means that anyone who depends on
+`automerge` needs to either a) be using node or b) use a bundler in order to
+load the underlying WASM module which is packaged in `automerge-wasm`.
+
+The various bundlers involved are complicated and capricious and so we need an
+easy way of testing that everything is in fact working as expected. To do this
+we run a custom NPM registry (namely [Verdaccio](https://verdaccio.org/)) and
+build the `automerge-wasm` and `automerge` packages and publish them to this
+registry. Once we have this registry running we are able to build the example
+projects which depend on these packages and check that everything works as
+expected.
+
+## Usage
+
+First, install everything:
+
+```
+yarn install
+```
+
+### Build `automerge-js`
+
+This builds the `automerge-wasm` package and then runs `yarn build` in the
+`automerge-js` project with the `--registry` set to the verdaccio registry. The
+end result is that you can run `yarn test` in the resulting `automerge-js`
+directory in order to run tests against the current `automerge-wasm`.
+
+```
+yarn e2e buildjs
+```
+
+### Build examples
+
+This either builds all the examples in `automerge-js/examples` or just a subset
+of them. Once this is complete you can run the relevant scripts (e.g. `vite dev`
+for the Vite example) to check everything works.
+
+```
+yarn e2e buildexamples
+```
+
+Or, to just build the webpack example
+
+```
+yarn e2e buildexamples -e webpack
+```
+
+### Run Registry
+
+If you're experimenting with a project which is not in the `examples` folder
+you'll need a running registry. 
`run-registry` builds and publishes +`automerge-js` and `automerge-wasm` and then runs the registry at +`localhost:4873`. + +``` +yarn e2e run-registry +``` + +You can now run `yarn install --registry http://localhost:4873` to experiment +with the built packages. + +## Using the `dev` build of `automerge-wasm` + +All the commands above take a `-p` flag which can be either `release` or +`debug`. The `debug` builds with additional debug symbols which makes errors +less cryptic. diff --git a/javascript/e2e/index.ts b/javascript/e2e/index.ts new file mode 100644 index 00000000..fb0b1599 --- /dev/null +++ b/javascript/e2e/index.ts @@ -0,0 +1,534 @@ +import { once } from "events" +import { setTimeout } from "timers/promises" +import { spawn, ChildProcess } from "child_process" +import * as child_process from "child_process" +import { + command, + subcommands, + run, + array, + multioption, + option, + Type, +} from "cmd-ts" +import * as path from "path" +import * as fsPromises from "fs/promises" +import fetch from "node-fetch" + +const VERDACCIO_DB_PATH = path.normalize(`${__dirname}/verdacciodb`) +const VERDACCIO_CONFIG_PATH = path.normalize(`${__dirname}/verdaccio.yaml`) +const AUTOMERGE_WASM_PATH = path.normalize( + `${__dirname}/../../rust/automerge-wasm` +) +const AUTOMERGE_JS_PATH = path.normalize(`${__dirname}/..`) +const EXAMPLES_DIR = path.normalize(path.join(__dirname, "../", "examples")) + +// The different example projects in "../examples" +type Example = "webpack" | "vite" | "create-react-app" + +// Type to parse strings to `Example` so the types line up for the `buildExamples` commmand +const ReadExample: Type = { + async from(str) { + if (str === "webpack") { + return "webpack" + } else if (str === "vite") { + return "vite" + } else if (str === "create-react-app") { + return "create-react-app" + } else { + throw new Error(`Unknown example type ${str}`) + } + }, +} + +type Profile = "dev" | "release" + +const ReadProfile: Type = { + async from(str) { + if 
(str === "dev") { + return "dev" + } else if (str === "release") { + return "release" + } else { + throw new Error(`Unknown profile ${str}`) + } + }, +} + +const buildjs = command({ + name: "buildjs", + args: { + profile: option({ + type: ReadProfile, + long: "profile", + short: "p", + defaultValue: () => "dev" as Profile, + }), + }, + handler: ({ profile }) => { + console.log("building js") + withPublishedWasm(profile, async (registryUrl: string) => { + await buildAndPublishAutomergeJs(registryUrl) + }) + }, +}) + +const buildWasm = command({ + name: "buildwasm", + args: { + profile: option({ + type: ReadProfile, + long: "profile", + short: "p", + defaultValue: () => "dev" as Profile, + }), + }, + handler: ({ profile }) => { + console.log("building automerge-wasm") + withRegistry(buildAutomergeWasm(profile)) + }, +}) + +const buildexamples = command({ + name: "buildexamples", + args: { + examples: multioption({ + long: "example", + short: "e", + type: array(ReadExample), + }), + profile: option({ + type: ReadProfile, + long: "profile", + short: "p", + defaultValue: () => "dev" as Profile, + }), + }, + handler: ({ examples, profile }) => { + if (examples.length === 0) { + examples = ["webpack", "vite", "create-react-app"] + } + buildExamples(examples, profile) + }, +}) + +const runRegistry = command({ + name: "run-registry", + args: { + profile: option({ + type: ReadProfile, + long: "profile", + short: "p", + defaultValue: () => "dev" as Profile, + }), + }, + handler: ({ profile }) => { + withPublishedWasm(profile, async (registryUrl: string) => { + await buildAndPublishAutomergeJs(registryUrl) + console.log("\n************************") + console.log(` Verdaccio NPM registry is running at ${registryUrl}`) + console.log(" press CTRL-C to exit ") + console.log("************************") + await once(process, "SIGINT") + }).catch(e => { + console.error(`Failed: ${e}`) + }) + }, +}) + +const app = subcommands({ + name: "e2e", + cmds: { + buildjs, + buildexamples, + 
buildwasm: buildWasm, + "run-registry": runRegistry, + }, +}) + +run(app, process.argv.slice(2)) + +async function buildExamples(examples: Array, profile: Profile) { + await withPublishedWasm(profile, async registryUrl => { + printHeader("building and publishing automerge") + await buildAndPublishAutomergeJs(registryUrl) + for (const example of examples) { + printHeader(`building ${example} example`) + if (example === "webpack") { + const projectPath = path.join(EXAMPLES_DIR, example) + await removeExistingAutomerge(projectPath) + await fsPromises.rm(path.join(projectPath, "yarn.lock"), { + force: true, + }) + await spawnAndWait( + "yarn", + [ + "--cwd", + projectPath, + "install", + "--registry", + registryUrl, + "--check-files", + ], + { stdio: "inherit" } + ) + await spawnAndWait("yarn", ["--cwd", projectPath, "build"], { + stdio: "inherit", + }) + } else if (example === "vite") { + const projectPath = path.join(EXAMPLES_DIR, example) + await removeExistingAutomerge(projectPath) + await fsPromises.rm(path.join(projectPath, "yarn.lock"), { + force: true, + }) + await spawnAndWait( + "yarn", + [ + "--cwd", + projectPath, + "install", + "--registry", + registryUrl, + "--check-files", + ], + { stdio: "inherit" } + ) + await spawnAndWait("yarn", ["--cwd", projectPath, "build"], { + stdio: "inherit", + }) + } else if (example === "create-react-app") { + const projectPath = path.join(EXAMPLES_DIR, example) + await removeExistingAutomerge(projectPath) + await fsPromises.rm(path.join(projectPath, "yarn.lock"), { + force: true, + }) + await spawnAndWait( + "yarn", + [ + "--cwd", + projectPath, + "install", + "--registry", + registryUrl, + "--check-files", + ], + { stdio: "inherit" } + ) + await spawnAndWait("yarn", ["--cwd", projectPath, "build"], { + stdio: "inherit", + }) + } + } + }) +} + +type WithRegistryAction = (registryUrl: string) => Promise + +async function withRegistry( + action: WithRegistryAction, + ...actions: Array +) { + // First, start verdaccio + 
printHeader("Starting verdaccio NPM server") + const verd = await VerdaccioProcess.start() + actions.unshift(action) + + for (const action of actions) { + try { + type Step = "verd-died" | "action-completed" + const verdDied: () => Promise = async () => { + await verd.died() + return "verd-died" + } + const actionComplete: () => Promise = async () => { + await action("http://localhost:4873") + return "action-completed" + } + const result = await Promise.race([verdDied(), actionComplete()]) + if (result === "verd-died") { + throw new Error("verdaccio unexpectedly exited") + } + } catch (e) { + await verd.kill() + throw e + } + } + await verd.kill() +} + +async function withPublishedWasm(profile: Profile, action: WithRegistryAction) { + await withRegistry(buildAutomergeWasm(profile), publishAutomergeWasm, action) +} + +function buildAutomergeWasm(profile: Profile): WithRegistryAction { + return async (registryUrl: string) => { + printHeader("building automerge-wasm") + await spawnAndWait( + "yarn", + ["--cwd", AUTOMERGE_WASM_PATH, "--registry", registryUrl, "install"], + { stdio: "inherit" } + ) + const cmd = profile === "release" ? 
"release" : "debug" + await spawnAndWait("yarn", ["--cwd", AUTOMERGE_WASM_PATH, cmd], { + stdio: "inherit", + }) + } +} + +async function publishAutomergeWasm(registryUrl: string) { + printHeader("Publishing automerge-wasm to verdaccio") + await fsPromises.rm( + path.join(VERDACCIO_DB_PATH, "@automerge/automerge-wasm"), + { recursive: true, force: true } + ) + await yarnPublish(registryUrl, AUTOMERGE_WASM_PATH) +} + +async function buildAndPublishAutomergeJs(registryUrl: string) { + // Build the js package + printHeader("Building automerge") + await removeExistingAutomerge(AUTOMERGE_JS_PATH) + await removeFromVerdaccio("@automerge/automerge") + await fsPromises.rm(path.join(AUTOMERGE_JS_PATH, "yarn.lock"), { + force: true, + }) + await spawnAndWait( + "yarn", + [ + "--cwd", + AUTOMERGE_JS_PATH, + "install", + "--registry", + registryUrl, + "--check-files", + ], + { stdio: "inherit" } + ) + await spawnAndWait("yarn", ["--cwd", AUTOMERGE_JS_PATH, "build"], { + stdio: "inherit", + }) + await yarnPublish(registryUrl, AUTOMERGE_JS_PATH) +} + +/** + * A running verdaccio process + * + */ +class VerdaccioProcess { + child: ChildProcess + stdout: Array + stderr: Array + + constructor(child: ChildProcess) { + this.child = child + + // Collect stdout/stderr otherwise the subprocess gets blocked writing + this.stdout = [] + this.stderr = [] + this.child.stdout && + this.child.stdout.on("data", data => this.stdout.push(data)) + this.child.stderr && + this.child.stderr.on("data", data => this.stderr.push(data)) + + const errCallback = (e: any) => { + console.error("!!!!!!!!!ERROR IN VERDACCIO PROCESS!!!!!!!!!") + console.error(" ", e) + if (this.stdout.length > 0) { + console.log("\n**Verdaccio stdout**") + const stdout = Buffer.concat(this.stdout) + process.stdout.write(stdout) + } + + if (this.stderr.length > 0) { + console.log("\n**Verdaccio stderr**") + const stdout = Buffer.concat(this.stderr) + process.stdout.write(stdout) + } + process.exit(-1) + } + 
this.child.on("error", errCallback) + } + + /** + * Spawn a verdaccio process and wait for it to respond succesfully to http requests + * + * The returned `VerdaccioProcess` can be used to control the subprocess + */ + static async start() { + const child = spawn( + "yarn", + ["verdaccio", "--config", VERDACCIO_CONFIG_PATH], + { env: { ...process.env, FORCE_COLOR: "true" } } + ) + + // Forward stdout and stderr whilst waiting for startup to complete + const stdoutCallback = (data: Buffer) => process.stdout.write(data) + const stderrCallback = (data: Buffer) => process.stderr.write(data) + child.stdout && child.stdout.on("data", stdoutCallback) + child.stderr && child.stderr.on("data", stderrCallback) + + const healthCheck = async () => { + while (true) { + try { + const resp = await fetch("http://localhost:4873") + if (resp.status === 200) { + return + } else { + console.log(`Healthcheck failed: bad status ${resp.status}`) + } + } catch (e) { + console.error(`Healthcheck failed: ${e}`) + } + await setTimeout(500) + } + } + await withTimeout(healthCheck(), 10000) + + // Stop forwarding stdout/stderr + child.stdout && child.stdout.off("data", stdoutCallback) + child.stderr && child.stderr.off("data", stderrCallback) + return new VerdaccioProcess(child) + } + + /** + * Send a SIGKILL to the process and wait for it to stop + */ + async kill() { + this.child.stdout && this.child.stdout.destroy() + this.child.stderr && this.child.stderr.destroy() + this.child.kill() + try { + await withTimeout(once(this.child, "close"), 500) + } catch (e) { + console.error("unable to kill verdaccio subprocess, trying -9") + this.child.kill(9) + await withTimeout(once(this.child, "close"), 500) + } + } + + /** + * A promise which resolves if the subprocess exits for some reason + */ + async died(): Promise { + const [exit, _signal] = await once(this.child, "exit") + return exit + } +} + +function printHeader(header: string) { + console.log("\n===============================") + 
console.log(` ${header}`) + console.log("===============================") +} + +/** + * Removes the automerge, @automerge/automerge-wasm, and @automerge/automerge packages from + * `$packageDir/node_modules` + * + * This is useful to force refreshing a package by use in combination with + * `yarn install --check-files`, which checks if a package is present in + * `node_modules` and if it is not forces a reinstall. + * + * @param packageDir - The directory containing the package.json of the target project + */ +async function removeExistingAutomerge(packageDir: string) { + await fsPromises.rm(path.join(packageDir, "node_modules", "@automerge"), { + recursive: true, + force: true, + }) + await fsPromises.rm(path.join(packageDir, "node_modules", "automerge"), { + recursive: true, + force: true, + }) +} + +type SpawnResult = { + stdout?: Buffer + stderr?: Buffer +} + +async function spawnAndWait( + cmd: string, + args: Array, + options: child_process.SpawnOptions +): Promise { + const child = spawn(cmd, args, options) + let stdout = null + let stderr = null + if (child.stdout) { + stdout = [] + child.stdout.on("data", data => stdout.push(data)) + } + if (child.stderr) { + stderr = [] + child.stderr.on("data", data => stderr.push(data)) + } + + const [exit, _signal] = await once(child, "exit") + if (exit && exit !== 0) { + throw new Error("nonzero exit code") + } + return { + stderr: stderr ? Buffer.concat(stderr) : null, + stdout: stdout ? Buffer.concat(stdout) : null, + } +} + +/** + * Remove a package from the verdaccio registry. This is necessary because we + * often want to _replace_ a version rather than update the version number. + * Obviously this is very bad and verboten in normal circumastances, but the + * whole point here is to be able to test the entire packaging story so it's + * okay I Promise. 
+ */ +async function removeFromVerdaccio(packageName: string) { + await fsPromises.rm(path.join(VERDACCIO_DB_PATH, packageName), { + force: true, + recursive: true, + }) +} + +async function yarnPublish(registryUrl: string, cwd: string) { + await spawnAndWait( + "yarn", + ["--registry", registryUrl, "--cwd", cwd, "publish", "--non-interactive"], + { + stdio: "inherit", + env: { + ...process.env, + FORCE_COLOR: "true", + // This is a fake token, it just has to be the right format + npm_config__auth: + "//localhost:4873/:_authToken=Gp2Mgxm4faa/7wp0dMSuRA==", + }, + } + ) +} + +/** + * Wait for a given delay to resolve a promise, throwing an error if the + * promise doesn't resolve with the timeout + * + * @param promise - the promise to wait for @param timeout - the delay in + * milliseconds to wait before throwing + */ +async function withTimeout( + promise: Promise, + timeout: number +): Promise { + type Step = "timed-out" | { result: T } + const timedOut: () => Promise = async () => { + await setTimeout(timeout) + return "timed-out" + } + const succeeded: () => Promise = async () => { + const result = await promise + return { result } + } + const result = await Promise.race([timedOut(), succeeded()]) + if (result === "timed-out") { + throw new Error("timed out") + } else { + return result.result + } +} diff --git a/javascript/e2e/package.json b/javascript/e2e/package.json new file mode 100644 index 00000000..7bb80852 --- /dev/null +++ b/javascript/e2e/package.json @@ -0,0 +1,23 @@ +{ + "name": "e2e", + "version": "0.0.1", + "description": "", + "main": "index.js", + "scripts": { + "e2e": "ts-node index.ts" + }, + "author": "", + "license": "ISC", + "dependencies": { + "@types/node": "^18.7.18", + "cmd-ts": "^0.11.0", + "node-fetch": "^2", + "ts-node": "^10.9.1", + "typed-emitter": "^2.1.0", + "typescript": "^4.8.3", + "verdaccio": "5" + }, + "devDependencies": { + "@types/node-fetch": "2.x" + } +} diff --git a/javascript/e2e/tsconfig.json 
b/javascript/e2e/tsconfig.json new file mode 100644 index 00000000..a2109873 --- /dev/null +++ b/javascript/e2e/tsconfig.json @@ -0,0 +1,6 @@ +{ + "compilerOptions": { + "types": ["node"] + }, + "module": "nodenext" +} diff --git a/javascript/e2e/verdaccio.yaml b/javascript/e2e/verdaccio.yaml new file mode 100644 index 00000000..865f5f05 --- /dev/null +++ b/javascript/e2e/verdaccio.yaml @@ -0,0 +1,25 @@ +storage: "./verdacciodb" +auth: + htpasswd: + file: ./htpasswd +publish: + allow_offline: true +logs: { type: stdout, format: pretty, level: info } +packages: + "@automerge/automerge-wasm": + access: "$all" + publish: "$all" + "@automerge/automerge": + access: "$all" + publish: "$all" + "*": + access: "$all" + publish: "$all" + proxy: npmjs + "@*/*": + access: "$all" + publish: "$all" + proxy: npmjs +uplinks: + npmjs: + url: https://registry.npmjs.org/ diff --git a/javascript/e2e/yarn.lock b/javascript/e2e/yarn.lock new file mode 100644 index 00000000..46e2abf2 --- /dev/null +++ b/javascript/e2e/yarn.lock @@ -0,0 +1,2130 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@cspotcode/source-map-support@^0.8.0": + version "0.8.1" + resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1" + integrity sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw== + dependencies: + "@jridgewell/trace-mapping" "0.3.9" + +"@jridgewell/resolve-uri@^3.0.3": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" + integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== + +"@jridgewell/sourcemap-codec@^1.4.10": + version "1.4.14" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" + integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== + +"@jridgewell/trace-mapping@0.3.9": + version "0.3.9" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9" + integrity sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@tootallnate/once@1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" + integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== + +"@tsconfig/node10@^1.0.7": + version "1.0.9" + resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2" + integrity sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA== + +"@tsconfig/node12@^1.0.7": + version "1.0.11" + resolved 
"https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d" + integrity sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag== + +"@tsconfig/node14@^1.0.0": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1" + integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow== + +"@tsconfig/node16@^1.0.2": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.3.tgz#472eaab5f15c1ffdd7f8628bd4c4f753995ec79e" + integrity sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ== + +"@types/node-fetch@2.x": + version "2.6.2" + resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.2.tgz#d1a9c5fd049d9415dce61571557104dec3ec81da" + integrity sha512-DHqhlq5jeESLy19TYhLakJ07kNumXWjcDdxXsLUMJZ6ue8VZJj4kLPQVE/2mdHh3xZziNF1xppu5lwmS53HR+A== + dependencies: + "@types/node" "*" + form-data "^3.0.0" + +"@types/node@*", "@types/node@^18.7.18": + version "18.7.23" + resolved "https://registry.yarnpkg.com/@types/node/-/node-18.7.23.tgz#75c580983846181ebe5f4abc40fe9dfb2d65665f" + integrity sha512-DWNcCHolDq0ZKGizjx2DZjR/PqsYwAcYUJmfMWqtVU2MBMG5Mo+xFZrhGId5r/O5HOuMPyQEcM6KUBp5lBZZBg== + +"@verdaccio/commons-api@10.2.0": + version "10.2.0" + resolved "https://registry.yarnpkg.com/@verdaccio/commons-api/-/commons-api-10.2.0.tgz#3b684c31749837b0574375bb2e10644ecea9fcca" + integrity sha512-F/YZANu4DmpcEV0jronzI7v2fGVWkQ5Mwi+bVmV+ACJ+EzR0c9Jbhtbe5QyLUuzR97t8R5E/Xe53O0cc2LukdQ== + dependencies: + http-errors "2.0.0" + http-status-codes "2.2.0" + +"@verdaccio/file-locking@10.3.0": + version "10.3.0" + resolved "https://registry.yarnpkg.com/@verdaccio/file-locking/-/file-locking-10.3.0.tgz#a4342665c549163817c267bfa451e32ed3009767" + integrity 
sha512-FE5D5H4wy/nhgR/d2J5e1Na9kScj2wMjlLPBHz7XF4XZAVSRdm45+kL3ZmrfA6b2HTADP/uH7H05/cnAYW8bhw== + dependencies: + lockfile "1.0.4" + +"@verdaccio/local-storage@10.3.1": + version "10.3.1" + resolved "https://registry.yarnpkg.com/@verdaccio/local-storage/-/local-storage-10.3.1.tgz#8cbdc6390a0eb532577ae217729cb0a4e062f299" + integrity sha512-f3oArjXPOAwUAA2dsBhfL/rSouqJ2sfml8k97RtnBPKOzisb28bgyAQW0mqwQvN4MTK5S/2xudmobFpvJAIatg== + dependencies: + "@verdaccio/commons-api" "10.2.0" + "@verdaccio/file-locking" "10.3.0" + "@verdaccio/streams" "10.2.0" + async "3.2.4" + debug "4.3.4" + lodash "4.17.21" + lowdb "1.0.0" + mkdirp "1.0.4" + +"@verdaccio/readme@10.4.1": + version "10.4.1" + resolved "https://registry.yarnpkg.com/@verdaccio/readme/-/readme-10.4.1.tgz#c568d158c36ca7dd742b1abef890383918f621b2" + integrity sha512-OZ6R+HF2bIU3WFFdPxgUgyglaIfZzGSqyUfM2m1TFNfDCK84qJvRIgQJ1HG/82KVOpGuz/nxVyw2ZyEZDkP1vA== + dependencies: + dompurify "2.3.9" + jsdom "16.7.0" + marked "4.0.18" + +"@verdaccio/streams@10.2.0": + version "10.2.0" + resolved "https://registry.yarnpkg.com/@verdaccio/streams/-/streams-10.2.0.tgz#e01d2bfdcfe8aa2389f31bc6b72a602628bd025b" + integrity sha512-FaIzCnDg0x0Js5kSQn1Le3YzDHl7XxrJ0QdIw5LrDUmLsH3VXNi4/NMlSHnw5RiTTMs4UbEf98V3RJRB8exqJA== + +"@verdaccio/ui-theme@6.0.0-6-next.28": + version "6.0.0-6-next.28" + resolved "https://registry.yarnpkg.com/@verdaccio/ui-theme/-/ui-theme-6.0.0-6-next.28.tgz#bf8ff0e90f3d292741440c7e6ab6744b97d96a98" + integrity sha512-1sJ28aVGMiRJrSz0e8f4t+IUgt/cyYmuDLhogXHOEjEIIEcfMNyQ5bVYqq03wLVoKWEh5D6gHo1hQnVKQl1L5g== + +JSONStream@1.3.5: + version "1.3.5" + resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.5.tgz#3208c1f08d3a4d99261ab64f92302bc15e111ca0" + integrity sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ== + dependencies: + jsonparse "^1.2.0" + through ">=2.2.7 <3" + +abab@^2.0.3, abab@^2.0.5: + version "2.0.6" + resolved 
"https://registry.yarnpkg.com/abab/-/abab-2.0.6.tgz#41b80f2c871d19686216b82309231cfd3cb3d291" + integrity sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA== + +accepts@~1.3.5, accepts@~1.3.8: + version "1.3.8" + resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" + integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== + dependencies: + mime-types "~2.1.34" + negotiator "0.6.3" + +acorn-globals@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-6.0.0.tgz#46cdd39f0f8ff08a876619b55f5ac8a6dc770b45" + integrity sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg== + dependencies: + acorn "^7.1.1" + acorn-walk "^7.1.1" + +acorn-walk@^7.1.1: + version "7.2.0" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" + integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== + +acorn-walk@^8.1.1: + version "8.2.0" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1" + integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== + +acorn@^7.1.1: + version "7.4.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" + integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== + +acorn@^8.2.4, acorn@^8.4.1: + version "8.8.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.0.tgz#88c0187620435c7f6015803f5539dae05a9dbea8" + integrity sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w== + +agent-base@6: + version "6.0.2" + resolved 
"https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" + integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== + dependencies: + debug "4" + +ajv@^6.12.3: + version "6.12.6" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-styles@^4.1.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +apache-md5@1.1.7: + version "1.1.7" + resolved "https://registry.yarnpkg.com/apache-md5/-/apache-md5-1.1.7.tgz#dcef1802700cc231d60c5e08fd088f2f9b36375a" + integrity sha512-JtHjzZmJxtzfTSjsCyHgPR155HBe5WGyUyHTaEkfy46qhwCFKx1Epm6nAxgUG3WfUZP1dWhGqj9Z2NOBeZ+uBw== + +arg@^4.1.0: + version "4.1.3" + resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" + integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== + +argparse@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== + +array-flatten@1.1.1: + version 
"1.1.1" + resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" + integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== + +asn1@~0.2.3: + version "0.2.6" + resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.6.tgz#0d3a7bb6e64e02a90c0303b31f292868ea09a08d" + integrity sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ== + dependencies: + safer-buffer "~2.1.0" + +assert-plus@1.0.0, assert-plus@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" + integrity sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw== + +async@3.2.4: + version "3.2.4" + resolved "https://registry.yarnpkg.com/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c" + integrity sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ== + +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== + +atomic-sleep@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/atomic-sleep/-/atomic-sleep-1.0.0.tgz#eb85b77a601fc932cfe432c5acd364a9e2c9075b" + integrity sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ== + +aws-sign2@~0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" + integrity sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA== + +aws4@^1.8.0: + version "1.11.0" + resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.11.0.tgz#d61f46d83b2519250e2784daf5b09479a8b41c59" + 
integrity sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA== + +balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +bcrypt-pbkdf@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" + integrity sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w== + dependencies: + tweetnacl "^0.14.3" + +bcryptjs@2.4.3: + version "2.4.3" + resolved "https://registry.yarnpkg.com/bcryptjs/-/bcryptjs-2.4.3.tgz#9ab5627b93e60621ff7cdac5da9733027df1d0cb" + integrity sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ== + +body-parser@1.20.0: + version "1.20.0" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.0.tgz#3de69bd89011c11573d7bfee6a64f11b6bd27cc5" + integrity sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg== + dependencies: + bytes "3.1.2" + content-type "~1.0.4" + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + http-errors "2.0.0" + iconv-lite "0.4.24" + on-finished "2.4.1" + qs "6.10.3" + raw-body "2.5.1" + type-is "~1.6.18" + unpipe "1.0.0" + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +brace-expansion@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity 
sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== + dependencies: + balanced-match "^1.0.0" + +browser-process-hrtime@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" + integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== + +buffer-equal-constant-time@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819" + integrity sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA== + +bytes@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" + integrity sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw== + +bytes@3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" + integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== + +call-bind@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" + integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== + dependencies: + function-bind "^1.1.1" + get-intrinsic "^1.0.2" + +caseless@~0.12.0: + version "0.12.0" + resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" + integrity sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw== + +chalk@^4.0.0: + version "4.1.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity 
sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +clipanion@3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/clipanion/-/clipanion-3.1.0.tgz#3e217dd6476bb9236638b07eb4673f7309839819" + integrity sha512-v025Hz+IDQ15FpOyK8p02h5bFznMu6rLFsJSyOPR+7WrbSnZ1Ek6pblPukV7K5tC/dsWfncQPIrJ4iUy2PXkbw== + dependencies: + typanion "^3.3.1" + +cmd-ts@^0.11.0: + version "0.11.0" + resolved "https://registry.yarnpkg.com/cmd-ts/-/cmd-ts-0.11.0.tgz#80926180f39665e35e321b72439f792a2b63b745" + integrity sha512-6RvjD+f9oGPeWoMS53oavafmQ9qC839PjP3CyvPkAIfqMEXTbrclni7t3fnyVJFNWxuBexnLshcotY0RuNrI8Q== + dependencies: + chalk "^4.0.0" + debug "^4.3.4" + didyoumean "^1.2.2" + strip-ansi "^6.0.0" + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +combined-stream@^1.0.6, combined-stream@^1.0.8, combined-stream@~1.0.6: + version "1.0.8" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + +compressible@~2.0.16: + version "2.0.18" + resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" + integrity 
sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg== + dependencies: + mime-db ">= 1.43.0 < 2" + +compression@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" + integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== + dependencies: + accepts "~1.3.5" + bytes "3.0.0" + compressible "~2.0.16" + debug "2.6.9" + on-headers "~1.0.2" + safe-buffer "5.1.2" + vary "~1.1.2" + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== + +content-disposition@0.5.4: + version "0.5.4" + resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" + integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== + dependencies: + safe-buffer "5.2.1" + +content-type@~1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" + integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== + +cookie-signature@1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" + integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== + +cookie@0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" + integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== + +cookies@0.8.0: + version "0.8.0" + 
resolved "https://registry.yarnpkg.com/cookies/-/cookies-0.8.0.tgz#1293ce4b391740a8406e3c9870e828c4b54f3f90" + integrity sha512-8aPsApQfebXnuI+537McwYsDtjVxGm8gTIzQI3FDW6t5t/DAhERxtnbEPN/8RX+uZthoz4eCOgloXaE5cYyNow== + dependencies: + depd "~2.0.0" + keygrip "~1.1.0" + +core-util-is@1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" + integrity sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ== + +cors@2.8.5: + version "2.8.5" + resolved "https://registry.yarnpkg.com/cors/-/cors-2.8.5.tgz#eac11da51592dd86b9f06f6e7ac293b3df875d29" + integrity sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g== + dependencies: + object-assign "^4" + vary "^1" + +create-require@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" + integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== + +cssom@^0.4.4: + version "0.4.4" + resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.4.4.tgz#5a66cf93d2d0b661d80bf6a44fb65f5c2e4e0a10" + integrity sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw== + +cssom@~0.3.6: + version "0.3.8" + resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a" + integrity sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg== + +cssstyle@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-2.3.0.tgz#ff665a0ddbdc31864b09647f34163443d90b0852" + integrity sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A== + dependencies: + cssom "~0.3.6" + +d@1, d@^1.0.1: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" + integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== + dependencies: + es5-ext "^0.10.50" + type "^1.0.1" + +dashdash@^1.12.0: + version "1.14.1" + resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" + integrity sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g== + dependencies: + assert-plus "^1.0.0" + +data-urls@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b" + integrity sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ== + dependencies: + abab "^2.0.3" + whatwg-mimetype "^2.3.0" + whatwg-url "^8.0.0" + +dayjs@1.11.5: + version "1.11.5" + resolved "https://registry.yarnpkg.com/dayjs/-/dayjs-1.11.5.tgz#00e8cc627f231f9499c19b38af49f56dc0ac5e93" + integrity sha512-CAdX5Q3YW3Gclyo5Vpqkgpj8fSdLQcRuzfX6mC6Phy0nfJ0eGYOeS7m4mt2plDWLAtA4TqTakvbboHvUxfe4iA== + +debug@2.6.9: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@4, debug@4.3.4, debug@^4.3.3, debug@^4.3.4: + version "4.3.4" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + +debug@^3.2.7: + version "3.2.7" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== + 
dependencies: + ms "^2.1.1" + +decimal.js@^10.2.1: + version "10.4.1" + resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.4.1.tgz#be75eeac4a2281aace80c1a8753587c27ef053e7" + integrity sha512-F29o+vci4DodHYT9UrR5IEbfBw9pE5eSapIJdTqXK5+6hq+t8VRxwQyKlW2i+KDKFkkJQRvFyI/QXD83h8LyQw== + +deep-is@~0.1.3: + version "0.1.4" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== + +depd@2.0.0, depd@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" + integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== + +destroy@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" + integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== + +didyoumean@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037" + integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw== + +diff@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" + integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== + +domexception@^2.0.1: + version "2.0.1" + resolved 
"https://registry.yarnpkg.com/domexception/-/domexception-2.0.1.tgz#fb44aefba793e1574b0af6aed2801d057529f304" + integrity sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg== + dependencies: + webidl-conversions "^5.0.0" + +dompurify@2.3.9: + version "2.3.9" + resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-2.3.9.tgz#a4be5e7278338d6db09922dffcf6182cd099d70a" + integrity sha512-3zOnuTwup4lPV/GfGS6UzG4ub9nhSYagR/5tB3AvDEwqyy5dtyCM2dVjwGDCnrPerXifBKTYh/UWCGKK7ydhhw== + +ecc-jsbn@~0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" + integrity sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw== + dependencies: + jsbn "~0.1.0" + safer-buffer "^2.1.0" + +ecdsa-sig-formatter@1.0.11: + version "1.0.11" + resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf" + integrity sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ== + dependencies: + safe-buffer "^5.0.1" + +ee-first@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" + integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== + +encodeurl@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" + integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== + +envinfo@7.8.1: + version "7.8.1" + resolved "https://registry.yarnpkg.com/envinfo/-/envinfo-7.8.1.tgz#06377e3e5f4d379fea7ac592d5ad8927e0c4d475" + integrity sha512-/o+BXHmB7ocbHEAs6F2EnG0ogybVVUdkRunTT2glZU9XAaGmhqskrvKwqXuDfNjEO0LZKWdejEEpnq8aM0tOaw== + +es5-ext@^0.10.35, es5-ext@^0.10.46, 
es5-ext@^0.10.50, es5-ext@^0.10.53, es5-ext@~0.10.14, es5-ext@~0.10.2, es5-ext@~0.10.46: + version "0.10.62" + resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.62.tgz#5e6adc19a6da524bf3d1e02bbc8960e5eb49a9a5" + integrity sha512-BHLqn0klhEpnOKSrzn/Xsz2UIW8j+cGmo9JLzr8BiUapV8hPL9+FliFqjwr9ngW7jWdnxv6eO+/LqyhJVqgrjA== + dependencies: + es6-iterator "^2.0.3" + es6-symbol "^3.1.3" + next-tick "^1.1.0" + +es6-iterator@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" + integrity sha512-zw4SRzoUkd+cl+ZoE15A9o1oQd920Bb0iOJMQkQhl3jNc03YqVjAhG7scf9C5KWRU/R13Orf588uCC6525o02g== + dependencies: + d "1" + es5-ext "^0.10.35" + es6-symbol "^3.1.1" + +es6-symbol@^3.1.1, es6-symbol@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" + integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA== + dependencies: + d "^1.0.1" + ext "^1.1.2" + +es6-weak-map@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/es6-weak-map/-/es6-weak-map-2.0.3.tgz#b6da1f16cc2cc0d9be43e6bdbfc5e7dfcdf31d53" + integrity sha512-p5um32HOTO1kP+w7PRnB+5lQ43Z6muuMuIMffvDN8ZB4GcnjLBV6zGStpbASIMk4DCAvEaamhe2zhyCb/QXXsA== + dependencies: + d "1" + es5-ext "^0.10.46" + es6-iterator "^2.0.3" + es6-symbol "^3.1.1" + +escape-html@~1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" + integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== + +escodegen@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-2.0.0.tgz#5e32b12833e8aa8fa35e1bf0befa89380484c7dd" + integrity sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw== + dependencies: + esprima 
"^4.0.1" + estraverse "^5.2.0" + esutils "^2.0.2" + optionator "^0.8.1" + optionalDependencies: + source-map "~0.6.1" + +eslint-import-resolver-node@0.3.6: + version "0.3.6" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz#4048b958395da89668252001dbd9eca6b83bacbd" + integrity sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw== + dependencies: + debug "^3.2.7" + resolve "^1.20.0" + +esprima@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +estraverse@^5.2.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== + +esutils@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +etag@~1.8.1: + version "1.8.1" + resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" + integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== + +event-emitter@^0.3.5: + version "0.3.5" + resolved "https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.3.5.tgz#df8c69eef1647923c7157b9ce83840610b02cc39" + integrity sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA== + dependencies: + d "1" + es5-ext "~0.10.14" + +express-rate-limit@5.5.1: + version "5.5.1" + resolved 
"https://registry.yarnpkg.com/express-rate-limit/-/express-rate-limit-5.5.1.tgz#110c23f6a65dfa96ab468eda95e71697bc6987a2" + integrity sha512-MTjE2eIbHv5DyfuFz4zLYWxpqVhEhkTiwFGuB74Q9CSou2WHO52nlE5y3Zlg6SIsiYUIPj6ifFxnkPz6O3sIUg== + +express@4.18.1: + version "4.18.1" + resolved "https://registry.yarnpkg.com/express/-/express-4.18.1.tgz#7797de8b9c72c857b9cd0e14a5eea80666267caf" + integrity sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q== + dependencies: + accepts "~1.3.8" + array-flatten "1.1.1" + body-parser "1.20.0" + content-disposition "0.5.4" + content-type "~1.0.4" + cookie "0.5.0" + cookie-signature "1.0.6" + debug "2.6.9" + depd "2.0.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + finalhandler "1.2.0" + fresh "0.5.2" + http-errors "2.0.0" + merge-descriptors "1.0.1" + methods "~1.1.2" + on-finished "2.4.1" + parseurl "~1.3.3" + path-to-regexp "0.1.7" + proxy-addr "~2.0.7" + qs "6.10.3" + range-parser "~1.2.1" + safe-buffer "5.2.1" + send "0.18.0" + serve-static "1.15.0" + setprototypeof "1.2.0" + statuses "2.0.1" + type-is "~1.6.18" + utils-merge "1.0.1" + vary "~1.1.2" + +ext@^1.1.2: + version "1.7.0" + resolved "https://registry.yarnpkg.com/ext/-/ext-1.7.0.tgz#0ea4383c0103d60e70be99e9a7f11027a33c4f5f" + integrity sha512-6hxeJYaL110a9b5TEJSj0gojyHQAmA2ch5Os+ySCiA1QGdS697XWY1pzsrSjqA9LDEEgdB/KypIlR59RcLuHYw== + dependencies: + type "^2.7.2" + +extend@~3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" + integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== + +extsprintf@1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" + integrity sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g== + +extsprintf@^1.2.0: + version "1.4.1" + 
resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.1.tgz#8d172c064867f235c0c84a596806d279bf4bcc07" + integrity sha512-Wrk35e8ydCKDj/ArClo1VrPVmN8zph5V4AtHwIuHhvMXsKf73UT3BOD+azBIW+3wOJ4FhEH7zyaJCFvChjYvMA== + +fast-deep-equal@^3.1.1: + version "3.1.3" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-json-stable-stringify@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@~2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== + +fast-redact@^3.0.0: + version "3.1.2" + resolved "https://registry.yarnpkg.com/fast-redact/-/fast-redact-3.1.2.tgz#d58e69e9084ce9fa4c1a6fa98a3e1ecf5d7839aa" + integrity sha512-+0em+Iya9fKGfEQGcd62Yv6onjBmmhV1uh86XVfOU8VwAe6kaFdQCWI9s0/Nnugx5Vd9tdbZ7e6gE2tR9dzXdw== + +fast-safe-stringify@2.1.1, fast-safe-stringify@^2.0.8: + version "2.1.1" + resolved "https://registry.yarnpkg.com/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz#c406a83b6e70d9e35ce3b30a81141df30aeba884" + integrity sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA== + +finalhandler@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" + integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== + dependencies: + debug "2.6.9" + encodeurl 
"~1.0.2" + escape-html "~1.0.3" + on-finished "2.4.1" + parseurl "~1.3.3" + statuses "2.0.1" + unpipe "~1.0.0" + +flatstr@^1.0.12: + version "1.0.12" + resolved "https://registry.yarnpkg.com/flatstr/-/flatstr-1.0.12.tgz#c2ba6a08173edbb6c9640e3055b95e287ceb5931" + integrity sha512-4zPxDyhCyiN2wIAtSLI6gc82/EjqZc1onI4Mz/l0pWrAlsSfYH/2ZIcU+e3oA2wDwbzIWNKwa23F8rh6+DRWkw== + +forever-agent@~0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" + integrity sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw== + +form-data@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" + integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + +form-data@~2.3.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" + integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.6" + mime-types "^2.1.12" + +forwarded@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" + integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== + +fresh@0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" + integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== + +function-bind@^1.1.1: + version "1.1.1" + resolved 
"https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +get-intrinsic@^1.0.2: + version "1.1.3" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.3.tgz#063c84329ad93e83893c7f4f243ef63ffa351385" + integrity sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A== + dependencies: + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.3" + +getpass@^0.1.1: + version "0.1.7" + resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" + integrity sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng== + dependencies: + assert-plus "^1.0.0" + +glob@^6.0.1: + version "6.0.4" + resolved "https://registry.yarnpkg.com/glob/-/glob-6.0.4.tgz#0f08860f6a155127b2fadd4f9ce24b1aab6e4d22" + integrity sha512-MKZeRNyYZAVVVG1oZeLaWie1uweH40m9AZwIwxyPbTSX4hHrVYSzLg0Ro5Z5R7XKkIX+Cc6oD1rqeDJnwsB8/A== + dependencies: + inflight "^1.0.4" + inherits "2" + minimatch "2 || 3" + once "^1.3.0" + path-is-absolute "^1.0.0" + +graceful-fs@^4.1.3: + version "4.2.10" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" + integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== + +handlebars@4.7.7: + version "4.7.7" + resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.7.tgz#9ce33416aad02dbd6c8fafa8240d5d98004945a1" + integrity sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA== + dependencies: + minimist "^1.2.5" + neo-async "^2.6.0" + source-map "^0.6.1" + wordwrap "^1.0.0" + optionalDependencies: + uglify-js "^3.1.4" + +har-schema@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" + integrity sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q== + +har-validator@~5.1.0: + version "5.1.5" + resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.5.tgz#1f0803b9f8cb20c0fa13822df1ecddb36bde1efd" + integrity sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w== + dependencies: + ajv "^6.12.3" + har-schema "^2.0.0" + +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +has-symbols@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" + integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== + +has@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +html-encoding-sniffer@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" + integrity sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ== + dependencies: + whatwg-encoding "^1.0.5" + +http-errors@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" + integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== + dependencies: + depd "2.0.0" + 
inherits "2.0.4" + setprototypeof "1.2.0" + statuses "2.0.1" + toidentifier "1.0.1" + +http-proxy-agent@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz#8a8c8ef7f5932ccf953c296ca8291b95aa74aa3a" + integrity sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg== + dependencies: + "@tootallnate/once" "1" + agent-base "6" + debug "4" + +http-signature@~1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" + integrity sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ== + dependencies: + assert-plus "^1.0.0" + jsprim "^1.2.2" + sshpk "^1.7.0" + +http-status-codes@2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/http-status-codes/-/http-status-codes-2.2.0.tgz#bb2efe63d941dfc2be18e15f703da525169622be" + integrity sha512-feERVo9iWxvnejp3SEfm/+oNG517npqL2/PIA8ORjyOZjGC7TwCRQsZylciLS64i6pJ0wRYz3rkXLRwbtFa8Ng== + +https-proxy-agent@5.0.1, https-proxy-agent@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" + integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA== + dependencies: + agent-base "6" + debug "4" + +iconv-lite@0.4.24: + version "0.4.24" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 2.1.2 < 3" + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity 
sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +ipaddr.js@1.9.1: + version "1.9.1" + resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" + integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== + +is-core-module@^2.9.0: + version "2.10.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.10.0.tgz#9012ede0a91c69587e647514e1d5277019e728ed" + integrity sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg== + dependencies: + has "^1.0.3" + +is-potential-custom-element-name@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" + integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== + +is-promise@^2.1.0, is-promise@^2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.2.2.tgz#39ab959ccbf9a774cf079f7b40c7a26f763135f1" + integrity sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ== + +is-typedarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA== + +isstream@~0.1.2: + version "0.1.2" + resolved 
"https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" + integrity sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g== + +js-yaml@4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== + dependencies: + argparse "^2.0.1" + +jsbn@~0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" + integrity sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg== + +jsdom@16.7.0: + version "16.7.0" + resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-16.7.0.tgz#918ae71965424b197c819f8183a754e18977b710" + integrity sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw== + dependencies: + abab "^2.0.5" + acorn "^8.2.4" + acorn-globals "^6.0.0" + cssom "^0.4.4" + cssstyle "^2.3.0" + data-urls "^2.0.0" + decimal.js "^10.2.1" + domexception "^2.0.1" + escodegen "^2.0.0" + form-data "^3.0.0" + html-encoding-sniffer "^2.0.1" + http-proxy-agent "^4.0.1" + https-proxy-agent "^5.0.0" + is-potential-custom-element-name "^1.0.1" + nwsapi "^2.2.0" + parse5 "6.0.1" + saxes "^5.0.1" + symbol-tree "^3.2.4" + tough-cookie "^4.0.0" + w3c-hr-time "^1.0.2" + w3c-xmlserializer "^2.0.0" + webidl-conversions "^6.1.0" + whatwg-encoding "^1.0.5" + whatwg-mimetype "^2.3.0" + whatwg-url "^8.5.0" + ws "^7.4.6" + xml-name-validator "^3.0.0" + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-schema@0.4.0: + version "0.4.0" + 
resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" + integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== + +json-stringify-safe@~5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" + integrity sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA== + +jsonparse@^1.2.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-1.3.1.tgz#3f4dae4a91fac315f71062f8521cc239f1366280" + integrity sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg== + +jsonwebtoken@8.5.1: + version "8.5.1" + resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz#00e71e0b8df54c2121a1f26137df2280673bcc0d" + integrity sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w== + dependencies: + jws "^3.2.2" + lodash.includes "^4.3.0" + lodash.isboolean "^3.0.3" + lodash.isinteger "^4.0.4" + lodash.isnumber "^3.0.3" + lodash.isplainobject "^4.0.6" + lodash.isstring "^4.0.1" + lodash.once "^4.0.0" + ms "^2.1.1" + semver "^5.6.0" + +jsprim@^1.2.2: + version "1.4.2" + resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.2.tgz#712c65533a15c878ba59e9ed5f0e26d5b77c5feb" + integrity sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw== + dependencies: + assert-plus "1.0.0" + extsprintf "1.3.0" + json-schema "0.4.0" + verror "1.10.0" + +jwa@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/jwa/-/jwa-1.4.1.tgz#743c32985cb9e98655530d53641b66c8645b039a" + integrity sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA== + dependencies: + buffer-equal-constant-time "1.0.1" + ecdsa-sig-formatter "1.0.11" + safe-buffer 
"^5.0.1" + +jws@^3.2.2: + version "3.2.2" + resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304" + integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA== + dependencies: + jwa "^1.4.1" + safe-buffer "^5.0.1" + +keygrip@~1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/keygrip/-/keygrip-1.1.0.tgz#871b1681d5e159c62a445b0c74b615e0917e7226" + integrity sha512-iYSchDJ+liQ8iwbSI2QqsQOvqv58eJCEanyJPJi+Khyu8smkcKSFUCbPwzFcL7YVtZ6eONjqRX/38caJ7QjRAQ== + dependencies: + tsscmp "1.0.6" + +kleur@4.1.5: + version "4.1.5" + resolved "https://registry.yarnpkg.com/kleur/-/kleur-4.1.5.tgz#95106101795f7050c6c650f350c683febddb1780" + integrity sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ== + +levn@~0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" + integrity sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA== + dependencies: + prelude-ls "~1.1.2" + type-check "~0.3.2" + +lockfile@1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/lockfile/-/lockfile-1.0.4.tgz#07f819d25ae48f87e538e6578b6964a4981a5609" + integrity sha512-cvbTwETRfsFh4nHsL1eGWapU1XFi5Ot9E85sWAwia7Y7EgB7vfqcZhTKZ+l7hCGxSPoushMv5GKhT5PdLv03WA== + dependencies: + signal-exit "^3.0.2" + +lodash.includes@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/lodash.includes/-/lodash.includes-4.3.0.tgz#60bb98a87cb923c68ca1e51325483314849f553f" + integrity sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w== + +lodash.isboolean@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz#6c2e171db2a257cd96802fd43b01b20d5f5870f6" + integrity 
sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg== + +lodash.isinteger@^4.0.4: + version "4.0.4" + resolved "https://registry.yarnpkg.com/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz#619c0af3d03f8b04c31f5882840b77b11cd68343" + integrity sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA== + +lodash.isnumber@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz#3ce76810c5928d03352301ac287317f11c0b1ffc" + integrity sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw== + +lodash.isplainobject@^4.0.6: + version "4.0.6" + resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb" + integrity sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA== + +lodash.isstring@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451" + integrity sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw== + +lodash.once@^4.0.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac" + integrity sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg== + +lodash@4, lodash@4.17.21, lodash@^4.7.0: + version "4.17.21" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== + +lowdb@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/lowdb/-/lowdb-1.0.0.tgz#5243be6b22786ccce30e50c9a33eac36b20c8064" + integrity 
sha512-2+x8esE/Wb9SQ1F9IHaYWfsC9FIecLOPrK4g17FGEayjUWH172H6nwicRovGvSE2CPZouc2MCIqCI7h9d+GftQ== + dependencies: + graceful-fs "^4.1.3" + is-promise "^2.1.0" + lodash "4" + pify "^3.0.0" + steno "^0.4.1" + +lru-cache@7.14.0: + version "7.14.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.14.0.tgz#21be64954a4680e303a09e9468f880b98a0b3c7f" + integrity sha512-EIRtP1GrSJny0dqb50QXRUNBxHJhcpxHC++M5tD7RYbvLLn5KVWKsbyswSSqDuU15UFi3bgTQIY8nhDMeF6aDQ== + +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +lru-queue@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/lru-queue/-/lru-queue-0.1.0.tgz#2738bd9f0d3cf4f84490c5736c48699ac632cda3" + integrity sha512-BpdYkt9EvGl8OfWHDQPISVpcl5xZthb+XPsbELj5AQXxIC8IriDZIQYjBJPEm5rS420sjZ0TLEzRcq5KdBhYrQ== + dependencies: + es5-ext "~0.10.2" + +lunr-mutable-indexes@2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/lunr-mutable-indexes/-/lunr-mutable-indexes-2.3.2.tgz#864253489735d598c5140f3fb75c0a5c8be2e98c" + integrity sha512-Han6cdWAPPFM7C2AigS2Ofl3XjAT0yVMrUixodJEpyg71zCtZ2yzXc3s+suc/OaNt4ca6WJBEzVnEIjxCTwFMw== + dependencies: + lunr ">= 2.3.0 < 2.4.0" + +"lunr@>= 2.3.0 < 2.4.0": + version "2.3.9" + resolved "https://registry.yarnpkg.com/lunr/-/lunr-2.3.9.tgz#18b123142832337dd6e964df1a5a7707b25d35e1" + integrity sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow== + +make-error@^1.1.1: + version "1.3.6" + resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" + integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== + +marked@4.0.18: + version "4.0.18" + resolved 
"https://registry.yarnpkg.com/marked/-/marked-4.0.18.tgz#cd0ac54b2e5610cfb90e8fd46ccaa8292c9ed569" + integrity sha512-wbLDJ7Zh0sqA0Vdg6aqlbT+yPxqLblpAZh1mK2+AO2twQkPywvvqQNfEPVwSSRjZ7dZcdeVBIAgiO7MMp3Dszw== + +marked@4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/marked/-/marked-4.1.0.tgz#3fc6e7485f21c1ca5d6ec4a39de820e146954796" + integrity sha512-+Z6KDjSPa6/723PQYyc1axYZpYYpDnECDaU6hkaf5gqBieBkMKYReL5hteF2QizhlMbgbo8umXl/clZ67+GlsA== + +media-typer@0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" + integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== + +memoizee@0.4.15: + version "0.4.15" + resolved "https://registry.yarnpkg.com/memoizee/-/memoizee-0.4.15.tgz#e6f3d2da863f318d02225391829a6c5956555b72" + integrity sha512-UBWmJpLZd5STPm7PMUlOw/TSy972M+z8gcyQ5veOnSDRREz/0bmpyTfKt3/51DhEBqCZQn1udM/5flcSPYhkdQ== + dependencies: + d "^1.0.1" + es5-ext "^0.10.53" + es6-weak-map "^2.0.3" + event-emitter "^0.3.5" + is-promise "^2.2.2" + lru-queue "^0.1.0" + next-tick "^1.1.0" + timers-ext "^0.1.7" + +merge-descriptors@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" + integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== + +methods@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" + integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== + +mime-db@1.52.0, "mime-db@>= 1.43.0 < 2": + version "1.52.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + 
+mime-types@^2.1.12, mime-types@~2.1.19, mime-types@~2.1.24, mime-types@~2.1.34: + version "2.1.35" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mime@1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" + integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== + +mime@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/mime/-/mime-3.0.0.tgz#b374550dca3a0c18443b0c950a6a58f1931cf7a7" + integrity sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A== + +"minimatch@2 || 3": + version "3.1.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== + dependencies: + brace-expansion "^1.1.7" + +minimatch@5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.0.tgz#1717b464f4971b144f6aabe8f2d0b8e4511e09c7" + integrity sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg== + dependencies: + brace-expansion "^2.0.1" + +minimist@^1.2.5, minimist@^1.2.6: + version "1.2.6" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" + integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== + +mkdirp@1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" + integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== + 
+mkdirp@~0.5.1: + version "0.5.6" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" + integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== + dependencies: + minimist "^1.2.6" + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +ms@2.1.3, ms@^2.1.1: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +mv@2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/mv/-/mv-2.1.1.tgz#ae6ce0d6f6d5e0a4f7d893798d03c1ea9559b6a2" + integrity sha512-at/ZndSy3xEGJ8i0ygALh8ru9qy7gWW1cmkaqBN29JmMlIvM//MEO9y1sk/avxuwnPcfhkejkLsuPxH81BrkSg== + dependencies: + mkdirp "~0.5.1" + ncp "~2.0.0" + rimraf "~2.4.0" + +ncp@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ncp/-/ncp-2.0.0.tgz#195a21d6c46e361d2fb1281ba38b91e9df7bdbb3" + integrity sha512-zIdGUrPRFTUELUvr3Gmc7KZ2Sw/h1PiVM0Af/oHB6zgnV1ikqSfRk+TOufi79aHYCW3NiOXmr1BP5nWbzojLaA== + +negotiator@0.6.3: + version "0.6.3" + resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" + integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== + +neo-async@^2.6.0: + version "2.6.2" + resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" + integrity 
sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== + +next-tick@1, next-tick@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.1.0.tgz#1836ee30ad56d67ef281b22bd199f709449b35eb" + integrity sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ== + +node-fetch@2.6.7, node-fetch@^2: + version "2.6.7" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" + integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== + dependencies: + whatwg-url "^5.0.0" + +nwsapi@^2.2.0: + version "2.2.2" + resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.2.tgz#e5418863e7905df67d51ec95938d67bf801f0bb0" + integrity sha512-90yv+6538zuvUMnN+zCr8LuV6bPFdq50304114vJYJ8RDyK8D5O9Phpbd6SZWgI7PwzmmfN1upeOJlvybDSgCw== + +oauth-sign@~0.9.0: + version "0.9.0" + resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" + integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== + +object-assign@^4: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== + +object-inspect@^1.9.0: + version "1.12.2" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea" + integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ== + +on-finished@2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" + integrity 
sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== + dependencies: + ee-first "1.1.1" + +on-headers@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" + integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== + +once@^1.3.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== + dependencies: + wrappy "1" + +optionator@^0.8.1: + version "0.8.3" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" + integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== + dependencies: + deep-is "~0.1.3" + fast-levenshtein "~2.0.6" + levn "~0.3.0" + prelude-ls "~1.1.2" + type-check "~0.3.2" + word-wrap "~1.2.3" + +parse-ms@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/parse-ms/-/parse-ms-2.1.0.tgz#348565a753d4391fa524029956b172cb7753097d" + integrity sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA== + +parse5@6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b" + integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== + +parseurl@~1.3.3: + version "1.3.3" + resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" + integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== + +path-parse@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-to-regexp@0.1.7: + version "0.1.7" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" + integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== + +performance-now@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" + integrity sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow== + +pify@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" + integrity sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg== + +pino-std-serializers@^3.1.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/pino-std-serializers/-/pino-std-serializers-3.2.0.tgz#b56487c402d882eb96cd67c257868016b61ad671" + integrity sha512-EqX4pwDPrt3MuOAAUBMU0Tk5kR/YcCM5fNPEzgCO2zJ5HfX0vbiH9HbJglnyeQsN96Kznae6MWD47pZB5avTrg== + +pino@6.14.0: + version "6.14.0" + resolved "https://registry.yarnpkg.com/pino/-/pino-6.14.0.tgz#b745ea87a99a6c4c9b374e4f29ca7910d4c69f78" + integrity sha512-iuhEDel3Z3hF9Jfe44DPXR8l07bhjuFY3GMHIXbjnY9XcafbyDDwl2sN2vw2GjMPf5Nkoe+OFao7ffn9SXaKDg== + dependencies: + fast-redact "^3.0.0" + fast-safe-stringify "^2.0.8" + flatstr "^1.0.12" + pino-std-serializers "^3.1.0" + process-warning "^1.0.0" + 
quick-format-unescaped "^4.0.3" + sonic-boom "^1.0.2" + +pkginfo@0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/pkginfo/-/pkginfo-0.4.1.tgz#b5418ef0439de5425fc4995042dced14fb2a84ff" + integrity sha512-8xCNE/aT/EXKenuMDZ+xTVwkT8gsoHN2z/Q29l80u0ppGEXVvsKRzNMbtKhg8LS8k1tJLAHHylf6p4VFmP6XUQ== + +prelude-ls@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" + integrity sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w== + +prettier-bytes@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/prettier-bytes/-/prettier-bytes-1.0.4.tgz#994b02aa46f699c50b6257b5faaa7fe2557e62d6" + integrity sha512-dLbWOa4xBn+qeWeIF60qRoB6Pk2jX5P3DIVgOQyMyvBpu931Q+8dXz8X0snJiFkQdohDDLnZQECjzsAj75hgZQ== + +pretty-ms@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/pretty-ms/-/pretty-ms-7.0.1.tgz#7d903eaab281f7d8e03c66f867e239dc32fb73e8" + integrity sha512-973driJZvxiGOQ5ONsFhOF/DtzPMOMtgC11kCpUrPGMTgqp2q/1gwzCquocrN33is0VZ5GFHXZYMM9l6h67v2Q== + dependencies: + parse-ms "^2.1.0" + +process-warning@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/process-warning/-/process-warning-1.0.0.tgz#980a0b25dc38cd6034181be4b7726d89066b4616" + integrity sha512-du4wfLyj4yCZq1VupnVSZmRsPJsNuxoDQFdCFHLaYiEbFBD7QE0a+I4D7hOxrVnh78QE/YipFAj9lXHiXocV+Q== + +proxy-addr@~2.0.7: + version "2.0.7" + resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" + integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== + dependencies: + forwarded "0.2.0" + ipaddr.js "1.9.1" + +psl@^1.1.24, psl@^1.1.33: + version "1.9.0" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" + integrity 
sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag== + +punycode@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" + integrity sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ== + +punycode@^2.1.0, punycode@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +qs@6.10.3: + version "6.10.3" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.10.3.tgz#d6cde1b2ffca87b5aa57889816c5f81535e22e8e" + integrity sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ== + dependencies: + side-channel "^1.0.4" + +qs@~6.5.2: + version "6.5.3" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.3.tgz#3aeeffc91967ef6e35c0e488ef46fb296ab76aad" + integrity sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA== + +querystringify@^2.1.1: + version "2.2.0" + resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" + integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== + +quick-format-unescaped@^4.0.3: + version "4.0.4" + resolved "https://registry.yarnpkg.com/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz#93ef6dd8d3453cbc7970dd614fad4c5954d6b5a7" + integrity sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg== + +range-parser@~1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" + integrity 
sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== + +raw-body@2.5.1: + version "2.5.1" + resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" + integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== + dependencies: + bytes "3.1.2" + http-errors "2.0.0" + iconv-lite "0.4.24" + unpipe "1.0.0" + +request@2.88.0: + version "2.88.0" + resolved "https://registry.yarnpkg.com/request/-/request-2.88.0.tgz#9c2fca4f7d35b592efe57c7f0a55e81052124fef" + integrity sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg== + dependencies: + aws-sign2 "~0.7.0" + aws4 "^1.8.0" + caseless "~0.12.0" + combined-stream "~1.0.6" + extend "~3.0.2" + forever-agent "~0.6.1" + form-data "~2.3.2" + har-validator "~5.1.0" + http-signature "~1.2.0" + is-typedarray "~1.0.0" + isstream "~0.1.2" + json-stringify-safe "~5.0.1" + mime-types "~2.1.19" + oauth-sign "~0.9.0" + performance-now "^2.1.0" + qs "~6.5.2" + safe-buffer "^5.1.2" + tough-cookie "~2.4.3" + tunnel-agent "^0.6.0" + uuid "^3.3.2" + +requires-port@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" + integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== + +resolve@^1.20.0: + version "1.22.1" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" + integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== + dependencies: + is-core-module "^2.9.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +rimraf@~2.4.0: + version "2.4.5" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.4.5.tgz#ee710ce5d93a8fdb856fb5ea8ff0e2d75934b2da" + integrity 
sha512-J5xnxTyqaiw06JjMftq7L9ouA448dw/E7dKghkP9WpKNuwmARNNg+Gk8/u5ryb9N/Yo2+z3MCwuqFK/+qPOPfQ== + dependencies: + glob "^6.0.1" + +rxjs@^7.5.2: + version "7.5.7" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.5.7.tgz#2ec0d57fdc89ece220d2e702730ae8f1e49def39" + integrity sha512-z9MzKh/UcOqB3i20H6rtrlaE/CgjLOvheWK/9ILrbhROGTweAi1BaFsTT9FbwZi5Trr1qNRs+MXkhmR06awzQA== + dependencies: + tslib "^2.1.0" + +safe-buffer@5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +safe-buffer@5.2.1, safe-buffer@^5.0.1, safe-buffer@^5.1.2: + version "5.2.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +"safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +saxes@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/saxes/-/saxes-5.0.1.tgz#eebab953fa3b7608dbe94e5dadb15c888fa6696d" + integrity sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw== + dependencies: + xmlchars "^2.2.0" + +semver@7.3.7: + version "7.3.7" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f" + integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== + dependencies: + lru-cache "^6.0.0" + +semver@^5.6.0: + version "5.7.1" + resolved 
"https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" + integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== + +send@0.18.0: + version "0.18.0" + resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" + integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== + dependencies: + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + fresh "0.5.2" + http-errors "2.0.0" + mime "1.6.0" + ms "2.1.3" + on-finished "2.4.1" + range-parser "~1.2.1" + statuses "2.0.1" + +serve-static@1.15.0: + version "1.15.0" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" + integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== + dependencies: + encodeurl "~1.0.2" + escape-html "~1.0.3" + parseurl "~1.3.3" + send "0.18.0" + +setprototypeof@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" + integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== + +side-channel@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" + integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== + dependencies: + call-bind "^1.0.0" + get-intrinsic "^1.0.2" + object-inspect "^1.9.0" + +signal-exit@^3.0.2: + version "3.0.7" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== + 
+sonic-boom@^1.0.2: + version "1.4.1" + resolved "https://registry.yarnpkg.com/sonic-boom/-/sonic-boom-1.4.1.tgz#d35d6a74076624f12e6f917ade7b9d75e918f53e" + integrity sha512-LRHh/A8tpW7ru89lrlkU4AszXt1dbwSjVWguGrmlxE7tawVmDBlI1PILMkXAxJTwqhgsEeTHzj36D5CmHgQmNg== + dependencies: + atomic-sleep "^1.0.0" + flatstr "^1.0.12" + +source-map@^0.6.1, source-map@~0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +sshpk@^1.7.0: + version "1.17.0" + resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.17.0.tgz#578082d92d4fe612b13007496e543fa0fbcbe4c5" + integrity sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ== + dependencies: + asn1 "~0.2.3" + assert-plus "^1.0.0" + bcrypt-pbkdf "^1.0.0" + dashdash "^1.12.0" + ecc-jsbn "~0.1.1" + getpass "^0.1.1" + jsbn "~0.1.0" + safer-buffer "^2.0.2" + tweetnacl "~0.14.0" + +statuses@2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" + integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== + +steno@^0.4.1: + version "0.4.4" + resolved "https://registry.yarnpkg.com/steno/-/steno-0.4.4.tgz#071105bdfc286e6615c0403c27e9d7b5dcb855cb" + integrity sha512-EEHMVYHNXFHfGtgjNITnka0aHhiAlo93F7z2/Pwd+g0teG9CnM3JIINM7hVVB5/rhw9voufD7Wukwgtw2uqh6w== + dependencies: + graceful-fs "^4.1.3" + +strip-ansi@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +supports-color@^7.1.0: + version "7.2.0" + resolved 
"https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +symbol-tree@^3.2.4: + version "3.2.4" + resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" + integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== + +"through@>=2.2.7 <3": + version "2.3.8" + resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" + integrity sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg== + +timers-ext@^0.1.7: + version "0.1.7" + resolved "https://registry.yarnpkg.com/timers-ext/-/timers-ext-0.1.7.tgz#6f57ad8578e07a3fb9f91d9387d65647555e25c6" + integrity sha512-b85NUNzTSdodShTIbky6ZF02e8STtVVfD+fu4aXXShEELpozH+bCpJLYMPZbsABN2wDH7fJpqIoXxJpzbf0NqQ== + dependencies: + es5-ext "~0.10.46" + next-tick "1" + +toidentifier@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" + integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== + +tough-cookie@^4.0.0: + version "4.1.2" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.1.2.tgz#e53e84b85f24e0b65dd526f46628db6c85f6b874" + integrity sha512-G9fqXWoYFZgTc2z8Q5zaHy/vJMjm+WV0AkAeHxVCQiEB1b+dGvWzFW6QV07cY5jQ5gRkeid2qIkzkxUnmoQZUQ== + dependencies: + psl 
"^1.1.33" + punycode "^2.1.1" + universalify "^0.2.0" + url-parse "^1.5.3" + +tough-cookie@~2.4.3: + version "2.4.3" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.4.3.tgz#53f36da3f47783b0925afa06ff9f3b165280f781" + integrity sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ== + dependencies: + psl "^1.1.24" + punycode "^1.4.1" + +tr46@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240" + integrity sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw== + dependencies: + punycode "^2.1.1" + +tr46@~0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" + integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== + +ts-node@^10.9.1: + version "10.9.1" + resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.1.tgz#e73de9102958af9e1f0b168a6ff320e25adcff4b" + integrity sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw== + dependencies: + "@cspotcode/source-map-support" "^0.8.0" + "@tsconfig/node10" "^1.0.7" + "@tsconfig/node12" "^1.0.7" + "@tsconfig/node14" "^1.0.0" + "@tsconfig/node16" "^1.0.2" + acorn "^8.4.1" + acorn-walk "^8.1.1" + arg "^4.1.0" + create-require "^1.1.0" + diff "^4.0.1" + make-error "^1.1.1" + v8-compile-cache-lib "^3.0.1" + yn "3.1.1" + +tslib@^2.1.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3" + integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ== + +tsscmp@1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/tsscmp/-/tsscmp-1.0.6.tgz#85b99583ac3589ec4bfef825b5000aa911d605eb" + integrity 
sha512-LxhtAkPDTkVCMQjt2h6eBVY28KCjikZqZfMcC15YBeNjkgUpdCfBu5HoiOTDu86v6smE8yOjyEktJ8hlbANHQA== + +tunnel-agent@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" + integrity sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w== + dependencies: + safe-buffer "^5.0.1" + +tweetnacl@^0.14.3, tweetnacl@~0.14.0: + version "0.14.5" + resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" + integrity sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA== + +typanion@^3.3.1: + version "3.12.0" + resolved "https://registry.yarnpkg.com/typanion/-/typanion-3.12.0.tgz#8352830e5cf26ebfc5832da265886c9fb3ebb323" + integrity sha512-o59ZobUBsG+2dHnGVI2shscqqzHdzCOixCU0t8YXLxM2Su42J2ha7hY9V5+6SIBjVsw6aLqrlYznCgQGJN4Kag== + +type-check@~0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" + integrity sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg== + dependencies: + prelude-ls "~1.1.2" + +type-is@~1.6.18: + version "1.6.18" + resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" + integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== + dependencies: + media-typer "0.3.0" + mime-types "~2.1.24" + +type@^1.0.1: + version "1.2.0" + resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" + integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== + +type@^2.7.2: + version "2.7.2" + resolved "https://registry.yarnpkg.com/type/-/type-2.7.2.tgz#2376a15a3a28b1efa0f5350dcf72d24df6ef98d0" + integrity 
sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw== + +typed-emitter@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/typed-emitter/-/typed-emitter-2.1.0.tgz#ca78e3d8ef1476f228f548d62e04e3d4d3fd77fb" + integrity sha512-g/KzbYKbH5C2vPkaXGu8DJlHrGKHLsM25Zg9WuC9pMGfuvT+X25tZQWo5fK1BjBm8+UrVE9LDCvaY0CQk+fXDA== + optionalDependencies: + rxjs "^7.5.2" + +typescript@^4.8.3: + version "4.8.4" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.8.4.tgz#c464abca159669597be5f96b8943500b238e60e6" + integrity sha512-QCh+85mCy+h0IGff8r5XWzOVSbBO+KfeYrMQh7NJ58QujwcE22u+NUSmUxqF+un70P9GXKxa2HCNiTTMJknyjQ== + +uglify-js@^3.1.4: + version "3.17.2" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.17.2.tgz#f55f668b9a64b213977ae688703b6bbb7ca861c6" + integrity sha512-bbxglRjsGQMchfvXZNusUcYgiB9Hx2K4AHYXQy2DITZ9Rd+JzhX7+hoocE5Winr7z2oHvPsekkBwXtigvxevXg== + +universalify@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.2.0.tgz#6451760566fa857534745ab1dde952d1b1761be0" + integrity sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg== + +unix-crypt-td-js@1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/unix-crypt-td-js/-/unix-crypt-td-js-1.1.4.tgz#4912dfad1c8aeb7d20fa0a39e4c31918c1d5d5dd" + integrity sha512-8rMeVYWSIyccIJscb9NdCfZKSRBKYTeVnwmiRYT2ulE3qd1RaDQ0xQDP+rI3ccIWbhu/zuo5cgN8z73belNZgw== + +unpipe@1.0.0, unpipe@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" + integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== + +uri-js@^4.2.2: + version "4.4.1" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity 
sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +url-parse@^1.5.3: + version "1.5.10" + resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1" + integrity sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ== + dependencies: + querystringify "^2.1.1" + requires-port "^1.0.0" + +utils-merge@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" + integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== + +uuid@^3.3.2: + version "3.4.0" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" + integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== + +v8-compile-cache-lib@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf" + integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg== + +validator@13.7.0: + version "13.7.0" + resolved "https://registry.yarnpkg.com/validator/-/validator-13.7.0.tgz#4f9658ba13ba8f3d82ee881d3516489ea85c0857" + integrity sha512-nYXQLCBkpJ8X6ltALua9dRrZDHVYxjJ1wgskNt1lH9fzGjs3tgojGSCBjmEPwkWS1y29+DrizMTW19Pr9uB2nw== + +vary@^1, vary@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" + integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== + +verdaccio-audit@10.2.2: + version "10.2.2" + resolved "https://registry.yarnpkg.com/verdaccio-audit/-/verdaccio-audit-10.2.2.tgz#254380e57932fda64b45cb739e9c42cc9fb2dfdf" + integrity 
sha512-f2uZlKD7vi0yEB0wN8WOf+eA/3SCyKD9cvK17Hh7Wm8f/bl7k1B3hHOTtUCn/yu85DGsj2pcNzrAfp2wMVgz9Q== + dependencies: + body-parser "1.20.0" + express "4.18.1" + https-proxy-agent "5.0.1" + node-fetch "2.6.7" + +verdaccio-htpasswd@10.5.0: + version "10.5.0" + resolved "https://registry.yarnpkg.com/verdaccio-htpasswd/-/verdaccio-htpasswd-10.5.0.tgz#de9ea2967856af765178b08485dc8e83f544a12c" + integrity sha512-olBsT3uy1TT2ZqmMCJUsMHrztJzoEpa8pxxvYrDZdWnEksl6mHV10lTeLbH9BUwbEheOeKkkdsERqUOs+if0jg== + dependencies: + "@verdaccio/file-locking" "10.3.0" + apache-md5 "1.1.7" + bcryptjs "2.4.3" + http-errors "2.0.0" + unix-crypt-td-js "1.1.4" + +verdaccio@5: + version "5.15.3" + resolved "https://registry.yarnpkg.com/verdaccio/-/verdaccio-5.15.3.tgz#4953471c0130c8e88b3d5562b5c63b38b575ed3d" + integrity sha512-8oEtepXF1oksGVYahi2HS1Yx9u6HD/4ukBDNDfwISmlNp7HVKJL2+kjzmDJWam88BpDNxOBU/LFXWSsEAFKFCQ== + dependencies: + "@verdaccio/commons-api" "10.2.0" + "@verdaccio/local-storage" "10.3.1" + "@verdaccio/readme" "10.4.1" + "@verdaccio/streams" "10.2.0" + "@verdaccio/ui-theme" "6.0.0-6-next.28" + JSONStream "1.3.5" + async "3.2.4" + body-parser "1.20.0" + clipanion "3.1.0" + compression "1.7.4" + cookies "0.8.0" + cors "2.8.5" + dayjs "1.11.5" + debug "^4.3.3" + envinfo "7.8.1" + eslint-import-resolver-node "0.3.6" + express "4.18.1" + express-rate-limit "5.5.1" + fast-safe-stringify "2.1.1" + handlebars "4.7.7" + http-errors "2.0.0" + js-yaml "4.1.0" + jsonwebtoken "8.5.1" + kleur "4.1.5" + lodash "4.17.21" + lru-cache "7.14.0" + lunr-mutable-indexes "2.3.2" + marked "4.1.0" + memoizee "0.4.15" + mime "3.0.0" + minimatch "5.1.0" + mkdirp "1.0.4" + mv "2.1.1" + pino "6.14.0" + pkginfo "0.4.1" + prettier-bytes "^1.0.4" + pretty-ms "^7.0.1" + request "2.88.0" + semver "7.3.7" + validator "13.7.0" + verdaccio-audit "10.2.2" + verdaccio-htpasswd "10.5.0" + +verror@1.10.0: + version "1.10.0" + resolved 
"https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" + integrity sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw== + dependencies: + assert-plus "^1.0.0" + core-util-is "1.0.2" + extsprintf "^1.2.0" + +w3c-hr-time@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" + integrity sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ== + dependencies: + browser-process-hrtime "^1.0.0" + +w3c-xmlserializer@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz#3e7104a05b75146cc60f564380b7f683acf1020a" + integrity sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA== + dependencies: + xml-name-validator "^3.0.0" + +webidl-conversions@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" + integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== + +webidl-conversions@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff" + integrity sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA== + +webidl-conversions@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514" + integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w== + +whatwg-encoding@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0" + integrity 
sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw== + dependencies: + iconv-lite "0.4.24" + +whatwg-mimetype@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" + integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== + +whatwg-url@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" + integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== + dependencies: + tr46 "~0.0.3" + webidl-conversions "^3.0.0" + +whatwg-url@^8.0.0, whatwg-url@^8.5.0: + version "8.7.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.7.0.tgz#656a78e510ff8f3937bc0bcbe9f5c0ac35941b77" + integrity sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg== + dependencies: + lodash "^4.7.0" + tr46 "^2.1.0" + webidl-conversions "^6.1.0" + +word-wrap@~1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" + integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + +wordwrap@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" + integrity sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q== + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== + +ws@^7.4.6: + version "7.5.9" + resolved 
"https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" + integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== + +xml-name-validator@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" + integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw== + +xmlchars@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" + integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== + +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + +yn@3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" + integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== diff --git a/javascript/examples/create-react-app/.gitignore b/javascript/examples/create-react-app/.gitignore new file mode 100644 index 00000000..c2658d7d --- /dev/null +++ b/javascript/examples/create-react-app/.gitignore @@ -0,0 +1 @@ +node_modules/ diff --git a/javascript/examples/create-react-app/README.md b/javascript/examples/create-react-app/README.md new file mode 100644 index 00000000..baa135ac --- /dev/null +++ b/javascript/examples/create-react-app/README.md @@ -0,0 +1,59 @@ +# Automerge + `create-react-app` + +This is a little fiddly to get working. The problem is that `create-react-app` +hard codes a webpack configuration which does not support WASM modules, which we +require in order to bundle the WASM implementation of automerge. 
To get around +this we use [`craco`](https://github.com/dilanx/craco) which does some monkey +patching to allow us to modify the webpack config that `create-react-app` +bundles. Then we use a craco plugin called +[`craco-wasm`](https://www.npmjs.com/package/craco-wasm) to perform the +necessary modifications to the webpack config. It should be noted that this is +all quite fragile and ideally you probably don't want to use `create-react-app` +to do this in production. + +## Setup + +Assuming you have already run `create-react-app` and your working directory is +the project. + +### Install craco and craco-wasm + +```bash +yarn add craco craco-wasm +``` + +### Modify `package.json` to use `craco` for scripts + +In `package.json` the `scripts` section will look like this: + +```json + "scripts": { + "start": "react-scripts start", + "build": "react-scripts build", + "test": "react-scripts test", + "eject": "react-scripts eject" + }, +``` + +Replace that section with: + +```json + "scripts": { + "start": "craco start", + "build": "craco build", + "test": "craco test", + "eject": "craco eject" + }, +``` + +### Create `craco.config.js` + +In the root of the project add the following contents to `craco.config.js` + +```javascript +const cracoWasm = require("craco-wasm") + +module.exports = { + plugins: [cracoWasm()], +} +``` diff --git a/javascript/examples/create-react-app/craco.config.js b/javascript/examples/create-react-app/craco.config.js new file mode 100644 index 00000000..489dad8f --- /dev/null +++ b/javascript/examples/create-react-app/craco.config.js @@ -0,0 +1,5 @@ +const cracoWasm = require("craco-wasm") + +module.exports = { + plugins: [cracoWasm()], +} diff --git a/javascript/examples/create-react-app/package.json b/javascript/examples/create-react-app/package.json new file mode 100644 index 00000000..273d277b --- /dev/null +++ b/javascript/examples/create-react-app/package.json @@ -0,0 +1,41 @@ +{ + "name": "automerge-create-react-app", + "version": "0.1.0", + "private": true, + 
"dependencies": { + "@craco/craco": "^7.0.0-alpha.8", + "craco-wasm": "0.0.1", + "@testing-library/jest-dom": "^5.16.5", + "@testing-library/react": "^13.4.0", + "@testing-library/user-event": "^13.5.0", + "@automerge/automerge": "2.0.0-alpha.7", + "react": "^18.2.0", + "react-dom": "^18.2.0", + "react-scripts": "5.0.1", + "web-vitals": "^2.1.4" + }, + "scripts": { + "start": "craco start", + "build": "craco build", + "test": "craco test", + "eject": "craco eject" + }, + "eslintConfig": { + "extends": [ + "react-app", + "react-app/jest" + ] + }, + "browserslist": { + "production": [ + ">0.2%", + "not dead", + "not op_mini all" + ], + "development": [ + "last 1 chrome version", + "last 1 firefox version", + "last 1 safari version" + ] + } +} diff --git a/automerge-wasm/examples/cra/public/favicon.ico b/javascript/examples/create-react-app/public/favicon.ico similarity index 100% rename from automerge-wasm/examples/cra/public/favicon.ico rename to javascript/examples/create-react-app/public/favicon.ico diff --git a/automerge-wasm/examples/cra/public/index.html b/javascript/examples/create-react-app/public/index.html similarity index 100% rename from automerge-wasm/examples/cra/public/index.html rename to javascript/examples/create-react-app/public/index.html diff --git a/automerge-wasm/examples/cra/public/logo192.png b/javascript/examples/create-react-app/public/logo192.png similarity index 100% rename from automerge-wasm/examples/cra/public/logo192.png rename to javascript/examples/create-react-app/public/logo192.png diff --git a/automerge-wasm/examples/cra/public/logo512.png b/javascript/examples/create-react-app/public/logo512.png similarity index 100% rename from automerge-wasm/examples/cra/public/logo512.png rename to javascript/examples/create-react-app/public/logo512.png diff --git a/automerge-wasm/examples/cra/public/manifest.json b/javascript/examples/create-react-app/public/manifest.json similarity index 100% rename from 
automerge-wasm/examples/cra/public/manifest.json rename to javascript/examples/create-react-app/public/manifest.json diff --git a/automerge-wasm/examples/cra/public/robots.txt b/javascript/examples/create-react-app/public/robots.txt similarity index 100% rename from automerge-wasm/examples/cra/public/robots.txt rename to javascript/examples/create-react-app/public/robots.txt diff --git a/automerge-wasm/examples/cra/src/App.css b/javascript/examples/create-react-app/src/App.css similarity index 100% rename from automerge-wasm/examples/cra/src/App.css rename to javascript/examples/create-react-app/src/App.css diff --git a/javascript/examples/create-react-app/src/App.js b/javascript/examples/create-react-app/src/App.js new file mode 100644 index 00000000..7cfe997b --- /dev/null +++ b/javascript/examples/create-react-app/src/App.js @@ -0,0 +1,20 @@ +import * as Automerge from "@automerge/automerge" +import logo from "./logo.svg" +import "./App.css" + +let doc = Automerge.init() +doc = Automerge.change(doc, d => (d.hello = "from automerge")) +const result = JSON.stringify(doc) + +function App() { + return ( +
+
+ logo +

{result}

+
+
+ ) +} + +export default App diff --git a/javascript/examples/create-react-app/src/App.test.js b/javascript/examples/create-react-app/src/App.test.js new file mode 100644 index 00000000..ea796120 --- /dev/null +++ b/javascript/examples/create-react-app/src/App.test.js @@ -0,0 +1,8 @@ +import { render, screen } from "@testing-library/react" +import App from "./App" + +test("renders learn react link", () => { + render() + const linkElement = screen.getByText(/learn react/i) + expect(linkElement).toBeInTheDocument() +}) diff --git a/javascript/examples/create-react-app/src/index.css b/javascript/examples/create-react-app/src/index.css new file mode 100644 index 00000000..4a1df4db --- /dev/null +++ b/javascript/examples/create-react-app/src/index.css @@ -0,0 +1,13 @@ +body { + margin: 0; + font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen", + "Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", + sans-serif; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} + +code { + font-family: source-code-pro, Menlo, Monaco, Consolas, "Courier New", + monospace; +} diff --git a/javascript/examples/create-react-app/src/index.js b/javascript/examples/create-react-app/src/index.js new file mode 100644 index 00000000..58c21edc --- /dev/null +++ b/javascript/examples/create-react-app/src/index.js @@ -0,0 +1,17 @@ +import React from "react" +import ReactDOM from "react-dom/client" +import "./index.css" +import App from "./App" +import reportWebVitals from "./reportWebVitals" + +const root = ReactDOM.createRoot(document.getElementById("root")) +root.render( + + + +) + +// If you want to start measuring performance in your app, pass a function +// to log results (for example: reportWebVitals(console.log)) +// or send to an analytics endpoint. 
Learn more: https://bit.ly/CRA-vitals +reportWebVitals() diff --git a/automerge-wasm/examples/cra/src/logo.svg b/javascript/examples/create-react-app/src/logo.svg similarity index 100% rename from automerge-wasm/examples/cra/src/logo.svg rename to javascript/examples/create-react-app/src/logo.svg diff --git a/javascript/examples/create-react-app/src/reportWebVitals.js b/javascript/examples/create-react-app/src/reportWebVitals.js new file mode 100644 index 00000000..eee308db --- /dev/null +++ b/javascript/examples/create-react-app/src/reportWebVitals.js @@ -0,0 +1,13 @@ +const reportWebVitals = onPerfEntry => { + if (onPerfEntry && onPerfEntry instanceof Function) { + import("web-vitals").then(({ getCLS, getFID, getFCP, getLCP, getTTFB }) => { + getCLS(onPerfEntry) + getFID(onPerfEntry) + getFCP(onPerfEntry) + getLCP(onPerfEntry) + getTTFB(onPerfEntry) + }) + } +} + +export default reportWebVitals diff --git a/javascript/examples/create-react-app/src/setupTests.js b/javascript/examples/create-react-app/src/setupTests.js new file mode 100644 index 00000000..6a0fd123 --- /dev/null +++ b/javascript/examples/create-react-app/src/setupTests.js @@ -0,0 +1,5 @@ +// jest-dom adds custom jest matchers for asserting on DOM nodes. +// allows you to do things like: +// expect(element).toHaveTextContent(/react/i) +// learn more: https://github.com/testing-library/jest-dom +import "@testing-library/jest-dom" diff --git a/javascript/examples/create-react-app/yarn.lock b/javascript/examples/create-react-app/yarn.lock new file mode 100644 index 00000000..ec83af3b --- /dev/null +++ b/javascript/examples/create-react-app/yarn.lock @@ -0,0 +1,9120 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@adobe/css-tools@^4.0.1": + version "4.0.1" + resolved "http://localhost:4873/@adobe%2fcss-tools/-/css-tools-4.0.1.tgz#b38b444ad3aa5fedbb15f2f746dcd934226a12dd" + integrity sha512-+u76oB43nOHrF4DDWRLWDCtci7f3QJoEBigemIdIeTi1ODqjx6Tad9NCVnPRwewWlKkVab5PlK8DCtPTyX7S8g== + +"@ampproject/remapping@^2.1.0": + version "2.2.0" + resolved "http://localhost:4873/@ampproject%2fremapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d" + integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== + dependencies: + "@jridgewell/gen-mapping" "^0.1.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@apideck/better-ajv-errors@^0.3.1": + version "0.3.6" + resolved "http://localhost:4873/@apideck%2fbetter-ajv-errors/-/better-ajv-errors-0.3.6.tgz#957d4c28e886a64a8141f7522783be65733ff097" + integrity sha512-P+ZygBLZtkp0qqOAJJVX4oX/sFo5JR3eBWwwuqHHhK0GIgQOKWrAfiAaWX0aArHkRWHMuggFEgAZNxVPwPZYaA== + dependencies: + json-schema "^0.4.0" + jsonpointer "^5.0.0" + leven "^3.1.0" + +"@automerge/automerge-wasm@0.1.12": + version "0.1.12" + resolved "https://registry.yarnpkg.com/@automerge/automerge-wasm/-/automerge-wasm-0.1.12.tgz#8ce25255d95d4ed6fb387de6858f7b7b7e2ed4a9" + integrity sha512-/xjX1217QYJ+QaoT6iHQw4hGNUIoc3xc65c9eCnfX5v9J9BkTOl05p2Cnr51O2rPc/M6TqZLmlvpvNVdcH9JpA== + +"@automerge/automerge@2.0.0-alpha.7": + version "2.0.0-alpha.7" + resolved "https://registry.yarnpkg.com/@automerge/automerge/-/automerge-2.0.0-alpha.7.tgz#2ee220d51bcd796074a18af74eeabb5f177e1f36" + integrity sha512-Wd2/GNeqtBybUtXclEE7bWBmmEkhv3q2ITQmLh18V0VvMPbqMBpcOKYzQFnKCyiPyRe5XcYeQAyGyunhE5V0ug== + dependencies: + "@automerge/automerge-wasm" "0.1.12" + uuid "^8.3" + +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.18.6", "@babel/code-frame@^7.8.3": + version "7.18.6" + resolved 
"http://localhost:4873/@babel%2fcode-frame/-/code-frame-7.18.6.tgz#3b25d38c89600baa2dcc219edfa88a74eb2c427a" + integrity sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q== + dependencies: + "@babel/highlight" "^7.18.6" + +"@babel/compat-data@^7.17.7", "@babel/compat-data@^7.18.8", "@babel/compat-data@^7.19.3": + version "7.19.3" + resolved "http://localhost:4873/@babel%2fcompat-data/-/compat-data-7.19.3.tgz#707b939793f867f5a73b2666e6d9a3396eb03151" + integrity sha512-prBHMK4JYYK+wDjJF1q99KK4JLL+egWS4nmNqdlMUgCExMZ+iZW0hGhyC3VEbsPjvaN0TBhW//VIFwBrk8sEiw== + +"@babel/core@^7.1.0", "@babel/core@^7.11.1", "@babel/core@^7.12.3", "@babel/core@^7.16.0", "@babel/core@^7.7.2", "@babel/core@^7.8.0": + version "7.19.3" + resolved "http://localhost:4873/@babel%2fcore/-/core-7.19.3.tgz#2519f62a51458f43b682d61583c3810e7dcee64c" + integrity sha512-WneDJxdsjEvyKtXKsaBGbDeiyOjR5vYq4HcShxnIbG0qixpoHjI3MqeZM9NDvsojNCEBItQE4juOo/bU6e72gQ== + dependencies: + "@ampproject/remapping" "^2.1.0" + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.19.3" + "@babel/helper-compilation-targets" "^7.19.3" + "@babel/helper-module-transforms" "^7.19.0" + "@babel/helpers" "^7.19.0" + "@babel/parser" "^7.19.3" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.3" + "@babel/types" "^7.19.3" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.1" + semver "^6.3.0" + +"@babel/eslint-parser@^7.16.3": + version "7.19.1" + resolved "http://localhost:4873/@babel%2feslint-parser/-/eslint-parser-7.19.1.tgz#4f68f6b0825489e00a24b41b6a1ae35414ecd2f4" + integrity sha512-AqNf2QWt1rtu2/1rLswy6CDP7H9Oh3mMhk177Y67Rg8d7RD9WfOLLv8CGn6tisFvS2htm86yIe1yLF6I1UDaGQ== + dependencies: + "@nicolo-ribaudo/eslint-scope-5-internals" "5.1.1-v1" + eslint-visitor-keys "^2.1.0" + semver "^6.3.0" + +"@babel/generator@^7.19.3", "@babel/generator@^7.7.2": + version "7.19.3" + resolved 
"http://localhost:4873/@babel%2fgenerator/-/generator-7.19.3.tgz#d7f4d1300485b4547cb6f94b27d10d237b42bf59" + integrity sha512-fqVZnmp1ncvZU757UzDheKZpfPgatqY59XtW2/j/18H7u76akb8xqvjw82f+i2UKd/ksYsSick/BCLQUUtJ/qQ== + dependencies: + "@babel/types" "^7.19.3" + "@jridgewell/gen-mapping" "^0.3.2" + jsesc "^2.5.1" + +"@babel/helper-annotate-as-pure@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhelper-annotate-as-pure/-/helper-annotate-as-pure-7.18.6.tgz#eaa49f6f80d5a33f9a5dd2276e6d6e451be0a6bb" + integrity sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-builder-binary-assignment-operator-visitor@^7.18.6": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fhelper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.18.9.tgz#acd4edfd7a566d1d51ea975dff38fd52906981bb" + integrity sha512-yFQ0YCHoIqarl8BCRwBL8ulYUaZpz3bNsA7oFepAzee+8/+ImtADXNOmO5vJvsPff3qi+hvpkY/NYBTrBQgdNw== + dependencies: + "@babel/helper-explode-assignable-expression" "^7.18.6" + "@babel/types" "^7.18.9" + +"@babel/helper-compilation-targets@^7.17.7", "@babel/helper-compilation-targets@^7.18.9", "@babel/helper-compilation-targets@^7.19.0", "@babel/helper-compilation-targets@^7.19.3": + version "7.19.3" + resolved "http://localhost:4873/@babel%2fhelper-compilation-targets/-/helper-compilation-targets-7.19.3.tgz#a10a04588125675d7c7ae299af86fa1b2ee038ca" + integrity sha512-65ESqLGyGmLvgR0mst5AdW1FkNlj9rQsCKduzEoEPhBCDFGXvz2jW6bXFG6i0/MrV2s7hhXjjb2yAzcPuQlLwg== + dependencies: + "@babel/compat-data" "^7.19.3" + "@babel/helper-validator-option" "^7.18.6" + browserslist "^4.21.3" + semver "^6.3.0" + +"@babel/helper-create-class-features-plugin@^7.18.6", "@babel/helper-create-class-features-plugin@^7.19.0": + version "7.19.0" + resolved 
"http://localhost:4873/@babel%2fhelper-create-class-features-plugin/-/helper-create-class-features-plugin-7.19.0.tgz#bfd6904620df4e46470bae4850d66be1054c404b" + integrity sha512-NRz8DwF4jT3UfrmUoZjd0Uph9HQnP30t7Ash+weACcyNkiYTywpIjDBgReJMKgr+n86sn2nPVVmJ28Dm053Kqw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.19.0" + "@babel/helper-member-expression-to-functions" "^7.18.9" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/helper-replace-supers" "^7.18.9" + "@babel/helper-split-export-declaration" "^7.18.6" + +"@babel/helper-create-regexp-features-plugin@^7.18.6", "@babel/helper-create-regexp-features-plugin@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fhelper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.19.0.tgz#7976aca61c0984202baca73d84e2337a5424a41b" + integrity sha512-htnV+mHX32DF81amCDrwIDr8nrp1PTm+3wfBN9/v8QJOLEioOCOG7qNyq0nHeFiWbT3Eb7gsPwEmV64UCQ1jzw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + regexpu-core "^5.1.0" + +"@babel/helper-define-polyfill-provider@^0.3.3": + version "0.3.3" + resolved "http://localhost:4873/@babel%2fhelper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.3.tgz#8612e55be5d51f0cd1f36b4a5a83924e89884b7a" + integrity sha512-z5aQKU4IzbqCC1XH0nAqfsFLMVSo22SBKUc0BxGrLkolTdPTructy0ToNnlO2zA4j9Q/7pjMZf0DSY+DSTYzww== + dependencies: + "@babel/helper-compilation-targets" "^7.17.7" + "@babel/helper-plugin-utils" "^7.16.7" + debug "^4.1.1" + lodash.debounce "^4.0.8" + resolve "^1.14.2" + semver "^6.1.2" + +"@babel/helper-environment-visitor@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fhelper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz#0c0cee9b35d2ca190478756865bb3528422f51be" + integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg== + 
+"@babel/helper-explode-assignable-expression@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhelper-explode-assignable-expression/-/helper-explode-assignable-expression-7.18.6.tgz#41f8228ef0a6f1a036b8dfdfec7ce94f9a6bc096" + integrity sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-function-name@^7.18.9", "@babel/helper-function-name@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fhelper-function-name/-/helper-function-name-7.19.0.tgz#941574ed5390682e872e52d3f38ce9d1bef4648c" + integrity sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w== + dependencies: + "@babel/template" "^7.18.10" + "@babel/types" "^7.19.0" + +"@babel/helper-hoist-variables@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhelper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678" + integrity sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-member-expression-to-functions@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fhelper-member-expression-to-functions/-/helper-member-expression-to-functions-7.18.9.tgz#1531661e8375af843ad37ac692c132841e2fd815" + integrity sha512-RxifAh2ZoVU67PyKIO4AMi1wTenGfMR/O/ae0CCRqwgBAt5v7xjdtRw7UoSbsreKrQn5t7r89eruK/9JjYHuDg== + dependencies: + "@babel/types" "^7.18.9" + +"@babel/helper-module-imports@^7.10.4", "@babel/helper-module-imports@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhelper-module-imports/-/helper-module-imports-7.18.6.tgz#1e3ebdbbd08aad1437b428c50204db13c5a3ca6e" + integrity sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA== + dependencies: + "@babel/types" "^7.18.6" + 
+"@babel/helper-module-transforms@^7.18.6", "@babel/helper-module-transforms@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fhelper-module-transforms/-/helper-module-transforms-7.19.0.tgz#309b230f04e22c58c6a2c0c0c7e50b216d350c30" + integrity sha512-3HBZ377Fe14RbLIA+ac3sY4PTgpxHVkFrESaWhoI5PuyXPBBX8+C34qblV9G89ZtycGJCmCI/Ut+VUDK4bltNQ== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-simple-access" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/helper-validator-identifier" "^7.18.6" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.0" + "@babel/types" "^7.19.0" + +"@babel/helper-optimise-call-expression@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhelper-optimise-call-expression/-/helper-optimise-call-expression-7.18.6.tgz#9369aa943ee7da47edab2cb4e838acf09d290ffe" + integrity sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.16.7", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.18.9", "@babel/helper-plugin-utils@^7.19.0", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fhelper-plugin-utils/-/helper-plugin-utils-7.19.0.tgz#4796bb14961521f0f8715990bee2fb6e51ce21bf" + integrity sha512-40Ryx7I8mT+0gaNxm8JGTZFUITNqdLAgdg0hXzeVZxVD6nFsdhQvip6v8dqkRHzsz1VFpFAaOCHNn0vKBL7Czw== + +"@babel/helper-remap-async-to-generator@^7.18.6", "@babel/helper-remap-async-to-generator@^7.18.9": + version "7.18.9" + resolved 
"http://localhost:4873/@babel%2fhelper-remap-async-to-generator/-/helper-remap-async-to-generator-7.18.9.tgz#997458a0e3357080e54e1d79ec347f8a8cd28519" + integrity sha512-dI7q50YKd8BAv3VEfgg7PS7yD3Rtbi2J1XMXaalXO0W0164hYLnh8zpjRS0mte9MfVp/tltvr/cfdXPvJr1opA== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-wrap-function" "^7.18.9" + "@babel/types" "^7.18.9" + +"@babel/helper-replace-supers@^7.18.6", "@babel/helper-replace-supers@^7.18.9", "@babel/helper-replace-supers@^7.19.1": + version "7.19.1" + resolved "http://localhost:4873/@babel%2fhelper-replace-supers/-/helper-replace-supers-7.19.1.tgz#e1592a9b4b368aa6bdb8784a711e0bcbf0612b78" + integrity sha512-T7ahH7wV0Hfs46SFh5Jz3s0B6+o8g3c+7TMxu7xKfmHikg7EAZ3I2Qk9LFhjxXq8sL7UkP5JflezNwoZa8WvWw== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-member-expression-to-functions" "^7.18.9" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/traverse" "^7.19.1" + "@babel/types" "^7.19.0" + +"@babel/helper-simple-access@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhelper-simple-access/-/helper-simple-access-7.18.6.tgz#d6d8f51f4ac2978068df934b569f08f29788c7ea" + integrity sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-skip-transparent-expression-wrappers@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fhelper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.18.9.tgz#778d87b3a758d90b471e7b9918f34a9a02eb5818" + integrity sha512-imytd2gHi3cJPsybLRbmFrF7u5BIEuI2cNheyKi3/iOBC63kNn3q8Crn2xVuESli0aM4KYsyEqKyS7lFL8YVtw== + dependencies: + "@babel/types" "^7.18.9" + +"@babel/helper-split-export-declaration@^7.18.6": + version "7.18.6" + resolved 
"http://localhost:4873/@babel%2fhelper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz#7367949bc75b20c6d5a5d4a97bba2824ae8ef075" + integrity sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-string-parser@^7.18.10": + version "7.18.10" + resolved "http://localhost:4873/@babel%2fhelper-string-parser/-/helper-string-parser-7.18.10.tgz#181f22d28ebe1b3857fa575f5c290b1aaf659b56" + integrity sha512-XtIfWmeNY3i4t7t4D2t02q50HvqHybPqW2ki1kosnvWCwuCMeo81Jf0gwr85jy/neUdg5XDdeFE/80DXiO+njw== + +"@babel/helper-validator-identifier@^7.18.6", "@babel/helper-validator-identifier@^7.19.1": + version "7.19.1" + resolved "http://localhost:4873/@babel%2fhelper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz#7eea834cf32901ffdc1a7ee555e2f9c27e249ca2" + integrity sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w== + +"@babel/helper-validator-option@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhelper-validator-option/-/helper-validator-option-7.18.6.tgz#bf0d2b5a509b1f336099e4ff36e1a63aa5db4db8" + integrity sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw== + +"@babel/helper-wrap-function@^7.18.9": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fhelper-wrap-function/-/helper-wrap-function-7.19.0.tgz#89f18335cff1152373222f76a4b37799636ae8b1" + integrity sha512-txX8aN8CZyYGTwcLhlk87KRqncAzhh5TpQamZUa0/u3an36NtDpUP6bQgBCBcLeBs09R/OwQu3OjK0k/HwfNDg== + dependencies: + "@babel/helper-function-name" "^7.19.0" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.0" + "@babel/types" "^7.19.0" + +"@babel/helpers@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fhelpers/-/helpers-7.19.0.tgz#f30534657faf246ae96551d88dd31e9d1fa1fc18" + integrity 
sha512-DRBCKGwIEdqY3+rPJgG/dKfQy9+08rHIAJx8q2p+HSWP87s2HCrQmaAMMyMll2kIXKCW0cO1RdQskx15Xakftg== + dependencies: + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.0" + "@babel/types" "^7.19.0" + +"@babel/highlight@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fhighlight/-/highlight-7.18.6.tgz#81158601e93e2563795adcbfbdf5d64be3f2ecdf" + integrity sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g== + dependencies: + "@babel/helper-validator-identifier" "^7.18.6" + chalk "^2.0.0" + js-tokens "^4.0.0" + +"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.18.10", "@babel/parser@^7.19.3": + version "7.19.3" + resolved "http://localhost:4873/@babel%2fparser/-/parser-7.19.3.tgz#8dd36d17c53ff347f9e55c328710321b49479a9a" + integrity sha512-pJ9xOlNWHiy9+FuFP09DEAFbAn4JskgRsVcc169w2xRBC3FRGuQEwjeIMMND9L2zc0iEhO/tGv4Zq+km+hxNpQ== + +"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.18.6.tgz#da5b8f9a580acdfbe53494dba45ea389fb09a4d2" + integrity sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.18.9.tgz#a11af19aa373d68d561f08e0a57242350ed0ec50" + integrity sha512-AHrP9jadvH7qlOj6PINbgSuphjQUAK7AOT7DPjBo9EHoLhQTnnK5u45e1Hd4DbSQEO9nqPWtQ89r+XEOWFScKg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9" + 
"@babel/plugin-proposal-optional-chaining" "^7.18.9" + +"@babel/plugin-proposal-async-generator-functions@^7.19.1": + version "7.19.1" + resolved "http://localhost:4873/@babel%2fplugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.19.1.tgz#34f6f5174b688529342288cd264f80c9ea9fb4a7" + integrity sha512-0yu8vNATgLy4ivqMNBIwb1HebCelqN7YX8SL3FDXORv/RqT0zEEWUCH4GH44JsSrvCu6GqnAdR5EBFAPeNBB4Q== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-remap-async-to-generator" "^7.18.9" + "@babel/plugin-syntax-async-generators" "^7.8.4" + +"@babel/plugin-proposal-class-properties@^7.16.0", "@babel/plugin-proposal-class-properties@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz#b110f59741895f7ec21a6fff696ec46265c446a3" + integrity sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-proposal-class-static-block@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.18.6.tgz#8aa81d403ab72d3962fc06c26e222dacfc9b9020" + integrity sha512-+I3oIiNxrCpup3Gi8n5IGMwj0gOCAjcJUSQEcotNnCCPMEnixawOQ+KeJPlgfjzx+FKQ1QSyZOWe7wmoJp7vhw== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + +"@babel/plugin-proposal-decorators@^7.16.4": + version "7.19.3" + resolved "http://localhost:4873/@babel%2fplugin-proposal-decorators/-/plugin-proposal-decorators-7.19.3.tgz#c1977e4902a18cdf9051bf7bf08d97db2fd8b110" + integrity sha512-MbgXtNXqo7RTKYIXVchVJGPvaVufQH3pxvQyfbGvNw1DObIhph+PesYXJTcd8J4DdWibvf6Z2eanOyItX8WnJg== + 
dependencies: + "@babel/helper-create-class-features-plugin" "^7.19.0" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-replace-supers" "^7.19.1" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/plugin-syntax-decorators" "^7.19.0" + +"@babel/plugin-proposal-dynamic-import@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.18.6.tgz#72bcf8d408799f547d759298c3c27c7e7faa4d94" + integrity sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + +"@babel/plugin-proposal-export-namespace-from@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.18.9.tgz#5f7313ab348cdb19d590145f9247540e94761203" + integrity sha512-k1NtHyOMvlDDFeb9G5PhUXuGj8m/wiwojgQVEhJ/fsVsMCpLyOP4h0uGEjYJKrRI+EVPlb5Jk+Gt9P97lOGwtA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + +"@babel/plugin-proposal-json-strings@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-json-strings/-/plugin-proposal-json-strings-7.18.6.tgz#7e8788c1811c393aff762817e7dbf1ebd0c05f0b" + integrity sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-json-strings" "^7.8.3" + +"@babel/plugin-proposal-logical-assignment-operators@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.18.9.tgz#8148cbb350483bf6220af06fa6db3690e14b2e23" + integrity sha512-128YbMpjCrP35IOExw2Fq+x55LMP42DzhOhX2aNNIdI9avSWl2PI0yuBWarr3RYpZBSPtabfadkH2yeRiMD61Q== + 
dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + +"@babel/plugin-proposal-nullish-coalescing-operator@^7.16.0", "@babel/plugin-proposal-nullish-coalescing-operator@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz#fdd940a99a740e577d6c753ab6fbb43fdb9467e1" + integrity sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + +"@babel/plugin-proposal-numeric-separator@^7.16.0", "@babel/plugin-proposal-numeric-separator@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz#899b14fbafe87f053d2c5ff05b36029c62e13c75" + integrity sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + +"@babel/plugin-proposal-object-rest-spread@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.18.9.tgz#f9434f6beb2c8cae9dfcf97d2a5941bbbf9ad4e7" + integrity sha512-kDDHQ5rflIeY5xl69CEqGEZ0KY369ehsCIEbTGb4siHG5BE9sga/T0r0OUwyZNLMmZE79E1kbsqAjwFCW4ds6Q== + dependencies: + "@babel/compat-data" "^7.18.8" + "@babel/helper-compilation-targets" "^7.18.9" + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-transform-parameters" "^7.18.8" + +"@babel/plugin-proposal-optional-catch-binding@^7.18.6": + version "7.18.6" + resolved 
"http://localhost:4873/@babel%2fplugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.18.6.tgz#f9400d0e6a3ea93ba9ef70b09e72dd6da638a2cb" + integrity sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + +"@babel/plugin-proposal-optional-chaining@^7.16.0", "@babel/plugin-proposal-optional-chaining@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.18.9.tgz#e8e8fe0723f2563960e4bf5e9690933691915993" + integrity sha512-v5nwt4IqBXihxGsW2QmCWMDS3B3bzGIk/EQVZz2ei7f3NJl8NzAJVvUmpDW5q1CRNY+Beb/k58UAH1Km1N411w== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + +"@babel/plugin-proposal-private-methods@^7.16.0", "@babel/plugin-proposal-private-methods@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz#5209de7d213457548a98436fa2882f52f4be6bea" + integrity sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-proposal-private-property-in-object@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.18.6.tgz#a64137b232f0aca3733a67eb1a144c192389c503" + integrity sha512-9Rysx7FOctvT5ouj5JODjAFAkgGoudQuLPamZb0v1TGLpapdNaftzifU8NTWQm0IRjqoYypdrSmyWgkocDQ8Dw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" 
"^7.18.6" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + +"@babel/plugin-proposal-unicode-property-regex@^7.18.6", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.18.6.tgz#af613d2cd5e643643b65cded64207b15c85cb78e" + integrity sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-async-generators@^7.8.4": + version "7.8.4" + resolved "http://localhost:4873/@babel%2fplugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" + integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-bigint@^7.8.3": + version "7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" + integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-class-properties@^7.12.13", "@babel/plugin-syntax-class-properties@^7.8.3": + version "7.12.13" + resolved "http://localhost:4873/@babel%2fplugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" + integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== + dependencies: + "@babel/helper-plugin-utils" "^7.12.13" + +"@babel/plugin-syntax-class-static-block@^7.14.5": + version "7.14.5" + resolved 
"http://localhost:4873/@babel%2fplugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz#195df89b146b4b78b3bf897fd7a257c84659d406" + integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-decorators@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fplugin-syntax-decorators/-/plugin-syntax-decorators-7.19.0.tgz#5f13d1d8fce96951bea01a10424463c9a5b3a599" + integrity sha512-xaBZUEDntt4faL1yN8oIFlhfXeQAWJW7CLKYsHTUqriCUbj8xOra8bfxxKGi/UwExPFBuPdH4XfHc9rGQhrVkQ== + dependencies: + "@babel/helper-plugin-utils" "^7.19.0" + +"@babel/plugin-syntax-dynamic-import@^7.8.3": + version "7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3" + integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-export-namespace-from@^7.8.3": + version "7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a" + integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.3" + +"@babel/plugin-syntax-flow@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-syntax-flow/-/plugin-syntax-flow-7.18.6.tgz#774d825256f2379d06139be0c723c4dd444f3ca1" + integrity sha512-LUbR+KNTBWCUAqRG9ex5Gnzu2IOkt8jRJbHHXFT9q+L9zm7M/QQbEqXyw1n1pohYvOyWC8CjeyjrSaIwiYjK7A== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-import-assertions@^7.18.6": + version "7.18.6" + resolved 
"http://localhost:4873/@babel%2fplugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.18.6.tgz#cd6190500a4fa2fe31990a963ffab4b63e4505e4" + integrity sha512-/DU3RXad9+bZwrgWJQKbr39gYbJpLJHezqEzRzi/BHRlJ9zsQb4CK2CA/5apllXNomwA1qHwzvHl+AdEmC5krQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-import-meta@^7.8.3": + version "7.10.4" + resolved "http://localhost:4873/@babel%2fplugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" + integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-json-strings@^7.8.3": + version "7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" + integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-jsx@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz#a8feef63b010150abd97f1649ec296e849943ca0" + integrity sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-logical-assignment-operators@^7.10.4", "@babel/plugin-syntax-logical-assignment-operators@^7.8.3": + version "7.10.4" + resolved "http://localhost:4873/@babel%2fplugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" + integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": + version 
"7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" + integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-numeric-separator@^7.10.4", "@babel/plugin-syntax-numeric-separator@^7.8.3": + version "7.10.4" + resolved "http://localhost:4873/@babel%2fplugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" + integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-object-rest-spread@^7.8.3": + version "7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" + integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-catch-binding@^7.8.3": + version "7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" + integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-chaining@^7.8.3": + version "7.8.3" + resolved "http://localhost:4873/@babel%2fplugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" + integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== + dependencies: + "@babel/helper-plugin-utils" 
"^7.8.0" + +"@babel/plugin-syntax-private-property-in-object@^7.14.5": + version "7.14.5" + resolved "http://localhost:4873/@babel%2fplugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz#0dc6671ec0ea22b6e94a1114f857970cd39de1ad" + integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-top-level-await@^7.14.5", "@babel/plugin-syntax-top-level-await@^7.8.3": + version "7.14.5" + resolved "http://localhost:4873/@babel%2fplugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" + integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-typescript@^7.18.6", "@babel/plugin-syntax-typescript@^7.7.2": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-syntax-typescript/-/plugin-syntax-typescript-7.18.6.tgz#1c09cd25795c7c2b8a4ba9ae49394576d4133285" + integrity sha512-mAWAuq4rvOepWCBid55JuRNvpTNf2UGVgoz4JV0fXEKolsVZDzsa4NqCef758WZJj/GDu0gVGItjKFiClTAmZA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-arrow-functions@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.18.6.tgz#19063fcf8771ec7b31d742339dac62433d0611fe" + integrity sha512-9S9X9RUefzrsHZmKMbDXxweEH+YlE8JJEuat9FdvW9Qh1cw7W64jELCtWNkPBPX5En45uy28KGvA/AySqUh8CQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-async-to-generator@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.18.6.tgz#ccda3d1ab9d5ced5265fdb13f1882d5476c71615" + integrity 
sha512-ARE5wZLKnTgPW7/1ftQmSi1CmkqqHo2DNmtztFhvgtOWSDfq0Cq9/9L+KnZNYSNrydBekhW3rwShduf59RoXag== + dependencies: + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-remap-async-to-generator" "^7.18.6" + +"@babel/plugin-transform-block-scoped-functions@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.18.6.tgz#9187bf4ba302635b9d70d986ad70f038726216a8" + integrity sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-block-scoping@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-transform-block-scoping/-/plugin-transform-block-scoping-7.18.9.tgz#f9b7e018ac3f373c81452d6ada8bd5a18928926d" + integrity sha512-5sDIJRV1KtQVEbt/EIBwGy4T01uYIo4KRB3VUqzkhrAIOGx7AoctL9+Ux88btY0zXdDyPJ9mW+bg+v+XEkGmtw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-classes@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fplugin-transform-classes/-/plugin-transform-classes-7.19.0.tgz#0e61ec257fba409c41372175e7c1e606dc79bb20" + integrity sha512-YfeEE9kCjqTS9IitkgfJuxjcEtLUHMqa8yUJ6zdz8vR7hKuo6mOy2C05P0F1tdMmDCeuyidKnlrw/iTppHcr2A== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-compilation-targets" "^7.19.0" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.19.0" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-replace-supers" "^7.18.9" + "@babel/helper-split-export-declaration" "^7.18.6" + globals "^11.1.0" + +"@babel/plugin-transform-computed-properties@^7.18.9": + version "7.18.9" + resolved 
"http://localhost:4873/@babel%2fplugin-transform-computed-properties/-/plugin-transform-computed-properties-7.18.9.tgz#2357a8224d402dad623caf6259b611e56aec746e" + integrity sha512-+i0ZU1bCDymKakLxn5srGHrsAPRELC2WIbzwjLhHW9SIE1cPYkLCL0NlnXMZaM1vhfgA2+M7hySk42VBvrkBRw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-destructuring@^7.18.13": + version "7.18.13" + resolved "http://localhost:4873/@babel%2fplugin-transform-destructuring/-/plugin-transform-destructuring-7.18.13.tgz#9e03bc4a94475d62b7f4114938e6c5c33372cbf5" + integrity sha512-TodpQ29XekIsex2A+YJPj5ax2plkGa8YYY6mFjCohk/IG9IY42Rtuj1FuDeemfg2ipxIFLzPeA83SIBnlhSIow== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-dotall-regex@^7.18.6", "@babel/plugin-transform-dotall-regex@^7.4.4": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.18.6.tgz#b286b3e7aae6c7b861e45bed0a2fafd6b1a4fef8" + integrity sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-duplicate-keys@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.18.9.tgz#687f15ee3cdad6d85191eb2a372c4528eaa0ae0e" + integrity sha512-d2bmXCtZXYc59/0SanQKbiWINadaJXqtvIQIzd4+hNwkWBgyCd5F/2t1kXoUdvPMrxzPvhK6EMQRROxsue+mfw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-exponentiation-operator@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.18.6.tgz#421c705f4521888c65e91fdd1af951bfefd4dacd" + integrity sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw== + 
dependencies: + "@babel/helper-builder-binary-assignment-operator-visitor" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-flow-strip-types@^7.16.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fplugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.19.0.tgz#e9e8606633287488216028719638cbbb2f2dde8f" + integrity sha512-sgeMlNaQVbCSpgLSKP4ZZKfsJVnFnNQlUSk6gPYzR/q7tzCgQF2t8RBKAP6cKJeZdveei7Q7Jm527xepI8lNLg== + dependencies: + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/plugin-syntax-flow" "^7.18.6" + +"@babel/plugin-transform-for-of@^7.18.8": + version "7.18.8" + resolved "http://localhost:4873/@babel%2fplugin-transform-for-of/-/plugin-transform-for-of-7.18.8.tgz#6ef8a50b244eb6a0bdbad0c7c61877e4e30097c1" + integrity sha512-yEfTRnjuskWYo0k1mHUqrVWaZwrdq8AYbfrpqULOJOaucGSp4mNMVps+YtA8byoevxS/urwU75vyhQIxcCgiBQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-function-name@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-transform-function-name/-/plugin-transform-function-name-7.18.9.tgz#cc354f8234e62968946c61a46d6365440fc764e0" + integrity sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ== + dependencies: + "@babel/helper-compilation-targets" "^7.18.9" + "@babel/helper-function-name" "^7.18.9" + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-literals@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-transform-literals/-/plugin-transform-literals-7.18.9.tgz#72796fdbef80e56fba3c6a699d54f0de557444bc" + integrity sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-member-expression-literals@^7.18.6": + version "7.18.6" + resolved 
"http://localhost:4873/@babel%2fplugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.18.6.tgz#ac9fdc1a118620ac49b7e7a5d2dc177a1bfee88e" + integrity sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-modules-amd@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-modules-amd/-/plugin-transform-modules-amd-7.18.6.tgz#8c91f8c5115d2202f277549848874027d7172d21" + integrity sha512-Pra5aXsmTsOnjM3IajS8rTaLCy++nGM4v3YR4esk5PCsyg9z8NA5oQLwxzMUtDBd8F+UmVza3VxoAaWCbzH1rg== + dependencies: + "@babel/helper-module-transforms" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + babel-plugin-dynamic-import-node "^2.3.3" + +"@babel/plugin-transform-modules-commonjs@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.18.6.tgz#afd243afba166cca69892e24a8fd8c9f2ca87883" + integrity sha512-Qfv2ZOWikpvmedXQJDSbxNqy7Xr/j2Y8/KfijM0iJyKkBTmWuvCA1yeH1yDM7NJhBW/2aXxeucLj6i80/LAJ/Q== + dependencies: + "@babel/helper-module-transforms" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-simple-access" "^7.18.6" + babel-plugin-dynamic-import-node "^2.3.3" + +"@babel/plugin-transform-modules-systemjs@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fplugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.19.0.tgz#5f20b471284430f02d9c5059d9b9a16d4b085a1f" + integrity sha512-x9aiR0WXAWmOWsqcsnrzGR+ieaTMVyGyffPVA7F8cXAGt/UxefYv6uSHZLkAFChN5M5Iy1+wjE+xJuPt22H39A== + dependencies: + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-module-transforms" "^7.19.0" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-validator-identifier" "^7.18.6" + babel-plugin-dynamic-import-node "^2.3.3" + +"@babel/plugin-transform-modules-umd@^7.18.6": + 
version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-modules-umd/-/plugin-transform-modules-umd-7.18.6.tgz#81d3832d6034b75b54e62821ba58f28ed0aab4b9" + integrity sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ== + dependencies: + "@babel/helper-module-transforms" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-named-capturing-groups-regex@^7.19.1": + version "7.19.1" + resolved "http://localhost:4873/@babel%2fplugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.19.1.tgz#ec7455bab6cd8fb05c525a94876f435a48128888" + integrity sha512-oWk9l9WItWBQYS4FgXD4Uyy5kq898lvkXpXQxoJEY1RnvPk4R/Dvu2ebXU9q8lP+rlMwUQTFf2Ok6d78ODa0kw== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.19.0" + "@babel/helper-plugin-utils" "^7.19.0" + +"@babel/plugin-transform-new-target@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-new-target/-/plugin-transform-new-target-7.18.6.tgz#d128f376ae200477f37c4ddfcc722a8a1b3246a8" + integrity sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-object-super@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-object-super/-/plugin-transform-object-super-7.18.6.tgz#fb3c6ccdd15939b6ff7939944b51971ddc35912c" + integrity sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-replace-supers" "^7.18.6" + +"@babel/plugin-transform-parameters@^7.18.8": + version "7.18.8" + resolved "http://localhost:4873/@babel%2fplugin-transform-parameters/-/plugin-transform-parameters-7.18.8.tgz#ee9f1a0ce6d78af58d0956a9378ea3427cccb48a" + integrity 
sha512-ivfbE3X2Ss+Fj8nnXvKJS6sjRG4gzwPMsP+taZC+ZzEGjAYlvENixmt1sZ5Ca6tWls+BlKSGKPJ6OOXvXCbkFg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-property-literals@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-property-literals/-/plugin-transform-property-literals-7.18.6.tgz#e22498903a483448e94e032e9bbb9c5ccbfc93a3" + integrity sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-react-constant-elements@^7.12.1": + version "7.18.12" + resolved "http://localhost:4873/@babel%2fplugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.18.12.tgz#edf3bec47eb98f14e84fa0af137fcc6aad8e0443" + integrity sha512-Q99U9/ttiu+LMnRU8psd23HhvwXmKWDQIpocm0JKaICcZHnw+mdQbHm6xnSy7dOl8I5PELakYtNBubNQlBXbZw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-react-display-name@^7.16.0", "@babel/plugin-transform-react-display-name@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-react-display-name/-/plugin-transform-react-display-name-7.18.6.tgz#8b1125f919ef36ebdfff061d664e266c666b9415" + integrity sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-react-jsx-development@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.18.6.tgz#dbe5c972811e49c7405b630e4d0d2e1380c0ddc5" + integrity sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA== + dependencies: + "@babel/plugin-transform-react-jsx" "^7.18.6" + +"@babel/plugin-transform-react-jsx@^7.18.6": + version "7.19.0" + resolved 
"http://localhost:4873/@babel%2fplugin-transform-react-jsx/-/plugin-transform-react-jsx-7.19.0.tgz#b3cbb7c3a00b92ec8ae1027910e331ba5c500eb9" + integrity sha512-UVEvX3tXie3Szm3emi1+G63jyw1w5IcMY0FSKM+CRnKRI5Mr1YbCNgsSTwoTwKphQEG9P+QqmuRFneJPZuHNhg== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/plugin-syntax-jsx" "^7.18.6" + "@babel/types" "^7.19.0" + +"@babel/plugin-transform-react-pure-annotations@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.18.6.tgz#561af267f19f3e5d59291f9950fd7b9663d0d844" + integrity sha512-I8VfEPg9r2TRDdvnHgPepTKvuRomzA8+u+nhY7qSI1fR2hRNebasZEETLyM5mAUr0Ku56OkXJ0I7NHJnO6cJiQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-regenerator@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-regenerator/-/plugin-transform-regenerator-7.18.6.tgz#585c66cb84d4b4bf72519a34cfce761b8676ca73" + integrity sha512-poqRI2+qiSdeldcz4wTSTXBRryoq3Gc70ye7m7UD5Ww0nE29IXqMl6r7Nd15WBgRd74vloEMlShtH6CKxVzfmQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + regenerator-transform "^0.15.0" + +"@babel/plugin-transform-reserved-words@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-reserved-words/-/plugin-transform-reserved-words-7.18.6.tgz#b1abd8ebf8edaa5f7fe6bbb8d2133d23b6a6f76a" + integrity sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-runtime@^7.16.4": + version "7.19.1" + resolved "http://localhost:4873/@babel%2fplugin-transform-runtime/-/plugin-transform-runtime-7.19.1.tgz#a3df2d7312eea624c7889a2dcd37fd1dfd25b2c6" + integrity 
sha512-2nJjTUFIzBMP/f/miLxEK9vxwW/KUXsdvN4sR//TmuDhe6yU2h57WmIOE12Gng3MDP/xpjUV/ToZRdcf8Yj4fA== + dependencies: + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.19.0" + babel-plugin-polyfill-corejs2 "^0.3.3" + babel-plugin-polyfill-corejs3 "^0.6.0" + babel-plugin-polyfill-regenerator "^0.4.1" + semver "^6.3.0" + +"@babel/plugin-transform-shorthand-properties@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.18.6.tgz#6d6df7983d67b195289be24909e3f12a8f664dc9" + integrity sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-spread@^7.19.0": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fplugin-transform-spread/-/plugin-transform-spread-7.19.0.tgz#dd60b4620c2fec806d60cfaae364ec2188d593b6" + integrity sha512-RsuMk7j6n+r752EtzyScnWkQyuJdli6LdO5Klv8Yx0OfPVTcQkIUfS8clx5e9yHXzlnhOZF3CbQ8C2uP5j074w== + dependencies: + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-skip-transparent-expression-wrappers" "^7.18.9" + +"@babel/plugin-transform-sticky-regex@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.18.6.tgz#c6706eb2b1524028e317720339583ad0f444adcc" + integrity sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-template-literals@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-transform-template-literals/-/plugin-transform-template-literals-7.18.9.tgz#04ec6f10acdaa81846689d63fae117dd9c243a5e" + integrity sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" 
+ +"@babel/plugin-transform-typeof-symbol@^7.18.9": + version "7.18.9" + resolved "http://localhost:4873/@babel%2fplugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.18.9.tgz#c8cea68263e45addcd6afc9091429f80925762c0" + integrity sha512-SRfwTtF11G2aemAZWivL7PD+C9z52v9EvMqH9BuYbabyPuKUvSWks3oCg6041pT925L4zVFqaVBeECwsmlguEw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-typescript@^7.18.6": + version "7.19.3" + resolved "http://localhost:4873/@babel%2fplugin-transform-typescript/-/plugin-transform-typescript-7.19.3.tgz#4f1db1e0fe278b42ddbc19ec2f6cd2f8262e35d6" + integrity sha512-z6fnuK9ve9u/0X0rRvI9MY0xg+DOUaABDYOe+/SQTxtlptaBB/V9JIUxJn6xp3lMBeb9qe8xSFmHU35oZDXD+w== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.19.0" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/plugin-syntax-typescript" "^7.18.6" + +"@babel/plugin-transform-unicode-escapes@^7.18.10": + version "7.18.10" + resolved "http://localhost:4873/@babel%2fplugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.18.10.tgz#1ecfb0eda83d09bbcb77c09970c2dd55832aa246" + integrity sha512-kKAdAI+YzPgGY/ftStBFXTI1LZFju38rYThnfMykS+IXy8BVx+res7s2fxf1l8I35DV2T97ezo6+SGrXz6B3iQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-unicode-regex@^7.18.6": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fplugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.18.6.tgz#194317225d8c201bbae103364ffe9e2cea36cdca" + integrity sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/preset-env@^7.11.0", "@babel/preset-env@^7.12.1", "@babel/preset-env@^7.16.4": + version "7.19.3" + resolved "http://localhost:4873/@babel%2fpreset-env/-/preset-env-7.19.3.tgz#52cd19abaecb3f176a4ff9cc5e15b7bf06bec754" + integrity 
sha512-ziye1OTc9dGFOAXSWKUqQblYHNlBOaDl8wzqf2iKXJAltYiR3hKHUKmkt+S9PppW7RQpq4fFCrwwpIDj/f5P4w== + dependencies: + "@babel/compat-data" "^7.19.3" + "@babel/helper-compilation-targets" "^7.19.3" + "@babel/helper-plugin-utils" "^7.19.0" + "@babel/helper-validator-option" "^7.18.6" + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.18.6" + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.18.9" + "@babel/plugin-proposal-async-generator-functions" "^7.19.1" + "@babel/plugin-proposal-class-properties" "^7.18.6" + "@babel/plugin-proposal-class-static-block" "^7.18.6" + "@babel/plugin-proposal-dynamic-import" "^7.18.6" + "@babel/plugin-proposal-export-namespace-from" "^7.18.9" + "@babel/plugin-proposal-json-strings" "^7.18.6" + "@babel/plugin-proposal-logical-assignment-operators" "^7.18.9" + "@babel/plugin-proposal-nullish-coalescing-operator" "^7.18.6" + "@babel/plugin-proposal-numeric-separator" "^7.18.6" + "@babel/plugin-proposal-object-rest-spread" "^7.18.9" + "@babel/plugin-proposal-optional-catch-binding" "^7.18.6" + "@babel/plugin-proposal-optional-chaining" "^7.18.9" + "@babel/plugin-proposal-private-methods" "^7.18.6" + "@babel/plugin-proposal-private-property-in-object" "^7.18.6" + "@babel/plugin-proposal-unicode-property-regex" "^7.18.6" + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-class-properties" "^7.12.13" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + "@babel/plugin-syntax-import-assertions" "^7.18.6" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + 
"@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + "@babel/plugin-syntax-top-level-await" "^7.14.5" + "@babel/plugin-transform-arrow-functions" "^7.18.6" + "@babel/plugin-transform-async-to-generator" "^7.18.6" + "@babel/plugin-transform-block-scoped-functions" "^7.18.6" + "@babel/plugin-transform-block-scoping" "^7.18.9" + "@babel/plugin-transform-classes" "^7.19.0" + "@babel/plugin-transform-computed-properties" "^7.18.9" + "@babel/plugin-transform-destructuring" "^7.18.13" + "@babel/plugin-transform-dotall-regex" "^7.18.6" + "@babel/plugin-transform-duplicate-keys" "^7.18.9" + "@babel/plugin-transform-exponentiation-operator" "^7.18.6" + "@babel/plugin-transform-for-of" "^7.18.8" + "@babel/plugin-transform-function-name" "^7.18.9" + "@babel/plugin-transform-literals" "^7.18.9" + "@babel/plugin-transform-member-expression-literals" "^7.18.6" + "@babel/plugin-transform-modules-amd" "^7.18.6" + "@babel/plugin-transform-modules-commonjs" "^7.18.6" + "@babel/plugin-transform-modules-systemjs" "^7.19.0" + "@babel/plugin-transform-modules-umd" "^7.18.6" + "@babel/plugin-transform-named-capturing-groups-regex" "^7.19.1" + "@babel/plugin-transform-new-target" "^7.18.6" + "@babel/plugin-transform-object-super" "^7.18.6" + "@babel/plugin-transform-parameters" "^7.18.8" + "@babel/plugin-transform-property-literals" "^7.18.6" + "@babel/plugin-transform-regenerator" "^7.18.6" + "@babel/plugin-transform-reserved-words" "^7.18.6" + "@babel/plugin-transform-shorthand-properties" "^7.18.6" + "@babel/plugin-transform-spread" "^7.19.0" + "@babel/plugin-transform-sticky-regex" "^7.18.6" + "@babel/plugin-transform-template-literals" "^7.18.9" + "@babel/plugin-transform-typeof-symbol" "^7.18.9" + "@babel/plugin-transform-unicode-escapes" "^7.18.10" + "@babel/plugin-transform-unicode-regex" "^7.18.6" + "@babel/preset-modules" "^0.1.5" + "@babel/types" "^7.19.3" + babel-plugin-polyfill-corejs2 "^0.3.3" + 
babel-plugin-polyfill-corejs3 "^0.6.0" + babel-plugin-polyfill-regenerator "^0.4.1" + core-js-compat "^3.25.1" + semver "^6.3.0" + +"@babel/preset-modules@^0.1.5": + version "0.1.5" + resolved "http://localhost:4873/@babel%2fpreset-modules/-/preset-modules-0.1.5.tgz#ef939d6e7f268827e1841638dc6ff95515e115d9" + integrity sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/plugin-proposal-unicode-property-regex" "^7.4.4" + "@babel/plugin-transform-dotall-regex" "^7.4.4" + "@babel/types" "^7.4.4" + esutils "^2.0.2" + +"@babel/preset-react@^7.12.5", "@babel/preset-react@^7.16.0": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fpreset-react/-/preset-react-7.18.6.tgz#979f76d6277048dc19094c217b507f3ad517dd2d" + integrity sha512-zXr6atUmyYdiWRVLOZahakYmOBHtWc2WGCkP8PYTgZi0iJXDY2CN180TdrIW4OGOAdLc7TifzDIvtx6izaRIzg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-validator-option" "^7.18.6" + "@babel/plugin-transform-react-display-name" "^7.18.6" + "@babel/plugin-transform-react-jsx" "^7.18.6" + "@babel/plugin-transform-react-jsx-development" "^7.18.6" + "@babel/plugin-transform-react-pure-annotations" "^7.18.6" + +"@babel/preset-typescript@^7.16.0": + version "7.18.6" + resolved "http://localhost:4873/@babel%2fpreset-typescript/-/preset-typescript-7.18.6.tgz#ce64be3e63eddc44240c6358daefac17b3186399" + integrity sha512-s9ik86kXBAnD760aybBucdpnLsAt0jK1xqJn2juOn9lkOvSHV60os5hxoVJsPzMQxvnUJFAlkont2DvvaYEBtQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-validator-option" "^7.18.6" + "@babel/plugin-transform-typescript" "^7.18.6" + +"@babel/runtime-corejs3@^7.10.2": + version "7.19.1" + resolved "http://localhost:4873/@babel%2fruntime-corejs3/-/runtime-corejs3-7.19.1.tgz#f0cbbe7edda7c4109cd253bb1dee99aba4594ad9" + integrity 
sha512-j2vJGnkopRzH+ykJ8h68wrHnEUmtK//E723jjixiAl/PPf6FhqY/vYRcMVlNydRKQjQsTsYEjpx+DZMIvnGk/g== + dependencies: + core-js-pure "^3.25.1" + regenerator-runtime "^0.13.4" + +"@babel/runtime@^7.10.2", "@babel/runtime@^7.11.2", "@babel/runtime@^7.12.5", "@babel/runtime@^7.16.3", "@babel/runtime@^7.18.9", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2": + version "7.19.0" + resolved "http://localhost:4873/@babel%2fruntime/-/runtime-7.19.0.tgz#22b11c037b094d27a8a2504ea4dcff00f50e2259" + integrity sha512-eR8Lo9hnDS7tqkO7NsV+mKvCmv5boaXFSZ70DnfhcgiEne8hv9oCEd36Klw74EtizEqLsy4YnW8UWwpBVolHZA== + dependencies: + regenerator-runtime "^0.13.4" + +"@babel/template@^7.18.10", "@babel/template@^7.3.3": + version "7.18.10" + resolved "http://localhost:4873/@babel%2ftemplate/-/template-7.18.10.tgz#6f9134835970d1dbf0835c0d100c9f38de0c5e71" + integrity sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/parser" "^7.18.10" + "@babel/types" "^7.18.10" + +"@babel/traverse@^7.19.0", "@babel/traverse@^7.19.1", "@babel/traverse@^7.19.3", "@babel/traverse@^7.7.2": + version "7.19.3" + resolved "http://localhost:4873/@babel%2ftraverse/-/traverse-7.19.3.tgz#3a3c5348d4988ba60884e8494b0592b2f15a04b4" + integrity sha512-qh5yf6149zhq2sgIXmwjnsvmnNQC2iw70UFjp4olxucKrWd/dvlUsBI88VSLUsnMNF7/vnOiA+nk1+yLoCqROQ== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.19.3" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.19.0" + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/parser" "^7.19.3" + "@babel/types" "^7.19.3" + debug "^4.1.0" + globals "^11.1.0" + +"@babel/types@^7.0.0", "@babel/types@^7.12.6", "@babel/types@^7.18.10", "@babel/types@^7.18.6", "@babel/types@^7.18.9", "@babel/types@^7.19.0", "@babel/types@^7.19.3", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4": 
+ version "7.19.3" + resolved "http://localhost:4873/@babel%2ftypes/-/types-7.19.3.tgz#fc420e6bbe54880bce6779ffaf315f5e43ec9624" + integrity sha512-hGCaQzIY22DJlDh9CH7NOxgKkFjBk0Cw9xDO1Xmh2151ti7wiGfQ3LauXzL4HP1fmFlTX6XjpRETTpUcv7wQLw== + dependencies: + "@babel/helper-string-parser" "^7.18.10" + "@babel/helper-validator-identifier" "^7.19.1" + to-fast-properties "^2.0.0" + +"@bcoe/v8-coverage@^0.2.3": + version "0.2.3" + resolved "http://localhost:4873/@bcoe%2fv8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" + integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== + +"@craco/craco@^7.0.0-alpha.8": + version "7.0.0-alpha.8" + resolved "http://localhost:4873/@craco%2fcraco/-/craco-7.0.0-alpha.8.tgz#40f19f44198ff2341b40654c8c6b4f54c2217972" + integrity sha512-IN3/ldPaktGflPu342cg7n8LYa2c3x9H2XzngUkDzTjro25ig1GyVcUdnG1U0X6wrRTF9K1AxZ5su9jLbdyFUw== + dependencies: + autoprefixer "^10.4.12" + cosmiconfig "^7.0.1" + cosmiconfig-typescript-loader "^4.1.1" + cross-spawn "^7.0.3" + lodash "^4.17.21" + semver "^7.3.7" + webpack-merge "^5.8.0" + +"@csstools/normalize.css@*": + version "12.0.0" + resolved "http://localhost:4873/@csstools%2fnormalize.css/-/normalize.css-12.0.0.tgz#a9583a75c3f150667771f30b60d9f059473e62c4" + integrity sha512-M0qqxAcwCsIVfpFQSlGN5XjXWu8l5JDZN+fPt1LeW5SZexQTgnaEvgXAY+CeygRw0EeppWHi12JxESWiWrB0Sg== + +"@csstools/postcss-cascade-layers@^1.1.0": + version "1.1.1" + resolved "http://localhost:4873/@csstools%2fpostcss-cascade-layers/-/postcss-cascade-layers-1.1.1.tgz#8a997edf97d34071dd2e37ea6022447dd9e795ad" + integrity sha512-+KdYrpKC5TgomQr2DlZF4lDEpHcoxnj5IGddYYfBWJAKfj1JtuHUIqMa+E1pJJ+z3kvDViWMqyqPlG4Ja7amQA== + dependencies: + "@csstools/selector-specificity" "^2.0.2" + postcss-selector-parser "^6.0.10" + +"@csstools/postcss-color-function@^1.1.1": + version "1.1.1" + resolved 
"http://localhost:4873/@csstools%2fpostcss-color-function/-/postcss-color-function-1.1.1.tgz#2bd36ab34f82d0497cfacdc9b18d34b5e6f64b6b" + integrity sha512-Bc0f62WmHdtRDjf5f3e2STwRAl89N2CLb+9iAwzrv4L2hncrbDwnQD9PCq0gtAt7pOI2leIV08HIBUd4jxD8cw== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + +"@csstools/postcss-font-format-keywords@^1.0.1": + version "1.0.1" + resolved "http://localhost:4873/@csstools%2fpostcss-font-format-keywords/-/postcss-font-format-keywords-1.0.1.tgz#677b34e9e88ae997a67283311657973150e8b16a" + integrity sha512-ZgrlzuUAjXIOc2JueK0X5sZDjCtgimVp/O5CEqTcs5ShWBa6smhWYbS0x5cVc/+rycTDbjjzoP0KTDnUneZGOg== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-hwb-function@^1.0.2": + version "1.0.2" + resolved "http://localhost:4873/@csstools%2fpostcss-hwb-function/-/postcss-hwb-function-1.0.2.tgz#ab54a9fce0ac102c754854769962f2422ae8aa8b" + integrity sha512-YHdEru4o3Rsbjmu6vHy4UKOXZD+Rn2zmkAmLRfPet6+Jz4Ojw8cbWxe1n42VaXQhD3CQUXXTooIy8OkVbUcL+w== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-ic-unit@^1.0.1": + version "1.0.1" + resolved "http://localhost:4873/@csstools%2fpostcss-ic-unit/-/postcss-ic-unit-1.0.1.tgz#28237d812a124d1a16a5acc5c3832b040b303e58" + integrity sha512-Ot1rcwRAaRHNKC9tAqoqNZhjdYBzKk1POgWfhN4uCOE47ebGcLRqXjKkApVDpjifL6u2/55ekkpnFcp+s/OZUw== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + +"@csstools/postcss-is-pseudo-class@^2.0.7": + version "2.0.7" + resolved "http://localhost:4873/@csstools%2fpostcss-is-pseudo-class/-/postcss-is-pseudo-class-2.0.7.tgz#846ae6c0d5a1eaa878fce352c544f9c295509cd1" + integrity sha512-7JPeVVZHd+jxYdULl87lvjgvWldYu+Bc62s9vD/ED6/QTGjy0jy0US/f6BG53sVMTBJ1lzKZFpYmofBN9eaRiA== + dependencies: + "@csstools/selector-specificity" "^2.0.0" + postcss-selector-parser "^6.0.10" + +"@csstools/postcss-nested-calc@^1.0.0": + version "1.0.0" + resolved 
"http://localhost:4873/@csstools%2fpostcss-nested-calc/-/postcss-nested-calc-1.0.0.tgz#d7e9d1d0d3d15cf5ac891b16028af2a1044d0c26" + integrity sha512-JCsQsw1wjYwv1bJmgjKSoZNvf7R6+wuHDAbi5f/7MbFhl2d/+v+TvBTU4BJH3G1X1H87dHl0mh6TfYogbT/dJQ== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-normalize-display-values@^1.0.1": + version "1.0.1" + resolved "http://localhost:4873/@csstools%2fpostcss-normalize-display-values/-/postcss-normalize-display-values-1.0.1.tgz#15da54a36e867b3ac5163ee12c1d7f82d4d612c3" + integrity sha512-jcOanIbv55OFKQ3sYeFD/T0Ti7AMXc9nM1hZWu8m/2722gOTxFg7xYu4RDLJLeZmPUVQlGzo4jhzvTUq3x4ZUw== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-oklab-function@^1.1.1": + version "1.1.1" + resolved "http://localhost:4873/@csstools%2fpostcss-oklab-function/-/postcss-oklab-function-1.1.1.tgz#88cee0fbc8d6df27079ebd2fa016ee261eecf844" + integrity sha512-nJpJgsdA3dA9y5pgyb/UfEzE7W5Ka7u0CX0/HIMVBNWzWemdcTH3XwANECU6anWv/ao4vVNLTMxhiPNZsTK6iA== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + +"@csstools/postcss-progressive-custom-properties@^1.1.0", "@csstools/postcss-progressive-custom-properties@^1.3.0": + version "1.3.0" + resolved "http://localhost:4873/@csstools%2fpostcss-progressive-custom-properties/-/postcss-progressive-custom-properties-1.3.0.tgz#542292558384361776b45c85226b9a3a34f276fa" + integrity sha512-ASA9W1aIy5ygskZYuWams4BzafD12ULvSypmaLJT2jvQ8G0M3I8PRQhC0h7mG0Z3LI05+agZjqSR9+K9yaQQjA== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-stepped-value-functions@^1.0.1": + version "1.0.1" + resolved "http://localhost:4873/@csstools%2fpostcss-stepped-value-functions/-/postcss-stepped-value-functions-1.0.1.tgz#f8772c3681cc2befed695e2b0b1d68e22f08c4f4" + integrity sha512-dz0LNoo3ijpTOQqEJLY8nyaapl6umbmDcgj4AD0lgVQ572b2eqA1iGZYTTWhrcrHztWDDRAX2DGYyw2VBjvCvQ== + dependencies: + postcss-value-parser "^4.2.0" + 
+"@csstools/postcss-text-decoration-shorthand@^1.0.0": + version "1.0.0" + resolved "http://localhost:4873/@csstools%2fpostcss-text-decoration-shorthand/-/postcss-text-decoration-shorthand-1.0.0.tgz#ea96cfbc87d921eca914d3ad29340d9bcc4c953f" + integrity sha512-c1XwKJ2eMIWrzQenN0XbcfzckOLLJiczqy+YvfGmzoVXd7pT9FfObiSEfzs84bpE/VqfpEuAZ9tCRbZkZxxbdw== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-trigonometric-functions@^1.0.2": + version "1.0.2" + resolved "http://localhost:4873/@csstools%2fpostcss-trigonometric-functions/-/postcss-trigonometric-functions-1.0.2.tgz#94d3e4774c36d35dcdc88ce091336cb770d32756" + integrity sha512-woKaLO///4bb+zZC2s80l+7cm07M7268MsyG3M0ActXXEFi6SuhvriQYcb58iiKGbjwwIU7n45iRLEHypB47Og== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-unset-value@^1.0.2": + version "1.0.2" + resolved "http://localhost:4873/@csstools%2fpostcss-unset-value/-/postcss-unset-value-1.0.2.tgz#c99bb70e2cdc7312948d1eb41df2412330b81f77" + integrity sha512-c8J4roPBILnelAsdLr4XOAR/GsTm0GJi4XpcfvoWk3U6KiTCqiFYc63KhRMQQX35jYMp4Ao8Ij9+IZRgMfJp1g== + +"@csstools/selector-specificity@^2.0.0", "@csstools/selector-specificity@^2.0.2": + version "2.0.2" + resolved "http://localhost:4873/@csstools%2fselector-specificity/-/selector-specificity-2.0.2.tgz#1bfafe4b7ed0f3e4105837e056e0a89b108ebe36" + integrity sha512-IkpVW/ehM1hWKln4fCA3NzJU8KwD+kIOvPZA4cqxoJHtE21CCzjyp+Kxbu0i5I4tBNOlXPL9mjwnWlL0VEG4Fg== + +"@eslint/eslintrc@^1.3.2": + version "1.3.2" + resolved "http://localhost:4873/@eslint%2feslintrc/-/eslintrc-1.3.2.tgz#58b69582f3b7271d8fa67fe5251767a5b38ea356" + integrity sha512-AXYd23w1S/bv3fTs3Lz0vjiYemS08jWkI3hYyS9I1ry+0f+Yjs1wm+sU0BS8qDOPrBIkp4qHYC16I8uVtpLajQ== + dependencies: + ajv "^6.12.4" + debug "^4.3.2" + espree "^9.4.0" + globals "^13.15.0" + ignore "^5.2.0" + import-fresh "^3.2.1" + js-yaml "^4.1.0" + minimatch "^3.1.2" + strip-json-comments "^3.1.1" + +"@humanwhocodes/config-array@^0.10.5": + version "0.10.7" + resolved 
"http://localhost:4873/@humanwhocodes%2fconfig-array/-/config-array-0.10.7.tgz#6d53769fd0c222767e6452e8ebda825c22e9f0dc" + integrity sha512-MDl6D6sBsaV452/QSdX+4CXIjZhIcI0PELsxUjk4U828yd58vk3bTIvk/6w5FY+4hIy9sLW0sfrV7K7Kc++j/w== + dependencies: + "@humanwhocodes/object-schema" "^1.2.1" + debug "^4.1.1" + minimatch "^3.0.4" + +"@humanwhocodes/gitignore-to-minimatch@^1.0.2": + version "1.0.2" + resolved "http://localhost:4873/@humanwhocodes%2fgitignore-to-minimatch/-/gitignore-to-minimatch-1.0.2.tgz#316b0a63b91c10e53f242efb4ace5c3b34e8728d" + integrity sha512-rSqmMJDdLFUsyxR6FMtD00nfQKKLFb1kv+qBbOVKqErvloEIJLo5bDTJTQNTYgeyp78JsA7u/NPi5jT1GR/MuA== + +"@humanwhocodes/module-importer@^1.0.1": + version "1.0.1" + resolved "http://localhost:4873/@humanwhocodes%2fmodule-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c" + integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== + +"@humanwhocodes/object-schema@^1.2.1": + version "1.2.1" + resolved "http://localhost:4873/@humanwhocodes%2fobject-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" + integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== + +"@istanbuljs/load-nyc-config@^1.0.0": + version "1.1.0" + resolved "http://localhost:4873/@istanbuljs%2fload-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" + integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== + dependencies: + camelcase "^5.3.1" + find-up "^4.1.0" + get-package-type "^0.1.0" + js-yaml "^3.13.1" + resolve-from "^5.0.0" + +"@istanbuljs/schema@^0.1.2": + version "0.1.3" + resolved "http://localhost:4873/@istanbuljs%2fschema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" + integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== + +"@jest/console@^27.5.1": + 
version "27.5.1" + resolved "http://localhost:4873/@jest%2fconsole/-/console-27.5.1.tgz#260fe7239602fe5130a94f1aa386eff54b014bba" + integrity sha512-kZ/tNpS3NXn0mlXXXPNuDZnb4c0oZ20r4K5eemM2k30ZC3G0T02nXUvyhf5YdbXWHPEJLc9qGLxEZ216MdL+Zg== + dependencies: + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + jest-message-util "^27.5.1" + jest-util "^27.5.1" + slash "^3.0.0" + +"@jest/console@^28.1.3": + version "28.1.3" + resolved "http://localhost:4873/@jest%2fconsole/-/console-28.1.3.tgz#2030606ec03a18c31803b8a36382762e447655df" + integrity sha512-QPAkP5EwKdK/bxIr6C1I4Vs0rm2nHiANzj/Z5X2JQkrZo6IqvC4ldZ9K95tF0HdidhA8Bo6egxSzUFPYKcEXLw== + dependencies: + "@jest/types" "^28.1.3" + "@types/node" "*" + chalk "^4.0.0" + jest-message-util "^28.1.3" + jest-util "^28.1.3" + slash "^3.0.0" + +"@jest/core@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2fcore/-/core-27.5.1.tgz#267ac5f704e09dc52de2922cbf3af9edcd64b626" + integrity sha512-AK6/UTrvQD0Cd24NSqmIA6rKsu0tKIxfiCducZvqxYdmMisOYAsdItspT+fQDQYARPf8XgjAFZi0ogW2agH5nQ== + dependencies: + "@jest/console" "^27.5.1" + "@jest/reporters" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + emittery "^0.8.1" + exit "^0.1.2" + graceful-fs "^4.2.9" + jest-changed-files "^27.5.1" + jest-config "^27.5.1" + jest-haste-map "^27.5.1" + jest-message-util "^27.5.1" + jest-regex-util "^27.5.1" + jest-resolve "^27.5.1" + jest-resolve-dependencies "^27.5.1" + jest-runner "^27.5.1" + jest-runtime "^27.5.1" + jest-snapshot "^27.5.1" + jest-util "^27.5.1" + jest-validate "^27.5.1" + jest-watcher "^27.5.1" + micromatch "^4.0.4" + rimraf "^3.0.0" + slash "^3.0.0" + strip-ansi "^6.0.0" + +"@jest/environment@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2fenvironment/-/environment-27.5.1.tgz#d7425820511fe7158abbecc010140c3fd3be9c74" + integrity 
sha512-/WQjhPJe3/ghaol/4Bq480JKXV/Rfw8nQdN7f41fM8VDHLcxKXou6QyXAh3EFr9/bVG3x74z1NWDkP87EiY8gA== + dependencies: + "@jest/fake-timers" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + jest-mock "^27.5.1" + +"@jest/expect-utils@^29.1.2": + version "29.1.2" + resolved "http://localhost:4873/@jest%2fexpect-utils/-/expect-utils-29.1.2.tgz#66dbb514d38f7d21456bc774419c9ae5cca3f88d" + integrity sha512-4a48bhKfGj/KAH39u0ppzNTABXQ8QPccWAFUFobWBaEMSMp+sB31Z2fK/l47c4a/Mu1po2ffmfAIPxXbVTXdtg== + dependencies: + jest-get-type "^29.0.0" + +"@jest/fake-timers@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2ffake-timers/-/fake-timers-27.5.1.tgz#76979745ce0579c8a94a4678af7a748eda8ada74" + integrity sha512-/aPowoolwa07k7/oM3aASneNeBGCmGQsc3ugN4u6s4C/+s5M64MFo/+djTdiwcbQlRfFElGuDXWzaWj6QgKObQ== + dependencies: + "@jest/types" "^27.5.1" + "@sinonjs/fake-timers" "^8.0.1" + "@types/node" "*" + jest-message-util "^27.5.1" + jest-mock "^27.5.1" + jest-util "^27.5.1" + +"@jest/globals@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2fglobals/-/globals-27.5.1.tgz#7ac06ce57ab966566c7963431cef458434601b2b" + integrity sha512-ZEJNB41OBQQgGzgyInAv0UUfDDj3upmHydjieSxFvTRuZElrx7tXg/uVQ5hYVEwiXs3+aMsAeEc9X7xiSKCm4Q== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/types" "^27.5.1" + expect "^27.5.1" + +"@jest/reporters@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2freporters/-/reporters-27.5.1.tgz#ceda7be96170b03c923c37987b64015812ffec04" + integrity sha512-cPXh9hWIlVJMQkVk84aIvXuBB4uQQmFqZiacloFuGiP3ah1sbCxCosidXFDfqG8+6fO1oR2dTJTlsOy4VFmUfw== + dependencies: + "@bcoe/v8-coverage" "^0.2.3" + "@jest/console" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + collect-v8-coverage "^1.0.0" + exit "^0.1.2" + glob "^7.1.2" + graceful-fs "^4.2.9" + istanbul-lib-coverage "^3.0.0" + istanbul-lib-instrument "^5.1.0" + istanbul-lib-report 
"^3.0.0" + istanbul-lib-source-maps "^4.0.0" + istanbul-reports "^3.1.3" + jest-haste-map "^27.5.1" + jest-resolve "^27.5.1" + jest-util "^27.5.1" + jest-worker "^27.5.1" + slash "^3.0.0" + source-map "^0.6.0" + string-length "^4.0.1" + terminal-link "^2.0.0" + v8-to-istanbul "^8.1.0" + +"@jest/schemas@^28.1.3": + version "28.1.3" + resolved "http://localhost:4873/@jest%2fschemas/-/schemas-28.1.3.tgz#ad8b86a66f11f33619e3d7e1dcddd7f2d40ff905" + integrity sha512-/l/VWsdt/aBXgjshLWOFyFt3IVdYypu5y2Wn2rOO1un6nkqIn8SLXzgIMYXFyYsRWDyF5EthmKJMIdJvk08grg== + dependencies: + "@sinclair/typebox" "^0.24.1" + +"@jest/schemas@^29.0.0": + version "29.0.0" + resolved "http://localhost:4873/@jest%2fschemas/-/schemas-29.0.0.tgz#5f47f5994dd4ef067fb7b4188ceac45f77fe952a" + integrity sha512-3Ab5HgYIIAnS0HjqJHQYZS+zXc4tUmTmBH3z83ajI6afXp8X3ZtdLX+nXx+I7LNkJD7uN9LAVhgnjDgZa2z0kA== + dependencies: + "@sinclair/typebox" "^0.24.1" + +"@jest/source-map@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2fsource-map/-/source-map-27.5.1.tgz#6608391e465add4205eae073b55e7f279e04e8cf" + integrity sha512-y9NIHUYF3PJRlHk98NdC/N1gl88BL08aQQgu4k4ZopQkCw9t9cV8mtl3TV8b/YCB8XaVTFrmUTAJvjsntDireg== + dependencies: + callsites "^3.0.0" + graceful-fs "^4.2.9" + source-map "^0.6.0" + +"@jest/test-result@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2ftest-result/-/test-result-27.5.1.tgz#56a6585fa80f7cdab72b8c5fc2e871d03832f5bb" + integrity sha512-EW35l2RYFUcUQxFJz5Cv5MTOxlJIQs4I7gxzi2zVU7PJhOwfYq1MdC5nhSmYjX1gmMmLPvB3sIaC+BkcHRBfag== + dependencies: + "@jest/console" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/istanbul-lib-coverage" "^2.0.0" + collect-v8-coverage "^1.0.0" + +"@jest/test-result@^28.1.3": + version "28.1.3" + resolved "http://localhost:4873/@jest%2ftest-result/-/test-result-28.1.3.tgz#5eae945fd9f4b8fcfce74d239e6f725b6bf076c5" + integrity sha512-kZAkxnSE+FqE8YjW8gNuoVkkC9I7S1qmenl8sGcDOLropASP+BkcGKwhXoyqQuGOGeYY0y/ixjrd/iERpEXHNg== + dependencies: + 
"@jest/console" "^28.1.3" + "@jest/types" "^28.1.3" + "@types/istanbul-lib-coverage" "^2.0.0" + collect-v8-coverage "^1.0.0" + +"@jest/test-sequencer@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2ftest-sequencer/-/test-sequencer-27.5.1.tgz#4057e0e9cea4439e544c6353c6affe58d095745b" + integrity sha512-LCheJF7WB2+9JuCS7VB/EmGIdQuhtqjRNI9A43idHv3E4KltCTsPsLxvdaubFHSYwY/fNjMWjl6vNRhDiN7vpQ== + dependencies: + "@jest/test-result" "^27.5.1" + graceful-fs "^4.2.9" + jest-haste-map "^27.5.1" + jest-runtime "^27.5.1" + +"@jest/transform@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2ftransform/-/transform-27.5.1.tgz#6c3501dcc00c4c08915f292a600ece5ecfe1f409" + integrity sha512-ipON6WtYgl/1329g5AIJVbUuEh0wZVbdpGwC99Jw4LwuoBNS95MVphU6zOeD9pDkon+LLbFL7lOQRapbB8SCHw== + dependencies: + "@babel/core" "^7.1.0" + "@jest/types" "^27.5.1" + babel-plugin-istanbul "^6.1.1" + chalk "^4.0.0" + convert-source-map "^1.4.0" + fast-json-stable-stringify "^2.0.0" + graceful-fs "^4.2.9" + jest-haste-map "^27.5.1" + jest-regex-util "^27.5.1" + jest-util "^27.5.1" + micromatch "^4.0.4" + pirates "^4.0.4" + slash "^3.0.0" + source-map "^0.6.1" + write-file-atomic "^3.0.0" + +"@jest/types@^27.5.1": + version "27.5.1" + resolved "http://localhost:4873/@jest%2ftypes/-/types-27.5.1.tgz#3c79ec4a8ba61c170bf937bcf9e98a9df175ec80" + integrity sha512-Cx46iJ9QpwQTjIdq5VJu2QTMMs3QlEjI0x1QbBP5W1+nMzyc2XmimiRR/CbX9TO0cPTeUlxWMOu8mslYsJ8DEw== + dependencies: + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^16.0.0" + chalk "^4.0.0" + +"@jest/types@^28.1.3": + version "28.1.3" + resolved "http://localhost:4873/@jest%2ftypes/-/types-28.1.3.tgz#b05de80996ff12512bc5ceb1d208285a7d11748b" + integrity sha512-RyjiyMUZrKz/c+zlMFO1pm70DcIlST8AeWTkoUdZevew44wcNZQHsEVOiCVtgVnlFFD82FPaXycys58cf2muVQ== + dependencies: + "@jest/schemas" "^28.1.3" + "@types/istanbul-lib-coverage" "^2.0.0" + 
"@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^17.0.8" + chalk "^4.0.0" + +"@jest/types@^29.1.2": + version "29.1.2" + resolved "http://localhost:4873/@jest%2ftypes/-/types-29.1.2.tgz#7442d32b16bcd7592d9614173078b8c334ec730a" + integrity sha512-DcXGtoTykQB5jiwCmVr8H4vdg2OJhQex3qPkG+ISyDO7xQXbt/4R6dowcRyPemRnkH7JoHvZuxPBdlq+9JxFCg== + dependencies: + "@jest/schemas" "^29.0.0" + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^17.0.8" + chalk "^4.0.0" + +"@jridgewell/gen-mapping@^0.1.0": + version "0.1.1" + resolved "http://localhost:4873/@jridgewell%2fgen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" + integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== + dependencies: + "@jridgewell/set-array" "^1.0.0" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@jridgewell/gen-mapping@^0.3.0", "@jridgewell/gen-mapping@^0.3.2": + version "0.3.2" + resolved "http://localhost:4873/@jridgewell%2fgen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" + integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/resolve-uri@^3.0.3": + version "3.1.0" + resolved "http://localhost:4873/@jridgewell%2fresolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" + integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== + +"@jridgewell/set-array@^1.0.0", "@jridgewell/set-array@^1.0.1": + version "1.1.2" + resolved "http://localhost:4873/@jridgewell%2fset-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== + 
+"@jridgewell/source-map@^0.3.2": + version "0.3.2" + resolved "http://localhost:4873/@jridgewell%2fsource-map/-/source-map-0.3.2.tgz#f45351aaed4527a298512ec72f81040c998580fb" + integrity sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw== + dependencies: + "@jridgewell/gen-mapping" "^0.3.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/sourcemap-codec@^1.4.10": + version "1.4.14" + resolved "http://localhost:4873/@jridgewell%2fsourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" + integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== + +"@jridgewell/trace-mapping@^0.3.14", "@jridgewell/trace-mapping@^0.3.9": + version "0.3.15" + resolved "http://localhost:4873/@jridgewell%2ftrace-mapping/-/trace-mapping-0.3.15.tgz#aba35c48a38d3fd84b37e66c9c0423f9744f9774" + integrity sha512-oWZNOULl+UbhsgB51uuZzglikfIKSUBO/M9W2OfEjn7cmqoAiCgmv9lyACTUacZwBz0ITnJ2NqjU8Tx0DHL88g== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@leichtgewicht/ip-codec@^2.0.1": + version "2.0.4" + resolved "http://localhost:4873/@leichtgewicht%2fip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b" + integrity sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A== + +"@nicolo-ribaudo/eslint-scope-5-internals@5.1.1-v1": + version "5.1.1-v1" + resolved "http://localhost:4873/@nicolo-ribaudo%2feslint-scope-5-internals/-/eslint-scope-5-internals-5.1.1-v1.tgz#dbf733a965ca47b1973177dc0bb6c889edcfb129" + integrity sha512-54/JRvkLIzzDWshCWfuhadfrfZVPiElY8Fcgmg1HroEly/EDSszzhBAsarCux+D/kOslTRquNzuyGSmUSTTHGg== + dependencies: + eslint-scope "5.1.1" + +"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "http://localhost:4873/@nodelib%2ffs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + integrity 
sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== + dependencies: + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" + +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "http://localhost:4873/@nodelib%2ffs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + +"@nodelib/fs.walk@^1.2.3": + version "1.2.8" + resolved "http://localhost:4873/@nodelib%2ffs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== + dependencies: + "@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" + +"@pmmmwh/react-refresh-webpack-plugin@^0.5.3": + version "0.5.7" + resolved "http://localhost:4873/@pmmmwh%2freact-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.5.7.tgz#58f8217ba70069cc6a73f5d7e05e85b458c150e2" + integrity sha512-bcKCAzF0DV2IIROp9ZHkRJa6O4jy7NlnHdWL3GmcUxYWNjLXkK5kfELELwEfSP5hXPfVL/qOGMAROuMQb9GG8Q== + dependencies: + ansi-html-community "^0.0.8" + common-path-prefix "^3.0.0" + core-js-pure "^3.8.1" + error-stack-parser "^2.0.6" + find-up "^5.0.0" + html-entities "^2.1.0" + loader-utils "^2.0.0" + schema-utils "^3.0.0" + source-map "^0.7.3" + +"@rollup/plugin-babel@^5.2.0": + version "5.3.1" + resolved "http://localhost:4873/@rollup%2fplugin-babel/-/plugin-babel-5.3.1.tgz#04bc0608f4aa4b2e4b1aebf284344d0f68fda283" + integrity sha512-WFfdLWU/xVWKeRQnKmIAQULUI7Il0gZnBIH/ZFO069wYIfPu+8zrfp/KMW0atmELoRDq8FbiP3VCss9MhCut7Q== + dependencies: + "@babel/helper-module-imports" "^7.10.4" + "@rollup/pluginutils" "^3.1.0" + +"@rollup/plugin-node-resolve@^11.2.1": + version "11.2.1" + resolved "http://localhost:4873/@rollup%2fplugin-node-resolve/-/plugin-node-resolve-11.2.1.tgz#82aa59397a29cd4e13248b106e6a4a1880362a60" + integrity 
sha512-yc2n43jcqVyGE2sqV5/YCmocy9ArjVAP/BeXyTtADTBBX6V0e5UMqwO8CdQ0kzjb6zu5P1qMzsScCMRvE9OlVg== + dependencies: + "@rollup/pluginutils" "^3.1.0" + "@types/resolve" "1.17.1" + builtin-modules "^3.1.0" + deepmerge "^4.2.2" + is-module "^1.0.0" + resolve "^1.19.0" + +"@rollup/plugin-replace@^2.4.1": + version "2.4.2" + resolved "http://localhost:4873/@rollup%2fplugin-replace/-/plugin-replace-2.4.2.tgz#a2d539314fbc77c244858faa523012825068510a" + integrity sha512-IGcu+cydlUMZ5En85jxHH4qj2hta/11BHq95iHEyb2sbgiN0eCdzvUcHw5gt9pBL5lTi4JDYJ1acCoMGpTvEZg== + dependencies: + "@rollup/pluginutils" "^3.1.0" + magic-string "^0.25.7" + +"@rollup/pluginutils@^3.1.0": + version "3.1.0" + resolved "http://localhost:4873/@rollup%2fpluginutils/-/pluginutils-3.1.0.tgz#706b4524ee6dc8b103b3c995533e5ad680c02b9b" + integrity sha512-GksZ6pr6TpIjHm8h9lSQ8pi8BE9VeubNT0OMJ3B5uZJ8pz73NPiqOtCog/x2/QzM1ENChPKxMDhiQuRHsqc+lg== + dependencies: + "@types/estree" "0.0.39" + estree-walker "^1.0.1" + picomatch "^2.2.2" + +"@rushstack/eslint-patch@^1.1.0": + version "1.2.0" + resolved "http://localhost:4873/@rushstack%2feslint-patch/-/eslint-patch-1.2.0.tgz#8be36a1f66f3265389e90b5f9c9962146758f728" + integrity sha512-sXo/qW2/pAcmT43VoRKOJbDOfV3cYpq3szSVfIThQXNt+E4DfKj361vaAt3c88U5tPUxzEswam7GW48PJqtKAg== + +"@sinclair/typebox@^0.24.1": + version "0.24.44" + resolved "http://localhost:4873/@sinclair%2ftypebox/-/typebox-0.24.44.tgz#0a0aa3bf4a155a678418527342a3ee84bd8caa5c" + integrity sha512-ka0W0KN5i6LfrSocduwliMMpqVgohtPFidKdMEOUjoOFCHcOOYkKsPRxfs5f15oPNHTm6ERAm0GV/+/LTKeiWg== + +"@sinonjs/commons@^1.7.0": + version "1.8.3" + resolved "http://localhost:4873/@sinonjs%2fcommons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" + integrity sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ== + dependencies: + type-detect "4.0.8" + +"@sinonjs/fake-timers@^8.0.1": + version "8.1.0" + resolved 
"http://localhost:4873/@sinonjs%2ffake-timers/-/fake-timers-8.1.0.tgz#3fdc2b6cb58935b21bfb8d1625eb1300484316e7" + integrity sha512-OAPJUAtgeINhh/TAlUID4QTs53Njm7xzddaVlEs/SXwgtiD1tW22zAB/W1wdqfrpmikgaWQ9Fw6Ws+hsiRm5Vg== + dependencies: + "@sinonjs/commons" "^1.7.0" + +"@surma/rollup-plugin-off-main-thread@^2.2.3": + version "2.2.3" + resolved "http://localhost:4873/@surma%2frollup-plugin-off-main-thread/-/rollup-plugin-off-main-thread-2.2.3.tgz#ee34985952ca21558ab0d952f00298ad2190c053" + integrity sha512-lR8q/9W7hZpMWweNiAKU7NQerBnzQQLvi8qnTDU/fxItPhtZVMbPV3lbCwjhIlNBe9Bbr5V+KHshvWmVSG9cxQ== + dependencies: + ejs "^3.1.6" + json5 "^2.2.0" + magic-string "^0.25.0" + string.prototype.matchall "^4.0.6" + +"@svgr/babel-plugin-add-jsx-attribute@^5.4.0": + version "5.4.0" + resolved "http://localhost:4873/@svgr%2fbabel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-5.4.0.tgz#81ef61947bb268eb9d50523446f9c638fb355906" + integrity sha512-ZFf2gs/8/6B8PnSofI0inYXr2SDNTDScPXhN7k5EqD4aZ3gi6u+rbmZHVB8IM3wDyx8ntKACZbtXSm7oZGRqVg== + +"@svgr/babel-plugin-remove-jsx-attribute@^5.4.0": + version "5.4.0" + resolved "http://localhost:4873/@svgr%2fbabel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-5.4.0.tgz#6b2c770c95c874654fd5e1d5ef475b78a0a962ef" + integrity sha512-yaS4o2PgUtwLFGTKbsiAy6D0o3ugcUhWK0Z45umJ66EPWunAz9fuFw2gJuje6wqQvQWOTJvIahUwndOXb7QCPg== + +"@svgr/babel-plugin-remove-jsx-empty-expression@^5.0.1": + version "5.0.1" + resolved "http://localhost:4873/@svgr%2fbabel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-5.0.1.tgz#25621a8915ed7ad70da6cea3d0a6dbc2ea933efd" + integrity sha512-LA72+88A11ND/yFIMzyuLRSMJ+tRKeYKeQ+mR3DcAZ5I4h5CPWN9AHyUzJbWSYp/u2u0xhmgOe0+E41+GjEueA== + +"@svgr/babel-plugin-replace-jsx-attribute-value@^5.0.1": + version "5.0.1" + resolved 
"http://localhost:4873/@svgr%2fbabel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-5.0.1.tgz#0b221fc57f9fcd10e91fe219e2cd0dd03145a897" + integrity sha512-PoiE6ZD2Eiy5mK+fjHqwGOS+IXX0wq/YDtNyIgOrc6ejFnxN4b13pRpiIPbtPwHEc+NT2KCjteAcq33/F1Y9KQ== + +"@svgr/babel-plugin-svg-dynamic-title@^5.4.0": + version "5.4.0" + resolved "http://localhost:4873/@svgr%2fbabel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-5.4.0.tgz#139b546dd0c3186b6e5db4fefc26cb0baea729d7" + integrity sha512-zSOZH8PdZOpuG1ZVx/cLVePB2ibo3WPpqo7gFIjLV9a0QsuQAzJiwwqmuEdTaW2pegyBE17Uu15mOgOcgabQZg== + +"@svgr/babel-plugin-svg-em-dimensions@^5.4.0": + version "5.4.0" + resolved "http://localhost:4873/@svgr%2fbabel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-5.4.0.tgz#6543f69526632a133ce5cabab965deeaea2234a0" + integrity sha512-cPzDbDA5oT/sPXDCUYoVXEmm3VIoAWAPT6mSPTJNbQaBNUuEKVKyGH93oDY4e42PYHRW67N5alJx/eEol20abw== + +"@svgr/babel-plugin-transform-react-native-svg@^5.4.0": + version "5.4.0" + resolved "http://localhost:4873/@svgr%2fbabel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-5.4.0.tgz#00bf9a7a73f1cad3948cdab1f8dfb774750f8c80" + integrity sha512-3eYP/SaopZ41GHwXma7Rmxcv9uRslRDTY1estspeB1w1ueZWd/tPlMfEOoccYpEMZU3jD4OU7YitnXcF5hLW2Q== + +"@svgr/babel-plugin-transform-svg-component@^5.5.0": + version "5.5.0" + resolved "http://localhost:4873/@svgr%2fbabel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-5.5.0.tgz#583a5e2a193e214da2f3afeb0b9e8d3250126b4a" + integrity sha512-q4jSH1UUvbrsOtlo/tKcgSeiCHRSBdXoIoqX1pgcKK/aU3JD27wmMKwGtpB8qRYUYoyXvfGxUVKchLuR5pB3rQ== + +"@svgr/babel-preset@^5.5.0": + version "5.5.0" + resolved "http://localhost:4873/@svgr%2fbabel-preset/-/babel-preset-5.5.0.tgz#8af54f3e0a8add7b1e2b0fcd5a882c55393df327" + integrity sha512-4FiXBjvQ+z2j7yASeGPEi8VD/5rrGQk4Xrq3EdJmoZgz/tpqChpo5hgXDvmEauwtvOc52q8ghhZK4Oy7qph4ig== + dependencies: + "@svgr/babel-plugin-add-jsx-attribute" 
"^5.4.0" + "@svgr/babel-plugin-remove-jsx-attribute" "^5.4.0" + "@svgr/babel-plugin-remove-jsx-empty-expression" "^5.0.1" + "@svgr/babel-plugin-replace-jsx-attribute-value" "^5.0.1" + "@svgr/babel-plugin-svg-dynamic-title" "^5.4.0" + "@svgr/babel-plugin-svg-em-dimensions" "^5.4.0" + "@svgr/babel-plugin-transform-react-native-svg" "^5.4.0" + "@svgr/babel-plugin-transform-svg-component" "^5.5.0" + +"@svgr/core@^5.5.0": + version "5.5.0" + resolved "http://localhost:4873/@svgr%2fcore/-/core-5.5.0.tgz#82e826b8715d71083120fe8f2492ec7d7874a579" + integrity sha512-q52VOcsJPvV3jO1wkPtzTuKlvX7Y3xIcWRpCMtBF3MrteZJtBfQw/+u0B1BHy5ColpQc1/YVTrPEtSYIMNZlrQ== + dependencies: + "@svgr/plugin-jsx" "^5.5.0" + camelcase "^6.2.0" + cosmiconfig "^7.0.0" + +"@svgr/hast-util-to-babel-ast@^5.5.0": + version "5.5.0" + resolved "http://localhost:4873/@svgr%2fhast-util-to-babel-ast/-/hast-util-to-babel-ast-5.5.0.tgz#5ee52a9c2533f73e63f8f22b779f93cd432a5461" + integrity sha512-cAaR/CAiZRB8GP32N+1jocovUtvlj0+e65TB50/6Lcime+EA49m/8l+P2ko+XPJ4dw3xaPS3jOL4F2X4KWxoeQ== + dependencies: + "@babel/types" "^7.12.6" + +"@svgr/plugin-jsx@^5.5.0": + version "5.5.0" + resolved "http://localhost:4873/@svgr%2fplugin-jsx/-/plugin-jsx-5.5.0.tgz#1aa8cd798a1db7173ac043466d7b52236b369000" + integrity sha512-V/wVh33j12hGh05IDg8GpIUXbjAPnTdPTKuP4VNLggnwaHMPNQNae2pRnyTAILWCQdz5GyMqtO488g7CKM8CBA== + dependencies: + "@babel/core" "^7.12.3" + "@svgr/babel-preset" "^5.5.0" + "@svgr/hast-util-to-babel-ast" "^5.5.0" + svg-parser "^2.0.2" + +"@svgr/plugin-svgo@^5.5.0": + version "5.5.0" + resolved "http://localhost:4873/@svgr%2fplugin-svgo/-/plugin-svgo-5.5.0.tgz#02da55d85320549324e201c7b2e53bf431fcc246" + integrity sha512-r5swKk46GuQl4RrVejVwpeeJaydoxkdwkM1mBKOgJLBUJPGaLci6ylg/IjhrRsREKDkr4kbMWdgOtbXEh0fyLQ== + dependencies: + cosmiconfig "^7.0.0" + deepmerge "^4.2.2" + svgo "^1.2.2" + +"@svgr/webpack@^5.5.0": + version "5.5.0" + resolved 
"http://localhost:4873/@svgr%2fwebpack/-/webpack-5.5.0.tgz#aae858ee579f5fa8ce6c3166ef56c6a1b381b640" + integrity sha512-DOBOK255wfQxguUta2INKkzPj6AIS6iafZYiYmHn6W3pHlycSRRlvWKCfLDG10fXfLWqE3DJHgRUOyJYmARa7g== + dependencies: + "@babel/core" "^7.12.3" + "@babel/plugin-transform-react-constant-elements" "^7.12.1" + "@babel/preset-env" "^7.12.1" + "@babel/preset-react" "^7.12.5" + "@svgr/core" "^5.5.0" + "@svgr/plugin-jsx" "^5.5.0" + "@svgr/plugin-svgo" "^5.5.0" + loader-utils "^2.0.0" + +"@testing-library/dom@^8.5.0": + version "8.18.1" + resolved "http://localhost:4873/@testing-library%2fdom/-/dom-8.18.1.tgz#80f91be02bc171fe5a3a7003f88207be31ac2cf3" + integrity sha512-oEvsm2B/WtcHKE+IcEeeCqNU/ltFGaVyGbpcm4g/2ytuT49jrlH9x5qRKL/H3A6yfM4YAbSbC0ceT5+9CEXnLg== + dependencies: + "@babel/code-frame" "^7.10.4" + "@babel/runtime" "^7.12.5" + "@types/aria-query" "^4.2.0" + aria-query "^5.0.0" + chalk "^4.1.0" + dom-accessibility-api "^0.5.9" + lz-string "^1.4.4" + pretty-format "^27.0.2" + +"@testing-library/jest-dom@^5.16.5": + version "5.16.5" + resolved "http://localhost:4873/@testing-library%2fjest-dom/-/jest-dom-5.16.5.tgz#3912846af19a29b2dbf32a6ae9c31ef52580074e" + integrity sha512-N5ixQ2qKpi5OLYfwQmUb/5mSV9LneAcaUfp32pn4yCnpb8r/Yz0pXFPck21dIicKmi+ta5WRAknkZCfA8refMA== + dependencies: + "@adobe/css-tools" "^4.0.1" + "@babel/runtime" "^7.9.2" + "@types/testing-library__jest-dom" "^5.9.1" + aria-query "^5.0.0" + chalk "^3.0.0" + css.escape "^1.5.1" + dom-accessibility-api "^0.5.6" + lodash "^4.17.15" + redent "^3.0.0" + +"@testing-library/react@^13.4.0": + version "13.4.0" + resolved "http://localhost:4873/@testing-library%2freact/-/react-13.4.0.tgz#6a31e3bf5951615593ad984e96b9e5e2d9380966" + integrity sha512-sXOGON+WNTh3MLE9rve97ftaZukN3oNf2KjDy7YTx6hcTO2uuLHuCGynMDhFwGw/jYf4OJ2Qk0i4i79qMNNkyw== + dependencies: + "@babel/runtime" "^7.12.5" + "@testing-library/dom" "^8.5.0" + "@types/react-dom" "^18.0.0" + +"@testing-library/user-event@^13.5.0": + version "13.5.0" + 
resolved "http://localhost:4873/@testing-library%2fuser-event/-/user-event-13.5.0.tgz#69d77007f1e124d55314a2b73fd204b333b13295" + integrity sha512-5Kwtbo3Y/NowpkbRuSepbyMFkZmHgD+vPzYB/RJ4oxt5Gj/avFFBYjhw27cqSVPVw/3a67NK1PbiIr9k4Gwmdg== + dependencies: + "@babel/runtime" "^7.12.5" + +"@tootallnate/once@1": + version "1.1.2" + resolved "http://localhost:4873/@tootallnate%2fonce/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" + integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== + +"@trysound/sax@0.2.0": + version "0.2.0" + resolved "http://localhost:4873/@trysound%2fsax/-/sax-0.2.0.tgz#cccaab758af56761eb7bf37af6f03f326dd798ad" + integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA== + +"@types/aria-query@^4.2.0": + version "4.2.2" + resolved "http://localhost:4873/@types%2faria-query/-/aria-query-4.2.2.tgz#ed4e0ad92306a704f9fb132a0cfcf77486dbe2bc" + integrity sha512-HnYpAE1Y6kRyKM/XkEuiRQhTHvkzMBurTHnpFLYLBGPIylZNPs9jJcuOOYWxPLJCSEtmZT0Y8rHDokKN7rRTig== + +"@types/babel__core@^7.0.0", "@types/babel__core@^7.1.14": + version "7.1.19" + resolved "http://localhost:4873/@types%2fbabel__core/-/babel__core-7.1.19.tgz#7b497495b7d1b4812bdb9d02804d0576f43ee460" + integrity sha512-WEOTgRsbYkvA/KCsDwVEGkd7WAr1e3g31VHQ8zy5gul/V1qKullU/BU5I68X5v7V3GnB9eotmom4v5a5gjxorw== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + "@types/babel__generator" "*" + "@types/babel__template" "*" + "@types/babel__traverse" "*" + +"@types/babel__generator@*": + version "7.6.4" + resolved "http://localhost:4873/@types%2fbabel__generator/-/babel__generator-7.6.4.tgz#1f20ce4c5b1990b37900b63f050182d28c2439b7" + integrity sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg== + dependencies: + "@babel/types" "^7.0.0" + +"@types/babel__template@*": + version "7.4.1" + resolved 
"http://localhost:4873/@types%2fbabel__template/-/babel__template-7.4.1.tgz#3d1a48fd9d6c0edfd56f2ff578daed48f36c8969" + integrity sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + +"@types/babel__traverse@*", "@types/babel__traverse@^7.0.4", "@types/babel__traverse@^7.0.6": + version "7.18.2" + resolved "http://localhost:4873/@types%2fbabel__traverse/-/babel__traverse-7.18.2.tgz#235bf339d17185bdec25e024ca19cce257cc7309" + integrity sha512-FcFaxOr2V5KZCviw1TnutEMVUVsGt4D2hP1TAfXZAMKuHYW3xQhe3jTxNPWutgCJ3/X1c5yX8ZoGVEItxKbwBg== + dependencies: + "@babel/types" "^7.3.0" + +"@types/body-parser@*": + version "1.19.2" + resolved "http://localhost:4873/@types%2fbody-parser/-/body-parser-1.19.2.tgz#aea2059e28b7658639081347ac4fab3de166e6f0" + integrity sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g== + dependencies: + "@types/connect" "*" + "@types/node" "*" + +"@types/bonjour@^3.5.9": + version "3.5.10" + resolved "http://localhost:4873/@types%2fbonjour/-/bonjour-3.5.10.tgz#0f6aadfe00ea414edc86f5d106357cda9701e275" + integrity sha512-p7ienRMiS41Nu2/igbJxxLDWrSZ0WxM8UQgCeO9KhoVF7cOVFkrKsiDr1EsJIla8vV3oEEjGcz11jc5yimhzZw== + dependencies: + "@types/node" "*" + +"@types/connect-history-api-fallback@^1.3.5": + version "1.3.5" + resolved "http://localhost:4873/@types%2fconnect-history-api-fallback/-/connect-history-api-fallback-1.3.5.tgz#d1f7a8a09d0ed5a57aee5ae9c18ab9b803205dae" + integrity sha512-h8QJa8xSb1WD4fpKBDcATDNGXghFj6/3GRWG6dhmRcu0RX1Ubasur2Uvx5aeEwlf0MwblEC2bMzzMQntxnw/Cw== + dependencies: + "@types/express-serve-static-core" "*" + "@types/node" "*" + +"@types/connect@*": + version "3.4.35" + resolved "http://localhost:4873/@types%2fconnect/-/connect-3.4.35.tgz#5fcf6ae445e4021d1fc2219a4873cc73a3bb2ad1" + integrity 
sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ== + dependencies: + "@types/node" "*" + +"@types/eslint-scope@^3.7.3": + version "3.7.4" + resolved "http://localhost:4873/@types%2feslint-scope/-/eslint-scope-3.7.4.tgz#37fc1223f0786c39627068a12e94d6e6fc61de16" + integrity sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA== + dependencies: + "@types/eslint" "*" + "@types/estree" "*" + +"@types/eslint@*", "@types/eslint@^7.29.0 || ^8.4.1": + version "8.4.6" + resolved "http://localhost:4873/@types%2feslint/-/eslint-8.4.6.tgz#7976f054c1bccfcf514bff0564c0c41df5c08207" + integrity sha512-/fqTbjxyFUaYNO7VcW5g+4npmqVACz1bB7RTHYuLj+PRjw9hrCwrUXVQFpChUS0JsyEFvMZ7U/PfmvWgxJhI9g== + dependencies: + "@types/estree" "*" + "@types/json-schema" "*" + +"@types/estree@*": + version "1.0.0" + resolved "http://localhost:4873/@types%2festree/-/estree-1.0.0.tgz#5fb2e536c1ae9bf35366eed879e827fa59ca41c2" + integrity sha512-WulqXMDUTYAXCjZnk6JtIHPigp55cVtDgDrO2gHRwhyJto21+1zbVCtOYB2L1F9w4qCQ0rOGWBnBe0FNTiEJIQ== + +"@types/estree@0.0.39": + version "0.0.39" + resolved "http://localhost:4873/@types%2festree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f" + integrity sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== + +"@types/estree@^0.0.51": + version "0.0.51" + resolved "http://localhost:4873/@types%2festree/-/estree-0.0.51.tgz#cfd70924a25a3fd32b218e5e420e6897e1ac4f40" + integrity sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ== + +"@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.18": + version "4.17.31" + resolved "http://localhost:4873/@types%2fexpress-serve-static-core/-/express-serve-static-core-4.17.31.tgz#a1139efeab4e7323834bb0226e62ac019f474b2f" + integrity sha512-DxMhY+NAsTwMMFHBTtJFNp5qiHKJ7TeqOo23zVEM9alT1Ml27Q3xcTH0xwxn7Q0BbMcVEJOs/7aQtUWupUQN3Q== + 
dependencies: + "@types/node" "*" + "@types/qs" "*" + "@types/range-parser" "*" + +"@types/express@*", "@types/express@^4.17.13": + version "4.17.14" + resolved "http://localhost:4873/@types%2fexpress/-/express-4.17.14.tgz#143ea0557249bc1b3b54f15db4c81c3d4eb3569c" + integrity sha512-TEbt+vaPFQ+xpxFLFssxUDXj5cWCxZJjIcB7Yg0k0GMHGtgtQgpvx/MUQUeAkNbA9AAGrwkAsoeItdTgS7FMyg== + dependencies: + "@types/body-parser" "*" + "@types/express-serve-static-core" "^4.17.18" + "@types/qs" "*" + "@types/serve-static" "*" + +"@types/graceful-fs@^4.1.2": + version "4.1.5" + resolved "http://localhost:4873/@types%2fgraceful-fs/-/graceful-fs-4.1.5.tgz#21ffba0d98da4350db64891f92a9e5db3cdb4e15" + integrity sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw== + dependencies: + "@types/node" "*" + +"@types/html-minifier-terser@^6.0.0": + version "6.1.0" + resolved "http://localhost:4873/@types%2fhtml-minifier-terser/-/html-minifier-terser-6.1.0.tgz#4fc33a00c1d0c16987b1a20cf92d20614c55ac35" + integrity sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg== + +"@types/http-proxy@^1.17.8": + version "1.17.9" + resolved "http://localhost:4873/@types%2fhttp-proxy/-/http-proxy-1.17.9.tgz#7f0e7931343761efde1e2bf48c40f02f3f75705a" + integrity sha512-QsbSjA/fSk7xB+UXlCT3wHBy5ai9wOcNDWwZAtud+jXhwOM3l+EYZh8Lng4+/6n8uar0J7xILzqftJdJ/Wdfkw== + dependencies: + "@types/node" "*" + +"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": + version "2.0.4" + resolved "http://localhost:4873/@types%2fistanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" + integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== + +"@types/istanbul-lib-report@*": + version "3.0.0" + resolved 
"http://localhost:4873/@types%2fistanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" + integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== + dependencies: + "@types/istanbul-lib-coverage" "*" + +"@types/istanbul-reports@^3.0.0": + version "3.0.1" + resolved "http://localhost:4873/@types%2fistanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff" + integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw== + dependencies: + "@types/istanbul-lib-report" "*" + +"@types/jest@*": + version "29.1.2" + resolved "http://localhost:4873/@types%2fjest/-/jest-29.1.2.tgz#7ad8077043ab5f6c108c8111bcc1d224e5600a87" + integrity sha512-y+nlX0h87U0R+wsGn6EBuoRWYyv3KFtwRNP3QWp9+k2tJ2/bqcGS3UxD7jgT+tiwJWWq3UsyV4Y+T6rsMT4XMg== + dependencies: + expect "^29.0.0" + pretty-format "^29.0.0" + +"@types/json-schema@*", "@types/json-schema@^7.0.4", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": + version "7.0.11" + resolved "http://localhost:4873/@types%2fjson-schema/-/json-schema-7.0.11.tgz#d421b6c527a3037f7c84433fd2c4229e016863d3" + integrity sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ== + +"@types/json5@^0.0.29": + version "0.0.29" + resolved "http://localhost:4873/@types%2fjson5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" + integrity sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ== + +"@types/mime@*": + version "3.0.1" + resolved "http://localhost:4873/@types%2fmime/-/mime-3.0.1.tgz#5f8f2bca0a5863cb69bc0b0acd88c96cb1d4ae10" + integrity sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA== + +"@types/node@*": + version "18.8.3" + resolved 
"http://localhost:4873/@types%2fnode/-/node-18.8.3.tgz#ce750ab4017effa51aed6a7230651778d54e327c" + integrity sha512-0os9vz6BpGwxGe9LOhgP/ncvYN5Tx1fNcd2TM3rD/aCGBkysb+ZWpXEocG24h6ZzOi13+VB8HndAQFezsSOw1w== + +"@types/parse-json@^4.0.0": + version "4.0.0" + resolved "http://localhost:4873/@types%2fparse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0" + integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== + +"@types/prettier@^2.1.5": + version "2.7.1" + resolved "http://localhost:4873/@types%2fprettier/-/prettier-2.7.1.tgz#dfd20e2dc35f027cdd6c1908e80a5ddc7499670e" + integrity sha512-ri0UmynRRvZiiUJdiz38MmIblKK+oH30MztdBVR95dv/Ubw6neWSb8u1XpRb72L4qsZOhz+L+z9JD40SJmfWow== + +"@types/prop-types@*": + version "15.7.5" + resolved "http://localhost:4873/@types%2fprop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf" + integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w== + +"@types/q@^1.5.1": + version "1.5.5" + resolved "http://localhost:4873/@types%2fq/-/q-1.5.5.tgz#75a2a8e7d8ab4b230414505d92335d1dcb53a6df" + integrity sha512-L28j2FcJfSZOnL1WBjDYp2vUHCeIFlyYI/53EwD/rKUBQ7MtUUfbQWiyKJGpcnv4/WgrhWsFKrcPstcAt/J0tQ== + +"@types/qs@*": + version "6.9.7" + resolved "http://localhost:4873/@types%2fqs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb" + integrity sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw== + +"@types/range-parser@*": + version "1.2.4" + resolved "http://localhost:4873/@types%2frange-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc" + integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw== + +"@types/react-dom@^18.0.0": + version "18.0.6" + resolved "http://localhost:4873/@types%2freact-dom/-/react-dom-18.0.6.tgz#36652900024842b74607a17786b6662dd1e103a1" + integrity 
sha512-/5OFZgfIPSwy+YuIBP/FgJnQnsxhZhjjrnxudMddeblOouIodEQ75X14Rr4wGSG/bknL+Omy9iWlLo1u/9GzAA== + dependencies: + "@types/react" "*" + +"@types/react@*": + version "18.0.21" + resolved "http://localhost:4873/@types%2freact/-/react-18.0.21.tgz#b8209e9626bb00a34c76f55482697edd2b43cc67" + integrity sha512-7QUCOxvFgnD5Jk8ZKlUAhVcRj7GuJRjnjjiY/IUBWKgOlnvDvTMLD4RTF7NPyVmbRhNrbomZiOepg7M/2Kj1mA== + dependencies: + "@types/prop-types" "*" + "@types/scheduler" "*" + csstype "^3.0.2" + +"@types/resolve@1.17.1": + version "1.17.1" + resolved "http://localhost:4873/@types%2fresolve/-/resolve-1.17.1.tgz#3afd6ad8967c77e4376c598a82ddd58f46ec45d6" + integrity sha512-yy7HuzQhj0dhGpD8RLXSZWEkLsV9ibvxvi6EiJ3bkqLAO1RGo0WbkWQiwpRlSFymTJRz0d3k5LM3kkx8ArDbLw== + dependencies: + "@types/node" "*" + +"@types/retry@0.12.0": + version "0.12.0" + resolved "http://localhost:4873/@types%2fretry/-/retry-0.12.0.tgz#2b35eccfcee7d38cd72ad99232fbd58bffb3c84d" + integrity sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA== + +"@types/scheduler@*": + version "0.16.2" + resolved "http://localhost:4873/@types%2fscheduler/-/scheduler-0.16.2.tgz#1a62f89525723dde24ba1b01b092bf5df8ad4d39" + integrity sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew== + +"@types/serve-index@^1.9.1": + version "1.9.1" + resolved "http://localhost:4873/@types%2fserve-index/-/serve-index-1.9.1.tgz#1b5e85370a192c01ec6cec4735cf2917337a6278" + integrity sha512-d/Hs3nWDxNL2xAczmOVZNj92YZCS6RGxfBPjKzuu/XirCgXdpKEb88dYNbrYGint6IVWLNP+yonwVAuRC0T2Dg== + dependencies: + "@types/express" "*" + +"@types/serve-static@*", "@types/serve-static@^1.13.10": + version "1.15.0" + resolved "http://localhost:4873/@types%2fserve-static/-/serve-static-1.15.0.tgz#c7930ff61afb334e121a9da780aac0d9b8f34155" + integrity sha512-z5xyF6uh8CbjAu9760KDKsH2FcDxZ2tFCsA4HIMWE6IkiYMXfVoa+4f9KX+FN0ZLsaMw1WNG2ETLA6N+/YA+cg== + dependencies: + "@types/mime" "*" + "@types/node" 
"*" + +"@types/sockjs@^0.3.33": + version "0.3.33" + resolved "http://localhost:4873/@types%2fsockjs/-/sockjs-0.3.33.tgz#570d3a0b99ac995360e3136fd6045113b1bd236f" + integrity sha512-f0KEEe05NvUnat+boPTZ0dgaLZ4SfSouXUgv5noUiefG2ajgKjmETo9ZJyuqsl7dfl2aHlLJUiki6B4ZYldiiw== + dependencies: + "@types/node" "*" + +"@types/stack-utils@^2.0.0": + version "2.0.1" + resolved "http://localhost:4873/@types%2fstack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c" + integrity sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw== + +"@types/testing-library__jest-dom@^5.9.1": + version "5.14.5" + resolved "http://localhost:4873/@types%2ftesting-library__jest-dom/-/testing-library__jest-dom-5.14.5.tgz#d113709c90b3c75fdb127ec338dad7d5f86c974f" + integrity sha512-SBwbxYoyPIvxHbeHxTZX2Pe/74F/tX2/D3mMvzabdeJ25bBojfW0TyB8BHrbq/9zaaKICJZjLP+8r6AeZMFCuQ== + dependencies: + "@types/jest" "*" + +"@types/trusted-types@^2.0.2": + version "2.0.2" + resolved "http://localhost:4873/@types%2ftrusted-types/-/trusted-types-2.0.2.tgz#fc25ad9943bcac11cceb8168db4f275e0e72e756" + integrity sha512-F5DIZ36YVLE+PN+Zwws4kJogq47hNgX3Nx6WyDJ3kcplxyke3XIzB8uK5n/Lpm1HBsbGzd6nmGehL8cPekP+Tg== + +"@types/ws@^8.5.1": + version "8.5.3" + resolved "http://localhost:4873/@types%2fws/-/ws-8.5.3.tgz#7d25a1ffbecd3c4f2d35068d0b283c037003274d" + integrity sha512-6YOoWjruKj1uLf3INHH7D3qTXwFfEsg1kf3c0uDdSBJwfa/llkwIjrAGV7j7mVgGNbzTQ3HiHKKDXl6bJPD97w== + dependencies: + "@types/node" "*" + +"@types/yargs-parser@*": + version "21.0.0" + resolved "http://localhost:4873/@types%2fyargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" + integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA== + +"@types/yargs@^16.0.0": + version "16.0.4" + resolved "http://localhost:4873/@types%2fyargs/-/yargs-16.0.4.tgz#26aad98dd2c2a38e421086ea9ad42b9e51642977" + integrity 
sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw== + dependencies: + "@types/yargs-parser" "*" + +"@types/yargs@^17.0.8": + version "17.0.13" + resolved "http://localhost:4873/@types%2fyargs/-/yargs-17.0.13.tgz#34cced675ca1b1d51fcf4d34c3c6f0fa142a5c76" + integrity sha512-9sWaruZk2JGxIQU+IhI1fhPYRcQ0UuTNuKuCW9bR5fp7qi2Llf7WDzNa17Cy7TKnh3cdxDOiyTu6gaLS0eDatg== + dependencies: + "@types/yargs-parser" "*" + +"@typescript-eslint/eslint-plugin@^5.5.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2feslint-plugin/-/eslint-plugin-5.39.0.tgz#778b2d9e7f293502c7feeea6c74dca8eb3e67511" + integrity sha512-xVfKOkBm5iWMNGKQ2fwX5GVgBuHmZBO1tCRwXmY5oAIsPscfwm2UADDuNB8ZVYCtpQvJK4xpjrK7jEhcJ0zY9A== + dependencies: + "@typescript-eslint/scope-manager" "5.39.0" + "@typescript-eslint/type-utils" "5.39.0" + "@typescript-eslint/utils" "5.39.0" + debug "^4.3.4" + ignore "^5.2.0" + regexpp "^3.2.0" + semver "^7.3.7" + tsutils "^3.21.0" + +"@typescript-eslint/experimental-utils@^5.0.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2fexperimental-utils/-/experimental-utils-5.39.0.tgz#9263bb72b57449cc2f07ffb7fd4e12d0160b7f5e" + integrity sha512-n5N9kG/oGu2xXhHzsWzn94s6CWoiUj59FPU2dF2IQZxPftw+q6Jm5sV2vj5qTgAElRooHhrgtl2gxBQDCPt6WA== + dependencies: + "@typescript-eslint/utils" "5.39.0" + +"@typescript-eslint/parser@^5.5.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2fparser/-/parser-5.39.0.tgz#93fa0bc980a3a501e081824f6097f7ca30aaa22b" + integrity sha512-PhxLjrZnHShe431sBAGHaNe6BDdxAASDySgsBCGxcBecVCi8NQWxQZMcizNA4g0pN51bBAn/FUfkWG3SDVcGlA== + dependencies: + "@typescript-eslint/scope-manager" "5.39.0" + "@typescript-eslint/types" "5.39.0" + "@typescript-eslint/typescript-estree" "5.39.0" + debug "^4.3.4" + +"@typescript-eslint/scope-manager@5.39.0": + version "5.39.0" + resolved 
"http://localhost:4873/@typescript-eslint%2fscope-manager/-/scope-manager-5.39.0.tgz#873e1465afa3d6c78d8ed2da68aed266a08008d0" + integrity sha512-/I13vAqmG3dyqMVSZPjsbuNQlYS082Y7OMkwhCfLXYsmlI0ca4nkL7wJ/4gjX70LD4P8Hnw1JywUVVAwepURBw== + dependencies: + "@typescript-eslint/types" "5.39.0" + "@typescript-eslint/visitor-keys" "5.39.0" + +"@typescript-eslint/type-utils@5.39.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2ftype-utils/-/type-utils-5.39.0.tgz#0a8c00f95dce4335832ad2dc6bc431c14e32a0a6" + integrity sha512-KJHJkOothljQWzR3t/GunL0TPKY+fGJtnpl+pX+sJ0YiKTz3q2Zr87SGTmFqsCMFrLt5E0+o+S6eQY0FAXj9uA== + dependencies: + "@typescript-eslint/typescript-estree" "5.39.0" + "@typescript-eslint/utils" "5.39.0" + debug "^4.3.4" + tsutils "^3.21.0" + +"@typescript-eslint/types@5.39.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2ftypes/-/types-5.39.0.tgz#f4e9f207ebb4579fd854b25c0bf64433bb5ed78d" + integrity sha512-gQMZrnfEBFXK38hYqt8Lkwt8f4U6yq+2H5VDSgP/qiTzC8Nw8JO3OuSUOQ2qW37S/dlwdkHDntkZM6SQhKyPhw== + +"@typescript-eslint/typescript-estree@5.39.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2ftypescript-estree/-/typescript-estree-5.39.0.tgz#c0316aa04a1a1f4f7f9498e3c13ef1d3dc4cf88b" + integrity sha512-qLFQP0f398sdnogJoLtd43pUgB18Q50QSA+BTE5h3sUxySzbWDpTSdgt4UyxNSozY/oDK2ta6HVAzvGgq8JYnA== + dependencies: + "@typescript-eslint/types" "5.39.0" + "@typescript-eslint/visitor-keys" "5.39.0" + debug "^4.3.4" + globby "^11.1.0" + is-glob "^4.0.3" + semver "^7.3.7" + tsutils "^3.21.0" + +"@typescript-eslint/utils@5.39.0", "@typescript-eslint/utils@^5.13.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2futils/-/utils-5.39.0.tgz#b7063cca1dcf08d1d21b0d91db491161ad0be110" + integrity sha512-+DnY5jkpOpgj+EBtYPyHRjXampJfC0yUZZzfzLuUWVZvCuKqSdJVC8UhdWipIw7VKNTfwfAPiOWzYkAwuIhiAg== + dependencies: + "@types/json-schema" "^7.0.9" + "@typescript-eslint/scope-manager" "5.39.0" + 
"@typescript-eslint/types" "5.39.0" + "@typescript-eslint/typescript-estree" "5.39.0" + eslint-scope "^5.1.1" + eslint-utils "^3.0.0" + +"@typescript-eslint/visitor-keys@5.39.0": + version "5.39.0" + resolved "http://localhost:4873/@typescript-eslint%2fvisitor-keys/-/visitor-keys-5.39.0.tgz#8f41f7d241b47257b081ddba5d3ce80deaae61e2" + integrity sha512-yyE3RPwOG+XJBLrhvsxAidUgybJVQ/hG8BhiJo0k8JSAYfk/CshVcxf0HwP4Jt7WZZ6vLmxdo1p6EyN3tzFTkg== + dependencies: + "@typescript-eslint/types" "5.39.0" + eslint-visitor-keys "^3.3.0" + +"@webassemblyjs/ast@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fast/-/ast-1.11.1.tgz#2bfd767eae1a6996f432ff7e8d7fc75679c0b6a7" + integrity sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw== + dependencies: + "@webassemblyjs/helper-numbers" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + +"@webassemblyjs/floating-point-hex-parser@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2ffloating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz#f6c61a705f0fd7a6aecaa4e8198f23d9dc179e4f" + integrity sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ== + +"@webassemblyjs/helper-api-error@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fhelper-api-error/-/helper-api-error-1.11.1.tgz#1a63192d8788e5c012800ba6a7a46c705288fd16" + integrity sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg== + +"@webassemblyjs/helper-buffer@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fhelper-buffer/-/helper-buffer-1.11.1.tgz#832a900eb444884cde9a7cad467f81500f5e5ab5" + integrity sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA== + +"@webassemblyjs/helper-numbers@1.11.1": + version "1.11.1" + resolved 
"http://localhost:4873/@webassemblyjs%2fhelper-numbers/-/helper-numbers-1.11.1.tgz#64d81da219fbbba1e3bd1bfc74f6e8c4e10a62ae" + integrity sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ== + dependencies: + "@webassemblyjs/floating-point-hex-parser" "1.11.1" + "@webassemblyjs/helper-api-error" "1.11.1" + "@xtuc/long" "4.2.2" + +"@webassemblyjs/helper-wasm-bytecode@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fhelper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz#f328241e41e7b199d0b20c18e88429c4433295e1" + integrity sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q== + +"@webassemblyjs/helper-wasm-section@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fhelper-wasm-section/-/helper-wasm-section-1.11.1.tgz#21ee065a7b635f319e738f0dd73bfbda281c097a" + integrity sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + +"@webassemblyjs/ieee754@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fieee754/-/ieee754-1.11.1.tgz#963929e9bbd05709e7e12243a099180812992614" + integrity sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ== + dependencies: + "@xtuc/ieee754" "^1.2.0" + +"@webassemblyjs/leb128@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fleb128/-/leb128-1.11.1.tgz#ce814b45574e93d76bae1fb2644ab9cdd9527aa5" + integrity sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw== + dependencies: + "@xtuc/long" "4.2.2" + +"@webassemblyjs/utf8@1.11.1": + version "1.11.1" + resolved 
"http://localhost:4873/@webassemblyjs%2futf8/-/utf8-1.11.1.tgz#d1f8b764369e7c6e6bae350e854dec9a59f0a3ff" + integrity sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ== + +"@webassemblyjs/wasm-edit@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fwasm-edit/-/wasm-edit-1.11.1.tgz#ad206ebf4bf95a058ce9880a8c092c5dec8193d6" + integrity sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/helper-wasm-section" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/wasm-opt" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + "@webassemblyjs/wast-printer" "1.11.1" + +"@webassemblyjs/wasm-gen@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fwasm-gen/-/wasm-gen-1.11.1.tgz#86c5ea304849759b7d88c47a32f4f039ae3c8f76" + integrity sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/ieee754" "1.11.1" + "@webassemblyjs/leb128" "1.11.1" + "@webassemblyjs/utf8" "1.11.1" + +"@webassemblyjs/wasm-opt@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fwasm-opt/-/wasm-opt-1.11.1.tgz#657b4c2202f4cf3b345f8a4c6461c8c2418985f2" + integrity sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + +"@webassemblyjs/wasm-parser@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fwasm-parser/-/wasm-parser-1.11.1.tgz#86ca734534f417e9bd3c67c7a1c75d8be41fb199" + integrity 
sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-api-error" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/ieee754" "1.11.1" + "@webassemblyjs/leb128" "1.11.1" + "@webassemblyjs/utf8" "1.11.1" + +"@webassemblyjs/wast-printer@1.11.1": + version "1.11.1" + resolved "http://localhost:4873/@webassemblyjs%2fwast-printer/-/wast-printer-1.11.1.tgz#d0c73beda8eec5426f10ae8ef55cee5e7084c2f0" + integrity sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@xtuc/long" "4.2.2" + +"@xtuc/ieee754@^1.2.0": + version "1.2.0" + resolved "http://localhost:4873/@xtuc%2fieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" + integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== + +"@xtuc/long@4.2.2": + version "4.2.2" + resolved "http://localhost:4873/@xtuc%2flong/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" + integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== + +abab@^2.0.3, abab@^2.0.5: + version "2.0.6" + resolved "http://localhost:4873/abab/-/abab-2.0.6.tgz#41b80f2c871d19686216b82309231cfd3cb3d291" + integrity sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA== + +accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: + version "1.3.8" + resolved "http://localhost:4873/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" + integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== + dependencies: + mime-types "~2.1.34" + negotiator "0.6.3" + +acorn-globals@^6.0.0: + version "6.0.0" + resolved "http://localhost:4873/acorn-globals/-/acorn-globals-6.0.0.tgz#46cdd39f0f8ff08a876619b55f5ac8a6dc770b45" + integrity 
sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg== + dependencies: + acorn "^7.1.1" + acorn-walk "^7.1.1" + +acorn-import-assertions@^1.7.6: + version "1.8.0" + resolved "http://localhost:4873/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz#ba2b5939ce62c238db6d93d81c9b111b29b855e9" + integrity sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw== + +acorn-jsx@^5.3.2: + version "5.3.2" + resolved "http://localhost:4873/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== + +acorn-node@^1.8.2: + version "1.8.2" + resolved "http://localhost:4873/acorn-node/-/acorn-node-1.8.2.tgz#114c95d64539e53dede23de8b9d96df7c7ae2af8" + integrity sha512-8mt+fslDufLYntIoPAaIMUe/lrbrehIiwmR3t2k9LljIzoigEPF27eLk2hy8zSGzmR/ogr7zbRKINMo1u0yh5A== + dependencies: + acorn "^7.0.0" + acorn-walk "^7.0.0" + xtend "^4.0.2" + +acorn-walk@^7.0.0, acorn-walk@^7.1.1: + version "7.2.0" + resolved "http://localhost:4873/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" + integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== + +acorn@^7.0.0, acorn@^7.1.1: + version "7.4.1" + resolved "http://localhost:4873/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" + integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== + +acorn@^8.2.4, acorn@^8.5.0, acorn@^8.7.1, acorn@^8.8.0: + version "8.8.0" + resolved "http://localhost:4873/acorn/-/acorn-8.8.0.tgz#88c0187620435c7f6015803f5539dae05a9dbea8" + integrity sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w== + +address@^1.0.1, address@^1.1.2: + version "1.2.1" + resolved 
"http://localhost:4873/address/-/address-1.2.1.tgz#25bb61095b7522d65b357baa11bc05492d4c8acd" + integrity sha512-B+6bi5D34+fDYENiH5qOlA0cV2rAGKuWZ9LeyUUehbXy8e0VS9e498yO0Jeeh+iM+6KbfudHTFjXw2MmJD4QRA== + +adjust-sourcemap-loader@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/adjust-sourcemap-loader/-/adjust-sourcemap-loader-4.0.0.tgz#fc4a0fd080f7d10471f30a7320f25560ade28c99" + integrity sha512-OXwN5b9pCUXNQHJpwwD2qP40byEmSgzj8B4ydSN0uMNYWiFmJ6x6KwUllMmfk8Rwu/HJDFR7U8ubsWBoN0Xp0A== + dependencies: + loader-utils "^2.0.0" + regex-parser "^2.2.11" + +agent-base@6: + version "6.0.2" + resolved "http://localhost:4873/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" + integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== + dependencies: + debug "4" + +ajv-formats@^2.1.1: + version "2.1.1" + resolved "http://localhost:4873/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" + integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA== + dependencies: + ajv "^8.0.0" + +ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: + version "3.5.2" + resolved "http://localhost:4873/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" + integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== + +ajv-keywords@^5.0.0: + version "5.1.0" + resolved "http://localhost:4873/ajv-keywords/-/ajv-keywords-5.1.0.tgz#69d4d385a4733cdbeab44964a1170a88f87f0e16" + integrity sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw== + dependencies: + fast-deep-equal "^3.1.3" + +ajv@^6.10.0, ajv@^6.12.2, ajv@^6.12.4, ajv@^6.12.5: + version "6.12.6" + resolved "http://localhost:4873/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity 
sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +ajv@^8.0.0, ajv@^8.6.0, ajv@^8.8.0: + version "8.11.0" + resolved "http://localhost:4873/ajv/-/ajv-8.11.0.tgz#977e91dd96ca669f54a11e23e378e33b884a565f" + integrity sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg== + dependencies: + fast-deep-equal "^3.1.1" + json-schema-traverse "^1.0.0" + require-from-string "^2.0.2" + uri-js "^4.2.2" + +ansi-escapes@^4.2.1, ansi-escapes@^4.3.1: + version "4.3.2" + resolved "http://localhost:4873/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" + integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== + dependencies: + type-fest "^0.21.3" + +ansi-html-community@^0.0.8: + version "0.0.8" + resolved "http://localhost:4873/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41" + integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw== + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "http://localhost:4873/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-regex@^6.0.1: + version "6.0.1" + resolved "http://localhost:4873/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a" + integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA== + +ansi-styles@^3.2.1: + version "3.2.1" + resolved "http://localhost:4873/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== 
+ dependencies: + color-convert "^1.9.0" + +ansi-styles@^4.0.0, ansi-styles@^4.1.0: + version "4.3.0" + resolved "http://localhost:4873/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +ansi-styles@^5.0.0: + version "5.2.0" + resolved "http://localhost:4873/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" + integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== + +anymatch@^3.0.3, anymatch@~3.1.2: + version "3.1.2" + resolved "http://localhost:4873/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" + integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== + dependencies: + normalize-path "^3.0.0" + picomatch "^2.0.4" + +arg@^5.0.2: + version "5.0.2" + resolved "http://localhost:4873/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c" + integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg== + +argparse@^1.0.7: + version "1.0.10" + resolved "http://localhost:4873/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== + dependencies: + sprintf-js "~1.0.2" + +argparse@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== + +aria-query@^4.2.2: + version "4.2.2" + resolved "http://localhost:4873/aria-query/-/aria-query-4.2.2.tgz#0d2ca6c9aceb56b8977e9fed6aed7e15bbd2f83b" + integrity sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA== + 
dependencies: + "@babel/runtime" "^7.10.2" + "@babel/runtime-corejs3" "^7.10.2" + +aria-query@^5.0.0: + version "5.0.2" + resolved "http://localhost:4873/aria-query/-/aria-query-5.0.2.tgz#0b8a744295271861e1d933f8feca13f9b70cfdc1" + integrity sha512-eigU3vhqSO+Z8BKDnVLN/ompjhf3pYzecKXz8+whRy+9gZu8n1TCGfwzQUUPnqdHl9ax1Hr9031orZ+UOEYr7Q== + +array-flatten@1.1.1: + version "1.1.1" + resolved "http://localhost:4873/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" + integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== + +array-flatten@^2.1.2: + version "2.1.2" + resolved "http://localhost:4873/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" + integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== + +array-includes@^3.1.4, array-includes@^3.1.5: + version "3.1.5" + resolved "http://localhost:4873/array-includes/-/array-includes-3.1.5.tgz#2c320010db8d31031fd2a5f6b3bbd4b1aad31bdb" + integrity sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + get-intrinsic "^1.1.1" + is-string "^1.0.7" + +array-union@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" + integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== + +array.prototype.flat@^1.2.5: + version "1.3.0" + resolved "http://localhost:4873/array.prototype.flat/-/array.prototype.flat-1.3.0.tgz#0b0c1567bf57b38b56b4c97b8aa72ab45e4adc7b" + integrity sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.2" + es-shim-unscopables "^1.0.0" + 
+array.prototype.flatmap@^1.3.0: + version "1.3.0" + resolved "http://localhost:4873/array.prototype.flatmap/-/array.prototype.flatmap-1.3.0.tgz#a7e8ed4225f4788a70cd910abcf0791e76a5534f" + integrity sha512-PZC9/8TKAIxcWKdyeb77EzULHPrIX/tIZebLJUQOMR1OwYosT8yggdfWScfTBCDj5utONvOuPQQumYsU2ULbkg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.2" + es-shim-unscopables "^1.0.0" + +array.prototype.reduce@^1.0.4: + version "1.0.4" + resolved "http://localhost:4873/array.prototype.reduce/-/array.prototype.reduce-1.0.4.tgz#8167e80089f78bff70a99e20bd4201d4663b0a6f" + integrity sha512-WnM+AjG/DvLRLo4DDl+r+SvCzYtD2Jd9oeBYMcEaI7t3fFrHY9M53/wdLcTvmZNQ70IU6Htj0emFkZ5TS+lrdw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.2" + es-array-method-boxes-properly "^1.0.0" + is-string "^1.0.7" + +asap@~2.0.6: + version "2.0.6" + resolved "http://localhost:4873/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" + integrity sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA== + +ast-types-flow@^0.0.7: + version "0.0.7" + resolved "http://localhost:4873/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad" + integrity sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag== + +async@^3.2.3: + version "3.2.4" + resolved "http://localhost:4873/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c" + integrity sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ== + +asynckit@^0.4.0: + version "0.4.0" + resolved "http://localhost:4873/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== + +at-least-node@^1.0.0: + version "1.0.0" + resolved 
"http://localhost:4873/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" + integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== + +autoprefixer@^10.4.11, autoprefixer@^10.4.12: + version "10.4.12" + resolved "http://localhost:4873/autoprefixer/-/autoprefixer-10.4.12.tgz#183f30bf0b0722af54ee5ef257f7d4320bb33129" + integrity sha512-WrCGV9/b97Pa+jtwf5UGaRjgQIg7OK3D06GnoYoZNcG1Xb8Gt3EfuKjlhh9i/VtT16g6PYjZ69jdJ2g8FxSC4Q== + dependencies: + browserslist "^4.21.4" + caniuse-lite "^1.0.30001407" + fraction.js "^4.2.0" + normalize-range "^0.1.2" + picocolors "^1.0.0" + postcss-value-parser "^4.2.0" + +axe-core@^4.4.3: + version "4.4.3" + resolved "http://localhost:4873/axe-core/-/axe-core-4.4.3.tgz#11c74d23d5013c0fa5d183796729bc3482bd2f6f" + integrity sha512-32+ub6kkdhhWick/UjvEwRchgoetXqTK14INLqbGm5U2TzBkBNF3nQtLYm8ovxSkQWArjEQvftCKryjZaATu3w== + +axobject-query@^2.2.0: + version "2.2.0" + resolved "http://localhost:4873/axobject-query/-/axobject-query-2.2.0.tgz#943d47e10c0b704aa42275e20edf3722648989be" + integrity sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA== + +babel-jest@^27.4.2, babel-jest@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/babel-jest/-/babel-jest-27.5.1.tgz#a1bf8d61928edfefd21da27eb86a695bfd691444" + integrity sha512-cdQ5dXjGRd0IBRATiQ4mZGlGlRE8kJpjPOixdNRdT+m3UcNqmYWN6rK6nvtXYfY3D76cb8s/O1Ss8ea24PIwcg== + dependencies: + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/babel__core" "^7.1.14" + babel-plugin-istanbul "^6.1.1" + babel-preset-jest "^27.5.1" + chalk "^4.0.0" + graceful-fs "^4.2.9" + slash "^3.0.0" + +babel-loader@^8.2.3: + version "8.2.5" + resolved "http://localhost:4873/babel-loader/-/babel-loader-8.2.5.tgz#d45f585e654d5a5d90f5350a779d7647c5ed512e" + integrity sha512-OSiFfH89LrEMiWd4pLNqGz4CwJDtbs2ZVc+iGu2HrkRfPxId9F2anQj38IxWpmRfsUY0aBZYi1EFcd3mhtRMLQ== + dependencies: + 
find-cache-dir "^3.3.1" + loader-utils "^2.0.0" + make-dir "^3.1.0" + schema-utils "^2.6.5" + +babel-plugin-dynamic-import-node@^2.3.3: + version "2.3.3" + resolved "http://localhost:4873/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz#84fda19c976ec5c6defef57f9427b3def66e17a3" + integrity sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ== + dependencies: + object.assign "^4.1.0" + +babel-plugin-istanbul@^6.1.1: + version "6.1.1" + resolved "http://localhost:4873/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" + integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@istanbuljs/load-nyc-config" "^1.0.0" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-instrument "^5.0.4" + test-exclude "^6.0.0" + +babel-plugin-jest-hoist@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-27.5.1.tgz#9be98ecf28c331eb9f5df9c72d6f89deb8181c2e" + integrity sha512-50wCwD5EMNW4aRpOwtqzyZHIewTYNxLA4nhB+09d8BIssfNfzBRhkBIHiaPv1Si226TQSvp8gxAJm2iY2qs2hQ== + dependencies: + "@babel/template" "^7.3.3" + "@babel/types" "^7.3.3" + "@types/babel__core" "^7.0.0" + "@types/babel__traverse" "^7.0.6" + +babel-plugin-macros@^3.1.0: + version "3.1.0" + resolved "http://localhost:4873/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz#9ef6dc74deb934b4db344dc973ee851d148c50c1" + integrity sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg== + dependencies: + "@babel/runtime" "^7.12.5" + cosmiconfig "^7.0.0" + resolve "^1.19.0" + +babel-plugin-named-asset-import@^0.3.8: + version "0.3.8" + resolved "http://localhost:4873/babel-plugin-named-asset-import/-/babel-plugin-named-asset-import-0.3.8.tgz#6b7fa43c59229685368683c28bc9734f24524cc2" + integrity 
sha512-WXiAc++qo7XcJ1ZnTYGtLxmBCVbddAml3CEXgWaBzNzLNoxtQ8AiGEFDMOhot9XjTCQbvP5E77Fj9Gk924f00Q== + +babel-plugin-polyfill-corejs2@^0.3.3: + version "0.3.3" + resolved "http://localhost:4873/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.3.tgz#5d1bd3836d0a19e1b84bbf2d9640ccb6f951c122" + integrity sha512-8hOdmFYFSZhqg2C/JgLUQ+t52o5nirNwaWM2B9LWteozwIvM14VSwdsCAUET10qT+kmySAlseadmfeeSWFCy+Q== + dependencies: + "@babel/compat-data" "^7.17.7" + "@babel/helper-define-polyfill-provider" "^0.3.3" + semver "^6.1.1" + +babel-plugin-polyfill-corejs3@^0.6.0: + version "0.6.0" + resolved "http://localhost:4873/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.6.0.tgz#56ad88237137eade485a71b52f72dbed57c6230a" + integrity sha512-+eHqR6OPcBhJOGgsIar7xoAB1GcSwVUA3XjAd7HJNzOXT4wv6/H7KIdA/Nc60cvUlDbKApmqNvD1B1bzOt4nyA== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.3.3" + core-js-compat "^3.25.1" + +babel-plugin-polyfill-regenerator@^0.4.1: + version "0.4.1" + resolved "http://localhost:4873/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.4.1.tgz#390f91c38d90473592ed43351e801a9d3e0fd747" + integrity sha512-NtQGmyQDXjQqQ+IzRkBVwEOz9lQ4zxAQZgoAYEtU9dJjnl1Oc98qnN7jcp+bE7O7aYzVpavXE3/VKXNzUbh7aw== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.3.3" + +babel-plugin-transform-react-remove-prop-types@^0.4.24: + version "0.4.24" + resolved "http://localhost:4873/babel-plugin-transform-react-remove-prop-types/-/babel-plugin-transform-react-remove-prop-types-0.4.24.tgz#f2edaf9b4c6a5fbe5c1d678bfb531078c1555f3a" + integrity sha512-eqj0hVcJUR57/Ug2zE1Yswsw4LhuqqHhD+8v120T1cl3kjg76QwtyBrdIk4WVwK+lAhBJVYCd/v+4nc4y+8JsA== + +babel-preset-current-node-syntax@^1.0.0: + version "1.0.1" + resolved "http://localhost:4873/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" + integrity 
sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ== + dependencies: + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-bigint" "^7.8.3" + "@babel/plugin-syntax-class-properties" "^7.8.3" + "@babel/plugin-syntax-import-meta" "^7.8.3" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.8.3" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-top-level-await" "^7.8.3" + +babel-preset-jest@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/babel-preset-jest/-/babel-preset-jest-27.5.1.tgz#91f10f58034cb7989cb4f962b69fa6eef6a6bc81" + integrity sha512-Nptf2FzlPCWYuJg41HBqXVT8ym6bXOevuCTbhxlUpjwtysGaIWFvDEjp4y+G7fl13FgOdjs7P/DmErqH7da0Ag== + dependencies: + babel-plugin-jest-hoist "^27.5.1" + babel-preset-current-node-syntax "^1.0.0" + +babel-preset-react-app@^10.0.1: + version "10.0.1" + resolved "http://localhost:4873/babel-preset-react-app/-/babel-preset-react-app-10.0.1.tgz#ed6005a20a24f2c88521809fa9aea99903751584" + integrity sha512-b0D9IZ1WhhCWkrTXyFuIIgqGzSkRIH5D5AmB0bXbzYAB1OBAwHcUeyWW2LorutLWF5btNo/N7r/cIdmvvKJlYg== + dependencies: + "@babel/core" "^7.16.0" + "@babel/plugin-proposal-class-properties" "^7.16.0" + "@babel/plugin-proposal-decorators" "^7.16.4" + "@babel/plugin-proposal-nullish-coalescing-operator" "^7.16.0" + "@babel/plugin-proposal-numeric-separator" "^7.16.0" + "@babel/plugin-proposal-optional-chaining" "^7.16.0" + "@babel/plugin-proposal-private-methods" "^7.16.0" + "@babel/plugin-transform-flow-strip-types" "^7.16.0" + "@babel/plugin-transform-react-display-name" "^7.16.0" + "@babel/plugin-transform-runtime" "^7.16.4" + "@babel/preset-env" "^7.16.4" + "@babel/preset-react" 
"^7.16.0" + "@babel/preset-typescript" "^7.16.0" + "@babel/runtime" "^7.16.3" + babel-plugin-macros "^3.1.0" + babel-plugin-transform-react-remove-prop-types "^0.4.24" + +balanced-match@^1.0.0: + version "1.0.2" + resolved "http://localhost:4873/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +batch@0.6.1: + version "0.6.1" + resolved "http://localhost:4873/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" + integrity sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw== + +bfj@^7.0.2: + version "7.0.2" + resolved "http://localhost:4873/bfj/-/bfj-7.0.2.tgz#1988ce76f3add9ac2913fd8ba47aad9e651bfbb2" + integrity sha512-+e/UqUzwmzJamNF50tBV6tZPTORow7gQ96iFow+8b562OdMpEK0BcJEq2OSPEDmAbSMBQ7PKZ87ubFkgxpYWgw== + dependencies: + bluebird "^3.5.5" + check-types "^11.1.1" + hoopy "^0.1.4" + tryer "^1.0.1" + +big.js@^5.2.2: + version "5.2.2" + resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" + integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== + +binary-extensions@^2.0.0: + version "2.2.0" + resolved "http://localhost:4873/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" + integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== + +bluebird@^3.5.5: + version "3.7.2" + resolved "http://localhost:4873/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" + integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== + +body-parser@1.20.0: + version "1.20.0" + resolved "http://localhost:4873/body-parser/-/body-parser-1.20.0.tgz#3de69bd89011c11573d7bfee6a64f11b6bd27cc5" + integrity 
sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg== + dependencies: + bytes "3.1.2" + content-type "~1.0.4" + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + http-errors "2.0.0" + iconv-lite "0.4.24" + on-finished "2.4.1" + qs "6.10.3" + raw-body "2.5.1" + type-is "~1.6.18" + unpipe "1.0.0" + +bonjour-service@^1.0.11: + version "1.0.14" + resolved "http://localhost:4873/bonjour-service/-/bonjour-service-1.0.14.tgz#c346f5bc84e87802d08f8d5a60b93f758e514ee7" + integrity sha512-HIMbgLnk1Vqvs6B4Wq5ep7mxvj9sGz5d1JJyDNSGNIdA/w2MCz6GTjWTdjqOJV1bEPj+6IkxDvWNFKEBxNt4kQ== + dependencies: + array-flatten "^2.1.2" + dns-equal "^1.0.0" + fast-deep-equal "^3.1.3" + multicast-dns "^7.2.5" + +boolbase@^1.0.0, boolbase@~1.0.0: + version "1.0.0" + resolved "http://localhost:4873/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" + integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww== + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "http://localhost:4873/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +brace-expansion@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== + dependencies: + balanced-match "^1.0.0" + +braces@^3.0.2, braces@~3.0.2: + version "3.0.2" + resolved "http://localhost:4873/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== + dependencies: + fill-range "^7.0.1" + +browser-process-hrtime@^1.0.0: + version "1.0.0" 
+ resolved "http://localhost:4873/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" + integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== + +browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.16.6, browserslist@^4.18.1, browserslist@^4.20.3, browserslist@^4.21.3, browserslist@^4.21.4: + version "4.21.4" + resolved "http://localhost:4873/browserslist/-/browserslist-4.21.4.tgz#e7496bbc67b9e39dd0f98565feccdcb0d4ff6987" + integrity sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw== + dependencies: + caniuse-lite "^1.0.30001400" + electron-to-chromium "^1.4.251" + node-releases "^2.0.6" + update-browserslist-db "^1.0.9" + +bser@2.1.1: + version "2.1.1" + resolved "http://localhost:4873/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" + integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== + dependencies: + node-int64 "^0.4.0" + +buffer-from@^1.0.0: + version "1.1.2" + resolved "http://localhost:4873/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" + integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== + +builtin-modules@^3.1.0: + version "3.3.0" + resolved "http://localhost:4873/builtin-modules/-/builtin-modules-3.3.0.tgz#cae62812b89801e9656336e46223e030386be7b6" + integrity sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw== + +bytes@3.0.0: + version "3.0.0" + resolved "http://localhost:4873/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" + integrity sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw== + +bytes@3.1.2: + version "3.1.2" + resolved "http://localhost:4873/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" + integrity 
sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== + +call-bind@^1.0.0, call-bind@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" + integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== + dependencies: + function-bind "^1.1.1" + get-intrinsic "^1.0.2" + +callsites@^3.0.0: + version "3.1.0" + resolved "http://localhost:4873/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +camel-case@^4.1.2: + version "4.1.2" + resolved "http://localhost:4873/camel-case/-/camel-case-4.1.2.tgz#9728072a954f805228225a6deea6b38461e1bd5a" + integrity sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw== + dependencies: + pascal-case "^3.1.2" + tslib "^2.0.3" + +camelcase-css@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/camelcase-css/-/camelcase-css-2.0.1.tgz#ee978f6947914cc30c6b44741b6ed1df7f043fd5" + integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA== + +camelcase@^5.3.1: + version "5.3.1" + resolved "http://localhost:4873/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== + +camelcase@^6.2.0, camelcase@^6.2.1: + version "6.3.0" + resolved "http://localhost:4873/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" + integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== + +caniuse-api@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/caniuse-api/-/caniuse-api-3.0.0.tgz#5e4d90e2274961d46291997df599e3ed008ee4c0" + integrity 
sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw== + dependencies: + browserslist "^4.0.0" + caniuse-lite "^1.0.0" + lodash.memoize "^4.1.2" + lodash.uniq "^4.5.0" + +caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001400, caniuse-lite@^1.0.30001407: + version "1.0.30001416" + resolved "http://localhost:4873/caniuse-lite/-/caniuse-lite-1.0.30001416.tgz#29692af8a6a11412f2d3cf9a59d588fcdd21ce4c" + integrity sha512-06wzzdAkCPZO+Qm4e/eNghZBDfVNDsCgw33T27OwBH9unE9S478OYw//Q2L7Npf/zBzs7rjZOszIFQkwQKAEqA== + +case-sensitive-paths-webpack-plugin@^2.4.0: + version "2.4.0" + resolved "http://localhost:4873/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz#db64066c6422eed2e08cc14b986ca43796dbc6d4" + integrity sha512-roIFONhcxog0JSSWbvVAh3OocukmSgpqOH6YpMkCvav/ySIV3JKg4Dc8vYtQjYi/UxpNE36r/9v+VqTQqgkYmw== + +chalk@^2.0.0, chalk@^2.4.1: + version "2.4.2" + resolved "http://localhost:4873/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +chalk@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4" + integrity sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.2: + version "4.1.2" + resolved "http://localhost:4873/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +char-regex@^1.0.2: + version "1.0.2" + resolved 
"http://localhost:4873/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" + integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== + +char-regex@^2.0.0: + version "2.0.1" + resolved "http://localhost:4873/char-regex/-/char-regex-2.0.1.tgz#6dafdb25f9d3349914079f010ba8d0e6ff9cd01e" + integrity sha512-oSvEeo6ZUD7NepqAat3RqoucZ5SeqLJgOvVIwkafu6IP3V0pO38s/ypdVUmDDK6qIIHNlYHJAKX9E7R7HoKElw== + +check-types@^11.1.1: + version "11.1.2" + resolved "http://localhost:4873/check-types/-/check-types-11.1.2.tgz#86a7c12bf5539f6324eb0e70ca8896c0e38f3e2f" + integrity sha512-tzWzvgePgLORb9/3a0YenggReLKAIb2owL03H2Xdoe5pKcUyWRSEQ8xfCar8t2SIAuEDwtmx2da1YB52YuHQMQ== + +chokidar@^3.4.2, chokidar@^3.5.3: + version "3.5.3" + resolved "http://localhost:4873/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" + integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== + dependencies: + anymatch "~3.1.2" + braces "~3.0.2" + glob-parent "~5.1.2" + is-binary-path "~2.1.0" + is-glob "~4.0.1" + normalize-path "~3.0.0" + readdirp "~3.6.0" + optionalDependencies: + fsevents "~2.3.2" + +chrome-trace-event@^1.0.2: + version "1.0.3" + resolved "http://localhost:4873/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz#1015eced4741e15d06664a957dbbf50d041e26ac" + integrity sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg== + +ci-info@^3.2.0: + version "3.4.0" + resolved "http://localhost:4873/ci-info/-/ci-info-3.4.0.tgz#b28484fd436cbc267900364f096c9dc185efb251" + integrity sha512-t5QdPT5jq3o262DOQ8zA6E1tlH2upmUc4Hlvrbx1pGYJuiiHl7O7rvVNI+l8HTVhd/q3Qc9vqimkNk5yiXsAug== + +cjs-module-lexer@^1.0.0: + version "1.2.2" + resolved "http://localhost:4873/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz#9f84ba3244a512f3a54e5277e8eef4c489864e40" + integrity 
sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA== + +clean-css@^5.2.2: + version "5.3.1" + resolved "http://localhost:4873/clean-css/-/clean-css-5.3.1.tgz#d0610b0b90d125196a2894d35366f734e5d7aa32" + integrity sha512-lCr8OHhiWCTw4v8POJovCoh4T7I9U11yVsPjMWWnnMmp9ZowCxyad1Pathle/9HjaDp+fdQKjO9fQydE6RHTZg== + dependencies: + source-map "~0.6.0" + +cliui@^7.0.2: + version "7.0.4" + resolved "http://localhost:4873/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" + integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.0" + wrap-ansi "^7.0.0" + +clone-deep@^4.0.1: + version "4.0.1" + resolved "http://localhost:4873/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387" + integrity sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ== + dependencies: + is-plain-object "^2.0.4" + kind-of "^6.0.2" + shallow-clone "^3.0.0" + +co@^4.6.0: + version "4.6.0" + resolved "http://localhost:4873/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ== + +coa@^2.0.2: + version "2.0.2" + resolved "http://localhost:4873/coa/-/coa-2.0.2.tgz#43f6c21151b4ef2bf57187db0d73de229e3e7ec3" + integrity sha512-q5/jG+YQnSy4nRTV4F7lPepBJZ8qBNJJDBuJdoejDyLXgmL7IEo+Le2JDZudFTFt7mrCqIRaSjws4ygRCTCAXA== + dependencies: + "@types/q" "^1.5.1" + chalk "^2.4.1" + q "^1.1.2" + +collect-v8-coverage@^1.0.0: + version "1.0.1" + resolved "http://localhost:4873/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" + integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== + +color-convert@^1.9.0: + version "1.9.3" + resolved 
"http://localhost:4873/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-convert@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@1.1.3: + version "1.1.3" + resolved "http://localhost:4873/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== + +color-name@^1.1.4, color-name@~1.1.4: + version "1.1.4" + resolved "http://localhost:4873/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +colord@^2.9.1: + version "2.9.3" + resolved "http://localhost:4873/colord/-/colord-2.9.3.tgz#4f8ce919de456f1d5c1c368c307fe20f3e59fb43" + integrity sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw== + +colorette@^2.0.10: + version "2.0.19" + resolved "http://localhost:4873/colorette/-/colorette-2.0.19.tgz#cdf044f47ad41a0f4b56b3a0d5b4e6e1a2d5a798" + integrity sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ== + +combined-stream@^1.0.8: + version "1.0.8" + resolved "http://localhost:4873/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + +commander@^2.20.0: + version "2.20.3" + resolved 
"http://localhost:4873/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" + integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== + +commander@^7.2.0: + version "7.2.0" + resolved "http://localhost:4873/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" + integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== + +commander@^8.3.0: + version "8.3.0" + resolved "http://localhost:4873/commander/-/commander-8.3.0.tgz#4837ea1b2da67b9c616a67afbb0fafee567bca66" + integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== + +common-path-prefix@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/common-path-prefix/-/common-path-prefix-3.0.0.tgz#7d007a7e07c58c4b4d5f433131a19141b29f11e0" + integrity sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w== + +common-tags@^1.8.0: + version "1.8.2" + resolved "http://localhost:4873/common-tags/-/common-tags-1.8.2.tgz#94ebb3c076d26032745fd54face7f688ef5ac9c6" + integrity sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA== + +commondir@^1.0.1: + version "1.0.1" + resolved "http://localhost:4873/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" + integrity sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg== + +compressible@~2.0.16: + version "2.0.18" + resolved "http://localhost:4873/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" + integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg== + dependencies: + mime-db ">= 1.43.0 < 2" + +compression@^1.7.4: + version "1.7.4" + resolved "http://localhost:4873/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" + integrity 
sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== + dependencies: + accepts "~1.3.5" + bytes "3.0.0" + compressible "~2.0.16" + debug "2.6.9" + on-headers "~1.0.2" + safe-buffer "5.1.2" + vary "~1.1.2" + +concat-map@0.0.1: + version "0.0.1" + resolved "http://localhost:4873/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== + +confusing-browser-globals@^1.0.11: + version "1.0.11" + resolved "http://localhost:4873/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz#ae40e9b57cdd3915408a2805ebd3a5585608dc81" + integrity sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA== + +connect-history-api-fallback@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz#647264845251a0daf25b97ce87834cace0f5f1c8" + integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA== + +content-disposition@0.5.4: + version "0.5.4" + resolved "http://localhost:4873/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" + integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== + dependencies: + safe-buffer "5.2.1" + +content-type@~1.0.4: + version "1.0.4" + resolved "http://localhost:4873/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" + integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== + +convert-source-map@^1.4.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: + version "1.8.0" + resolved "http://localhost:4873/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" + integrity 
sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== + dependencies: + safe-buffer "~5.1.1" + +cookie-signature@1.0.6: + version "1.0.6" + resolved "http://localhost:4873/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" + integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== + +cookie@0.5.0: + version "0.5.0" + resolved "http://localhost:4873/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" + integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== + +core-js-compat@^3.25.1: + version "3.25.5" + resolved "http://localhost:4873/core-js-compat/-/core-js-compat-3.25.5.tgz#0016e8158c904f7b059486639e6e82116eafa7d9" + integrity sha512-ovcyhs2DEBUIE0MGEKHP4olCUW/XYte3Vroyxuh38rD1wAO4dHohsovUC4eAOuzFxE6b+RXvBU3UZ9o0YhUTkA== + dependencies: + browserslist "^4.21.4" + +core-js-pure@^3.25.1, core-js-pure@^3.8.1: + version "3.25.5" + resolved "http://localhost:4873/core-js-pure/-/core-js-pure-3.25.5.tgz#79716ba54240c6aa9ceba6eee08cf79471ba184d" + integrity sha512-oml3M22pHM+igfWHDfdLVq2ShWmjM2V4L+dQEBs0DWVIqEm9WHCwGAlZ6BmyBQGy5sFrJmcx+856D9lVKyGWYg== + +core-js@^3.19.2: + version "3.25.5" + resolved "http://localhost:4873/core-js/-/core-js-3.25.5.tgz#e86f651a2ca8a0237a5f064c2fe56cef89646e27" + integrity sha512-nbm6eZSjm+ZuBQxCUPQKQCoUEfFOXjUZ8dTTyikyKaWrTYmAVbykQfwsKE5dBK88u3QCkCrzsx/PPlKfhsvgpw== + +core-util-is@~1.0.0: + version "1.0.3" + resolved "http://localhost:4873/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" + integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== + +cosmiconfig-typescript-loader@^4.1.1: + version "4.1.1" + resolved "http://localhost:4873/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-4.1.1.tgz#38dd3578344038dae40fdf09792bc2e9df529f78" + integrity 
sha512-9DHpa379Gp0o0Zefii35fcmuuin6q92FnLDffzdZ0l9tVd3nEobG3O+MZ06+kuBvFTSVScvNb/oHA13Nd4iipg== + +cosmiconfig@^6.0.0: + version "6.0.0" + resolved "http://localhost:4873/cosmiconfig/-/cosmiconfig-6.0.0.tgz#da4fee853c52f6b1e6935f41c1a2fc50bd4a9982" + integrity sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg== + dependencies: + "@types/parse-json" "^4.0.0" + import-fresh "^3.1.0" + parse-json "^5.0.0" + path-type "^4.0.0" + yaml "^1.7.2" + +cosmiconfig@^7.0.0, cosmiconfig@^7.0.1: + version "7.0.1" + resolved "http://localhost:4873/cosmiconfig/-/cosmiconfig-7.0.1.tgz#714d756522cace867867ccb4474c5d01bbae5d6d" + integrity sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ== + dependencies: + "@types/parse-json" "^4.0.0" + import-fresh "^3.2.1" + parse-json "^5.0.0" + path-type "^4.0.0" + yaml "^1.10.0" + +craco-wasm@0.0.1: + version "0.0.1" + resolved "http://localhost:4873/craco-wasm/-/craco-wasm-0.0.1.tgz#a7edbf7ff64e7569909b15684c00de13209985c6" + integrity sha512-0vwZLtkQocS7UlPg9IF4TsG/6gKXcd9O0ISomjRoBMvR2XvtZN4yxvU8/WlY0Vf42PtOcWvhSx9i4oVNxLVE6w== + +cross-spawn@^7.0.2, cross-spawn@^7.0.3: + version "7.0.3" + resolved "http://localhost:4873/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +crypto-random-string@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5" + integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== + +css-blank-pseudo@^3.0.3: + version "3.0.3" + resolved "http://localhost:4873/css-blank-pseudo/-/css-blank-pseudo-3.0.3.tgz#36523b01c12a25d812df343a32c322d2a2324561" + integrity 
sha512-VS90XWtsHGqoM0t4KpH053c4ehxZ2E6HtGI7x68YFV0pTo/QmkV/YFA+NnlvK8guxZVNWGQhVNJGC39Q8XF4OQ== + dependencies: + postcss-selector-parser "^6.0.9" + +css-declaration-sorter@^6.3.0: + version "6.3.1" + resolved "http://localhost:4873/css-declaration-sorter/-/css-declaration-sorter-6.3.1.tgz#be5e1d71b7a992433fb1c542c7a1b835e45682ec" + integrity sha512-fBffmak0bPAnyqc/HO8C3n2sHrp9wcqQz6ES9koRF2/mLOVAx9zIQ3Y7R29sYCteTPqMCwns4WYQoCX91Xl3+w== + +css-has-pseudo@^3.0.4: + version "3.0.4" + resolved "http://localhost:4873/css-has-pseudo/-/css-has-pseudo-3.0.4.tgz#57f6be91ca242d5c9020ee3e51bbb5b89fc7af73" + integrity sha512-Vse0xpR1K9MNlp2j5w1pgWIJtm1a8qS0JwS9goFYcImjlHEmywP9VUF05aGBXzGpDJF86QXk4L0ypBmwPhGArw== + dependencies: + postcss-selector-parser "^6.0.9" + +css-loader@^6.5.1: + version "6.7.1" + resolved "http://localhost:4873/css-loader/-/css-loader-6.7.1.tgz#e98106f154f6e1baf3fc3bc455cb9981c1d5fd2e" + integrity sha512-yB5CNFa14MbPJcomwNh3wLThtkZgcNyI2bNMRt8iE5Z8Vwl7f8vQXFAzn2HDOJvtDq2NTZBUGMSUNNyrv3/+cw== + dependencies: + icss-utils "^5.1.0" + postcss "^8.4.7" + postcss-modules-extract-imports "^3.0.0" + postcss-modules-local-by-default "^4.0.0" + postcss-modules-scope "^3.0.0" + postcss-modules-values "^4.0.0" + postcss-value-parser "^4.2.0" + semver "^7.3.5" + +css-minimizer-webpack-plugin@^3.2.0: + version "3.4.1" + resolved "http://localhost:4873/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-3.4.1.tgz#ab78f781ced9181992fe7b6e4f3422e76429878f" + integrity sha512-1u6D71zeIfgngN2XNRJefc/hY7Ybsxd74Jm4qngIXyUEk7fss3VUzuHxLAq/R8NAba4QU9OUSaMZlbpRc7bM4Q== + dependencies: + cssnano "^5.0.6" + jest-worker "^27.0.2" + postcss "^8.3.5" + schema-utils "^4.0.0" + serialize-javascript "^6.0.0" + source-map "^0.6.1" + +css-prefers-color-scheme@^6.0.3: + version "6.0.3" + resolved "http://localhost:4873/css-prefers-color-scheme/-/css-prefers-color-scheme-6.0.3.tgz#ca8a22e5992c10a5b9d315155e7caee625903349" + integrity 
sha512-4BqMbZksRkJQx2zAjrokiGMd07RqOa2IxIrrN10lyBe9xhn9DEvjUK79J6jkeiv9D9hQFXKb6g1jwU62jziJZA== + +css-select-base-adapter@^0.1.1: + version "0.1.1" + resolved "http://localhost:4873/css-select-base-adapter/-/css-select-base-adapter-0.1.1.tgz#3b2ff4972cc362ab88561507a95408a1432135d7" + integrity sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w== + +css-select@^2.0.0: + version "2.1.0" + resolved "http://localhost:4873/css-select/-/css-select-2.1.0.tgz#6a34653356635934a81baca68d0255432105dbef" + integrity sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ== + dependencies: + boolbase "^1.0.0" + css-what "^3.2.1" + domutils "^1.7.0" + nth-check "^1.0.2" + +css-select@^4.1.3: + version "4.3.0" + resolved "http://localhost:4873/css-select/-/css-select-4.3.0.tgz#db7129b2846662fd8628cfc496abb2b59e41529b" + integrity sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ== + dependencies: + boolbase "^1.0.0" + css-what "^6.0.1" + domhandler "^4.3.1" + domutils "^2.8.0" + nth-check "^2.0.1" + +css-tree@1.0.0-alpha.37: + version "1.0.0-alpha.37" + resolved "http://localhost:4873/css-tree/-/css-tree-1.0.0-alpha.37.tgz#98bebd62c4c1d9f960ec340cf9f7522e30709a22" + integrity sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg== + dependencies: + mdn-data "2.0.4" + source-map "^0.6.1" + +css-tree@^1.1.2, css-tree@^1.1.3: + version "1.1.3" + resolved "http://localhost:4873/css-tree/-/css-tree-1.1.3.tgz#eb4870fb6fd7707327ec95c2ff2ab09b5e8db91d" + integrity sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q== + dependencies: + mdn-data "2.0.14" + source-map "^0.6.1" + +css-what@^3.2.1: + version "3.4.2" + resolved "http://localhost:4873/css-what/-/css-what-3.4.2.tgz#ea7026fcb01777edbde52124e21f327e7ae950e4" + integrity 
sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ== + +css-what@^6.0.1: + version "6.1.0" + resolved "http://localhost:4873/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4" + integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw== + +css.escape@^1.5.1: + version "1.5.1" + resolved "http://localhost:4873/css.escape/-/css.escape-1.5.1.tgz#42e27d4fa04ae32f931a4b4d4191fa9cddee97cb" + integrity sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg== + +cssdb@^7.0.1: + version "7.0.1" + resolved "http://localhost:4873/cssdb/-/cssdb-7.0.1.tgz#3810a0c67ae06362982dfe965dbedf57a0f26617" + integrity sha512-pT3nzyGM78poCKLAEy2zWIVX2hikq6dIrjuZzLV98MumBg+xMTNYfHx7paUlfiRTgg91O/vR889CIf+qiv79Rw== + +cssesc@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" + integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== + +cssnano-preset-default@^5.2.12: + version "5.2.12" + resolved "http://localhost:4873/cssnano-preset-default/-/cssnano-preset-default-5.2.12.tgz#ebe6596ec7030e62c3eb2b3c09f533c0644a9a97" + integrity sha512-OyCBTZi+PXgylz9HAA5kHyoYhfGcYdwFmyaJzWnzxuGRtnMw/kR6ilW9XzlzlRAtB6PLT/r+prYgkef7hngFew== + dependencies: + css-declaration-sorter "^6.3.0" + cssnano-utils "^3.1.0" + postcss-calc "^8.2.3" + postcss-colormin "^5.3.0" + postcss-convert-values "^5.1.2" + postcss-discard-comments "^5.1.2" + postcss-discard-duplicates "^5.1.0" + postcss-discard-empty "^5.1.1" + postcss-discard-overridden "^5.1.0" + postcss-merge-longhand "^5.1.6" + postcss-merge-rules "^5.1.2" + postcss-minify-font-values "^5.1.0" + postcss-minify-gradients "^5.1.1" + postcss-minify-params "^5.1.3" + postcss-minify-selectors "^5.2.1" + postcss-normalize-charset "^5.1.0" + postcss-normalize-display-values "^5.1.0" + 
postcss-normalize-positions "^5.1.1" + postcss-normalize-repeat-style "^5.1.1" + postcss-normalize-string "^5.1.0" + postcss-normalize-timing-functions "^5.1.0" + postcss-normalize-unicode "^5.1.0" + postcss-normalize-url "^5.1.0" + postcss-normalize-whitespace "^5.1.1" + postcss-ordered-values "^5.1.3" + postcss-reduce-initial "^5.1.0" + postcss-reduce-transforms "^5.1.0" + postcss-svgo "^5.1.0" + postcss-unique-selectors "^5.1.1" + +cssnano-utils@^3.1.0: + version "3.1.0" + resolved "http://localhost:4873/cssnano-utils/-/cssnano-utils-3.1.0.tgz#95684d08c91511edfc70d2636338ca37ef3a6861" + integrity sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA== + +cssnano@^5.0.6: + version "5.1.13" + resolved "http://localhost:4873/cssnano/-/cssnano-5.1.13.tgz#83d0926e72955332dc4802a7070296e6258efc0a" + integrity sha512-S2SL2ekdEz6w6a2epXn4CmMKU4K3KpcyXLKfAYc9UQQqJRkD/2eLUG0vJ3Db/9OvO5GuAdgXw3pFbR6abqghDQ== + dependencies: + cssnano-preset-default "^5.2.12" + lilconfig "^2.0.3" + yaml "^1.10.2" + +csso@^4.0.2, csso@^4.2.0: + version "4.2.0" + resolved "http://localhost:4873/csso/-/csso-4.2.0.tgz#ea3a561346e8dc9f546d6febedd50187cf389529" + integrity sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA== + dependencies: + css-tree "^1.1.2" + +cssom@^0.4.4: + version "0.4.4" + resolved "http://localhost:4873/cssom/-/cssom-0.4.4.tgz#5a66cf93d2d0b661d80bf6a44fb65f5c2e4e0a10" + integrity sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw== + +cssom@~0.3.6: + version "0.3.8" + resolved "http://localhost:4873/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a" + integrity sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg== + +cssstyle@^2.3.0: + version "2.3.0" + resolved "http://localhost:4873/cssstyle/-/cssstyle-2.3.0.tgz#ff665a0ddbdc31864b09647f34163443d90b0852" + integrity 
sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A== + dependencies: + cssom "~0.3.6" + +csstype@^3.0.2: + version "3.1.1" + resolved "http://localhost:4873/csstype/-/csstype-3.1.1.tgz#841b532c45c758ee546a11d5bd7b7b473c8c30b9" + integrity sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw== + +damerau-levenshtein@^1.0.8: + version "1.0.8" + resolved "http://localhost:4873/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz#b43d286ccbd36bc5b2f7ed41caf2d0aba1f8a6e7" + integrity sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA== + +data-urls@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b" + integrity sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ== + dependencies: + abab "^2.0.3" + whatwg-mimetype "^2.3.0" + whatwg-url "^8.0.0" + +debug@2.6.9, debug@^2.6.0, debug@^2.6.9: + version "2.6.9" + resolved "http://localhost:4873/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.4: + version "4.3.4" + resolved "http://localhost:4873/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + +debug@^3.2.7: + version "3.2.7" + resolved "http://localhost:4873/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== + dependencies: + ms "^2.1.1" + +decimal.js@^10.2.1: + version "10.4.1" + resolved 
"http://localhost:4873/decimal.js/-/decimal.js-10.4.1.tgz#be75eeac4a2281aace80c1a8753587c27ef053e7" + integrity sha512-F29o+vci4DodHYT9UrR5IEbfBw9pE5eSapIJdTqXK5+6hq+t8VRxwQyKlW2i+KDKFkkJQRvFyI/QXD83h8LyQw== + +dedent@^0.7.0: + version "0.7.0" + resolved "http://localhost:4873/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" + integrity sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA== + +deep-is@^0.1.3, deep-is@~0.1.3: + version "0.1.4" + resolved "http://localhost:4873/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== + +deepmerge@^4.2.2: + version "4.2.2" + resolved "http://localhost:4873/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" + integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== + +default-gateway@^6.0.3: + version "6.0.3" + resolved "http://localhost:4873/default-gateway/-/default-gateway-6.0.3.tgz#819494c888053bdb743edbf343d6cdf7f2943a71" + integrity sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg== + dependencies: + execa "^5.0.0" + +define-lazy-prop@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" + integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== + +define-properties@^1.1.3, define-properties@^1.1.4: + version "1.1.4" + resolved "http://localhost:4873/define-properties/-/define-properties-1.1.4.tgz#0b14d7bd7fbeb2f3572c3a7eda80ea5d57fb05b1" + integrity sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA== + dependencies: + has-property-descriptors "^1.0.0" + object-keys "^1.1.1" + +defined@^1.0.0: + version "1.0.0" + resolved 
"http://localhost:4873/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693" + integrity sha512-Y2caI5+ZwS5c3RiNDJ6u53VhQHv+hHKwhkI1iHvceKUHw9Df6EK2zRLfjejRgMuCuxK7PfSWIMwWecceVvThjQ== + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "http://localhost:4873/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== + +depd@2.0.0: + version "2.0.0" + resolved "http://localhost:4873/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" + integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== + +depd@~1.1.2: + version "1.1.2" + resolved "http://localhost:4873/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" + integrity sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ== + +destroy@1.2.0: + version "1.2.0" + resolved "http://localhost:4873/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" + integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== + +detect-newline@^3.0.0: + version "3.1.0" + resolved "http://localhost:4873/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" + integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== + +detect-node@^2.0.4: + version "2.1.0" + resolved "http://localhost:4873/detect-node/-/detect-node-2.1.0.tgz#c9c70775a49c3d03bc2c06d9a73be550f978f8b1" + integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g== + +detect-port-alt@^1.1.6: + version "1.1.6" + resolved "http://localhost:4873/detect-port-alt/-/detect-port-alt-1.1.6.tgz#24707deabe932d4a3cf621302027c2b266568275" + integrity 
sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q== + dependencies: + address "^1.0.1" + debug "^2.6.0" + +detective@^5.2.1: + version "5.2.1" + resolved "http://localhost:4873/detective/-/detective-5.2.1.tgz#6af01eeda11015acb0e73f933242b70f24f91034" + integrity sha512-v9XE1zRnz1wRtgurGu0Bs8uHKFSTdteYZNbIPFVhUZ39L/S79ppMpdmVOZAnoz1jfEFodc48n6MX483Xo3t1yw== + dependencies: + acorn-node "^1.8.2" + defined "^1.0.0" + minimist "^1.2.6" + +didyoumean@^1.2.2: + version "1.2.2" + resolved "http://localhost:4873/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037" + integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw== + +diff-sequences@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/diff-sequences/-/diff-sequences-27.5.1.tgz#eaecc0d327fd68c8d9672a1e64ab8dccb2ef5327" + integrity sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ== + +diff-sequences@^29.0.0: + version "29.0.0" + resolved "http://localhost:4873/diff-sequences/-/diff-sequences-29.0.0.tgz#bae49972ef3933556bcb0800b72e8579d19d9e4f" + integrity sha512-7Qe/zd1wxSDL4D/X/FPjOMB+ZMDt71W94KYaq05I2l0oQqgXgs7s4ftYYmV38gBSrPz2vcygxfs1xn0FT+rKNA== + +dir-glob@^3.0.1: + version "3.0.1" + resolved "http://localhost:4873/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== + dependencies: + path-type "^4.0.0" + +dlv@^1.1.3: + version "1.1.3" + resolved "http://localhost:4873/dlv/-/dlv-1.1.3.tgz#5c198a8a11453596e751494d49874bc7732f2e79" + integrity sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA== + +dns-equal@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" + integrity 
sha512-z+paD6YUQsk+AbGCEM4PrOXSss5gd66QfcVBFTKR/HpFL9jCqikS94HYwKww6fQyO7IxrIIyUu+g0Ka9tUS2Cg== + +dns-packet@^5.2.2: + version "5.4.0" + resolved "http://localhost:4873/dns-packet/-/dns-packet-5.4.0.tgz#1f88477cf9f27e78a213fb6d118ae38e759a879b" + integrity sha512-EgqGeaBB8hLiHLZtp/IbaDQTL8pZ0+IvwzSHA6d7VyMDM+B9hgddEMa9xjK5oYnw0ci0JQ6g2XCD7/f6cafU6g== + dependencies: + "@leichtgewicht/ip-codec" "^2.0.1" + +doctrine@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== + dependencies: + esutils "^2.0.2" + +doctrine@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== + dependencies: + esutils "^2.0.2" + +dom-accessibility-api@^0.5.6, dom-accessibility-api@^0.5.9: + version "0.5.14" + resolved "http://localhost:4873/dom-accessibility-api/-/dom-accessibility-api-0.5.14.tgz#56082f71b1dc7aac69d83c4285eef39c15d93f56" + integrity sha512-NMt+m9zFMPZe0JcY9gN224Qvk6qLIdqex29clBvc/y75ZBX9YA9wNK3frsYvu2DI1xcCIwxwnX+TlsJ2DSOADg== + +dom-converter@^0.2.0: + version "0.2.0" + resolved "http://localhost:4873/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" + integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== + dependencies: + utila "~0.4" + +dom-serializer@0: + version "0.2.2" + resolved "http://localhost:4873/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" + integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== + dependencies: + domelementtype "^2.0.1" + entities "^2.0.0" + +dom-serializer@^1.0.1: + version "1.4.1" + resolved 
"http://localhost:4873/dom-serializer/-/dom-serializer-1.4.1.tgz#de5d41b1aea290215dc45a6dae8adcf1d32e2d30" + integrity sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag== + dependencies: + domelementtype "^2.0.1" + domhandler "^4.2.0" + entities "^2.0.0" + +domelementtype@1: + version "1.3.1" + resolved "http://localhost:4873/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" + integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== + +domelementtype@^2.0.1, domelementtype@^2.2.0: + version "2.3.0" + resolved "http://localhost:4873/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d" + integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== + +domexception@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/domexception/-/domexception-2.0.1.tgz#fb44aefba793e1574b0af6aed2801d057529f304" + integrity sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg== + dependencies: + webidl-conversions "^5.0.0" + +domhandler@^4.0.0, domhandler@^4.2.0, domhandler@^4.3.1: + version "4.3.1" + resolved "http://localhost:4873/domhandler/-/domhandler-4.3.1.tgz#8d792033416f59d68bc03a5aa7b018c1ca89279c" + integrity sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ== + dependencies: + domelementtype "^2.2.0" + +domutils@^1.7.0: + version "1.7.0" + resolved "http://localhost:4873/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" + integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== + dependencies: + dom-serializer "0" + domelementtype "1" + +domutils@^2.5.2, domutils@^2.8.0: + version "2.8.0" + resolved "http://localhost:4873/domutils/-/domutils-2.8.0.tgz#4437def5db6e2d1f5d6ee859bd95ca7d02048135" + integrity 
sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A== + dependencies: + dom-serializer "^1.0.1" + domelementtype "^2.2.0" + domhandler "^4.2.0" + +dot-case@^3.0.4: + version "3.0.4" + resolved "http://localhost:4873/dot-case/-/dot-case-3.0.4.tgz#9b2b670d00a431667a8a75ba29cd1b98809ce751" + integrity sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" + +dotenv-expand@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/dotenv-expand/-/dotenv-expand-5.1.0.tgz#3fbaf020bfd794884072ea26b1e9791d45a629f0" + integrity sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA== + +dotenv@^10.0.0: + version "10.0.0" + resolved "http://localhost:4873/dotenv/-/dotenv-10.0.0.tgz#3d4227b8fb95f81096cdd2b66653fb2c7085ba81" + integrity sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q== + +duplexer@^0.1.2: + version "0.1.2" + resolved "http://localhost:4873/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" + integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== + +ee-first@1.1.1: + version "1.1.1" + resolved "http://localhost:4873/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" + integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== + +ejs@^3.1.6: + version "3.1.8" + resolved "http://localhost:4873/ejs/-/ejs-3.1.8.tgz#758d32910c78047585c7ef1f92f9ee041c1c190b" + integrity sha512-/sXZeMlhS0ArkfX2Aw780gJzXSMPnKjtspYZv+f3NiKLlubezAHDU5+9xz6gd3/NhG3txQCo6xlglmTS+oTGEQ== + dependencies: + jake "^10.8.5" + +electron-to-chromium@^1.4.251: + version "1.4.274" + resolved "http://localhost:4873/electron-to-chromium/-/electron-to-chromium-1.4.274.tgz#74369ac6f020c3cea7c77ec040ddf159fe226233" + integrity 
sha512-Fgn7JZQzq85I81FpKUNxVLAzoghy8JZJ4NIue+YfUYBbu1AkpgzFvNwzF/ZNZH9ElkmJD0TSWu1F2gTpw/zZlg== + +emittery@^0.10.2: + version "0.10.2" + resolved "http://localhost:4873/emittery/-/emittery-0.10.2.tgz#902eec8aedb8c41938c46e9385e9db7e03182933" + integrity sha512-aITqOwnLanpHLNXZJENbOgjUBeHocD+xsSJmNrjovKBW5HbSpW3d1pEls7GFQPUWXiwG9+0P4GtHfEqC/4M0Iw== + +emittery@^0.8.1: + version "0.8.1" + resolved "http://localhost:4873/emittery/-/emittery-0.8.1.tgz#bb23cc86d03b30aa75a7f734819dee2e1ba70860" + integrity sha512-uDfvUjVrfGJJhymx/kz6prltenw1u7WrCg1oa94zYY8xxVpLLUu045LAT0dhDZdXG58/EpPL/5kA180fQ/qudg== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "http://localhost:4873/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +emoji-regex@^9.2.2: + version "9.2.2" + resolved "http://localhost:4873/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" + integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== + +emojis-list@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" + integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== + +encodeurl@~1.0.2: + version "1.0.2" + resolved "http://localhost:4873/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" + integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== + +enhanced-resolve@^5.10.0: + version "5.10.0" + resolved "http://localhost:4873/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz#0dc579c3bb2a1032e357ac45b8f3a6f3ad4fb1e6" + integrity sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ== + dependencies: + graceful-fs "^4.2.4" + tapable "^2.2.0" + 
+entities@^2.0.0: + version "2.2.0" + resolved "http://localhost:4873/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55" + integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== + +error-ex@^1.3.1: + version "1.3.2" + resolved "http://localhost:4873/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" + integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== + dependencies: + is-arrayish "^0.2.1" + +error-stack-parser@^2.0.6: + version "2.1.4" + resolved "http://localhost:4873/error-stack-parser/-/error-stack-parser-2.1.4.tgz#229cb01cdbfa84440bfa91876285b94680188286" + integrity sha512-Sk5V6wVazPhq5MhpO+AUxJn5x7XSXGl1R93Vn7i+zS15KDVxQijejNCrz8340/2bgLBjR9GtEG8ZVKONDjcqGQ== + dependencies: + stackframe "^1.3.4" + +es-abstract@^1.17.2, es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.2, es-abstract@^1.19.5, es-abstract@^1.20.1: + version "1.20.4" + resolved "http://localhost:4873/es-abstract/-/es-abstract-1.20.4.tgz#1d103f9f8d78d4cf0713edcd6d0ed1a46eed5861" + integrity sha512-0UtvRN79eMe2L+UNEF1BwRe364sj/DXhQ/k5FmivgoSdpM90b8Jc0mDzKMGo7QS0BVbOP/bTwBKNnDc9rNzaPA== + dependencies: + call-bind "^1.0.2" + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + function.prototype.name "^1.1.5" + get-intrinsic "^1.1.3" + get-symbol-description "^1.0.0" + has "^1.0.3" + has-property-descriptors "^1.0.0" + has-symbols "^1.0.3" + internal-slot "^1.0.3" + is-callable "^1.2.7" + is-negative-zero "^2.0.2" + is-regex "^1.1.4" + is-shared-array-buffer "^1.0.2" + is-string "^1.0.7" + is-weakref "^1.0.2" + object-inspect "^1.12.2" + object-keys "^1.1.1" + object.assign "^4.1.4" + regexp.prototype.flags "^1.4.3" + safe-regex-test "^1.0.0" + string.prototype.trimend "^1.0.5" + string.prototype.trimstart "^1.0.5" + unbox-primitive "^1.0.2" + +es-array-method-boxes-properly@^1.0.0: + version "1.0.0" + resolved 
"http://localhost:4873/es-array-method-boxes-properly/-/es-array-method-boxes-properly-1.0.0.tgz#873f3e84418de4ee19c5be752990b2e44718d09e" + integrity sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA== + +es-module-lexer@^0.9.0: + version "0.9.3" + resolved "http://localhost:4873/es-module-lexer/-/es-module-lexer-0.9.3.tgz#6f13db00cc38417137daf74366f535c8eb438f19" + integrity sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ== + +es-shim-unscopables@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz#702e632193201e3edf8713635d083d378e510241" + integrity sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w== + dependencies: + has "^1.0.3" + +es-to-primitive@^1.2.1: + version "1.2.1" + resolved "http://localhost:4873/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +escalade@^3.1.1: + version "3.1.1" + resolved "http://localhost:4873/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== + +escape-html@~1.0.3: + version "1.0.3" + resolved "http://localhost:4873/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" + integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== + +escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "http://localhost:4873/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== + 
+escape-string-regexp@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== + +escape-string-regexp@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== + +escodegen@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/escodegen/-/escodegen-2.0.0.tgz#5e32b12833e8aa8fa35e1bf0befa89380484c7dd" + integrity sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw== + dependencies: + esprima "^4.0.1" + estraverse "^5.2.0" + esutils "^2.0.2" + optionator "^0.8.1" + optionalDependencies: + source-map "~0.6.1" + +eslint-config-react-app@^7.0.1: + version "7.0.1" + resolved "http://localhost:4873/eslint-config-react-app/-/eslint-config-react-app-7.0.1.tgz#73ba3929978001c5c86274c017ea57eb5fa644b4" + integrity sha512-K6rNzvkIeHaTd8m/QEh1Zko0KI7BACWkkneSs6s9cKZC/J27X3eZR6Upt1jkmZ/4FK+XUOPPxMEN7+lbUXfSlA== + dependencies: + "@babel/core" "^7.16.0" + "@babel/eslint-parser" "^7.16.3" + "@rushstack/eslint-patch" "^1.1.0" + "@typescript-eslint/eslint-plugin" "^5.5.0" + "@typescript-eslint/parser" "^5.5.0" + babel-preset-react-app "^10.0.1" + confusing-browser-globals "^1.0.11" + eslint-plugin-flowtype "^8.0.3" + eslint-plugin-import "^2.25.3" + eslint-plugin-jest "^25.3.0" + eslint-plugin-jsx-a11y "^6.5.1" + eslint-plugin-react "^7.27.1" + eslint-plugin-react-hooks "^4.3.0" + eslint-plugin-testing-library "^5.0.1" + +eslint-import-resolver-node@^0.3.6: + version "0.3.6" + resolved "http://localhost:4873/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz#4048b958395da89668252001dbd9eca6b83bacbd" + integrity 
sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw== + dependencies: + debug "^3.2.7" + resolve "^1.20.0" + +eslint-module-utils@^2.7.3: + version "2.7.4" + resolved "http://localhost:4873/eslint-module-utils/-/eslint-module-utils-2.7.4.tgz#4f3e41116aaf13a20792261e61d3a2e7e0583974" + integrity sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA== + dependencies: + debug "^3.2.7" + +eslint-plugin-flowtype@^8.0.3: + version "8.0.3" + resolved "http://localhost:4873/eslint-plugin-flowtype/-/eslint-plugin-flowtype-8.0.3.tgz#e1557e37118f24734aa3122e7536a038d34a4912" + integrity sha512-dX8l6qUL6O+fYPtpNRideCFSpmWOUVx5QcaGLVqe/vlDiBSe4vYljDWDETwnyFzpl7By/WVIu6rcrniCgH9BqQ== + dependencies: + lodash "^4.17.21" + string-natural-compare "^3.0.1" + +eslint-plugin-import@^2.25.3: + version "2.26.0" + resolved "http://localhost:4873/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz#f812dc47be4f2b72b478a021605a59fc6fe8b88b" + integrity sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA== + dependencies: + array-includes "^3.1.4" + array.prototype.flat "^1.2.5" + debug "^2.6.9" + doctrine "^2.1.0" + eslint-import-resolver-node "^0.3.6" + eslint-module-utils "^2.7.3" + has "^1.0.3" + is-core-module "^2.8.1" + is-glob "^4.0.3" + minimatch "^3.1.2" + object.values "^1.1.5" + resolve "^1.22.0" + tsconfig-paths "^3.14.1" + +eslint-plugin-jest@^25.3.0: + version "25.7.0" + resolved "http://localhost:4873/eslint-plugin-jest/-/eslint-plugin-jest-25.7.0.tgz#ff4ac97520b53a96187bad9c9814e7d00de09a6a" + integrity sha512-PWLUEXeeF7C9QGKqvdSbzLOiLTx+bno7/HC9eefePfEb257QFHg7ye3dh80AZVkaa/RQsBB1Q/ORQvg2X7F0NQ== + dependencies: + "@typescript-eslint/experimental-utils" "^5.0.0" + +eslint-plugin-jsx-a11y@^6.5.1: + version "6.6.1" + resolved "http://localhost:4873/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.6.1.tgz#93736fc91b83fdc38cc8d115deedfc3091aef1ff" + 
integrity sha512-sXgFVNHiWffBq23uiS/JaP6eVR622DqwB4yTzKvGZGcPq6/yZ3WmOZfuBks/vHWo9GaFOqC2ZK4i6+C35knx7Q== + dependencies: + "@babel/runtime" "^7.18.9" + aria-query "^4.2.2" + array-includes "^3.1.5" + ast-types-flow "^0.0.7" + axe-core "^4.4.3" + axobject-query "^2.2.0" + damerau-levenshtein "^1.0.8" + emoji-regex "^9.2.2" + has "^1.0.3" + jsx-ast-utils "^3.3.2" + language-tags "^1.0.5" + minimatch "^3.1.2" + semver "^6.3.0" + +eslint-plugin-react-hooks@^4.3.0: + version "4.6.0" + resolved "http://localhost:4873/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz#4c3e697ad95b77e93f8646aaa1630c1ba607edd3" + integrity sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g== + +eslint-plugin-react@^7.27.1: + version "7.31.8" + resolved "http://localhost:4873/eslint-plugin-react/-/eslint-plugin-react-7.31.8.tgz#3a4f80c10be1bcbc8197be9e8b641b2a3ef219bf" + integrity sha512-5lBTZmgQmARLLSYiwI71tiGVTLUuqXantZM6vlSY39OaDSV0M7+32K5DnLkmFrwTe+Ksz0ffuLUC91RUviVZfw== + dependencies: + array-includes "^3.1.5" + array.prototype.flatmap "^1.3.0" + doctrine "^2.1.0" + estraverse "^5.3.0" + jsx-ast-utils "^2.4.1 || ^3.0.0" + minimatch "^3.1.2" + object.entries "^1.1.5" + object.fromentries "^2.0.5" + object.hasown "^1.1.1" + object.values "^1.1.5" + prop-types "^15.8.1" + resolve "^2.0.0-next.3" + semver "^6.3.0" + string.prototype.matchall "^4.0.7" + +eslint-plugin-testing-library@^5.0.1: + version "5.7.2" + resolved "http://localhost:4873/eslint-plugin-testing-library/-/eslint-plugin-testing-library-5.7.2.tgz#c1b2112a40aab61f93e10859e8b2d81e54f0ce84" + integrity sha512-0ZmHeR/DUUgEzW8rwUBRWxuqntipDtpvxK0hymdHnLlABryJkzd+CAHr+XnISaVsTisZ5MLHp6nQF+8COHLLTA== + dependencies: + "@typescript-eslint/utils" "^5.13.0" + +eslint-scope@5.1.1, eslint-scope@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" + integrity 
sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== + dependencies: + esrecurse "^4.3.0" + estraverse "^4.1.1" + +eslint-scope@^7.1.1: + version "7.1.1" + resolved "http://localhost:4873/eslint-scope/-/eslint-scope-7.1.1.tgz#fff34894c2f65e5226d3041ac480b4513a163642" + integrity sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw== + dependencies: + esrecurse "^4.3.0" + estraverse "^5.2.0" + +eslint-utils@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/eslint-utils/-/eslint-utils-3.0.0.tgz#8aebaface7345bb33559db0a1f13a1d2d48c3672" + integrity sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA== + dependencies: + eslint-visitor-keys "^2.0.0" + +eslint-visitor-keys@^2.0.0, eslint-visitor-keys@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" + integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== + +eslint-visitor-keys@^3.3.0: + version "3.3.0" + resolved "http://localhost:4873/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826" + integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA== + +eslint-webpack-plugin@^3.1.1: + version "3.2.0" + resolved "http://localhost:4873/eslint-webpack-plugin/-/eslint-webpack-plugin-3.2.0.tgz#1978cdb9edc461e4b0195a20da950cf57988347c" + integrity sha512-avrKcGncpPbPSUHX6B3stNGzkKFto3eL+DKM4+VyMrVnhPc3vRczVlCq3uhuFOdRvDHTVXuzwk1ZKUrqDQHQ9w== + dependencies: + "@types/eslint" "^7.29.0 || ^8.4.1" + jest-worker "^28.0.2" + micromatch "^4.0.5" + normalize-path "^3.0.0" + schema-utils "^4.0.0" + +eslint@^8.3.0: + version "8.24.0" + resolved "http://localhost:4873/eslint/-/eslint-8.24.0.tgz#489516c927a5da11b3979dbfb2679394523383c8" + integrity 
sha512-dWFaPhGhTAiPcCgm3f6LI2MBWbogMnTJzFBbhXVRQDJPkr9pGZvVjlVfXd+vyDcWPA2Ic9L2AXPIQM0+vk/cSQ== + dependencies: + "@eslint/eslintrc" "^1.3.2" + "@humanwhocodes/config-array" "^0.10.5" + "@humanwhocodes/gitignore-to-minimatch" "^1.0.2" + "@humanwhocodes/module-importer" "^1.0.1" + ajv "^6.10.0" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.3.2" + doctrine "^3.0.0" + escape-string-regexp "^4.0.0" + eslint-scope "^7.1.1" + eslint-utils "^3.0.0" + eslint-visitor-keys "^3.3.0" + espree "^9.4.0" + esquery "^1.4.0" + esutils "^2.0.2" + fast-deep-equal "^3.1.3" + file-entry-cache "^6.0.1" + find-up "^5.0.0" + glob-parent "^6.0.1" + globals "^13.15.0" + globby "^11.1.0" + grapheme-splitter "^1.0.4" + ignore "^5.2.0" + import-fresh "^3.0.0" + imurmurhash "^0.1.4" + is-glob "^4.0.0" + js-sdsl "^4.1.4" + js-yaml "^4.1.0" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.4.1" + lodash.merge "^4.6.2" + minimatch "^3.1.2" + natural-compare "^1.4.0" + optionator "^0.9.1" + regexpp "^3.2.0" + strip-ansi "^6.0.1" + strip-json-comments "^3.1.0" + text-table "^0.2.0" + +espree@^9.4.0: + version "9.4.0" + resolved "http://localhost:4873/espree/-/espree-9.4.0.tgz#cd4bc3d6e9336c433265fc0aa016fc1aaf182f8a" + integrity sha512-DQmnRpLj7f6TgN/NYb0MTzJXL+vJF9h3pHy4JhCIs3zwcgez8xmGg3sXHcEO97BrmO2OSvCwMdfdlyl+E9KjOw== + dependencies: + acorn "^8.8.0" + acorn-jsx "^5.3.2" + eslint-visitor-keys "^3.3.0" + +esprima@^4.0.0, esprima@^4.0.1: + version "4.0.1" + resolved "http://localhost:4873/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +esquery@^1.4.0: + version "1.4.0" + resolved "http://localhost:4873/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" + integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== + dependencies: + estraverse "^5.1.0" + +esrecurse@^4.3.0: + version 
"4.3.0" + resolved "http://localhost:4873/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== + dependencies: + estraverse "^5.2.0" + +estraverse@^4.1.1: + version "4.3.0" + resolved "http://localhost:4873/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== + +estraverse@^5.1.0, estraverse@^5.2.0, estraverse@^5.3.0: + version "5.3.0" + resolved "http://localhost:4873/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== + +estree-walker@^1.0.1: + version "1.0.1" + resolved "http://localhost:4873/estree-walker/-/estree-walker-1.0.1.tgz#31bc5d612c96b704106b477e6dd5d8aa138cb700" + integrity sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg== + +esutils@^2.0.2: + version "2.0.3" + resolved "http://localhost:4873/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +etag@~1.8.1: + version "1.8.1" + resolved "http://localhost:4873/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" + integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== + +eventemitter3@^4.0.0: + version "4.0.7" + resolved "http://localhost:4873/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" + integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== + +events@^3.2.0: + version "3.3.0" + resolved "http://localhost:4873/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" + integrity 
sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== + +execa@^5.0.0: + version "5.1.1" + resolved "http://localhost:4873/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" + integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== + dependencies: + cross-spawn "^7.0.3" + get-stream "^6.0.0" + human-signals "^2.1.0" + is-stream "^2.0.0" + merge-stream "^2.0.0" + npm-run-path "^4.0.1" + onetime "^5.1.2" + signal-exit "^3.0.3" + strip-final-newline "^2.0.0" + +exit@^0.1.2: + version "0.1.2" + resolved "http://localhost:4873/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" + integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== + +expect@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/expect/-/expect-27.5.1.tgz#83ce59f1e5bdf5f9d2b94b61d2050db48f3fef74" + integrity sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw== + dependencies: + "@jest/types" "^27.5.1" + jest-get-type "^27.5.1" + jest-matcher-utils "^27.5.1" + jest-message-util "^27.5.1" + +expect@^29.0.0: + version "29.1.2" + resolved "http://localhost:4873/expect/-/expect-29.1.2.tgz#82f8f28d7d408c7c68da3a386a490ee683e1eced" + integrity sha512-AuAGn1uxva5YBbBlXb+2JPxJRuemZsmlGcapPXWNSBNsQtAULfjioREGBWuI0EOvYUKjDnrCy8PW5Zlr1md5mw== + dependencies: + "@jest/expect-utils" "^29.1.2" + jest-get-type "^29.0.0" + jest-matcher-utils "^29.1.2" + jest-message-util "^29.1.2" + jest-util "^29.1.2" + +express@^4.17.3: + version "4.18.1" + resolved "http://localhost:4873/express/-/express-4.18.1.tgz#7797de8b9c72c857b9cd0e14a5eea80666267caf" + integrity sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q== + dependencies: + accepts "~1.3.8" + array-flatten "1.1.1" + body-parser "1.20.0" + content-disposition "0.5.4" + content-type "~1.0.4" + cookie "0.5.0" + 
cookie-signature "1.0.6" + debug "2.6.9" + depd "2.0.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + finalhandler "1.2.0" + fresh "0.5.2" + http-errors "2.0.0" + merge-descriptors "1.0.1" + methods "~1.1.2" + on-finished "2.4.1" + parseurl "~1.3.3" + path-to-regexp "0.1.7" + proxy-addr "~2.0.7" + qs "6.10.3" + range-parser "~1.2.1" + safe-buffer "5.2.1" + send "0.18.0" + serve-static "1.15.0" + setprototypeof "1.2.0" + statuses "2.0.1" + type-is "~1.6.18" + utils-merge "1.0.1" + vary "~1.1.2" + +fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: + version "3.1.3" + resolved "http://localhost:4873/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-glob@^3.2.11, fast-glob@^3.2.9: + version "3.2.12" + resolved "http://localhost:4873/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" + integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.4" + +fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@^2.0.6, fast-levenshtein@~2.0.6: + version "2.0.6" + resolved "http://localhost:4873/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== + +fastq@^1.6.0: + version "1.13.0" + resolved 
"http://localhost:4873/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" + integrity sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw== + dependencies: + reusify "^1.0.4" + +faye-websocket@^0.11.3: + version "0.11.4" + resolved "http://localhost:4873/faye-websocket/-/faye-websocket-0.11.4.tgz#7f0d9275cfdd86a1c963dc8b65fcc451edcbb1da" + integrity sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g== + dependencies: + websocket-driver ">=0.5.1" + +fb-watchman@^2.0.0: + version "2.0.2" + resolved "http://localhost:4873/fb-watchman/-/fb-watchman-2.0.2.tgz#e9524ee6b5c77e9e5001af0f85f3adbb8623255c" + integrity sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA== + dependencies: + bser "2.1.1" + +file-entry-cache@^6.0.1: + version "6.0.1" + resolved "http://localhost:4873/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== + dependencies: + flat-cache "^3.0.4" + +file-loader@^6.2.0: + version "6.2.0" + resolved "http://localhost:4873/file-loader/-/file-loader-6.2.0.tgz#baef7cf8e1840df325e4390b4484879480eebe4d" + integrity sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw== + dependencies: + loader-utils "^2.0.0" + schema-utils "^3.0.0" + +filelist@^1.0.1: + version "1.0.4" + resolved "http://localhost:4873/filelist/-/filelist-1.0.4.tgz#f78978a1e944775ff9e62e744424f215e58352b5" + integrity sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q== + dependencies: + minimatch "^5.0.1" + +filesize@^8.0.6: + version "8.0.7" + resolved "http://localhost:4873/filesize/-/filesize-8.0.7.tgz#695e70d80f4e47012c132d57a059e80c6b580bd8" + integrity 
sha512-pjmC+bkIF8XI7fWaH8KxHcZL3DPybs1roSKP4rKDvy20tAWwIObE4+JIseG2byfGKhud5ZnM4YSGKBz7Sh0ndQ== + +fill-range@^7.0.1: + version "7.0.1" + resolved "http://localhost:4873/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== + dependencies: + to-regex-range "^5.0.1" + +finalhandler@1.2.0: + version "1.2.0" + resolved "http://localhost:4873/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" + integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== + dependencies: + debug "2.6.9" + encodeurl "~1.0.2" + escape-html "~1.0.3" + on-finished "2.4.1" + parseurl "~1.3.3" + statuses "2.0.1" + unpipe "~1.0.0" + +find-cache-dir@^3.3.1: + version "3.3.2" + resolved "http://localhost:4873/find-cache-dir/-/find-cache-dir-3.3.2.tgz#b30c5b6eff0730731aea9bbd9dbecbd80256d64b" + integrity sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig== + dependencies: + commondir "^1.0.1" + make-dir "^3.0.2" + pkg-dir "^4.1.0" + +find-up@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" + integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== + dependencies: + locate-path "^3.0.0" + +find-up@^4.0.0, find-up@^4.1.0: + version "4.1.0" + resolved "http://localhost:4873/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" + integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== + dependencies: + locate-path "^5.0.0" + path-exists "^4.0.0" + +find-up@^5.0.0: + version "5.0.0" + resolved "http://localhost:4873/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" + integrity 
sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== + dependencies: + locate-path "^6.0.0" + path-exists "^4.0.0" + +flat-cache@^3.0.4: + version "3.0.4" + resolved "http://localhost:4873/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" + integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + dependencies: + flatted "^3.1.0" + rimraf "^3.0.2" + +flatted@^3.1.0: + version "3.2.7" + resolved "http://localhost:4873/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787" + integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ== + +follow-redirects@^1.0.0: + version "1.15.2" + resolved "http://localhost:4873/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" + integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== + +fork-ts-checker-webpack-plugin@^6.5.0: + version "6.5.2" + resolved "http://localhost:4873/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.2.tgz#4f67183f2f9eb8ba7df7177ce3cf3e75cdafb340" + integrity sha512-m5cUmF30xkZ7h4tWUgTAcEaKmUW7tfyUyTqNNOz7OxWJ0v1VWKTcOvH8FWHUwSjlW/356Ijc9vi3XfcPstpQKA== + dependencies: + "@babel/code-frame" "^7.8.3" + "@types/json-schema" "^7.0.5" + chalk "^4.1.0" + chokidar "^3.4.2" + cosmiconfig "^6.0.0" + deepmerge "^4.2.2" + fs-extra "^9.0.0" + glob "^7.1.6" + memfs "^3.1.2" + minimatch "^3.0.4" + schema-utils "2.7.0" + semver "^7.3.2" + tapable "^1.0.0" + +form-data@^3.0.0: + version "3.0.1" + resolved "http://localhost:4873/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" + integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + +forwarded@0.2.0: + version "0.2.0" + resolved 
"http://localhost:4873/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" + integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== + +fraction.js@^4.2.0: + version "4.2.0" + resolved "http://localhost:4873/fraction.js/-/fraction.js-4.2.0.tgz#448e5109a313a3527f5a3ab2119ec4cf0e0e2950" + integrity sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA== + +fresh@0.5.2: + version "0.5.2" + resolved "http://localhost:4873/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" + integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== + +fs-extra@^10.0.0: + version "10.1.0" + resolved "http://localhost:4873/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf" + integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + +fs-extra@^9.0.0, fs-extra@^9.0.1: + version "9.1.0" + resolved "http://localhost:4873/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d" + integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== + dependencies: + at-least-node "^1.0.0" + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + +fs-monkey@^1.0.3: + version "1.0.3" + resolved "http://localhost:4873/fs-monkey/-/fs-monkey-1.0.3.tgz#ae3ac92d53bb328efe0e9a1d9541f6ad8d48e2d3" + integrity sha512-cybjIfiiE+pTWicSCLFHSrXZ6EilF30oh91FDP9S2B051prEa7QWfrVTQm10/dDpswBDXZugPa1Ogu8Yh+HV0Q== + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== + +fsevents@^2.3.2, fsevents@~2.3.2: + version "2.3.2" + 
resolved "http://localhost:4873/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" + integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + +function-bind@^1.1.1: + version "1.1.1" + resolved "http://localhost:4873/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +function.prototype.name@^1.1.5: + version "1.1.5" + resolved "http://localhost:4873/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" + integrity sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.0" + functions-have-names "^1.2.2" + +functions-have-names@^1.2.2: + version "1.2.3" + resolved "http://localhost:4873/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" + integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== + +gensync@^1.0.0-beta.2: + version "1.0.0-beta.2" + resolved "http://localhost:4873/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== + +get-caller-file@^2.0.5: + version "2.0.5" + resolved "http://localhost:4873/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + +get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3: + version "1.1.3" + resolved "http://localhost:4873/get-intrinsic/-/get-intrinsic-1.1.3.tgz#063c84329ad93e83893c7f4f243ef63ffa351385" + integrity 
sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A== + dependencies: + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.3" + +get-own-enumerable-property-symbols@^3.0.0: + version "3.0.2" + resolved "http://localhost:4873/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664" + integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g== + +get-package-type@^0.1.0: + version "0.1.0" + resolved "http://localhost:4873/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== + +get-stream@^6.0.0: + version "6.0.1" + resolved "http://localhost:4873/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" + integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== + +get-symbol-description@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" + integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.1" + +glob-parent@^5.1.2, glob-parent@~5.1.2: + version "5.1.2" + resolved "http://localhost:4873/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + dependencies: + is-glob "^4.0.1" + +glob-parent@^6.0.1, glob-parent@^6.0.2: + version "6.0.2" + resolved "http://localhost:4873/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" + integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== + 
dependencies: + is-glob "^4.0.3" + +glob-to-regexp@^0.4.1: + version "0.4.1" + resolved "http://localhost:4873/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e" + integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== + +glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: + version "7.2.3" + resolved "http://localhost:4873/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" + integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.1.1" + once "^1.3.0" + path-is-absolute "^1.0.0" + +global-modules@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" + integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== + dependencies: + global-prefix "^3.0.0" + +global-prefix@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97" + integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== + dependencies: + ini "^1.3.5" + kind-of "^6.0.2" + which "^1.3.1" + +globals@^11.1.0: + version "11.12.0" + resolved "http://localhost:4873/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +globals@^13.15.0: + version "13.17.0" + resolved "http://localhost:4873/globals/-/globals-13.17.0.tgz#902eb1e680a41da93945adbdcb5a9f361ba69bd4" + integrity sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw== + dependencies: + type-fest "^0.20.2" + +globby@^11.0.4, globby@^11.1.0: + version "11.1.0" + resolved 
"http://localhost:4873/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.2.9" + ignore "^5.2.0" + merge2 "^1.4.1" + slash "^3.0.0" + +graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: + version "4.2.10" + resolved "http://localhost:4873/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" + integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== + +grapheme-splitter@^1.0.4: + version "1.0.4" + resolved "http://localhost:4873/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz#9cf3a665c6247479896834af35cf1dbb4400767e" + integrity sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ== + +gzip-size@^6.0.0: + version "6.0.0" + resolved "http://localhost:4873/gzip-size/-/gzip-size-6.0.0.tgz#065367fd50c239c0671cbcbad5be3e2eeb10e462" + integrity sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q== + dependencies: + duplexer "^0.1.2" + +handle-thing@^2.0.0: + version "2.0.1" + resolved "http://localhost:4873/handle-thing/-/handle-thing-2.0.1.tgz#857f79ce359580c340d43081cc648970d0bb234e" + integrity sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg== + +harmony-reflect@^1.4.6: + version "1.6.2" + resolved "http://localhost:4873/harmony-reflect/-/harmony-reflect-1.6.2.tgz#31ecbd32e648a34d030d86adb67d4d47547fe710" + integrity sha512-HIp/n38R9kQjDEziXyDTuW3vvoxxyxjxFzXLrBr18uB47GnSt+G9D29fqrpM5ZkspMcPICud3XsBJQ4Y2URg8g== + +has-bigints@^1.0.1, has-bigints@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" + integrity 
sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ== + +has-flag@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== + +has-flag@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +has-property-descriptors@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz#610708600606d36961ed04c196193b6a607fa861" + integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ== + dependencies: + get-intrinsic "^1.1.1" + +has-symbols@^1.0.1, has-symbols@^1.0.2, has-symbols@^1.0.3: + version "1.0.3" + resolved "http://localhost:4873/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" + integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== + +has-tostringtag@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" + integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== + dependencies: + has-symbols "^1.0.2" + +has@^1.0.3: + version "1.0.3" + resolved "http://localhost:4873/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +he@^1.2.0: + version "1.2.0" + resolved "http://localhost:4873/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" + integrity 
sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== + +hoopy@^0.1.4: + version "0.1.4" + resolved "http://localhost:4873/hoopy/-/hoopy-0.1.4.tgz#609207d661100033a9a9402ad3dea677381c1b1d" + integrity sha512-HRcs+2mr52W0K+x8RzcLzuPPmVIKMSv97RGHy0Ea9y/mpcaK+xTrjICA04KAHi4GRzxliNqNJEFYWHghy3rSfQ== + +hpack.js@^2.1.6: + version "2.1.6" + resolved "http://localhost:4873/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" + integrity sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ== + dependencies: + inherits "^2.0.1" + obuf "^1.0.0" + readable-stream "^2.0.1" + wbuf "^1.1.0" + +html-encoding-sniffer@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" + integrity sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ== + dependencies: + whatwg-encoding "^1.0.5" + +html-entities@^2.1.0, html-entities@^2.3.2: + version "2.3.3" + resolved "http://localhost:4873/html-entities/-/html-entities-2.3.3.tgz#117d7626bece327fc8baace8868fa6f5ef856e46" + integrity sha512-DV5Ln36z34NNTDgnz0EWGBLZENelNAtkiFA4kyNOG2tDI6Mz1uSWiq1wAKdyjnJwyDiDO7Fa2SO1CTxPXL8VxA== + +html-escaper@^2.0.0: + version "2.0.2" + resolved "http://localhost:4873/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== + +html-minifier-terser@^6.0.2: + version "6.1.0" + resolved "http://localhost:4873/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#bfc818934cc07918f6b3669f5774ecdfd48f32ab" + integrity sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw== + dependencies: + camel-case "^4.1.2" + clean-css "^5.2.2" + commander "^8.3.0" + he "^1.2.0" + param-case "^3.0.4" + relateurl "^0.2.7" + 
terser "^5.10.0" + +html-webpack-plugin@^5.5.0: + version "5.5.0" + resolved "http://localhost:4873/html-webpack-plugin/-/html-webpack-plugin-5.5.0.tgz#c3911936f57681c1f9f4d8b68c158cd9dfe52f50" + integrity sha512-sy88PC2cRTVxvETRgUHFrL4No3UxvcH8G1NepGhqaTT+GXN2kTamqasot0inS5hXeg1cMbFDt27zzo9p35lZVw== + dependencies: + "@types/html-minifier-terser" "^6.0.0" + html-minifier-terser "^6.0.2" + lodash "^4.17.21" + pretty-error "^4.0.0" + tapable "^2.0.0" + +htmlparser2@^6.1.0: + version "6.1.0" + resolved "http://localhost:4873/htmlparser2/-/htmlparser2-6.1.0.tgz#c4d762b6c3371a05dbe65e94ae43a9f845fb8fb7" + integrity sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A== + dependencies: + domelementtype "^2.0.1" + domhandler "^4.0.0" + domutils "^2.5.2" + entities "^2.0.0" + +http-deceiver@^1.2.7: + version "1.2.7" + resolved "http://localhost:4873/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" + integrity sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw== + +http-errors@2.0.0: + version "2.0.0" + resolved "http://localhost:4873/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" + integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== + dependencies: + depd "2.0.0" + inherits "2.0.4" + setprototypeof "1.2.0" + statuses "2.0.1" + toidentifier "1.0.1" + +http-errors@~1.6.2: + version "1.6.3" + resolved "http://localhost:4873/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" + integrity sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A== + dependencies: + depd "~1.1.2" + inherits "2.0.3" + setprototypeof "1.1.0" + statuses ">= 1.4.0 < 2" + +http-parser-js@>=0.5.1: + version "0.5.8" + resolved "http://localhost:4873/http-parser-js/-/http-parser-js-0.5.8.tgz#af23090d9ac4e24573de6f6aecc9d84a48bf20e3" + 
integrity sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q== + +http-proxy-agent@^4.0.1: + version "4.0.1" + resolved "http://localhost:4873/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz#8a8c8ef7f5932ccf953c296ca8291b95aa74aa3a" + integrity sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg== + dependencies: + "@tootallnate/once" "1" + agent-base "6" + debug "4" + +http-proxy-middleware@^2.0.3: + version "2.0.6" + resolved "http://localhost:4873/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz#e1a4dd6979572c7ab5a4e4b55095d1f32a74963f" + integrity sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw== + dependencies: + "@types/http-proxy" "^1.17.8" + http-proxy "^1.18.1" + is-glob "^4.0.1" + is-plain-obj "^3.0.0" + micromatch "^4.0.2" + +http-proxy@^1.18.1: + version "1.18.1" + resolved "http://localhost:4873/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" + integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== + dependencies: + eventemitter3 "^4.0.0" + follow-redirects "^1.0.0" + requires-port "^1.0.0" + +https-proxy-agent@^5.0.0: + version "5.0.1" + resolved "http://localhost:4873/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" + integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA== + dependencies: + agent-base "6" + debug "4" + +human-signals@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" + integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== + +iconv-lite@0.4.24: + version "0.4.24" + resolved "http://localhost:4873/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + integrity 
sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 2.1.2 < 3" + +iconv-lite@^0.6.3: + version "0.6.3" + resolved "http://localhost:4873/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" + integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== + dependencies: + safer-buffer ">= 2.1.2 < 3.0.0" + +icss-utils@^5.0.0, icss-utils@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/icss-utils/-/icss-utils-5.1.0.tgz#c6be6858abd013d768e98366ae47e25d5887b1ae" + integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA== + +idb@^7.0.1: + version "7.1.0" + resolved "http://localhost:4873/idb/-/idb-7.1.0.tgz#2cc886be57738419e57f9aab58f647e5e2160270" + integrity sha512-Wsk07aAxDsntgYJY4h0knZJuTxM73eQ4reRAO+Z1liOh8eMCJ/MoDS8fCui1vGT9mnjtl1sOu3I2i/W1swPYZg== + +identity-obj-proxy@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/identity-obj-proxy/-/identity-obj-proxy-3.0.0.tgz#94d2bda96084453ef36fbc5aaec37e0f79f1fc14" + integrity sha512-00n6YnVHKrinT9t0d9+5yZC6UBNJANpYEQvL2LlX6Ab9lnmxzIRcEmTPuyGScvl1+jKuCICX1Z0Ab1pPKKdikA== + dependencies: + harmony-reflect "^1.4.6" + +ignore@^5.2.0: + version "5.2.0" + resolved "http://localhost:4873/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" + integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== + +immer@^9.0.7: + version "9.0.15" + resolved "http://localhost:4873/immer/-/immer-9.0.15.tgz#0b9169e5b1d22137aba7d43f8a81a495dd1b62dc" + integrity sha512-2eB/sswms9AEUSkOm4SbV5Y7Vmt/bKRwByd52jfLkW4OLYeaTP3EEiJ9agqU0O/tq6Dk62Zfj+TJSqfm1rLVGQ== + +import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1: + version "3.3.0" + resolved "http://localhost:4873/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + integrity 
sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + dependencies: + parent-module "^1.0.0" + resolve-from "^4.0.0" + +import-local@^3.0.2: + version "3.1.0" + resolved "http://localhost:4873/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" + integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg== + dependencies: + pkg-dir "^4.2.0" + resolve-cwd "^3.0.0" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "http://localhost:4873/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== + +indent-string@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" + integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== + +inflight@^1.0.4: + version "1.0.6" + resolved "http://localhost:4873/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.3: + version "2.0.4" + resolved "http://localhost:4873/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +inherits@2.0.3: + version "2.0.3" + resolved "http://localhost:4873/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw== + +ini@^1.3.5: + version "1.3.8" + resolved 
"http://localhost:4873/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" + integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== + +internal-slot@^1.0.3: + version "1.0.3" + resolved "http://localhost:4873/internal-slot/-/internal-slot-1.0.3.tgz#7347e307deeea2faac2ac6205d4bc7d34967f59c" + integrity sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA== + dependencies: + get-intrinsic "^1.1.0" + has "^1.0.3" + side-channel "^1.0.4" + +ipaddr.js@1.9.1: + version "1.9.1" + resolved "http://localhost:4873/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" + integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== + +ipaddr.js@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/ipaddr.js/-/ipaddr.js-2.0.1.tgz#eca256a7a877e917aeb368b0a7497ddf42ef81c0" + integrity sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng== + +is-arrayish@^0.2.1: + version "0.2.1" + resolved "http://localhost:4873/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== + +is-bigint@^1.0.1: + version "1.0.4" + resolved "http://localhost:4873/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" + integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== + dependencies: + has-bigints "^1.0.1" + +is-binary-path@~2.1.0: + version "2.1.0" + resolved "http://localhost:4873/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" + integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== + dependencies: + binary-extensions "^2.0.0" + +is-boolean-object@^1.1.0: + version "1.1.2" + resolved 
"http://localhost:4873/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" + integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-callable@^1.1.4, is-callable@^1.2.7: + version "1.2.7" + resolved "http://localhost:4873/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" + integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== + +is-core-module@^2.8.1, is-core-module@^2.9.0: + version "2.10.0" + resolved "http://localhost:4873/is-core-module/-/is-core-module-2.10.0.tgz#9012ede0a91c69587e647514e1d5277019e728ed" + integrity sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg== + dependencies: + has "^1.0.3" + +is-date-object@^1.0.1: + version "1.0.5" + resolved "http://localhost:4873/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" + integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== + dependencies: + has-tostringtag "^1.0.0" + +is-docker@^2.0.0, is-docker@^2.1.1: + version "2.2.1" + resolved "http://localhost:4873/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" + integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== + +is-extglob@^2.1.1: + version "2.1.1" + resolved "http://localhost:4873/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity 
sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-generator-fn@^2.0.0: + version "2.1.0" + resolved "http://localhost:4873/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" + integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== + +is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: + version "4.0.3" + resolved "http://localhost:4873/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== + dependencies: + is-extglob "^2.1.1" + +is-module@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591" + integrity sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g== + +is-negative-zero@^2.0.2: + version "2.0.2" + resolved "http://localhost:4873/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" + integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== + +is-number-object@^1.0.4: + version "1.0.7" + resolved "http://localhost:4873/is-number-object/-/is-number-object-1.0.7.tgz#59d50ada4c45251784e9904f5246c742f07a42fc" + integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ== + dependencies: + has-tostringtag "^1.0.0" + +is-number@^7.0.0: + version "7.0.0" + resolved "http://localhost:4873/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-obj@^1.0.1: + version "1.0.1" + resolved "http://localhost:4873/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" + integrity 
sha512-l4RyHgRqGN4Y3+9JHVrNqO+tN0rV5My76uW5/nuO4K1b6vw5G8d/cmFjP9tRfEsdhZNt0IFdZuK/c2Vr4Nb+Qg== + +is-plain-obj@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/is-plain-obj/-/is-plain-obj-3.0.0.tgz#af6f2ea14ac5a646183a5bbdb5baabbc156ad9d7" + integrity sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA== + +is-plain-object@^2.0.4: + version "2.0.4" + resolved "http://localhost:4873/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" + integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== + dependencies: + isobject "^3.0.1" + +is-potential-custom-element-name@^1.0.1: + version "1.0.1" + resolved "http://localhost:4873/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" + integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== + +is-regex@^1.1.4: + version "1.1.4" + resolved "http://localhost:4873/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" + integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-regexp@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/is-regexp/-/is-regexp-1.0.0.tgz#fd2d883545c46bac5a633e7b9a09e87fa2cb5069" + integrity sha512-7zjFAPO4/gwyQAAgRRmqeEeyIICSdmCqa3tsVHMdBzaXXRiqopZL4Cyghg/XulGWrtABTpbnYYzzIRffLkP4oA== + +is-root@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/is-root/-/is-root-2.1.0.tgz#809e18129cf1129644302a4f8544035d51984a9c" + integrity sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg== + +is-shared-array-buffer@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" 
+ integrity sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA== + dependencies: + call-bind "^1.0.2" + +is-stream@^2.0.0: + version "2.0.1" + resolved "http://localhost:4873/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" + integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== + +is-string@^1.0.5, is-string@^1.0.7: + version "1.0.7" + resolved "http://localhost:4873/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" + integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== + dependencies: + has-tostringtag "^1.0.0" + +is-symbol@^1.0.2, is-symbol@^1.0.3: + version "1.0.4" + resolved "http://localhost:4873/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" + integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== + dependencies: + has-symbols "^1.0.2" + +is-typedarray@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA== + +is-weakref@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" + integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== + dependencies: + call-bind "^1.0.2" + +is-wsl@^2.2.0: + version "2.2.0" + resolved "http://localhost:4873/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" + integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== + dependencies: + is-docker "^2.0.0" + +isarray@~1.0.0: + version "1.0.0" + resolved 
"http://localhost:4873/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== + +isexe@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== + +isobject@^3.0.1: + version "3.0.1" + resolved "http://localhost:4873/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" + integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== + +istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: + version "3.2.0" + resolved "http://localhost:4873/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" + integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== + +istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0: + version "5.2.1" + resolved "http://localhost:4873/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz#d10c8885c2125574e1c231cacadf955675e1ce3d" + integrity sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg== + dependencies: + "@babel/core" "^7.12.3" + "@babel/parser" "^7.14.7" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-coverage "^3.2.0" + semver "^6.3.0" + +istanbul-lib-report@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" + integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== + dependencies: + istanbul-lib-coverage "^3.0.0" + make-dir "^3.0.0" + supports-color "^7.1.0" + +istanbul-lib-source-maps@^4.0.0: + version "4.0.1" + resolved 
"http://localhost:4873/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" + integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== + dependencies: + debug "^4.1.1" + istanbul-lib-coverage "^3.0.0" + source-map "^0.6.1" + +istanbul-reports@^3.1.3: + version "3.1.5" + resolved "http://localhost:4873/istanbul-reports/-/istanbul-reports-3.1.5.tgz#cc9a6ab25cb25659810e4785ed9d9fb742578bae" + integrity sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w== + dependencies: + html-escaper "^2.0.0" + istanbul-lib-report "^3.0.0" + +jake@^10.8.5: + version "10.8.5" + resolved "http://localhost:4873/jake/-/jake-10.8.5.tgz#f2183d2c59382cb274226034543b9c03b8164c46" + integrity sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw== + dependencies: + async "^3.2.3" + chalk "^4.0.2" + filelist "^1.0.1" + minimatch "^3.0.4" + +jest-changed-files@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-changed-files/-/jest-changed-files-27.5.1.tgz#a348aed00ec9bf671cc58a66fcbe7c3dfd6a68f5" + integrity sha512-buBLMiByfWGCoMsLLzGUUSpAmIAGnbR2KJoMN10ziLhOLvP4e0SlypHnAel8iqQXTrcbmfEY9sSqae5sgUsTvw== + dependencies: + "@jest/types" "^27.5.1" + execa "^5.0.0" + throat "^6.0.1" + +jest-circus@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-circus/-/jest-circus-27.5.1.tgz#37a5a4459b7bf4406e53d637b49d22c65d125ecc" + integrity sha512-D95R7x5UtlMA5iBYsOHFFbMD/GVA4R/Kdq15f7xYWUfWHBto9NYRsOvnSauTgdF+ogCpJ4tyKOXhUifxS65gdw== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + co "^4.6.0" + dedent "^0.7.0" + expect "^27.5.1" + is-generator-fn "^2.0.0" + jest-each "^27.5.1" + jest-matcher-utils "^27.5.1" + jest-message-util "^27.5.1" + jest-runtime "^27.5.1" + jest-snapshot "^27.5.1" + jest-util 
"^27.5.1" + pretty-format "^27.5.1" + slash "^3.0.0" + stack-utils "^2.0.3" + throat "^6.0.1" + +jest-cli@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-cli/-/jest-cli-27.5.1.tgz#278794a6e6458ea8029547e6c6cbf673bd30b145" + integrity sha512-Hc6HOOwYq4/74/c62dEE3r5elx8wjYqxY0r0G/nFrLDPMFRu6RA/u8qINOIkvhxG7mMQ5EJsOGfRpI8L6eFUVw== + dependencies: + "@jest/core" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/types" "^27.5.1" + chalk "^4.0.0" + exit "^0.1.2" + graceful-fs "^4.2.9" + import-local "^3.0.2" + jest-config "^27.5.1" + jest-util "^27.5.1" + jest-validate "^27.5.1" + prompts "^2.0.1" + yargs "^16.2.0" + +jest-config@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-config/-/jest-config-27.5.1.tgz#5c387de33dca3f99ad6357ddeccd91bf3a0e4a41" + integrity sha512-5sAsjm6tGdsVbW9ahcChPAFCk4IlkQUknH5AvKjuLTSlcO/wCZKyFdn7Rg0EkC+OGgWODEy2hDpWB1PgzH0JNA== + dependencies: + "@babel/core" "^7.8.0" + "@jest/test-sequencer" "^27.5.1" + "@jest/types" "^27.5.1" + babel-jest "^27.5.1" + chalk "^4.0.0" + ci-info "^3.2.0" + deepmerge "^4.2.2" + glob "^7.1.1" + graceful-fs "^4.2.9" + jest-circus "^27.5.1" + jest-environment-jsdom "^27.5.1" + jest-environment-node "^27.5.1" + jest-get-type "^27.5.1" + jest-jasmine2 "^27.5.1" + jest-regex-util "^27.5.1" + jest-resolve "^27.5.1" + jest-runner "^27.5.1" + jest-util "^27.5.1" + jest-validate "^27.5.1" + micromatch "^4.0.4" + parse-json "^5.2.0" + pretty-format "^27.5.1" + slash "^3.0.0" + strip-json-comments "^3.1.1" + +jest-diff@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-diff/-/jest-diff-27.5.1.tgz#a07f5011ac9e6643cf8a95a462b7b1ecf6680def" + integrity sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw== + dependencies: + chalk "^4.0.0" + diff-sequences "^27.5.1" + jest-get-type "^27.5.1" + pretty-format "^27.5.1" + +jest-diff@^29.1.2: + version "29.1.2" + resolved 
"http://localhost:4873/jest-diff/-/jest-diff-29.1.2.tgz#bb7aaf5353227d6f4f96c5e7e8713ce576a607dc" + integrity sha512-4GQts0aUopVvecIT4IwD/7xsBaMhKTYoM4/njE/aVw9wpw+pIUVp8Vab/KnSzSilr84GnLBkaP3JLDnQYCKqVQ== + dependencies: + chalk "^4.0.0" + diff-sequences "^29.0.0" + jest-get-type "^29.0.0" + pretty-format "^29.1.2" + +jest-docblock@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-docblock/-/jest-docblock-27.5.1.tgz#14092f364a42c6108d42c33c8cf30e058e25f6c0" + integrity sha512-rl7hlABeTsRYxKiUfpHrQrG4e2obOiTQWfMEH3PxPjOtdsfLQO4ReWSZaQ7DETm4xu07rl4q/h4zcKXyU0/OzQ== + dependencies: + detect-newline "^3.0.0" + +jest-each@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-each/-/jest-each-27.5.1.tgz#5bc87016f45ed9507fed6e4702a5b468a5b2c44e" + integrity sha512-1Ff6p+FbhT/bXQnEouYy00bkNSY7OUpfIcmdl8vZ31A1UUaurOLPA8a8BbJOF2RDUElwJhmeaV7LnagI+5UwNQ== + dependencies: + "@jest/types" "^27.5.1" + chalk "^4.0.0" + jest-get-type "^27.5.1" + jest-util "^27.5.1" + pretty-format "^27.5.1" + +jest-environment-jsdom@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-environment-jsdom/-/jest-environment-jsdom-27.5.1.tgz#ea9ccd1fc610209655a77898f86b2b559516a546" + integrity sha512-TFBvkTC1Hnnnrka/fUb56atfDtJ9VMZ94JkjTbggl1PEpwrYtUBKMezB3inLmWqQsXYLcMwNoDQwoBTAvFfsfw== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/fake-timers" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + jest-mock "^27.5.1" + jest-util "^27.5.1" + jsdom "^16.6.0" + +jest-environment-node@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-environment-node/-/jest-environment-node-27.5.1.tgz#dedc2cfe52fab6b8f5714b4808aefa85357a365e" + integrity sha512-Jt4ZUnxdOsTGwSRAfKEnE6BcwsSPNOijjwifq5sDFSA2kesnXTvNqKHYgM0hDq3549Uf/KzdXNYn4wMZJPlFLw== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/fake-timers" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + jest-mock "^27.5.1" + jest-util "^27.5.1" + +jest-get-type@^27.5.1: + 
version "27.5.1" + resolved "http://localhost:4873/jest-get-type/-/jest-get-type-27.5.1.tgz#3cd613c507b0f7ace013df407a1c1cd578bcb4f1" + integrity sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw== + +jest-get-type@^29.0.0: + version "29.0.0" + resolved "http://localhost:4873/jest-get-type/-/jest-get-type-29.0.0.tgz#843f6c50a1b778f7325df1129a0fd7aa713aef80" + integrity sha512-83X19z/HuLKYXYHskZlBAShO7UfLFXu/vWajw9ZNJASN32li8yHMaVGAQqxFW1RCFOkB7cubaL6FaJVQqqJLSw== + +jest-haste-map@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-haste-map/-/jest-haste-map-27.5.1.tgz#9fd8bd7e7b4fa502d9c6164c5640512b4e811e7f" + integrity sha512-7GgkZ4Fw4NFbMSDSpZwXeBiIbx+t/46nJ2QitkOjvwPYyZmqttu2TDSimMHP1EkPOi4xUZAN1doE5Vd25H4Jng== + dependencies: + "@jest/types" "^27.5.1" + "@types/graceful-fs" "^4.1.2" + "@types/node" "*" + anymatch "^3.0.3" + fb-watchman "^2.0.0" + graceful-fs "^4.2.9" + jest-regex-util "^27.5.1" + jest-serializer "^27.5.1" + jest-util "^27.5.1" + jest-worker "^27.5.1" + micromatch "^4.0.4" + walker "^1.0.7" + optionalDependencies: + fsevents "^2.3.2" + +jest-jasmine2@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-jasmine2/-/jest-jasmine2-27.5.1.tgz#a037b0034ef49a9f3d71c4375a796f3b230d1ac4" + integrity sha512-jtq7VVyG8SqAorDpApwiJJImd0V2wv1xzdheGHRGyuT7gZm6gG47QEskOlzsN1PG/6WNaCo5pmwMHDf3AkG2pQ== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/source-map" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + co "^4.6.0" + expect "^27.5.1" + is-generator-fn "^2.0.0" + jest-each "^27.5.1" + jest-matcher-utils "^27.5.1" + jest-message-util "^27.5.1" + jest-runtime "^27.5.1" + jest-snapshot "^27.5.1" + jest-util "^27.5.1" + pretty-format "^27.5.1" + throat "^6.0.1" + +jest-leak-detector@^27.5.1: + version "27.5.1" + resolved 
"http://localhost:4873/jest-leak-detector/-/jest-leak-detector-27.5.1.tgz#6ec9d54c3579dd6e3e66d70e3498adf80fde3fb8" + integrity sha512-POXfWAMvfU6WMUXftV4HolnJfnPOGEu10fscNCA76KBpRRhcMN2c8d3iT2pxQS3HLbA+5X4sOUPzYO2NUyIlHQ== + dependencies: + jest-get-type "^27.5.1" + pretty-format "^27.5.1" + +jest-matcher-utils@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz#9c0cdbda8245bc22d2331729d1091308b40cf8ab" + integrity sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw== + dependencies: + chalk "^4.0.0" + jest-diff "^27.5.1" + jest-get-type "^27.5.1" + pretty-format "^27.5.1" + +jest-matcher-utils@^29.1.2: + version "29.1.2" + resolved "http://localhost:4873/jest-matcher-utils/-/jest-matcher-utils-29.1.2.tgz#e68c4bcc0266e70aa1a5c13fb7b8cd4695e318a1" + integrity sha512-MV5XrD3qYSW2zZSHRRceFzqJ39B2z11Qv0KPyZYxnzDHFeYZGJlgGi0SW+IXSJfOewgJp/Km/7lpcFT+cgZypw== + dependencies: + chalk "^4.0.0" + jest-diff "^29.1.2" + jest-get-type "^29.0.0" + pretty-format "^29.1.2" + +jest-message-util@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-message-util/-/jest-message-util-27.5.1.tgz#bdda72806da10d9ed6425e12afff38cd1458b6cf" + integrity sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g== + dependencies: + "@babel/code-frame" "^7.12.13" + "@jest/types" "^27.5.1" + "@types/stack-utils" "^2.0.0" + chalk "^4.0.0" + graceful-fs "^4.2.9" + micromatch "^4.0.4" + pretty-format "^27.5.1" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-message-util@^28.1.3: + version "28.1.3" + resolved "http://localhost:4873/jest-message-util/-/jest-message-util-28.1.3.tgz#232def7f2e333f1eecc90649b5b94b0055e7c43d" + integrity sha512-PFdn9Iewbt575zKPf1286Ht9EPoJmYT7P0kY+RibeYZ2XtOr53pDLEFoTWXbd1h4JiGiWpTBC84fc8xMXQMb7g== + dependencies: + "@babel/code-frame" "^7.12.13" + "@jest/types" "^28.1.3" + "@types/stack-utils" "^2.0.0" + chalk "^4.0.0" 
+ graceful-fs "^4.2.9" + micromatch "^4.0.4" + pretty-format "^28.1.3" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-message-util@^29.1.2: + version "29.1.2" + resolved "http://localhost:4873/jest-message-util/-/jest-message-util-29.1.2.tgz#c21a33c25f9dc1ebfcd0f921d89438847a09a501" + integrity sha512-9oJ2Os+Qh6IlxLpmvshVbGUiSkZVc2FK+uGOm6tghafnB2RyjKAxMZhtxThRMxfX1J1SOMhTn9oK3/MutRWQJQ== + dependencies: + "@babel/code-frame" "^7.12.13" + "@jest/types" "^29.1.2" + "@types/stack-utils" "^2.0.0" + chalk "^4.0.0" + graceful-fs "^4.2.9" + micromatch "^4.0.4" + pretty-format "^29.1.2" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-mock@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-mock/-/jest-mock-27.5.1.tgz#19948336d49ef4d9c52021d34ac7b5f36ff967d6" + integrity sha512-K4jKbY1d4ENhbrG2zuPWaQBvDly+iZ2yAW+T1fATN78hc0sInwn7wZB8XtlNnvHug5RMwV897Xm4LqmPM4e2Og== + dependencies: + "@jest/types" "^27.5.1" + "@types/node" "*" + +jest-pnp-resolver@^1.2.2: + version "1.2.2" + resolved "http://localhost:4873/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c" + integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== + +jest-regex-util@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-regex-util/-/jest-regex-util-27.5.1.tgz#4da143f7e9fd1e542d4aa69617b38e4a78365b95" + integrity sha512-4bfKq2zie+x16okqDXjXn9ql2B0dScQu+vcwe4TvFVhkVyuWLqpZrZtXxLLWoXYgn0E87I6r6GRYHF7wFZBUvg== + +jest-regex-util@^28.0.0: + version "28.0.2" + resolved "http://localhost:4873/jest-regex-util/-/jest-regex-util-28.0.2.tgz#afdc377a3b25fb6e80825adcf76c854e5bf47ead" + integrity sha512-4s0IgyNIy0y9FK+cjoVYoxamT7Zeo7MhzqRGx7YDYmaQn1wucY9rotiGkBzzcMXTtjrCAP/f7f+E0F7+fxPNdw== + +jest-resolve-dependencies@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-resolve-dependencies/-/jest-resolve-dependencies-27.5.1.tgz#d811ecc8305e731cc86dd79741ee98fed06f1da8" + integrity 
sha512-QQOOdY4PE39iawDn5rzbIePNigfe5B9Z91GDD1ae/xNDlu9kaat8QQ5EKnNmVWPV54hUdxCVwwj6YMgR2O7IOg== + dependencies: + "@jest/types" "^27.5.1" + jest-regex-util "^27.5.1" + jest-snapshot "^27.5.1" + +jest-resolve@^27.4.2, jest-resolve@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-resolve/-/jest-resolve-27.5.1.tgz#a2f1c5a0796ec18fe9eb1536ac3814c23617b384" + integrity sha512-FFDy8/9E6CV83IMbDpcjOhumAQPDyETnU2KZ1O98DwTnz8AOBsW/Xv3GySr1mOZdItLR+zDZ7I/UdTFbgSOVCw== + dependencies: + "@jest/types" "^27.5.1" + chalk "^4.0.0" + graceful-fs "^4.2.9" + jest-haste-map "^27.5.1" + jest-pnp-resolver "^1.2.2" + jest-util "^27.5.1" + jest-validate "^27.5.1" + resolve "^1.20.0" + resolve.exports "^1.1.0" + slash "^3.0.0" + +jest-runner@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-runner/-/jest-runner-27.5.1.tgz#071b27c1fa30d90540805c5645a0ec167c7b62e5" + integrity sha512-g4NPsM4mFCOwFKXO4p/H/kWGdJp9V8kURY2lX8Me2drgXqG7rrZAx5kv+5H7wtt/cdFIjhqYx1HrlqWHaOvDaQ== + dependencies: + "@jest/console" "^27.5.1" + "@jest/environment" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + emittery "^0.8.1" + graceful-fs "^4.2.9" + jest-docblock "^27.5.1" + jest-environment-jsdom "^27.5.1" + jest-environment-node "^27.5.1" + jest-haste-map "^27.5.1" + jest-leak-detector "^27.5.1" + jest-message-util "^27.5.1" + jest-resolve "^27.5.1" + jest-runtime "^27.5.1" + jest-util "^27.5.1" + jest-worker "^27.5.1" + source-map-support "^0.5.6" + throat "^6.0.1" + +jest-runtime@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-runtime/-/jest-runtime-27.5.1.tgz#4896003d7a334f7e8e4a53ba93fb9bcd3db0a1af" + integrity sha512-o7gxw3Gf+H2IGt8fv0RiyE1+r83FJBRruoA+FXrlHw6xEyBsU8ugA6IPfTdVyA0w8HClpbK+DGJxH59UrNMx8A== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/fake-timers" "^27.5.1" + "@jest/globals" "^27.5.1" + "@jest/source-map" "^27.5.1" + "@jest/test-result" 
"^27.5.1" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + chalk "^4.0.0" + cjs-module-lexer "^1.0.0" + collect-v8-coverage "^1.0.0" + execa "^5.0.0" + glob "^7.1.3" + graceful-fs "^4.2.9" + jest-haste-map "^27.5.1" + jest-message-util "^27.5.1" + jest-mock "^27.5.1" + jest-regex-util "^27.5.1" + jest-resolve "^27.5.1" + jest-snapshot "^27.5.1" + jest-util "^27.5.1" + slash "^3.0.0" + strip-bom "^4.0.0" + +jest-serializer@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-serializer/-/jest-serializer-27.5.1.tgz#81438410a30ea66fd57ff730835123dea1fb1f64" + integrity sha512-jZCyo6iIxO1aqUxpuBlwTDMkzOAJS4a3eYz3YzgxxVQFwLeSA7Jfq5cbqCY+JLvTDrWirgusI/0KwxKMgrdf7w== + dependencies: + "@types/node" "*" + graceful-fs "^4.2.9" + +jest-snapshot@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-snapshot/-/jest-snapshot-27.5.1.tgz#b668d50d23d38054a51b42c4039cab59ae6eb6a1" + integrity sha512-yYykXI5a0I31xX67mgeLw1DZ0bJB+gpq5IpSuCAoyDi0+BhgU/RIrL+RTzDmkNTchvDFWKP8lp+w/42Z3us5sA== + dependencies: + "@babel/core" "^7.7.2" + "@babel/generator" "^7.7.2" + "@babel/plugin-syntax-typescript" "^7.7.2" + "@babel/traverse" "^7.7.2" + "@babel/types" "^7.0.0" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/babel__traverse" "^7.0.4" + "@types/prettier" "^2.1.5" + babel-preset-current-node-syntax "^1.0.0" + chalk "^4.0.0" + expect "^27.5.1" + graceful-fs "^4.2.9" + jest-diff "^27.5.1" + jest-get-type "^27.5.1" + jest-haste-map "^27.5.1" + jest-matcher-utils "^27.5.1" + jest-message-util "^27.5.1" + jest-util "^27.5.1" + natural-compare "^1.4.0" + pretty-format "^27.5.1" + semver "^7.3.2" + +jest-util@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-util/-/jest-util-27.5.1.tgz#3ba9771e8e31a0b85da48fe0b0891fb86c01c2f9" + integrity sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw== + dependencies: + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + ci-info 
"^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-util@^28.1.3: + version "28.1.3" + resolved "http://localhost:4873/jest-util/-/jest-util-28.1.3.tgz#f4f932aa0074f0679943220ff9cbba7e497028b0" + integrity sha512-XdqfpHwpcSRko/C35uLYFM2emRAltIIKZiJ9eAmhjsj0CqZMa0p1ib0R5fWIqGhn1a103DebTbpqIaP1qCQ6tQ== + dependencies: + "@jest/types" "^28.1.3" + "@types/node" "*" + chalk "^4.0.0" + ci-info "^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-util@^29.1.2: + version "29.1.2" + resolved "http://localhost:4873/jest-util/-/jest-util-29.1.2.tgz#ac5798e93cb6a6703084e194cfa0898d66126df1" + integrity sha512-vPCk9F353i0Ymx3WQq3+a4lZ07NXu9Ca8wya6o4Fe4/aO1e1awMMprZ3woPFpKwghEOW+UXgd15vVotuNN9ONQ== + dependencies: + "@jest/types" "^29.1.2" + "@types/node" "*" + chalk "^4.0.0" + ci-info "^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-validate@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-validate/-/jest-validate-27.5.1.tgz#9197d54dc0bdb52260b8db40b46ae668e04df067" + integrity sha512-thkNli0LYTmOI1tDB3FI1S1RTp/Bqyd9pTarJwL87OIBFuqEb5Apv5EaApEudYg4g86e3CT6kM0RowkhtEnCBQ== + dependencies: + "@jest/types" "^27.5.1" + camelcase "^6.2.0" + chalk "^4.0.0" + jest-get-type "^27.5.1" + leven "^3.1.0" + pretty-format "^27.5.1" + +jest-watch-typeahead@^1.0.0: + version "1.1.0" + resolved "http://localhost:4873/jest-watch-typeahead/-/jest-watch-typeahead-1.1.0.tgz#b4a6826dfb9c9420da2f7bc900de59dad11266a9" + integrity sha512-Va5nLSJTN7YFtC2jd+7wsoe1pNe5K4ShLux/E5iHEwlB9AxaxmggY7to9KUqKojhaJw3aXqt5WAb4jGPOolpEw== + dependencies: + ansi-escapes "^4.3.1" + chalk "^4.0.0" + jest-regex-util "^28.0.0" + jest-watcher "^28.0.0" + slash "^4.0.0" + string-length "^5.0.1" + strip-ansi "^7.0.1" + +jest-watcher@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-watcher/-/jest-watcher-27.5.1.tgz#71bd85fb9bde3a2c2ec4dc353437971c43c642a2" + integrity 
sha512-z676SuD6Z8o8qbmEGhoEUFOM1+jfEiL3DXHK/xgEiG2EyNYfFG60jluWcupY6dATjfEsKQuibReS1djInQnoVw== + dependencies: + "@jest/test-result" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + jest-util "^27.5.1" + string-length "^4.0.1" + +jest-watcher@^28.0.0: + version "28.1.3" + resolved "http://localhost:4873/jest-watcher/-/jest-watcher-28.1.3.tgz#c6023a59ba2255e3b4c57179fc94164b3e73abd4" + integrity sha512-t4qcqj9hze+jviFPUN3YAtAEeFnr/azITXQEMARf5cMwKY2SMBRnCQTXLixTl20OR6mLh9KLMrgVJgJISym+1g== + dependencies: + "@jest/test-result" "^28.1.3" + "@jest/types" "^28.1.3" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + emittery "^0.10.2" + jest-util "^28.1.3" + string-length "^4.0.1" + +jest-worker@^26.2.1: + version "26.6.2" + resolved "http://localhost:4873/jest-worker/-/jest-worker-26.6.2.tgz#7f72cbc4d643c365e27b9fd775f9d0eaa9c7a8ed" + integrity sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^7.0.0" + +jest-worker@^27.0.2, jest-worker@^27.4.5, jest-worker@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0" + integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +jest-worker@^28.0.2: + version "28.1.3" + resolved "http://localhost:4873/jest-worker/-/jest-worker-28.1.3.tgz#7e3c4ce3fa23d1bb6accb169e7f396f98ed4bb98" + integrity sha512-CqRA220YV/6jCo8VWvAt1KKx6eek1VIHMPeLEbpcfSfkEeWyBNppynM/o6q+Wmw+sOhos2ml34wZbSX3G13//g== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +jest@^27.4.3: + version "27.5.1" + resolved "http://localhost:4873/jest/-/jest-27.5.1.tgz#dadf33ba70a779be7a6fc33015843b51494f63fc" + integrity 
sha512-Yn0mADZB89zTtjkPJEXwrac3LHudkQMR+Paqa8uxJHCBr9agxztUifWCyiYrjhMPBoUVBjyny0I7XH6ozDr7QQ== + dependencies: + "@jest/core" "^27.5.1" + import-local "^3.0.2" + jest-cli "^27.5.1" + +js-sdsl@^4.1.4: + version "4.1.5" + resolved "http://localhost:4873/js-sdsl/-/js-sdsl-4.1.5.tgz#1ff1645e6b4d1b028cd3f862db88c9d887f26e2a" + integrity sha512-08bOAKweV2NUC1wqTtf3qZlnpOX/R2DU9ikpjOHs0H+ibQv3zpncVQg6um4uYtRtrwIX8M4Nh3ytK4HGlYAq7Q== + +"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-yaml@^3.13.1: + version "3.14.1" + resolved "http://localhost:4873/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +js-yaml@^4.1.0: + version "4.1.0" + resolved "http://localhost:4873/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== + dependencies: + argparse "^2.0.1" + +jsdom@^16.6.0: + version "16.7.0" + resolved "http://localhost:4873/jsdom/-/jsdom-16.7.0.tgz#918ae71965424b197c819f8183a754e18977b710" + integrity sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw== + dependencies: + abab "^2.0.5" + acorn "^8.2.4" + acorn-globals "^6.0.0" + cssom "^0.4.4" + cssstyle "^2.3.0" + data-urls "^2.0.0" + decimal.js "^10.2.1" + domexception "^2.0.1" + escodegen "^2.0.0" + form-data "^3.0.0" + html-encoding-sniffer "^2.0.1" + http-proxy-agent "^4.0.1" + https-proxy-agent "^5.0.0" + is-potential-custom-element-name "^1.0.1" + nwsapi "^2.2.0" + parse5 "6.0.1" + saxes "^5.0.1" + symbol-tree "^3.2.4" + tough-cookie 
"^4.0.0" + w3c-hr-time "^1.0.2" + w3c-xmlserializer "^2.0.0" + webidl-conversions "^6.1.0" + whatwg-encoding "^1.0.5" + whatwg-mimetype "^2.3.0" + whatwg-url "^8.5.0" + ws "^7.4.6" + xml-name-validator "^3.0.0" + +jsesc@^2.5.1: + version "2.5.2" + resolved "http://localhost:4873/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== + +jsesc@~0.5.0: + version "0.5.0" + resolved "http://localhost:4873/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" + integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA== + +json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1: + version "2.3.1" + resolved "http://localhost:4873/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" + integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "http://localhost:4873/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-schema-traverse@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" + integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== + +json-schema@^0.4.0: + version "0.4.0" + resolved "http://localhost:4873/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" + integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== + +json-stable-stringify-without-jsonify@^1.0.1: + version "1.0.1" + resolved 
"http://localhost:4873/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== + +json5@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593" + integrity sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA== + dependencies: + minimist "^1.2.0" + +json5@^2.1.2, json5@^2.2.0, json5@^2.2.1: + version "2.2.1" + resolved "http://localhost:4873/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c" + integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA== + +jsonfile@^6.0.1: + version "6.1.0" + resolved "http://localhost:4873/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" + integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== + dependencies: + universalify "^2.0.0" + optionalDependencies: + graceful-fs "^4.1.6" + +jsonpointer@^5.0.0: + version "5.0.1" + resolved "http://localhost:4873/jsonpointer/-/jsonpointer-5.0.1.tgz#2110e0af0900fd37467b5907ecd13a7884a1b559" + integrity sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ== + +"jsx-ast-utils@^2.4.1 || ^3.0.0", jsx-ast-utils@^3.3.2: + version "3.3.3" + resolved "http://localhost:4873/jsx-ast-utils/-/jsx-ast-utils-3.3.3.tgz#76b3e6e6cece5c69d49a5792c3d01bd1a0cdc7ea" + integrity sha512-fYQHZTZ8jSfmWZ0iyzfwiU4WDX4HpHbMCZ3gPlWYiCl3BoeOTsqKBqnTVfH2rYT7eP5c3sVbeSPHnnJOaTrWiw== + dependencies: + array-includes "^3.1.5" + object.assign "^4.1.3" + +kind-of@^6.0.2: + version "6.0.3" + resolved "http://localhost:4873/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" + integrity 
sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== + +kleur@^3.0.3: + version "3.0.3" + resolved "http://localhost:4873/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" + integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== + +klona@^2.0.4, klona@^2.0.5: + version "2.0.5" + resolved "http://localhost:4873/klona/-/klona-2.0.5.tgz#d166574d90076395d9963aa7a928fabb8d76afbc" + integrity sha512-pJiBpiXMbt7dkzXe8Ghj/u4FfXOOa98fPW+bihOJ4SjnoijweJrNThJfd3ifXpXhREjpoF2mZVH1GfS9LV3kHQ== + +language-subtag-registry@~0.3.2: + version "0.3.22" + resolved "http://localhost:4873/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz#2e1500861b2e457eba7e7ae86877cbd08fa1fd1d" + integrity sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w== + +language-tags@^1.0.5: + version "1.0.5" + resolved "http://localhost:4873/language-tags/-/language-tags-1.0.5.tgz#d321dbc4da30ba8bf3024e040fa5c14661f9193a" + integrity sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ== + dependencies: + language-subtag-registry "~0.3.2" + +leven@^3.1.0: + version "3.1.0" + resolved "http://localhost:4873/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" + integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== + +levn@^0.4.1: + version "0.4.1" + resolved "http://localhost:4873/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== + dependencies: + prelude-ls "^1.2.1" + type-check "~0.4.0" + +levn@~0.3.0: + version "0.3.0" + resolved "http://localhost:4873/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" + integrity sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA== 
+ dependencies: + prelude-ls "~1.1.2" + type-check "~0.3.2" + +lilconfig@^2.0.3, lilconfig@^2.0.5, lilconfig@^2.0.6: + version "2.0.6" + resolved "http://localhost:4873/lilconfig/-/lilconfig-2.0.6.tgz#32a384558bd58af3d4c6e077dd1ad1d397bc69d4" + integrity sha512-9JROoBW7pobfsx+Sq2JsASvCo6Pfo6WWoUW79HuB1BCoBXD4PLWJPqDF6fNj67pqBYTbAHkE57M1kS/+L1neOg== + +lines-and-columns@^1.1.6: + version "1.2.4" + resolved "http://localhost:4873/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" + integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== + +loader-runner@^4.2.0: + version "4.3.0" + resolved "http://localhost:4873/loader-runner/-/loader-runner-4.3.0.tgz#c1b4a163b99f614830353b16755e7149ac2314e1" + integrity sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg== + +loader-utils@^2.0.0: + version "2.0.4" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-2.0.4.tgz#8b5cb38b5c34a9a018ee1fc0e6a066d1dfcc528c" + integrity sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw== + dependencies: + big.js "^5.2.2" + emojis-list "^3.0.0" + json5 "^2.1.2" + +loader-utils@^3.2.0: + version "3.2.0" + resolved "http://localhost:4873/loader-utils/-/loader-utils-3.2.0.tgz#bcecc51a7898bee7473d4bc6b845b23af8304d4f" + integrity sha512-HVl9ZqccQihZ7JM85dco1MvO9G+ONvxoGa9rkhzFsneGLKSUg1gJf9bWzhRhcvm2qChhWpebQhP44qxjKIUCaQ== + +locate-path@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" + integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== + dependencies: + p-locate "^3.0.0" + path-exists "^3.0.0" + +locate-path@^5.0.0: + version "5.0.0" + resolved "http://localhost:4873/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" + integrity 
sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== + dependencies: + p-locate "^4.1.0" + +locate-path@^6.0.0: + version "6.0.0" + resolved "http://localhost:4873/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" + integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== + dependencies: + p-locate "^5.0.0" + +lodash.debounce@^4.0.8: + version "4.0.8" + resolved "http://localhost:4873/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" + integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow== + +lodash.memoize@^4.1.2: + version "4.1.2" + resolved "http://localhost:4873/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" + integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag== + +lodash.merge@^4.6.2: + version "4.6.2" + resolved "http://localhost:4873/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== + +lodash.sortby@^4.7.0: + version "4.7.0" + resolved "http://localhost:4873/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" + integrity sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA== + +lodash.uniq@^4.5.0: + version "4.5.0" + resolved "http://localhost:4873/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" + integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ== + +lodash@^4.17.15, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0: + version "4.17.21" + resolved "http://localhost:4873/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + integrity 
sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== + +loose-envify@^1.1.0, loose-envify@^1.4.0: + version "1.4.0" + resolved "http://localhost:4873/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== + dependencies: + js-tokens "^3.0.0 || ^4.0.0" + +lower-case@^2.0.2: + version "2.0.2" + resolved "http://localhost:4873/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28" + integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg== + dependencies: + tslib "^2.0.3" + +lru-cache@^6.0.0: + version "6.0.0" + resolved "http://localhost:4873/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +lz-string@^1.4.4: + version "1.4.4" + resolved "http://localhost:4873/lz-string/-/lz-string-1.4.4.tgz#c0d8eaf36059f705796e1e344811cf4c498d3a26" + integrity sha512-0ckx7ZHRPqb0oUm8zNr+90mtf9DQB60H1wMCjBtfi62Kl3a7JbHob6gA2bC+xRvZoOL+1hzUK8jeuEIQE8svEQ== + +magic-string@^0.25.0, magic-string@^0.25.7: + version "0.25.9" + resolved "http://localhost:4873/magic-string/-/magic-string-0.25.9.tgz#de7f9faf91ef8a1c91d02c2e5314c8277dbcdd1c" + integrity sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ== + dependencies: + sourcemap-codec "^1.4.8" + +make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0: + version "3.1.0" + resolved "http://localhost:4873/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" + integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== + dependencies: + semver "^6.0.0" + +makeerror@1.0.12: + version "1.0.12" + resolved 
"http://localhost:4873/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" + integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== + dependencies: + tmpl "1.0.5" + +mdn-data@2.0.14: + version "2.0.14" + resolved "http://localhost:4873/mdn-data/-/mdn-data-2.0.14.tgz#7113fc4281917d63ce29b43446f701e68c25ba50" + integrity sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow== + +mdn-data@2.0.4: + version "2.0.4" + resolved "http://localhost:4873/mdn-data/-/mdn-data-2.0.4.tgz#699b3c38ac6f1d728091a64650b65d388502fd5b" + integrity sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA== + +media-typer@0.3.0: + version "0.3.0" + resolved "http://localhost:4873/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" + integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== + +memfs@^3.1.2, memfs@^3.4.3: + version "3.4.7" + resolved "http://localhost:4873/memfs/-/memfs-3.4.7.tgz#e5252ad2242a724f938cb937e3c4f7ceb1f70e5a" + integrity sha512-ygaiUSNalBX85388uskeCyhSAoOSgzBbtVCr9jA2RROssFL9Q19/ZXFqS+2Th2sr1ewNIWgFdLzLC3Yl1Zv+lw== + dependencies: + fs-monkey "^1.0.3" + +merge-descriptors@1.0.1: + version "1.0.1" + resolved "http://localhost:4873/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" + integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== + +merge-stream@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" + integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== + +merge2@^1.3.0, merge2@^1.4.1: + version "1.4.1" + resolved "http://localhost:4873/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + 
integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +methods@~1.1.2: + version "1.1.2" + resolved "http://localhost:4873/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" + integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== + +micromatch@^4.0.2, micromatch@^4.0.4, micromatch@^4.0.5: + version "4.0.5" + resolved "http://localhost:4873/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" + integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== + dependencies: + braces "^3.0.2" + picomatch "^2.3.1" + +mime-db@1.52.0, "mime-db@>= 1.43.0 < 2": + version "1.52.0" + resolved "http://localhost:4873/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + +mime-types@^2.1.12, mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.17, mime-types@~2.1.24, mime-types@~2.1.34: + version "2.1.35" + resolved "http://localhost:4873/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mime@1.6.0: + version "1.6.0" + resolved "http://localhost:4873/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" + integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== + +mimic-fn@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" + integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== + +min-indent@^1.0.0: + version "1.0.1" + resolved 
"http://localhost:4873/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869" + integrity sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg== + +mini-css-extract-plugin@^2.4.5: + version "2.6.1" + resolved "http://localhost:4873/mini-css-extract-plugin/-/mini-css-extract-plugin-2.6.1.tgz#9a1251d15f2035c342d99a468ab9da7a0451b71e" + integrity sha512-wd+SD57/K6DiV7jIR34P+s3uckTRuQvx0tKPcvjFlrEylk6P4mQ2KSWk1hblj1Kxaqok7LogKOieygXqBczNlg== + dependencies: + schema-utils "^4.0.0" + +minimalistic-assert@^1.0.0: + version "1.0.1" + resolved "http://localhost:4873/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" + integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== + +minimatch@3.0.4: + version "3.0.4" + resolved "http://localhost:4873/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== + dependencies: + brace-expansion "^1.1.7" + +minimatch@^3.0.4, minimatch@^3.1.1, minimatch@^3.1.2: + version "3.1.2" + resolved "http://localhost:4873/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== + dependencies: + brace-expansion "^1.1.7" + +minimatch@^5.0.1: + version "5.1.0" + resolved "http://localhost:4873/minimatch/-/minimatch-5.1.0.tgz#1717b464f4971b144f6aabe8f2d0b8e4511e09c7" + integrity sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg== + dependencies: + brace-expansion "^2.0.1" + +minimist@^1.2.0, minimist@^1.2.6: + version "1.2.7" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.7.tgz#daa1c4d91f507390437c6a8bc01078e7000c4d18" + integrity 
sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g== + +mkdirp@~0.5.1: + version "0.5.6" + resolved "http://localhost:4873/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" + integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== + dependencies: + minimist "^1.2.6" + +ms@2.0.0: + version "2.0.0" + resolved "http://localhost:4873/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== + +ms@2.1.2: + version "2.1.2" + resolved "http://localhost:4873/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +ms@2.1.3, ms@^2.1.1: + version "2.1.3" + resolved "http://localhost:4873/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +multicast-dns@^7.2.5: + version "7.2.5" + resolved "http://localhost:4873/multicast-dns/-/multicast-dns-7.2.5.tgz#77eb46057f4d7adbd16d9290fa7299f6fa64cced" + integrity sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg== + dependencies: + dns-packet "^5.2.2" + thunky "^1.0.2" + +nanoid@^3.3.4: + version "3.3.4" + resolved "http://localhost:4873/nanoid/-/nanoid-3.3.4.tgz#730b67e3cd09e2deacf03c027c81c9d9dbc5e8ab" + integrity sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw== + +natural-compare@^1.4.0: + version "1.4.0" + resolved "http://localhost:4873/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== + +negotiator@0.6.3: + version "0.6.3" + resolved 
"http://localhost:4873/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" + integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== + +neo-async@^2.6.2: + version "2.6.2" + resolved "http://localhost:4873/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" + integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== + +no-case@^3.0.4: + version "3.0.4" + resolved "http://localhost:4873/no-case/-/no-case-3.0.4.tgz#d361fd5c9800f558551a8369fc0dcd4662b6124d" + integrity sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg== + dependencies: + lower-case "^2.0.2" + tslib "^2.0.3" + +node-forge@^1: + version "1.3.1" + resolved "http://localhost:4873/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" + integrity sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA== + +node-int64@^0.4.0: + version "0.4.0" + resolved "http://localhost:4873/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" + integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== + +node-releases@^2.0.6: + version "2.0.6" + resolved "http://localhost:4873/node-releases/-/node-releases-2.0.6.tgz#8a7088c63a55e493845683ebf3c828d8c51c5503" + integrity sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg== + +normalize-path@^3.0.0, normalize-path@~3.0.0: + version "3.0.0" + resolved "http://localhost:4873/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +normalize-range@^0.1.2: + version "0.1.2" + resolved 
"http://localhost:4873/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" + integrity sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA== + +normalize-url@^6.0.1: + version "6.1.0" + resolved "http://localhost:4873/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" + integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== + +npm-run-path@^4.0.1: + version "4.0.1" + resolved "http://localhost:4873/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" + integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== + dependencies: + path-key "^3.0.0" + +nth-check@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" + integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== + dependencies: + boolbase "~1.0.0" + +nth-check@^2.0.1: + version "2.1.1" + resolved "http://localhost:4873/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d" + integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w== + dependencies: + boolbase "^1.0.0" + +nwsapi@^2.2.0: + version "2.2.2" + resolved "http://localhost:4873/nwsapi/-/nwsapi-2.2.2.tgz#e5418863e7905df67d51ec95938d67bf801f0bb0" + integrity sha512-90yv+6538zuvUMnN+zCr8LuV6bPFdq50304114vJYJ8RDyK8D5O9Phpbd6SZWgI7PwzmmfN1upeOJlvybDSgCw== + +object-assign@^4.1.1: + version "4.1.1" + resolved "http://localhost:4873/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== + +object-hash@^3.0.0: + version "3.0.0" + resolved 
"http://localhost:4873/object-hash/-/object-hash-3.0.0.tgz#73f97f753e7baffc0e2cc9d6e079079744ac82e9" + integrity sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw== + +object-inspect@^1.12.2, object-inspect@^1.9.0: + version "1.12.2" + resolved "http://localhost:4873/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea" + integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ== + +object-keys@^1.1.1: + version "1.1.1" + resolved "http://localhost:4873/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object.assign@^4.1.0, object.assign@^4.1.3, object.assign@^4.1.4: + version "4.1.4" + resolved "http://localhost:4873/object.assign/-/object.assign-4.1.4.tgz#9673c7c7c351ab8c4d0b516f4343ebf4dfb7799f" + integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + has-symbols "^1.0.3" + object-keys "^1.1.1" + +object.entries@^1.1.5: + version "1.1.5" + resolved "http://localhost:4873/object.entries/-/object.entries-1.1.5.tgz#e1acdd17c4de2cd96d5a08487cfb9db84d881861" + integrity sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +object.fromentries@^2.0.5: + version "2.0.5" + resolved "http://localhost:4873/object.fromentries/-/object.fromentries-2.0.5.tgz#7b37b205109c21e741e605727fe8b0ad5fa08251" + integrity sha512-CAyG5mWQRRiBU57Re4FKoTBjXfDoNwdFVH2Y1tS9PqCsfUTymAohOkEMSG3aRNKmv4lV3O7p1et7c187q6bynw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +object.getownpropertydescriptors@^2.1.0: + version "2.1.4" + resolved 
"http://localhost:4873/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.4.tgz#7965e6437a57278b587383831a9b829455a4bc37" + integrity sha512-sccv3L/pMModT6dJAYF3fzGMVcb38ysQ0tEE6ixv2yXJDtEIPph268OlAdJj5/qZMZDq2g/jqvwppt36uS/uQQ== + dependencies: + array.prototype.reduce "^1.0.4" + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.1" + +object.hasown@^1.1.1: + version "1.1.1" + resolved "http://localhost:4873/object.hasown/-/object.hasown-1.1.1.tgz#ad1eecc60d03f49460600430d97f23882cf592a3" + integrity sha512-LYLe4tivNQzq4JdaWW6WO3HMZZJWzkkH8fnI6EebWl0VZth2wL2Lovm74ep2/gZzlaTdV62JZHEqHQ2yVn8Q/A== + dependencies: + define-properties "^1.1.4" + es-abstract "^1.19.5" + +object.values@^1.1.0, object.values@^1.1.5: + version "1.1.5" + resolved "http://localhost:4873/object.values/-/object.values-1.1.5.tgz#959f63e3ce9ef108720333082131e4a459b716ac" + integrity sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +obuf@^1.0.0, obuf@^1.1.2: + version "1.1.2" + resolved "http://localhost:4873/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" + integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== + +on-finished@2.4.1: + version "2.4.1" + resolved "http://localhost:4873/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" + integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== + dependencies: + ee-first "1.1.1" + +on-headers@~1.0.2: + version "1.0.2" + resolved "http://localhost:4873/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" + integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== + +once@^1.3.0: + version "1.4.0" + resolved 
"http://localhost:4873/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== + dependencies: + wrappy "1" + +onetime@^5.1.2: + version "5.1.2" + resolved "http://localhost:4873/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" + integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== + dependencies: + mimic-fn "^2.1.0" + +open@^8.0.9, open@^8.4.0: + version "8.4.0" + resolved "http://localhost:4873/open/-/open-8.4.0.tgz#345321ae18f8138f82565a910fdc6b39e8c244f8" + integrity sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q== + dependencies: + define-lazy-prop "^2.0.0" + is-docker "^2.1.1" + is-wsl "^2.2.0" + +optionator@^0.8.1: + version "0.8.3" + resolved "http://localhost:4873/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" + integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== + dependencies: + deep-is "~0.1.3" + fast-levenshtein "~2.0.6" + levn "~0.3.0" + prelude-ls "~1.1.2" + type-check "~0.3.2" + word-wrap "~1.2.3" + +optionator@^0.9.1: + version "0.9.1" + resolved "http://localhost:4873/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" + integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== + dependencies: + deep-is "^0.1.3" + fast-levenshtein "^2.0.6" + levn "^0.4.1" + prelude-ls "^1.2.1" + type-check "^0.4.0" + word-wrap "^1.2.3" + +p-limit@^2.0.0, p-limit@^2.2.0: + version "2.3.0" + resolved "http://localhost:4873/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" + integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== + dependencies: + p-try "^2.0.0" + +p-limit@^3.0.2: + version "3.1.0" + resolved 
"http://localhost:4873/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" + integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== + dependencies: + yocto-queue "^0.1.0" + +p-locate@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" + integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== + dependencies: + p-limit "^2.0.0" + +p-locate@^4.1.0: + version "4.1.0" + resolved "http://localhost:4873/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" + integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== + dependencies: + p-limit "^2.2.0" + +p-locate@^5.0.0: + version "5.0.0" + resolved "http://localhost:4873/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" + integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== + dependencies: + p-limit "^3.0.2" + +p-retry@^4.5.0: + version "4.6.2" + resolved "http://localhost:4873/p-retry/-/p-retry-4.6.2.tgz#9baae7184057edd4e17231cee04264106e092a16" + integrity sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ== + dependencies: + "@types/retry" "0.12.0" + retry "^0.13.1" + +p-try@^2.0.0: + version "2.2.0" + resolved "http://localhost:4873/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== + +param-case@^3.0.4: + version "3.0.4" + resolved "http://localhost:4873/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5" + integrity sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A== + dependencies: + dot-case "^3.0.4" + tslib "^2.0.3" + +parent-module@^1.0.0: + version 
"1.0.1" + resolved "http://localhost:4873/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + callsites "^3.0.0" + +parse-json@^5.0.0, parse-json@^5.2.0: + version "5.2.0" + resolved "http://localhost:4873/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" + integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== + dependencies: + "@babel/code-frame" "^7.0.0" + error-ex "^1.3.1" + json-parse-even-better-errors "^2.3.0" + lines-and-columns "^1.1.6" + +parse5@6.0.1: + version "6.0.1" + resolved "http://localhost:4873/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b" + integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== + +parseurl@~1.3.2, parseurl@~1.3.3: + version "1.3.3" + resolved "http://localhost:4873/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" + integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== + +pascal-case@^3.1.2: + version "3.1.2" + resolved "http://localhost:4873/pascal-case/-/pascal-case-3.1.2.tgz#b48e0ef2b98e205e7c1dae747d0b1508237660eb" + integrity sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" + +path-exists@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + integrity sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ== + +path-exists@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + integrity 
sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "http://localhost:4873/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== + +path-key@^3.0.0, path-key@^3.1.0: + version "3.1.1" + resolved "http://localhost:4873/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.7: + version "1.0.7" + resolved "http://localhost:4873/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-to-regexp@0.1.7: + version "0.1.7" + resolved "http://localhost:4873/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" + integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== + +path-type@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== + +performance-now@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" + integrity sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow== + +picocolors@^0.2.1: + version "0.2.1" + resolved "http://localhost:4873/picocolors/-/picocolors-0.2.1.tgz#570670f793646851d1ba135996962abad587859f" + integrity sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA== + +picocolors@^1.0.0: + version "1.0.0" + 
resolved "http://localhost:4873/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" + integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== + +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3, picomatch@^2.3.1: + version "2.3.1" + resolved "http://localhost:4873/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== + +pify@^2.3.0: + version "2.3.0" + resolved "http://localhost:4873/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" + integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog== + +pirates@^4.0.4: + version "4.0.5" + resolved "http://localhost:4873/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" + integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== + +pkg-dir@^4.1.0, pkg-dir@^4.2.0: + version "4.2.0" + resolved "http://localhost:4873/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" + integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== + dependencies: + find-up "^4.0.0" + +pkg-up@^3.1.0: + version "3.1.0" + resolved "http://localhost:4873/pkg-up/-/pkg-up-3.1.0.tgz#100ec235cc150e4fd42519412596a28512a0def5" + integrity sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA== + dependencies: + find-up "^3.0.0" + +postcss-attribute-case-insensitive@^5.0.2: + version "5.0.2" + resolved "http://localhost:4873/postcss-attribute-case-insensitive/-/postcss-attribute-case-insensitive-5.0.2.tgz#03d761b24afc04c09e757e92ff53716ae8ea2741" + integrity sha512-XIidXV8fDr0kKt28vqki84fRK8VW8eTuIa4PChv2MqKuT6C9UjmSKzen6KaWhWEoYvwxFCa7n/tC1SZ3tyq4SQ== + dependencies: + postcss-selector-parser "^6.0.10" + 
+postcss-browser-comments@^4: + version "4.0.0" + resolved "http://localhost:4873/postcss-browser-comments/-/postcss-browser-comments-4.0.0.tgz#bcfc86134df5807f5d3c0eefa191d42136b5e72a" + integrity sha512-X9X9/WN3KIvY9+hNERUqX9gncsgBA25XaeR+jshHz2j8+sYyHktHw1JdKuMjeLpGktXidqDhA7b/qm1mrBDmgg== + +postcss-calc@^8.2.3: + version "8.2.4" + resolved "http://localhost:4873/postcss-calc/-/postcss-calc-8.2.4.tgz#77b9c29bfcbe8a07ff6693dc87050828889739a5" + integrity sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q== + dependencies: + postcss-selector-parser "^6.0.9" + postcss-value-parser "^4.2.0" + +postcss-clamp@^4.1.0: + version "4.1.0" + resolved "http://localhost:4873/postcss-clamp/-/postcss-clamp-4.1.0.tgz#7263e95abadd8c2ba1bd911b0b5a5c9c93e02363" + integrity sha512-ry4b1Llo/9zz+PKC+030KUnPITTJAHeOwjfAyyB60eT0AorGLdzp52s31OsPRHRf8NchkgFoG2y6fCfn1IV1Ow== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-color-functional-notation@^4.2.4: + version "4.2.4" + resolved "http://localhost:4873/postcss-color-functional-notation/-/postcss-color-functional-notation-4.2.4.tgz#21a909e8d7454d3612d1659e471ce4696f28caec" + integrity sha512-2yrTAUZUab9s6CpxkxC4rVgFEVaR6/2Pipvi6qcgvnYiVqZcbDHEoBDhrXzyb7Efh2CCfHQNtcqWcIruDTIUeg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-color-hex-alpha@^8.0.4: + version "8.0.4" + resolved "http://localhost:4873/postcss-color-hex-alpha/-/postcss-color-hex-alpha-8.0.4.tgz#c66e2980f2fbc1a63f5b079663340ce8b55f25a5" + integrity sha512-nLo2DCRC9eE4w2JmuKgVA3fGL3d01kGq752pVALF68qpGLmx2Qrk91QTKkdUqqp45T1K1XV8IhQpcu1hoAQflQ== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-color-rebeccapurple@^7.1.1: + version "7.1.1" + resolved "http://localhost:4873/postcss-color-rebeccapurple/-/postcss-color-rebeccapurple-7.1.1.tgz#63fdab91d878ebc4dd4b7c02619a0c3d6a56ced0" + integrity sha512-pGxkuVEInwLHgkNxUc4sdg4g3py7zUeCQ9sMfwyHAT+Ezk8a4OaaVZ8lIY5+oNqA/BXXgLyXv0+5wHP68R79hg== + 
dependencies: + postcss-value-parser "^4.2.0" + +postcss-colormin@^5.3.0: + version "5.3.0" + resolved "http://localhost:4873/postcss-colormin/-/postcss-colormin-5.3.0.tgz#3cee9e5ca62b2c27e84fce63affc0cfb5901956a" + integrity sha512-WdDO4gOFG2Z8n4P8TWBpshnL3JpmNmJwdnfP2gbk2qBA8PWwOYcmjmI/t3CmMeL72a7Hkd+x/Mg9O2/0rD54Pg== + dependencies: + browserslist "^4.16.6" + caniuse-api "^3.0.0" + colord "^2.9.1" + postcss-value-parser "^4.2.0" + +postcss-convert-values@^5.1.2: + version "5.1.2" + resolved "http://localhost:4873/postcss-convert-values/-/postcss-convert-values-5.1.2.tgz#31586df4e184c2e8890e8b34a0b9355313f503ab" + integrity sha512-c6Hzc4GAv95B7suy4udszX9Zy4ETyMCgFPUDtWjdFTKH1SE9eFY/jEpHSwTH1QPuwxHpWslhckUQWbNRM4ho5g== + dependencies: + browserslist "^4.20.3" + postcss-value-parser "^4.2.0" + +postcss-custom-media@^8.0.2: + version "8.0.2" + resolved "http://localhost:4873/postcss-custom-media/-/postcss-custom-media-8.0.2.tgz#c8f9637edf45fef761b014c024cee013f80529ea" + integrity sha512-7yi25vDAoHAkbhAzX9dHx2yc6ntS4jQvejrNcC+csQJAXjj15e7VcWfMgLqBNAbOvqi5uIa9huOVwdHbf+sKqg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-custom-properties@^12.1.9: + version "12.1.9" + resolved "http://localhost:4873/postcss-custom-properties/-/postcss-custom-properties-12.1.9.tgz#0883429a7ef99f1ba239d1fea29ce84906daa8bd" + integrity sha512-/E7PRvK8DAVljBbeWrcEQJPG72jaImxF3vvCNFwv9cC8CzigVoNIpeyfnJzphnN3Fd8/auBf5wvkw6W9MfmTyg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-custom-selectors@^6.0.3: + version "6.0.3" + resolved "http://localhost:4873/postcss-custom-selectors/-/postcss-custom-selectors-6.0.3.tgz#1ab4684d65f30fed175520f82d223db0337239d9" + integrity sha512-fgVkmyiWDwmD3JbpCmB45SvvlCD6z9CG6Ie6Iere22W5aHea6oWa7EM2bpnv2Fj3I94L3VbtvX9KqwSi5aFzSg== + dependencies: + postcss-selector-parser "^6.0.4" + +postcss-dir-pseudo-class@^6.0.5: + version "6.0.5" + resolved 
"http://localhost:4873/postcss-dir-pseudo-class/-/postcss-dir-pseudo-class-6.0.5.tgz#2bf31de5de76added44e0a25ecf60ae9f7c7c26c" + integrity sha512-eqn4m70P031PF7ZQIvSgy9RSJ5uI2171O/OO/zcRNYpJbvaeKFUlar1aJ7rmgiQtbm0FSPsRewjpdS0Oew7MPA== + dependencies: + postcss-selector-parser "^6.0.10" + +postcss-discard-comments@^5.1.2: + version "5.1.2" + resolved "http://localhost:4873/postcss-discard-comments/-/postcss-discard-comments-5.1.2.tgz#8df5e81d2925af2780075840c1526f0660e53696" + integrity sha512-+L8208OVbHVF2UQf1iDmRcbdjJkuBF6IS29yBDSiWUIzpYaAhtNl6JYnYm12FnkeCwQqF5LeklOu6rAqgfBZqQ== + +postcss-discard-duplicates@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-discard-duplicates/-/postcss-discard-duplicates-5.1.0.tgz#9eb4fe8456706a4eebd6d3b7b777d07bad03e848" + integrity sha512-zmX3IoSI2aoenxHV6C7plngHWWhUOV3sP1T8y2ifzxzbtnuhk1EdPwm0S1bIUNaJ2eNbWeGLEwzw8huPD67aQw== + +postcss-discard-empty@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/postcss-discard-empty/-/postcss-discard-empty-5.1.1.tgz#e57762343ff7f503fe53fca553d18d7f0c369c6c" + integrity sha512-zPz4WljiSuLWsI0ir4Mcnr4qQQ5e1Ukc3i7UfE2XcrwKK2LIPIqE5jxMRxO6GbI3cv//ztXDsXwEWT3BHOGh3A== + +postcss-discard-overridden@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-discard-overridden/-/postcss-discard-overridden-5.1.0.tgz#7e8c5b53325747e9d90131bb88635282fb4a276e" + integrity sha512-21nOL7RqWR1kasIVdKs8HNqQJhFxLsyRfAnUDm4Fe4t4mCWL9OJiHvlHPjcd8zc5Myu89b/7wZDnOSjFgeWRtw== + +postcss-double-position-gradients@^3.1.2: + version "3.1.2" + resolved "http://localhost:4873/postcss-double-position-gradients/-/postcss-double-position-gradients-3.1.2.tgz#b96318fdb477be95997e86edd29c6e3557a49b91" + integrity sha512-GX+FuE/uBR6eskOK+4vkXgT6pDkexLokPaz/AbJna9s5Kzp/yl488pKPjhy0obB475ovfT1Wv8ho7U/cHNaRgQ== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + +postcss-env-function@^4.0.6: + version "4.0.6" + resolved 
"http://localhost:4873/postcss-env-function/-/postcss-env-function-4.0.6.tgz#7b2d24c812f540ed6eda4c81f6090416722a8e7a" + integrity sha512-kpA6FsLra+NqcFnL81TnsU+Z7orGtDTxcOhl6pwXeEq1yFPpRMkCDpHhrz8CFQDr/Wfm0jLiNQ1OsGGPjlqPwA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-flexbugs-fixes@^5.0.2: + version "5.0.2" + resolved "http://localhost:4873/postcss-flexbugs-fixes/-/postcss-flexbugs-fixes-5.0.2.tgz#2028e145313074fc9abe276cb7ca14e5401eb49d" + integrity sha512-18f9voByak7bTktR2QgDveglpn9DTbBWPUzSOe9g0N4WR/2eSt6Vrcbf0hmspvMI6YWGywz6B9f7jzpFNJJgnQ== + +postcss-focus-visible@^6.0.4: + version "6.0.4" + resolved "http://localhost:4873/postcss-focus-visible/-/postcss-focus-visible-6.0.4.tgz#50c9ea9afa0ee657fb75635fabad25e18d76bf9e" + integrity sha512-QcKuUU/dgNsstIK6HELFRT5Y3lbrMLEOwG+A4s5cA+fx3A3y/JTq3X9LaOj3OC3ALH0XqyrgQIgey/MIZ8Wczw== + dependencies: + postcss-selector-parser "^6.0.9" + +postcss-focus-within@^5.0.4: + version "5.0.4" + resolved "http://localhost:4873/postcss-focus-within/-/postcss-focus-within-5.0.4.tgz#5b1d2ec603195f3344b716c0b75f61e44e8d2e20" + integrity sha512-vvjDN++C0mu8jz4af5d52CB184ogg/sSxAFS+oUJQq2SuCe7T5U2iIsVJtsCp2d6R4j0jr5+q3rPkBVZkXD9fQ== + dependencies: + postcss-selector-parser "^6.0.9" + +postcss-font-variant@^5.0.0: + version "5.0.0" + resolved "http://localhost:4873/postcss-font-variant/-/postcss-font-variant-5.0.0.tgz#efd59b4b7ea8bb06127f2d031bfbb7f24d32fa66" + integrity sha512-1fmkBaCALD72CK2a9i468mA/+tr9/1cBxRRMXOUaZqO43oWPR5imcyPjXwuv7PXbCid4ndlP5zWhidQVVa3hmA== + +postcss-gap-properties@^3.0.5: + version "3.0.5" + resolved "http://localhost:4873/postcss-gap-properties/-/postcss-gap-properties-3.0.5.tgz#f7e3cddcf73ee19e94ccf7cb77773f9560aa2fff" + integrity sha512-IuE6gKSdoUNcvkGIqdtjtcMtZIFyXZhmFd5RUlg97iVEvp1BZKV5ngsAjCjrVy+14uhGBQl9tzmi1Qwq4kqVOg== + +postcss-image-set-function@^4.0.7: + version "4.0.7" + resolved 
"http://localhost:4873/postcss-image-set-function/-/postcss-image-set-function-4.0.7.tgz#08353bd756f1cbfb3b6e93182c7829879114481f" + integrity sha512-9T2r9rsvYzm5ndsBE8WgtrMlIT7VbtTfE7b3BQnudUqnBcBo7L758oc+o+pdj/dUV0l5wjwSdjeOH2DZtfv8qw== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-import@^14.1.0: + version "14.1.0" + resolved "http://localhost:4873/postcss-import/-/postcss-import-14.1.0.tgz#a7333ffe32f0b8795303ee9e40215dac922781f0" + integrity sha512-flwI+Vgm4SElObFVPpTIT7SU7R3qk2L7PyduMcokiaVKuWv9d/U+Gm/QAd8NDLuykTWTkcrjOeD2Pp1rMeBTGw== + dependencies: + postcss-value-parser "^4.0.0" + read-cache "^1.0.0" + resolve "^1.1.7" + +postcss-initial@^4.0.1: + version "4.0.1" + resolved "http://localhost:4873/postcss-initial/-/postcss-initial-4.0.1.tgz#529f735f72c5724a0fb30527df6fb7ac54d7de42" + integrity sha512-0ueD7rPqX8Pn1xJIjay0AZeIuDoF+V+VvMt/uOnn+4ezUKhZM/NokDeP6DwMNyIoYByuN/94IQnt5FEkaN59xQ== + +postcss-js@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/postcss-js/-/postcss-js-4.0.0.tgz#31db79889531b80dc7bc9b0ad283e418dce0ac00" + integrity sha512-77QESFBwgX4irogGVPgQ5s07vLvFqWr228qZY+w6lW599cRlK/HmnlivnnVUxkjHnCu4J16PDMHcH+e+2HbvTQ== + dependencies: + camelcase-css "^2.0.1" + +postcss-lab-function@^4.2.1: + version "4.2.1" + resolved "http://localhost:4873/postcss-lab-function/-/postcss-lab-function-4.2.1.tgz#6fe4c015102ff7cd27d1bd5385582f67ebdbdc98" + integrity sha512-xuXll4isR03CrQsmxyz92LJB2xX9n+pZJ5jE9JgcnmsCammLyKdlzrBin+25dy6wIjfhJpKBAN80gsTlCgRk2w== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + +postcss-load-config@^3.1.4: + version "3.1.4" + resolved "http://localhost:4873/postcss-load-config/-/postcss-load-config-3.1.4.tgz#1ab2571faf84bb078877e1d07905eabe9ebda855" + integrity sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg== + dependencies: + lilconfig "^2.0.5" + yaml "^1.10.2" + +postcss-loader@^6.2.1: + version 
"6.2.1" + resolved "http://localhost:4873/postcss-loader/-/postcss-loader-6.2.1.tgz#0895f7346b1702103d30fdc66e4d494a93c008ef" + integrity sha512-WbbYpmAaKcux/P66bZ40bpWsBucjx/TTgVVzRZ9yUO8yQfVBlameJ0ZGVaPfH64hNSBh63a+ICP5nqOpBA0w+Q== + dependencies: + cosmiconfig "^7.0.0" + klona "^2.0.5" + semver "^7.3.5" + +postcss-logical@^5.0.4: + version "5.0.4" + resolved "http://localhost:4873/postcss-logical/-/postcss-logical-5.0.4.tgz#ec75b1ee54421acc04d5921576b7d8db6b0e6f73" + integrity sha512-RHXxplCeLh9VjinvMrZONq7im4wjWGlRJAqmAVLXyZaXwfDWP73/oq4NdIp+OZwhQUMj0zjqDfM5Fj7qby+B4g== + +postcss-media-minmax@^5.0.0: + version "5.0.0" + resolved "http://localhost:4873/postcss-media-minmax/-/postcss-media-minmax-5.0.0.tgz#7140bddec173e2d6d657edbd8554a55794e2a5b5" + integrity sha512-yDUvFf9QdFZTuCUg0g0uNSHVlJ5X1lSzDZjPSFaiCWvjgsvu8vEVxtahPrLMinIDEEGnx6cBe6iqdx5YWz08wQ== + +postcss-merge-longhand@^5.1.6: + version "5.1.6" + resolved "http://localhost:4873/postcss-merge-longhand/-/postcss-merge-longhand-5.1.6.tgz#f378a8a7e55766b7b644f48e5d8c789ed7ed51ce" + integrity sha512-6C/UGF/3T5OE2CEbOuX7iNO63dnvqhGZeUnKkDeifebY0XqkkvrctYSZurpNE902LDf2yKwwPFgotnfSoPhQiw== + dependencies: + postcss-value-parser "^4.2.0" + stylehacks "^5.1.0" + +postcss-merge-rules@^5.1.2: + version "5.1.2" + resolved "http://localhost:4873/postcss-merge-rules/-/postcss-merge-rules-5.1.2.tgz#7049a14d4211045412116d79b751def4484473a5" + integrity sha512-zKMUlnw+zYCWoPN6yhPjtcEdlJaMUZ0WyVcxTAmw3lkkN/NDMRkOkiuctQEoWAOvH7twaxUUdvBWl0d4+hifRQ== + dependencies: + browserslist "^4.16.6" + caniuse-api "^3.0.0" + cssnano-utils "^3.1.0" + postcss-selector-parser "^6.0.5" + +postcss-minify-font-values@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-minify-font-values/-/postcss-minify-font-values-5.1.0.tgz#f1df0014a726083d260d3bd85d7385fb89d1f01b" + integrity sha512-el3mYTgx13ZAPPirSVsHqFzl+BBBDrXvbySvPGFnQcTI4iNslrPaFq4muTkLZmKlGk4gyFAYUBMH30+HurREyA== + dependencies: + postcss-value-parser "^4.2.0" + 
+postcss-minify-gradients@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/postcss-minify-gradients/-/postcss-minify-gradients-5.1.1.tgz#f1fe1b4f498134a5068240c2f25d46fcd236ba2c" + integrity sha512-VGvXMTpCEo4qHTNSa9A0a3D+dxGFZCYwR6Jokk+/3oB6flu2/PnPXAh2x7x52EkY5xlIHLm+Le8tJxe/7TNhzw== + dependencies: + colord "^2.9.1" + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" + +postcss-minify-params@^5.1.3: + version "5.1.3" + resolved "http://localhost:4873/postcss-minify-params/-/postcss-minify-params-5.1.3.tgz#ac41a6465be2db735099bbd1798d85079a6dc1f9" + integrity sha512-bkzpWcjykkqIujNL+EVEPOlLYi/eZ050oImVtHU7b4lFS82jPnsCb44gvC6pxaNt38Els3jWYDHTjHKf0koTgg== + dependencies: + browserslist "^4.16.6" + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" + +postcss-minify-selectors@^5.2.1: + version "5.2.1" + resolved "http://localhost:4873/postcss-minify-selectors/-/postcss-minify-selectors-5.2.1.tgz#d4e7e6b46147b8117ea9325a915a801d5fe656c6" + integrity sha512-nPJu7OjZJTsVUmPdm2TcaiohIwxP+v8ha9NehQ2ye9szv4orirRU3SDdtUmKH+10nzn0bAyOXZ0UEr7OpvLehg== + dependencies: + postcss-selector-parser "^6.0.5" + +postcss-modules-extract-imports@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz#cda1f047c0ae80c97dbe28c3e76a43b88025741d" + integrity sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw== + +postcss-modules-local-by-default@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.0.tgz#ebbb54fae1598eecfdf691a02b3ff3b390a5a51c" + integrity sha512-sT7ihtmGSF9yhm6ggikHdV0hlziDTX7oFoXtuVWeDd3hHObNkcHRo9V3yg7vCAY7cONyxJC/XXCmmiHHcvX7bQ== + dependencies: + icss-utils "^5.0.0" + postcss-selector-parser "^6.0.2" + postcss-value-parser "^4.1.0" + +postcss-modules-scope@^3.0.0: + version "3.0.0" + resolved 
"http://localhost:4873/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz#9ef3151456d3bbfa120ca44898dfca6f2fa01f06" + integrity sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg== + dependencies: + postcss-selector-parser "^6.0.4" + +postcss-modules-values@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz#d7c5e7e68c3bb3c9b27cbf48ca0bb3ffb4602c9c" + integrity sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ== + dependencies: + icss-utils "^5.0.0" + +postcss-nested@5.0.6: + version "5.0.6" + resolved "http://localhost:4873/postcss-nested/-/postcss-nested-5.0.6.tgz#466343f7fc8d3d46af3e7dba3fcd47d052a945bc" + integrity sha512-rKqm2Fk0KbA8Vt3AdGN0FB9OBOMDVajMG6ZCf/GoHgdxUJ4sBFp0A/uMIRm+MJUdo33YXEtjqIz8u7DAp8B7DA== + dependencies: + postcss-selector-parser "^6.0.6" + +postcss-nesting@^10.2.0: + version "10.2.0" + resolved "http://localhost:4873/postcss-nesting/-/postcss-nesting-10.2.0.tgz#0b12ce0db8edfd2d8ae0aaf86427370b898890be" + integrity sha512-EwMkYchxiDiKUhlJGzWsD9b2zvq/r2SSubcRrgP+jujMXFzqvANLt16lJANC+5uZ6hjI7lpRmI6O8JIl+8l1KA== + dependencies: + "@csstools/selector-specificity" "^2.0.0" + postcss-selector-parser "^6.0.10" + +postcss-normalize-charset@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-normalize-charset/-/postcss-normalize-charset-5.1.0.tgz#9302de0b29094b52c259e9b2cf8dc0879879f0ed" + integrity sha512-mSgUJ+pd/ldRGVx26p2wz9dNZ7ji6Pn8VWBajMXFf8jk7vUoSrZ2lt/wZR7DtlZYKesmZI680qjr2CeFF2fbUg== + +postcss-normalize-display-values@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-normalize-display-values/-/postcss-normalize-display-values-5.1.0.tgz#72abbae58081960e9edd7200fcf21ab8325c3da8" + integrity sha512-WP4KIM4o2dazQXWmFaqMmcvsKmhdINFblgSeRgn8BJ6vxaMyaJkwAzpPpuvSIoG/rmX3M+IrRZEz2H0glrQNEA== + dependencies: + postcss-value-parser "^4.2.0" + 
+postcss-normalize-positions@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/postcss-normalize-positions/-/postcss-normalize-positions-5.1.1.tgz#ef97279d894087b59325b45c47f1e863daefbb92" + integrity sha512-6UpCb0G4eofTCQLFVuI3EVNZzBNPiIKcA1AKVka+31fTVySphr3VUgAIULBhxZkKgwLImhzMR2Bw1ORK+37INg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-repeat-style@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.1.1.tgz#e9eb96805204f4766df66fd09ed2e13545420fb2" + integrity sha512-mFpLspGWkQtBcWIRFLmewo8aC3ImN2i/J3v8YCFUwDnPu3Xz4rLohDO26lGjwNsQxB3YF0KKRwspGzE2JEuS0g== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-string@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-normalize-string/-/postcss-normalize-string-5.1.0.tgz#411961169e07308c82c1f8c55f3e8a337757e228" + integrity sha512-oYiIJOf4T9T1N4i+abeIc7Vgm/xPCGih4bZz5Nm0/ARVJ7K6xrDlLwvwqOydvyL3RHNf8qZk6vo3aatiw/go3w== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-timing-functions@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.1.0.tgz#d5614410f8f0b2388e9f240aa6011ba6f52dafbb" + integrity sha512-DOEkzJ4SAXv5xkHl0Wa9cZLF3WCBhF3o1SKVxKQAa+0pYKlueTpCgvkFAHfk+Y64ezX9+nITGrDZeVGgITJXjg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-unicode@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-normalize-unicode/-/postcss-normalize-unicode-5.1.0.tgz#3d23aede35e160089a285e27bf715de11dc9db75" + integrity sha512-J6M3MizAAZ2dOdSjy2caayJLQT8E8K9XjLce8AUQMwOrCvjCHv24aLC/Lps1R1ylOfol5VIDMaM/Lo9NGlk1SQ== + dependencies: + browserslist "^4.16.6" + postcss-value-parser "^4.2.0" + +postcss-normalize-url@^5.1.0: + version "5.1.0" + resolved 
"http://localhost:4873/postcss-normalize-url/-/postcss-normalize-url-5.1.0.tgz#ed9d88ca82e21abef99f743457d3729a042adcdc" + integrity sha512-5upGeDO+PVthOxSmds43ZeMeZfKH+/DKgGRD7TElkkyS46JXAUhMzIKiCa7BabPeIy3AQcTkXwVVN7DbqsiCew== + dependencies: + normalize-url "^6.0.1" + postcss-value-parser "^4.2.0" + +postcss-normalize-whitespace@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz#08a1a0d1ffa17a7cc6efe1e6c9da969cc4493cfa" + integrity sha512-83ZJ4t3NUDETIHTa3uEg6asWjSBYL5EdkVB0sDncx9ERzOKBVJIUeDO9RyA9Zwtig8El1d79HBp0JEi8wvGQnA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize@^10.0.1: + version "10.0.1" + resolved "http://localhost:4873/postcss-normalize/-/postcss-normalize-10.0.1.tgz#464692676b52792a06b06880a176279216540dd7" + integrity sha512-+5w18/rDev5mqERcG3W5GZNMJa1eoYYNGo8gB7tEwaos0ajk3ZXAI4mHGcNT47NE+ZnZD1pEpUOFLvltIwmeJA== + dependencies: + "@csstools/normalize.css" "*" + postcss-browser-comments "^4" + sanitize.css "*" + +postcss-opacity-percentage@^1.1.2: + version "1.1.2" + resolved "http://localhost:4873/postcss-opacity-percentage/-/postcss-opacity-percentage-1.1.2.tgz#bd698bb3670a0a27f6d657cc16744b3ebf3b1145" + integrity sha512-lyUfF7miG+yewZ8EAk9XUBIlrHyUE6fijnesuz+Mj5zrIHIEw6KcIZSOk/elVMqzLvREmXB83Zi/5QpNRYd47w== + +postcss-ordered-values@^5.1.3: + version "5.1.3" + resolved "http://localhost:4873/postcss-ordered-values/-/postcss-ordered-values-5.1.3.tgz#b6fd2bd10f937b23d86bc829c69e7732ce76ea38" + integrity sha512-9UO79VUhPwEkzbb3RNpqqghc6lcYej1aveQteWY+4POIwlqkYE21HKWaLDF6lWNuqCobEAyTovVhtI32Rbv2RQ== + dependencies: + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" + +postcss-overflow-shorthand@^3.0.4: + version "3.0.4" + resolved "http://localhost:4873/postcss-overflow-shorthand/-/postcss-overflow-shorthand-3.0.4.tgz#7ed6486fec44b76f0eab15aa4866cda5d55d893e" + integrity 
sha512-otYl/ylHK8Y9bcBnPLo3foYFLL6a6Ak+3EQBPOTR7luMYCOsiVTUk1iLvNf6tVPNGXcoL9Hoz37kpfriRIFb4A== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-page-break@^3.0.4: + version "3.0.4" + resolved "http://localhost:4873/postcss-page-break/-/postcss-page-break-3.0.4.tgz#7fbf741c233621622b68d435babfb70dd8c1ee5f" + integrity sha512-1JGu8oCjVXLa9q9rFTo4MbeeA5FMe00/9C7lN4va606Rdb+HkxXtXsmEDrIraQ11fGz/WvKWa8gMuCKkrXpTsQ== + +postcss-place@^7.0.5: + version "7.0.5" + resolved "http://localhost:4873/postcss-place/-/postcss-place-7.0.5.tgz#95dbf85fd9656a3a6e60e832b5809914236986c4" + integrity sha512-wR8igaZROA6Z4pv0d+bvVrvGY4GVHihBCBQieXFY3kuSuMyOmEnnfFzHl/tQuqHZkfkIVBEbDvYcFfHmpSet9g== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-preset-env@^7.0.1: + version "7.8.2" + resolved "http://localhost:4873/postcss-preset-env/-/postcss-preset-env-7.8.2.tgz#4c834d5cbd2e29df2abf59118947c456922b79ba" + integrity sha512-rSMUEaOCnovKnwc5LvBDHUDzpGP+nrUeWZGWt9M72fBvckCi45JmnJigUr4QG4zZeOHmOCNCZnd2LKDvP++ZuQ== + dependencies: + "@csstools/postcss-cascade-layers" "^1.1.0" + "@csstools/postcss-color-function" "^1.1.1" + "@csstools/postcss-font-format-keywords" "^1.0.1" + "@csstools/postcss-hwb-function" "^1.0.2" + "@csstools/postcss-ic-unit" "^1.0.1" + "@csstools/postcss-is-pseudo-class" "^2.0.7" + "@csstools/postcss-nested-calc" "^1.0.0" + "@csstools/postcss-normalize-display-values" "^1.0.1" + "@csstools/postcss-oklab-function" "^1.1.1" + "@csstools/postcss-progressive-custom-properties" "^1.3.0" + "@csstools/postcss-stepped-value-functions" "^1.0.1" + "@csstools/postcss-text-decoration-shorthand" "^1.0.0" + "@csstools/postcss-trigonometric-functions" "^1.0.2" + "@csstools/postcss-unset-value" "^1.0.2" + autoprefixer "^10.4.11" + browserslist "^4.21.3" + css-blank-pseudo "^3.0.3" + css-has-pseudo "^3.0.4" + css-prefers-color-scheme "^6.0.3" + cssdb "^7.0.1" + postcss-attribute-case-insensitive "^5.0.2" + postcss-clamp "^4.1.0" + postcss-color-functional-notation 
"^4.2.4" + postcss-color-hex-alpha "^8.0.4" + postcss-color-rebeccapurple "^7.1.1" + postcss-custom-media "^8.0.2" + postcss-custom-properties "^12.1.9" + postcss-custom-selectors "^6.0.3" + postcss-dir-pseudo-class "^6.0.5" + postcss-double-position-gradients "^3.1.2" + postcss-env-function "^4.0.6" + postcss-focus-visible "^6.0.4" + postcss-focus-within "^5.0.4" + postcss-font-variant "^5.0.0" + postcss-gap-properties "^3.0.5" + postcss-image-set-function "^4.0.7" + postcss-initial "^4.0.1" + postcss-lab-function "^4.2.1" + postcss-logical "^5.0.4" + postcss-media-minmax "^5.0.0" + postcss-nesting "^10.2.0" + postcss-opacity-percentage "^1.1.2" + postcss-overflow-shorthand "^3.0.4" + postcss-page-break "^3.0.4" + postcss-place "^7.0.5" + postcss-pseudo-class-any-link "^7.1.6" + postcss-replace-overflow-wrap "^4.0.0" + postcss-selector-not "^6.0.1" + postcss-value-parser "^4.2.0" + +postcss-pseudo-class-any-link@^7.1.6: + version "7.1.6" + resolved "http://localhost:4873/postcss-pseudo-class-any-link/-/postcss-pseudo-class-any-link-7.1.6.tgz#2693b221902da772c278def85a4d9a64b6e617ab" + integrity sha512-9sCtZkO6f/5ML9WcTLcIyV1yz9D1rf0tWc+ulKcvV30s0iZKS/ONyETvoWsr6vnrmW+X+KmuK3gV/w5EWnT37w== + dependencies: + postcss-selector-parser "^6.0.10" + +postcss-reduce-initial@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-reduce-initial/-/postcss-reduce-initial-5.1.0.tgz#fc31659ea6e85c492fb2a7b545370c215822c5d6" + integrity sha512-5OgTUviz0aeH6MtBjHfbr57tml13PuedK/Ecg8szzd4XRMbYxH4572JFG067z+FqBIf6Zp/d+0581glkvvWMFw== + dependencies: + browserslist "^4.16.6" + caniuse-api "^3.0.0" + +postcss-reduce-transforms@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-reduce-transforms/-/postcss-reduce-transforms-5.1.0.tgz#333b70e7758b802f3dd0ddfe98bb1ccfef96b6e9" + integrity sha512-2fbdbmgir5AvpW9RLtdONx1QoYG2/EtqpNQbFASDlixBbAYuTcJ0dECwlqNqH7VbaUnEnh8SrxOe2sRIn24XyQ== + dependencies: + postcss-value-parser "^4.2.0" + 
+postcss-replace-overflow-wrap@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/postcss-replace-overflow-wrap/-/postcss-replace-overflow-wrap-4.0.0.tgz#d2df6bed10b477bf9c52fab28c568b4b29ca4319" + integrity sha512-KmF7SBPphT4gPPcKZc7aDkweHiKEEO8cla/GjcBK+ckKxiZslIu3C4GCRW3DNfL0o7yW7kMQu9xlZ1kXRXLXtw== + +postcss-selector-not@^6.0.1: + version "6.0.1" + resolved "http://localhost:4873/postcss-selector-not/-/postcss-selector-not-6.0.1.tgz#8f0a709bf7d4b45222793fc34409be407537556d" + integrity sha512-1i9affjAe9xu/y9uqWH+tD4r6/hDaXJruk8xn2x1vzxC2U3J3LKO3zJW4CyxlNhA56pADJ/djpEwpH1RClI2rQ== + dependencies: + postcss-selector-parser "^6.0.10" + +postcss-selector-parser@^6.0.10, postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector-parser@^6.0.5, postcss-selector-parser@^6.0.6, postcss-selector-parser@^6.0.9: + version "6.0.10" + resolved "http://localhost:4873/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz#79b61e2c0d1bfc2602d549e11d0876256f8df88d" + integrity sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w== + dependencies: + cssesc "^3.0.0" + util-deprecate "^1.0.2" + +postcss-svgo@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/postcss-svgo/-/postcss-svgo-5.1.0.tgz#0a317400ced789f233a28826e77523f15857d80d" + integrity sha512-D75KsH1zm5ZrHyxPakAxJWtkyXew5qwS70v56exwvw542d9CRtTo78K0WeFxZB4G7JXKKMbEZtZayTGdIky/eA== + dependencies: + postcss-value-parser "^4.2.0" + svgo "^2.7.0" + +postcss-unique-selectors@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/postcss-unique-selectors/-/postcss-unique-selectors-5.1.1.tgz#a9f273d1eacd09e9aa6088f4b0507b18b1b541b6" + integrity sha512-5JiODlELrz8L2HwxfPnhOWZYWDxVHWL83ufOv84NrcgipI7TaeRsatAhK4Tr2/ZiYldpK/wBvw5BD3qfaK96GA== + dependencies: + postcss-selector-parser "^6.0.5" + +postcss-value-parser@^4.0.0, postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0: + version "4.2.0" + resolved 
"http://localhost:4873/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz#723c09920836ba6d3e5af019f92bc0971c02e514" + integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== + +postcss@^7.0.35: + version "7.0.39" + resolved "http://localhost:4873/postcss/-/postcss-7.0.39.tgz#9624375d965630e2e1f2c02a935c82a59cb48309" + integrity sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA== + dependencies: + picocolors "^0.2.1" + source-map "^0.6.1" + +postcss@^8.3.5, postcss@^8.4.14, postcss@^8.4.4, postcss@^8.4.7: + version "8.4.17" + resolved "http://localhost:4873/postcss/-/postcss-8.4.17.tgz#f87863ec7cd353f81f7ab2dec5d67d861bbb1be5" + integrity sha512-UNxNOLQydcOFi41yHNMcKRZ39NeXlr8AxGuZJsdub8vIb12fHzcq37DTU/QtbI6WLxNg2gF9Z+8qtRwTj1UI1Q== + dependencies: + nanoid "^3.3.4" + picocolors "^1.0.0" + source-map-js "^1.0.2" + +prelude-ls@^1.2.1: + version "1.2.1" + resolved "http://localhost:4873/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== + +prelude-ls@~1.1.2: + version "1.1.2" + resolved "http://localhost:4873/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" + integrity sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w== + +pretty-bytes@^5.3.0, pretty-bytes@^5.4.1: + version "5.6.0" + resolved "http://localhost:4873/pretty-bytes/-/pretty-bytes-5.6.0.tgz#356256f643804773c82f64723fe78c92c62beaeb" + integrity sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg== + +pretty-error@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/pretty-error/-/pretty-error-4.0.0.tgz#90a703f46dd7234adb46d0f84823e9d1cb8f10d6" + integrity sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw== + 
dependencies: + lodash "^4.17.20" + renderkid "^3.0.0" + +pretty-format@^27.0.2, pretty-format@^27.5.1: + version "27.5.1" + resolved "http://localhost:4873/pretty-format/-/pretty-format-27.5.1.tgz#2181879fdea51a7a5851fb39d920faa63f01d88e" + integrity sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ== + dependencies: + ansi-regex "^5.0.1" + ansi-styles "^5.0.0" + react-is "^17.0.1" + +pretty-format@^28.1.3: + version "28.1.3" + resolved "http://localhost:4873/pretty-format/-/pretty-format-28.1.3.tgz#c9fba8cedf99ce50963a11b27d982a9ae90970d5" + integrity sha512-8gFb/To0OmxHR9+ZTb14Df2vNxdGCX8g1xWGUTqUw5TiZvcQf5sHKObd5UcPyLLyowNwDAMTF3XWOG1B6mxl1Q== + dependencies: + "@jest/schemas" "^28.1.3" + ansi-regex "^5.0.1" + ansi-styles "^5.0.0" + react-is "^18.0.0" + +pretty-format@^29.0.0, pretty-format@^29.1.2: + version "29.1.2" + resolved "http://localhost:4873/pretty-format/-/pretty-format-29.1.2.tgz#b1f6b75be7d699be1a051f5da36e8ae9e76a8e6a" + integrity sha512-CGJ6VVGXVRP2o2Dorl4mAwwvDWT25luIsYhkyVQW32E4nL+TgW939J7LlKT/npq5Cpq6j3s+sy+13yk7xYpBmg== + dependencies: + "@jest/schemas" "^29.0.0" + ansi-styles "^5.0.0" + react-is "^18.0.0" + +process-nextick-args@~2.0.0: + version "2.0.1" + resolved "http://localhost:4873/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== + +promise@^8.1.0: + version "8.2.0" + resolved "http://localhost:4873/promise/-/promise-8.2.0.tgz#a1f6280ab67457fbfc8aad2b198c9497e9e5c806" + integrity sha512-+CMAlLHqwRYwBMXKCP+o8ns7DN+xHDUiI+0nArsiJ9y+kJVPLFxEaSw6Ha9s9H0tftxg2Yzl25wqj9G7m5wLZg== + dependencies: + asap "~2.0.6" + +prompts@^2.0.1, prompts@^2.4.2: + version "2.4.2" + resolved "http://localhost:4873/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" + integrity 
sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== + dependencies: + kleur "^3.0.3" + sisteransi "^1.0.5" + +prop-types@^15.8.1: + version "15.8.1" + resolved "http://localhost:4873/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5" + integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== + dependencies: + loose-envify "^1.4.0" + object-assign "^4.1.1" + react-is "^16.13.1" + +proxy-addr@~2.0.7: + version "2.0.7" + resolved "http://localhost:4873/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" + integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== + dependencies: + forwarded "0.2.0" + ipaddr.js "1.9.1" + +psl@^1.1.33: + version "1.9.0" + resolved "http://localhost:4873/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" + integrity sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag== + +punycode@^2.1.0, punycode@^2.1.1: + version "2.1.1" + resolved "http://localhost:4873/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +q@^1.1.2: + version "1.5.1" + resolved "http://localhost:4873/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" + integrity sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw== + +qs@6.10.3: + version "6.10.3" + resolved "http://localhost:4873/qs/-/qs-6.10.3.tgz#d6cde1b2ffca87b5aa57889816c5f81535e22e8e" + integrity sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ== + dependencies: + side-channel "^1.0.4" + +querystringify@^2.1.1: + version "2.2.0" + resolved "http://localhost:4873/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" + 
integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== + +queue-microtask@^1.2.2: + version "1.2.3" + resolved "http://localhost:4873/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + +quick-lru@^5.1.1: + version "5.1.1" + resolved "http://localhost:4873/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932" + integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== + +raf@^3.4.1: + version "3.4.1" + resolved "http://localhost:4873/raf/-/raf-3.4.1.tgz#0742e99a4a6552f445d73e3ee0328af0ff1ede39" + integrity sha512-Sq4CW4QhwOHE8ucn6J34MqtZCeWFP2aQSmrlroYgqAV1PjStIhJXxYuTgUIfkEk7zTLjmIjLmU5q+fbD1NnOJA== + dependencies: + performance-now "^2.1.0" + +randombytes@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" + integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== + dependencies: + safe-buffer "^5.1.0" + +range-parser@^1.2.1, range-parser@~1.2.1: + version "1.2.1" + resolved "http://localhost:4873/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" + integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== + +raw-body@2.5.1: + version "2.5.1" + resolved "http://localhost:4873/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" + integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== + dependencies: + bytes "3.1.2" + http-errors "2.0.0" + iconv-lite "0.4.24" + unpipe "1.0.0" + +react-app-polyfill@^3.0.0: + version "3.0.0" + resolved 
"http://localhost:4873/react-app-polyfill/-/react-app-polyfill-3.0.0.tgz#95221e0a9bd259e5ca6b177c7bb1cb6768f68fd7" + integrity sha512-sZ41cxiU5llIB003yxxQBYrARBqe0repqPTTYBTmMqTz9szeBbE37BehCE891NZsmdZqqP+xWKdT3eo3vOzN8w== + dependencies: + core-js "^3.19.2" + object-assign "^4.1.1" + promise "^8.1.0" + raf "^3.4.1" + regenerator-runtime "^0.13.9" + whatwg-fetch "^3.6.2" + +react-dev-utils@^12.0.1: + version "12.0.1" + resolved "http://localhost:4873/react-dev-utils/-/react-dev-utils-12.0.1.tgz#ba92edb4a1f379bd46ccd6bcd4e7bc398df33e73" + integrity sha512-84Ivxmr17KjUupyqzFode6xKhjwuEJDROWKJy/BthkL7Wn6NJ8h4WE6k/exAv6ImS+0oZLRRW5j/aINMHyeGeQ== + dependencies: + "@babel/code-frame" "^7.16.0" + address "^1.1.2" + browserslist "^4.18.1" + chalk "^4.1.2" + cross-spawn "^7.0.3" + detect-port-alt "^1.1.6" + escape-string-regexp "^4.0.0" + filesize "^8.0.6" + find-up "^5.0.0" + fork-ts-checker-webpack-plugin "^6.5.0" + global-modules "^2.0.0" + globby "^11.0.4" + gzip-size "^6.0.0" + immer "^9.0.7" + is-root "^2.1.0" + loader-utils "^3.2.0" + open "^8.4.0" + pkg-up "^3.1.0" + prompts "^2.4.2" + react-error-overlay "^6.0.11" + recursive-readdir "^2.2.2" + shell-quote "^1.7.3" + strip-ansi "^6.0.1" + text-table "^0.2.0" + +react-dom@^18.2.0: + version "18.2.0" + resolved "http://localhost:4873/react-dom/-/react-dom-18.2.0.tgz#22aaf38708db2674ed9ada224ca4aa708d821e3d" + integrity sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g== + dependencies: + loose-envify "^1.1.0" + scheduler "^0.23.0" + +react-error-overlay@^6.0.11: + version "6.0.11" + resolved "http://localhost:4873/react-error-overlay/-/react-error-overlay-6.0.11.tgz#92835de5841c5cf08ba00ddd2d677b6d17ff9adb" + integrity sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg== + +react-is@^16.13.1: + version "16.13.1" + resolved "http://localhost:4873/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" + integrity 
sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== + +react-is@^17.0.1: + version "17.0.2" + resolved "http://localhost:4873/react-is/-/react-is-17.0.2.tgz#e691d4a8e9c789365655539ab372762b0efb54f0" + integrity sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w== + +react-is@^18.0.0: + version "18.2.0" + resolved "http://localhost:4873/react-is/-/react-is-18.2.0.tgz#199431eeaaa2e09f86427efbb4f1473edb47609b" + integrity sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w== + +react-refresh@^0.11.0: + version "0.11.0" + resolved "http://localhost:4873/react-refresh/-/react-refresh-0.11.0.tgz#77198b944733f0f1f1a90e791de4541f9f074046" + integrity sha512-F27qZr8uUqwhWZboondsPx8tnC3Ct3SxZA3V5WyEvujRyyNv0VYPhoBg1gZ8/MV5tubQp76Trw8lTv9hzRBa+A== + +react-scripts@5.0.1: + version "5.0.1" + resolved "http://localhost:4873/react-scripts/-/react-scripts-5.0.1.tgz#6285dbd65a8ba6e49ca8d651ce30645a6d980003" + integrity sha512-8VAmEm/ZAwQzJ+GOMLbBsTdDKOpuZh7RPs0UymvBR2vRk4iZWCskjbFnxqjrzoIvlNNRZ3QJFx6/qDSi6zSnaQ== + dependencies: + "@babel/core" "^7.16.0" + "@pmmmwh/react-refresh-webpack-plugin" "^0.5.3" + "@svgr/webpack" "^5.5.0" + babel-jest "^27.4.2" + babel-loader "^8.2.3" + babel-plugin-named-asset-import "^0.3.8" + babel-preset-react-app "^10.0.1" + bfj "^7.0.2" + browserslist "^4.18.1" + camelcase "^6.2.1" + case-sensitive-paths-webpack-plugin "^2.4.0" + css-loader "^6.5.1" + css-minimizer-webpack-plugin "^3.2.0" + dotenv "^10.0.0" + dotenv-expand "^5.1.0" + eslint "^8.3.0" + eslint-config-react-app "^7.0.1" + eslint-webpack-plugin "^3.1.1" + file-loader "^6.2.0" + fs-extra "^10.0.0" + html-webpack-plugin "^5.5.0" + identity-obj-proxy "^3.0.0" + jest "^27.4.3" + jest-resolve "^27.4.2" + jest-watch-typeahead "^1.0.0" + mini-css-extract-plugin "^2.4.5" + postcss "^8.4.4" + postcss-flexbugs-fixes "^5.0.2" + postcss-loader "^6.2.1" + postcss-normalize 
"^10.0.1" + postcss-preset-env "^7.0.1" + prompts "^2.4.2" + react-app-polyfill "^3.0.0" + react-dev-utils "^12.0.1" + react-refresh "^0.11.0" + resolve "^1.20.0" + resolve-url-loader "^4.0.0" + sass-loader "^12.3.0" + semver "^7.3.5" + source-map-loader "^3.0.0" + style-loader "^3.3.1" + tailwindcss "^3.0.2" + terser-webpack-plugin "^5.2.5" + webpack "^5.64.4" + webpack-dev-server "^4.6.0" + webpack-manifest-plugin "^4.0.2" + workbox-webpack-plugin "^6.4.1" + optionalDependencies: + fsevents "^2.3.2" + +react@^18.2.0: + version "18.2.0" + resolved "http://localhost:4873/react/-/react-18.2.0.tgz#555bd98592883255fa00de14f1151a917b5d77d5" + integrity sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ== + dependencies: + loose-envify "^1.1.0" + +read-cache@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/read-cache/-/read-cache-1.0.0.tgz#e664ef31161166c9751cdbe8dbcf86b5fb58f774" + integrity sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA== + dependencies: + pify "^2.3.0" + +readable-stream@^2.0.1: + version "2.3.7" + resolved "http://localhost:4873/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" + integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +readable-stream@^3.0.6: + version "3.6.0" + resolved "http://localhost:4873/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" + integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + +readdirp@~3.6.0: + version "3.6.0" + resolved 
"http://localhost:4873/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" + integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== + dependencies: + picomatch "^2.2.1" + +recursive-readdir@^2.2.2: + version "2.2.2" + resolved "http://localhost:4873/recursive-readdir/-/recursive-readdir-2.2.2.tgz#9946fb3274e1628de6e36b2f6714953b4845094f" + integrity sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg== + dependencies: + minimatch "3.0.4" + +redent@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f" + integrity sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg== + dependencies: + indent-string "^4.0.0" + strip-indent "^3.0.0" + +regenerate-unicode-properties@^10.1.0: + version "10.1.0" + resolved "http://localhost:4873/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.0.tgz#7c3192cab6dd24e21cb4461e5ddd7dd24fa8374c" + integrity sha512-d1VudCLoIGitcU/hEg2QqvyGZQmdC0Lf8BqdOMXGFSvJP4bNV1+XqbPQeHHLD51Jh4QJJ225dlIFvY4Ly6MXmQ== + dependencies: + regenerate "^1.4.2" + +regenerate@^1.4.2: + version "1.4.2" + resolved "http://localhost:4873/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" + integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== + +regenerator-runtime@^0.13.4, regenerator-runtime@^0.13.9: + version "0.13.9" + resolved "http://localhost:4873/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52" + integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA== + +regenerator-transform@^0.15.0: + version "0.15.0" + resolved "http://localhost:4873/regenerator-transform/-/regenerator-transform-0.15.0.tgz#cbd9ead5d77fae1a48d957cf889ad0586adb6537" + integrity 
sha512-LsrGtPmbYg19bcPHwdtmXwbW+TqNvtY4riE3P83foeHRroMbH6/2ddFBfab3t7kbzc7v7p4wbkIecHImqt0QNg== + dependencies: + "@babel/runtime" "^7.8.4" + +regex-parser@^2.2.11: + version "2.2.11" + resolved "http://localhost:4873/regex-parser/-/regex-parser-2.2.11.tgz#3b37ec9049e19479806e878cabe7c1ca83ccfe58" + integrity sha512-jbD/FT0+9MBU2XAZluI7w2OBs1RBi6p9M83nkoZayQXXU9e8Robt69FcZc7wU4eJD/YFTjn1JdCk3rbMJajz8Q== + +regexp.prototype.flags@^1.4.1, regexp.prototype.flags@^1.4.3: + version "1.4.3" + resolved "http://localhost:4873/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz#87cab30f80f66660181a3bb7bf5981a872b367ac" + integrity sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + functions-have-names "^1.2.2" + +regexpp@^3.2.0: + version "3.2.0" + resolved "http://localhost:4873/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2" + integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg== + +regexpu-core@^5.1.0: + version "5.2.1" + resolved "http://localhost:4873/regexpu-core/-/regexpu-core-5.2.1.tgz#a69c26f324c1e962e9ffd0b88b055caba8089139" + integrity sha512-HrnlNtpvqP1Xkb28tMhBUO2EbyUHdQlsnlAhzWcwHy8WJR53UWr7/MAvqrsQKMbV4qdpv03oTMG8iIhfsPFktQ== + dependencies: + regenerate "^1.4.2" + regenerate-unicode-properties "^10.1.0" + regjsgen "^0.7.1" + regjsparser "^0.9.1" + unicode-match-property-ecmascript "^2.0.0" + unicode-match-property-value-ecmascript "^2.0.0" + +regjsgen@^0.7.1: + version "0.7.1" + resolved "http://localhost:4873/regjsgen/-/regjsgen-0.7.1.tgz#ee5ef30e18d3f09b7c369b76e7c2373ed25546f6" + integrity sha512-RAt+8H2ZEzHeYWxZ3H2z6tF18zyyOnlcdaafLrm21Bguj7uZy6ULibiAFdXEtKQY4Sy7wDTwDiOazasMLc4KPA== + +regjsparser@^0.9.1: + version "0.9.1" + resolved "http://localhost:4873/regjsparser/-/regjsparser-0.9.1.tgz#272d05aa10c7c1f67095b1ff0addae8442fc5709" + integrity 
sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ== + dependencies: + jsesc "~0.5.0" + +relateurl@^0.2.7: + version "0.2.7" + resolved "http://localhost:4873/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" + integrity sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog== + +renderkid@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/renderkid/-/renderkid-3.0.0.tgz#5fd823e4d6951d37358ecc9a58b1f06836b6268a" + integrity sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg== + dependencies: + css-select "^4.1.3" + dom-converter "^0.2.0" + htmlparser2 "^6.1.0" + lodash "^4.17.21" + strip-ansi "^6.0.1" + +require-directory@^2.1.1: + version "2.1.1" + resolved "http://localhost:4873/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== + +require-from-string@^2.0.2: + version "2.0.2" + resolved "http://localhost:4873/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" + integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== + +requires-port@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" + integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== + +resolve-cwd@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" + integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== + dependencies: + resolve-from "^5.0.0" + +resolve-from@^4.0.0: + version "4.0.0" + resolved 
"http://localhost:4873/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +resolve-from@^5.0.0: + version "5.0.0" + resolved "http://localhost:4873/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" + integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== + +resolve-url-loader@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/resolve-url-loader/-/resolve-url-loader-4.0.0.tgz#d50d4ddc746bb10468443167acf800dcd6c3ad57" + integrity sha512-05VEMczVREcbtT7Bz+C+96eUO5HDNvdthIiMB34t7FcF8ehcu4wC0sSgPUubs3XW2Q3CNLJk/BJrCU9wVRymiA== + dependencies: + adjust-sourcemap-loader "^4.0.0" + convert-source-map "^1.7.0" + loader-utils "^2.0.0" + postcss "^7.0.35" + source-map "0.6.1" + +resolve.exports@^1.1.0: + version "1.1.0" + resolved "http://localhost:4873/resolve.exports/-/resolve.exports-1.1.0.tgz#5ce842b94b05146c0e03076985d1d0e7e48c90c9" + integrity sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ== + +resolve@^1.1.7, resolve@^1.14.2, resolve@^1.19.0, resolve@^1.20.0, resolve@^1.22.0, resolve@^1.22.1: + version "1.22.1" + resolved "http://localhost:4873/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" + integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== + dependencies: + is-core-module "^2.9.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +resolve@^2.0.0-next.3: + version "2.0.0-next.4" + resolved "http://localhost:4873/resolve/-/resolve-2.0.0-next.4.tgz#3d37a113d6429f496ec4752d2a2e58efb1fd4660" + integrity sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ== + dependencies: + is-core-module "^2.9.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + 
+retry@^0.13.1: + version "0.13.1" + resolved "http://localhost:4873/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658" + integrity sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg== + +reusify@^1.0.4: + version "1.0.4" + resolved "http://localhost:4873/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + +rimraf@^3.0.0, rimraf@^3.0.2: + version "3.0.2" + resolved "http://localhost:4873/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + +rollup-plugin-terser@^7.0.0: + version "7.0.2" + resolved "http://localhost:4873/rollup-plugin-terser/-/rollup-plugin-terser-7.0.2.tgz#e8fbba4869981b2dc35ae7e8a502d5c6c04d324d" + integrity sha512-w3iIaU4OxcF52UUXiZNsNeuXIMDvFrr+ZXK6bFZ0Q60qyVfq4uLptoS4bbq3paG3x216eQllFZX7zt6TIImguQ== + dependencies: + "@babel/code-frame" "^7.10.4" + jest-worker "^26.2.1" + serialize-javascript "^4.0.0" + terser "^5.0.0" + +rollup@^2.43.1: + version "2.79.1" + resolved "http://localhost:4873/rollup/-/rollup-2.79.1.tgz#bedee8faef7c9f93a2647ac0108748f497f081c7" + integrity sha512-uKxbd0IhMZOhjAiD5oAFp7BqvkA4Dv47qpOCtaNvng4HBwdbWtdOh8f5nZNuk2rp51PMGk3bzfWu5oayNEuYnw== + optionalDependencies: + fsevents "~2.3.2" + +run-parallel@^1.1.9: + version "1.2.0" + resolved "http://localhost:4873/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + dependencies: + queue-microtask "^1.2.2" + +safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.2" + resolved "http://localhost:4873/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity 
sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.1.0, safe-buffer@~5.2.0: + version "5.2.1" + resolved "http://localhost:4873/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +safe-regex-test@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/safe-regex-test/-/safe-regex-test-1.0.0.tgz#793b874d524eb3640d1873aad03596db2d4f2295" + integrity sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.3" + is-regex "^1.1.4" + +"safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0": + version "2.1.2" + resolved "http://localhost:4873/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +sanitize.css@*: + version "13.0.0" + resolved "http://localhost:4873/sanitize.css/-/sanitize.css-13.0.0.tgz#2675553974b27964c75562ade3bd85d79879f173" + integrity sha512-ZRwKbh/eQ6w9vmTjkuG0Ioi3HBwPFce0O+v//ve+aOq1oeCy7jMV2qzzAlpsNuqpqCBjjriM1lbtZbF/Q8jVyA== + +sass-loader@^12.3.0: + version "12.6.0" + resolved "http://localhost:4873/sass-loader/-/sass-loader-12.6.0.tgz#5148362c8e2cdd4b950f3c63ac5d16dbfed37bcb" + integrity sha512-oLTaH0YCtX4cfnJZxKSLAyglED0naiYfNG1iXfU5w1LNZ+ukoA5DtyDIN5zmKVZwYNJP4KRc5Y3hkWga+7tYfA== + dependencies: + klona "^2.0.4" + neo-async "^2.6.2" + +sax@~1.2.4: + version "1.2.4" + resolved "http://localhost:4873/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" + integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== + +saxes@^5.0.1: + version "5.0.1" + resolved 
"http://localhost:4873/saxes/-/saxes-5.0.1.tgz#eebab953fa3b7608dbe94e5dadb15c888fa6696d" + integrity sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw== + dependencies: + xmlchars "^2.2.0" + +scheduler@^0.23.0: + version "0.23.0" + resolved "http://localhost:4873/scheduler/-/scheduler-0.23.0.tgz#ba8041afc3d30eb206a487b6b384002e4e61fdfe" + integrity sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw== + dependencies: + loose-envify "^1.1.0" + +schema-utils@2.7.0: + version "2.7.0" + resolved "http://localhost:4873/schema-utils/-/schema-utils-2.7.0.tgz#17151f76d8eae67fbbf77960c33c676ad9f4efc7" + integrity sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A== + dependencies: + "@types/json-schema" "^7.0.4" + ajv "^6.12.2" + ajv-keywords "^3.4.1" + +schema-utils@^2.6.5: + version "2.7.1" + resolved "http://localhost:4873/schema-utils/-/schema-utils-2.7.1.tgz#1ca4f32d1b24c590c203b8e7a50bf0ea4cd394d7" + integrity sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg== + dependencies: + "@types/json-schema" "^7.0.5" + ajv "^6.12.4" + ajv-keywords "^3.5.2" + +schema-utils@^3.0.0, schema-utils@^3.1.0, schema-utils@^3.1.1: + version "3.1.1" + resolved "http://localhost:4873/schema-utils/-/schema-utils-3.1.1.tgz#bc74c4b6b6995c1d88f76a8b77bea7219e0c8281" + integrity sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw== + dependencies: + "@types/json-schema" "^7.0.8" + ajv "^6.12.5" + ajv-keywords "^3.5.2" + +schema-utils@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/schema-utils/-/schema-utils-4.0.0.tgz#60331e9e3ae78ec5d16353c467c34b3a0a1d3df7" + integrity sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg== + dependencies: + "@types/json-schema" "^7.0.9" + ajv "^8.8.0" + ajv-formats "^2.1.1" + ajv-keywords 
"^5.0.0" + +select-hose@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" + integrity sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg== + +selfsigned@^2.1.1: + version "2.1.1" + resolved "http://localhost:4873/selfsigned/-/selfsigned-2.1.1.tgz#18a7613d714c0cd3385c48af0075abf3f266af61" + integrity sha512-GSL3aowiF7wa/WtSFwnUrludWFoNhftq8bUkH9pkzjpN2XSPOAYEgg6e0sS9s0rZwgJzJiQRPU18A6clnoW5wQ== + dependencies: + node-forge "^1" + +semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0: + version "6.3.0" + resolved "http://localhost:4873/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== + +semver@^7.3.2, semver@^7.3.5, semver@^7.3.7: + version "7.3.8" + resolved "http://localhost:4873/semver/-/semver-7.3.8.tgz#07a78feafb3f7b32347d725e33de7e2a2df67798" + integrity sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A== + dependencies: + lru-cache "^6.0.0" + +send@0.18.0: + version "0.18.0" + resolved "http://localhost:4873/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" + integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== + dependencies: + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + fresh "0.5.2" + http-errors "2.0.0" + mime "1.6.0" + ms "2.1.3" + on-finished "2.4.1" + range-parser "~1.2.1" + statuses "2.0.1" + +serialize-javascript@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/serialize-javascript/-/serialize-javascript-4.0.0.tgz#b525e1238489a5ecfc42afacc3fe99e666f4b1aa" + integrity sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw== + dependencies: + randombytes "^2.1.0" + 
+serialize-javascript@^6.0.0: + version "6.0.0" + resolved "http://localhost:4873/serialize-javascript/-/serialize-javascript-6.0.0.tgz#efae5d88f45d7924141da8b5c3a7a7e663fefeb8" + integrity sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag== + dependencies: + randombytes "^2.1.0" + +serve-index@^1.9.1: + version "1.9.1" + resolved "http://localhost:4873/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" + integrity sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw== + dependencies: + accepts "~1.3.4" + batch "0.6.1" + debug "2.6.9" + escape-html "~1.0.3" + http-errors "~1.6.2" + mime-types "~2.1.17" + parseurl "~1.3.2" + +serve-static@1.15.0: + version "1.15.0" + resolved "http://localhost:4873/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" + integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== + dependencies: + encodeurl "~1.0.2" + escape-html "~1.0.3" + parseurl "~1.3.3" + send "0.18.0" + +setprototypeof@1.1.0: + version "1.1.0" + resolved "http://localhost:4873/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" + integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== + +setprototypeof@1.2.0: + version "1.2.0" + resolved "http://localhost:4873/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" + integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== + +shallow-clone@^3.0.0: + version "3.0.1" + resolved "http://localhost:4873/shallow-clone/-/shallow-clone-3.0.1.tgz#8f2981ad92531f55035b01fb230769a40e02efa3" + integrity sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA== + dependencies: + kind-of "^6.0.2" + +shebang-command@^2.0.0: + version "2.0.0" + resolved 
"http://localhost:4873/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +shell-quote@^1.7.3: + version "1.7.3" + resolved "http://localhost:4873/shell-quote/-/shell-quote-1.7.3.tgz#aa40edac170445b9a431e17bb62c0b881b9c4123" + integrity sha512-Vpfqwm4EnqGdlsBFNmHhxhElJYrdfcxPThu+ryKS5J8L/fhAwLazFZtq+S+TWZ9ANj2piSQLGj6NQg+lKPmxrw== + +side-channel@^1.0.4: + version "1.0.4" + resolved "http://localhost:4873/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" + integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== + dependencies: + call-bind "^1.0.0" + get-intrinsic "^1.0.2" + object-inspect "^1.9.0" + +signal-exit@^3.0.2, signal-exit@^3.0.3: + version "3.0.7" + resolved "http://localhost:4873/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== + +sisteransi@^1.0.5: + version "1.0.5" + resolved "http://localhost:4873/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" + integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== + +slash@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + +slash@^4.0.0: + version "4.0.0" + resolved 
"http://localhost:4873/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7" + integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew== + +sockjs@^0.3.24: + version "0.3.24" + resolved "http://localhost:4873/sockjs/-/sockjs-0.3.24.tgz#c9bc8995f33a111bea0395ec30aa3206bdb5ccce" + integrity sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ== + dependencies: + faye-websocket "^0.11.3" + uuid "^8.3.2" + websocket-driver "^0.7.4" + +source-list-map@^2.0.0, source-list-map@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" + integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== + +source-map-js@^1.0.1, source-map-js@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" + integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== + +source-map-loader@^3.0.0: + version "3.0.1" + resolved "http://localhost:4873/source-map-loader/-/source-map-loader-3.0.1.tgz#9ae5edc7c2d42570934be4c95d1ccc6352eba52d" + integrity sha512-Vp1UsfyPvgujKQzi4pyDiTOnE3E4H+yHvkVRN3c/9PJmQS4CQJExvcDvaX/D+RV+xQben9HJ56jMJS3CgUeWyA== + dependencies: + abab "^2.0.5" + iconv-lite "^0.6.3" + source-map-js "^1.0.1" + +source-map-support@^0.5.6, source-map-support@~0.5.20: + version "0.5.21" + resolved "http://localhost:4873/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" + integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map@0.6.1, source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: + version "0.6.1" + resolved 
"http://localhost:4873/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +source-map@^0.7.3: + version "0.7.4" + resolved "http://localhost:4873/source-map/-/source-map-0.7.4.tgz#a9bbe705c9d8846f4e08ff6765acf0f1b0898656" + integrity sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA== + +source-map@^0.8.0-beta.0: + version "0.8.0-beta.0" + resolved "http://localhost:4873/source-map/-/source-map-0.8.0-beta.0.tgz#d4c1bb42c3f7ee925f005927ba10709e0d1d1f11" + integrity sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA== + dependencies: + whatwg-url "^7.0.0" + +sourcemap-codec@^1.4.8: + version "1.4.8" + resolved "http://localhost:4873/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4" + integrity sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA== + +spdy-transport@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" + integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== + dependencies: + debug "^4.1.0" + detect-node "^2.0.4" + hpack.js "^2.1.6" + obuf "^1.1.2" + readable-stream "^3.0.6" + wbuf "^1.7.3" + +spdy@^4.0.2: + version "4.0.2" + resolved "http://localhost:4873/spdy/-/spdy-4.0.2.tgz#b74f466203a3eda452c02492b91fb9e84a27677b" + integrity sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA== + dependencies: + debug "^4.1.0" + handle-thing "^2.0.0" + http-deceiver "^1.2.7" + select-hose "^2.0.0" + spdy-transport "^3.0.0" + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "http://localhost:4873/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity 
sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== + +stable@^0.1.8: + version "0.1.8" + resolved "http://localhost:4873/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf" + integrity sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w== + +stack-utils@^2.0.3: + version "2.0.5" + resolved "http://localhost:4873/stack-utils/-/stack-utils-2.0.5.tgz#d25265fca995154659dbbfba3b49254778d2fdd5" + integrity sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA== + dependencies: + escape-string-regexp "^2.0.0" + +stackframe@^1.3.4: + version "1.3.4" + resolved "http://localhost:4873/stackframe/-/stackframe-1.3.4.tgz#b881a004c8c149a5e8efef37d51b16e412943310" + integrity sha512-oeVtt7eWQS+Na6F//S4kJ2K2VbRlS9D43mAlMyVpVWovy9o+jfgH8O9agzANzaiLjclA0oYzUXEM4PurhSUChw== + +statuses@2.0.1: + version "2.0.1" + resolved "http://localhost:4873/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" + integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== + +"statuses@>= 1.4.0 < 2": + version "1.5.0" + resolved "http://localhost:4873/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" + integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== + +string-length@^4.0.1: + version "4.0.2" + resolved "http://localhost:4873/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" + integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== + dependencies: + char-regex "^1.0.2" + strip-ansi "^6.0.0" + +string-length@^5.0.1: + version "5.0.1" + resolved "http://localhost:4873/string-length/-/string-length-5.0.1.tgz#3d647f497b6e8e8d41e422f7e0b23bc536c8381e" + integrity 
sha512-9Ep08KAMUn0OadnVaBuRdE2l615CQ508kr0XMadjClfYpdCyvrbFp6Taebo8yyxokQ4viUd/xPPUA4FGgUa0ow== + dependencies: + char-regex "^2.0.0" + strip-ansi "^7.0.1" + +string-natural-compare@^3.0.1: + version "3.0.1" + resolved "http://localhost:4873/string-natural-compare/-/string-natural-compare-3.0.1.tgz#7a42d58474454963759e8e8b7ae63d71c1e7fdf4" + integrity sha512-n3sPwynL1nwKi3WJ6AIsClwBMa0zTi54fn2oLU6ndfTSIO05xaznjSf15PcBZU6FNWbmN5Q6cxT4V5hGvB4taw== + +string-width@^4.1.0, string-width@^4.2.0: + version "4.2.3" + resolved "http://localhost:4873/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string.prototype.matchall@^4.0.6, string.prototype.matchall@^4.0.7: + version "4.0.7" + resolved "http://localhost:4873/string.prototype.matchall/-/string.prototype.matchall-4.0.7.tgz#8e6ecb0d8a1fb1fda470d81acecb2dba057a481d" + integrity sha512-f48okCX7JiwVi1NXCVWcFnZgADDC/n2vePlQ/KUCNqCikLLilQvwjMO8+BHVKvgzH0JB0J9LEPgxOGT02RoETg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + get-intrinsic "^1.1.1" + has-symbols "^1.0.3" + internal-slot "^1.0.3" + regexp.prototype.flags "^1.4.1" + side-channel "^1.0.4" + +string.prototype.trimend@^1.0.5: + version "1.0.5" + resolved "http://localhost:4873/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz#914a65baaab25fbdd4ee291ca7dde57e869cb8d0" + integrity sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + +string.prototype.trimstart@^1.0.5: + version "1.0.5" + resolved "http://localhost:4873/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz#5466d93ba58cfa2134839f81d7f42437e8c01fef" + integrity 
sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + +string_decoder@^1.1.1: + version "1.3.0" + resolved "http://localhost:4873/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +string_decoder@~1.1.1: + version "1.1.1" + resolved "http://localhost:4873/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + +stringify-object@^3.3.0: + version "3.3.0" + resolved "http://localhost:4873/stringify-object/-/stringify-object-3.3.0.tgz#703065aefca19300d3ce88af4f5b3956d7556629" + integrity sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw== + dependencies: + get-own-enumerable-property-symbols "^3.0.0" + is-obj "^1.0.1" + is-regexp "^1.0.0" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "http://localhost:4873/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-ansi@^7.0.1: + version "7.0.1" + resolved "http://localhost:4873/strip-ansi/-/strip-ansi-7.0.1.tgz#61740a08ce36b61e50e65653f07060d000975fb2" + integrity sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw== + dependencies: + ansi-regex "^6.0.1" + +strip-bom@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + integrity 
sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA== + +strip-bom@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" + integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== + +strip-comments@^2.0.1: + version "2.0.1" + resolved "http://localhost:4873/strip-comments/-/strip-comments-2.0.1.tgz#4ad11c3fbcac177a67a40ac224ca339ca1c1ba9b" + integrity sha512-ZprKx+bBLXv067WTCALv8SSz5l2+XhpYCsVtSqlMnkAXMWDq+/ekVbl1ghqP9rUHTzv6sm/DwCOiYutU/yp1fw== + +strip-final-newline@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" + integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== + +strip-indent@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001" + integrity sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ== + dependencies: + min-indent "^1.0.0" + +strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: + version "3.1.1" + resolved "http://localhost:4873/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== + +style-loader@^3.3.1: + version "3.3.1" + resolved "http://localhost:4873/style-loader/-/style-loader-3.3.1.tgz#057dfa6b3d4d7c7064462830f9113ed417d38575" + integrity sha512-GPcQ+LDJbrcxHORTRes6Jy2sfvK2kS6hpSfI/fXhPt+spVzxF6LJ1dHLN9zIGmVaaP044YKaIatFaufENRiDoQ== + +stylehacks@^5.1.0: + version "5.1.0" + resolved "http://localhost:4873/stylehacks/-/stylehacks-5.1.0.tgz#a40066490ca0caca04e96c6b02153ddc39913520" + integrity 
sha512-SzLmvHQTrIWfSgljkQCw2++C9+Ne91d/6Sp92I8c5uHTcy/PgeHamwITIbBW9wnFTY/3ZfSXR9HIL6Ikqmcu6Q== + dependencies: + browserslist "^4.16.6" + postcss-selector-parser "^6.0.4" + +supports-color@^5.3.0: + version "5.5.0" + resolved "http://localhost:4873/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + +supports-color@^7.0.0, supports-color@^7.1.0: + version "7.2.0" + resolved "http://localhost:4873/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-color@^8.0.0: + version "8.1.1" + resolved "http://localhost:4873/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== + dependencies: + has-flag "^4.0.0" + +supports-hyperlinks@^2.0.0: + version "2.3.0" + resolved "http://localhost:4873/supports-hyperlinks/-/supports-hyperlinks-2.3.0.tgz#3943544347c1ff90b15effb03fc14ae45ec10624" + integrity sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA== + dependencies: + has-flag "^4.0.0" + supports-color "^7.0.0" + +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "http://localhost:4873/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +svg-parser@^2.0.2: + version "2.0.4" + resolved "http://localhost:4873/svg-parser/-/svg-parser-2.0.4.tgz#fdc2e29e13951736140b76cb122c8ee6630eb6b5" + integrity 
sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ== + +svgo@^1.2.2: + version "1.3.2" + resolved "http://localhost:4873/svgo/-/svgo-1.3.2.tgz#b6dc511c063346c9e415b81e43401145b96d4167" + integrity sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw== + dependencies: + chalk "^2.4.1" + coa "^2.0.2" + css-select "^2.0.0" + css-select-base-adapter "^0.1.1" + css-tree "1.0.0-alpha.37" + csso "^4.0.2" + js-yaml "^3.13.1" + mkdirp "~0.5.1" + object.values "^1.1.0" + sax "~1.2.4" + stable "^0.1.8" + unquote "~1.1.1" + util.promisify "~1.0.0" + +svgo@^2.7.0: + version "2.8.0" + resolved "http://localhost:4873/svgo/-/svgo-2.8.0.tgz#4ff80cce6710dc2795f0c7c74101e6764cfccd24" + integrity sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg== + dependencies: + "@trysound/sax" "0.2.0" + commander "^7.2.0" + css-select "^4.1.3" + css-tree "^1.1.3" + csso "^4.2.0" + picocolors "^1.0.0" + stable "^0.1.8" + +symbol-tree@^3.2.4: + version "3.2.4" + resolved "http://localhost:4873/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" + integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== + +tailwindcss@^3.0.2: + version "3.1.8" + resolved "http://localhost:4873/tailwindcss/-/tailwindcss-3.1.8.tgz#4f8520550d67a835d32f2f4021580f9fddb7b741" + integrity sha512-YSneUCZSFDYMwk+TGq8qYFdCA3yfBRdBlS7txSq0LUmzyeqRe3a8fBQzbz9M3WS/iFT4BNf/nmw9mEzrnSaC0g== + dependencies: + arg "^5.0.2" + chokidar "^3.5.3" + color-name "^1.1.4" + detective "^5.2.1" + didyoumean "^1.2.2" + dlv "^1.1.3" + fast-glob "^3.2.11" + glob-parent "^6.0.2" + is-glob "^4.0.3" + lilconfig "^2.0.6" + normalize-path "^3.0.0" + object-hash "^3.0.0" + picocolors "^1.0.0" + postcss "^8.4.14" + postcss-import "^14.1.0" + postcss-js "^4.0.0" + postcss-load-config "^3.1.4" + postcss-nested "5.0.6" + postcss-selector-parser "^6.0.10" + 
postcss-value-parser "^4.2.0" + quick-lru "^5.1.1" + resolve "^1.22.1" + +tapable@^1.0.0: + version "1.1.3" + resolved "http://localhost:4873/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" + integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== + +tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0: + version "2.2.1" + resolved "http://localhost:4873/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" + integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== + +temp-dir@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/temp-dir/-/temp-dir-2.0.0.tgz#bde92b05bdfeb1516e804c9c00ad45177f31321e" + integrity sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg== + +tempy@^0.6.0: + version "0.6.0" + resolved "http://localhost:4873/tempy/-/tempy-0.6.0.tgz#65e2c35abc06f1124a97f387b08303442bde59f3" + integrity sha512-G13vtMYPT/J8A4X2SjdtBTphZlrp1gKv6hZiOjw14RCWg6GbHuQBGtjlx75xLbYV/wEc0D7G5K4rxKP/cXk8Bw== + dependencies: + is-stream "^2.0.0" + temp-dir "^2.0.0" + type-fest "^0.16.0" + unique-string "^2.0.0" + +terminal-link@^2.0.0: + version "2.1.1" + resolved "http://localhost:4873/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994" + integrity sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ== + dependencies: + ansi-escapes "^4.2.1" + supports-hyperlinks "^2.0.0" + +terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.5: + version "5.3.6" + resolved "http://localhost:4873/terser-webpack-plugin/-/terser-webpack-plugin-5.3.6.tgz#5590aec31aa3c6f771ce1b1acca60639eab3195c" + integrity sha512-kfLFk+PoLUQIbLmB1+PZDMRSZS99Mp+/MHqDNmMA6tOItzRt+Npe3E+fsMs5mfcM0wCtrrdU387UnV+vnSffXQ== + dependencies: + "@jridgewell/trace-mapping" "^0.3.14" + jest-worker "^27.4.5" + schema-utils "^3.1.1" + serialize-javascript "^6.0.0" + 
terser "^5.14.1" + +terser@^5.0.0, terser@^5.10.0, terser@^5.14.1: + version "5.15.1" + resolved "http://localhost:4873/terser/-/terser-5.15.1.tgz#8561af6e0fd6d839669c73b92bdd5777d870ed6c" + integrity sha512-K1faMUvpm/FBxjBXud0LWVAGxmvoPbZbfTCYbSgaaYQaIXI3/TdI7a7ZGA73Zrou6Q8Zmz3oeUTsp/dj+ag2Xw== + dependencies: + "@jridgewell/source-map" "^0.3.2" + acorn "^8.5.0" + commander "^2.20.0" + source-map-support "~0.5.20" + +test-exclude@^6.0.0: + version "6.0.0" + resolved "http://localhost:4873/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== + dependencies: + "@istanbuljs/schema" "^0.1.2" + glob "^7.1.4" + minimatch "^3.0.4" + +text-table@^0.2.0: + version "0.2.0" + resolved "http://localhost:4873/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== + +throat@^6.0.1: + version "6.0.1" + resolved "http://localhost:4873/throat/-/throat-6.0.1.tgz#d514fedad95740c12c2d7fc70ea863eb51ade375" + integrity sha512-8hmiGIJMDlwjg7dlJ4yKGLK8EsYqKgPWbG3b4wjJddKNwc7N7Dpn08Df4szr/sZdMVeOstrdYSsqzX6BYbcB+w== + +thunky@^1.0.2: + version "1.1.0" + resolved "http://localhost:4873/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" + integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== + +tmpl@1.0.5: + version "1.0.5" + resolved "http://localhost:4873/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== + +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + integrity 
sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== + +to-regex-range@^5.0.1: + version "5.0.1" + resolved "http://localhost:4873/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +toidentifier@1.0.1: + version "1.0.1" + resolved "http://localhost:4873/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" + integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== + +tough-cookie@^4.0.0: + version "4.1.2" + resolved "http://localhost:4873/tough-cookie/-/tough-cookie-4.1.2.tgz#e53e84b85f24e0b65dd526f46628db6c85f6b874" + integrity sha512-G9fqXWoYFZgTc2z8Q5zaHy/vJMjm+WV0AkAeHxVCQiEB1b+dGvWzFW6QV07cY5jQ5gRkeid2qIkzkxUnmoQZUQ== + dependencies: + psl "^1.1.33" + punycode "^2.1.1" + universalify "^0.2.0" + url-parse "^1.5.3" + +tr46@^1.0.1: + version "1.0.1" + resolved "http://localhost:4873/tr46/-/tr46-1.0.1.tgz#a8b13fd6bfd2489519674ccde55ba3693b706d09" + integrity sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA== + dependencies: + punycode "^2.1.0" + +tr46@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240" + integrity sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw== + dependencies: + punycode "^2.1.1" + +tryer@^1.0.1: + version "1.0.1" + resolved "http://localhost:4873/tryer/-/tryer-1.0.1.tgz#f2c85406800b9b0f74c9f7465b81eaad241252f8" + integrity sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA== + +tsconfig-paths@^3.14.1: + version "3.14.1" + resolved "http://localhost:4873/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz#ba0734599e8ea36c862798e920bcf163277b137a" + integrity 
sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ== + dependencies: + "@types/json5" "^0.0.29" + json5 "^1.0.1" + minimist "^1.2.6" + strip-bom "^3.0.0" + +tslib@^1.8.1: + version "1.14.1" + resolved "http://localhost:4873/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== + +tslib@^2.0.3: + version "2.4.0" + resolved "http://localhost:4873/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3" + integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ== + +tsutils@^3.21.0: + version "3.21.0" + resolved "http://localhost:4873/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" + integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== + dependencies: + tslib "^1.8.1" + +type-check@^0.4.0, type-check@~0.4.0: + version "0.4.0" + resolved "http://localhost:4873/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== + dependencies: + prelude-ls "^1.2.1" + +type-check@~0.3.2: + version "0.3.2" + resolved "http://localhost:4873/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" + integrity sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg== + dependencies: + prelude-ls "~1.1.2" + +type-detect@4.0.8: + version "4.0.8" + resolved "http://localhost:4873/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" + integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== + +type-fest@^0.16.0: + version "0.16.0" + resolved "http://localhost:4873/type-fest/-/type-fest-0.16.0.tgz#3240b891a78b0deae910dbeb86553e552a148860" + 
integrity sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg== + +type-fest@^0.20.2: + version "0.20.2" + resolved "http://localhost:4873/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== + +type-fest@^0.21.3: + version "0.21.3" + resolved "http://localhost:4873/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" + integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== + +type-is@~1.6.18: + version "1.6.18" + resolved "http://localhost:4873/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" + integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== + dependencies: + media-typer "0.3.0" + mime-types "~2.1.24" + +typedarray-to-buffer@^3.1.5: + version "3.1.5" + resolved "http://localhost:4873/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" + integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== + dependencies: + is-typedarray "^1.0.0" + +unbox-primitive@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" + integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== + dependencies: + call-bind "^1.0.2" + has-bigints "^1.0.2" + has-symbols "^1.0.3" + which-boxed-primitive "^1.0.2" + +unicode-canonical-property-names-ecmascript@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz#301acdc525631670d39f6146e0e77ff6bbdebddc" + integrity 
sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ== + +unicode-match-property-ecmascript@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz#54fd16e0ecb167cf04cf1f756bdcc92eba7976c3" + integrity sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q== + dependencies: + unicode-canonical-property-names-ecmascript "^2.0.0" + unicode-property-aliases-ecmascript "^2.0.0" + +unicode-match-property-value-ecmascript@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz#1a01aa57247c14c568b89775a54938788189a714" + integrity sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw== + +unicode-property-aliases-ecmascript@^2.0.0: + version "2.1.0" + resolved "http://localhost:4873/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz#43d41e3be698bd493ef911077c9b131f827e8ccd" + integrity sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w== + +unique-string@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d" + integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg== + dependencies: + crypto-random-string "^2.0.0" + +universalify@^0.2.0: + version "0.2.0" + resolved "http://localhost:4873/universalify/-/universalify-0.2.0.tgz#6451760566fa857534745ab1dde952d1b1761be0" + integrity sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg== + +universalify@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" + integrity 
sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== + +unpipe@1.0.0, unpipe@~1.0.0: + version "1.0.0" + resolved "http://localhost:4873/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" + integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== + +unquote@~1.1.1: + version "1.1.1" + resolved "http://localhost:4873/unquote/-/unquote-1.1.1.tgz#8fded7324ec6e88a0ff8b905e7c098cdc086d544" + integrity sha512-vRCqFv6UhXpWxZPyGDh/F3ZpNv8/qo7w6iufLpQg9aKnQ71qM4B5KiI7Mia9COcjEhrO9LueHpMYjYzsWH3OIg== + +upath@^1.2.0: + version "1.2.0" + resolved "http://localhost:4873/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" + integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== + +update-browserslist-db@^1.0.9: + version "1.0.10" + resolved "http://localhost:4873/update-browserslist-db/-/update-browserslist-db-1.0.10.tgz#0f54b876545726f17d00cd9a2561e6dade943ff3" + integrity sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ== + dependencies: + escalade "^3.1.1" + picocolors "^1.0.0" + +uri-js@^4.2.2: + version "4.4.1" + resolved "http://localhost:4873/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +url-parse@^1.5.3: + version "1.5.10" + resolved "http://localhost:4873/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1" + integrity sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ== + dependencies: + querystringify "^2.1.1" + requires-port "^1.0.0" + +util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: + version "1.0.2" + resolved 
"http://localhost:4873/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== + +util.promisify@~1.0.0: + version "1.0.1" + resolved "http://localhost:4873/util.promisify/-/util.promisify-1.0.1.tgz#6baf7774b80eeb0f7520d8b81d07982a59abbaee" + integrity sha512-g9JpC/3He3bm38zsLupWryXHoEcS22YHthuPQSJdMy6KNrzIRzWqcsHzD/WUnqe45whVou4VIsPew37DoXWNrA== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.17.2" + has-symbols "^1.0.1" + object.getownpropertydescriptors "^2.1.0" + +utila@~0.4: + version "0.4.0" + resolved "http://localhost:4873/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" + integrity sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA== + +utils-merge@1.0.1: + version "1.0.1" + resolved "http://localhost:4873/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" + integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== + +uuid@^8.3, uuid@^8.3.2: + version "8.3.2" + resolved "http://localhost:4873/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" + integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== + +v8-to-istanbul@^8.1.0: + version "8.1.1" + resolved "http://localhost:4873/v8-to-istanbul/-/v8-to-istanbul-8.1.1.tgz#77b752fd3975e31bbcef938f85e9bd1c7a8d60ed" + integrity sha512-FGtKtv3xIpR6BYhvgH8MI/y78oT7d8Au3ww4QIxymrCtZEh5b8gCw2siywE+puhEmuWKDtmfrvF5UlB298ut3w== + dependencies: + "@types/istanbul-lib-coverage" "^2.0.1" + convert-source-map "^1.6.0" + source-map "^0.7.3" + +vary@~1.1.2: + version "1.1.2" + resolved "http://localhost:4873/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" + integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== + 
+w3c-hr-time@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" + integrity sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ== + dependencies: + browser-process-hrtime "^1.0.0" + +w3c-xmlserializer@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz#3e7104a05b75146cc60f564380b7f683acf1020a" + integrity sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA== + dependencies: + xml-name-validator "^3.0.0" + +walker@^1.0.7: + version "1.0.8" + resolved "http://localhost:4873/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" + integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== + dependencies: + makeerror "1.0.12" + +watchpack@^2.4.0: + version "2.4.0" + resolved "http://localhost:4873/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d" + integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg== + dependencies: + glob-to-regexp "^0.4.1" + graceful-fs "^4.1.2" + +wbuf@^1.1.0, wbuf@^1.7.3: + version "1.7.3" + resolved "http://localhost:4873/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" + integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== + dependencies: + minimalistic-assert "^1.0.0" + +web-vitals@^2.1.4: + version "2.1.4" + resolved "http://localhost:4873/web-vitals/-/web-vitals-2.1.4.tgz#76563175a475a5e835264d373704f9dde718290c" + integrity sha512-sVWcwhU5mX6crfI5Vd2dC4qchyTqxV8URinzt25XqVh+bHEPGH4C3NPrNionCP7Obx59wrYEbNlw4Z8sjALzZg== + +webidl-conversions@^4.0.2: + version "4.0.2" + resolved "http://localhost:4873/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" + integrity 
sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== + +webidl-conversions@^5.0.0: + version "5.0.0" + resolved "http://localhost:4873/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff" + integrity sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA== + +webidl-conversions@^6.1.0: + version "6.1.0" + resolved "http://localhost:4873/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514" + integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w== + +webpack-dev-middleware@^5.3.1: + version "5.3.3" + resolved "http://localhost:4873/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz#efae67c2793908e7311f1d9b06f2a08dcc97e51f" + integrity sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA== + dependencies: + colorette "^2.0.10" + memfs "^3.4.3" + mime-types "^2.1.31" + range-parser "^1.2.1" + schema-utils "^4.0.0" + +webpack-dev-server@^4.6.0: + version "4.11.1" + resolved "http://localhost:4873/webpack-dev-server/-/webpack-dev-server-4.11.1.tgz#ae07f0d71ca0438cf88446f09029b92ce81380b5" + integrity sha512-lILVz9tAUy1zGFwieuaQtYiadImb5M3d+H+L1zDYalYoDl0cksAB1UNyuE5MMWJrG6zR1tXkCP2fitl7yoUJiw== + dependencies: + "@types/bonjour" "^3.5.9" + "@types/connect-history-api-fallback" "^1.3.5" + "@types/express" "^4.17.13" + "@types/serve-index" "^1.9.1" + "@types/serve-static" "^1.13.10" + "@types/sockjs" "^0.3.33" + "@types/ws" "^8.5.1" + ansi-html-community "^0.0.8" + bonjour-service "^1.0.11" + chokidar "^3.5.3" + colorette "^2.0.10" + compression "^1.7.4" + connect-history-api-fallback "^2.0.0" + default-gateway "^6.0.3" + express "^4.17.3" + graceful-fs "^4.2.6" + html-entities "^2.3.2" + http-proxy-middleware "^2.0.3" + ipaddr.js "^2.0.1" + open "^8.0.9" + p-retry "^4.5.0" + rimraf "^3.0.2" + schema-utils "^4.0.0" + 
selfsigned "^2.1.1" + serve-index "^1.9.1" + sockjs "^0.3.24" + spdy "^4.0.2" + webpack-dev-middleware "^5.3.1" + ws "^8.4.2" + +webpack-manifest-plugin@^4.0.2: + version "4.1.1" + resolved "http://localhost:4873/webpack-manifest-plugin/-/webpack-manifest-plugin-4.1.1.tgz#10f8dbf4714ff93a215d5a45bcc416d80506f94f" + integrity sha512-YXUAwxtfKIJIKkhg03MKuiFAD72PlrqCiwdwO4VEXdRO5V0ORCNwaOwAZawPZalCbmH9kBDmXnNeQOw+BIEiow== + dependencies: + tapable "^2.0.0" + webpack-sources "^2.2.0" + +webpack-merge@^5.8.0: + version "5.8.0" + resolved "http://localhost:4873/webpack-merge/-/webpack-merge-5.8.0.tgz#2b39dbf22af87776ad744c390223731d30a68f61" + integrity sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q== + dependencies: + clone-deep "^4.0.1" + wildcard "^2.0.0" + +webpack-sources@^1.4.3: + version "1.4.3" + resolved "http://localhost:4873/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" + integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== + dependencies: + source-list-map "^2.0.0" + source-map "~0.6.1" + +webpack-sources@^2.2.0: + version "2.3.1" + resolved "http://localhost:4873/webpack-sources/-/webpack-sources-2.3.1.tgz#570de0af163949fe272233c2cefe1b56f74511fd" + integrity sha512-y9EI9AO42JjEcrTJFOYmVywVZdKVUfOvDUPsJea5GIr1JOEGFVqwlY2K098fFoIjOkDzHn2AjRvM8dsBZu+gCA== + dependencies: + source-list-map "^2.0.1" + source-map "^0.6.1" + +webpack-sources@^3.2.3: + version "3.2.3" + resolved "http://localhost:4873/webpack-sources/-/webpack-sources-3.2.3.tgz#2d4daab8451fd4b240cc27055ff6a0c2ccea0cde" + integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== + +webpack@^5.64.4: + version "5.74.0" + resolved "http://localhost:4873/webpack/-/webpack-5.74.0.tgz#02a5dac19a17e0bb47093f2be67c695102a55980" + integrity 
sha512-A2InDwnhhGN4LYctJj6M1JEaGL7Luj6LOmyBHjcI8529cm5p6VXiTIW2sn6ffvEAKmveLzvu4jrihwXtPojlAA== + dependencies: + "@types/eslint-scope" "^3.7.3" + "@types/estree" "^0.0.51" + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/wasm-edit" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + acorn "^8.7.1" + acorn-import-assertions "^1.7.6" + browserslist "^4.14.5" + chrome-trace-event "^1.0.2" + enhanced-resolve "^5.10.0" + es-module-lexer "^0.9.0" + eslint-scope "5.1.1" + events "^3.2.0" + glob-to-regexp "^0.4.1" + graceful-fs "^4.2.9" + json-parse-even-better-errors "^2.3.1" + loader-runner "^4.2.0" + mime-types "^2.1.27" + neo-async "^2.6.2" + schema-utils "^3.1.0" + tapable "^2.1.1" + terser-webpack-plugin "^5.1.3" + watchpack "^2.4.0" + webpack-sources "^3.2.3" + +websocket-driver@>=0.5.1, websocket-driver@^0.7.4: + version "0.7.4" + resolved "http://localhost:4873/websocket-driver/-/websocket-driver-0.7.4.tgz#89ad5295bbf64b480abcba31e4953aca706f5760" + integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg== + dependencies: + http-parser-js ">=0.5.1" + safe-buffer ">=5.1.0" + websocket-extensions ">=0.1.1" + +websocket-extensions@>=0.1.1: + version "0.1.4" + resolved "http://localhost:4873/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42" + integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== + +whatwg-encoding@^1.0.5: + version "1.0.5" + resolved "http://localhost:4873/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0" + integrity sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw== + dependencies: + iconv-lite "0.4.24" + +whatwg-fetch@^3.6.2: + version "3.6.2" + resolved "http://localhost:4873/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz#dced24f37f2624ed0281725d51d0e2e3fe677f8c" + integrity 
sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA== + +whatwg-mimetype@^2.3.0: + version "2.3.0" + resolved "http://localhost:4873/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" + integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== + +whatwg-url@^7.0.0: + version "7.1.0" + resolved "http://localhost:4873/whatwg-url/-/whatwg-url-7.1.0.tgz#c2c492f1eca612988efd3d2266be1b9fc6170d06" + integrity sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg== + dependencies: + lodash.sortby "^4.7.0" + tr46 "^1.0.1" + webidl-conversions "^4.0.2" + +whatwg-url@^8.0.0, whatwg-url@^8.5.0: + version "8.7.0" + resolved "http://localhost:4873/whatwg-url/-/whatwg-url-8.7.0.tgz#656a78e510ff8f3937bc0bcbe9f5c0ac35941b77" + integrity sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg== + dependencies: + lodash "^4.7.0" + tr46 "^2.1.0" + webidl-conversions "^6.1.0" + +which-boxed-primitive@^1.0.2: + version "1.0.2" + resolved "http://localhost:4873/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" + integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== + dependencies: + is-bigint "^1.0.1" + is-boolean-object "^1.1.0" + is-number-object "^1.0.4" + is-string "^1.0.5" + is-symbol "^1.0.3" + +which@^1.3.1: + version "1.3.1" + resolved "http://localhost:4873/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" + integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== + dependencies: + isexe "^2.0.0" + +which@^2.0.1: + version "2.0.2" + resolved "http://localhost:4873/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity 
sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +wildcard@^2.0.0: + version "2.0.0" + resolved "http://localhost:4873/wildcard/-/wildcard-2.0.0.tgz#a77d20e5200c6faaac979e4b3aadc7b3dd7f8fec" + integrity sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw== + +word-wrap@^1.2.3, word-wrap@~1.2.3: + version "1.2.3" + resolved "http://localhost:4873/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" + integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + +workbox-background-sync@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-background-sync/-/workbox-background-sync-6.5.4.tgz#3141afba3cc8aa2ae14c24d0f6811374ba8ff6a9" + integrity sha512-0r4INQZMyPky/lj4Ou98qxcThrETucOde+7mRGJl13MPJugQNKeZQOdIJe/1AchOP23cTqHcN/YVpD6r8E6I8g== + dependencies: + idb "^7.0.1" + workbox-core "6.5.4" + +workbox-broadcast-update@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-broadcast-update/-/workbox-broadcast-update-6.5.4.tgz#8441cff5417cd41f384ba7633ca960a7ffe40f66" + integrity sha512-I/lBERoH1u3zyBosnpPEtcAVe5lwykx9Yg1k6f8/BGEPGaMMgZrwVrqL1uA9QZ1NGGFoyE6t9i7lBjOlDhFEEw== + dependencies: + workbox-core "6.5.4" + +workbox-build@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-build/-/workbox-build-6.5.4.tgz#7d06d31eb28a878817e1c991c05c5b93409f0389" + integrity sha512-kgRevLXEYvUW9WS4XoziYqZ8Q9j/2ziJYEtTrjdz5/L/cTUa2XfyMP2i7c3p34lgqJ03+mTiz13SdFef2POwbA== + dependencies: + "@apideck/better-ajv-errors" "^0.3.1" + "@babel/core" "^7.11.1" + "@babel/preset-env" "^7.11.0" + "@babel/runtime" "^7.11.2" + "@rollup/plugin-babel" "^5.2.0" + "@rollup/plugin-node-resolve" "^11.2.1" + "@rollup/plugin-replace" "^2.4.1" + "@surma/rollup-plugin-off-main-thread" "^2.2.3" + ajv "^8.6.0" + common-tags "^1.8.0" + fast-json-stable-stringify "^2.1.0" + fs-extra 
"^9.0.1" + glob "^7.1.6" + lodash "^4.17.20" + pretty-bytes "^5.3.0" + rollup "^2.43.1" + rollup-plugin-terser "^7.0.0" + source-map "^0.8.0-beta.0" + stringify-object "^3.3.0" + strip-comments "^2.0.1" + tempy "^0.6.0" + upath "^1.2.0" + workbox-background-sync "6.5.4" + workbox-broadcast-update "6.5.4" + workbox-cacheable-response "6.5.4" + workbox-core "6.5.4" + workbox-expiration "6.5.4" + workbox-google-analytics "6.5.4" + workbox-navigation-preload "6.5.4" + workbox-precaching "6.5.4" + workbox-range-requests "6.5.4" + workbox-recipes "6.5.4" + workbox-routing "6.5.4" + workbox-strategies "6.5.4" + workbox-streams "6.5.4" + workbox-sw "6.5.4" + workbox-window "6.5.4" + +workbox-cacheable-response@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-cacheable-response/-/workbox-cacheable-response-6.5.4.tgz#a5c6ec0c6e2b6f037379198d4ef07d098f7cf137" + integrity sha512-DCR9uD0Fqj8oB2TSWQEm1hbFs/85hXXoayVwFKLVuIuxwJaihBsLsp4y7J9bvZbqtPJ1KlCkmYVGQKrBU4KAug== + dependencies: + workbox-core "6.5.4" + +workbox-core@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-core/-/workbox-core-6.5.4.tgz#df48bf44cd58bb1d1726c49b883fb1dffa24c9ba" + integrity sha512-OXYb+m9wZm8GrORlV2vBbE5EC1FKu71GGp0H4rjmxmF4/HLbMCoTFws87M3dFwgpmg0v00K++PImpNQ6J5NQ6Q== + +workbox-expiration@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-expiration/-/workbox-expiration-6.5.4.tgz#501056f81e87e1d296c76570bb483ce5e29b4539" + integrity sha512-jUP5qPOpH1nXtjGGh1fRBa1wJL2QlIb5mGpct3NzepjGG2uFFBn4iiEBiI9GUmfAFR2ApuRhDydjcRmYXddiEQ== + dependencies: + idb "^7.0.1" + workbox-core "6.5.4" + +workbox-google-analytics@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-google-analytics/-/workbox-google-analytics-6.5.4.tgz#c74327f80dfa4c1954cbba93cd7ea640fe7ece7d" + integrity sha512-8AU1WuaXsD49249Wq0B2zn4a/vvFfHkpcFfqAFHNHwln3jK9QUYmzdkKXGIZl9wyKNP+RRX30vcgcyWMcZ9VAg== + dependencies: + workbox-background-sync "6.5.4" + workbox-core "6.5.4" + 
workbox-routing "6.5.4" + workbox-strategies "6.5.4" + +workbox-navigation-preload@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-navigation-preload/-/workbox-navigation-preload-6.5.4.tgz#ede56dd5f6fc9e860a7e45b2c1a8f87c1c793212" + integrity sha512-IIwf80eO3cr8h6XSQJF+Hxj26rg2RPFVUmJLUlM0+A2GzB4HFbQyKkrgD5y2d84g2IbJzP4B4j5dPBRzamHrng== + dependencies: + workbox-core "6.5.4" + +workbox-precaching@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-precaching/-/workbox-precaching-6.5.4.tgz#740e3561df92c6726ab5f7471e6aac89582cab72" + integrity sha512-hSMezMsW6btKnxHB4bFy2Qfwey/8SYdGWvVIKFaUm8vJ4E53JAY+U2JwLTRD8wbLWoP6OVUdFlXsTdKu9yoLTg== + dependencies: + workbox-core "6.5.4" + workbox-routing "6.5.4" + workbox-strategies "6.5.4" + +workbox-range-requests@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-range-requests/-/workbox-range-requests-6.5.4.tgz#86b3d482e090433dab38d36ae031b2bb0bd74399" + integrity sha512-Je2qR1NXCFC8xVJ/Lux6saH6IrQGhMpDrPXWZWWS8n/RD+WZfKa6dSZwU+/QksfEadJEr/NfY+aP/CXFFK5JFg== + dependencies: + workbox-core "6.5.4" + +workbox-recipes@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-recipes/-/workbox-recipes-6.5.4.tgz#cca809ee63b98b158b2702dcfb741b5cc3e24acb" + integrity sha512-QZNO8Ez708NNwzLNEXTG4QYSKQ1ochzEtRLGaq+mr2PyoEIC1xFW7MrWxrONUxBFOByksds9Z4//lKAX8tHyUA== + dependencies: + workbox-cacheable-response "6.5.4" + workbox-core "6.5.4" + workbox-expiration "6.5.4" + workbox-precaching "6.5.4" + workbox-routing "6.5.4" + workbox-strategies "6.5.4" + +workbox-routing@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-routing/-/workbox-routing-6.5.4.tgz#6a7fbbd23f4ac801038d9a0298bc907ee26fe3da" + integrity sha512-apQswLsbrrOsBUWtr9Lf80F+P1sHnQdYodRo32SjiByYi36IDyL2r7BH1lJtFX8fwNHDa1QOVY74WKLLS6o5Pg== + dependencies: + workbox-core "6.5.4" + +workbox-strategies@6.5.4: + version "6.5.4" + resolved 
"http://localhost:4873/workbox-strategies/-/workbox-strategies-6.5.4.tgz#4edda035b3c010fc7f6152918370699334cd204d" + integrity sha512-DEtsxhx0LIYWkJBTQolRxG4EI0setTJkqR4m7r4YpBdxtWJH1Mbg01Cj8ZjNOO8etqfA3IZaOPHUxCs8cBsKLw== + dependencies: + workbox-core "6.5.4" + +workbox-streams@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-streams/-/workbox-streams-6.5.4.tgz#1cb3c168a6101df7b5269d0353c19e36668d7d69" + integrity sha512-FXKVh87d2RFXkliAIheBojBELIPnWbQdyDvsH3t74Cwhg0fDheL1T8BqSM86hZvC0ZESLsznSYWw+Va+KVbUzg== + dependencies: + workbox-core "6.5.4" + workbox-routing "6.5.4" + +workbox-sw@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-sw/-/workbox-sw-6.5.4.tgz#d93e9c67924dd153a61367a4656ff4d2ae2ed736" + integrity sha512-vo2RQo7DILVRoH5LjGqw3nphavEjK4Qk+FenXeUsknKn14eCNedHOXWbmnvP4ipKhlE35pvJ4yl4YYf6YsJArA== + +workbox-webpack-plugin@^6.4.1: + version "6.5.4" + resolved "http://localhost:4873/workbox-webpack-plugin/-/workbox-webpack-plugin-6.5.4.tgz#baf2d3f4b8f435f3469887cf4fba2b7fac3d0fd7" + integrity sha512-LmWm/zoaahe0EGmMTrSLUi+BjyR3cdGEfU3fS6PN1zKFYbqAKuQ+Oy/27e4VSXsyIwAw8+QDfk1XHNGtZu9nQg== + dependencies: + fast-json-stable-stringify "^2.1.0" + pretty-bytes "^5.4.1" + upath "^1.2.0" + webpack-sources "^1.4.3" + workbox-build "6.5.4" + +workbox-window@6.5.4: + version "6.5.4" + resolved "http://localhost:4873/workbox-window/-/workbox-window-6.5.4.tgz#d991bc0a94dff3c2dbb6b84558cff155ca878e91" + integrity sha512-HnLZJDwYBE+hpG25AQBO8RUWBJRaCsI9ksQJEp3aCOFCaG5kqaToAYXFRAHxzRluM2cQbGzdQF5rjKPWPA1fug== + dependencies: + "@types/trusted-types" "^2.0.2" + workbox-core "6.5.4" + +wrap-ansi@^7.0.0: + version "7.0.0" + resolved "http://localhost:4873/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrappy@1: + 
version "1.0.2" + resolved "http://localhost:4873/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== + +write-file-atomic@^3.0.0: + version "3.0.3" + resolved "http://localhost:4873/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8" + integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== + dependencies: + imurmurhash "^0.1.4" + is-typedarray "^1.0.0" + signal-exit "^3.0.2" + typedarray-to-buffer "^3.1.5" + +ws@^7.4.6: + version "7.5.9" + resolved "http://localhost:4873/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" + integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== + +ws@^8.4.2: + version "8.9.0" + resolved "http://localhost:4873/ws/-/ws-8.9.0.tgz#2a994bb67144be1b53fe2d23c53c028adeb7f45e" + integrity sha512-Ja7nszREasGaYUYCI2k4lCKIRTt+y7XuqVoHR44YpI49TtryyqbqvDMn5eqfW7e6HzTukDRIsXqzVHScqRcafg== + +xml-name-validator@^3.0.0: + version "3.0.0" + resolved "http://localhost:4873/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" + integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw== + +xmlchars@^2.2.0: + version "2.2.0" + resolved "http://localhost:4873/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" + integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== + +xtend@^4.0.2: + version "4.0.2" + resolved "http://localhost:4873/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" + integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== + +y18n@^5.0.5: + version "5.0.8" + resolved 
"http://localhost:4873/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== + +yallist@^4.0.0: + version "4.0.0" + resolved "http://localhost:4873/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + +yaml@^1.10.0, yaml@^1.10.2, yaml@^1.7.2: + version "1.10.2" + resolved "http://localhost:4873/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" + integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== + +yargs-parser@^20.2.2: + version "20.2.9" + resolved "http://localhost:4873/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" + integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== + +yargs@^16.2.0: + version "16.2.0" + resolved "http://localhost:4873/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" + integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== + dependencies: + cliui "^7.0.2" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.0" + y18n "^5.0.5" + yargs-parser "^20.2.2" + +yocto-queue@^0.1.0: + version "0.1.0" + resolved "http://localhost:4873/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" + integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== diff --git a/javascript/examples/vite/.gitignore b/javascript/examples/vite/.gitignore new file mode 100644 index 00000000..23d67fc1 --- /dev/null +++ b/javascript/examples/vite/.gitignore @@ -0,0 +1,2 @@ +node_modules/ +yarn.lock diff --git a/javascript/examples/vite/README.md b/javascript/examples/vite/README.md new file mode 100644 
index 00000000..c84594f5 --- /dev/null +++ b/javascript/examples/vite/README.md @@ -0,0 +1,54 @@ +# Vite + Automerge + +There are three things you need to do to get WASM packaging working with vite: + +1. Install the top level await plugin +2. Install the `vite-plugin-wasm` plugin +3. Exclude `automerge-wasm` from the optimizer + +First, install the packages we need: + +```bash +yarn add vite-plugin-top-level-await +yarn add vite-plugin-wasm +``` + +In `vite.config.js` + +```javascript +import { defineConfig } from "vite" +import wasm from "vite-plugin-wasm" +import topLevelAwait from "vite-plugin-top-level-await" + +export default defineConfig({ + plugins: [topLevelAwait(), wasm()], + + // This is only necessary if you are using `SharedWorker` or `WebWorker`, as + // documented in https://vitejs.dev/guide/features.html#import-with-constructors + worker: { + format: "es", + plugins: [topLevelAwait(), wasm()], + }, + + optimizeDeps: { + // This is necessary because otherwise `vite dev` includes two separate + // versions of the JS wrapper. This causes problems because the JS + // wrapper has a module level variable to track JS side heap + // allocations, initializing this twice causes horrible breakage + exclude: ["@automerge/automerge-wasm"], + }, +}) +``` + +Now start the dev server: + +```bash +yarn vite +``` + +## Running the example + +```bash +yarn install +yarn dev +``` diff --git a/javascript/examples/vite/index.html b/javascript/examples/vite/index.html new file mode 100644 index 00000000..f86e483c --- /dev/null +++ b/javascript/examples/vite/index.html @@ -0,0 +1,13 @@ + + + + + + + Vite + TS + + +
+ + + diff --git a/javascript/examples/vite/main.ts b/javascript/examples/vite/main.ts new file mode 100644 index 00000000..0ba18f48 --- /dev/null +++ b/javascript/examples/vite/main.ts @@ -0,0 +1,15 @@ +import * as Automerge from "/node_modules/.vite/deps/automerge-js.js?v=6e973f28" +console.log(Automerge) +let doc = Automerge.init() +doc = Automerge.change(doc, d => (d.hello = "from automerge-js")) +console.log(doc) +const result = JSON.stringify(doc) +if (typeof document !== "undefined") { + const element = document.createElement("div") + element.innerHTML = JSON.stringify(result) + document.body.appendChild(element) +} else { + console.log("node:", result) +} + +//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIi9ob21lL2FsZXgvUHJvamVjdHMvYXV0b21lcmdlL2F1dG9tZXJnZS1ycy9hdXRvbWVyZ2UtanMvZXhhbXBsZXMvdml0ZS9zcmMvbWFpbi50cyJdLCJzb3VyY2VzQ29udGVudCI6WyJpbXBvcnQgKiBhcyBBdXRvbWVyZ2UgZnJvbSBcImF1dG9tZXJnZS1qc1wiXG5cbi8vIGhlbGxvIHdvcmxkIGNvZGUgdGhhdCB3aWxsIHJ1biBjb3JyZWN0bHkgb24gd2ViIG9yIG5vZGVcblxuY29uc29sZS5sb2coQXV0b21lcmdlKVxubGV0IGRvYyA9IEF1dG9tZXJnZS5pbml0KClcbmRvYyA9IEF1dG9tZXJnZS5jaGFuZ2UoZG9jLCAoZDogYW55KSA9PiBkLmhlbGxvID0gXCJmcm9tIGF1dG9tZXJnZS1qc1wiKVxuY29uc29sZS5sb2coZG9jKVxuY29uc3QgcmVzdWx0ID0gSlNPTi5zdHJpbmdpZnkoZG9jKVxuXG5pZiAodHlwZW9mIGRvY3VtZW50ICE9PSAndW5kZWZpbmVkJykge1xuICAgIC8vIGJyb3dzZXJcbiAgICBjb25zdCBlbGVtZW50ID0gZG9jdW1lbnQuY3JlYXRlRWxlbWVudCgnZGl2Jyk7XG4gICAgZWxlbWVudC5pbm5lckhUTUwgPSBKU09OLnN0cmluZ2lmeShyZXN1bHQpXG4gICAgZG9jdW1lbnQuYm9keS5hcHBlbmRDaGlsZChlbGVtZW50KTtcbn0gZWxzZSB7XG4gICAgLy8gc2VydmVyXG4gICAgY29uc29sZS5sb2coXCJub2RlOlwiLCByZXN1bHQpXG59XG5cbiJdLCJtYXBwaW5ncyI6IkFBQUEsWUFBWSxlQUFlO0FBSTNCLFFBQVEsSUFBSSxTQUFTO0FBQ3JCLElBQUksTUFBTSxVQUFVLEtBQUs7QUFDekIsTUFBTSxVQUFVLE9BQU8sS0FBSyxDQUFDLE1BQVcsRUFBRSxRQUFRLG1CQUFtQjtBQUNyRSxRQUFRLElBQUksR0FBRztBQUNmLE1BQU0sU0FBUyxLQUFLLFVBQVUsR0FBRztBQUVqQyxJQUFJLE9BQU8sYUFBYSxhQUFhO0FBRWpDLFFBQU0sVUFBVSxTQUFTLGNBQWMsS0FBSztBQUM1QyxVQUFRLFlBQVksS0FBSyxVQUFVLE1BQU07QUFDekM
sV0FBUyxLQUFLLFlBQVksT0FBTztBQUNyQyxPQUFPO0FBRUgsVUFBUSxJQUFJLFNBQVMsTUFBTTtBQUMvQjsiLCJuYW1lcyI6W119 diff --git a/javascript/examples/vite/package.json b/javascript/examples/vite/package.json new file mode 100644 index 00000000..d9a13681 --- /dev/null +++ b/javascript/examples/vite/package.json @@ -0,0 +1,20 @@ +{ + "name": "autovite", + "private": true, + "version": "0.0.0", + "type": "module", + "scripts": { + "dev": "vite", + "build": "tsc && vite build", + "preview": "vite preview" + }, + "dependencies": { + "@automerge/automerge": "2.0.0-alpha.7" + }, + "devDependencies": { + "typescript": "^4.6.4", + "vite": "^3.1.0", + "vite-plugin-top-level-await": "^1.1.1", + "vite-plugin-wasm": "^2.1.0" + } +} diff --git a/javascript/examples/vite/public/vite.svg b/javascript/examples/vite/public/vite.svg new file mode 100644 index 00000000..e7b8dfb1 --- /dev/null +++ b/javascript/examples/vite/public/vite.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/javascript/examples/vite/src/counter.ts b/javascript/examples/vite/src/counter.ts new file mode 100644 index 00000000..3e516b6d --- /dev/null +++ b/javascript/examples/vite/src/counter.ts @@ -0,0 +1,9 @@ +export function setupCounter(element: HTMLButtonElement) { + let counter = 0 + const setCounter = (count: number) => { + counter = count + element.innerHTML = `count is ${counter}` + } + element.addEventListener("click", () => setCounter(++counter)) + setCounter(0) +} diff --git a/javascript/examples/vite/src/main.ts b/javascript/examples/vite/src/main.ts new file mode 100644 index 00000000..8dc8f92c --- /dev/null +++ b/javascript/examples/vite/src/main.ts @@ -0,0 +1,17 @@ +import * as Automerge from "@automerge/automerge" + +// hello world code that will run correctly on web or node + +let doc = Automerge.init() +doc = Automerge.change(doc, (d: any) => (d.hello = "from automerge")) +const result = JSON.stringify(doc) + +if (typeof document !== "undefined") { + // browser + const element = 
document.createElement("div") + element.innerHTML = JSON.stringify(result) + document.body.appendChild(element) +} else { + // server + console.log("node:", result) +} diff --git a/javascript/examples/vite/src/style.css b/javascript/examples/vite/src/style.css new file mode 100644 index 00000000..ac37d84b --- /dev/null +++ b/javascript/examples/vite/src/style.css @@ -0,0 +1,97 @@ +:root { + font-family: Inter, Avenir, Helvetica, Arial, sans-serif; + font-size: 16px; + line-height: 24px; + font-weight: 400; + + color-scheme: light dark; + color: rgba(255, 255, 255, 0.87); + background-color: #242424; + + font-synthesis: none; + text-rendering: optimizeLegibility; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; + -webkit-text-size-adjust: 100%; +} + +a { + font-weight: 500; + color: #646cff; + text-decoration: inherit; +} +a:hover { + color: #535bf2; +} + +body { + margin: 0; + display: flex; + place-items: center; + min-width: 320px; + min-height: 100vh; +} + +h1 { + font-size: 3.2em; + line-height: 1.1; +} + +#app { + max-width: 1280px; + margin: 0 auto; + padding: 2rem; + text-align: center; +} + +.logo { + height: 6em; + padding: 1.5em; + will-change: filter; +} +.logo:hover { + filter: drop-shadow(0 0 2em #646cffaa); +} +.logo.vanilla:hover { + filter: drop-shadow(0 0 2em #3178c6aa); +} + +.card { + padding: 2em; +} + +.read-the-docs { + color: #888; +} + +button { + border-radius: 8px; + border: 1px solid transparent; + padding: 0.6em 1.2em; + font-size: 1em; + font-weight: 500; + font-family: inherit; + background-color: #1a1a1a; + cursor: pointer; + transition: border-color 0.25s; +} +button:hover { + border-color: #646cff; +} +button:focus, +button:focus-visible { + outline: 4px auto -webkit-focus-ring-color; +} + +@media (prefers-color-scheme: light) { + :root { + color: #213547; + background-color: #ffffff; + } + a:hover { + color: #747bff; + } + button { + background-color: #f9f9f9; + } +} diff --git 
a/javascript/examples/vite/src/typescript.svg b/javascript/examples/vite/src/typescript.svg new file mode 100644 index 00000000..d91c910c --- /dev/null +++ b/javascript/examples/vite/src/typescript.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/javascript/examples/vite/src/vite-env.d.ts b/javascript/examples/vite/src/vite-env.d.ts new file mode 100644 index 00000000..11f02fe2 --- /dev/null +++ b/javascript/examples/vite/src/vite-env.d.ts @@ -0,0 +1 @@ +/// diff --git a/javascript/examples/vite/tsconfig.json b/javascript/examples/vite/tsconfig.json new file mode 100644 index 00000000..fbd02253 --- /dev/null +++ b/javascript/examples/vite/tsconfig.json @@ -0,0 +1,20 @@ +{ + "compilerOptions": { + "target": "ESNext", + "useDefineForClassFields": true, + "module": "ESNext", + "lib": ["ESNext", "DOM"], + "moduleResolution": "Node", + "strict": true, + "sourceMap": true, + "resolveJsonModule": true, + "isolatedModules": true, + "esModuleInterop": true, + "noEmit": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noImplicitReturns": true, + "skipLibCheck": true + }, + "include": ["src"] +} diff --git a/javascript/examples/vite/vite.config.js b/javascript/examples/vite/vite.config.js new file mode 100644 index 00000000..d80981bf --- /dev/null +++ b/javascript/examples/vite/vite.config.js @@ -0,0 +1,22 @@ +import { defineConfig } from "vite" +import wasm from "vite-plugin-wasm" +import topLevelAwait from "vite-plugin-top-level-await" + +export default defineConfig({ + plugins: [topLevelAwait(), wasm()], + + // This is only necessary if you are using `SharedWorker` or `WebWorker`, as + // documented in https://vitejs.dev/guide/features.html#import-with-constructors + worker: { + format: "es", + plugins: [topLevelAwait(), wasm()], + }, + + optimizeDeps: { + // This is necessary because otherwise `vite dev` includes two separate + // versions of the JS wrapper. 
This causes problems because the JS + // wrapper has a module level variable to track JS side heap + // allocations, initializing this twice causes horrible breakage + exclude: ["@automerge/automerge-wasm"], + }, +}) diff --git a/automerge-js/examples/webpack/.gitignore b/javascript/examples/webpack/.gitignore similarity index 100% rename from automerge-js/examples/webpack/.gitignore rename to javascript/examples/webpack/.gitignore diff --git a/javascript/examples/webpack/README.md b/javascript/examples/webpack/README.md new file mode 100644 index 00000000..7563f27d --- /dev/null +++ b/javascript/examples/webpack/README.md @@ -0,0 +1,35 @@ +# Webpack + Automerge + +Getting WASM working in webpack 5 is very easy. You just need to enable the +`asyncWebAssembly` +[experiment](https://webpack.js.org/configuration/experiments/). For example: + +```javascript +const path = require("path") + +const clientConfig = { + experiments: { asyncWebAssembly: true }, + target: "web", + entry: "./src/index.js", + output: { + filename: "main.js", + path: path.resolve(__dirname, "public"), + }, + mode: "development", // or production + performance: { + // we dont want the wasm blob to generate warnings + hints: false, + maxEntrypointSize: 512000, + maxAssetSize: 512000, + }, +} + +module.exports = clientConfig +``` + +## Running the example + +```bash +yarn install +yarn start +``` diff --git a/automerge-js/examples/webpack/package.json b/javascript/examples/webpack/package.json similarity index 79% rename from automerge-js/examples/webpack/package.json rename to javascript/examples/webpack/package.json index fb74fb82..2b63e7cc 100644 --- a/automerge-js/examples/webpack/package.json +++ b/javascript/examples/webpack/package.json @@ -10,13 +10,13 @@ }, "author": "", "dependencies": { - "automerge-js": "file:automerge-js-0.1.0.tgz", - "automerge-wasm": "file:automerge-wasm-0.1.3.tgz" + "@automerge/automerge": "2.0.0-alpha.7" }, "devDependencies": { "serve": "^13.0.2", "webpack": 
"^5.72.1", "webpack-cli": "^4.9.2", + "webpack-dev-server": "^4.11.1", "webpack-node-externals": "^3.0.0" } } diff --git a/automerge-js/examples/webpack/public/index.html b/javascript/examples/webpack/public/index.html similarity index 100% rename from automerge-js/examples/webpack/public/index.html rename to javascript/examples/webpack/public/index.html diff --git a/javascript/examples/webpack/src/index.js b/javascript/examples/webpack/src/index.js new file mode 100644 index 00000000..3a9086e4 --- /dev/null +++ b/javascript/examples/webpack/src/index.js @@ -0,0 +1,17 @@ +import * as Automerge from "@automerge/automerge" + +// hello world code that will run correctly on web or node + +let doc = Automerge.init() +doc = Automerge.change(doc, d => (d.hello = "from automerge")) +const result = JSON.stringify(doc) + +if (typeof document !== "undefined") { + // browser + const element = document.createElement("div") + element.innerHTML = JSON.stringify(result) + document.body.appendChild(element) +} else { + // server + console.log("node:", result) +} diff --git a/javascript/examples/webpack/webpack.config.js b/javascript/examples/webpack/webpack.config.js new file mode 100644 index 00000000..51fd5127 --- /dev/null +++ b/javascript/examples/webpack/webpack.config.js @@ -0,0 +1,37 @@ +const path = require("path") +const nodeExternals = require("webpack-node-externals") + +// the most basic webpack config for node or web targets for automerge-wasm + +const serverConfig = { + // basic setup for bundling a node package + target: "node", + externals: [nodeExternals()], + externalsPresets: { node: true }, + + entry: "./src/index.js", + output: { + filename: "node.js", + path: path.resolve(__dirname, "dist"), + }, + mode: "development", // or production +} + +const clientConfig = { + experiments: { asyncWebAssembly: true }, + target: "web", + entry: "./src/index.js", + output: { + filename: "main.js", + path: path.resolve(__dirname, "public"), + }, + mode: "development", // or 
production + performance: { + // we dont want the wasm blob to generate warnings + hints: false, + maxEntrypointSize: 512000, + maxAssetSize: 512000, + }, +} + +module.exports = [serverConfig, clientConfig] diff --git a/javascript/package.json b/javascript/package.json new file mode 100644 index 00000000..79309907 --- /dev/null +++ b/javascript/package.json @@ -0,0 +1,53 @@ +{ + "name": "@automerge/automerge", + "collaborators": [ + "Orion Henry ", + "Martin Kleppmann" + ], + "version": "2.0.2", + "description": "Javascript implementation of automerge, backed by @automerge/automerge-wasm", + "homepage": "https://github.com/automerge/automerge-rs/tree/main/wrappers/javascript", + "repository": "github:automerge/automerge-rs", + "files": [ + "README.md", + "LICENSE", + "package.json", + "dist/index.d.ts", + "dist/cjs/**/*.js", + "dist/mjs/**/*.js", + "dist/*.d.ts" + ], + "types": "./dist/index.d.ts", + "module": "./dist/mjs/index.js", + "main": "./dist/cjs/index.js", + "license": "MIT", + "scripts": { + "lint": "eslint src", + "build": "tsc -p config/mjs.json && tsc -p config/cjs.json && tsc -p config/declonly.json --emitDeclarationOnly", + "test": "ts-mocha test/*.ts", + "deno:build": "denoify && node ./scripts/deno-prefixer.mjs", + "deno:test": "deno test ./deno-tests/deno.ts --allow-read --allow-net", + "watch-docs": "typedoc src/index.ts --watch --readme none" + }, + "devDependencies": { + "@types/expect": "^24.3.0", + "@types/mocha": "^10.0.1", + "@types/uuid": "^9.0.0", + "@typescript-eslint/eslint-plugin": "^5.46.0", + "@typescript-eslint/parser": "^5.46.0", + "denoify": "^1.4.5", + "eslint": "^8.29.0", + "fast-sha256": "^1.3.0", + "mocha": "^10.2.0", + "pako": "^2.1.0", + "prettier": "^2.8.1", + "ts-mocha": "^10.0.0", + "ts-node": "^10.9.1", + "typedoc": "^0.23.22", + "typescript": "^4.9.4" + }, + "dependencies": { + "@automerge/automerge-wasm": "0.1.25", + "uuid": "^9.0.0" + } +} diff --git a/javascript/scripts/deno-prefixer.mjs 
b/javascript/scripts/deno-prefixer.mjs new file mode 100644 index 00000000..28544102 --- /dev/null +++ b/javascript/scripts/deno-prefixer.mjs @@ -0,0 +1,9 @@ +import * as fs from "fs" + +const files = ["./deno_dist/proxies.ts"] +for (const filepath of files) { + const data = fs.readFileSync(filepath) + fs.writeFileSync(filepath, "// @ts-nocheck \n" + data) + + console.log('Prepended "// @ts-nocheck" to ' + filepath) +} diff --git a/javascript/scripts/denoify-replacer.mjs b/javascript/scripts/denoify-replacer.mjs new file mode 100644 index 00000000..e183ba0d --- /dev/null +++ b/javascript/scripts/denoify-replacer.mjs @@ -0,0 +1,42 @@ +// @denoify-ignore + +import { makeThisModuleAnExecutableReplacer } from "denoify" +// import { assert } from "tsafe"; +// import * as path from "path"; + +makeThisModuleAnExecutableReplacer( + async ({ parsedImportExportStatement, destDirPath, version }) => { + version = process.env.VERSION || version + + switch (parsedImportExportStatement.parsedArgument.nodeModuleName) { + case "@automerge/automerge-wasm": + { + const moduleRoot = + process.env.ROOT_MODULE || + `https://deno.land/x/automerge_wasm@${version}` + /* + *We expect not to run against statements like + *import(..).then(...) + *or + *export * from "..." + *in our code. 
+ */ + if ( + !parsedImportExportStatement.isAsyncImport && + (parsedImportExportStatement.statementType === "import" || + parsedImportExportStatement.statementType === "export") + ) { + if (parsedImportExportStatement.isTypeOnly) { + return `${parsedImportExportStatement.statementType} type ${parsedImportExportStatement.target} from "${moduleRoot}/index.d.ts";` + } else { + return `${parsedImportExportStatement.statementType} ${parsedImportExportStatement.target} from "${moduleRoot}/automerge_wasm.js";` + } + } + } + break + } + + //The replacer should return undefined when we want to let denoify replace the statement + return undefined + } +) diff --git a/javascript/src/conflicts.ts b/javascript/src/conflicts.ts new file mode 100644 index 00000000..52af23e1 --- /dev/null +++ b/javascript/src/conflicts.ts @@ -0,0 +1,100 @@ +import { Counter, type AutomergeValue } from "./types" +import { Text } from "./text" +import { type AutomergeValue as UnstableAutomergeValue } from "./unstable_types" +import { type Target, Text1Target, Text2Target } from "./proxies" +import { mapProxy, listProxy, ValueType } from "./proxies" +import type { Prop, ObjID } from "@automerge/automerge-wasm" +import { Automerge } from "@automerge/automerge-wasm" + +export type ConflictsF = { [key: string]: ValueType } +export type Conflicts = ConflictsF +export type UnstableConflicts = ConflictsF + +export function stableConflictAt( + context: Automerge, + objectId: ObjID, + prop: Prop +): Conflicts | undefined { + return conflictAt( + context, + objectId, + prop, + true, + (context: Automerge, conflictId: ObjID): AutomergeValue => { + return new Text(context.text(conflictId)) + } + ) +} + +export function unstableConflictAt( + context: Automerge, + objectId: ObjID, + prop: Prop +): UnstableConflicts | undefined { + return conflictAt( + context, + objectId, + prop, + true, + (context: Automerge, conflictId: ObjID): UnstableAutomergeValue => { + return context.text(conflictId) + } + ) +} + +function 
conflictAt( + context: Automerge, + objectId: ObjID, + prop: Prop, + textV2: boolean, + handleText: (a: Automerge, conflictId: ObjID) => ValueType +): ConflictsF | undefined { + const values = context.getAll(objectId, prop) + if (values.length <= 1) { + return + } + const result: ConflictsF = {} + for (const fullVal of values) { + switch (fullVal[0]) { + case "map": + result[fullVal[1]] = mapProxy( + context, + fullVal[1], + textV2, + [prop], + true + ) + break + case "list": + result[fullVal[1]] = listProxy( + context, + fullVal[1], + textV2, + [prop], + true + ) + break + case "text": + result[fullVal[1]] = handleText(context, fullVal[1] as ObjID) + break + case "str": + case "uint": + case "int": + case "f64": + case "boolean": + case "bytes": + case "null": + result[fullVal[2]] = fullVal[1] as ValueType + break + case "counter": + result[fullVal[2]] = new Counter(fullVal[1]) as ValueType + break + case "timestamp": + result[fullVal[2]] = new Date(fullVal[1]) as ValueType + break + default: + throw RangeError(`datatype ${fullVal[0]} unimplemented`) + } + } + return result +} diff --git a/javascript/src/constants.ts b/javascript/src/constants.ts new file mode 100644 index 00000000..7b714772 --- /dev/null +++ b/javascript/src/constants.ts @@ -0,0 +1,12 @@ +// Properties of the document root object + +export const STATE = Symbol.for("_am_meta") // symbol used to hide application metadata on automerge objects +export const TRACE = Symbol.for("_am_trace") // used for debugging +export const OBJECT_ID = Symbol.for("_am_objectId") // symbol used to hide the object id on automerge objects +export const IS_PROXY = Symbol.for("_am_isProxy") // symbol used to test if the document is a proxy object + +export const UINT = Symbol.for("_am_uint") +export const INT = Symbol.for("_am_int") +export const F64 = Symbol.for("_am_f64") +export const COUNTER = Symbol.for("_am_counter") +export const TEXT = Symbol.for("_am_text") diff --git a/automerge-js/src/counter.ts 
b/javascript/src/counter.ts similarity index 79% rename from automerge-js/src/counter.ts rename to javascript/src/counter.ts index 1a810e23..88adb840 100644 --- a/automerge-js/src/counter.ts +++ b/javascript/src/counter.ts @@ -1,4 +1,4 @@ -import { Automerge, ObjID, Prop } from "automerge-types" +import { Automerge, type ObjID, type Prop } from "@automerge/automerge-wasm" import { COUNTER } from "./constants" /** * The most basic CRDT: an integer value that can be changed only by @@ -6,7 +6,7 @@ import { COUNTER } from "./constants" * the value trivially converges. */ export class Counter { - value : number; + value: number constructor(value?: number) { this.value = value || 0 @@ -21,7 +21,7 @@ export class Counter { * concatenating it with another string, as in `x + ''`. * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf */ - valueOf() : number { + valueOf(): number { return this.value } @@ -30,7 +30,7 @@ export class Counter { * this method is called e.g. when you do `['value: ', x].join('')` or when * you use string interpolation: `value: ${x}`. */ - toString() : string { + toString(): string { return this.valueOf().toString() } @@ -38,7 +38,7 @@ export class Counter { * Returns the counter value, so that a JSON serialization of an Automerge * document represents the counter simply as an integer. */ - toJSON() : number { + toJSON(): number { return this.value } } @@ -49,24 +49,30 @@ export class Counter { */ class WriteableCounter extends Counter { context: Automerge - path: string[] + path: Prop[] objectId: ObjID key: Prop - constructor(value: number, context: Automerge, path: string[], objectId: ObjID, key: Prop) { + constructor( + value: number, + context: Automerge, + path: Prop[], + objectId: ObjID, + key: Prop + ) { super(value) this.context = context this.path = path this.objectId = objectId this.key = key } - + /** * Increases the value of the counter by `delta`. 
If `delta` is not given, * increases the value of the counter by 1. */ - increment(delta: number) : number { - delta = typeof delta === 'number' ? delta : 1 + increment(delta: number): number { + delta = typeof delta === "number" ? delta : 1 this.context.increment(this.objectId, this.key, delta) this.value += delta return this.value @@ -76,8 +82,8 @@ class WriteableCounter extends Counter { * Decreases the value of the counter by `delta`. If `delta` is not given, * decreases the value of the counter by 1. */ - decrement(delta: number) : number { - return this.increment(typeof delta === 'number' ? -delta : -1) + decrement(delta: number): number { + return this.increment(typeof delta === "number" ? -delta : -1) } } @@ -87,8 +93,14 @@ class WriteableCounter extends Counter { * `objectId` is the ID of the object containing the counter, and `key` is * the property name (key in map, or index in list) where the counter is * located. -*/ -export function getWriteableCounter(value: number, context: Automerge, path: string[], objectId: ObjID, key: Prop) { + */ +export function getWriteableCounter( + value: number, + context: Automerge, + path: Prop[], + objectId: ObjID, + key: Prop +): WriteableCounter { return new WriteableCounter(value, context, path, objectId, key) } diff --git a/javascript/src/index.ts b/javascript/src/index.ts new file mode 100644 index 00000000..bf84c68d --- /dev/null +++ b/javascript/src/index.ts @@ -0,0 +1,242 @@ +/** + * # Automerge + * + * This library provides the core automerge data structure and sync algorithms. + * Other libraries can be built on top of this one which provide IO and + * persistence. + * + * An automerge document can be though of an immutable POJO (plain old javascript + * object) which `automerge` tracks the history of, allowing it to be merged with + * any other automerge document. 
+ * + * ## Creating and modifying a document + * + * You can create a document with {@link init} or {@link from} and then make + * changes to it with {@link change}, you can merge two documents with {@link + * merge}. + * + * ```ts + * import * as automerge from "@automerge/automerge" + * + * type DocType = {ideas: Array} + * + * let doc1 = automerge.init() + * doc1 = automerge.change(doc1, d => { + * d.ideas = [new automerge.Text("an immutable document")] + * }) + * + * let doc2 = automerge.init() + * doc2 = automerge.merge(doc2, automerge.clone(doc1)) + * doc2 = automerge.change(doc2, d => { + * d.ideas.push(new automerge.Text("which records it's history")) + * }) + * + * // Note the `automerge.clone` call, see the "cloning" section of this readme for + * // more detail + * doc1 = automerge.merge(doc1, automerge.clone(doc2)) + * doc1 = automerge.change(doc1, d => { + * d.ideas[0].deleteAt(13, 8) + * d.ideas[0].insertAt(13, "object") + * }) + * + * let doc3 = automerge.merge(doc1, doc2) + * // doc3 is now {ideas: ["an immutable object", "which records it's history"]} + * ``` + * + * ## Applying changes from another document + * + * You can get a representation of the result of the last {@link change} you made + * to a document with {@link getLastLocalChange} and you can apply that change to + * another document using {@link applyChanges}. + * + * If you need to get just the changes which are in one document but not in another + * you can use {@link getHeads} to get the heads of the document without the + * changes and then {@link getMissingDeps}, passing the result of {@link getHeads} + * on the document with the changes. + * + * ## Saving and loading documents + * + * You can {@link save} a document to generate a compresed binary representation of + * the document which can be loaded with {@link load}. 
If you have a document which + * you have recently made changes to you can generate recent changes with {@link + * saveIncremental}, this will generate all the changes since you last called + * `saveIncremental`, the changes generated can be applied to another document with + * {@link loadIncremental}. + * + * ## Viewing different versions of a document + * + * Occasionally you may wish to explicitly step to a different point in a document + * history. One common reason to do this is if you need to obtain a set of changes + * which take the document from one state to another in order to send those changes + * to another peer (or to save them somewhere). You can use {@link view} to do this. + * + * ```ts + * import * as automerge from "@automerge/automerge" + * import * as assert from "assert" + * + * let doc = automerge.from({ + * key1: "value1", + * }) + * + * // Make a clone of the document at this point, maybe this is actually on another + * // peer. + * let doc2 = automerge.clone < any > doc + * + * let heads = automerge.getHeads(doc) + * + * doc = + * automerge.change < + * any > + * (doc, + * d => { + * d.key2 = "value2" + * }) + * + * doc = + * automerge.change < + * any > + * (doc, + * d => { + * d.key3 = "value3" + * }) + * + * // At this point we've generated two separate changes, now we want to send + * // just those changes to someone else + * + * // view is a cheap reference based copy of a document at a given set of heads + * let before = automerge.view(doc, heads) + * + * // This view doesn't show the last two changes in the document state + * assert.deepEqual(before, { + * key1: "value1", + * }) + * + * // Get the changes to send to doc2 + * let changes = automerge.getChanges(before, doc) + * + * // Apply the changes at doc2 + * doc2 = automerge.applyChanges < any > (doc2, changes)[0] + * assert.deepEqual(doc2, { + * key1: "value1", + * key2: "value2", + * key3: "value3", + * }) + * ``` + * + * If you have a {@link view} of a document which you want 
to make changes to you + * can {@link clone} the viewed document. + * + * ## Syncing + * + * The sync protocol is stateful. This means that we start by creating a {@link + * SyncState} for each peer we are communicating with using {@link initSyncState}. + * Then we generate a message to send to the peer by calling {@link + * generateSyncMessage}. When we receive a message from the peer we call {@link + * receiveSyncMessage}. Here's a simple example of a loop which just keeps two + * peers in sync. + * + * ```ts + * let sync1 = automerge.initSyncState() + * let msg: Uint8Array | null + * ;[sync1, msg] = automerge.generateSyncMessage(doc1, sync1) + * + * while (true) { + * if (msg != null) { + * network.send(msg) + * } + * let resp: Uint8Array = + * (network.receive()[(doc1, sync1, _ignore)] = + * automerge.receiveSyncMessage(doc1, sync1, resp)[(sync1, msg)] = + * automerge.generateSyncMessage(doc1, sync1)) + * } + * ``` + * + * ## Conflicts + * + * The only time conflicts occur in automerge documents is in concurrent + * assignments to the same key in an object. In this case automerge + * deterministically chooses an arbitrary value to present to the application but + * you can examine the conflicts using {@link getConflicts}. 
+ * + * ``` + * import * as automerge from "@automerge/automerge" + * + * type Profile = { + * pets: Array<{name: string, type: string}> + * } + * + * let doc1 = automerge.init("aaaa") + * doc1 = automerge.change(doc1, d => { + * d.pets = [{name: "Lassie", type: "dog"}] + * }) + * let doc2 = automerge.init("bbbb") + * doc2 = automerge.merge(doc2, automerge.clone(doc1)) + * + * doc2 = automerge.change(doc2, d => { + * d.pets[0].name = "Beethoven" + * }) + * + * doc1 = automerge.change(doc1, d => { + * d.pets[0].name = "Babe" + * }) + * + * const doc3 = automerge.merge(doc1, doc2) + * + * // Note that here we pass `doc3.pets`, not `doc3` + * let conflicts = automerge.getConflicts(doc3.pets[0], "name") + * + * // The two conflicting values are the keys of the conflicts object + * assert.deepEqual(Object.values(conflicts), ["Babe", Beethoven"]) + * ``` + * + * ## Actor IDs + * + * By default automerge will generate a random actor ID for you, but most methods + * for creating a document allow you to set the actor ID. You can get the actor ID + * associated with the document by calling {@link getActorId}. Actor IDs must not + * be used in concurrent threads of executiong - all changes by a given actor ID + * are expected to be sequential. + * + * ## Listening to patches + * + * Sometimes you want to respond to changes made to an automerge document. In this + * case you can use the {@link PatchCallback} type to receive notifications when + * changes have been made. + * + * ## Cloning + * + * Currently you cannot make mutating changes (i.e. call {@link change}) to a + * document which you have two pointers to. For example, in this code: + * + * ```javascript + * let doc1 = automerge.init() + * let doc2 = automerge.change(doc1, d => (d.key = "value")) + * ``` + * + * `doc1` and `doc2` are both pointers to the same state. 
Any attempt to call + * mutating methods on `doc1` will now result in an error like + * + * Attempting to change an out of date document + * + * If you encounter this you need to clone the original document, the above sample + * would work as: + * + * ```javascript + * let doc1 = automerge.init() + * let doc2 = automerge.change(automerge.clone(doc1), d => (d.key = "value")) + * ``` + * @packageDocumentation + * + * ## The {@link unstable} module + * + * We are working on some changes to automerge which are not yet complete and + * will result in backwards incompatible API changes. Once these changes are + * ready for production use we will release a new major version of automerge. + * However, until that point you can use the {@link unstable} module to try out + * the new features, documents from the {@link unstable} module are + * interoperable with documents from the main module. Please see the docs for + * the {@link unstable} module for more details. + */ +export * from "./stable" +import * as unstable from "./unstable" +export { unstable } diff --git a/javascript/src/internal_state.ts b/javascript/src/internal_state.ts new file mode 100644 index 00000000..f3da49b1 --- /dev/null +++ b/javascript/src/internal_state.ts @@ -0,0 +1,43 @@ +import { type ObjID, type Heads, Automerge } from "@automerge/automerge-wasm" + +import { STATE, OBJECT_ID, TRACE, IS_PROXY } from "./constants" + +import type { Doc, PatchCallback } from "./types" + +export interface InternalState { + handle: Automerge + heads: Heads | undefined + freeze: boolean + patchCallback?: PatchCallback + textV2: boolean +} + +export function _state(doc: Doc, checkroot = true): InternalState { + if (typeof doc !== "object") { + throw new RangeError("must be the document root") + } + const state = Reflect.get(doc, STATE) as InternalState + if ( + state === undefined || + state == null || + (checkroot && _obj(doc) !== "_root") + ) { + throw new RangeError("must be the document root") + } + return state +} + 
+export function _trace(doc: Doc): string | undefined { + return Reflect.get(doc, TRACE) as string +} + +export function _obj(doc: Doc): ObjID | null { + if (!(typeof doc === "object") || doc === null) { + return null + } + return Reflect.get(doc, OBJECT_ID) as ObjID +} + +export function _is_proxy(doc: Doc): boolean { + return !!Reflect.get(doc, IS_PROXY) +} diff --git a/javascript/src/low_level.ts b/javascript/src/low_level.ts new file mode 100644 index 00000000..f44f3a32 --- /dev/null +++ b/javascript/src/low_level.ts @@ -0,0 +1,58 @@ +import { + type API, + Automerge, + type Change, + type DecodedChange, + type Actor, + SyncState, + type SyncMessage, + type JsSyncState, + type DecodedSyncMessage, + type ChangeToEncode, +} from "@automerge/automerge-wasm" +export type { ChangeToEncode } from "@automerge/automerge-wasm" + +export function UseApi(api: API) { + for (const k in api) { + // eslint-disable-next-line @typescript-eslint/no-extra-semi,@typescript-eslint/no-explicit-any + ;(ApiHandler as any)[k] = (api as any)[k] + } +} + +/* eslint-disable */ +export const ApiHandler: API = { + create(textV2: boolean, actor?: Actor): Automerge { + throw new RangeError("Automerge.use() not called") + }, + load(data: Uint8Array, textV2: boolean, actor?: Actor): Automerge { + throw new RangeError("Automerge.use() not called (load)") + }, + encodeChange(change: ChangeToEncode): Change { + throw new RangeError("Automerge.use() not called (encodeChange)") + }, + decodeChange(change: Change): DecodedChange { + throw new RangeError("Automerge.use() not called (decodeChange)") + }, + initSyncState(): SyncState { + throw new RangeError("Automerge.use() not called (initSyncState)") + }, + encodeSyncMessage(message: DecodedSyncMessage): SyncMessage { + throw new RangeError("Automerge.use() not called (encodeSyncMessage)") + }, + decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage { + throw new RangeError("Automerge.use() not called (decodeSyncMessage)") + }, + 
encodeSyncState(state: SyncState): Uint8Array { + throw new RangeError("Automerge.use() not called (encodeSyncState)") + }, + decodeSyncState(data: Uint8Array): SyncState { + throw new RangeError("Automerge.use() not called (decodeSyncState)") + }, + exportSyncState(state: SyncState): JsSyncState { + throw new RangeError("Automerge.use() not called (exportSyncState)") + }, + importSyncState(state: JsSyncState): SyncState { + throw new RangeError("Automerge.use() not called (importSyncState)") + }, +} +/* eslint-enable */ diff --git a/automerge-js/src/numbers.ts b/javascript/src/numbers.ts similarity index 62% rename from automerge-js/src/numbers.ts rename to javascript/src/numbers.ts index 9d63bcc5..7ad95998 100644 --- a/automerge-js/src/numbers.ts +++ b/javascript/src/numbers.ts @@ -1,12 +1,18 @@ -// Convience classes to allow users to stricly specify the number type they want +// Convenience classes to allow users to strictly specify the number type they want import { INT, UINT, F64 } from "./constants" export class Int { - value: number; + value: number constructor(value: number) { - if (!(Number.isInteger(value) && value <= Number.MAX_SAFE_INTEGER && value >= Number.MIN_SAFE_INTEGER)) { + if ( + !( + Number.isInteger(value) && + value <= Number.MAX_SAFE_INTEGER && + value >= Number.MIN_SAFE_INTEGER + ) + ) { throw new RangeError(`Value ${value} cannot be a uint`) } this.value = value @@ -16,10 +22,16 @@ export class Int { } export class Uint { - value: number; + value: number constructor(value: number) { - if (!(Number.isInteger(value) && value <= Number.MAX_SAFE_INTEGER && value >= 0)) { + if ( + !( + Number.isInteger(value) && + value <= Number.MAX_SAFE_INTEGER && + value >= 0 + ) + ) { throw new RangeError(`Value ${value} cannot be a uint`) } this.value = value @@ -29,10 +41,10 @@ export class Uint { } export class Float64 { - value: number; + value: number constructor(value: number) { - if (typeof value !== 'number') { + if (typeof value !== "number") { 
throw new RangeError(`Value ${value} cannot be a float64`) } this.value = value || 0.0 @@ -40,4 +52,3 @@ export class Float64 { Object.freeze(this) } } - diff --git a/javascript/src/proxies.ts b/javascript/src/proxies.ts new file mode 100644 index 00000000..54a8dd71 --- /dev/null +++ b/javascript/src/proxies.ts @@ -0,0 +1,1005 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ +import { Text } from "./text" +import { + Automerge, + type Heads, + type ObjID, + type Prop, +} from "@automerge/automerge-wasm" + +import type { AutomergeValue, ScalarValue, MapValue, ListValue } from "./types" +import { + type AutomergeValue as UnstableAutomergeValue, + MapValue as UnstableMapValue, + ListValue as UnstableListValue, +} from "./unstable_types" +import { Counter, getWriteableCounter } from "./counter" +import { + STATE, + TRACE, + IS_PROXY, + OBJECT_ID, + COUNTER, + INT, + UINT, + F64, +} from "./constants" +import { RawString } from "./raw_string" + +type TargetCommon = { + context: Automerge + objectId: ObjID + path: Array + readonly: boolean + heads?: Array + cache: object + trace?: any + frozen: boolean +} + +export type Text2Target = TargetCommon & { textV2: true } +export type Text1Target = TargetCommon & { textV2: false } +export type Target = Text1Target | Text2Target + +export type ValueType = T extends Text2Target + ? UnstableAutomergeValue + : T extends Text1Target + ? AutomergeValue + : never +type MapValueType = T extends Text2Target + ? UnstableMapValue + : T extends Text1Target + ? MapValue + : never +type ListValueType = T extends Text2Target + ? UnstableListValue + : T extends Text1Target + ? 
ListValue + : never + +function parseListIndex(key: any) { + if (typeof key === "string" && /^[0-9]+$/.test(key)) key = parseInt(key, 10) + if (typeof key !== "number") { + return key + } + if (key < 0 || isNaN(key) || key === Infinity || key === -Infinity) { + throw new RangeError("A list index must be positive, but you passed " + key) + } + return key +} + +function valueAt( + target: T, + prop: Prop +): ValueType | undefined { + const { context, objectId, path, readonly, heads, textV2 } = target + const value = context.getWithType(objectId, prop, heads) + if (value === null) { + return + } + const datatype = value[0] + const val = value[1] + switch (datatype) { + case undefined: + return + case "map": + return mapProxy( + context, + val as ObjID, + textV2, + [...path, prop], + readonly, + heads + ) + case "list": + return listProxy( + context, + val as ObjID, + textV2, + [...path, prop], + readonly, + heads + ) + case "text": + if (textV2) { + return context.text(val as ObjID, heads) as ValueType + } else { + return textProxy( + context, + val as ObjID, + [...path, prop], + readonly, + heads + ) as unknown as ValueType + } + case "str": + return val as ValueType + case "uint": + return val as ValueType + case "int": + return val as ValueType + case "f64": + return val as ValueType + case "boolean": + return val as ValueType + case "null": + return null as ValueType + case "bytes": + return val as ValueType + case "timestamp": + return val as ValueType + case "counter": { + if (readonly) { + return new Counter(val as number) as ValueType + } else { + const counter: Counter = getWriteableCounter( + val as number, + context, + path, + objectId, + prop + ) + return counter as ValueType + } + } + default: + throw RangeError(`datatype ${datatype} unimplemented`) + } +} + +type ImportedValue = + | [null, "null"] + | [number, "uint"] + | [number, "int"] + | [number, "f64"] + | [number, "counter"] + | [number, "timestamp"] + | [string, "str"] + | [Text | string, "text"] 
+ | [Uint8Array, "bytes"] + | [Array, "list"] + | [Record, "map"] + | [boolean, "boolean"] + +function import_value(value: any, textV2: boolean): ImportedValue { + switch (typeof value) { + case "object": + if (value == null) { + return [null, "null"] + } else if (value[UINT]) { + return [value.value, "uint"] + } else if (value[INT]) { + return [value.value, "int"] + } else if (value[F64]) { + return [value.value, "f64"] + } else if (value[COUNTER]) { + return [value.value, "counter"] + } else if (value instanceof Date) { + return [value.getTime(), "timestamp"] + } else if (value instanceof RawString) { + return [value.val, "str"] + } else if (value instanceof Text) { + return [value, "text"] + } else if (value instanceof Uint8Array) { + return [value, "bytes"] + } else if (value instanceof Array) { + return [value, "list"] + } else if (Object.getPrototypeOf(value) === Object.getPrototypeOf({})) { + return [value, "map"] + } else if (value[OBJECT_ID]) { + throw new RangeError( + "Cannot create a reference to an existing document object" + ) + } else { + throw new RangeError(`Cannot assign unknown object: ${value}`) + } + case "boolean": + return [value, "boolean"] + case "number": + if (Number.isInteger(value)) { + return [value, "int"] + } else { + return [value, "f64"] + } + case "string": + if (textV2) { + return [value, "text"] + } else { + return [value, "str"] + } + default: + throw new RangeError(`Unsupported type of value: ${typeof value}`) + } +} + +const MapHandler = { + get( + target: T, + key: any + ): ValueType | ObjID | boolean | { handle: Automerge } { + const { context, objectId, cache } = target + if (key === Symbol.toStringTag) { + return target[Symbol.toStringTag] + } + if (key === OBJECT_ID) return objectId + if (key === IS_PROXY) return true + if (key === TRACE) return target.trace + if (key === STATE) return { handle: context } + if (!cache[key]) { + cache[key] = valueAt(target, key) + } + return cache[key] + }, + + set(target: Target, key: 
any, val: any) { + const { context, objectId, path, readonly, frozen, textV2 } = target + target.cache = {} // reset cache on set + if (val && val[OBJECT_ID]) { + throw new RangeError( + "Cannot create a reference to an existing document object" + ) + } + if (key === TRACE) { + target.trace = val + return true + } + const [value, datatype] = import_value(val, textV2) + if (frozen) { + throw new RangeError("Attempting to use an outdated Automerge document") + } + if (readonly) { + throw new RangeError(`Object property "${key}" cannot be modified`) + } + switch (datatype) { + case "list": { + const list = context.putObject(objectId, key, []) + const proxyList = listProxy( + context, + list, + textV2, + [...path, key], + readonly + ) + for (let i = 0; i < value.length; i++) { + proxyList[i] = value[i] + } + break + } + case "text": { + if (textV2) { + assertString(value) + context.putObject(objectId, key, value) + } else { + assertText(value) + const text = context.putObject(objectId, key, "") + const proxyText = textProxy(context, text, [...path, key], readonly) + for (let i = 0; i < value.length; i++) { + proxyText[i] = value.get(i) + } + } + break + } + case "map": { + const map = context.putObject(objectId, key, {}) + const proxyMap = mapProxy( + context, + map, + textV2, + [...path, key], + readonly + ) + for (const key in value) { + proxyMap[key] = value[key] + } + break + } + default: + context.put(objectId, key, value, datatype) + } + return true + }, + + deleteProperty(target: Target, key: any) { + const { context, objectId, readonly } = target + target.cache = {} // reset cache on delete + if (readonly) { + throw new RangeError(`Object property "${key}" cannot be modified`) + } + context.delete(objectId, key) + return true + }, + + has(target: Target, key: any) { + const value = this.get(target, key) + return value !== undefined + }, + + getOwnPropertyDescriptor(target: Target, key: any) { + // const { context, objectId } = target + const value = 
this.get(target, key) + if (typeof value !== "undefined") { + return { + configurable: true, + enumerable: true, + value, + } + } + }, + + ownKeys(target: Target) { + const { context, objectId, heads } = target + // FIXME - this is a tmp workaround until fix the dupe key bug in keys() + const keys = context.keys(objectId, heads) + return [...new Set(keys)] + }, +} + +const ListHandler = { + get( + target: T, + index: any + ): + | ValueType + | boolean + | ObjID + | { handle: Automerge } + | number + | ((_: any) => boolean) { + const { context, objectId, heads } = target + index = parseListIndex(index) + if (index === Symbol.hasInstance) { + return (instance: any) => { + return Array.isArray(instance) + } + } + if (index === Symbol.toStringTag) { + return target[Symbol.toStringTag] + } + if (index === OBJECT_ID) return objectId + if (index === IS_PROXY) return true + if (index === TRACE) return target.trace + if (index === STATE) return { handle: context } + if (index === "length") return context.length(objectId, heads) + if (typeof index === "number") { + return valueAt(target, index) as ValueType + } else { + return listMethods(target)[index] + } + }, + + set(target: Target, index: any, val: any) { + const { context, objectId, path, readonly, frozen, textV2 } = target + index = parseListIndex(index) + if (val && val[OBJECT_ID]) { + throw new RangeError( + "Cannot create a reference to an existing document object" + ) + } + if (index === TRACE) { + target.trace = val + return true + } + if (typeof index == "string") { + throw new RangeError("list index must be a number") + } + const [value, datatype] = import_value(val, textV2) + if (frozen) { + throw new RangeError("Attempting to use an outdated Automerge document") + } + if (readonly) { + throw new RangeError(`Object property "${index}" cannot be modified`) + } + switch (datatype) { + case "list": { + let list: ObjID + if (index >= context.length(objectId)) { + list = context.insertObject(objectId, index, []) + } 
else { + list = context.putObject(objectId, index, []) + } + const proxyList = listProxy( + context, + list, + textV2, + [...path, index], + readonly + ) + proxyList.splice(0, 0, ...value) + break + } + case "text": { + if (textV2) { + assertString(value) + if (index >= context.length(objectId)) { + context.insertObject(objectId, index, value) + } else { + context.putObject(objectId, index, value) + } + } else { + let text: ObjID + assertText(value) + if (index >= context.length(objectId)) { + text = context.insertObject(objectId, index, "") + } else { + text = context.putObject(objectId, index, "") + } + const proxyText = textProxy(context, text, [...path, index], readonly) + proxyText.splice(0, 0, ...value) + } + break + } + case "map": { + let map: ObjID + if (index >= context.length(objectId)) { + map = context.insertObject(objectId, index, {}) + } else { + map = context.putObject(objectId, index, {}) + } + const proxyMap = mapProxy( + context, + map, + textV2, + [...path, index], + readonly + ) + for (const key in value) { + proxyMap[key] = value[key] + } + break + } + default: + if (index >= context.length(objectId)) { + context.insert(objectId, index, value, datatype) + } else { + context.put(objectId, index, value, datatype) + } + } + return true + }, + + deleteProperty(target: Target, index: any) { + const { context, objectId } = target + index = parseListIndex(index) + const elem = context.get(objectId, index) + if (elem != null && elem[0] == "counter") { + throw new TypeError( + "Unsupported operation: deleting a counter from a list" + ) + } + context.delete(objectId, index) + return true + }, + + has(target: Target, index: any) { + const { context, objectId, heads } = target + index = parseListIndex(index) + if (typeof index === "number") { + return index < context.length(objectId, heads) + } + return index === "length" + }, + + getOwnPropertyDescriptor(target: Target, index: any) { + const { context, objectId, heads } = target + + if (index === 
"length") + return { writable: true, value: context.length(objectId, heads) } + if (index === OBJECT_ID) + return { configurable: false, enumerable: false, value: objectId } + + index = parseListIndex(index) + + const value = valueAt(target, index) + return { configurable: true, enumerable: true, value } + }, + + getPrototypeOf(target: Target) { + return Object.getPrototypeOf(target) + }, + ownKeys(/*target*/): string[] { + const keys: string[] = [] + // uncommenting this causes assert.deepEqual() to fail when comparing to a pojo array + // but not uncommenting it causes for (i in list) {} to not enumerate values properly + //const {context, objectId, heads } = target + //for (let i = 0; i < target.context.length(objectId, heads); i++) { keys.push(i.toString()) } + keys.push("length") + return keys + }, +} + +const TextHandler = Object.assign({}, ListHandler, { + get(target: Target, index: any) { + const { context, objectId, heads } = target + index = parseListIndex(index) + if (index === Symbol.hasInstance) { + return (instance: any) => { + return Array.isArray(instance) + } + } + if (index === Symbol.toStringTag) { + return target[Symbol.toStringTag] + } + if (index === OBJECT_ID) return objectId + if (index === IS_PROXY) return true + if (index === TRACE) return target.trace + if (index === STATE) return { handle: context } + if (index === "length") return context.length(objectId, heads) + if (typeof index === "number") { + return valueAt(target, index) + } else { + return textMethods(target)[index] || listMethods(target)[index] + } + }, + getPrototypeOf(/*target*/) { + return Object.getPrototypeOf(new Text()) + }, +}) + +export function mapProxy( + context: Automerge, + objectId: ObjID, + textV2: boolean, + path?: Prop[], + readonly?: boolean, + heads?: Heads +): MapValueType { + const target: Target = { + context, + objectId, + path: path || [], + readonly: !!readonly, + frozen: false, + heads, + cache: {}, + textV2, + } + const proxied = {} + 
Object.assign(proxied, target) + const result = new Proxy(proxied, MapHandler) + // conversion through unknown is necessary because the types are so different + return result as unknown as MapValueType +} + +export function listProxy( + context: Automerge, + objectId: ObjID, + textV2: boolean, + path?: Prop[], + readonly?: boolean, + heads?: Heads +): ListValueType { + const target: Target = { + context, + objectId, + path: path || [], + readonly: !!readonly, + frozen: false, + heads, + cache: {}, + textV2, + } + const proxied = [] + Object.assign(proxied, target) + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + return new Proxy(proxied, ListHandler) as unknown as ListValue +} + +interface TextProxy extends Text { + splice: (index: any, del: any, ...vals: any[]) => void +} + +export function textProxy( + context: Automerge, + objectId: ObjID, + path?: Prop[], + readonly?: boolean, + heads?: Heads +): TextProxy { + const target: Target = { + context, + objectId, + path: path || [], + readonly: !!readonly, + frozen: false, + heads, + cache: {}, + textV2: false, + } + const proxied = {} + Object.assign(proxied, target) + return new Proxy(proxied, TextHandler) as unknown as TextProxy +} + +export function rootProxy( + context: Automerge, + textV2: boolean, + readonly?: boolean +): T { + /* eslint-disable-next-line */ + return mapProxy(context, "_root", textV2, [], !!readonly) +} + +function listMethods(target: T) { + const { context, objectId, path, readonly, frozen, heads, textV2 } = target + const methods = { + deleteAt(index: number, numDelete: number) { + if (typeof numDelete === "number") { + context.splice(objectId, index, numDelete) + } else { + context.delete(objectId, index) + } + return this + }, + + fill(val: ScalarValue, start: number, end: number) { + const [value, datatype] = import_value(val, textV2) + const length = context.length(objectId) + start = parseListIndex(start || 0) + end = parseListIndex(end || length) + for 
(let i = start; i < Math.min(end, length); i++) { + if (datatype === "list" || datatype === "map") { + context.putObject(objectId, i, value) + } else if (datatype === "text") { + if (textV2) { + assertString(value) + context.putObject(objectId, i, value) + } else { + assertText(value) + const text = context.putObject(objectId, i, "") + const proxyText = textProxy(context, text, [...path, i], readonly) + for (let i = 0; i < value.length; i++) { + proxyText[i] = value.get(i) + } + } + } else { + context.put(objectId, i, value, datatype) + } + } + return this + }, + + indexOf(o: any, start = 0) { + const length = context.length(objectId) + for (let i = start; i < length; i++) { + const value = context.getWithType(objectId, i, heads) + if (value && (value[1] === o[OBJECT_ID] || value[1] === o)) { + return i + } + } + return -1 + }, + + insertAt(index: number, ...values: any[]) { + this.splice(index, 0, ...values) + return this + }, + + pop() { + const length = context.length(objectId) + if (length == 0) { + return undefined + } + const last = valueAt(target, length - 1) + context.delete(objectId, length - 1) + return last + }, + + push(...values: any[]) { + const len = context.length(objectId) + this.splice(len, 0, ...values) + return context.length(objectId) + }, + + shift() { + if (context.length(objectId) == 0) return + const first = valueAt(target, 0) + context.delete(objectId, 0) + return first + }, + + splice(index: any, del: any, ...vals: any[]) { + index = parseListIndex(index) + del = parseListIndex(del) + for (const val of vals) { + if (val && val[OBJECT_ID]) { + throw new RangeError( + "Cannot create a reference to an existing document object" + ) + } + } + if (frozen) { + throw new RangeError("Attempting to use an outdated Automerge document") + } + if (readonly) { + throw new RangeError( + "Sequence object cannot be modified outside of a change block" + ) + } + const result: ValueType[] = [] + for (let i = 0; i < del; i++) { + const value = valueAt(target, 
index) + if (value !== undefined) { + result.push(value) + } + context.delete(objectId, index) + } + const values = vals.map(val => import_value(val, textV2)) + for (const [value, datatype] of values) { + switch (datatype) { + case "list": { + const list = context.insertObject(objectId, index, []) + const proxyList = listProxy( + context, + list, + textV2, + [...path, index], + readonly + ) + proxyList.splice(0, 0, ...value) + break + } + case "text": { + if (textV2) { + assertString(value) + context.insertObject(objectId, index, value) + } else { + const text = context.insertObject(objectId, index, "") + const proxyText = textProxy( + context, + text, + [...path, index], + readonly + ) + proxyText.splice(0, 0, ...value) + } + break + } + case "map": { + const map = context.insertObject(objectId, index, {}) + const proxyMap = mapProxy( + context, + map, + textV2, + [...path, index], + readonly + ) + for (const key in value) { + proxyMap[key] = value[key] + } + break + } + default: + context.insert(objectId, index, value, datatype) + } + index += 1 + } + return result + }, + + unshift(...values: any) { + this.splice(0, 0, ...values) + return context.length(objectId) + }, + + entries() { + const i = 0 + const iterator = { + next: () => { + const value = valueAt(target, i) + if (value === undefined) { + return { value: undefined, done: true } + } else { + return { value: [i, value], done: false } + } + }, + } + return iterator + }, + + keys() { + let i = 0 + const len = context.length(objectId, heads) + const iterator = { + next: () => { + let value: undefined | number = undefined + if (i < len) { + value = i + i++ + } + return { value, done: true } + }, + } + return iterator + }, + + values() { + const i = 0 + const iterator = { + next: () => { + const value = valueAt(target, i) + if (value === undefined) { + return { value: undefined, done: true } + } else { + return { value, done: false } + } + }, + } + return iterator + }, + + toArray(): ValueType[] { + const 
list: Array> = [] + let value: ValueType | undefined + do { + value = valueAt(target, list.length) + if (value !== undefined) { + list.push(value) + } + } while (value !== undefined) + + return list + }, + + map(f: (_a: ValueType, _n: number) => U): U[] { + return this.toArray().map(f) + }, + + toString(): string { + return this.toArray().toString() + }, + + toLocaleString(): string { + return this.toArray().toLocaleString() + }, + + forEach(f: (_a: ValueType, _n: number) => undefined) { + return this.toArray().forEach(f) + }, + + // todo: real concat function is different + concat(other: ValueType[]): ValueType[] { + return this.toArray().concat(other) + }, + + every(f: (_a: ValueType, _n: number) => boolean): boolean { + return this.toArray().every(f) + }, + + filter(f: (_a: ValueType, _n: number) => boolean): ValueType[] { + return this.toArray().filter(f) + }, + + find( + f: (_a: ValueType, _n: number) => boolean + ): ValueType | undefined { + let index = 0 + for (const v of this) { + if (f(v, index)) { + return v + } + index += 1 + } + }, + + findIndex(f: (_a: ValueType, _n: number) => boolean): number { + let index = 0 + for (const v of this) { + if (f(v, index)) { + return index + } + index += 1 + } + return -1 + }, + + includes(elem: ValueType): boolean { + return this.find(e => e === elem) !== undefined + }, + + join(sep?: string): string { + return this.toArray().join(sep) + }, + + reduce( + f: (acc: U, currentValue: ValueType) => U, + initialValue: U + ): U | undefined { + return this.toArray().reduce(f, initialValue) + }, + + reduceRight( + f: (acc: U, item: ValueType) => U, + initialValue: U + ): U | undefined { + return this.toArray().reduceRight(f, initialValue) + }, + + lastIndexOf(search: ValueType, fromIndex = +Infinity): number { + // this can be faster + return this.toArray().lastIndexOf(search, fromIndex) + }, + + slice(index?: number, num?: number): ValueType[] { + return this.toArray().slice(index, num) + }, + + some(f: (v: ValueType, i: 
number) => boolean): boolean { + let index = 0 + for (const v of this) { + if (f(v, index)) { + return true + } + index += 1 + } + return false + }, + + [Symbol.iterator]: function* () { + let i = 0 + let value = valueAt(target, i) + while (value !== undefined) { + yield value + i += 1 + value = valueAt(target, i) + } + }, + } + return methods +} + +function textMethods(target: Target) { + const { context, objectId, heads } = target + const methods = { + set(index: number, value: any) { + return (this[index] = value) + }, + get(index: number): AutomergeValue { + return this[index] + }, + toString(): string { + return context.text(objectId, heads).replace(//g, "") + }, + toSpans(): AutomergeValue[] { + const spans: AutomergeValue[] = [] + let chars = "" + const length = context.length(objectId) + for (let i = 0; i < length; i++) { + const value = this[i] + if (typeof value === "string") { + chars += value + } else { + if (chars.length > 0) { + spans.push(chars) + chars = "" + } + spans.push(value) + } + } + if (chars.length > 0) { + spans.push(chars) + } + return spans + }, + toJSON(): string { + return this.toString() + }, + indexOf(o: any, start = 0) { + const text = context.text(objectId) + return text.indexOf(o, start) + }, + } + return methods +} + +function assertText(value: Text | string): asserts value is Text { + if (!(value instanceof Text)) { + throw new Error("value was not a Text instance") + } +} + +function assertString(value: Text | string): asserts value is string { + if (typeof value !== "string") { + throw new Error("value was not a string") + } +} diff --git a/javascript/src/raw_string.ts b/javascript/src/raw_string.ts new file mode 100644 index 00000000..7fc02084 --- /dev/null +++ b/javascript/src/raw_string.ts @@ -0,0 +1,6 @@ +export class RawString { + val: string + constructor(val: string) { + this.val = val + } +} diff --git a/javascript/src/stable.ts b/javascript/src/stable.ts new file mode 100644 index 00000000..e83b127f --- /dev/null +++ 
b/javascript/src/stable.ts @@ -0,0 +1,944 @@ +/** @hidden **/ +export { /** @hidden */ uuid } from "./uuid" + +import { rootProxy } from "./proxies" +import { STATE } from "./constants" + +import { + type AutomergeValue, + Counter, + type Doc, + type PatchCallback, +} from "./types" +export { + type AutomergeValue, + Counter, + type Doc, + Int, + Uint, + Float64, + type Patch, + type PatchCallback, + type ScalarValue, +} from "./types" + +import { Text } from "./text" +export { Text } from "./text" + +import type { + API as WasmAPI, + Actor as ActorId, + Prop, + ObjID, + Change, + DecodedChange, + Heads, + MaterializeValue, + JsSyncState, + SyncMessage, + DecodedSyncMessage, +} from "@automerge/automerge-wasm" +export type { + PutPatch, + DelPatch, + SpliceTextPatch, + InsertPatch, + IncPatch, + SyncMessage, +} from "@automerge/automerge-wasm" + +/** @hidden **/ +type API = WasmAPI + +const SyncStateSymbol = Symbol("_syncstate") + +/** + * An opaque type tracking the state of sync with a remote peer + */ +type SyncState = JsSyncState & { _opaque: typeof SyncStateSymbol } + +import { ApiHandler, type ChangeToEncode, UseApi } from "./low_level" + +import { Automerge } from "@automerge/automerge-wasm" + +import { RawString } from "./raw_string" + +import { _state, _is_proxy, _trace, _obj } from "./internal_state" + +import { stableConflictAt } from "./conflicts" + +/** Options passed to {@link change}, and {@link emptyChange} + * @typeParam T - The type of value contained in the document + */ +export type ChangeOptions = { + /** A message which describes the changes */ + message?: string + /** The unix timestamp of the change (purely advisory, not used in conflict resolution) */ + time?: number + /** A callback which will be called to notify the caller of any changes to the document */ + patchCallback?: PatchCallback +} + +/** Options passed to {@link loadIncremental}, {@link applyChanges}, and {@link receiveSyncMessage} + * @typeParam T - The type of value contained 
in the document + */ +export type ApplyOptions = { patchCallback?: PatchCallback } + +/** + * A List is an extended Array that adds the two helper methods `deleteAt` and `insertAt`. + */ +export interface List extends Array { + insertAt(index: number, ...args: T[]): List + deleteAt(index: number, numDelete?: number): List +} + +/** + * To extend an arbitrary type, we have to turn any arrays that are part of the type's definition into Lists. + * So we recurse through the properties of T, turning any Arrays we find into Lists. + */ +export type Extend = + // is it an array? make it a list (we recursively extend the type of the array's elements as well) + T extends Array + ? List> + : // is it an object? recursively extend all of its properties + // eslint-disable-next-line @typescript-eslint/ban-types + T extends Object + ? { [P in keyof T]: Extend } + : // otherwise leave the type alone + T + +/** + * Function which is called by {@link change} when making changes to a `Doc` + * @typeParam T - The type of value contained in the document + * + * This function may mutate `doc` + */ +export type ChangeFn = (doc: Extend) => void + +/** @hidden **/ +export interface State { + change: DecodedChange + snapshot: T +} + +/** @hidden **/ +export function use(api: API) { + UseApi(api) +} + +import * as wasm from "@automerge/automerge-wasm" +use(wasm) + +/** + * Options to be passed to {@link init} or {@link load} + * @typeParam T - The type of the value the document contains + */ +export type InitOptions = { + /** The actor ID to use for this document, a random one will be generated if `null` is passed */ + actor?: ActorId + freeze?: boolean + /** A callback which will be called with the initial patch once the document has finished loading */ + patchCallback?: PatchCallback + /** @hidden */ + enableTextV2?: boolean +} + +/** @hidden */ +export function getBackend(doc: Doc): Automerge { + return _state(doc).handle +} + +function importOpts(_actor?: ActorId | InitOptions): 
InitOptions { + if (typeof _actor === "object") { + return _actor + } else { + return { actor: _actor } + } +} + +/** + * Create a new automerge document + * + * @typeParam T - The type of value contained in the document. This will be the + * type that is passed to the change closure in {@link change} + * @param _opts - Either an actorId or an {@link InitOptions} (which may + * contain an actorId). If this is null the document will be initialised with a + * random actor ID + */ +export function init(_opts?: ActorId | InitOptions): Doc { + const opts = importOpts(_opts) + const freeze = !!opts.freeze + const patchCallback = opts.patchCallback + const handle = ApiHandler.create(opts.enableTextV2 || false, opts.actor) + handle.enablePatches(true) + handle.enableFreeze(!!opts.freeze) + handle.registerDatatype("counter", (n: number) => new Counter(n)) + const textV2 = opts.enableTextV2 || false + if (textV2) { + handle.registerDatatype("str", (n: string) => new RawString(n)) + } else { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + handle.registerDatatype("text", (n: any) => new Text(n)) + } + const doc = handle.materialize("/", undefined, { + handle, + heads: undefined, + freeze, + patchCallback, + textV2, + }) as Doc + return doc +} + +/** + * Make an immutable view of an automerge document as at `heads` + * + * @remarks + * The document returned from this function cannot be passed to {@link change}. + * This is because it shares the same underlying memory as `doc`, but it is + * consequently a very cheap copy. + * + * Note that this function will throw an error if any of the hashes in `heads` + * are not in the document. 
+ * + * @typeParam T - The type of the value contained in the document + * @param doc - The document to create a view of + * @param heads - The hashes of the heads to create a view at + */ +export function view(doc: Doc, heads: Heads): Doc { + const state = _state(doc) + const handle = state.handle + return state.handle.materialize("/", heads, { + ...state, + handle, + heads, + }) as Doc +} + +/** + * Make a full writable copy of an automerge document + * + * @remarks + * Unlike {@link view} this function makes a full copy of the memory backing + * the document and can thus be passed to {@link change}. It also generates a + * new actor ID so that changes made in the new document do not create duplicate + * sequence numbers with respect to the old document. If you need control over + * the actor ID which is generated you can pass the actor ID as the second + * argument + * + * @typeParam T - The type of the value contained in the document + * @param doc - The document to clone + * @param _opts - Either an actor ID to use for the new doc or an {@link InitOptions} + */ +export function clone( + doc: Doc, + _opts?: ActorId | InitOptions +): Doc { + const state = _state(doc) + const heads = state.heads + const opts = importOpts(_opts) + const handle = state.handle.fork(opts.actor, heads) + + // `change` uses the presence of state.heads to determine if we are in a view + // set it to undefined to indicate that this is a full fat document + const { heads: _oldHeads, ...stateSansHeads } = state + return handle.applyPatches(doc, { ...stateSansHeads, handle }) +} + +/** Explicity free the memory backing a document. 
Note that this is note + * necessary in environments which support + * [`FinalizationRegistry`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/FinalizationRegistry) + */ +export function free(doc: Doc) { + return _state(doc).handle.free() +} + +/** + * Create an automerge document from a POJO + * + * @param initialState - The initial state which will be copied into the document + * @typeParam T - The type of the value passed to `from` _and_ the type the resulting document will contain + * @typeParam actor - The actor ID of the resulting document, if this is null a random actor ID will be used + * + * @example + * ``` + * const doc = automerge.from({ + * tasks: [ + * {description: "feed dogs", done: false} + * ] + * }) + * ``` + */ +export function from>( + initialState: T | Doc, + _opts?: ActorId | InitOptions +): Doc { + return change(init(_opts), d => Object.assign(d, initialState)) +} + +/** + * Update the contents of an automerge document + * @typeParam T - The type of the value contained in the document + * @param doc - The document to update + * @param options - Either a message, an {@link ChangeOptions}, or a {@link ChangeFn} + * @param callback - A `ChangeFn` to be used if `options` was a `string` + * + * Note that if the second argument is a function it will be used as the `ChangeFn` regardless of what the third argument is. 
+ * + * @example A simple change + * ``` + * let doc1 = automerge.init() + * doc1 = automerge.change(doc1, d => { + * d.key = "value" + * }) + * assert.equal(doc1.key, "value") + * ``` + * + * @example A change with a message + * + * ``` + * doc1 = automerge.change(doc1, "add another value", d => { + * d.key2 = "value2" + * }) + * ``` + * + * @example A change with a message and a timestamp + * + * ``` + * doc1 = automerge.change(doc1, {message: "add another value", time: 1640995200}, d => { + * d.key2 = "value2" + * }) + * ``` + * + * @example responding to a patch callback + * ``` + * let patchedPath + * let patchCallback = patch => { + * patchedPath = patch.path + * } + * doc1 = automerge.change(doc1, {message, "add another value", time: 1640995200, patchCallback}, d => { + * d.key2 = "value2" + * }) + * assert.equal(patchedPath, ["key2"]) + * ``` + */ +export function change( + doc: Doc, + options: string | ChangeOptions | ChangeFn, + callback?: ChangeFn +): Doc { + if (typeof options === "function") { + return _change(doc, {}, options) + } else if (typeof callback === "function") { + if (typeof options === "string") { + options = { message: options } + } + return _change(doc, options, callback) + } else { + throw RangeError("Invalid args for change") + } +} + +function progressDocument( + doc: Doc, + heads: Heads | null, + callback?: PatchCallback +): Doc { + if (heads == null) { + return doc + } + const state = _state(doc) + const nextState = { ...state, heads: undefined } + const nextDoc = state.handle.applyPatches(doc, nextState, callback) + state.heads = heads + return nextDoc +} + +function _change( + doc: Doc, + options: ChangeOptions, + callback: ChangeFn +): Doc { + if (typeof callback !== "function") { + throw new RangeError("invalid change function") + } + + const state = _state(doc) + + if (doc === undefined || state === undefined) { + throw new RangeError("must be the document root") + } + if (state.heads) { + throw new RangeError( + "Attempting to 
change an outdated document. Use Automerge.clone() if you wish to make a writable copy." + ) + } + if (_is_proxy(doc)) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const heads = state.handle.getHeads() + try { + state.heads = heads + const root: T = rootProxy(state.handle, state.textV2) + callback(root as Extend) + if (state.handle.pendingOps() === 0) { + state.heads = undefined + return doc + } else { + state.handle.commit(options.message, options.time) + return progressDocument( + doc, + heads, + options.patchCallback || state.patchCallback + ) + } + } catch (e) { + state.heads = undefined + state.handle.rollback() + throw e + } +} + +/** + * Make a change to a document which does not modify the document + * + * @param doc - The doc to add the empty change to + * @param options - Either a message or a {@link ChangeOptions} for the new change + * + * Why would you want to do this? One reason might be that you have merged + * changes from some other peers and you want to generate a change which + * depends on those merged changes so that you can sign the new change with all + * of the merged changes as part of the new change. + */ +export function emptyChange( + doc: Doc, + options: string | ChangeOptions | void +) { + if (options === undefined) { + options = {} + } + if (typeof options === "string") { + options = { message: options } + } + + const state = _state(doc) + + if (state.heads) { + throw new RangeError( + "Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy." + ) + } + if (_is_proxy(doc)) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + + const heads = state.handle.getHeads() + state.handle.emptyChange(options.message, options.time) + return progressDocument(doc, heads) +} + +/** + * Load an automerge document from a compressed document produce by {@link save} + * + * @typeParam T - The type of the value which is contained in the document. 
+ * Note that no validation is done to make sure this type is in + * fact the type of the contained value so be a bit careful + * @param data - The compressed document + * @param _opts - Either an actor ID or some {@link InitOptions}, if the actor + * ID is null a random actor ID will be created + * + * Note that `load` will throw an error if passed incomplete content (for + * example if you are receiving content over the network and don't know if you + * have the complete document yet). If you need to handle incomplete content use + * {@link init} followed by {@link loadIncremental}. + */ +export function load( + data: Uint8Array, + _opts?: ActorId | InitOptions +): Doc { + const opts = importOpts(_opts) + const actor = opts.actor + const patchCallback = opts.patchCallback + const handle = ApiHandler.load(data, opts.enableTextV2 || false, actor) + handle.enablePatches(true) + handle.enableFreeze(!!opts.freeze) + handle.registerDatatype("counter", (n: number) => new Counter(n)) + const textV2 = opts.enableTextV2 || false + if (textV2) { + handle.registerDatatype("str", (n: string) => new RawString(n)) + } else { + handle.registerDatatype("text", (n: string) => new Text(n)) + } + const doc = handle.materialize("/", undefined, { + handle, + heads: undefined, + patchCallback, + textV2, + }) as Doc + return doc +} + +/** + * Load changes produced by {@link saveIncremental}, or partial changes + * + * @typeParam T - The type of the value which is contained in the document. + * Note that no validation is done to make sure this type is in + * fact the type of the contained value so be a bit careful + * @param data - The compressedchanges + * @param opts - an {@link ApplyOptions} + * + * This function is useful when staying up to date with a connected peer. + * Perhaps the other end sent you a full compresed document which you loaded + * with {@link load} and they're sending you the result of + * {@link getLastLocalChange} every time they make a change. 
+ * + * Note that this function will succesfully load the results of {@link save} as + * well as {@link getLastLocalChange} or any other incremental change. + */ +export function loadIncremental( + doc: Doc, + data: Uint8Array, + opts?: ApplyOptions +): Doc { + if (!opts) { + opts = {} + } + const state = _state(doc) + if (state.heads) { + throw new RangeError( + "Attempting to change an out of date document - set at: " + _trace(doc) + ) + } + if (_is_proxy(doc)) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const heads = state.handle.getHeads() + state.handle.loadIncremental(data) + return progressDocument(doc, heads, opts.patchCallback || state.patchCallback) +} + +/** + * Export the contents of a document to a compressed format + * + * @param doc - The doc to save + * + * The returned bytes can be passed to {@link load} or {@link loadIncremental} + */ +export function save(doc: Doc): Uint8Array { + return _state(doc).handle.save() +} + +/** + * Merge `local` into `remote` + * @typeParam T - The type of values contained in each document + * @param local - The document to merge changes into + * @param remote - The document to merge changes from + * + * @returns - The merged document + * + * Often when you are merging documents you will also need to clone them. Both + * arguments to `merge` are frozen after the call so you can no longer call + * mutating methods (such as {@link change}) on them. The symtom of this will be + * an error which says "Attempting to change an out of date document". To + * overcome this call {@link clone} on the argument before passing it to {@link + * merge}. 
+ */ +export function merge(local: Doc, remote: Doc): Doc { + const localState = _state(local) + + if (localState.heads) { + throw new RangeError( + "Attempting to change an out of date document - set at: " + _trace(local) + ) + } + const heads = localState.handle.getHeads() + const remoteState = _state(remote) + const changes = localState.handle.getChangesAdded(remoteState.handle) + localState.handle.applyChanges(changes) + return progressDocument(local, heads, localState.patchCallback) +} + +/** + * Get the actor ID associated with the document + */ +export function getActorId(doc: Doc): ActorId { + const state = _state(doc) + return state.handle.getActorId() +} + +/** + * The type of conflicts for particular key or index + * + * Maps and sequences in automerge can contain conflicting values for a + * particular key or index. In this case {@link getConflicts} can be used to + * obtain a `Conflicts` representing the multiple values present for the property + * + * A `Conflicts` is a map from a unique (per property or index) key to one of + * the possible conflicting values for the given property. + */ +type Conflicts = { [key: string]: AutomergeValue } + +/** + * Get the conflicts associated with a property + * + * The values of properties in a map in automerge can be conflicted if there + * are concurrent "put" operations to the same key. Automerge chooses one value + * arbitrarily (but deterministically, any two nodes who have the same set of + * changes will choose the same value) from the set of conflicting values to + * present as the value of the key. + * + * Sometimes you may want to examine these conflicts, in this case you can use + * {@link getConflicts} to get the conflicts for the key. 
+ * + * @example + * ``` + * import * as automerge from "@automerge/automerge" + * + * type Profile = { + * pets: Array<{name: string, type: string}> + * } + * + * let doc1 = automerge.init("aaaa") + * doc1 = automerge.change(doc1, d => { + * d.pets = [{name: "Lassie", type: "dog"}] + * }) + * let doc2 = automerge.init("bbbb") + * doc2 = automerge.merge(doc2, automerge.clone(doc1)) + * + * doc2 = automerge.change(doc2, d => { + * d.pets[0].name = "Beethoven" + * }) + * + * doc1 = automerge.change(doc1, d => { + * d.pets[0].name = "Babe" + * }) + * + * const doc3 = automerge.merge(doc1, doc2) + * + * // Note that here we pass `doc3.pets`, not `doc3` + * let conflicts = automerge.getConflicts(doc3.pets[0], "name") + * + * // The two conflicting values are the keys of the conflicts object + * assert.deepEqual(Object.values(conflicts), ["Babe", Beethoven"]) + * ``` + */ +export function getConflicts( + doc: Doc, + prop: Prop +): Conflicts | undefined { + const state = _state(doc, false) + if (state.textV2) { + throw new Error("use unstable.getConflicts for an unstable document") + } + const objectId = _obj(doc) + if (objectId != null) { + return stableConflictAt(state.handle, objectId, prop) + } else { + return undefined + } +} + +/** + * Get the binary representation of the last change which was made to this doc + * + * This is most useful when staying in sync with other peers, every time you + * make a change locally via {@link change} you immediately call {@link + * getLastLocalChange} and send the result over the network to other peers. + */ +export function getLastLocalChange(doc: Doc): Change | undefined { + const state = _state(doc) + return state.handle.getLastLocalChange() || undefined +} + +/** + * Return the object ID of an arbitrary javascript value + * + * This is useful to determine if something is actually an automerge document, + * if `doc` is not an automerge document this will return null. 
+ */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export function getObjectId(doc: any, prop?: Prop): ObjID | null { + if (prop) { + const state = _state(doc, false) + const objectId = _obj(doc) + if (!state || !objectId) { + return null + } + return state.handle.get(objectId, prop) as ObjID + } else { + return _obj(doc) + } +} + +/** + * Get the changes which are in `newState` but not in `oldState`. The returned + * changes can be loaded in `oldState` via {@link applyChanges}. + * + * Note that this will crash if there are changes in `oldState` which are not in `newState`. + */ +export function getChanges(oldState: Doc, newState: Doc): Change[] { + const n = _state(newState) + return n.handle.getChanges(getHeads(oldState)) +} + +/** + * Get all the changes in a document + * + * This is different to {@link save} because the output is an array of changes + * which can be individually applied via {@link applyChanges}` + * + */ +export function getAllChanges(doc: Doc): Change[] { + const state = _state(doc) + return state.handle.getChanges([]) +} + +/** + * Apply changes received from another document + * + * `doc` will be updated to reflect the `changes`. If there are changes which + * we do not have dependencies for yet those will be stored in the document and + * applied when the depended on changes arrive. + * + * You can use the {@link ApplyOptions} to pass a patchcallback which will be + * informed of any changes which occur as a result of applying the changes + * + */ +export function applyChanges( + doc: Doc, + changes: Change[], + opts?: ApplyOptions +): [Doc] { + const state = _state(doc) + if (!opts) { + opts = {} + } + if (state.heads) { + throw new RangeError( + "Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy." 
+ ) + } + if (_is_proxy(doc)) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const heads = state.handle.getHeads() + state.handle.applyChanges(changes) + state.heads = heads + return [ + progressDocument(doc, heads, opts.patchCallback || state.patchCallback), + ] +} + +/** @hidden */ +export function getHistory(doc: Doc): State[] { + const textV2 = _state(doc).textV2 + const history = getAllChanges(doc) + return history.map((change, index) => ({ + get change() { + return decodeChange(change) + }, + get snapshot() { + const [state] = applyChanges( + init({ enableTextV2: textV2 }), + history.slice(0, index + 1) + ) + return state + }, + })) +} + +/** @hidden */ +// FIXME : no tests +// FIXME can we just use deep equals now? +export function equals(val1: unknown, val2: unknown): boolean { + if (!isObject(val1) || !isObject(val2)) return val1 === val2 + const keys1 = Object.keys(val1).sort(), + keys2 = Object.keys(val2).sort() + if (keys1.length !== keys2.length) return false + for (let i = 0; i < keys1.length; i++) { + if (keys1[i] !== keys2[i]) return false + if (!equals(val1[keys1[i]], val2[keys2[i]])) return false + } + return true +} + +/** + * encode a {@link SyncState} into binary to send over the network + * + * @group sync + * */ +export function encodeSyncState(state: SyncState): Uint8Array { + const sync = ApiHandler.importSyncState(state) + const result = ApiHandler.encodeSyncState(sync) + sync.free() + return result +} + +/** + * Decode some binary data into a {@link SyncState} + * + * @group sync + */ +export function decodeSyncState(state: Uint8Array): SyncState { + const sync = ApiHandler.decodeSyncState(state) + const result = ApiHandler.exportSyncState(sync) + sync.free() + return result as SyncState +} + +/** + * Generate a sync message to send to the peer represented by `inState` + * @param doc - The doc to generate messages about + * @param inState - The {@link SyncState} representing the peer we are talking to + * + * 
@group sync + * + * @returns An array of `[newSyncState, syncMessage | null]` where + * `newSyncState` should replace `inState` and `syncMessage` should be sent to + * the peer if it is not null. If `syncMessage` is null then we are up to date. + */ +export function generateSyncMessage( + doc: Doc, + inState: SyncState +): [SyncState, SyncMessage | null] { + const state = _state(doc) + const syncState = ApiHandler.importSyncState(inState) + const message = state.handle.generateSyncMessage(syncState) + const outState = ApiHandler.exportSyncState(syncState) as SyncState + return [outState, message] +} + +/** + * Update a document and our sync state on receiving a sync message + * + * @group sync + * + * @param doc - The doc the sync message is about + * @param inState - The {@link SyncState} for the peer we are communicating with + * @param message - The message which was received + * @param opts - Any {@link ApplyOption}s, used for passing a + * {@link PatchCallback} which will be informed of any changes + * in `doc` which occur because of the received sync message. + * + * @returns An array of `[newDoc, newSyncState, syncMessage | null]` where + * `newDoc` is the updated state of `doc`, `newSyncState` should replace + * `inState` and `syncMessage` should be sent to the peer if it is not null. If + * `syncMessage` is null then we are up to date. + */ +export function receiveSyncMessage( + doc: Doc, + inState: SyncState, + message: SyncMessage, + opts?: ApplyOptions +): [Doc, SyncState, null] { + const syncState = ApiHandler.importSyncState(inState) + if (!opts) { + opts = {} + } + const state = _state(doc) + if (state.heads) { + throw new RangeError( + "Attempting to change an outdated document. Use Automerge.clone() if you wish to make a writable copy." 
+ ) + } + if (_is_proxy(doc)) { + throw new RangeError("Calls to Automerge.change cannot be nested") + } + const heads = state.handle.getHeads() + state.handle.receiveSyncMessage(syncState, message) + const outSyncState = ApiHandler.exportSyncState(syncState) as SyncState + return [ + progressDocument(doc, heads, opts.patchCallback || state.patchCallback), + outSyncState, + null, + ] +} + +/** + * Create a new, blank {@link SyncState} + * + * When communicating with a peer for the first time use this to generate a new + * {@link SyncState} for them + * + * @group sync + */ +export function initSyncState(): SyncState { + return ApiHandler.exportSyncState(ApiHandler.initSyncState()) as SyncState +} + +/** @hidden */ +export function encodeChange(change: ChangeToEncode): Change { + return ApiHandler.encodeChange(change) +} + +/** @hidden */ +export function decodeChange(data: Change): DecodedChange { + return ApiHandler.decodeChange(data) +} + +/** @hidden */ +export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage { + return ApiHandler.encodeSyncMessage(message) +} + +/** @hidden */ +export function decodeSyncMessage(message: SyncMessage): DecodedSyncMessage { + return ApiHandler.decodeSyncMessage(message) +} + +/** + * Get any changes in `doc` which are not dependencies of `heads` + */ +export function getMissingDeps(doc: Doc, heads: Heads): Heads { + const state = _state(doc) + return state.handle.getMissingDeps(heads) +} + +/** + * Get the hashes of the heads of this document + */ +export function getHeads(doc: Doc): Heads { + const state = _state(doc) + return state.heads || state.handle.getHeads() +} + +/** @hidden */ +export function dump(doc: Doc) { + const state = _state(doc) + state.handle.dump() +} + +/** @hidden */ +export function toJS(doc: Doc): T { + const state = _state(doc) + const enabled = state.handle.enableFreeze(false) + const result = state.handle.materialize() + state.handle.enableFreeze(enabled) + return result as T +} + 
+export function isAutomerge(doc: unknown): boolean { + if (typeof doc == "object" && doc !== null) { + return getObjectId(doc) === "_root" && !!Reflect.get(doc, STATE) + } else { + return false + } +} + +function isObject(obj: unknown): obj is Record { + return typeof obj === "object" && obj !== null +} + +export type { + API, + SyncState, + ActorId, + Conflicts, + Prop, + Change, + ObjID, + DecodedChange, + DecodedSyncMessage, + Heads, + MaterializeValue, +} diff --git a/javascript/src/text.ts b/javascript/src/text.ts new file mode 100644 index 00000000..b01bd7db --- /dev/null +++ b/javascript/src/text.ts @@ -0,0 +1,224 @@ +import type { Value } from "@automerge/automerge-wasm" +import { TEXT, STATE } from "./constants" +import type { InternalState } from "./internal_state" + +export class Text { + //eslint-disable-next-line @typescript-eslint/no-explicit-any + elems: Array + str: string | undefined + //eslint-disable-next-line @typescript-eslint/no-explicit-any + spans: Array | undefined; + //eslint-disable-next-line @typescript-eslint/no-explicit-any + [STATE]?: InternalState + + constructor(text?: string | string[] | Value[]) { + if (typeof text === "string") { + this.elems = [...text] + } else if (Array.isArray(text)) { + this.elems = text + } else if (text === undefined) { + this.elems = [] + } else { + throw new TypeError(`Unsupported initial value for Text: ${text}`) + } + Reflect.defineProperty(this, TEXT, { value: true }) + } + + get length(): number { + return this.elems.length + } + + //eslint-disable-next-line @typescript-eslint/no-explicit-any + get(index: number): any { + return this.elems[index] + } + + /** + * Iterates over the text elements character by character, including any + * inline objects. 
+ */ + [Symbol.iterator]() { + const elems = this.elems + let index = -1 + return { + next() { + index += 1 + if (index < elems.length) { + return { done: false, value: elems[index] } + } else { + return { done: true } + } + }, + } + } + + /** + * Returns the content of the Text object as a simple string, ignoring any + * non-character elements. + */ + toString(): string { + if (!this.str) { + // Concatting to a string is faster than creating an array and then + // .join()ing for small (<100KB) arrays. + // https://jsperf.com/join-vs-loop-w-type-test + this.str = "" + for (const elem of this.elems) { + if (typeof elem === "string") this.str += elem + else this.str += "\uFFFC" + } + } + return this.str + } + + /** + * Returns the content of the Text object as a sequence of strings, + * interleaved with non-character elements. + * + * For example, the value `['a', 'b', {x: 3}, 'c', 'd']` has spans: + * `=> ['ab', {x: 3}, 'cd']` + */ + toSpans(): Array { + if (!this.spans) { + this.spans = [] + let chars = "" + for (const elem of this.elems) { + if (typeof elem === "string") { + chars += elem + } else { + if (chars.length > 0) { + this.spans.push(chars) + chars = "" + } + this.spans.push(elem) + } + } + if (chars.length > 0) { + this.spans.push(chars) + } + } + return this.spans + } + + /** + * Returns the content of the Text object as a simple string, so that the + * JSON serialization of an Automerge document represents text nicely. + */ + toJSON(): string { + return this.toString() + } + + /** + * Updates the list item at position `index` to a new value `value`. + */ + set(index: number, value: Value) { + if (this[STATE]) { + throw new RangeError( + "object cannot be modified outside of a change block" + ) + } + this.elems[index] = value + } + + /** + * Inserts new list items `values` starting at position `index`. 
+ */ + insertAt(index: number, ...values: Array) { + if (this[STATE]) { + throw new RangeError( + "object cannot be modified outside of a change block" + ) + } + this.elems.splice(index, 0, ...values) + } + + /** + * Deletes `numDelete` list items starting at position `index`. + * if `numDelete` is not given, one item is deleted. + */ + deleteAt(index: number, numDelete = 1) { + if (this[STATE]) { + throw new RangeError( + "object cannot be modified outside of a change block" + ) + } + this.elems.splice(index, numDelete) + } + + map(callback: (e: Value | object) => T) { + this.elems.map(callback) + } + + lastIndexOf(searchElement: Value, fromIndex?: number) { + this.elems.lastIndexOf(searchElement, fromIndex) + } + + concat(other: Text): Text { + return new Text(this.elems.concat(other.elems)) + } + + every(test: (v: Value) => boolean): boolean { + return this.elems.every(test) + } + + filter(test: (v: Value) => boolean): Text { + return new Text(this.elems.filter(test)) + } + + find(test: (v: Value) => boolean): Value | undefined { + return this.elems.find(test) + } + + findIndex(test: (v: Value) => boolean): number | undefined { + return this.elems.findIndex(test) + } + + forEach(f: (v: Value) => undefined) { + this.elems.forEach(f) + } + + includes(elem: Value): boolean { + return this.elems.includes(elem) + } + + indexOf(elem: Value) { + return this.elems.indexOf(elem) + } + + join(sep?: string): string { + return this.elems.join(sep) + } + + reduce( + f: ( + previousValue: Value, + currentValue: Value, + currentIndex: number, + array: Value[] + ) => Value + ) { + this.elems.reduce(f) + } + + reduceRight( + f: ( + previousValue: Value, + currentValue: Value, + currentIndex: number, + array: Value[] + ) => Value + ) { + this.elems.reduceRight(f) + } + + slice(start?: number, end?: number) { + new Text(this.elems.slice(start, end)) + } + + some(test: (arg: Value) => boolean): boolean { + return this.elems.some(test) + } + + toLocaleString() { + this.toString() + 
} +} diff --git a/javascript/src/types.ts b/javascript/src/types.ts new file mode 100644 index 00000000..beb5cf70 --- /dev/null +++ b/javascript/src/types.ts @@ -0,0 +1,46 @@ +export { Text } from "./text" +import { Text } from "./text" +export { Counter } from "./counter" +export { Int, Uint, Float64 } from "./numbers" + +import { Counter } from "./counter" +import type { Patch } from "@automerge/automerge-wasm" +export type { Patch } from "@automerge/automerge-wasm" + +export type AutomergeValue = + | ScalarValue + | { [key: string]: AutomergeValue } + | Array + | Text +export type MapValue = { [key: string]: AutomergeValue } +export type ListValue = Array +export type ScalarValue = + | string + | number + | null + | boolean + | Date + | Counter + | Uint8Array + +/** + * An automerge document. + * @typeParam T - The type of the value contained in this document + * + * Note that this provides read only access to the fields of the value. To + * modify the value use {@link change} + */ +export type Doc = { readonly [P in keyof T]: T[P] } + +/** + * Callback which is called by various methods in this library to notify the + * user of what changes have been made. + * @param patch - A description of the changes made + * @param before - The document before the change was made + * @param after - The document after the change was made + */ +export type PatchCallback = ( + patches: Array, + before: Doc, + after: Doc +) => void diff --git a/javascript/src/unstable.ts b/javascript/src/unstable.ts new file mode 100644 index 00000000..7c73afb9 --- /dev/null +++ b/javascript/src/unstable.ts @@ -0,0 +1,294 @@ +/** + * # The unstable API + * + * This module contains new features we are working on which are either not yet + * ready for a stable release and/or which will result in backwards incompatible + * API changes. 
The API of this module may change in arbitrary ways between + * point releases - we will always document what these changes are in the + * [CHANGELOG](#changelog) below, but only depend on this module if you are prepared to deal + * with frequent changes. + * + * ## Differences from stable + * + * In the stable API text objects are represented using the {@link Text} class. + * This means you must decide up front whether your string data might need + * concurrent merges in the future and if you change your mind you have to + * figure out how to migrate your data. In the unstable API the `Text` class is + * gone and all `string`s are represented using the text CRDT, allowing for + * concurrent changes. Modifying a string is done using the {@link splice} + * function. You can still access the old behaviour of strings which do not + * support merging behaviour via the {@link RawString} class. + * + * This leads to the following differences from `stable`: + * + * * There is no `unstable.Text` class, all strings are text objects + * * Reading strings in an `unstable` document is the same as reading any other + * javascript string + * * To modify strings in an `unstable` document use {@link splice} + * * The {@link AutomergeValue} type does not include the {@link Text} + * class but the {@link RawString} class is included in the {@link ScalarValue} + * type + * + * ## CHANGELOG + * * Introduce this module to expose the new API which has no `Text` class + * + * + * @module + */ + +export { + Counter, + type Doc, + Int, + Uint, + Float64, + type Patch, + type PatchCallback, + type AutomergeValue, + type ScalarValue, +} from "./unstable_types" + +import type { PatchCallback } from "./stable" + +import { type UnstableConflicts as Conflicts } from "./conflicts" +import { unstableConflictAt } from "./conflicts" + +export type { + PutPatch, + DelPatch, + SpliceTextPatch, + InsertPatch, + IncPatch, + SyncMessage, +} from "@automerge/automerge-wasm" + +export type { ChangeOptions, 
ApplyOptions, ChangeFn } from "./stable" +export { + view, + free, + getHeads, + change, + emptyChange, + loadIncremental, + save, + merge, + getActorId, + getLastLocalChange, + getChanges, + getAllChanges, + applyChanges, + getHistory, + equals, + encodeSyncState, + decodeSyncState, + generateSyncMessage, + receiveSyncMessage, + initSyncState, + encodeChange, + decodeChange, + encodeSyncMessage, + decodeSyncMessage, + getMissingDeps, + dump, + toJS, + isAutomerge, + getObjectId, +} from "./stable" + +export type InitOptions = { + /** The actor ID to use for this document, a random one will be generated if `null` is passed */ + actor?: ActorId + freeze?: boolean + /** A callback which will be called with the initial patch once the document has finished loading */ + patchCallback?: PatchCallback +} + +import { ActorId, Doc } from "./stable" +import * as stable from "./stable" +export { RawString } from "./raw_string" + +/** @hidden */ +export const getBackend = stable.getBackend + +import { _is_proxy, _state, _obj } from "./internal_state" + +/** + * Create a new automerge document + * + * @typeParam T - The type of value contained in the document. This will be the + * type that is passed to the change closure in {@link change} + * @param _opts - Either an actorId or an {@link InitOptions} (which may + * contain an actorId). If this is null the document will be initialised with a + * random actor ID + */ +export function init(_opts?: ActorId | InitOptions): Doc { + const opts = importOpts(_opts) + opts.enableTextV2 = true + return stable.init(opts) +} + +/** + * Make a full writable copy of an automerge document + * + * @remarks + * Unlike {@link view} this function makes a full copy of the memory backing + * the document and can thus be passed to {@link change}. It also generates a + * new actor ID so that changes made in the new document do not create duplicate + * sequence numbers with respect to the old document. 
If you need control over + * the actor ID which is generated you can pass the actor ID as the second + * argument + * + * @typeParam T - The type of the value contained in the document + * @param doc - The document to clone + * @param _opts - Either an actor ID to use for the new doc or an {@link InitOptions} + */ +export function clone( + doc: Doc, + _opts?: ActorId | InitOptions +): Doc { + const opts = importOpts(_opts) + opts.enableTextV2 = true + return stable.clone(doc, opts) +} + +/** + * Create an automerge document from a POJO + * + * @param initialState - The initial state which will be copied into the document + * @typeParam T - The type of the value passed to `from` _and_ the type the resulting document will contain + * @typeParam actor - The actor ID of the resulting document, if this is null a random actor ID will be used + * + * @example + * ``` + * const doc = automerge.from({ + * tasks: [ + * {description: "feed dogs", done: false} + * ] + * }) + * ``` + */ +export function from>( + initialState: T | Doc, + _opts?: ActorId | InitOptions +): Doc { + const opts = importOpts(_opts) + opts.enableTextV2 = true + return stable.from(initialState, opts) +} + +/** + * Load an automerge document from a compressed document produce by {@link save} + * + * @typeParam T - The type of the value which is contained in the document. + * Note that no validation is done to make sure this type is in + * fact the type of the contained value so be a bit careful + * @param data - The compressed document + * @param _opts - Either an actor ID or some {@link InitOptions}, if the actor + * ID is null a random actor ID will be created + * + * Note that `load` will throw an error if passed incomplete content (for + * example if you are receiving content over the network and don't know if you + * have the complete document yet). If you need to handle incomplete content use + * {@link init} followed by {@link loadIncremental}. 
+ */ +export function load( + data: Uint8Array, + _opts?: ActorId | InitOptions +): Doc { + const opts = importOpts(_opts) + opts.enableTextV2 = true + return stable.load(data, opts) +} + +function importOpts( + _actor?: ActorId | InitOptions +): stable.InitOptions { + if (typeof _actor === "object") { + return _actor + } else { + return { actor: _actor } + } +} + +export function splice( + doc: Doc, + prop: stable.Prop, + index: number, + del: number, + newText?: string +) { + if (!_is_proxy(doc)) { + throw new RangeError("object cannot be modified outside of a change block") + } + const state = _state(doc, false) + const objectId = _obj(doc) + if (!objectId) { + throw new RangeError("invalid object for splice") + } + const value = `${objectId}/${prop}` + try { + return state.handle.splice(value, index, del, newText) + } catch (e) { + throw new RangeError(`Cannot splice: ${e}`) + } +} + +/** + * Get the conflicts associated with a property + * + * The values of properties in a map in automerge can be conflicted if there + * are concurrent "put" operations to the same key. Automerge chooses one value + * arbitrarily (but deterministically, any two nodes who have the same set of + * changes will choose the same value) from the set of conflicting values to + * present as the value of the key. + * + * Sometimes you may want to examine these conflicts, in this case you can use + * {@link getConflicts} to get the conflicts for the key. 
+ * + * @example + * ``` + * import * as automerge from "@automerge/automerge" + * + * type Profile = { + * pets: Array<{name: string, type: string}> + * } + * + * let doc1 = automerge.init("aaaa") + * doc1 = automerge.change(doc1, d => { + * d.pets = [{name: "Lassie", type: "dog"}] + * }) + * let doc2 = automerge.init("bbbb") + * doc2 = automerge.merge(doc2, automerge.clone(doc1)) + * + * doc2 = automerge.change(doc2, d => { + * d.pets[0].name = "Beethoven" + * }) + * + * doc1 = automerge.change(doc1, d => { + * d.pets[0].name = "Babe" + * }) + * + * const doc3 = automerge.merge(doc1, doc2) + * + * // Note that here we pass `doc3.pets`, not `doc3` + * let conflicts = automerge.getConflicts(doc3.pets[0], "name") + * + * // The two conflicting values are the keys of the conflicts object + * assert.deepEqual(Object.values(conflicts), ["Babe", Beethoven"]) + * ``` + */ +export function getConflicts( + doc: Doc, + prop: stable.Prop +): Conflicts | undefined { + const state = _state(doc, false) + if (!state.textV2) { + throw new Error("use getConflicts for a stable document") + } + const objectId = _obj(doc) + if (objectId != null) { + return unstableConflictAt(state.handle, objectId, prop) + } else { + return undefined + } +} diff --git a/javascript/src/unstable_types.ts b/javascript/src/unstable_types.ts new file mode 100644 index 00000000..071e2cc4 --- /dev/null +++ b/javascript/src/unstable_types.ts @@ -0,0 +1,30 @@ +import { Counter } from "./types" + +export { + Counter, + type Doc, + Int, + Uint, + Float64, + type Patch, + type PatchCallback, +} from "./types" + +import { RawString } from "./raw_string" +export { RawString } from "./raw_string" + +export type AutomergeValue = + | ScalarValue + | { [key: string]: AutomergeValue } + | Array +export type MapValue = { [key: string]: AutomergeValue } +export type ListValue = Array +export type ScalarValue = + | string + | number + | null + | boolean + | Date + | Counter + | Uint8Array + | RawString diff --git 
a/javascript/src/uuid.deno.ts b/javascript/src/uuid.deno.ts new file mode 100644 index 00000000..04c9b93d --- /dev/null +++ b/javascript/src/uuid.deno.ts @@ -0,0 +1,26 @@ +import * as v4 from "https://deno.land/x/uuid@v0.1.2/mod.ts" + +// this file is a deno only port of the uuid module + +function defaultFactory() { + return v4.uuid().replace(/-/g, "") +} + +let factory = defaultFactory + +interface UUIDFactory extends Function { + setFactory(f: typeof factory): void + reset(): void +} + +export const uuid: UUIDFactory = () => { + return factory() +} + +uuid.setFactory = newFactory => { + factory = newFactory +} + +uuid.reset = () => { + factory = defaultFactory +} diff --git a/javascript/src/uuid.ts b/javascript/src/uuid.ts new file mode 100644 index 00000000..421ddf9d --- /dev/null +++ b/javascript/src/uuid.ts @@ -0,0 +1,24 @@ +import { v4 } from "uuid" + +function defaultFactory() { + return v4().replace(/-/g, "") +} + +let factory = defaultFactory + +interface UUIDFactory extends Function { + setFactory(f: typeof factory): void + reset(): void +} + +export const uuid: UUIDFactory = () => { + return factory() +} + +uuid.setFactory = newFactory => { + factory = newFactory +} + +uuid.reset = () => { + factory = defaultFactory +} diff --git a/javascript/test/basic_test.ts b/javascript/test/basic_test.ts new file mode 100644 index 00000000..e34484c4 --- /dev/null +++ b/javascript/test/basic_test.ts @@ -0,0 +1,488 @@ +import * as assert from "assert" +import { unstable as Automerge } from "../src" +import * as WASM from "@automerge/automerge-wasm" + +describe("Automerge", () => { + describe("basics", () => { + it("should init clone and free", () => { + let doc1 = Automerge.init() + let doc2 = Automerge.clone(doc1) + + // this is only needed if weakrefs are not supported + Automerge.free(doc1) + Automerge.free(doc2) + }) + + it("should be able to make a view with specifc heads", () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, d => (d.value = 
1)) + let heads2 = Automerge.getHeads(doc2) + let doc3 = Automerge.change(doc2, d => (d.value = 2)) + let doc2_v2 = Automerge.view(doc3, heads2) + assert.deepEqual(doc2, doc2_v2) + let doc2_v2_clone = Automerge.clone(doc2, "aabbcc") + assert.deepEqual(doc2, doc2_v2_clone) + assert.equal(Automerge.getActorId(doc2_v2_clone), "aabbcc") + }) + + it("should allow you to change a clone of a view", () => { + let doc1 = Automerge.init() + doc1 = Automerge.change(doc1, d => (d.key = "value")) + let heads = Automerge.getHeads(doc1) + doc1 = Automerge.change(doc1, d => (d.key = "value2")) + let fork = Automerge.clone(Automerge.view(doc1, heads)) + assert.deepEqual(fork, { key: "value" }) + fork = Automerge.change(fork, d => (d.key = "value3")) + assert.deepEqual(fork, { key: "value3" }) + }) + + it("handle basic set and read on root object", () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, d => { + d.hello = "world" + d.big = "little" + d.zip = "zop" + d.app = "dap" + assert.deepEqual(d, { + hello: "world", + big: "little", + zip: "zop", + app: "dap", + }) + }) + assert.deepEqual(doc2, { + hello: "world", + big: "little", + zip: "zop", + app: "dap", + }) + }) + + it("should be able to insert and delete a large number of properties", () => { + let doc = Automerge.init() + + doc = Automerge.change(doc, doc => { + doc["k1"] = true + }) + + for (let idx = 1; idx <= 200; idx++) { + doc = Automerge.change(doc, doc => { + delete doc["k" + idx] + doc["k" + (idx + 1)] = true + assert(Object.keys(doc).length == 1) + }) + } + }) + + it("can detect an automerge doc with isAutomerge()", () => { + const doc1 = Automerge.from({ sub: { object: true } }) + assert(Automerge.isAutomerge(doc1)) + assert(!Automerge.isAutomerge(doc1.sub)) + assert(!Automerge.isAutomerge("String")) + assert(!Automerge.isAutomerge({ sub: { object: true } })) + assert(!Automerge.isAutomerge(undefined)) + const jsObj = Automerge.toJS(doc1) + assert(!Automerge.isAutomerge(jsObj)) + 
assert.deepEqual(jsObj, doc1) + }) + + it("it should recursively freeze the document if requested", () => { + let doc1 = Automerge.init({ freeze: true }) + let doc2 = Automerge.init() + + assert(Object.isFrozen(doc1)) + assert(!Object.isFrozen(doc2)) + + // will also freeze sub objects + doc1 = Automerge.change( + doc1, + doc => (doc.book = { title: "how to win friends" }) + ) + doc2 = Automerge.merge(doc2, doc1) + assert(Object.isFrozen(doc1)) + assert(Object.isFrozen(doc1.book)) + assert(!Object.isFrozen(doc2)) + assert(!Object.isFrozen(doc2.book)) + + // works on from + let doc3 = Automerge.from({ sub: { obj: "inner" } }, { freeze: true }) + assert(Object.isFrozen(doc3)) + assert(Object.isFrozen(doc3.sub)) + + // works on load + let doc4 = Automerge.load(Automerge.save(doc3), { freeze: true }) + assert(Object.isFrozen(doc4)) + assert(Object.isFrozen(doc4.sub)) + + // follows clone + let doc5 = Automerge.clone(doc4) + assert(Object.isFrozen(doc5)) + assert(Object.isFrozen(doc5.sub)) + + // toJS does not freeze + let exported = Automerge.toJS(doc5) + assert(!Object.isFrozen(exported)) + }) + + it("handle basic sets over many changes", () => { + let doc1 = Automerge.init() + let timestamp = new Date() + let counter = new Automerge.Counter(100) + let bytes = new Uint8Array([10, 11, 12]) + let doc2 = Automerge.change(doc1, d => { + d.hello = "world" + }) + let doc3 = Automerge.change(doc2, d => { + d.counter1 = counter + }) + let doc4 = Automerge.change(doc3, d => { + d.timestamp1 = timestamp + }) + let doc5 = Automerge.change(doc4, d => { + d.app = null + }) + let doc6 = Automerge.change(doc5, d => { + d.bytes1 = bytes + }) + let doc7 = Automerge.change(doc6, d => { + d.uint = new Automerge.Uint(1) + d.int = new Automerge.Int(-1) + d.float64 = new Automerge.Float64(5.5) + d.number1 = 100 + d.number2 = -45.67 + d.true = true + d.false = false + }) + + assert.deepEqual(doc7, { + hello: "world", + true: true, + false: false, + int: -1, + uint: 1, + float64: 5.5, + 
number1: 100, + number2: -45.67, + counter1: counter, + timestamp1: timestamp, + bytes1: bytes, + app: null, + }) + + let changes = Automerge.getAllChanges(doc7) + let t1 = Automerge.init() + let [t2] = Automerge.applyChanges(t1, changes) + assert.deepEqual(doc7, t2) + }) + + it("handle overwrites to values", () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, d => { + d.hello = "world1" + }) + let doc3 = Automerge.change(doc2, d => { + d.hello = "world2" + }) + let doc4 = Automerge.change(doc3, d => { + d.hello = "world3" + }) + let doc5 = Automerge.change(doc4, d => { + d.hello = "world4" + }) + assert.deepEqual(doc5, { hello: "world4" }) + }) + + it("handle set with object value", () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, d => { + d.subobj = { hello: "world", subsubobj: { zip: "zop" } } + }) + assert.deepEqual(doc2, { + subobj: { hello: "world", subsubobj: { zip: "zop" } }, + }) + }) + + it("handle simple list creation", () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, d => (d.list = [])) + assert.deepEqual(doc2, { list: [] }) + }) + + it("handle simple lists", () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, d => { + d.list = [1, 2, 3] + }) + assert.deepEqual(doc2.list.length, 3) + assert.deepEqual(doc2.list[0], 1) + assert.deepEqual(doc2.list[1], 2) + assert.deepEqual(doc2.list[2], 3) + assert.deepEqual(doc2, { list: [1, 2, 3] }) + // assert.deepStrictEqual(Automerge.toJS(doc2), { list: [1,2,3] }) + + let doc3 = Automerge.change(doc2, d => { + d.list[1] = "a" + }) + + assert.deepEqual(doc3.list.length, 3) + assert.deepEqual(doc3.list[0], 1) + assert.deepEqual(doc3.list[1], "a") + assert.deepEqual(doc3.list[2], 3) + assert.deepEqual(doc3, { list: [1, "a", 3] }) + }) + it("handle simple lists", () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, d => { + d.list = [1, 2, 3] + }) + let changes = Automerge.getChanges(doc1, doc2) + let docB1 
= Automerge.init() + let [docB2] = Automerge.applyChanges(docB1, changes) + assert.deepEqual(docB2, doc2) + }) + it("handle text", () => { + let doc1 = Automerge.init() + let doc2 = Automerge.change(doc1, d => { + d.list = "hello" + Automerge.splice(d, "list", 2, 0, "Z") + }) + let changes = Automerge.getChanges(doc1, doc2) + let docB1 = Automerge.init() + let [docB2] = Automerge.applyChanges(docB1, changes) + assert.deepEqual(docB2, doc2) + }) + + it("handle non-text strings", () => { + let doc1 = WASM.create(true) + doc1.put("_root", "text", "hello world") + let doc2 = Automerge.load(doc1.save()) + assert.throws(() => { + Automerge.change(doc2, d => { + Automerge.splice(d, "text", 1, 0, "Z") + }) + }, /Cannot splice/) + }) + + it("have many list methods", () => { + let doc1 = Automerge.from({ list: [1, 2, 3] }) + assert.deepEqual(doc1, { list: [1, 2, 3] }) + let doc2 = Automerge.change(doc1, d => { + d.list.splice(1, 1, 9, 10) + }) + assert.deepEqual(doc2, { list: [1, 9, 10, 3] }) + let doc3 = Automerge.change(doc2, d => { + d.list.push(11, 12) + }) + assert.deepEqual(doc3, { list: [1, 9, 10, 3, 11, 12] }) + let doc4 = Automerge.change(doc3, d => { + d.list.unshift(2, 2) + }) + assert.deepEqual(doc4, { list: [2, 2, 1, 9, 10, 3, 11, 12] }) + let doc5 = Automerge.change(doc4, d => { + d.list.shift() + }) + assert.deepEqual(doc5, { list: [2, 1, 9, 10, 3, 11, 12] }) + let doc6 = Automerge.change(doc5, d => { + d.list.insertAt(3, 100, 101) + }) + assert.deepEqual(doc6, { list: [2, 1, 9, 100, 101, 10, 3, 11, 12] }) + }) + + it("allows access to the backend", () => { + let doc = Automerge.init() + assert.deepEqual(Object.keys(Automerge.getBackend(doc)), ["ptr"]) + }) + + it("lists and text have indexof", () => { + let doc = Automerge.from({ + list: [0, 1, 2, 3, 4, 5, 6], + text: "hello world", + }) + assert.deepEqual(doc.list.indexOf(5), 5) + assert.deepEqual(doc.text.indexOf("world"), 6) + }) + }) + + describe("emptyChange", () => { + it("should generate a hash", () => 
{ + let doc = Automerge.init() + doc = Automerge.change(doc, d => { + d.key = "value" + }) + Automerge.save(doc) + let headsBefore = Automerge.getHeads(doc) + headsBefore.sort() + doc = Automerge.emptyChange(doc, "empty change") + let headsAfter = Automerge.getHeads(doc) + headsAfter.sort() + assert.notDeepEqual(headsBefore, headsAfter) + }) + }) + + describe("proxy lists", () => { + it("behave like arrays", () => { + let doc = Automerge.from({ + chars: ["a", "b", "c"], + numbers: [20, 3, 100], + repeats: [20, 20, 3, 3, 3, 3, 100, 100], + }) + let r1: Array = [] + doc = Automerge.change(doc, d => { + assert.deepEqual((d.chars as any[]).concat([1, 2]), [ + "a", + "b", + "c", + 1, + 2, + ]) + assert.deepEqual( + d.chars.map(n => n + "!"), + ["a!", "b!", "c!"] + ) + assert.deepEqual( + d.numbers.map(n => n + 10), + [30, 13, 110] + ) + assert.deepEqual(d.numbers.toString(), "20,3,100") + assert.deepEqual(d.numbers.toLocaleString(), "20,3,100") + assert.deepEqual( + d.numbers.forEach((n: number) => r1.push(n)), + undefined + ) + assert.deepEqual( + d.numbers.every(n => n > 1), + true + ) + assert.deepEqual( + d.numbers.every(n => n > 10), + false + ) + assert.deepEqual( + d.numbers.filter(n => n > 10), + [20, 100] + ) + assert.deepEqual( + d.repeats.find(n => n < 10), + 3 + ) + assert.deepEqual( + d.repeats.find(n => n < 10), + 3 + ) + assert.deepEqual( + d.repeats.find(n => n < 0), + undefined + ) + assert.deepEqual( + d.repeats.findIndex(n => n < 10), + 2 + ) + assert.deepEqual( + d.repeats.findIndex(n => n < 0), + -1 + ) + assert.deepEqual( + d.repeats.findIndex(n => n < 10), + 2 + ) + assert.deepEqual( + d.repeats.findIndex(n => n < 0), + -1 + ) + assert.deepEqual(d.numbers.includes(3), true) + assert.deepEqual(d.numbers.includes(-3), false) + assert.deepEqual(d.numbers.join("|"), "20|3|100") + assert.deepEqual(d.numbers.join(), "20,3,100") + assert.deepEqual( + d.numbers.some(f => f === 3), + true + ) + assert.deepEqual( + d.numbers.some(f => f < 0), + false + ) + 
assert.deepEqual( + d.numbers.reduce((sum, n) => sum + n, 100), + 223 + ) + assert.deepEqual( + d.repeats.reduce((sum, n) => sum + n, 100), + 352 + ) + assert.deepEqual( + d.chars.reduce((sum, n) => sum + n, "="), + "=abc" + ) + assert.deepEqual( + d.chars.reduceRight((sum, n) => sum + n, "="), + "=cba" + ) + assert.deepEqual( + d.numbers.reduceRight((sum, n) => sum + n, 100), + 223 + ) + assert.deepEqual(d.repeats.lastIndexOf(3), 5) + assert.deepEqual(d.repeats.lastIndexOf(3, 3), 3) + }) + doc = Automerge.change(doc, d => { + assert.deepEqual(d.numbers.fill(-1, 1, 2), [20, -1, 100]) + assert.deepEqual(d.chars.fill("z", 1, 100), ["a", "z", "z"]) + }) + assert.deepEqual(r1, [20, 3, 100]) + assert.deepEqual(doc.numbers, [20, -1, 100]) + assert.deepEqual(doc.chars, ["a", "z", "z"]) + }) + }) + + it("should obtain the same conflicts, regardless of merge order", () => { + let s1 = Automerge.init() + let s2 = Automerge.init() + s1 = Automerge.change(s1, doc => { + doc.x = 1 + doc.y = 2 + }) + s2 = Automerge.change(s2, doc => { + doc.x = 3 + doc.y = 4 + }) + const m1 = Automerge.merge(Automerge.clone(s1), Automerge.clone(s2)) + const m2 = Automerge.merge(Automerge.clone(s2), Automerge.clone(s1)) + assert.deepStrictEqual( + Automerge.getConflicts(m1, "x"), + Automerge.getConflicts(m2, "x") + ) + }) + + describe("getObjectId", () => { + let s1 = Automerge.from({ + string: "string", + number: 1, + null: null, + date: new Date(), + counter: new Automerge.Counter(), + bytes: new Uint8Array(10), + text: "", + list: [], + map: {}, + }) + + it("should return null for scalar values", () => { + assert.equal(Automerge.getObjectId(s1.string), null) + assert.equal(Automerge.getObjectId(s1.number), null) + assert.equal(Automerge.getObjectId(s1.null!), null) + assert.equal(Automerge.getObjectId(s1.date), null) + assert.equal(Automerge.getObjectId(s1.counter), null) + assert.equal(Automerge.getObjectId(s1.bytes), null) + }) + + it("should return _root for the root object", () => { + 
assert.equal(Automerge.getObjectId(s1), "_root") + }) + + it("should return non-null for map, list, text, and objects", () => { + assert.equal(Automerge.getObjectId(s1.text), null) + assert.notEqual(Automerge.getObjectId(s1.list), null) + assert.notEqual(Automerge.getObjectId(s1.map), null) + }) + }) +}) diff --git a/javascript/test/extra_api_tests.ts b/javascript/test/extra_api_tests.ts new file mode 100644 index 00000000..84fa4c39 --- /dev/null +++ b/javascript/test/extra_api_tests.ts @@ -0,0 +1,28 @@ +import * as assert from "assert" +import { unstable as Automerge } from "../src" + +describe("Automerge", () => { + describe("basics", () => { + it("should allow you to load incrementally", () => { + let doc1 = Automerge.from({ foo: "bar" }) + let doc2 = Automerge.init() + doc2 = Automerge.loadIncremental(doc2, Automerge.save(doc1)) + doc1 = Automerge.change(doc1, d => (d.foo2 = "bar2")) + doc2 = Automerge.loadIncremental( + doc2, + Automerge.getBackend(doc1).saveIncremental() + ) + doc1 = Automerge.change(doc1, d => (d.foo = "bar2")) + doc2 = Automerge.loadIncremental( + doc2, + Automerge.getBackend(doc1).saveIncremental() + ) + doc1 = Automerge.change(doc1, d => (d.x = "y")) + doc2 = Automerge.loadIncremental( + doc2, + Automerge.getBackend(doc1).saveIncremental() + ) + assert.deepEqual(doc1, doc2) + }) + }) +}) diff --git a/automerge-js/test/helpers.ts b/javascript/test/helpers.ts similarity index 56% rename from automerge-js/test/helpers.ts rename to javascript/test/helpers.ts index d5292130..df76e558 100644 --- a/automerge-js/test/helpers.ts +++ b/javascript/test/helpers.ts @@ -1,16 +1,21 @@ -import * as assert from 'assert' -import { Encoder } from './legacy/encoding' +import * as assert from "assert" +import { Encoder } from "./legacy/encoding" // Assertion that succeeds if the first argument deepStrictEquals at least one of the // subsequent arguments (but we don't care which one) -function assertEqualsOneOf(actual, ...expected) { +export function 
assertEqualsOneOf(actual, ...expected) { assert(expected.length > 0) for (let i = 0; i < expected.length; i++) { try { assert.deepStrictEqual(actual, expected[i]) return // if we get here without an exception, that means success } catch (e) { - if (!e.name.match(/^AssertionError/) || i === expected.length - 1) throw e + if (e instanceof assert.AssertionError) { + if (!e.name.match(/^AssertionError/) || i === expected.length - 1) + throw e + } else { + throw e + } } } } @@ -19,14 +24,13 @@ function assertEqualsOneOf(actual, ...expected) { * Asserts that the byte array maintained by `encoder` contains the same byte * sequence as the array `bytes`. */ -function checkEncoded(encoder, bytes, detail) { - const encoded = (encoder instanceof Encoder) ? encoder.buffer : encoder +export function checkEncoded(encoder, bytes, detail?) { + const encoded = encoder instanceof Encoder ? encoder.buffer : encoder const expected = new Uint8Array(bytes) - const message = (detail ? `${detail}: ` : '') + `${encoded} expected to equal ${expected}` + const message = + (detail ? 
`${detail}: ` : "") + `${encoded} expected to equal ${expected}` assert(encoded.byteLength === expected.byteLength, message) for (let i = 0; i < encoded.byteLength; i++) { assert(encoded[i] === expected[i], message) } } - -module.exports = { assertEqualsOneOf, checkEncoded } diff --git a/automerge-js/test/legacy/columnar.js b/javascript/test/legacy/columnar.js similarity index 64% rename from automerge-js/test/legacy/columnar.js rename to javascript/test/legacy/columnar.js index b97e6275..6a9b5874 100644 --- a/automerge-js/test/legacy/columnar.js +++ b/javascript/test/legacy/columnar.js @@ -1,9 +1,18 @@ -const pako = require('pako') -const { copyObject, parseOpId, equalBytes } = require('./common') +const pako = require("pako") +const { copyObject, parseOpId, equalBytes } = require("./common") const { - utf8ToString, hexStringToBytes, bytesToHexString, - Encoder, Decoder, RLEEncoder, RLEDecoder, DeltaEncoder, DeltaDecoder, BooleanEncoder, BooleanDecoder -} = require('./encoding') + utf8ToString, + hexStringToBytes, + bytesToHexString, + Encoder, + Decoder, + RLEEncoder, + RLEDecoder, + DeltaEncoder, + DeltaDecoder, + BooleanEncoder, + BooleanDecoder, +} = require("./encoding") // Maybe we should be using the platform's built-in hash implementation? // Node has the crypto module: https://nodejs.org/api/crypto.html and browsers have @@ -18,7 +27,7 @@ const { // - It does not need a secure source of random bits and does not need to be // constant-time; // - I have reviewed the source code and it seems pretty reasonable. 
-const { Hash } = require('fast-sha256') +const { Hash } = require("fast-sha256") // These bytes don't mean anything, they were generated randomly const MAGIC_BYTES = new Uint8Array([0x85, 0x6f, 0x4a, 0x83]) @@ -33,8 +42,14 @@ const DEFLATE_MIN_SIZE = 256 // The least-significant 3 bits of a columnId indicate its datatype const COLUMN_TYPE = { - GROUP_CARD: 0, ACTOR_ID: 1, INT_RLE: 2, INT_DELTA: 3, BOOLEAN: 4, - STRING_RLE: 5, VALUE_LEN: 6, VALUE_RAW: 7 + GROUP_CARD: 0, + ACTOR_ID: 1, + INT_RLE: 2, + INT_DELTA: 3, + BOOLEAN: 4, + STRING_RLE: 5, + VALUE_LEN: 6, + VALUE_RAW: 7, } // The 4th-least-significant bit of a columnId is set if the column is DEFLATE-compressed @@ -44,53 +59,77 @@ const COLUMN_TYPE_DEFLATE = 8 // one of the following types in VALUE_TYPE. The higher bits indicate the length of the value in the // associated VALUE_RAW column (in bytes). const VALUE_TYPE = { - NULL: 0, FALSE: 1, TRUE: 2, LEB128_UINT: 3, LEB128_INT: 4, IEEE754: 5, - UTF8: 6, BYTES: 7, COUNTER: 8, TIMESTAMP: 9, MIN_UNKNOWN: 10, MAX_UNKNOWN: 15 + NULL: 0, + FALSE: 1, + TRUE: 2, + LEB128_UINT: 3, + LEB128_INT: 4, + IEEE754: 5, + UTF8: 6, + BYTES: 7, + COUNTER: 8, + TIMESTAMP: 9, + MIN_UNKNOWN: 10, + MAX_UNKNOWN: 15, } // make* actions must be at even-numbered indexes in this list -const ACTIONS = ['makeMap', 'set', 'makeList', 'del', 'makeText', 'inc', 'makeTable', 'link'] +const ACTIONS = [ + "makeMap", + "set", + "makeList", + "del", + "makeText", + "inc", + "makeTable", + "link", +] -const OBJECT_TYPE = {makeMap: 'map', makeList: 'list', makeText: 'text', makeTable: 'table'} +const OBJECT_TYPE = { + makeMap: "map", + makeList: "list", + makeText: "text", + makeTable: "table", +} const COMMON_COLUMNS = [ - {columnName: 'objActor', columnId: 0 << 4 | COLUMN_TYPE.ACTOR_ID}, - {columnName: 'objCtr', columnId: 0 << 4 | COLUMN_TYPE.INT_RLE}, - {columnName: 'keyActor', columnId: 1 << 4 | COLUMN_TYPE.ACTOR_ID}, - {columnName: 'keyCtr', columnId: 1 << 4 | COLUMN_TYPE.INT_DELTA}, - 
{columnName: 'keyStr', columnId: 1 << 4 | COLUMN_TYPE.STRING_RLE}, - {columnName: 'idActor', columnId: 2 << 4 | COLUMN_TYPE.ACTOR_ID}, - {columnName: 'idCtr', columnId: 2 << 4 | COLUMN_TYPE.INT_DELTA}, - {columnName: 'insert', columnId: 3 << 4 | COLUMN_TYPE.BOOLEAN}, - {columnName: 'action', columnId: 4 << 4 | COLUMN_TYPE.INT_RLE}, - {columnName: 'valLen', columnId: 5 << 4 | COLUMN_TYPE.VALUE_LEN}, - {columnName: 'valRaw', columnId: 5 << 4 | COLUMN_TYPE.VALUE_RAW}, - {columnName: 'chldActor', columnId: 6 << 4 | COLUMN_TYPE.ACTOR_ID}, - {columnName: 'chldCtr', columnId: 6 << 4 | COLUMN_TYPE.INT_DELTA} + { columnName: "objActor", columnId: (0 << 4) | COLUMN_TYPE.ACTOR_ID }, + { columnName: "objCtr", columnId: (0 << 4) | COLUMN_TYPE.INT_RLE }, + { columnName: "keyActor", columnId: (1 << 4) | COLUMN_TYPE.ACTOR_ID }, + { columnName: "keyCtr", columnId: (1 << 4) | COLUMN_TYPE.INT_DELTA }, + { columnName: "keyStr", columnId: (1 << 4) | COLUMN_TYPE.STRING_RLE }, + { columnName: "idActor", columnId: (2 << 4) | COLUMN_TYPE.ACTOR_ID }, + { columnName: "idCtr", columnId: (2 << 4) | COLUMN_TYPE.INT_DELTA }, + { columnName: "insert", columnId: (3 << 4) | COLUMN_TYPE.BOOLEAN }, + { columnName: "action", columnId: (4 << 4) | COLUMN_TYPE.INT_RLE }, + { columnName: "valLen", columnId: (5 << 4) | COLUMN_TYPE.VALUE_LEN }, + { columnName: "valRaw", columnId: (5 << 4) | COLUMN_TYPE.VALUE_RAW }, + { columnName: "chldActor", columnId: (6 << 4) | COLUMN_TYPE.ACTOR_ID }, + { columnName: "chldCtr", columnId: (6 << 4) | COLUMN_TYPE.INT_DELTA }, ] const CHANGE_COLUMNS = COMMON_COLUMNS.concat([ - {columnName: 'predNum', columnId: 7 << 4 | COLUMN_TYPE.GROUP_CARD}, - {columnName: 'predActor', columnId: 7 << 4 | COLUMN_TYPE.ACTOR_ID}, - {columnName: 'predCtr', columnId: 7 << 4 | COLUMN_TYPE.INT_DELTA} + { columnName: "predNum", columnId: (7 << 4) | COLUMN_TYPE.GROUP_CARD }, + { columnName: "predActor", columnId: (7 << 4) | COLUMN_TYPE.ACTOR_ID }, + { columnName: "predCtr", columnId: (7 << 4) | 
COLUMN_TYPE.INT_DELTA }, ]) const DOC_OPS_COLUMNS = COMMON_COLUMNS.concat([ - {columnName: 'succNum', columnId: 8 << 4 | COLUMN_TYPE.GROUP_CARD}, - {columnName: 'succActor', columnId: 8 << 4 | COLUMN_TYPE.ACTOR_ID}, - {columnName: 'succCtr', columnId: 8 << 4 | COLUMN_TYPE.INT_DELTA} + { columnName: "succNum", columnId: (8 << 4) | COLUMN_TYPE.GROUP_CARD }, + { columnName: "succActor", columnId: (8 << 4) | COLUMN_TYPE.ACTOR_ID }, + { columnName: "succCtr", columnId: (8 << 4) | COLUMN_TYPE.INT_DELTA }, ]) const DOCUMENT_COLUMNS = [ - {columnName: 'actor', columnId: 0 << 4 | COLUMN_TYPE.ACTOR_ID}, - {columnName: 'seq', columnId: 0 << 4 | COLUMN_TYPE.INT_DELTA}, - {columnName: 'maxOp', columnId: 1 << 4 | COLUMN_TYPE.INT_DELTA}, - {columnName: 'time', columnId: 2 << 4 | COLUMN_TYPE.INT_DELTA}, - {columnName: 'message', columnId: 3 << 4 | COLUMN_TYPE.STRING_RLE}, - {columnName: 'depsNum', columnId: 4 << 4 | COLUMN_TYPE.GROUP_CARD}, - {columnName: 'depsIndex', columnId: 4 << 4 | COLUMN_TYPE.INT_DELTA}, - {columnName: 'extraLen', columnId: 5 << 4 | COLUMN_TYPE.VALUE_LEN}, - {columnName: 'extraRaw', columnId: 5 << 4 | COLUMN_TYPE.VALUE_RAW} + { columnName: "actor", columnId: (0 << 4) | COLUMN_TYPE.ACTOR_ID }, + { columnName: "seq", columnId: (0 << 4) | COLUMN_TYPE.INT_DELTA }, + { columnName: "maxOp", columnId: (1 << 4) | COLUMN_TYPE.INT_DELTA }, + { columnName: "time", columnId: (2 << 4) | COLUMN_TYPE.INT_DELTA }, + { columnName: "message", columnId: (3 << 4) | COLUMN_TYPE.STRING_RLE }, + { columnName: "depsNum", columnId: (4 << 4) | COLUMN_TYPE.GROUP_CARD }, + { columnName: "depsIndex", columnId: (4 << 4) | COLUMN_TYPE.INT_DELTA }, + { columnName: "extraLen", columnId: (5 << 4) | COLUMN_TYPE.VALUE_LEN }, + { columnName: "extraRaw", columnId: (5 << 4) | COLUMN_TYPE.VALUE_RAW }, ] /** @@ -102,8 +141,8 @@ function actorIdToActorNum(opId, actorIds) { if (!opId || !opId.actorId) return opId const counter = opId.counter const actorNum = actorIds.indexOf(opId.actorId) - if 
(actorNum < 0) throw new RangeError('missing actorId') // should not happen - return {counter, actorNum, actorId: opId.actorId} + if (actorNum < 0) throw new RangeError("missing actorId") // should not happen + return { counter, actorNum, actorId: opId.actorId } } /** @@ -131,15 +170,16 @@ function compareParsedOpIds(id1, id2) { * false. */ function parseAllOpIds(changes, single) { - const actors = {}, newChanges = [] + const actors = {}, + newChanges = [] for (let change of changes) { change = copyObject(change) actors[change.actor] = true change.ops = expandMultiOps(change.ops, change.startOp, change.actor) change.ops = change.ops.map(op => { op = copyObject(op) - if (op.obj !== '_root') op.obj = parseOpId(op.obj) - if (op.elemId && op.elemId !== '_head') op.elemId = parseOpId(op.elemId) + if (op.obj !== "_root") op.obj = parseOpId(op.obj) + if (op.elemId && op.elemId !== "_head") op.elemId = parseOpId(op.elemId) if (op.child) op.child = parseOpId(op.child) if (op.pred) op.pred = op.pred.map(parseOpId) if (op.obj.actorId) actors[op.obj.actorId] = true @@ -153,20 +193,26 @@ function parseAllOpIds(changes, single) { let actorIds = Object.keys(actors).sort() if (single) { - actorIds = [changes[0].actor].concat(actorIds.filter(actor => actor !== changes[0].actor)) + actorIds = [changes[0].actor].concat( + actorIds.filter(actor => actor !== changes[0].actor) + ) } for (let change of newChanges) { change.actorNum = actorIds.indexOf(change.actor) for (let i = 0; i < change.ops.length; i++) { let op = change.ops[i] - op.id = {counter: change.startOp + i, actorNum: change.actorNum, actorId: change.actor} + op.id = { + counter: change.startOp + i, + actorNum: change.actorNum, + actorId: change.actor, + } op.obj = actorIdToActorNum(op.obj, actorIds) op.elemId = actorIdToActorNum(op.elemId, actorIds) op.child = actorIdToActorNum(op.child, actorIds) op.pred = op.pred.map(pred => actorIdToActorNum(pred, actorIds)) } } - return {changes: newChanges, actorIds} + return { 
changes: newChanges, actorIds } } /** @@ -174,14 +220,16 @@ function parseAllOpIds(changes, single) { * `objActor` and `objCtr`. */ function encodeObjectId(op, columns) { - if (op.obj === '_root') { + if (op.obj === "_root") { columns.objActor.appendValue(null) columns.objCtr.appendValue(null) } else if (op.obj.actorNum >= 0 && op.obj.counter > 0) { columns.objActor.appendValue(op.obj.actorNum) columns.objCtr.appendValue(op.obj.counter) } else { - throw new RangeError(`Unexpected objectId reference: ${JSON.stringify(op.obj)}`) + throw new RangeError( + `Unexpected objectId reference: ${JSON.stringify(op.obj)}` + ) } } @@ -194,7 +242,7 @@ function encodeOperationKey(op, columns) { columns.keyActor.appendValue(null) columns.keyCtr.appendValue(null) columns.keyStr.appendValue(op.key) - } else if (op.elemId === '_head' && op.insert) { + } else if (op.elemId === "_head" && op.insert) { columns.keyActor.appendValue(null) columns.keyCtr.appendValue(0) columns.keyStr.appendValue(null) @@ -214,7 +262,7 @@ function encodeOperationAction(op, columns) { const actionCode = ACTIONS.indexOf(op.action) if (actionCode >= 0) { columns.action.appendValue(actionCode) - } else if (typeof op.action === 'number') { + } else if (typeof op.action === "number") { columns.action.appendValue(op.action) } else { throw new RangeError(`Unexpected operation action: ${op.action}`) @@ -228,26 +276,32 @@ function encodeOperationAction(op, columns) { function getNumberTypeAndValue(op) { switch (op.datatype) { case "counter": - return [ VALUE_TYPE.COUNTER, op.value ] + return [VALUE_TYPE.COUNTER, op.value] case "timestamp": - return [ VALUE_TYPE.TIMESTAMP, op.value ] + return [VALUE_TYPE.TIMESTAMP, op.value] case "uint": - return [ VALUE_TYPE.LEB128_UINT, op.value ] + return [VALUE_TYPE.LEB128_UINT, op.value] case "int": - return [ VALUE_TYPE.LEB128_INT, op.value ] + return [VALUE_TYPE.LEB128_INT, op.value] case "float64": { - const buf64 = new ArrayBuffer(8), view64 = new DataView(buf64) + const 
buf64 = new ArrayBuffer(8), + view64 = new DataView(buf64) view64.setFloat64(0, op.value, true) - return [ VALUE_TYPE.IEEE754, new Uint8Array(buf64) ] + return [VALUE_TYPE.IEEE754, new Uint8Array(buf64)] } default: // increment operators get resolved here ... - if (Number.isInteger(op.value) && op.value <= Number.MAX_SAFE_INTEGER && op.value >= Number.MIN_SAFE_INTEGER) { - return [ VALUE_TYPE.LEB128_INT, op.value ] + if ( + Number.isInteger(op.value) && + op.value <= Number.MAX_SAFE_INTEGER && + op.value >= Number.MIN_SAFE_INTEGER + ) { + return [VALUE_TYPE.LEB128_INT, op.value] } else { - const buf64 = new ArrayBuffer(8), view64 = new DataView(buf64) + const buf64 = new ArrayBuffer(8), + view64 = new DataView(buf64) view64.setFloat64(0, op.value, true) - return [ VALUE_TYPE.IEEE754, new Uint8Array(buf64) ] + return [VALUE_TYPE.IEEE754, new Uint8Array(buf64)] } } } @@ -257,19 +311,21 @@ function getNumberTypeAndValue(op) { * `valLen` and `valRaw`. */ function encodeValue(op, columns) { - if ((op.action !== 'set' && op.action !== 'inc') || op.value === null) { + if ((op.action !== "set" && op.action !== "inc") || op.value === null) { columns.valLen.appendValue(VALUE_TYPE.NULL) } else if (op.value === false) { columns.valLen.appendValue(VALUE_TYPE.FALSE) } else if (op.value === true) { columns.valLen.appendValue(VALUE_TYPE.TRUE) - } else if (typeof op.value === 'string') { + } else if (typeof op.value === "string") { const numBytes = columns.valRaw.appendRawString(op.value) - columns.valLen.appendValue(numBytes << 4 | VALUE_TYPE.UTF8) + columns.valLen.appendValue((numBytes << 4) | VALUE_TYPE.UTF8) } else if (ArrayBuffer.isView(op.value)) { - const numBytes = columns.valRaw.appendRawBytes(new Uint8Array(op.value.buffer)) - columns.valLen.appendValue(numBytes << 4 | VALUE_TYPE.BYTES) - } else if (typeof op.value === 'number') { + const numBytes = columns.valRaw.appendRawBytes( + new Uint8Array(op.value.buffer) + ) + columns.valLen.appendValue((numBytes << 4) | 
VALUE_TYPE.BYTES) + } else if (typeof op.value === "number") { let [typeTag, value] = getNumberTypeAndValue(op) let numBytes if (typeTag === VALUE_TYPE.LEB128_UINT) { @@ -279,13 +335,19 @@ function encodeValue(op, columns) { } else { numBytes = columns.valRaw.appendInt53(value) } - columns.valLen.appendValue(numBytes << 4 | typeTag) - } else if (typeof op.datatype === 'number' && op.datatype >= VALUE_TYPE.MIN_UNKNOWN && - op.datatype <= VALUE_TYPE.MAX_UNKNOWN && op.value instanceof Uint8Array) { + columns.valLen.appendValue((numBytes << 4) | typeTag) + } else if ( + typeof op.datatype === "number" && + op.datatype >= VALUE_TYPE.MIN_UNKNOWN && + op.datatype <= VALUE_TYPE.MAX_UNKNOWN && + op.value instanceof Uint8Array + ) { const numBytes = columns.valRaw.appendRawBytes(op.value) - columns.valLen.appendValue(numBytes << 4 | op.datatype) + columns.valLen.appendValue((numBytes << 4) | op.datatype) } else if (op.datatype) { - throw new RangeError(`Unknown datatype ${op.datatype} for value ${op.value}`) + throw new RangeError( + `Unknown datatype ${op.datatype} for value ${op.value}` + ) } else { throw new RangeError(`Unsupported value in operation: ${op.value}`) } @@ -299,31 +361,37 @@ function encodeValue(op, columns) { */ function decodeValue(sizeTag, bytes) { if (sizeTag === VALUE_TYPE.NULL) { - return {value: null} + return { value: null } } else if (sizeTag === VALUE_TYPE.FALSE) { - return {value: false} + return { value: false } } else if (sizeTag === VALUE_TYPE.TRUE) { - return {value: true} + return { value: true } } else if (sizeTag % 16 === VALUE_TYPE.UTF8) { - return {value: utf8ToString(bytes)} + return { value: utf8ToString(bytes) } } else { if (sizeTag % 16 === VALUE_TYPE.LEB128_UINT) { - return {value: new Decoder(bytes).readUint53(), datatype: "uint"} + return { value: new Decoder(bytes).readUint53(), datatype: "uint" } } else if (sizeTag % 16 === VALUE_TYPE.LEB128_INT) { - return {value: new Decoder(bytes).readInt53(), datatype: "int"} + return { 
value: new Decoder(bytes).readInt53(), datatype: "int" } } else if (sizeTag % 16 === VALUE_TYPE.IEEE754) { - const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength) + const view = new DataView( + bytes.buffer, + bytes.byteOffset, + bytes.byteLength + ) if (bytes.byteLength === 8) { - return {value: view.getFloat64(0, true), datatype: "float64"} + return { value: view.getFloat64(0, true), datatype: "float64" } } else { - throw new RangeError(`Invalid length for floating point number: ${bytes.byteLength}`) + throw new RangeError( + `Invalid length for floating point number: ${bytes.byteLength}` + ) } } else if (sizeTag % 16 === VALUE_TYPE.COUNTER) { - return {value: new Decoder(bytes).readInt53(), datatype: 'counter'} + return { value: new Decoder(bytes).readInt53(), datatype: "counter" } } else if (sizeTag % 16 === VALUE_TYPE.TIMESTAMP) { - return {value: new Decoder(bytes).readInt53(), datatype: 'timestamp'} + return { value: new Decoder(bytes).readInt53(), datatype: "timestamp" } } else { - return {value: bytes, datatype: sizeTag % 16} + return { value: bytes, datatype: sizeTag % 16 } } } } @@ -338,20 +406,24 @@ function decodeValue(sizeTag, bytes) { */ function decodeValueColumns(columns, colIndex, actorIds, result) { const { columnId, columnName, decoder } = columns[colIndex] - if (columnId % 8 === COLUMN_TYPE.VALUE_LEN && colIndex + 1 < columns.length && - columns[colIndex + 1].columnId === columnId + 1) { + if ( + columnId % 8 === COLUMN_TYPE.VALUE_LEN && + colIndex + 1 < columns.length && + columns[colIndex + 1].columnId === columnId + 1 + ) { const sizeTag = decoder.readValue() const rawValue = columns[colIndex + 1].decoder.readRawBytes(sizeTag >> 4) const { value, datatype } = decodeValue(sizeTag, rawValue) result[columnName] = value - if (datatype) result[columnName + '_datatype'] = datatype + if (datatype) result[columnName + "_datatype"] = datatype return 2 } else if (columnId % 8 === COLUMN_TYPE.ACTOR_ID) { const actorNum = 
decoder.readValue() if (actorNum === null) { result[columnName] = null } else { - if (!actorIds[actorNum]) throw new RangeError(`No actor index ${actorNum}`) + if (!actorIds[actorNum]) + throw new RangeError(`No actor index ${actorNum}`) result[columnName] = actorIds[actorNum] } } else { @@ -369,29 +441,29 @@ function decodeValueColumns(columns, colIndex, actorIds, result) { */ function encodeOps(ops, forDocument) { const columns = { - objActor : new RLEEncoder('uint'), - objCtr : new RLEEncoder('uint'), - keyActor : new RLEEncoder('uint'), - keyCtr : new DeltaEncoder(), - keyStr : new RLEEncoder('utf8'), - insert : new BooleanEncoder(), - action : new RLEEncoder('uint'), - valLen : new RLEEncoder('uint'), - valRaw : new Encoder(), - chldActor : new RLEEncoder('uint'), - chldCtr : new DeltaEncoder() + objActor: new RLEEncoder("uint"), + objCtr: new RLEEncoder("uint"), + keyActor: new RLEEncoder("uint"), + keyCtr: new DeltaEncoder(), + keyStr: new RLEEncoder("utf8"), + insert: new BooleanEncoder(), + action: new RLEEncoder("uint"), + valLen: new RLEEncoder("uint"), + valRaw: new Encoder(), + chldActor: new RLEEncoder("uint"), + chldCtr: new DeltaEncoder(), } if (forDocument) { - columns.idActor = new RLEEncoder('uint') - columns.idCtr = new DeltaEncoder() - columns.succNum = new RLEEncoder('uint') - columns.succActor = new RLEEncoder('uint') - columns.succCtr = new DeltaEncoder() + columns.idActor = new RLEEncoder("uint") + columns.idCtr = new DeltaEncoder() + columns.succNum = new RLEEncoder("uint") + columns.succActor = new RLEEncoder("uint") + columns.succCtr = new DeltaEncoder() } else { - columns.predNum = new RLEEncoder('uint') - columns.predCtr = new DeltaEncoder() - columns.predActor = new RLEEncoder('uint') + columns.predNum = new RLEEncoder("uint") + columns.predCtr = new DeltaEncoder() + columns.predActor = new RLEEncoder("uint") } for (let op of ops) { @@ -429,17 +501,22 @@ function encodeOps(ops, forDocument) { } let columnList = [] - for (let 
{columnName, columnId} of forDocument ? DOC_OPS_COLUMNS : CHANGE_COLUMNS) { - if (columns[columnName]) columnList.push({columnId, columnName, encoder: columns[columnName]}) + for (let { columnName, columnId } of forDocument + ? DOC_OPS_COLUMNS + : CHANGE_COLUMNS) { + if (columns[columnName]) + columnList.push({ columnId, columnName, encoder: columns[columnName] }) } return columnList.sort((a, b) => a.columnId - b.columnId) } function validDatatype(value, datatype) { if (datatype === undefined) { - return (typeof value === 'string' || typeof value === 'boolean' || value === null) + return ( + typeof value === "string" || typeof value === "boolean" || value === null + ) } else { - return typeof value === 'number' + return typeof value === "number" } } @@ -447,23 +524,37 @@ function expandMultiOps(ops, startOp, actor) { let opNum = startOp let expandedOps = [] for (const op of ops) { - if (op.action === 'set' && op.values && op.insert) { - if (op.pred.length !== 0) throw new RangeError('multi-insert pred must be empty') + if (op.action === "set" && op.values && op.insert) { + if (op.pred.length !== 0) + throw new RangeError("multi-insert pred must be empty") let lastElemId = op.elemId const datatype = op.datatype for (const value of op.values) { - if (!validDatatype(value, datatype)) throw new RangeError(`Decode failed: bad value/datatype association (${value},${datatype})`) - expandedOps.push({action: 'set', obj: op.obj, elemId: lastElemId, datatype, value, pred: [], insert: true}) + if (!validDatatype(value, datatype)) + throw new RangeError( + `Decode failed: bad value/datatype association (${value},${datatype})` + ) + expandedOps.push({ + action: "set", + obj: op.obj, + elemId: lastElemId, + datatype, + value, + pred: [], + insert: true, + }) lastElemId = `${opNum}@${actor}` opNum += 1 } - } else if (op.action === 'del' && op.multiOp > 1) { - if (op.pred.length !== 1) throw new RangeError('multiOp deletion must have exactly one pred') - const startElemId = 
parseOpId(op.elemId), startPred = parseOpId(op.pred[0]) + } else if (op.action === "del" && op.multiOp > 1) { + if (op.pred.length !== 1) + throw new RangeError("multiOp deletion must have exactly one pred") + const startElemId = parseOpId(op.elemId), + startPred = parseOpId(op.pred[0]) for (let i = 0; i < op.multiOp; i++) { const elemId = `${startElemId.counter + i}@${startElemId.actorId}` const pred = [`${startPred.counter + i}@${startPred.actorId}`] - expandedOps.push({action: 'del', obj: op.obj, elemId, pred}) + expandedOps.push({ action: "del", obj: op.obj, elemId, pred }) opNum += 1 } } else { @@ -483,26 +574,44 @@ function expandMultiOps(ops, startOp, actor) { function decodeOps(ops, forDocument) { const newOps = [] for (let op of ops) { - const obj = (op.objCtr === null) ? '_root' : `${op.objCtr}@${op.objActor}` - const elemId = op.keyStr ? undefined : (op.keyCtr === 0 ? '_head' : `${op.keyCtr}@${op.keyActor}`) + const obj = op.objCtr === null ? "_root" : `${op.objCtr}@${op.objActor}` + const elemId = op.keyStr + ? undefined + : op.keyCtr === 0 + ? "_head" + : `${op.keyCtr}@${op.keyActor}` const action = ACTIONS[op.action] || op.action - const newOp = elemId ? {obj, elemId, action} : {obj, key: op.keyStr, action} + const newOp = elemId + ? 
{ obj, elemId, action } + : { obj, key: op.keyStr, action } newOp.insert = !!op.insert - if (ACTIONS[op.action] === 'set' || ACTIONS[op.action] === 'inc') { + if (ACTIONS[op.action] === "set" || ACTIONS[op.action] === "inc") { newOp.value = op.valLen if (op.valLen_datatype) newOp.datatype = op.valLen_datatype } if (!!op.chldCtr !== !!op.chldActor) { - throw new RangeError(`Mismatched child columns: ${op.chldCtr} and ${op.chldActor}`) + throw new RangeError( + `Mismatched child columns: ${op.chldCtr} and ${op.chldActor}` + ) } if (op.chldCtr !== null) newOp.child = `${op.chldCtr}@${op.chldActor}` if (forDocument) { newOp.id = `${op.idCtr}@${op.idActor}` newOp.succ = op.succNum.map(succ => `${succ.succCtr}@${succ.succActor}`) - checkSortedOpIds(op.succNum.map(succ => ({counter: succ.succCtr, actorId: succ.succActor}))) + checkSortedOpIds( + op.succNum.map(succ => ({ + counter: succ.succCtr, + actorId: succ.succActor, + })) + ) } else { newOp.pred = op.predNum.map(pred => `${pred.predCtr}@${pred.predActor}`) - checkSortedOpIds(op.predNum.map(pred => ({counter: pred.predCtr, actorId: pred.predActor}))) + checkSortedOpIds( + op.predNum.map(pred => ({ + counter: pred.predCtr, + actorId: pred.predActor, + })) + ) } newOps.push(newOp) } @@ -516,7 +625,7 @@ function checkSortedOpIds(opIds) { let last = null for (let opId of opIds) { if (last && compareParsedOpIds(last, opId) !== -1) { - throw new RangeError('operation IDs are not in ascending order') + throw new RangeError("operation IDs are not in ascending order") } last = opId } @@ -528,11 +637,11 @@ function encoderByColumnId(columnId) { } else if ((columnId & 7) === COLUMN_TYPE.BOOLEAN) { return new BooleanEncoder() } else if ((columnId & 7) === COLUMN_TYPE.STRING_RLE) { - return new RLEEncoder('utf8') + return new RLEEncoder("utf8") } else if ((columnId & 7) === COLUMN_TYPE.VALUE_RAW) { return new Encoder() } else { - return new RLEEncoder('uint') + return new RLEEncoder("uint") } } @@ -542,31 +651,49 @@ function 
decoderByColumnId(columnId, buffer) { } else if ((columnId & 7) === COLUMN_TYPE.BOOLEAN) { return new BooleanDecoder(buffer) } else if ((columnId & 7) === COLUMN_TYPE.STRING_RLE) { - return new RLEDecoder('utf8', buffer) + return new RLEDecoder("utf8", buffer) } else if ((columnId & 7) === COLUMN_TYPE.VALUE_RAW) { return new Decoder(buffer) } else { - return new RLEDecoder('uint', buffer) + return new RLEDecoder("uint", buffer) } } function makeDecoders(columns, columnSpec) { const emptyBuf = new Uint8Array(0) - let decoders = [], columnIndex = 0, specIndex = 0 + let decoders = [], + columnIndex = 0, + specIndex = 0 while (columnIndex < columns.length || specIndex < columnSpec.length) { - if (columnIndex === columns.length || - (specIndex < columnSpec.length && columnSpec[specIndex].columnId < columns[columnIndex].columnId)) { - const {columnId, columnName} = columnSpec[specIndex] - decoders.push({columnId, columnName, decoder: decoderByColumnId(columnId, emptyBuf)}) + if ( + columnIndex === columns.length || + (specIndex < columnSpec.length && + columnSpec[specIndex].columnId < columns[columnIndex].columnId) + ) { + const { columnId, columnName } = columnSpec[specIndex] + decoders.push({ + columnId, + columnName, + decoder: decoderByColumnId(columnId, emptyBuf), + }) specIndex++ - } else if (specIndex === columnSpec.length || columns[columnIndex].columnId < columnSpec[specIndex].columnId) { - const {columnId, buffer} = columns[columnIndex] - decoders.push({columnId, decoder: decoderByColumnId(columnId, buffer)}) + } else if ( + specIndex === columnSpec.length || + columns[columnIndex].columnId < columnSpec[specIndex].columnId + ) { + const { columnId, buffer } = columns[columnIndex] + decoders.push({ columnId, decoder: decoderByColumnId(columnId, buffer) }) columnIndex++ - } else { // columns[columnIndex].columnId === columnSpec[specIndex].columnId - const {columnId, buffer} = columns[columnIndex], {columnName} = columnSpec[specIndex] - decoders.push({columnId, 
columnName, decoder: decoderByColumnId(columnId, buffer)}) + } else { + // columns[columnIndex].columnId === columnSpec[specIndex].columnId + const { columnId, buffer } = columns[columnIndex], + { columnName } = columnSpec[specIndex] + decoders.push({ + columnId, + columnName, + decoder: decoderByColumnId(columnId, buffer), + }) columnIndex++ specIndex++ } @@ -578,16 +705,22 @@ function decodeColumns(columns, actorIds, columnSpec) { columns = makeDecoders(columns, columnSpec) let parsedRows = [] while (columns.some(col => !col.decoder.done)) { - let row = {}, col = 0 + let row = {}, + col = 0 while (col < columns.length) { const columnId = columns[col].columnId - let groupId = columnId >> 4, groupCols = 1 - while (col + groupCols < columns.length && columns[col + groupCols].columnId >> 4 === groupId) { + let groupId = columnId >> 4, + groupCols = 1 + while ( + col + groupCols < columns.length && + columns[col + groupCols].columnId >> 4 === groupId + ) { groupCols++ } if (columnId % 8 === COLUMN_TYPE.GROUP_CARD) { - const values = [], count = columns[col].decoder.readValue() + const values = [], + count = columns[col].decoder.readValue() for (let i = 0; i < count; i++) { let value = {} for (let colOffset = 1; colOffset < groupCols; colOffset++) { @@ -611,20 +744,25 @@ function decodeColumnInfo(decoder) { // deflate-compressed. We ignore this bit when checking whether columns are sorted by ID. 
const COLUMN_ID_MASK = (-1 ^ COLUMN_TYPE_DEFLATE) >>> 0 - let lastColumnId = -1, columns = [], numColumns = decoder.readUint53() + let lastColumnId = -1, + columns = [], + numColumns = decoder.readUint53() for (let i = 0; i < numColumns; i++) { - const columnId = decoder.readUint53(), bufferLen = decoder.readUint53() + const columnId = decoder.readUint53(), + bufferLen = decoder.readUint53() if ((columnId & COLUMN_ID_MASK) <= (lastColumnId & COLUMN_ID_MASK)) { - throw new RangeError('Columns must be in ascending order') + throw new RangeError("Columns must be in ascending order") } lastColumnId = columnId - columns.push({columnId, bufferLen}) + columns.push({ columnId, bufferLen }) } return columns } function encodeColumnInfo(encoder, columns) { - const nonEmptyColumns = columns.filter(column => column.encoder.buffer.byteLength > 0) + const nonEmptyColumns = columns.filter( + column => column.encoder.buffer.byteLength > 0 + ) encoder.appendUint53(nonEmptyColumns.length) for (let column of nonEmptyColumns) { encoder.appendUint53(column.columnId) @@ -633,19 +771,21 @@ function encodeColumnInfo(encoder, columns) { } function decodeChangeHeader(decoder) { - const numDeps = decoder.readUint53(), deps = [] + const numDeps = decoder.readUint53(), + deps = [] for (let i = 0; i < numDeps; i++) { deps.push(bytesToHexString(decoder.readRawBytes(32))) } let change = { - actor: decoder.readHexString(), - seq: decoder.readUint53(), + actor: decoder.readHexString(), + seq: decoder.readUint53(), startOp: decoder.readUint53(), - time: decoder.readInt53(), + time: decoder.readInt53(), message: decoder.readPrefixedString(), - deps + deps, } - const actorIds = [change.actor], numActorIds = decoder.readUint53() + const actorIds = [change.actor], + numActorIds = decoder.readUint53() for (let i = 0; i < numActorIds; i++) actorIds.push(decoder.readHexString()) change.actorIds = actorIds return change @@ -676,31 +816,47 @@ function encodeContainer(chunkType, encodeContentsCallback) { const 
sha256 = new Hash() sha256.update(headerBuf) sha256.update(bodyBuf.subarray(HEADER_SPACE)) - const hash = sha256.digest(), checksum = hash.subarray(0, CHECKSUM_SIZE) + const hash = sha256.digest(), + checksum = hash.subarray(0, CHECKSUM_SIZE) // Copy header into the body buffer so that they are contiguous - bodyBuf.set(MAGIC_BYTES, HEADER_SPACE - headerBuf.byteLength - CHECKSUM_SIZE - MAGIC_BYTES.byteLength) - bodyBuf.set(checksum, HEADER_SPACE - headerBuf.byteLength - CHECKSUM_SIZE) - bodyBuf.set(headerBuf, HEADER_SPACE - headerBuf.byteLength) - return {hash, bytes: bodyBuf.subarray(HEADER_SPACE - headerBuf.byteLength - CHECKSUM_SIZE - MAGIC_BYTES.byteLength)} + bodyBuf.set( + MAGIC_BYTES, + HEADER_SPACE - headerBuf.byteLength - CHECKSUM_SIZE - MAGIC_BYTES.byteLength + ) + bodyBuf.set(checksum, HEADER_SPACE - headerBuf.byteLength - CHECKSUM_SIZE) + bodyBuf.set(headerBuf, HEADER_SPACE - headerBuf.byteLength) + return { + hash, + bytes: bodyBuf.subarray( + HEADER_SPACE - + headerBuf.byteLength - + CHECKSUM_SIZE - + MAGIC_BYTES.byteLength + ), + } } function decodeContainerHeader(decoder, computeHash) { if (!equalBytes(decoder.readRawBytes(MAGIC_BYTES.byteLength), MAGIC_BYTES)) { - throw new RangeError('Data does not begin with magic bytes 85 6f 4a 83') + throw new RangeError("Data does not begin with magic bytes 85 6f 4a 83") } const expectedHash = decoder.readRawBytes(4) const hashStartOffset = decoder.offset const chunkType = decoder.readByte() const chunkLength = decoder.readUint53() - const header = {chunkType, chunkLength, chunkData: decoder.readRawBytes(chunkLength)} + const header = { + chunkType, + chunkLength, + chunkData: decoder.readRawBytes(chunkLength), + } if (computeHash) { const sha256 = new Hash() sha256.update(decoder.buf.subarray(hashStartOffset, decoder.offset)) const binaryHash = sha256.digest() if (!equalBytes(binaryHash.subarray(0, 4), expectedHash)) { - throw new RangeError('checksum does not match data') + throw new RangeError("checksum does 
not match data") } header.hash = bytesToHexString(binaryHash) } @@ -712,7 +868,7 @@ function encodeChange(changeObj) { const change = changes[0] const { hash, bytes } = encodeContainer(CHUNK_TYPE_CHANGE, encoder => { - if (!Array.isArray(change.deps)) throw new TypeError('deps is not an array') + if (!Array.isArray(change.deps)) throw new TypeError("deps is not an array") encoder.appendUint53(change.deps.length) for (let hash of change.deps.slice().sort()) { encoder.appendRawBytes(hexStringToBytes(hash)) @@ -721,7 +877,7 @@ function encodeChange(changeObj) { encoder.appendUint53(change.seq) encoder.appendUint53(change.startOp) encoder.appendInt53(change.time) - encoder.appendPrefixedString(change.message || '') + encoder.appendPrefixedString(change.message || "") encoder.appendUint53(actorIds.length - 1) for (let actor of actorIds.slice(1)) encoder.appendHexString(actor) @@ -733,9 +889,11 @@ function encodeChange(changeObj) { const hexHash = bytesToHexString(hash) if (changeObj.hash && changeObj.hash !== hexHash) { - throw new RangeError(`Change hash does not match encoding: ${changeObj.hash} != ${hexHash}`) + throw new RangeError( + `Change hash does not match encoding: ${changeObj.hash} != ${hexHash}` + ) } - return (bytes.byteLength >= DEFLATE_MIN_SIZE) ? deflateChange(bytes) : bytes + return bytes.byteLength >= DEFLATE_MIN_SIZE ? 
deflateChange(bytes) : bytes } function decodeChangeColumns(buffer) { @@ -743,14 +901,15 @@ function decodeChangeColumns(buffer) { const decoder = new Decoder(buffer) const header = decodeContainerHeader(decoder, true) const chunkDecoder = new Decoder(header.chunkData) - if (!decoder.done) throw new RangeError('Encoded change has trailing data') - if (header.chunkType !== CHUNK_TYPE_CHANGE) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) + if (!decoder.done) throw new RangeError("Encoded change has trailing data") + if (header.chunkType !== CHUNK_TYPE_CHANGE) + throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) const change = decodeChangeHeader(chunkDecoder) const columns = decodeColumnInfo(chunkDecoder) for (let i = 0; i < columns.length; i++) { if ((columns[i].columnId & COLUMN_TYPE_DEFLATE) !== 0) { - throw new RangeError('change must not contain deflated columns') + throw new RangeError("change must not contain deflated columns") } columns[i].buffer = chunkDecoder.readRawBytes(columns[i].bufferLen) } @@ -769,7 +928,10 @@ function decodeChangeColumns(buffer) { */ function decodeChange(buffer) { const change = decodeChangeColumns(buffer) - change.ops = decodeOps(decodeColumns(change.columns, change.actorIds, CHANGE_COLUMNS), false) + change.ops = decodeOps( + decodeColumns(change.columns, change.actorIds, CHANGE_COLUMNS), + false + ) delete change.actorIds delete change.columns return change @@ -784,7 +946,7 @@ function decodeChangeMeta(buffer, computeHash) { if (buffer[8] === CHUNK_TYPE_DEFLATE) buffer = inflateChange(buffer) const header = decodeContainerHeader(new Decoder(buffer), computeHash) if (header.chunkType !== CHUNK_TYPE_CHANGE) { - throw new RangeError('Buffer chunk type is not a change') + throw new RangeError("Buffer chunk type is not a change") } const meta = decodeChangeHeader(new Decoder(header.chunkData)) meta.change = buffer @@ -797,7 +959,8 @@ function decodeChangeMeta(buffer, computeHash) { */ function 
deflateChange(buffer) { const header = decodeContainerHeader(new Decoder(buffer), false) - if (header.chunkType !== CHUNK_TYPE_CHANGE) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) + if (header.chunkType !== CHUNK_TYPE_CHANGE) + throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) const compressed = pako.deflateRaw(header.chunkData) const encoder = new Encoder() encoder.appendRawBytes(buffer.subarray(0, 8)) // copy MAGIC_BYTES and checksum @@ -812,7 +975,8 @@ function deflateChange(buffer) { */ function inflateChange(buffer) { const header = decodeContainerHeader(new Decoder(buffer), false) - if (header.chunkType !== CHUNK_TYPE_DEFLATE) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) + if (header.chunkType !== CHUNK_TYPE_DEFLATE) + throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) const decompressed = pako.inflateRaw(header.chunkData) const encoder = new Encoder() encoder.appendRawBytes(buffer.subarray(0, 8)) // copy MAGIC_BYTES and checksum @@ -827,7 +991,9 @@ function inflateChange(buffer) { * returns an array of subarrays, each subarray containing one change. 
*/ function splitContainers(buffer) { - let decoder = new Decoder(buffer), chunks = [], startOffset = 0 + let decoder = new Decoder(buffer), + chunks = [], + startOffset = 0 while (!decoder.done) { decodeContainerHeader(decoder, false) chunks.push(buffer.subarray(startOffset, decoder.offset)) @@ -846,7 +1012,10 @@ function decodeChanges(binaryChanges) { for (let chunk of splitContainers(binaryChange)) { if (chunk[8] === CHUNK_TYPE_DOCUMENT) { decoded = decoded.concat(decodeDocument(chunk)) - } else if (chunk[8] === CHUNK_TYPE_CHANGE || chunk[8] === CHUNK_TYPE_DEFLATE) { + } else if ( + chunk[8] === CHUNK_TYPE_CHANGE || + chunk[8] === CHUNK_TYPE_DEFLATE + ) { decoded.push(decodeChange(chunk)) } else { // ignoring chunk of unknown type @@ -858,9 +1027,10 @@ function decodeChanges(binaryChanges) { function sortOpIds(a, b) { if (a === b) return 0 - if (a === '_root') return -1 - if (b === '_root') return +1 - const a_ = parseOpId(a), b_ = parseOpId(b) + if (a === "_root") return -1 + if (b === "_root") return +1 + const a_ = parseOpId(a), + b_ = parseOpId(b) if (a_.counter < b_.counter) return -1 if (a_.counter > b_.counter) return +1 if (a_.actorId < b_.actorId) return -1 @@ -879,26 +1049,46 @@ function groupChangeOps(changes, ops) { change.ops = [] if (!changesByActor[change.actor]) changesByActor[change.actor] = [] if (change.seq !== changesByActor[change.actor].length + 1) { - throw new RangeError(`Expected seq = ${changesByActor[change.actor].length + 1}, got ${change.seq}`) + throw new RangeError( + `Expected seq = ${changesByActor[change.actor].length + 1}, got ${ + change.seq + }` + ) } - if (change.seq > 1 && changesByActor[change.actor][change.seq - 2].maxOp > change.maxOp) { - throw new RangeError('maxOp must increase monotonically per actor') + if ( + change.seq > 1 && + changesByActor[change.actor][change.seq - 2].maxOp > change.maxOp + ) { + throw new RangeError("maxOp must increase monotonically per actor") } changesByActor[change.actor].push(change) } 
let opsById = {} for (let op of ops) { - if (op.action === 'del') throw new RangeError('document should not contain del operations') + if (op.action === "del") + throw new RangeError("document should not contain del operations") op.pred = opsById[op.id] ? opsById[op.id].pred : [] opsById[op.id] = op for (let succ of op.succ) { if (!opsById[succ]) { if (op.elemId) { const elemId = op.insert ? op.id : op.elemId - opsById[succ] = {id: succ, action: 'del', obj: op.obj, elemId, pred: []} + opsById[succ] = { + id: succ, + action: "del", + obj: op.obj, + elemId, + pred: [], + } } else { - opsById[succ] = {id: succ, action: 'del', obj: op.obj, key: op.key, pred: []} + opsById[succ] = { + id: succ, + action: "del", + obj: op.obj, + key: op.key, + pred: [], + } } } opsById[succ].pred.push(op.id) @@ -906,14 +1096,15 @@ function groupChangeOps(changes, ops) { delete op.succ } for (let op of Object.values(opsById)) { - if (op.action === 'del') ops.push(op) + if (op.action === "del") ops.push(op) } for (let op of ops) { const { counter, actorId } = parseOpId(op.id) const actorChanges = changesByActor[actorId] // Binary search to find the change that should contain this operation - let left = 0, right = actorChanges.length + let left = 0, + right = actorChanges.length while (left < right) { const index = Math.floor((left + right) / 2) if (actorChanges[index].maxOp < counter) { @@ -933,7 +1124,8 @@ function groupChangeOps(changes, ops) { change.startOp = change.maxOp - change.ops.length + 1 delete change.maxOp for (let i = 0; i < change.ops.length; i++) { - const op = change.ops[i], expectedId = `${change.startOp + i}@${change.actor}` + const op = change.ops[i], + expectedId = `${change.startOp + i}@${change.actor}` if (op.id !== expectedId) { throw new RangeError(`Expected opId ${expectedId}, got ${op.id}`) } @@ -949,7 +1141,9 @@ function decodeDocumentChanges(changes, expectedHeads) { change.deps = [] for (let index of change.depsNum.map(d => d.depsIndex)) { if (!changes[index] 
|| !changes[index].hash) { - throw new RangeError(`No hash for index ${index} while processing index ${i}`) + throw new RangeError( + `No hash for index ${index} while processing index ${i}` + ) } const hash = changes[index].hash change.deps.push(hash) @@ -970,18 +1164,30 @@ function decodeDocumentChanges(changes, expectedHeads) { } const actualHeads = Object.keys(heads).sort() - let headsEqual = (actualHeads.length === expectedHeads.length), i = 0 + let headsEqual = actualHeads.length === expectedHeads.length, + i = 0 while (headsEqual && i < actualHeads.length) { - headsEqual = (actualHeads[i] === expectedHeads[i]) + headsEqual = actualHeads[i] === expectedHeads[i] i++ } if (!headsEqual) { - throw new RangeError(`Mismatched heads hashes: expected ${expectedHeads.join(', ')}, got ${actualHeads.join(', ')}`) + throw new RangeError( + `Mismatched heads hashes: expected ${expectedHeads.join( + ", " + )}, got ${actualHeads.join(", ")}` + ) } } function encodeDocumentHeader(doc) { - const { changesColumns, opsColumns, actorIds, heads, headsIndexes, extraBytes } = doc + const { + changesColumns, + opsColumns, + actorIds, + heads, + headsIndexes, + extraBytes, + } = doc for (let column of changesColumns) deflateColumn(column) for (let column of opsColumns) deflateColumn(column) @@ -996,7 +1202,8 @@ function encodeDocumentHeader(doc) { } encodeColumnInfo(encoder, changesColumns) encodeColumnInfo(encoder, opsColumns) - for (let column of changesColumns) encoder.appendRawBytes(column.encoder.buffer) + for (let column of changesColumns) + encoder.appendRawBytes(column.encoder.buffer) for (let column of opsColumns) encoder.appendRawBytes(column.encoder.buffer) for (let index of headsIndexes) encoder.appendUint53(index) if (extraBytes) encoder.appendRawBytes(extraBytes) @@ -1007,14 +1214,19 @@ function decodeDocumentHeader(buffer) { const documentDecoder = new Decoder(buffer) const header = decodeContainerHeader(documentDecoder, true) const decoder = new 
Decoder(header.chunkData) - if (!documentDecoder.done) throw new RangeError('Encoded document has trailing data') - if (header.chunkType !== CHUNK_TYPE_DOCUMENT) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) + if (!documentDecoder.done) + throw new RangeError("Encoded document has trailing data") + if (header.chunkType !== CHUNK_TYPE_DOCUMENT) + throw new RangeError(`Unexpected chunk type: ${header.chunkType}`) - const actorIds = [], numActors = decoder.readUint53() + const actorIds = [], + numActors = decoder.readUint53() for (let i = 0; i < numActors; i++) { actorIds.push(decoder.readHexString()) } - const heads = [], headsIndexes = [], numHeads = decoder.readUint53() + const heads = [], + headsIndexes = [], + numHeads = decoder.readUint53() for (let i = 0; i < numHeads; i++) { heads.push(bytesToHexString(decoder.readRawBytes(32))) } @@ -1033,14 +1245,27 @@ function decodeDocumentHeader(buffer) { for (let i = 0; i < numHeads; i++) headsIndexes.push(decoder.readUint53()) } - const extraBytes = decoder.readRawBytes(decoder.buf.byteLength - decoder.offset) - return { changesColumns, opsColumns, actorIds, heads, headsIndexes, extraBytes } + const extraBytes = decoder.readRawBytes( + decoder.buf.byteLength - decoder.offset + ) + return { + changesColumns, + opsColumns, + actorIds, + heads, + headsIndexes, + extraBytes, + } } function decodeDocument(buffer) { - const { changesColumns, opsColumns, actorIds, heads } = decodeDocumentHeader(buffer) + const { changesColumns, opsColumns, actorIds, heads } = + decodeDocumentHeader(buffer) const changes = decodeColumns(changesColumns, actorIds, DOCUMENT_COLUMNS) - const ops = decodeOps(decodeColumns(opsColumns, actorIds, DOC_OPS_COLUMNS), true) + const ops = decodeOps( + decodeColumns(opsColumns, actorIds, DOC_OPS_COLUMNS), + true + ) groupChangeOps(changes, ops) decodeDocumentChanges(changes, heads) return changes @@ -1051,7 +1276,7 @@ function decodeDocument(buffer) { */ function deflateColumn(column) { 
if (column.encoder.buffer.byteLength >= DEFLATE_MIN_SIZE) { - column.encoder = {buffer: pako.deflateRaw(column.encoder.buffer)} + column.encoder = { buffer: pako.deflateRaw(column.encoder.buffer) } column.columnId |= COLUMN_TYPE_DEFLATE } } @@ -1067,8 +1292,24 @@ function inflateColumn(column) { } module.exports = { - COLUMN_TYPE, VALUE_TYPE, ACTIONS, OBJECT_TYPE, DOC_OPS_COLUMNS, CHANGE_COLUMNS, DOCUMENT_COLUMNS, - encoderByColumnId, decoderByColumnId, makeDecoders, decodeValue, - splitContainers, encodeChange, decodeChangeColumns, decodeChange, decodeChangeMeta, decodeChanges, - encodeDocumentHeader, decodeDocumentHeader, decodeDocument + COLUMN_TYPE, + VALUE_TYPE, + ACTIONS, + OBJECT_TYPE, + DOC_OPS_COLUMNS, + CHANGE_COLUMNS, + DOCUMENT_COLUMNS, + encoderByColumnId, + decoderByColumnId, + makeDecoders, + decodeValue, + splitContainers, + encodeChange, + decodeChangeColumns, + decodeChange, + decodeChangeMeta, + decodeChanges, + encodeDocumentHeader, + decodeDocumentHeader, + decodeDocument, } diff --git a/automerge-js/test/legacy/common.js b/javascript/test/legacy/common.js similarity index 80% rename from automerge-js/test/legacy/common.js rename to javascript/test/legacy/common.js index 02e91392..7668e982 100644 --- a/automerge-js/test/legacy/common.js +++ b/javascript/test/legacy/common.js @@ -1,5 +1,5 @@ function isObject(obj) { - return typeof obj === 'object' && obj !== null + return typeof obj === "object" && obj !== null } /** @@ -20,11 +20,11 @@ function copyObject(obj) { * with an actor ID, separated by an `@` sign) and returns an object `{counter, actorId}`. 
*/ function parseOpId(opId) { - const match = /^(\d+)@(.*)$/.exec(opId || '') + const match = /^(\d+)@(.*)$/.exec(opId || "") if (!match) { throw new RangeError(`Not a valid opId: ${opId}`) } - return {counter: parseInt(match[1], 10), actorId: match[2]} + return { counter: parseInt(match[1], 10), actorId: match[2] } } /** @@ -32,7 +32,7 @@ function parseOpId(opId) { */ function equalBytes(array1, array2) { if (!(array1 instanceof Uint8Array) || !(array2 instanceof Uint8Array)) { - throw new TypeError('equalBytes can only compare Uint8Arrays') + throw new TypeError("equalBytes can only compare Uint8Arrays") } if (array1.byteLength !== array2.byteLength) return false for (let i = 0; i < array1.byteLength; i++) { @@ -51,5 +51,9 @@ function createArrayOfNulls(length) { } module.exports = { - isObject, copyObject, parseOpId, equalBytes, createArrayOfNulls + isObject, + copyObject, + parseOpId, + equalBytes, + createArrayOfNulls, } diff --git a/automerge-wasm/test/helpers/encoding.js b/javascript/test/legacy/encoding.js similarity index 74% rename from automerge-wasm/test/helpers/encoding.js rename to javascript/test/legacy/encoding.js index 92b62df6..f7650faf 100644 --- a/automerge-wasm/test/helpers/encoding.js +++ b/javascript/test/legacy/encoding.js @@ -6,7 +6,7 @@ * https://github.com/anonyco/FastestSmallestTextEncoderDecoder */ const utf8encoder = new TextEncoder() -const utf8decoder = new TextDecoder('utf-8') +const utf8decoder = new TextDecoder("utf-8") function stringToUtf8(string) { return utf8encoder.encode(string) @@ -20,30 +20,48 @@ function utf8ToString(buffer) { * Converts a string consisting of hexadecimal digits into an Uint8Array. 
*/ function hexStringToBytes(value) { - if (typeof value !== 'string') { - throw new TypeError('value is not a string') + if (typeof value !== "string") { + throw new TypeError("value is not a string") } if (!/^([0-9a-f][0-9a-f])*$/.test(value)) { - throw new RangeError('value is not hexadecimal') + throw new RangeError("value is not hexadecimal") } - if (value === '') { + if (value === "") { return new Uint8Array(0) } else { return new Uint8Array(value.match(/../g).map(b => parseInt(b, 16))) } } -const NIBBLE_TO_HEX = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'] +const NIBBLE_TO_HEX = [ + "0", + "1", + "2", + "3", + "4", + "5", + "6", + "7", + "8", + "9", + "a", + "b", + "c", + "d", + "e", + "f", +] const BYTE_TO_HEX = new Array(256) for (let i = 0; i < 256; i++) { - BYTE_TO_HEX[i] = `${NIBBLE_TO_HEX[(i >>> 4) & 0xf]}${NIBBLE_TO_HEX[i & 0xf]}`; + BYTE_TO_HEX[i] = `${NIBBLE_TO_HEX[(i >>> 4) & 0xf]}${NIBBLE_TO_HEX[i & 0xf]}` } /** * Converts a Uint8Array into the equivalent hexadecimal string. */ function bytesToHexString(bytes) { - let hex = '', len = bytes.byteLength + let hex = "", + len = bytes.byteLength for (let i = 0; i < len; i++) { hex += BYTE_TO_HEX[bytes[i]] } @@ -95,14 +113,17 @@ class Encoder { * appends it to the buffer. Returns the number of bytes written. */ appendUint32(value) { - if (!Number.isInteger(value)) throw new RangeError('value is not an integer') - if (value < 0 || value > 0xffffffff) throw new RangeError('number out of range') + if (!Number.isInteger(value)) + throw new RangeError("value is not an integer") + if (value < 0 || value > 0xffffffff) + throw new RangeError("number out of range") const numBytes = Math.max(1, Math.ceil((32 - Math.clz32(value)) / 7)) if (this.offset + numBytes > this.buf.byteLength) this.grow() for (let i = 0; i < numBytes; i++) { - this.buf[this.offset + i] = (value & 0x7f) | (i === numBytes - 1 ? 
0x00 : 0x80) + this.buf[this.offset + i] = + (value & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) value >>>= 7 // zero-filling right shift } this.offset += numBytes @@ -115,14 +136,19 @@ class Encoder { * it to the buffer. Returns the number of bytes written. */ appendInt32(value) { - if (!Number.isInteger(value)) throw new RangeError('value is not an integer') - if (value < -0x80000000 || value > 0x7fffffff) throw new RangeError('number out of range') + if (!Number.isInteger(value)) + throw new RangeError("value is not an integer") + if (value < -0x80000000 || value > 0x7fffffff) + throw new RangeError("number out of range") - const numBytes = Math.ceil((33 - Math.clz32(value >= 0 ? value : -value - 1)) / 7) + const numBytes = Math.ceil( + (33 - Math.clz32(value >= 0 ? value : -value - 1)) / 7 + ) if (this.offset + numBytes > this.buf.byteLength) this.grow() for (let i = 0; i < numBytes; i++) { - this.buf[this.offset + i] = (value & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) + this.buf[this.offset + i] = + (value & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) value >>= 7 // sign-propagating right shift } this.offset += numBytes @@ -135,9 +161,10 @@ class Encoder { * (53 bits). */ appendUint53(value) { - if (!Number.isInteger(value)) throw new RangeError('value is not an integer') + if (!Number.isInteger(value)) + throw new RangeError("value is not an integer") if (value < 0 || value > Number.MAX_SAFE_INTEGER) { - throw new RangeError('number out of range') + throw new RangeError("number out of range") } const high32 = Math.floor(value / 0x100000000) const low32 = (value & 0xffffffff) >>> 0 // right shift to interpret as unsigned @@ -150,9 +177,10 @@ class Encoder { * (53 bits). 
*/ appendInt53(value) { - if (!Number.isInteger(value)) throw new RangeError('value is not an integer') + if (!Number.isInteger(value)) + throw new RangeError("value is not an integer") if (value < Number.MIN_SAFE_INTEGER || value > Number.MAX_SAFE_INTEGER) { - throw new RangeError('number out of range') + throw new RangeError("number out of range") } const high32 = Math.floor(value / 0x100000000) const low32 = (value & 0xffffffff) >>> 0 // right shift to interpret as unsigned @@ -167,10 +195,10 @@ class Encoder { */ appendUint64(high32, low32) { if (!Number.isInteger(high32) || !Number.isInteger(low32)) { - throw new RangeError('value is not an integer') + throw new RangeError("value is not an integer") } if (high32 < 0 || high32 > 0xffffffff || low32 < 0 || low32 > 0xffffffff) { - throw new RangeError('number out of range') + throw new RangeError("number out of range") } if (high32 === 0) return this.appendUint32(low32) @@ -180,10 +208,12 @@ class Encoder { this.buf[this.offset + i] = (low32 & 0x7f) | 0x80 low32 >>>= 7 // zero-filling right shift } - this.buf[this.offset + 4] = (low32 & 0x0f) | ((high32 & 0x07) << 4) | (numBytes === 5 ? 0x00 : 0x80) + this.buf[this.offset + 4] = + (low32 & 0x0f) | ((high32 & 0x07) << 4) | (numBytes === 5 ? 0x00 : 0x80) high32 >>>= 3 for (let i = 5; i < numBytes; i++) { - this.buf[this.offset + i] = (high32 & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) + this.buf[this.offset + i] = + (high32 & 0x7f) | (i === numBytes - 1 ? 
0x00 : 0x80) high32 >>>= 7 } this.offset += numBytes @@ -200,25 +230,35 @@ class Encoder { */ appendInt64(high32, low32) { if (!Number.isInteger(high32) || !Number.isInteger(low32)) { - throw new RangeError('value is not an integer') + throw new RangeError("value is not an integer") } - if (high32 < -0x80000000 || high32 > 0x7fffffff || low32 < -0x80000000 || low32 > 0xffffffff) { - throw new RangeError('number out of range') + if ( + high32 < -0x80000000 || + high32 > 0x7fffffff || + low32 < -0x80000000 || + low32 > 0xffffffff + ) { + throw new RangeError("number out of range") } low32 >>>= 0 // interpret as unsigned if (high32 === 0 && low32 <= 0x7fffffff) return this.appendInt32(low32) - if (high32 === -1 && low32 >= 0x80000000) return this.appendInt32(low32 - 0x100000000) + if (high32 === -1 && low32 >= 0x80000000) + return this.appendInt32(low32 - 0x100000000) - const numBytes = Math.ceil((65 - Math.clz32(high32 >= 0 ? high32 : -high32 - 1)) / 7) + const numBytes = Math.ceil( + (65 - Math.clz32(high32 >= 0 ? high32 : -high32 - 1)) / 7 + ) if (this.offset + numBytes > this.buf.byteLength) this.grow() for (let i = 0; i < 4; i++) { this.buf[this.offset + i] = (low32 & 0x7f) | 0x80 low32 >>>= 7 // zero-filling right shift } - this.buf[this.offset + 4] = (low32 & 0x0f) | ((high32 & 0x07) << 4) | (numBytes === 5 ? 0x00 : 0x80) + this.buf[this.offset + 4] = + (low32 & 0x0f) | ((high32 & 0x07) << 4) | (numBytes === 5 ? 0x00 : 0x80) high32 >>= 3 // sign-propagating right shift for (let i = 5; i < numBytes; i++) { - this.buf[this.offset + i] = (high32 & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) + this.buf[this.offset + i] = + (high32 & 0x7f) | (i === numBytes - 1 ? 0x00 : 0x80) high32 >>= 7 } this.offset += numBytes @@ -243,7 +283,7 @@ class Encoder { * number of bytes appended. 
*/ appendRawString(value) { - if (typeof value !== 'string') throw new TypeError('value is not a string') + if (typeof value !== "string") throw new TypeError("value is not a string") return this.appendRawBytes(stringToUtf8(value)) } @@ -262,7 +302,7 @@ class Encoder { * (where the length is encoded as an unsigned LEB128 integer). */ appendPrefixedString(value) { - if (typeof value !== 'string') throw new TypeError('value is not a string') + if (typeof value !== "string") throw new TypeError("value is not a string") this.appendPrefixedBytes(stringToUtf8(value)) return this } @@ -281,8 +321,7 @@ class Encoder { * Flushes any unwritten data to the buffer. Call this before reading from * the buffer constructed by this Encoder. */ - finish() { - } + finish() {} } /** @@ -321,7 +360,7 @@ class Decoder { */ skip(bytes) { if (this.offset + bytes > this.buf.byteLength) { - throw new RangeError('cannot skip beyond end of buffer') + throw new RangeError("cannot skip beyond end of buffer") } this.offset += bytes } @@ -339,18 +378,20 @@ class Decoder { * Throws an exception if the value doesn't fit in a 32-bit unsigned int. 
*/ readUint32() { - let result = 0, shift = 0 + let result = 0, + shift = 0 while (this.offset < this.buf.byteLength) { const nextByte = this.buf[this.offset] - if (shift === 28 && (nextByte & 0xf0) !== 0) { // more than 5 bytes, or value > 0xffffffff - throw new RangeError('number out of range') + if (shift === 28 && (nextByte & 0xf0) !== 0) { + // more than 5 bytes, or value > 0xffffffff + throw new RangeError("number out of range") } - result = (result | (nextByte & 0x7f) << shift) >>> 0 // right shift to interpret value as unsigned + result = (result | ((nextByte & 0x7f) << shift)) >>> 0 // right shift to interpret value as unsigned shift += 7 this.offset++ if ((nextByte & 0x80) === 0) return result } - throw new RangeError('buffer ended with incomplete number') + throw new RangeError("buffer ended with incomplete number") } /** @@ -358,13 +399,17 @@ class Decoder { * Throws an exception if the value doesn't fit in a 32-bit signed int. */ readInt32() { - let result = 0, shift = 0 + let result = 0, + shift = 0 while (this.offset < this.buf.byteLength) { const nextByte = this.buf[this.offset] - if ((shift === 28 && (nextByte & 0x80) !== 0) || // more than 5 bytes - (shift === 28 && (nextByte & 0x40) === 0 && (nextByte & 0x38) !== 0) || // positive int > 0x7fffffff - (shift === 28 && (nextByte & 0x40) !== 0 && (nextByte & 0x38) !== 0x38)) { // negative int < -0x80000000 - throw new RangeError('number out of range') + if ( + (shift === 28 && (nextByte & 0x80) !== 0) || // more than 5 bytes + (shift === 28 && (nextByte & 0x40) === 0 && (nextByte & 0x38) !== 0) || // positive int > 0x7fffffff + (shift === 28 && (nextByte & 0x40) !== 0 && (nextByte & 0x38) !== 0x38) + ) { + // negative int < -0x80000000 + throw new RangeError("number out of range") } result |= (nextByte & 0x7f) << shift shift += 7 @@ -378,7 +423,7 @@ class Decoder { } } } - throw new RangeError('buffer ended with incomplete number') + throw new RangeError("buffer ended with incomplete number") } /** 
@@ -389,7 +434,7 @@ class Decoder { readUint53() { const { low32, high32 } = this.readUint64() if (high32 < 0 || high32 > 0x1fffff) { - throw new RangeError('number out of range') + throw new RangeError("number out of range") } return high32 * 0x100000000 + low32 } @@ -401,8 +446,12 @@ class Decoder { */ readInt53() { const { low32, high32 } = this.readInt64() - if (high32 < -0x200000 || (high32 === -0x200000 && low32 === 0) || high32 > 0x1fffff) { - throw new RangeError('number out of range') + if ( + high32 < -0x200000 || + (high32 === -0x200000 && low32 === 0) || + high32 > 0x1fffff + ) { + throw new RangeError("number out of range") } return high32 * 0x100000000 + low32 } @@ -414,10 +463,12 @@ class Decoder { * `{high32, low32}`. */ readUint64() { - let low32 = 0, high32 = 0, shift = 0 + let low32 = 0, + high32 = 0, + shift = 0 while (this.offset < this.buf.byteLength && shift <= 28) { const nextByte = this.buf[this.offset] - low32 = (low32 | (nextByte & 0x7f) << shift) >>> 0 // right shift to interpret value as unsigned + low32 = (low32 | ((nextByte & 0x7f) << shift)) >>> 0 // right shift to interpret value as unsigned if (shift === 28) { high32 = (nextByte & 0x70) >>> 4 } @@ -429,15 +480,16 @@ class Decoder { shift = 3 while (this.offset < this.buf.byteLength) { const nextByte = this.buf[this.offset] - if (shift === 31 && (nextByte & 0xfe) !== 0) { // more than 10 bytes, or value > 2^64 - 1 - throw new RangeError('number out of range') + if (shift === 31 && (nextByte & 0xfe) !== 0) { + // more than 10 bytes, or value > 2^64 - 1 + throw new RangeError("number out of range") } - high32 = (high32 | (nextByte & 0x7f) << shift) >>> 0 + high32 = (high32 | ((nextByte & 0x7f) << shift)) >>> 0 shift += 7 this.offset++ if ((nextByte & 0x80) === 0) return { high32, low32 } } - throw new RangeError('buffer ended with incomplete number') + throw new RangeError("buffer ended with incomplete number") } /** @@ -448,17 +500,20 @@ class Decoder { * sign of the `high32` half 
indicates the sign of the 64-bit number. */ readInt64() { - let low32 = 0, high32 = 0, shift = 0 + let low32 = 0, + high32 = 0, + shift = 0 while (this.offset < this.buf.byteLength && shift <= 28) { const nextByte = this.buf[this.offset] - low32 = (low32 | (nextByte & 0x7f) << shift) >>> 0 // right shift to interpret value as unsigned + low32 = (low32 | ((nextByte & 0x7f) << shift)) >>> 0 // right shift to interpret value as unsigned if (shift === 28) { high32 = (nextByte & 0x70) >>> 4 } shift += 7 this.offset++ if ((nextByte & 0x80) === 0) { - if ((nextByte & 0x40) !== 0) { // sign-extend negative integer + if ((nextByte & 0x40) !== 0) { + // sign-extend negative integer if (shift < 32) low32 = (low32 | (-1 << shift)) >>> 0 high32 |= -1 << Math.max(shift - 32, 0) } @@ -472,19 +527,20 @@ class Decoder { // On the 10th byte there are only two valid values: all 7 value bits zero // (if the value is positive) or all 7 bits one (if the value is negative) if (shift === 31 && nextByte !== 0 && nextByte !== 0x7f) { - throw new RangeError('number out of range') + throw new RangeError("number out of range") } high32 |= (nextByte & 0x7f) << shift shift += 7 this.offset++ if ((nextByte & 0x80) === 0) { - if ((nextByte & 0x40) !== 0 && shift < 32) { // sign-extend negative integer + if ((nextByte & 0x40) !== 0 && shift < 32) { + // sign-extend negative integer high32 |= -1 << shift } return { high32, low32 } } } - throw new RangeError('buffer ended with incomplete number') + throw new RangeError("buffer ended with incomplete number") } /** @@ -494,7 +550,7 @@ class Decoder { readRawBytes(length) { const start = this.offset if (start + length > this.buf.byteLength) { - throw new RangeError('subarray exceeds buffer size') + throw new RangeError("subarray exceeds buffer size") } this.offset += length return this.buf.subarray(start, this.offset) @@ -559,7 +615,7 @@ class RLEEncoder extends Encoder { constructor(type) { super() this.type = type - this.state = 'empty' + this.state = 
"empty" this.lastValue = undefined this.count = 0 this.literal = [] @@ -578,76 +634,81 @@ class RLEEncoder extends Encoder { */ _appendValue(value, repetitions = 1) { if (repetitions <= 0) return - if (this.state === 'empty') { - this.state = (value === null ? 'nulls' : (repetitions === 1 ? 'loneValue' : 'repetition')) + if (this.state === "empty") { + this.state = + value === null + ? "nulls" + : repetitions === 1 + ? "loneValue" + : "repetition" this.lastValue = value this.count = repetitions - } else if (this.state === 'loneValue') { + } else if (this.state === "loneValue") { if (value === null) { this.flush() - this.state = 'nulls' + this.state = "nulls" this.count = repetitions } else if (value === this.lastValue) { - this.state = 'repetition' + this.state = "repetition" this.count = 1 + repetitions } else if (repetitions > 1) { this.flush() - this.state = 'repetition' + this.state = "repetition" this.count = repetitions this.lastValue = value } else { - this.state = 'literal' + this.state = "literal" this.literal = [this.lastValue] this.lastValue = value } - } else if (this.state === 'repetition') { + } else if (this.state === "repetition") { if (value === null) { this.flush() - this.state = 'nulls' + this.state = "nulls" this.count = repetitions } else if (value === this.lastValue) { this.count += repetitions } else if (repetitions > 1) { this.flush() - this.state = 'repetition' + this.state = "repetition" this.count = repetitions this.lastValue = value } else { this.flush() - this.state = 'loneValue' + this.state = "loneValue" this.lastValue = value } - } else if (this.state === 'literal') { + } else if (this.state === "literal") { if (value === null) { this.literal.push(this.lastValue) this.flush() - this.state = 'nulls' + this.state = "nulls" this.count = repetitions } else if (value === this.lastValue) { this.flush() - this.state = 'repetition' + this.state = "repetition" this.count = 1 + repetitions } else if (repetitions > 1) { 
this.literal.push(this.lastValue) this.flush() - this.state = 'repetition' + this.state = "repetition" this.count = repetitions this.lastValue = value } else { this.literal.push(this.lastValue) this.lastValue = value } - } else if (this.state === 'nulls') { + } else if (this.state === "nulls") { if (value === null) { this.count += repetitions } else if (repetitions > 1) { this.flush() - this.state = 'repetition' + this.state = "repetition" this.count = repetitions this.lastValue = value } else { this.flush() - this.state = 'loneValue' + this.state = "loneValue" this.lastValue = value } } @@ -666,13 +727,16 @@ class RLEEncoder extends Encoder { */ copyFrom(decoder, options = {}) { const { count, sumValues, sumShift } = options - if (!(decoder instanceof RLEDecoder) || (decoder.type !== this.type)) { - throw new TypeError('incompatible type of decoder') + if (!(decoder instanceof RLEDecoder) || decoder.type !== this.type) { + throw new TypeError("incompatible type of decoder") } - let remaining = (typeof count === 'number' ? count : Number.MAX_SAFE_INTEGER) - let nonNullValues = 0, sum = 0 - if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`) - if (remaining === 0 || decoder.done) return sumValues ? {nonNullValues, sum} : {nonNullValues} + let remaining = typeof count === "number" ? count : Number.MAX_SAFE_INTEGER + let nonNullValues = 0, + sum = 0 + if (count && remaining > 0 && decoder.done) + throw new RangeError(`cannot copy ${count} values`) + if (remaining === 0 || decoder.done) + return sumValues ? { nonNullValues, sum } : { nonNullValues } // Copy a value so that we have a well-defined starting state. 
NB: when super.copyFrom() is // called by the DeltaEncoder subclass, the following calls to readValue() and appendValue() @@ -684,87 +748,101 @@ class RLEEncoder extends Encoder { remaining -= numNulls decoder.count -= numNulls - 1 this.appendValue(null, numNulls) - if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`) - if (remaining === 0 || decoder.done) return sumValues ? {nonNullValues, sum} : {nonNullValues} + if (count && remaining > 0 && decoder.done) + throw new RangeError(`cannot copy ${count} values`) + if (remaining === 0 || decoder.done) + return sumValues ? { nonNullValues, sum } : { nonNullValues } firstValue = decoder.readValue() - if (firstValue === null) throw new RangeError('null run must be followed by non-null value') + if (firstValue === null) + throw new RangeError("null run must be followed by non-null value") } this.appendValue(firstValue) remaining-- nonNullValues++ - if (sumValues) sum += (sumShift ? (firstValue >>> sumShift) : firstValue) - if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`) - if (remaining === 0 || decoder.done) return sumValues ? {nonNullValues, sum} : {nonNullValues} + if (sumValues) sum += sumShift ? firstValue >>> sumShift : firstValue + if (count && remaining > 0 && decoder.done) + throw new RangeError(`cannot copy ${count} values`) + if (remaining === 0 || decoder.done) + return sumValues ? 
{ nonNullValues, sum } : { nonNullValues } // Copy data at the record level without expanding repetitions - let firstRun = (decoder.count > 0) + let firstRun = decoder.count > 0 while (remaining > 0 && !decoder.done) { if (!firstRun) decoder.readRecord() const numValues = Math.min(decoder.count, remaining) decoder.count -= numValues - if (decoder.state === 'literal') { + if (decoder.state === "literal") { nonNullValues += numValues for (let i = 0; i < numValues; i++) { - if (decoder.done) throw new RangeError('incomplete literal') + if (decoder.done) throw new RangeError("incomplete literal") const value = decoder.readRawValue() - if (value === decoder.lastValue) throw new RangeError('Repetition of values is not allowed in literal') + if (value === decoder.lastValue) + throw new RangeError( + "Repetition of values is not allowed in literal" + ) decoder.lastValue = value this._appendValue(value) - if (sumValues) sum += (sumShift ? (value >>> sumShift) : value) + if (sumValues) sum += sumShift ? value >>> sumShift : value } - } else if (decoder.state === 'repetition') { + } else if (decoder.state === "repetition") { nonNullValues += numValues - if (sumValues) sum += numValues * (sumShift ? (decoder.lastValue >>> sumShift) : decoder.lastValue) + if (sumValues) + sum += + numValues * + (sumShift ? 
decoder.lastValue >>> sumShift : decoder.lastValue) const value = decoder.lastValue this._appendValue(value) if (numValues > 1) { this._appendValue(value) - if (this.state !== 'repetition') throw new RangeError(`Unexpected state ${this.state}`) + if (this.state !== "repetition") + throw new RangeError(`Unexpected state ${this.state}`) this.count += numValues - 2 } - } else if (decoder.state === 'nulls') { + } else if (decoder.state === "nulls") { this._appendValue(null) - if (this.state !== 'nulls') throw new RangeError(`Unexpected state ${this.state}`) + if (this.state !== "nulls") + throw new RangeError(`Unexpected state ${this.state}`) this.count += numValues - 1 } firstRun = false remaining -= numValues } - if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`) - return sumValues ? {nonNullValues, sum} : {nonNullValues} + if (count && remaining > 0 && decoder.done) + throw new RangeError(`cannot copy ${count} values`) + return sumValues ? { nonNullValues, sum } : { nonNullValues } } /** * Private method, do not call from outside the class. */ flush() { - if (this.state === 'loneValue') { + if (this.state === "loneValue") { this.appendInt32(-1) this.appendRawValue(this.lastValue) - } else if (this.state === 'repetition') { + } else if (this.state === "repetition") { this.appendInt53(this.count) this.appendRawValue(this.lastValue) - } else if (this.state === 'literal') { + } else if (this.state === "literal") { this.appendInt53(-this.literal.length) for (let v of this.literal) this.appendRawValue(v) - } else if (this.state === 'nulls') { + } else if (this.state === "nulls") { this.appendInt32(0) this.appendUint53(this.count) } - this.state = 'empty' + this.state = "empty" } /** * Private method, do not call from outside the class. 
*/ appendRawValue(value) { - if (this.type === 'int') { + if (this.type === "int") { this.appendInt53(value) - } else if (this.type === 'uint') { + } else if (this.type === "uint") { this.appendUint53(value) - } else if (this.type === 'utf8') { + } else if (this.type === "utf8") { this.appendPrefixedString(value) } else { throw new RangeError(`Unknown RLEEncoder datatype: ${this.type}`) @@ -776,9 +854,9 @@ class RLEEncoder extends Encoder { * the buffer constructed by this Encoder. */ finish() { - if (this.state === 'literal') this.literal.push(this.lastValue) + if (this.state === "literal") this.literal.push(this.lastValue) // Don't write anything if the only values we have seen are nulls - if (this.state !== 'nulls' || this.offset > 0) this.flush() + if (this.state !== "nulls" || this.offset > 0) this.flush() } } @@ -800,7 +878,7 @@ class RLEDecoder extends Decoder { * position, and true if we are at the end of the buffer. */ get done() { - return (this.count === 0) && (this.offset === this.buf.byteLength) + return this.count === 0 && this.offset === this.buf.byteLength } /** @@ -821,9 +899,10 @@ class RLEDecoder extends Decoder { if (this.done) return null if (this.count === 0) this.readRecord() this.count -= 1 - if (this.state === 'literal') { + if (this.state === "literal") { const value = this.readRawValue() - if (value === this.lastValue) throw new RangeError('Repetition of values is not allowed in literal') + if (value === this.lastValue) + throw new RangeError("Repetition of values is not allowed in literal") this.lastValue = value return value } else { @@ -839,20 +918,22 @@ class RLEDecoder extends Decoder { if (this.count === 0) { this.count = this.readInt53() if (this.count > 0) { - this.lastValue = (this.count <= numSkip) ? this.skipRawValues(1) : this.readRawValue() - this.state = 'repetition' + this.lastValue = + this.count <= numSkip ? 
this.skipRawValues(1) : this.readRawValue() + this.state = "repetition" } else if (this.count < 0) { this.count = -this.count - this.state = 'literal' - } else { // this.count == 0 + this.state = "literal" + } else { + // this.count == 0 this.count = this.readUint53() this.lastValue = null - this.state = 'nulls' + this.state = "nulls" } } const consume = Math.min(numSkip, this.count) - if (this.state === 'literal') this.skipRawValues(consume) + if (this.state === "literal") this.skipRawValues(consume) numSkip -= consume this.count -= consume } @@ -866,23 +947,34 @@ class RLEDecoder extends Decoder { this.count = this.readInt53() if (this.count > 1) { const value = this.readRawValue() - if ((this.state === 'repetition' || this.state === 'literal') && this.lastValue === value) { - throw new RangeError('Successive repetitions with the same value are not allowed') + if ( + (this.state === "repetition" || this.state === "literal") && + this.lastValue === value + ) { + throw new RangeError( + "Successive repetitions with the same value are not allowed" + ) } - this.state = 'repetition' + this.state = "repetition" this.lastValue = value } else if (this.count === 1) { - throw new RangeError('Repetition count of 1 is not allowed, use a literal instead') + throw new RangeError( + "Repetition count of 1 is not allowed, use a literal instead" + ) } else if (this.count < 0) { this.count = -this.count - if (this.state === 'literal') throw new RangeError('Successive literals are not allowed') - this.state = 'literal' - } else { // this.count == 0 - if (this.state === 'nulls') throw new RangeError('Successive null runs are not allowed') + if (this.state === "literal") + throw new RangeError("Successive literals are not allowed") + this.state = "literal" + } else { + // this.count == 0 + if (this.state === "nulls") + throw new RangeError("Successive null runs are not allowed") this.count = this.readUint53() - if (this.count === 0) throw new RangeError('Zero-length null runs are not 
allowed') + if (this.count === 0) + throw new RangeError("Zero-length null runs are not allowed") this.lastValue = null - this.state = 'nulls' + this.state = "nulls" } } @@ -891,11 +983,11 @@ class RLEDecoder extends Decoder { * Reads one value of the datatype configured on construction. */ readRawValue() { - if (this.type === 'int') { + if (this.type === "int") { return this.readInt53() - } else if (this.type === 'uint') { + } else if (this.type === "uint") { return this.readUint53() - } else if (this.type === 'utf8') { + } else if (this.type === "utf8") { return this.readPrefixedString() } else { throw new RangeError(`Unknown RLEDecoder datatype: ${this.type}`) @@ -907,14 +999,14 @@ class RLEDecoder extends Decoder { * Skips over `num` values of the datatype configured on construction. */ skipRawValues(num) { - if (this.type === 'utf8') { + if (this.type === "utf8") { for (let i = 0; i < num; i++) this.skip(this.readUint53()) } else { while (num > 0 && this.offset < this.buf.byteLength) { if ((this.buf[this.offset] & 0x80) === 0) num-- this.offset++ } - if (num > 0) throw new RangeError('cannot skip beyond end of buffer') + if (num > 0) throw new RangeError("cannot skip beyond end of buffer") } } } @@ -931,7 +1023,7 @@ class RLEDecoder extends Decoder { */ class DeltaEncoder extends RLEEncoder { constructor() { - super('int') + super("int") this.absoluteValue = 0 } @@ -941,7 +1033,7 @@ class DeltaEncoder extends RLEEncoder { */ appendValue(value, repetitions = 1) { if (repetitions <= 0) return - if (typeof value === 'number') { + if (typeof value === "number") { super.appendValue(value - this.absoluteValue, 1) this.absoluteValue = value if (repetitions > 1) super.appendValue(0, repetitions - 1) @@ -957,26 +1049,29 @@ class DeltaEncoder extends RLEEncoder { */ copyFrom(decoder, options = {}) { if (options.sumValues) { - throw new RangeError('unsupported options for DeltaEncoder.copyFrom()') + throw new RangeError("unsupported options for DeltaEncoder.copyFrom()") 
} if (!(decoder instanceof DeltaDecoder)) { - throw new TypeError('incompatible type of decoder') + throw new TypeError("incompatible type of decoder") } let remaining = options.count - if (remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${remaining} values`) + if (remaining > 0 && decoder.done) + throw new RangeError(`cannot copy ${remaining} values`) if (remaining === 0 || decoder.done) return // Copy any null values, and the first non-null value, so that appendValue() computes the // difference between the encoder's last value and the decoder's first (absolute) value. - let value = decoder.readValue(), nulls = 0 + let value = decoder.readValue(), + nulls = 0 this.appendValue(value) if (value === null) { nulls = decoder.count + 1 if (remaining !== undefined && remaining < nulls) nulls = remaining decoder.count -= nulls - 1 this.count += nulls - 1 - if (remaining > nulls && decoder.done) throw new RangeError(`cannot copy ${remaining} values`) + if (remaining > nulls && decoder.done) + throw new RangeError(`cannot copy ${remaining} values`) if (remaining === nulls || decoder.done) return // The next value read is certain to be non-null because we're not at the end of the decoder, @@ -989,7 +1084,10 @@ class DeltaEncoder extends RLEEncoder { // value, while subsequent values are relative. Thus, the sum of all of the (non-null) copied // values must equal the absolute value of the final element copied. 
if (remaining !== undefined) remaining -= nulls + 1 - const { nonNullValues, sum } = super.copyFrom(decoder, {count: remaining, sumValues: true}) + const { nonNullValues, sum } = super.copyFrom(decoder, { + count: remaining, + sumValues: true, + }) if (nonNullValues > 0) { this.absoluteValue = sum decoder.absoluteValue = sum @@ -1003,7 +1101,7 @@ class DeltaEncoder extends RLEEncoder { */ class DeltaDecoder extends RLEDecoder { constructor(buffer) { - super('int', buffer) + super("int", buffer) this.absoluteValue = 0 } @@ -1036,12 +1134,12 @@ class DeltaDecoder extends RLEDecoder { while (numSkip > 0 && !this.done) { if (this.count === 0) this.readRecord() const consume = Math.min(numSkip, this.count) - if (this.state === 'literal') { + if (this.state === "literal") { for (let i = 0; i < consume; i++) { this.lastValue = this.readRawValue() this.absoluteValue += this.lastValue } - } else if (this.state === 'repetition') { + } else if (this.state === "repetition") { this.absoluteValue += consume * this.lastValue } numSkip -= consume @@ -1090,12 +1188,13 @@ class BooleanEncoder extends Encoder { */ copyFrom(decoder, options = {}) { if (!(decoder instanceof BooleanDecoder)) { - throw new TypeError('incompatible type of decoder') + throw new TypeError("incompatible type of decoder") } const { count } = options - let remaining = (typeof count === 'number' ? count : Number.MAX_SAFE_INTEGER) - if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`) + let remaining = typeof count === "number" ? 
count : Number.MAX_SAFE_INTEGER + if (count && remaining > 0 && decoder.done) + throw new RangeError(`cannot copy ${count} values`) if (remaining === 0 || decoder.done) return // Copy one value to bring decoder and encoder state into sync, then finish that value's repetitions @@ -1108,7 +1207,8 @@ class BooleanEncoder extends Encoder { while (remaining > 0 && !decoder.done) { decoder.count = decoder.readUint53() - if (decoder.count === 0) throw new RangeError('Zero-length runs are not allowed') + if (decoder.count === 0) + throw new RangeError("Zero-length runs are not allowed") decoder.lastValue = !decoder.lastValue this.appendUint53(this.count) @@ -1119,7 +1219,8 @@ class BooleanEncoder extends Encoder { remaining -= numCopied } - if (count && remaining > 0 && decoder.done) throw new RangeError(`cannot copy ${count} values`) + if (count && remaining > 0 && decoder.done) + throw new RangeError(`cannot copy ${count} values`) } /** @@ -1151,7 +1252,7 @@ class BooleanDecoder extends Decoder { * position, and true if we are at the end of the buffer. 
*/ get done() { - return (this.count === 0) && (this.offset === this.buf.byteLength) + return this.count === 0 && this.offset === this.buf.byteLength } /** @@ -1174,7 +1275,7 @@ class BooleanDecoder extends Decoder { this.count = this.readUint53() this.lastValue = !this.lastValue if (this.count === 0 && !this.firstRun) { - throw new RangeError('Zero-length runs are not allowed') + throw new RangeError("Zero-length runs are not allowed") } this.firstRun = false } @@ -1190,7 +1291,8 @@ class BooleanDecoder extends Decoder { if (this.count === 0) { this.count = this.readUint53() this.lastValue = !this.lastValue - if (this.count === 0) throw new RangeError('Zero-length runs are not allowed') + if (this.count === 0) + throw new RangeError("Zero-length runs are not allowed") } if (this.count < numSkip) { numSkip -= this.count @@ -1204,6 +1306,16 @@ class BooleanDecoder extends Decoder { } module.exports = { - stringToUtf8, utf8ToString, hexStringToBytes, bytesToHexString, - Encoder, Decoder, RLEEncoder, RLEDecoder, DeltaEncoder, DeltaDecoder, BooleanEncoder, BooleanDecoder + stringToUtf8, + utf8ToString, + hexStringToBytes, + bytesToHexString, + Encoder, + Decoder, + RLEEncoder, + RLEDecoder, + DeltaEncoder, + DeltaDecoder, + BooleanEncoder, + BooleanDecoder, } diff --git a/automerge-js/test/legacy/sync.js b/javascript/test/legacy/sync.js similarity index 80% rename from automerge-js/test/legacy/sync.js rename to javascript/test/legacy/sync.js index 3bb1571d..233c4292 100644 --- a/automerge-js/test/legacy/sync.js +++ b/javascript/test/legacy/sync.js @@ -17,9 +17,14 @@ */ const Backend = null //require('./backend') -const { hexStringToBytes, bytesToHexString, Encoder, Decoder } = require('./encoding') -const { decodeChangeMeta } = require('./columnar') -const { copyObject } = require('./common') +const { + hexStringToBytes, + bytesToHexString, + Encoder, + Decoder, +} = require("./encoding") +const { decodeChangeMeta } = require("./columnar") +const { copyObject } = 
require("./common") const HASH_SIZE = 32 // 256 bits = 32 bytes const MESSAGE_TYPE_SYNC = 0x42 // first byte of a sync message, for identification @@ -28,7 +33,8 @@ const PEER_STATE_TYPE = 0x43 // first byte of an encoded peer state, for identif // These constants correspond to a 1% false positive rate. The values can be changed without // breaking compatibility of the network protocol, since the parameters used for a particular // Bloom filter are encoded in the wire format. -const BITS_PER_ENTRY = 10, NUM_PROBES = 7 +const BITS_PER_ENTRY = 10, + NUM_PROBES = 7 /** * A Bloom filter implementation that can be serialised to a byte array for transmission @@ -36,13 +42,15 @@ const BITS_PER_ENTRY = 10, NUM_PROBES = 7 * so this implementation does not perform its own hashing. */ class BloomFilter { - constructor (arg) { + constructor(arg) { if (Array.isArray(arg)) { // arg is an array of SHA256 hashes in hexadecimal encoding this.numEntries = arg.length this.numBitsPerEntry = BITS_PER_ENTRY this.numProbes = NUM_PROBES - this.bits = new Uint8Array(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) + this.bits = new Uint8Array( + Math.ceil((this.numEntries * this.numBitsPerEntry) / 8) + ) for (let hash of arg) this.addHash(hash) } else if (arg instanceof Uint8Array) { if (arg.byteLength === 0) { @@ -55,10 +63,12 @@ class BloomFilter { this.numEntries = decoder.readUint32() this.numBitsPerEntry = decoder.readUint32() this.numProbes = decoder.readUint32() - this.bits = decoder.readRawBytes(Math.ceil(this.numEntries * this.numBitsPerEntry / 8)) + this.bits = decoder.readRawBytes( + Math.ceil((this.numEntries * this.numBitsPerEntry) / 8) + ) } } else { - throw new TypeError('invalid argument') + throw new TypeError("invalid argument") } } @@ -86,12 +96,32 @@ class BloomFilter { * http://www.ccis.northeastern.edu/home/pete/pub/bloom-filters-verification.pdf */ getProbes(hash) { - const hashBytes = hexStringToBytes(hash), modulo = 8 * this.bits.byteLength - if 
(hashBytes.byteLength !== 32) throw new RangeError(`Not a 256-bit hash: ${hash}`) + const hashBytes = hexStringToBytes(hash), + modulo = 8 * this.bits.byteLength + if (hashBytes.byteLength !== 32) + throw new RangeError(`Not a 256-bit hash: ${hash}`) // on the next three lines, the right shift means interpret value as unsigned - let x = ((hashBytes[0] | hashBytes[1] << 8 | hashBytes[2] << 16 | hashBytes[3] << 24) >>> 0) % modulo - let y = ((hashBytes[4] | hashBytes[5] << 8 | hashBytes[6] << 16 | hashBytes[7] << 24) >>> 0) % modulo - let z = ((hashBytes[8] | hashBytes[9] << 8 | hashBytes[10] << 16 | hashBytes[11] << 24) >>> 0) % modulo + let x = + ((hashBytes[0] | + (hashBytes[1] << 8) | + (hashBytes[2] << 16) | + (hashBytes[3] << 24)) >>> + 0) % + modulo + let y = + ((hashBytes[4] | + (hashBytes[5] << 8) | + (hashBytes[6] << 16) | + (hashBytes[7] << 24)) >>> + 0) % + modulo + let z = + ((hashBytes[8] | + (hashBytes[9] << 8) | + (hashBytes[10] << 16) | + (hashBytes[11] << 24)) >>> + 0) % + modulo const probes = [x] for (let i = 1; i < this.numProbes; i++) { x = (x + y) % modulo @@ -128,12 +158,14 @@ class BloomFilter { * Encodes a sorted array of SHA-256 hashes (as hexadecimal strings) into a byte array. 
*/ function encodeHashes(encoder, hashes) { - if (!Array.isArray(hashes)) throw new TypeError('hashes must be an array') + if (!Array.isArray(hashes)) throw new TypeError("hashes must be an array") encoder.appendUint32(hashes.length) for (let i = 0; i < hashes.length; i++) { - if (i > 0 && hashes[i - 1] >= hashes[i]) throw new RangeError('hashes must be sorted') + if (i > 0 && hashes[i - 1] >= hashes[i]) + throw new RangeError("hashes must be sorted") const bytes = hexStringToBytes(hashes[i]) - if (bytes.byteLength !== HASH_SIZE) throw new TypeError('heads hashes must be 256 bits') + if (bytes.byteLength !== HASH_SIZE) + throw new TypeError("heads hashes must be 256 bits") encoder.appendRawBytes(bytes) } } @@ -143,7 +175,8 @@ function encodeHashes(encoder, hashes) { * array of hex strings. */ function decodeHashes(decoder) { - let length = decoder.readUint32(), hashes = [] + let length = decoder.readUint32(), + hashes = [] for (let i = 0; i < length; i++) { hashes.push(bytesToHexString(decoder.readRawBytes(HASH_SIZE))) } @@ -183,11 +216,11 @@ function decodeSyncMessage(bytes) { const heads = decodeHashes(decoder) const need = decodeHashes(decoder) const haveCount = decoder.readUint32() - let message = {heads, need, have: [], changes: []} + let message = { heads, need, have: [], changes: [] } for (let i = 0; i < haveCount; i++) { const lastSync = decodeHashes(decoder) const bloom = decoder.readPrefixedBytes(decoder) - message.have.push({lastSync, bloom}) + message.have.push({ lastSync, bloom }) } const changeCount = decoder.readUint32() for (let i = 0; i < changeCount; i++) { @@ -234,7 +267,7 @@ function decodeSyncState(bytes) { function makeBloomFilter(backend, lastSync) { const newChanges = Backend.getChanges(backend, lastSync) const hashes = newChanges.map(change => decodeChangeMeta(change, true).hash) - return {lastSync, bloom: new BloomFilter(hashes).bytes} + return { lastSync, bloom: new BloomFilter(hashes).bytes } } /** @@ -245,20 +278,26 @@ function 
makeBloomFilter(backend, lastSync) { */ function getChangesToSend(backend, have, need) { if (have.length === 0) { - return need.map(hash => Backend.getChangeByHash(backend, hash)).filter(change => change !== undefined) + return need + .map(hash => Backend.getChangeByHash(backend, hash)) + .filter(change => change !== undefined) } - let lastSyncHashes = {}, bloomFilters = [] + let lastSyncHashes = {}, + bloomFilters = [] for (let h of have) { for (let hash of h.lastSync) lastSyncHashes[hash] = true bloomFilters.push(new BloomFilter(h.bloom)) } // Get all changes that were added since the last sync - const changes = Backend.getChanges(backend, Object.keys(lastSyncHashes)) - .map(change => decodeChangeMeta(change, true)) + const changes = Backend.getChanges(backend, Object.keys(lastSyncHashes)).map( + change => decodeChangeMeta(change, true) + ) - let changeHashes = {}, dependents = {}, hashesToSend = {} + let changeHashes = {}, + dependents = {}, + hashesToSend = {} for (let change of changes) { changeHashes[change.hash] = true @@ -292,7 +331,8 @@ function getChangesToSend(backend, have, need) { let changesToSend = [] for (let hash of need) { hashesToSend[hash] = true - if (!changeHashes[hash]) { // Change is not among those returned by getMissingChanges()? + if (!changeHashes[hash]) { + // Change is not among those returned by getMissingChanges()? 
const change = Backend.getChangeByHash(backend, hash) if (change) changesToSend.push(change) } @@ -317,7 +357,7 @@ function initSyncState() { } function compareArrays(a, b) { - return (a.length === b.length) && a.every((v, i) => v === b[i]) + return a.length === b.length && a.every((v, i) => v === b[i]) } /** @@ -329,10 +369,19 @@ function generateSyncMessage(backend, syncState) { throw new Error("generateSyncMessage called with no Automerge document") } if (!syncState) { - throw new Error("generateSyncMessage requires a syncState, which can be created with initSyncState()") + throw new Error( + "generateSyncMessage requires a syncState, which can be created with initSyncState()" + ) } - let { sharedHeads, lastSentHeads, theirHeads, theirNeed, theirHave, sentHashes } = syncState + let { + sharedHeads, + lastSentHeads, + theirHeads, + theirNeed, + theirHave, + sentHashes, + } = syncState const ourHeads = Backend.getHeads(backend) // Hashes to explicitly request from the remote peer: any missing dependencies of unapplied @@ -356,18 +405,28 @@ function generateSyncMessage(backend, syncState) { const lastSync = theirHave[0].lastSync if (!lastSync.every(hash => Backend.getChangeByHash(backend, hash))) { // we need to queue them to send us a fresh sync message, the one they sent is uninteligible so we don't know what they need - const resetMsg = {heads: ourHeads, need: [], have: [{ lastSync: [], bloom: new Uint8Array(0) }], changes: []} + const resetMsg = { + heads: ourHeads, + need: [], + have: [{ lastSync: [], bloom: new Uint8Array(0) }], + changes: [], + } return [syncState, encodeSyncMessage(resetMsg)] } } // XXX: we should limit ourselves to only sending a subset of all the messages, probably limited by a total message size // these changes should ideally be RLE encoded but we haven't implemented that yet. - let changesToSend = Array.isArray(theirHave) && Array.isArray(theirNeed) ? 
getChangesToSend(backend, theirHave, theirNeed) : [] + let changesToSend = + Array.isArray(theirHave) && Array.isArray(theirNeed) + ? getChangesToSend(backend, theirHave, theirNeed) + : [] // If the heads are equal, we're in sync and don't need to do anything further - const headsUnchanged = Array.isArray(lastSentHeads) && compareArrays(ourHeads, lastSentHeads) - const headsEqual = Array.isArray(theirHeads) && compareArrays(ourHeads, theirHeads) + const headsUnchanged = + Array.isArray(lastSentHeads) && compareArrays(ourHeads, lastSentHeads) + const headsEqual = + Array.isArray(theirHeads) && compareArrays(ourHeads, theirHeads) if (headsUnchanged && headsEqual && changesToSend.length === 0) { // no need to send a sync message if we know we're synced! return [syncState, null] @@ -375,12 +434,19 @@ function generateSyncMessage(backend, syncState) { // TODO: this recomputes the SHA-256 hash of each change; we should restructure this to avoid the // unnecessary recomputation - changesToSend = changesToSend.filter(change => !sentHashes[decodeChangeMeta(change, true).hash]) + changesToSend = changesToSend.filter( + change => !sentHashes[decodeChangeMeta(change, true).hash] + ) // Regular response to a sync message: send any changes that the other node // doesn't have. We leave the "have" field empty because the previous message // generated by `syncStart` already indicated what changes we have. 
- const syncMessage = {heads: ourHeads, have: ourHave, need: ourNeed, changes: changesToSend} + const syncMessage = { + heads: ourHeads, + have: ourHave, + need: ourNeed, + changes: changesToSend, + } if (changesToSend.length > 0) { sentHashes = copyObject(sentHashes) for (const change of changesToSend) { @@ -388,7 +454,10 @@ function generateSyncMessage(backend, syncState) { } } - syncState = Object.assign({}, syncState, {lastSentHeads: ourHeads, sentHashes}) + syncState = Object.assign({}, syncState, { + lastSentHeads: ourHeads, + sentHashes, + }) return [syncState, encodeSyncMessage(syncMessage)] } @@ -406,13 +475,14 @@ function generateSyncMessage(backend, syncState) { * another peer, that means that peer had those changes, and therefore we now both know about them. */ function advanceHeads(myOldHeads, myNewHeads, ourOldSharedHeads) { - const newHeads = myNewHeads.filter((head) => !myOldHeads.includes(head)) - const commonHeads = ourOldSharedHeads.filter((head) => myNewHeads.includes(head)) + const newHeads = myNewHeads.filter(head => !myOldHeads.includes(head)) + const commonHeads = ourOldSharedHeads.filter(head => + myNewHeads.includes(head) + ) const advancedHeads = [...new Set([...newHeads, ...commonHeads])].sort() return advancedHeads } - /** * Given a backend, a message message and the state of our peer, apply any changes, update what * we believe about the peer, and (if there were applied changes) produce a patch for the frontend @@ -422,10 +492,13 @@ function receiveSyncMessage(backend, oldSyncState, binaryMessage) { throw new Error("generateSyncMessage called with no Automerge document") } if (!oldSyncState) { - throw new Error("generateSyncMessage requires a syncState, which can be created with initSyncState()") + throw new Error( + "generateSyncMessage requires a syncState, which can be created with initSyncState()" + ) } - let { sharedHeads, lastSentHeads, sentHashes } = oldSyncState, patch = null + let { sharedHeads, lastSentHeads, sentHashes } = 
oldSyncState, + patch = null const message = decodeSyncMessage(binaryMessage) const beforeHeads = Backend.getHeads(backend) @@ -434,18 +507,27 @@ function receiveSyncMessage(backend, oldSyncState, binaryMessage) { // changes without applying them. The set of changes may also be incomplete if the sender decided // to break a large set of changes into chunks. if (message.changes.length > 0) { - [backend, patch] = Backend.applyChanges(backend, message.changes) - sharedHeads = advanceHeads(beforeHeads, Backend.getHeads(backend), sharedHeads) + ;[backend, patch] = Backend.applyChanges(backend, message.changes) + sharedHeads = advanceHeads( + beforeHeads, + Backend.getHeads(backend), + sharedHeads + ) } // If heads are equal, indicate we don't need to send a response message - if (message.changes.length === 0 && compareArrays(message.heads, beforeHeads)) { + if ( + message.changes.length === 0 && + compareArrays(message.heads, beforeHeads) + ) { lastSentHeads = message.heads } // If all of the remote heads are known to us, that means either our heads are equal, or we are // ahead of the remote peer. In this case, take the remote heads to be our shared heads. 
- const knownHeads = message.heads.filter(head => Backend.getChangeByHash(backend, head)) + const knownHeads = message.heads.filter(head => + Backend.getChangeByHash(backend, head) + ) if (knownHeads.length === message.heads.length) { sharedHeads = message.heads // If the remote peer has lost all its data, reset our state to perform a full resync @@ -467,14 +549,18 @@ function receiveSyncMessage(backend, oldSyncState, binaryMessage) { theirHave: message.have, // the information we need to calculate the changes they need theirHeads: message.heads, theirNeed: message.need, - sentHashes + sentHashes, } return [backend, syncState, patch] } module.exports = { - receiveSyncMessage, generateSyncMessage, - encodeSyncMessage, decodeSyncMessage, - initSyncState, encodeSyncState, decodeSyncState, - BloomFilter // BloomFilter is a private API, exported only for testing purposes + receiveSyncMessage, + generateSyncMessage, + encodeSyncMessage, + decodeSyncMessage, + initSyncState, + encodeSyncState, + decodeSyncState, + BloomFilter, // BloomFilter is a private API, exported only for testing purposes } diff --git a/javascript/test/legacy_tests.ts b/javascript/test/legacy_tests.ts new file mode 100644 index 00000000..8c2e552e --- /dev/null +++ b/javascript/test/legacy_tests.ts @@ -0,0 +1,1874 @@ +import * as assert from "assert" +import { unstable as Automerge } from "../src" +import { assertEqualsOneOf } from "./helpers" +import { decodeChange } from "./legacy/columnar" + +const UUID_PATTERN = /^[0-9a-f]{32}$/ +const OPID_PATTERN = /^[0-9]+@([0-9a-f][0-9a-f])*$/ + +// CORE FEATURES +// +// TODO - Cursors +// TODO - Tables +// TODO - on-pass load() & reconstruct change from opset +// TODO - micro-patches (needed for fully hydrated object in js) +// TODO - valueAt(heads) / GC +// +// AUTOMERGE UNSUPPORTED +// +// TODO - patchCallback + +describe("Automerge", () => { + describe("initialization ", () => { + it("should initially be an empty map", () => { + const doc = 
Automerge.init() + assert.deepStrictEqual(doc, {}) + }) + + it("should allow instantiating from an existing object", () => { + const initialState = { birds: { wrens: 3, magpies: 4 } } + const doc = Automerge.from(initialState) + assert.deepStrictEqual(doc, initialState) + }) + + it("should allow merging of an object initialized with `from`", () => { + let doc1 = Automerge.from({ cards: [] }) + let doc2 = Automerge.merge(Automerge.init(), doc1) + assert.deepStrictEqual(doc2, { cards: [] }) + }) + + it("should allow passing an actorId when instantiating from an existing object", () => { + const actorId = "1234" + let doc = Automerge.from({ foo: 1 }, actorId) + assert.strictEqual(Automerge.getActorId(doc), "1234") + }) + + it("accepts an empty object as initial state", () => { + const doc = Automerge.from({}) + assert.deepStrictEqual(doc, {}) + }) + + it("accepts an array as initial state, but converts it to an object", () => { + // @ts-ignore + const doc = Automerge.from(["a", "b", "c"]) + assert.deepStrictEqual(doc, { "0": "a", "1": "b", "2": "c" }) + }) + + it("accepts strings as initial values, but treats them as an array of characters", () => { + // @ts-ignore + const doc = Automerge.from("abc") + assert.deepStrictEqual(doc, { "0": "a", "1": "b", "2": "c" }) + }) + + it("ignores numbers provided as initial values", () => { + // @ts-ignore + const doc = Automerge.from(123) + assert.deepStrictEqual(doc, {}) + }) + + it("ignores booleans provided as initial values", () => { + // @ts-ignore + const doc1 = Automerge.from(false) + assert.deepStrictEqual(doc1, {}) + // @ts-ignore + const doc2 = Automerge.from(true) + assert.deepStrictEqual(doc2, {}) + }) + }) + + describe("sequential use", () => { + let s1: Automerge.Doc, s2: Automerge.Doc + beforeEach(() => { + s1 = Automerge.init("aabbcc") + }) + + it("should not mutate objects", () => { + s2 = Automerge.change(s1, doc => (doc.foo = "bar")) + assert.strictEqual(s1.foo, undefined) + assert.strictEqual(s2.foo, "bar") + 
}) + + it("changes should be retrievable", () => { + const change1 = Automerge.getLastLocalChange(s1) + s2 = Automerge.change(s1, doc => (doc.foo = "bar")) + const change2 = Automerge.getLastLocalChange(s2) + assert.strictEqual(change1, undefined) + const change = Automerge.decodeChange(change2!) + assert.deepStrictEqual(change, { + actor: change.actor, + deps: [], + seq: 1, + startOp: 1, + hash: change.hash, + message: null, + time: change.time, + ops: [ + { obj: "_root", key: "foo", action: "makeText", pred: [] }, + { + action: "set", + elemId: "_head", + insert: true, + obj: "1@aabbcc", + pred: [], + value: "b", + }, + { + action: "set", + elemId: "2@aabbcc", + insert: true, + obj: "1@aabbcc", + pred: [], + value: "a", + }, + { + action: "set", + elemId: "3@aabbcc", + insert: true, + obj: "1@aabbcc", + pred: [], + value: "r", + }, + ], + }) + }) + + it("should not register any conflicts on repeated assignment", () => { + assert.strictEqual(Automerge.getConflicts(s1, "foo"), undefined) + s1 = Automerge.change(s1, "change", doc => (doc.foo = "one")) + assert.strictEqual(Automerge.getConflicts(s1, "foo"), undefined) + s1 = Automerge.change(s1, "change", doc => (doc.foo = "two")) + assert.strictEqual(Automerge.getConflicts(s1, "foo"), undefined) + }) + + describe("changes", () => { + it("should group several changes", () => { + s2 = Automerge.change(s1, "change message", doc => { + doc.first = "one" + assert.strictEqual(doc.first, "one") + doc.second = "two" + assert.deepStrictEqual(doc, { + first: "one", + second: "two", + }) + }) + assert.deepStrictEqual(s1, {}) + assert.deepStrictEqual(s2, { first: "one", second: "two" }) + }) + + it("should freeze objects if desired", () => { + s1 = Automerge.init({ freeze: true }) + s2 = Automerge.change(s1, doc => (doc.foo = "bar")) + try { + // @ts-ignore + s2.foo = "lemon" + } catch (e) {} + assert.strictEqual(s2.foo, "bar") + + let deleted = false + try { + // @ts-ignore + deleted = delete s2.foo + } catch (e) {} + 
assert.strictEqual(s2.foo, "bar") + assert.strictEqual(deleted, false) + + Automerge.change(s2, () => { + try { + // @ts-ignore + s2.foo = "lemon" + } catch (e) {} + assert.strictEqual(s2.foo, "bar") + }) + + assert.throws(() => { + Object.assign(s2, { x: 4 }) + }) + assert.strictEqual(s2.x, undefined) + }) + + it("should allow repeated reading and writing of values", () => { + s2 = Automerge.change(s1, "change message", doc => { + doc.value = "a" + assert.strictEqual(doc.value, "a") + doc.value = "b" + doc.value = "c" + assert.strictEqual(doc.value, "c") + }) + assert.deepStrictEqual(s1, {}) + assert.deepStrictEqual(s2, { value: "c" }) + }) + + it("should not record conflicts when writing the same field several times within one change", () => { + s1 = Automerge.change(s1, "change message", doc => { + doc.value = "a" + doc.value = "b" + doc.value = "c" + }) + assert.strictEqual(s1.value, "c") + assert.strictEqual(Automerge.getConflicts(s1, "value"), undefined) + }) + + it("should return the unchanged state object if nothing changed", () => { + s2 = Automerge.change(s1, () => {}) + assert.strictEqual(s2, s1) + }) + + it("should ignore field updates that write the existing value", () => { + s1 = Automerge.change(s1, doc => (doc.field = 123)) + s2 = Automerge.change(s1, doc => (doc.field = 123)) + assert.strictEqual(s2, s1) + }) + + it("should not ignore field updates that resolve a conflict", () => { + s2 = Automerge.merge(Automerge.init(), s1) + s1 = Automerge.change(s1, doc => (doc.field = 123)) + s2 = Automerge.change(s2, doc => (doc.field = 321)) + s1 = Automerge.merge(s1, s2) + assert.strictEqual( + Object.keys(Automerge.getConflicts(s1, "field")!).length, + 2 + ) + const resolved = Automerge.change(s1, doc => (doc.field = s1.field)) + assert.notStrictEqual(resolved, s1) + assert.deepStrictEqual(resolved, { field: s1.field }) + assert.strictEqual(Automerge.getConflicts(resolved, "field"), undefined) + }) + + it("should ignore list element updates that write the 
existing value", () => { + s1 = Automerge.change(s1, doc => (doc.list = [123])) + s2 = Automerge.change(s1, doc => (doc.list[0] = 123)) + assert.strictEqual(s2, s1) + }) + + it("should not ignore list element updates that resolve a conflict", () => { + s1 = Automerge.change(s1, doc => (doc.list = [1])) + s2 = Automerge.merge(Automerge.init(), s1) + s1 = Automerge.change(s1, doc => (doc.list[0] = 123)) + s2 = Automerge.change(s2, doc => (doc.list[0] = 321)) + s1 = Automerge.merge(s1, s2) + assert.deepStrictEqual(Automerge.getConflicts(s1.list, 0), { + [`3@${Automerge.getActorId(s1)}`]: 123, + [`3@${Automerge.getActorId(s2)}`]: 321, + }) + const resolved = Automerge.change(s1, doc => (doc.list[0] = s1.list[0])) + assert.deepStrictEqual(resolved, s1) + assert.notStrictEqual(resolved, s1) + assert.strictEqual(Automerge.getConflicts(resolved.list, 0), undefined) + }) + + it("should sanity-check arguments", () => { + s1 = Automerge.change(s1, doc => (doc.nested = {})) + assert.throws(() => { + // @ts-ignore + Automerge.change({}, doc => (doc.foo = "bar")) + }, /must be the document root/) + assert.throws(() => { + // @ts-ignore + Automerge.change(s1.nested, doc => (doc.foo = "bar")) + }, /must be the document root/) + }) + + it("should not allow nested change blocks", () => { + assert.throws(() => { + Automerge.change(s1, doc1 => { + Automerge.change(doc1, doc2 => { + // @ts-ignore + doc2.foo = "bar" + }) + }) + }, /Calls to Automerge.change cannot be nested/) + assert.throws(() => { + s1 = Automerge.change(s1, doc1 => { + s2 = Automerge.change(s1, doc2 => (doc2.two = 2)) + doc1.one = 1 + }) + }, /Attempting to change an outdated document/) + }) + + it("should not allow the same base document to be used for multiple changes", () => { + assert.throws(() => { + Automerge.change(s1, doc => (doc.one = 1)) + Automerge.change(s1, doc => (doc.two = 2)) + }, /Attempting to change an outdated document/) + }) + + it("should allow a document to be cloned", () => { + s1 = 
Automerge.change(s1, doc => (doc.zero = 0)) + s2 = Automerge.clone(s1) + s1 = Automerge.change(s1, doc => (doc.one = 1)) + s2 = Automerge.change(s2, doc => (doc.two = 2)) + assert.deepStrictEqual(s1, { zero: 0, one: 1 }) + assert.deepStrictEqual(s2, { zero: 0, two: 2 }) + Automerge.free(s1) + Automerge.free(s2) + }) + + it("should work with Object.assign merges", () => { + s1 = Automerge.change(s1, doc1 => { + doc1.stuff = { foo: "bar", baz: "blur" } + }) + s1 = Automerge.change(s1, doc1 => { + doc1.stuff = Object.assign({}, doc1.stuff, { baz: "updated!" }) + }) + assert.deepStrictEqual(s1, { stuff: { foo: "bar", baz: "updated!" } }) + }) + + it("should support Date objects in maps", () => { + const now = new Date() + s1 = Automerge.change(s1, doc => (doc.now = now)) + let changes = Automerge.getAllChanges(s1) + ;[s2] = Automerge.applyChanges(Automerge.init(), changes) + assert.strictEqual(s2.now instanceof Date, true) + assert.strictEqual(s2.now.getTime(), now.getTime()) + }) + + it("should support Date objects in lists", () => { + const now = new Date() + s1 = Automerge.change(s1, doc => (doc.list = [now])) + let changes = Automerge.getAllChanges(s1) + ;[s2] = Automerge.applyChanges(Automerge.init(), changes) + assert.strictEqual(s2.list[0] instanceof Date, true) + assert.strictEqual(s2.list[0].getTime(), now.getTime()) + }) + + it("should call patchCallback if supplied", () => { + const callbacks: Array<{ + patches: Array + before: Automerge.Doc + after: Automerge.Doc + }> = [] + const s2 = Automerge.change( + s1, + { + patchCallback: (patches, before, after) => + callbacks.push({ patches, before, after }), + }, + doc => { + doc.birds = ["Goldfinch"] + } + ) + assert.strictEqual(callbacks.length, 1) + assert.deepStrictEqual(callbacks[0].patches[0], { + action: "put", + path: ["birds"], + value: [], + }) + assert.deepStrictEqual(callbacks[0].patches[1], { + action: "insert", + path: ["birds", 0], + values: [""], + }) + 
assert.deepStrictEqual(callbacks[0].patches[2], { + action: "splice", + path: ["birds", 0, 0], + value: "Goldfinch", + }) + assert.strictEqual(callbacks[0].before, s1) + assert.strictEqual(callbacks[0].after, s2) + }) + + it("should call a patchCallback set up on document initialisation", () => { + const callbacks: Array<{ + patches: Array + before: Automerge.Doc + after: Automerge.Doc + }> = [] + s1 = Automerge.init({ + patchCallback: (patches, before, after) => + callbacks.push({ patches, before, after }), + }) + const s2 = Automerge.change(s1, doc => (doc.bird = "Goldfinch")) + assert.strictEqual(callbacks.length, 1) + assert.deepStrictEqual(callbacks[0].patches[0], { + action: "put", + path: ["bird"], + value: "", + }) + assert.deepStrictEqual(callbacks[0].patches[1], { + action: "splice", + path: ["bird", 0], + value: "Goldfinch", + }) + assert.strictEqual(callbacks[0].before, s1) + assert.strictEqual(callbacks[0].after, s2) + }) + }) + + describe("emptyChange()", () => { + it("should append an empty change to the history", () => { + s1 = Automerge.change(s1, "first change", doc => (doc.field = 123)) + s2 = Automerge.emptyChange(s1, "empty change") + assert.notStrictEqual(s2, s1) + assert.deepStrictEqual(s2, s1) + assert.deepStrictEqual( + Automerge.getHistory(s2).map(state => state.change.message), + ["first change", "empty change"] + ) + }) + + it("should reference dependencies", () => { + s1 = Automerge.change(s1, doc => (doc.field = 123)) + s2 = Automerge.merge(Automerge.init(), s1) + s2 = Automerge.change(s2, doc => (doc.other = "hello")) + s1 = Automerge.emptyChange(Automerge.merge(s1, s2)) + const history = Automerge.getHistory(s1) + const emptyChange = history[2].change + assert.deepStrictEqual( + emptyChange.deps, + [history[0].change.hash, history[1].change.hash].sort() + ) + assert.deepStrictEqual(emptyChange.ops, []) + }) + }) + + describe("root object", () => { + it("should handle single-property assignment", () => { + s1 = Automerge.change(s1, 
"set bar", doc => (doc.foo = "bar")) + s1 = Automerge.change(s1, "set zap", doc => (doc.zip = "zap")) + assert.strictEqual(s1.foo, "bar") + assert.strictEqual(s1.zip, "zap") + assert.deepStrictEqual(s1, { foo: "bar", zip: "zap" }) + }) + + it("should allow floating-point values", () => { + s1 = Automerge.change(s1, doc => (doc.number = 1589032171.1)) + assert.strictEqual(s1.number, 1589032171.1) + }) + + it("should handle multi-property assignment", () => { + s1 = Automerge.change(s1, "multi-assign", doc => { + Object.assign(doc, { foo: "bar", answer: 42 }) + }) + assert.strictEqual(s1.foo, "bar") + assert.strictEqual(s1.answer, 42) + assert.deepStrictEqual(s1, { foo: "bar", answer: 42 }) + }) + + it("should handle root property deletion", () => { + s1 = Automerge.change(s1, "set foo", doc => { + doc.foo = "bar" + doc.something = null + }) + s1 = Automerge.change(s1, "del foo", doc => { + delete doc.foo + }) + assert.strictEqual(s1.foo, undefined) + assert.strictEqual(s1.something, null) + assert.deepStrictEqual(s1, { something: null }) + }) + + it("should follow JS delete behavior", () => { + s1 = Automerge.change(s1, "set foo", doc => { + doc.foo = "bar" + }) + let deleted: any + s1 = Automerge.change(s1, "del foo", doc => { + deleted = delete doc.foo + }) + assert.strictEqual(deleted, true) + let deleted2: any + assert.doesNotThrow(() => { + s1 = Automerge.change(s1, "del baz", doc => { + deleted2 = delete doc.baz + }) + }) + assert.strictEqual(deleted2, true) + }) + + it("should allow the type of a property to be changed", () => { + s1 = Automerge.change(s1, "set number", doc => (doc.prop = 123)) + assert.strictEqual(s1.prop, 123) + s1 = Automerge.change(s1, "set string", doc => (doc.prop = "123")) + assert.strictEqual(s1.prop, "123") + s1 = Automerge.change(s1, "set null", doc => (doc.prop = null)) + assert.strictEqual(s1.prop, null) + s1 = Automerge.change(s1, "set bool", doc => (doc.prop = true)) + assert.strictEqual(s1.prop, true) + }) + + it("should 
require property names to be valid", () => { + assert.throws(() => { + Automerge.change(s1, "foo", doc => (doc[""] = "x")) + }, /must not be an empty string/) + }) + + it("should not allow assignment of unsupported datatypes", () => { + Automerge.change(s1, doc => { + assert.throws(() => { + doc.foo = undefined + }, /Unsupported type of value: undefined/) + assert.throws(() => { + doc.foo = { prop: undefined } + }, /Unsupported type of value: undefined/) + assert.throws(() => { + doc.foo = () => {} + }, /Unsupported type of value: function/) + assert.throws(() => { + doc.foo = Symbol("foo") + }, /Unsupported type of value: symbol/) + }) + }) + }) + + describe("nested maps", () => { + it("should assign an objectId to nested maps", () => { + s1 = Automerge.change(s1, doc => { + doc.nested = {} + }) + Automerge.getObjectId(s1.nested) + assert.strictEqual( + OPID_PATTERN.test(Automerge.getObjectId(s1.nested)!), + true + ) + assert.notEqual(Automerge.getObjectId(s1.nested), "_root") + }) + + it("should handle assignment of a nested property", () => { + s1 = Automerge.change(s1, "first change", doc => { + doc.nested = {} + doc.nested.foo = "bar" + }) + s1 = Automerge.change(s1, "second change", doc => { + doc.nested.one = 1 + }) + assert.deepStrictEqual(s1, { nested: { foo: "bar", one: 1 } }) + assert.deepStrictEqual(s1.nested, { foo: "bar", one: 1 }) + assert.strictEqual(s1.nested.foo, "bar") + assert.strictEqual(s1.nested.one, 1) + }) + + it("should handle assignment of an object literal", () => { + s1 = Automerge.change(s1, doc => { + doc.textStyle = { bold: false, fontSize: 12 } + }) + assert.deepStrictEqual(s1, { + textStyle: { bold: false, fontSize: 12 }, + }) + assert.deepStrictEqual(s1.textStyle, { bold: false, fontSize: 12 }) + assert.strictEqual(s1.textStyle.bold, false) + assert.strictEqual(s1.textStyle.fontSize, 12) + }) + + it("should handle assignment of multiple nested properties", () => { + s1 = Automerge.change(s1, doc => { + doc.textStyle = { bold: 
false, fontSize: 12 } + Object.assign(doc.textStyle, { typeface: "Optima", fontSize: 14 }) + }) + assert.strictEqual(s1.textStyle.typeface, "Optima") + assert.strictEqual(s1.textStyle.bold, false) + assert.strictEqual(s1.textStyle.fontSize, 14) + assert.deepStrictEqual(s1.textStyle, { + typeface: "Optima", + bold: false, + fontSize: 14, + }) + }) + + it("should handle arbitrary-depth nesting", () => { + s1 = Automerge.change(s1, doc => { + doc.a = { b: { c: { d: { e: { f: { g: "h" } } } } } } + }) + s1 = Automerge.change(s1, doc => { + doc.a.b.c.d.e.f.i = "j" + }) + assert.deepStrictEqual(s1, { + a: { b: { c: { d: { e: { f: { g: "h", i: "j" } } } } } }, + }) + assert.strictEqual(s1.a.b.c.d.e.f.g, "h") + assert.strictEqual(s1.a.b.c.d.e.f.i, "j") + }) + + it("should allow an old object to be replaced with a new one", () => { + s1 = Automerge.change(s1, "change 1", doc => { + doc.myPet = { species: "dog", legs: 4, breed: "dachshund" } + }) + let s2 = Automerge.change(s1, "change 2", doc => { + doc.myPet = { + species: "koi", + variety: "紅白", + colors: { red: true, white: true, black: false }, + } + }) + assert.deepStrictEqual(s1.myPet, { + species: "dog", + legs: 4, + breed: "dachshund", + }) + assert.strictEqual(s1.myPet.breed, "dachshund") + assert.deepStrictEqual(s2.myPet, { + species: "koi", + variety: "紅白", + colors: { red: true, white: true, black: false }, + }) + // @ts-ignore + assert.strictEqual(s2.myPet.breed, undefined) + assert.strictEqual(s2.myPet.variety, "紅白") + }) + + it("should allow fields to be changed between primitive and nested map", () => { + s1 = Automerge.change(s1, doc => (doc.color = "#ff7f00")) + assert.strictEqual(s1.color, "#ff7f00") + s1 = Automerge.change( + s1, + doc => (doc.color = { red: 255, green: 127, blue: 0 }) + ) + assert.deepStrictEqual(s1.color, { red: 255, green: 127, blue: 0 }) + s1 = Automerge.change(s1, doc => (doc.color = "#ff7f00")) + assert.strictEqual(s1.color, "#ff7f00") + }) + + it("should not allow several 
references to the same map object", () => { + s1 = Automerge.change(s1, doc => (doc.object = {})) + assert.throws(() => { + Automerge.change(s1, doc => { + doc.x = doc.object + }) + }, /Cannot create a reference to an existing document object/) + assert.throws(() => { + Automerge.change(s1, doc => { + doc.x = s1.object + }) + }, /Cannot create a reference to an existing document object/) + assert.throws(() => { + Automerge.change(s1, doc => { + doc.x = {} + doc.y = doc.x + }) + }, /Cannot create a reference to an existing document object/) + }) + + it("should not allow object-copying idioms", () => { + s1 = Automerge.change(s1, doc => { + doc.items = [ + { id: "id1", name: "one" }, + { id: "id2", name: "two" }, + ] + }) + // People who have previously worked with immutable state in JavaScript may be tempted + // to use idioms like this, which don't work well with Automerge -- see e.g. + // https://github.com/automerge/automerge/issues/260 + assert.throws(() => { + Automerge.change(s1, doc => { + doc.items = [...doc.items, { id: "id3", name: "three" }] + }) + }, /Cannot create a reference to an existing document object/) + }) + + it("should handle deletion of properties within a map", () => { + s1 = Automerge.change(s1, "set style", doc => { + doc.textStyle = { typeface: "Optima", bold: false, fontSize: 12 } + }) + s1 = Automerge.change(s1, "non-bold", doc => delete doc.textStyle.bold) + assert.strictEqual(s1.textStyle.bold, undefined) + assert.deepStrictEqual(s1.textStyle, { + typeface: "Optima", + fontSize: 12, + }) + }) + + it("should handle deletion of references to a map", () => { + s1 = Automerge.change(s1, "make rich text doc", doc => { + Object.assign(doc, { + title: "Hello", + textStyle: { typeface: "Optima", fontSize: 12 }, + }) + }) + s1 = Automerge.change(s1, doc => delete doc.textStyle) + assert.strictEqual(s1.textStyle, undefined) + assert.deepStrictEqual(s1, { title: "Hello" }) + }) + + it("should validate field names", () => { + s1 = 
Automerge.change(s1, doc => (doc.nested = {})) + assert.throws(() => { + Automerge.change(s1, doc => (doc.nested[""] = "x")) + }, /must not be an empty string/) + assert.throws(() => { + Automerge.change(s1, doc => (doc.nested = { "": "x" })) + }, /must not be an empty string/) + }) + }) + + describe("lists", () => { + it("should allow elements to be inserted", () => { + s1 = Automerge.change(s1, doc => (doc.noodles = [])) + s1 = Automerge.change(s1, doc => + doc.noodles.insertAt(0, "udon", "soba") + ) + s1 = Automerge.change(s1, doc => doc.noodles.insertAt(1, "ramen")) + assert.deepStrictEqual(s1, { noodles: ["udon", "ramen", "soba"] }) + assert.deepStrictEqual(s1.noodles, ["udon", "ramen", "soba"]) + assert.strictEqual(s1.noodles[0], "udon") + assert.strictEqual(s1.noodles[1], "ramen") + assert.strictEqual(s1.noodles[2], "soba") + assert.strictEqual(s1.noodles.length, 3) + }) + + it("should handle assignment of a list literal", () => { + s1 = Automerge.change( + s1, + doc => (doc.noodles = ["udon", "ramen", "soba"]) + ) + assert.deepStrictEqual(s1, { noodles: ["udon", "ramen", "soba"] }) + assert.deepStrictEqual(s1.noodles, ["udon", "ramen", "soba"]) + assert.strictEqual(s1.noodles[0], "udon") + assert.strictEqual(s1.noodles[1], "ramen") + assert.strictEqual(s1.noodles[2], "soba") + assert.strictEqual(s1.noodles[3], undefined) + assert.strictEqual(s1.noodles.length, 3) + }) + + it("should only allow numeric indexes", () => { + s1 = Automerge.change( + s1, + doc => (doc.noodles = ["udon", "ramen", "soba"]) + ) + s1 = Automerge.change(s1, doc => (doc.noodles[1] = "Ramen!")) + assert.strictEqual(s1.noodles[1], "Ramen!") + s1 = Automerge.change(s1, doc => (doc.noodles["1"] = "RAMEN!!!")) + assert.strictEqual(s1.noodles[1], "RAMEN!!!") + assert.throws(() => { + Automerge.change(s1, doc => (doc.noodles.favourite = "udon")) + }, /list index must be a number/) + assert.throws(() => { + Automerge.change(s1, doc => (doc.noodles[""] = "udon")) + }, /list index must be a 
number/) + assert.throws(() => { + Automerge.change(s1, doc => (doc.noodles["1e6"] = "udon")) + }, /list index must be a number/) + }) + + it("should handle deletion of list elements", () => { + s1 = Automerge.change( + s1, + doc => (doc.noodles = ["udon", "ramen", "soba"]) + ) + s1 = Automerge.change(s1, doc => delete doc.noodles[1]) + assert.deepStrictEqual(s1.noodles, ["udon", "soba"]) + s1 = Automerge.change(s1, doc => doc.noodles.deleteAt(1)) + assert.deepStrictEqual(s1.noodles, ["udon"]) + assert.strictEqual(s1.noodles[0], "udon") + assert.strictEqual(s1.noodles[1], undefined) + assert.strictEqual(s1.noodles[2], undefined) + assert.strictEqual(s1.noodles.length, 1) + }) + + it("should handle assignment of individual list indexes", () => { + s1 = Automerge.change( + s1, + doc => (doc.japaneseFood = ["udon", "ramen", "soba"]) + ) + s1 = Automerge.change(s1, doc => (doc.japaneseFood[1] = "sushi")) + assert.deepStrictEqual(s1.japaneseFood, ["udon", "sushi", "soba"]) + assert.strictEqual(s1.japaneseFood[0], "udon") + assert.strictEqual(s1.japaneseFood[1], "sushi") + assert.strictEqual(s1.japaneseFood[2], "soba") + assert.strictEqual(s1.japaneseFood[3], undefined) + assert.strictEqual(s1.japaneseFood.length, 3) + }) + + it("concurrent edits insert in reverse actorid order if counters equal", () => { + s1 = Automerge.init("aaaa") + s2 = Automerge.init("bbbb") + s1 = Automerge.change(s1, doc => (doc.list = [])) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.list.splice(0, 0, "2@aaaa")) + s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "2@bbbb")) + s2 = Automerge.merge(s2, s1) + assert.deepStrictEqual(Automerge.toJS(s2).list, ["2@bbbb", "2@aaaa"]) + }) + + it("concurrent edits insert in reverse counter order if different", () => { + s1 = Automerge.init("aaaa") + s2 = Automerge.init("bbbb") + s1 = Automerge.change(s1, doc => (doc.list = [])) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.list.splice(0, 0, "2@aaaa")) 
+ s2 = Automerge.change(s2, doc => (doc.foo = "2@bbbb")) + s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "3@bbbb")) + s2 = Automerge.merge(s2, s1) + assert.deepStrictEqual(s2.list, ["3@bbbb", "2@aaaa"]) + }) + + it("should treat out-by-one assignment as insertion", () => { + s1 = Automerge.change(s1, doc => (doc.japaneseFood = ["udon"])) + s1 = Automerge.change(s1, doc => (doc.japaneseFood[1] = "sushi")) + assert.deepStrictEqual(s1.japaneseFood, ["udon", "sushi"]) + assert.strictEqual(s1.japaneseFood[0], "udon") + assert.strictEqual(s1.japaneseFood[1], "sushi") + assert.strictEqual(s1.japaneseFood[2], undefined) + assert.strictEqual(s1.japaneseFood.length, 2) + }) + + it("should not allow out-of-range assignment", () => { + s1 = Automerge.change(s1, doc => (doc.japaneseFood = ["udon"])) + assert.throws(() => { + Automerge.change(s1, doc => (doc.japaneseFood[4] = "ramen")) + }, /is out of bounds/) + }) + + it("should allow bulk assignment of multiple list indexes", () => { + s1 = Automerge.change( + s1, + doc => (doc.noodles = ["udon", "ramen", "soba"]) + ) + s1 = Automerge.change(s1, doc => + Object.assign(doc.noodles, { 0: "うどん", 2: "そば" }) + ) + assert.deepStrictEqual(s1.noodles, ["うどん", "ramen", "そば"]) + assert.strictEqual(s1.noodles[0], "うどん") + assert.strictEqual(s1.noodles[1], "ramen") + assert.strictEqual(s1.noodles[2], "そば") + assert.strictEqual(s1.noodles.length, 3) + }) + + it("should handle nested objects", () => { + s1 = Automerge.change( + s1, + doc => + (doc.noodles = [{ type: "ramen", dishes: ["tonkotsu", "shoyu"] }]) + ) + s1 = Automerge.change(s1, doc => + doc.noodles.push({ type: "udon", dishes: ["tempura udon"] }) + ) + s1 = Automerge.change(s1, doc => doc.noodles[0].dishes.push("miso")) + assert.deepStrictEqual(s1, { + noodles: [ + { type: "ramen", dishes: ["tonkotsu", "shoyu", "miso"] }, + { type: "udon", dishes: ["tempura udon"] }, + ], + }) + assert.deepStrictEqual(s1.noodles[0], { + type: "ramen", + dishes: ["tonkotsu", "shoyu", 
"miso"], + }) + assert.deepStrictEqual(s1.noodles[1], { + type: "udon", + dishes: ["tempura udon"], + }) + }) + + it("should handle nested lists", () => { + s1 = Automerge.change( + s1, + doc => (doc.noodleMatrix = [["ramen", "tonkotsu", "shoyu"]]) + ) + s1 = Automerge.change(s1, doc => + doc.noodleMatrix.push(["udon", "tempura udon"]) + ) + s1 = Automerge.change(s1, doc => doc.noodleMatrix[0].push("miso")) + assert.deepStrictEqual(s1.noodleMatrix, [ + ["ramen", "tonkotsu", "shoyu", "miso"], + ["udon", "tempura udon"], + ]) + assert.deepStrictEqual(s1.noodleMatrix[0], [ + "ramen", + "tonkotsu", + "shoyu", + "miso", + ]) + assert.deepStrictEqual(s1.noodleMatrix[1], ["udon", "tempura udon"]) + }) + + it("should handle deep nesting", () => { + s1 = Automerge.change( + s1, + doc => + (doc.nesting = { + maps: { m1: { m2: { foo: "bar", baz: {} }, m2a: {} } }, + lists: [ + [1, 2, 3], + [[3, 4, 5, [6]], 7], + ], + mapsinlists: [{ foo: "bar" }, [{ bar: "baz" }]], + listsinmaps: { foo: [1, 2, 3], bar: [[{ baz: "123" }]] }, + }) + ) + s1 = Automerge.change(s1, doc => { + doc.nesting.maps.m1a = "123" + doc.nesting.maps.m1.m2.baz.xxx = "123" + delete doc.nesting.maps.m1.m2a + doc.nesting.lists.shift() + doc.nesting.lists[0][0].pop() + doc.nesting.lists[0][0].push(100) + doc.nesting.mapsinlists[0].foo = "baz" + doc.nesting.mapsinlists[1][0].foo = "bar" + delete doc.nesting.mapsinlists[1] + doc.nesting.listsinmaps.foo.push(4) + doc.nesting.listsinmaps.bar[0][0].baz = "456" + delete doc.nesting.listsinmaps.bar + }) + assert.deepStrictEqual(s1, { + nesting: { + maps: { + m1: { m2: { foo: "bar", baz: { xxx: "123" } } }, + m1a: "123", + }, + lists: [[[3, 4, 5, 100], 7]], + mapsinlists: [{ foo: "baz" }], + listsinmaps: { foo: [1, 2, 3, 4] }, + }, + }) + }) + + it("should handle replacement of the entire list", () => { + s1 = Automerge.change( + s1, + doc => (doc.noodles = ["udon", "soba", "ramen"]) + ) + s1 = Automerge.change( + s1, + doc => (doc.japaneseNoodles = doc.noodles.slice()) 
+ ) + s1 = Automerge.change(s1, doc => (doc.noodles = ["wonton", "pho"])) + assert.deepStrictEqual(s1, { + noodles: ["wonton", "pho"], + japaneseNoodles: ["udon", "soba", "ramen"], + }) + assert.deepStrictEqual(s1.noodles, ["wonton", "pho"]) + assert.strictEqual(s1.noodles[0], "wonton") + assert.strictEqual(s1.noodles[1], "pho") + assert.strictEqual(s1.noodles[2], undefined) + assert.strictEqual(s1.noodles.length, 2) + }) + + it("should allow assignment to change the type of a list element", () => { + s1 = Automerge.change( + s1, + doc => (doc.noodles = ["udon", "soba", "ramen"]) + ) + assert.deepStrictEqual(s1.noodles, ["udon", "soba", "ramen"]) + s1 = Automerge.change( + s1, + doc => (doc.noodles[1] = { type: "soba", options: ["hot", "cold"] }) + ) + assert.deepStrictEqual(s1.noodles, [ + "udon", + { type: "soba", options: ["hot", "cold"] }, + "ramen", + ]) + s1 = Automerge.change( + s1, + doc => (doc.noodles[1] = ["hot soba", "cold soba"]) + ) + assert.deepStrictEqual(s1.noodles, [ + "udon", + ["hot soba", "cold soba"], + "ramen", + ]) + s1 = Automerge.change(s1, doc => (doc.noodles[1] = "soba is the best")) + assert.deepStrictEqual(s1.noodles, [ + "udon", + "soba is the best", + "ramen", + ]) + }) + + it("should allow list creation and assignment in the same change callback", () => { + s1 = Automerge.change(Automerge.init(), doc => { + doc.letters = ["a", "b", "c"] + doc.letters[1] = "d" + }) + assert.strictEqual(s1.letters[1], "d") + }) + + it("should allow adding and removing list elements in the same change callback", () => { + let s1 = Automerge.change( + Automerge.init<{ noodles: Array }>(), + // @ts-ignore + doc => (doc.noodles = []) + ) + s1 = Automerge.change(s1, doc => { + doc.noodles.push("udon") + // @ts-ignore + doc.noodles.deleteAt(0) + }) + assert.deepStrictEqual(s1, { noodles: [] }) + // do the add-remove cycle twice, test for #151 (https://github.com/automerge/automerge/issues/151) + s1 = Automerge.change(s1, doc => { + // @ts-ignore + 
doc.noodles.push("soba") + // @ts-ignore + doc.noodles.deleteAt(0) + }) + assert.deepStrictEqual(s1, { noodles: [] }) + }) + + it("should handle arbitrary-depth nesting", () => { + s1 = Automerge.change( + s1, + doc => (doc.maze = [[[[[[[["noodles", ["here"]]]]]]]]]) + ) + s1 = Automerge.change(s1, doc => + doc.maze[0][0][0][0][0][0][0][1].unshift("found") + ) + assert.deepStrictEqual(s1.maze, [ + [[[[[[["noodles", ["found", "here"]]]]]]]], + ]) + assert.deepStrictEqual(s1.maze[0][0][0][0][0][0][0][1][1], "here") + s2 = Automerge.load(Automerge.save(s1)) + assert.deepStrictEqual(s1, s2) + }) + + it("should not allow several references to the same list object", () => { + s1 = Automerge.change(s1, doc => (doc.list = [])) + assert.throws(() => { + Automerge.change(s1, doc => { + doc.x = doc.list + }) + }, /Cannot create a reference to an existing document object/) + assert.throws(() => { + Automerge.change(s1, doc => { + doc.x = s1.list + }) + }, /Cannot create a reference to an existing document object/) + assert.throws(() => { + Automerge.change(s1, doc => { + doc.x = [] + doc.y = doc.x + }) + }, /Cannot create a reference to an existing document object/) + }) + }) + + describe("counters", () => { + // counter + it("should allow deleting counters from maps", () => { + const s1 = Automerge.change( + Automerge.init(), + doc => (doc.birds = { wrens: new Automerge.Counter(1) }) + ) + const s2 = Automerge.change(s1, doc => doc.birds.wrens.increment(2)) + const s3 = Automerge.change(s2, doc => delete doc.birds.wrens) + assert.deepStrictEqual(s2, { + birds: { wrens: new Automerge.Counter(3) }, + }) + assert.deepStrictEqual(s3, { birds: {} }) + }) + + // counter + /* + it('should not allow deleting counters from lists', () => { + const s1 = Automerge.change(Automerge.init(), doc => doc.recordings = [new Automerge.Counter(1)]) + const s2 = Automerge.change(s1, doc => doc.recordings[0].increment(2)) + assert.deepStrictEqual(s2, {recordings: [new Automerge.Counter(3)]}) + 
assert.throws(() => { Automerge.change(s2, doc => doc.recordings.deleteAt(0)) }, /Unsupported operation/) + }) + */ + }) + }) + + describe("concurrent use", () => { + let s1: Automerge.Doc, + s2: Automerge.Doc, + s3: Automerge.Doc, + s4: Automerge.Doc + beforeEach(() => { + s1 = Automerge.init() + s2 = Automerge.init() + s3 = Automerge.init() + s4 = Automerge.init() + }) + + it("should merge concurrent updates of different properties", () => { + s1 = Automerge.change(s1, doc => (doc.foo = "bar")) + s2 = Automerge.change(s2, doc => (doc.hello = "world")) + s3 = Automerge.merge(s1, s2) + assert.strictEqual(s3.foo, "bar") + assert.strictEqual(s3.hello, "world") + assert.deepStrictEqual(s3, { foo: "bar", hello: "world" }) + assert.strictEqual(Automerge.getConflicts(s3, "foo"), undefined) + assert.strictEqual(Automerge.getConflicts(s3, "hello"), undefined) + s4 = Automerge.load(Automerge.save(s3)) + assert.deepEqual(s3, s4) + }) + + it("should add concurrent increments of the same property", () => { + s1 = Automerge.change(s1, doc => (doc.counter = new Automerge.Counter())) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.counter.increment()) + s2 = Automerge.change(s2, doc => doc.counter.increment(2)) + s3 = Automerge.merge(s1, s2) + assert.strictEqual(s1.counter.value, 1) + assert.strictEqual(s2.counter.value, 2) + assert.strictEqual(s3.counter.value, 3) + assert.strictEqual(Automerge.getConflicts(s3, "counter"), undefined) + s4 = Automerge.load(Automerge.save(s3)) + assert.deepEqual(s3, s4) + }) + + it("should add increments only to the values they precede", () => { + s1 = Automerge.change(s1, doc => (doc.counter = new Automerge.Counter(0))) + s1 = Automerge.change(s1, doc => doc.counter.increment()) + s2 = Automerge.change( + s2, + doc => (doc.counter = new Automerge.Counter(100)) + ) + s2 = Automerge.change(s2, doc => doc.counter.increment(3)) + s3 = Automerge.merge(s1, s2) + if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) { + 
assert.deepStrictEqual(s3, { counter: new Automerge.Counter(1) }) + } else { + assert.deepStrictEqual(s3, { counter: new Automerge.Counter(103) }) + } + assert.deepStrictEqual(Automerge.getConflicts(s3, "counter"), { + [`1@${Automerge.getActorId(s1)}`]: new Automerge.Counter(1), + [`1@${Automerge.getActorId(s2)}`]: new Automerge.Counter(103), + }) + s4 = Automerge.load(Automerge.save(s3)) + assert.deepEqual(s3, s4) + }) + + it("should detect concurrent updates of the same field", () => { + s1 = Automerge.change(s1, doc => (doc.field = "one")) + s2 = Automerge.change(s2, doc => (doc.field = "two")) + s3 = Automerge.merge(s1, s2) + if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) { + assert.deepStrictEqual(s3, { field: "one" }) + } else { + assert.deepStrictEqual(s3, { field: "two" }) + } + assert.deepStrictEqual(Automerge.getConflicts(s3, "field"), { + [`1@${Automerge.getActorId(s1)}`]: "one", + [`1@${Automerge.getActorId(s2)}`]: "two", + }) + }) + + it("should detect concurrent updates of the same list element", () => { + s1 = Automerge.change(s1, doc => (doc.birds = ["finch"])) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => (doc.birds[0] = "greenfinch")) + s2 = Automerge.change(s2, doc => (doc.birds[0] = "goldfinch_")) + s3 = Automerge.merge(s1, s2) + if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) { + assert.deepStrictEqual(s3.birds, ["greenfinch"]) + } else { + assert.deepStrictEqual(s3.birds, ["goldfinch_"]) + } + assert.deepStrictEqual(Automerge.getConflicts(s3.birds, 0), { + [`8@${Automerge.getActorId(s1)}`]: "greenfinch", + [`8@${Automerge.getActorId(s2)}`]: "goldfinch_", + }) + }) + + it("should handle assignment conflicts of different types", () => { + s1 = Automerge.change(s1, doc => (doc.field = "string")) + s2 = Automerge.change(s2, doc => (doc.field = ["list"])) + s3 = Automerge.change(s3, doc => (doc.field = { thing: "map" })) + s1 = Automerge.merge(Automerge.merge(s1, s2), s3) + assertEqualsOneOf(s1.field, 
"string", ["list"], { thing: "map" }) + assert.deepStrictEqual(Automerge.getConflicts(s1, "field"), { + [`1@${Automerge.getActorId(s1)}`]: "string", + [`1@${Automerge.getActorId(s2)}`]: ["list"], + [`1@${Automerge.getActorId(s3)}`]: { thing: "map" }, + }) + }) + + it("should handle changes within a conflicting map field", () => { + s1 = Automerge.change(s1, doc => (doc.field = "string")) + s2 = Automerge.change(s2, doc => (doc.field = {})) + s2 = Automerge.change(s2, doc => (doc.field.innerKey = 42)) + s3 = Automerge.merge(s1, s2) + assertEqualsOneOf(s3.field, "string", { innerKey: 42 }) + assert.deepStrictEqual(Automerge.getConflicts(s3, "field"), { + [`1@${Automerge.getActorId(s1)}`]: "string", + [`1@${Automerge.getActorId(s2)}`]: { innerKey: 42 }, + }) + }) + + it("should handle changes within a conflicting list element", () => { + s1 = Automerge.change(s1, doc => (doc.list = ["hello"])) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => (doc.list[0] = { map1: true })) + s1 = Automerge.change(s1, doc => (doc.list[0].key = 1)) + s2 = Automerge.change(s2, doc => (doc.list[0] = { map2: true })) + s2 = Automerge.change(s2, doc => (doc.list[0].key = 2)) + s3 = Automerge.merge(s1, s2) + if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) { + assert.deepStrictEqual(s3.list, [{ map1: true, key: 1 }]) + } else { + assert.deepStrictEqual(s3.list, [{ map2: true, key: 2 }]) + } + assert.deepStrictEqual(Automerge.getConflicts(s3.list, 0), { + [`8@${Automerge.getActorId(s1)}`]: { map1: true, key: 1 }, + [`8@${Automerge.getActorId(s2)}`]: { map2: true, key: 2 }, + }) + }) + + it("should not merge concurrently assigned nested maps", () => { + s1 = Automerge.change(s1, doc => (doc.config = { background: "blue" })) + s2 = Automerge.change(s2, doc => (doc.config = { logo_url: "logo.png" })) + s3 = Automerge.merge(s1, s2) + assertEqualsOneOf( + s3.config, + { background: "blue" }, + { logo_url: "logo.png" } + ) + assert.deepStrictEqual(Automerge.getConflicts(s3, 
"config"), { + [`1@${Automerge.getActorId(s1)}`]: { background: "blue" }, + [`1@${Automerge.getActorId(s2)}`]: { logo_url: "logo.png" }, + }) + }) + + it("should clear conflicts after assigning a new value", () => { + s1 = Automerge.change(s1, doc => (doc.field = "one")) + s2 = Automerge.change(s2, doc => (doc.field = "two")) + s3 = Automerge.merge(s1, s2) + s3 = Automerge.change(s3, doc => (doc.field = "three")) + assert.deepStrictEqual(s3, { field: "three" }) + assert.strictEqual(Automerge.getConflicts(s3, "field"), undefined) + s2 = Automerge.merge(s2, s3) + assert.deepStrictEqual(s2, { field: "three" }) + assert.strictEqual(Automerge.getConflicts(s2, "field"), undefined) + }) + + it("should handle concurrent insertions at different list positions", () => { + s1 = Automerge.change(s1, doc => (doc.list = ["one", "three"])) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.list.splice(1, 0, "two")) + s2 = Automerge.change(s2, doc => doc.list.push("four")) + s3 = Automerge.merge(s1, s2) + assert.deepStrictEqual(s3, { list: ["one", "two", "three", "four"] }) + assert.strictEqual(Automerge.getConflicts(s3, "list"), undefined) + }) + + it("should handle concurrent insertions at the same list position", () => { + s1 = Automerge.change(s1, doc => (doc.birds = ["parakeet"])) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.birds.push("starling")) + s2 = Automerge.change(s2, doc => doc.birds.push("chaffinch")) + s3 = Automerge.merge(s1, s2) + assertEqualsOneOf( + s3.birds, + ["parakeet", "starling", "chaffinch"], + ["parakeet", "chaffinch", "starling"] + ) + s2 = Automerge.merge(s2, s3) + assert.deepStrictEqual(s2, s3) + }) + + it("should handle concurrent assignment and deletion of a map entry", () => { + // Add-wins semantics + s1 = Automerge.change(s1, doc => (doc.bestBird = "robin")) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => delete doc.bestBird) + s2 = Automerge.change(s2, doc => (doc.bestBird = 
"magpie")) + s3 = Automerge.merge(s1, s2) + assert.deepStrictEqual(s1, {}) + assert.deepStrictEqual(s2, { bestBird: "magpie" }) + assert.deepStrictEqual(s3, { bestBird: "magpie" }) + assert.strictEqual(Automerge.getConflicts(s3, "bestBird"), undefined) + }) + + it("should handle concurrent assignment and deletion of a list element", () => { + // Concurrent assignment ressurects a deleted list element. Perhaps a little + // surprising, but consistent with add-wins semantics of maps (see test above) + s1 = Automerge.change( + s1, + doc => (doc.birds = ["blackbird", "thrush", "goldfinch"]) + ) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => (doc.birds[1] = "starling")) + s2 = Automerge.change(s2, doc => doc.birds.splice(1, 1)) + s3 = Automerge.merge(s1, s2) + assert.deepStrictEqual(s1.birds, ["blackbird", "starling", "goldfinch"]) + assert.deepStrictEqual(s2.birds, ["blackbird", "goldfinch"]) + assert.deepStrictEqual(s3.birds, ["blackbird", "starling", "goldfinch"]) + s4 = Automerge.load(Automerge.save(s3)) + assert.deepStrictEqual(s3, s4) + }) + + it("should handle insertion after a deleted list element", () => { + s1 = Automerge.change( + s1, + doc => (doc.birds = ["blackbird", "thrush", "goldfinch"]) + ) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.birds.splice(1, 2)) + s2 = Automerge.change(s2, doc => doc.birds.splice(2, 0, "starling")) + s3 = Automerge.merge(s1, s2) + assert.deepStrictEqual(s3, { birds: ["blackbird", "starling"] }) + assert.deepStrictEqual(Automerge.merge(s2, s3), { + birds: ["blackbird", "starling"], + }) + }) + + it("should handle concurrent deletion of the same element", () => { + s1 = Automerge.change( + s1, + doc => (doc.birds = ["albatross", "buzzard", "cormorant"]) + ) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.birds.deleteAt(1)) // buzzard + s2 = Automerge.change(s2, doc => doc.birds.deleteAt(1)) // buzzard + s3 = Automerge.merge(s1, s2) + 
assert.deepStrictEqual(s3.birds, ["albatross", "cormorant"]) + }) + + it("should handle concurrent deletion of different elements", () => { + s1 = Automerge.change( + s1, + doc => (doc.birds = ["albatross", "buzzard", "cormorant"]) + ) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => doc.birds.deleteAt(0)) // albatross + s2 = Automerge.change(s2, doc => doc.birds.deleteAt(1)) // buzzard + s3 = Automerge.merge(s1, s2) + assert.deepStrictEqual(s3.birds, ["cormorant"]) + }) + + it("should handle concurrent updates at different levels of the tree", () => { + // A delete higher up in the tree overrides an update in a subtree + s1 = Automerge.change( + s1, + doc => + (doc.animals = { + birds: { pink: "flamingo", black: "starling" }, + mammals: ["badger"], + }) + ) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => (doc.animals.birds.brown = "sparrow")) + s2 = Automerge.change(s2, doc => delete doc.animals.birds) + s3 = Automerge.merge(s1, s2) + assert.deepStrictEqual(s1.animals, { + birds: { + pink: "flamingo", + brown: "sparrow", + black: "starling", + }, + mammals: ["badger"], + }) + assert.deepStrictEqual(s2.animals, { mammals: ["badger"] }) + assert.deepStrictEqual(s3.animals, { mammals: ["badger"] }) + }) + + it("should handle updates of concurrently deleted objects", () => { + s1 = Automerge.change( + s1, + doc => (doc.birds = { blackbird: { feathers: "black" } }) + ) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => delete doc.birds.blackbird) + s2 = Automerge.change(s2, doc => (doc.birds.blackbird.beak = "orange")) + s3 = Automerge.merge(s1, s2) + assert.deepStrictEqual(s1, { birds: {} }) + }) + + it("should not interleave sequence insertions at the same position", () => { + s1 = Automerge.change(s1, doc => (doc.wisdom = [])) + s2 = Automerge.merge(s2, s1) + s1 = Automerge.change(s1, doc => + doc.wisdom.push("to", "be", "is", "to", "do") + ) + s2 = Automerge.change(s2, doc => + doc.wisdom.push("to", "do", "is", 
"to", "be") + ) + s3 = Automerge.merge(s1, s2) + assertEqualsOneOf( + s3.wisdom, + ["to", "be", "is", "to", "do", "to", "do", "is", "to", "be"], + ["to", "do", "is", "to", "be", "to", "be", "is", "to", "do"] + ) + // In case you're wondering: http://quoteinvestigator.com/2013/09/16/do-be-do/ + }) + + describe("multiple insertions at the same list position", () => { + it("should handle insertion by greater actor ID", () => { + s1 = Automerge.init("aaaa") + s2 = Automerge.init("bbbb") + s1 = Automerge.change(s1, doc => (doc.list = ["two"])) + s2 = Automerge.merge(s2, s1) + s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "one")) + assert.deepStrictEqual(s2.list, ["one", "two"]) + }) + + it("should handle insertion by lesser actor ID", () => { + s1 = Automerge.init("bbbb") + s2 = Automerge.init("aaaa") + s1 = Automerge.change(s1, doc => (doc.list = ["two"])) + s2 = Automerge.merge(s2, s1) + s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "one")) + assert.deepStrictEqual(s2.list, ["one", "two"]) + }) + + it("should handle insertion regardless of actor ID", () => { + s1 = Automerge.change(s1, doc => (doc.list = ["two"])) + s2 = Automerge.merge(s2, s1) + s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "one")) + assert.deepStrictEqual(s2.list, ["one", "two"]) + }) + + it("should make insertion order consistent with causality", () => { + s1 = Automerge.change(s1, doc => (doc.list = ["four"])) + s2 = Automerge.merge(s2, s1) + s2 = Automerge.change(s2, doc => doc.list.unshift("three")) + s1 = Automerge.merge(s1, s2) + s1 = Automerge.change(s1, doc => doc.list.unshift("two")) + s2 = Automerge.merge(s2, s1) + s2 = Automerge.change(s2, doc => doc.list.unshift("one")) + assert.deepStrictEqual(s2.list, ["one", "two", "three", "four"]) + }) + }) + }) + + describe("saving and loading", () => { + it("should save and restore an empty document", () => { + let s = Automerge.load(Automerge.save(Automerge.init())) + assert.deepStrictEqual(s, {}) + }) + + it("should 
generate a new random actor ID", () => { + let s1 = Automerge.init() + let s2 = Automerge.load(Automerge.save(s1)) + assert.strictEqual( + UUID_PATTERN.test(Automerge.getActorId(s1).toString()), + true + ) + assert.strictEqual( + UUID_PATTERN.test(Automerge.getActorId(s2).toString()), + true + ) + assert.notEqual(Automerge.getActorId(s1), Automerge.getActorId(s2)) + }) + + it("should allow a custom actor ID to be set", () => { + let s = Automerge.load(Automerge.save(Automerge.init()), "333333") + assert.strictEqual(Automerge.getActorId(s), "333333") + }) + + it("should reconstitute complex datatypes", () => { + let s1 = Automerge.change( + Automerge.init(), + doc => (doc.todos = [{ title: "water plants", done: false }]) + ) + let s2 = Automerge.load(Automerge.save(s1)) + assert.deepStrictEqual(s2, { + todos: [{ title: "water plants", done: false }], + }) + }) + + it("should save and load maps with @ symbols in the keys", () => { + let s1 = Automerge.change( + Automerge.init(), + doc => (doc["123@4567"] = "hello") + ) + let s2 = Automerge.load(Automerge.save(s1)) + assert.deepStrictEqual(s2, { "123@4567": "hello" }) + }) + + it("should reconstitute conflicts", () => { + let s1 = Automerge.change( + Automerge.init("111111"), + doc => (doc.x = 3) + ) + let s2 = Automerge.change( + Automerge.init("222222"), + doc => (doc.x = 5) + ) + s1 = Automerge.merge(s1, s2) + let s3 = Automerge.load(Automerge.save(s1)) + assert.strictEqual(s1.x, 5) + assert.strictEqual(s3.x, 5) + assert.deepStrictEqual(Automerge.getConflicts(s1, "x"), { + "1@111111": 3, + "1@222222": 5, + }) + assert.deepStrictEqual(Automerge.getConflicts(s3, "x"), { + "1@111111": 3, + "1@222222": 5, + }) + }) + + it("should reconstitute element ID counters", () => { + const s1 = Automerge.init("01234567") + const s2 = Automerge.change(s1, doc => (doc.list = ["a"])) + const listId = Automerge.getObjectId(s2.list) + const changes12 = Automerge.getAllChanges(s2).map(Automerge.decodeChange) + 
assert.deepStrictEqual(changes12, [ + { + hash: changes12[0].hash, + actor: "01234567", + seq: 1, + startOp: 1, + time: changes12[0].time, + message: null, + deps: [], + ops: [ + { obj: "_root", action: "makeList", key: "list", pred: [] }, + { + obj: listId, + action: "makeText", + elemId: "_head", + insert: true, + pred: [], + }, + { + obj: "2@01234567", + action: "set", + elemId: "_head", + insert: true, + value: "a", + pred: [], + }, + ], + }, + ]) + const s3 = Automerge.change(s2, doc => doc.list.deleteAt(0)) + const s4 = Automerge.load(Automerge.save(s3), "01234567") + const s5 = Automerge.change(s4, doc => doc.list.push("b")) + const changes45 = Automerge.getAllChanges(s5).map(Automerge.decodeChange) + assert.deepStrictEqual(s5, { list: ["b"] }) + assert.deepStrictEqual(changes45[2], { + hash: changes45[2].hash, + actor: "01234567", + seq: 3, + startOp: 5, + time: changes45[2].time, + message: null, + deps: [changes45[1].hash], + ops: [ + { + obj: listId, + action: "makeText", + elemId: "_head", + insert: true, + pred: [], + }, + { + obj: "5@01234567", + action: "set", + elemId: "_head", + insert: true, + value: "b", + pred: [], + }, + ], + }) + }) + + it("should allow a reloaded list to be mutated", () => { + let doc = Automerge.change(Automerge.init(), doc => (doc.foo = [])) + doc = Automerge.load(Automerge.save(doc)) + doc = Automerge.change(doc, "add", doc => doc.foo.push(1)) + doc = Automerge.load(Automerge.save(doc)) + assert.deepStrictEqual(doc.foo, [1]) + }) + + it("should reload a document containing deflated columns", () => { + // In this test, the keyCtr column is long enough for deflate compression to kick in, but the + // keyStr column is short. Thus, the deflate bit gets set for keyCtr but not for keyStr. + // When checking whether the columns appear in ascending order, we must ignore the deflate bit. 
+ let doc = Automerge.change(Automerge.init(), doc => { + doc.list = [] + for (let i = 0; i < 200; i++) + doc.list.insertAt(Math.floor(Math.random() * i), "a") + }) + Automerge.load(Automerge.save(doc)) + let expected: Array = [] + for (let i = 0; i < 200; i++) expected.push("a") + assert.deepStrictEqual(doc, { list: expected }) + }) + + it.skip("should call patchCallback if supplied to load", () => { + const s1 = Automerge.change( + Automerge.init(), + doc => (doc.birds = ["Goldfinch"]) + ) + const s2 = Automerge.change(s1, doc => doc.birds.push("Chaffinch")) + const callbacks: Array = [], + actor = Automerge.getActorId(s1) + const reloaded = Automerge.load(Automerge.save(s2), { + patchCallback(patch, before, after) { + callbacks.push({ patch, before, after }) + }, + }) + assert.strictEqual(callbacks.length, 1) + assert.deepStrictEqual(callbacks[0].patch, { + maxOp: 3, + deps: [decodeChange(Automerge.getAllChanges(s2)[1]).hash], + clock: { [actor]: 2 }, + pendingChanges: 0, + diffs: { + objectId: "_root", + type: "map", + props: { + birds: { + [`1@${actor}`]: { + objectId: `1@${actor}`, + type: "list", + edits: [ + { + action: "multi-insert", + index: 0, + elemId: `2@${actor}`, + values: ["Goldfinch", "Chaffinch"], + }, + ], + }, + }, + }, + }, + }) + assert.deepStrictEqual(callbacks[0].before, {}) + assert.strictEqual(callbacks[0].after, reloaded) + assert.strictEqual(callbacks[0].local, false) + }) + }) + + describe("history API", () => { + it("should return an empty history for an empty document", () => { + assert.deepStrictEqual(Automerge.getHistory(Automerge.init()), []) + }) + + it("should make past document states accessible", () => { + let s = Automerge.init() + s = Automerge.change(s, doc => (doc.config = { background: "blue" })) + s = Automerge.change(s, doc => (doc.birds = ["mallard"])) + s = Automerge.change(s, doc => doc.birds.unshift("oystercatcher")) + assert.deepStrictEqual( + Automerge.getHistory(s).map(state => state.snapshot), + [ + { config: { 
background: "blue" } }, + { config: { background: "blue" }, birds: ["mallard"] }, + { + config: { background: "blue" }, + birds: ["oystercatcher", "mallard"], + }, + ] + ) + }) + + it("should make change messages accessible", () => { + let s = Automerge.init() + s = Automerge.change(s, "Empty Bookshelf", doc => (doc.books = [])) + s = Automerge.change(s, "Add Orwell", doc => + doc.books.push("Nineteen Eighty-Four") + ) + s = Automerge.change(s, "Add Huxley", doc => + doc.books.push("Brave New World") + ) + assert.deepStrictEqual(s.books, [ + "Nineteen Eighty-Four", + "Brave New World", + ]) + assert.deepStrictEqual( + Automerge.getHistory(s).map(state => state.change.message), + ["Empty Bookshelf", "Add Orwell", "Add Huxley"] + ) + }) + }) + + describe("changes API", () => { + it("should return an empty list on an empty document", () => { + let changes = Automerge.getAllChanges(Automerge.init()) + assert.deepStrictEqual(changes, []) + }) + + it("should return an empty list when nothing changed", () => { + let s1 = Automerge.change( + Automerge.init(), + doc => (doc.birds = ["Chaffinch"]) + ) + assert.deepStrictEqual(Automerge.getChanges(s1, s1), []) + }) + + it("should do nothing when applying an empty list of changes", () => { + let s1 = Automerge.change( + Automerge.init(), + doc => (doc.birds = ["Chaffinch"]) + ) + assert.deepStrictEqual(Automerge.applyChanges(s1, [])[0], s1) + }) + + it("should return all changes when compared to an empty document", () => { + let s1 = Automerge.change( + Automerge.init(), + "Add Chaffinch", + doc => (doc.birds = ["Chaffinch"]) + ) + let s2 = Automerge.change(s1, "Add Bullfinch", doc => + doc.birds.push("Bullfinch") + ) + let changes = Automerge.getChanges(Automerge.init(), s2) + assert.strictEqual(changes.length, 2) + }) + + it("should allow a document copy to be reconstructed from scratch", () => { + let s1 = Automerge.change( + Automerge.init(), + "Add Chaffinch", + doc => (doc.birds = ["Chaffinch"]) + ) + let s2 = 
Automerge.change(s1, "Add Bullfinch", doc => + doc.birds.push("Bullfinch") + ) + let changes = Automerge.getAllChanges(s2) + let [s3] = Automerge.applyChanges(Automerge.init(), changes) + assert.deepStrictEqual(s3.birds, ["Chaffinch", "Bullfinch"]) + }) + + it("should return changes since the last given version", () => { + let s1 = Automerge.change( + Automerge.init(), + "Add Chaffinch", + doc => (doc.birds = ["Chaffinch"]) + ) + let changes1 = Automerge.getAllChanges(s1) + let s2 = Automerge.change(s1, "Add Bullfinch", doc => + doc.birds.push("Bullfinch") + ) + let changes2 = Automerge.getChanges(s1, s2) + assert.strictEqual(changes1.length, 1) // Add Chaffinch + assert.strictEqual(changes2.length, 1) // Add Bullfinch + }) + + it("should incrementally apply changes since the last given version", () => { + let s1 = Automerge.change( + Automerge.init(), + "Add Chaffinch", + doc => (doc.birds = ["Chaffinch"]) + ) + let changes1 = Automerge.getAllChanges(s1) + let s2 = Automerge.change(s1, "Add Bullfinch", doc => + doc.birds.push("Bullfinch") + ) + let changes2 = Automerge.getChanges(s1, s2) + let [s3] = Automerge.applyChanges(Automerge.init(), changes1) + let [s4] = Automerge.applyChanges(s3, changes2) + assert.deepStrictEqual(s3.birds, ["Chaffinch"]) + assert.deepStrictEqual(s4.birds, ["Chaffinch", "Bullfinch"]) + }) + + it("should handle updates to a list element", () => { + let s1 = Automerge.change( + Automerge.init(), + doc => (doc.birds = ["Chaffinch", "Bullfinch"]) + ) + let s2 = Automerge.change(s1, doc => (doc.birds[0] = "Goldfinch")) + let [s3] = Automerge.applyChanges( + Automerge.init(), + Automerge.getAllChanges(s2) + ) + assert.deepStrictEqual(s3.birds, ["Goldfinch", "Bullfinch"]) + assert.strictEqual(Automerge.getConflicts(s3.birds, 0), undefined) + }) + + // TEXT + it("should handle updates to a text object", () => { + let s1 = Automerge.change(Automerge.init(), doc => (doc.text = "ab")) + let s2 = Automerge.change(s1, doc => + Automerge.splice(doc, 
"text", 0, 1, "A") + ) + let [s3] = Automerge.applyChanges( + Automerge.init(), + Automerge.getAllChanges(s2) + ) + assert.deepStrictEqual([...s3.text], ["A", "b"]) + }) + + /* + it.skip('should report missing dependencies', () => { + let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch']) + let s2 = Automerge.merge(Automerge.init(), s1) + s2 = Automerge.change(s2, doc => doc.birds.push('Bullfinch')) + let changes = Automerge.getAllChanges(s2) + let [s3, patch] = Automerge.applyChanges(Automerge.init(), [changes[1]]) + assert.deepStrictEqual(s3, {}) + assert.deepStrictEqual(Automerge.Backend.getMissingDeps(Automerge.Frontend.getBackendState(s3)), + decodeChange(changes[1]).deps) + assert.strictEqual(patch.pendingChanges, 1) + ;[s3, patch] = Automerge.applyChanges(s3, [changes[0]]) + assert.deepStrictEqual(s3.birds, ['Chaffinch', 'Bullfinch']) + assert.deepStrictEqual(Automerge.Backend.getMissingDeps(Automerge.Frontend.getBackendState(s3)), []) + assert.strictEqual(patch.pendingChanges, 0) + }) + */ + + it("should report missing dependencies with out-of-order applyChanges", () => { + let s0 = Automerge.init() + let s1 = Automerge.change(s0, doc => (doc.test = ["a"])) + let changes01 = Automerge.getAllChanges(s1) + let s2 = Automerge.change(s1, doc => (doc.test = ["b"])) + let changes12 = Automerge.getChanges(s1, s2) + let s3 = Automerge.change(s2, doc => (doc.test = ["c"])) + let changes23 = Automerge.getChanges(s2, s3) + let s4 = Automerge.init() + let [s5] = Automerge.applyChanges(s4, changes23) + let [s6] = Automerge.applyChanges(s5, changes12) + assert.deepStrictEqual(Automerge.getMissingDeps(s6, []), [ + decodeChange(changes01[0]).hash, + ]) + }) + + it("should call patchCallback if supplied when applying changes", () => { + const s1 = Automerge.change( + Automerge.init(), + doc => (doc.birds = ["Goldfinch"]) + ) + const callbacks: Array = [] + const before = Automerge.init() + const [after] = Automerge.applyChanges( + before, + 
Automerge.getAllChanges(s1), + { + patchCallback(patch, before, after) { + callbacks.push({ patch, before, after }) + }, + } + ) + assert.strictEqual(callbacks.length, 1) + assert.deepStrictEqual(callbacks[0].patch[0], { + action: "put", + path: ["birds"], + value: [], + }) + assert.deepStrictEqual(callbacks[0].patch[1], { + action: "insert", + path: ["birds", 0], + values: [""], + }) + assert.deepStrictEqual(callbacks[0].patch[2], { + action: "splice", + path: ["birds", 0, 0], + value: "Goldfinch", + }) + assert.strictEqual(callbacks[0].before, before) + assert.strictEqual(callbacks[0].after, after) + }) + + it("should merge multiple applied changes into one patch", () => { + const s1 = Automerge.change( + Automerge.init(), + doc => (doc.birds = ["Goldfinch"]) + ) + const s2 = Automerge.change(s1, doc => doc.birds.push("Chaffinch")) + const patches: Array = [] + Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2), { + patchCallback: p => patches.push(...p), + }) + assert.deepStrictEqual(patches, [ + { action: "put", path: ["birds"], value: [] }, + { action: "insert", path: ["birds", 0], values: ["", ""] }, + { action: "splice", path: ["birds", 0, 0], value: "Goldfinch" }, + { action: "splice", path: ["birds", 1, 0], value: "Chaffinch" }, + ]) + }) + + it("should call a patchCallback registered on doc initialisation", () => { + const s1 = Automerge.change( + Automerge.init(), + doc => (doc.bird = "Goldfinch") + ) + const patches: Array = [] + const before = Automerge.init({ + patchCallback: p => patches.push(...p), + }) + Automerge.applyChanges(before, Automerge.getAllChanges(s1)) + assert.deepStrictEqual(patches, [ + { action: "put", path: ["bird"], value: "" }, + { action: "splice", path: ["bird", 0], value: "Goldfinch" }, + ]) + }) + }) +}) diff --git a/javascript/test/stable_unstable_interop.ts b/javascript/test/stable_unstable_interop.ts new file mode 100644 index 00000000..dc57f338 --- /dev/null +++ 
b/javascript/test/stable_unstable_interop.ts @@ -0,0 +1,99 @@ +import * as assert from "assert" +import * as stable from "../src" +import { unstable } from "../src" + +describe("stable/unstable interop", () => { + it("should allow reading Text from stable as strings in unstable", () => { + let stableDoc = stable.from({ + text: new stable.Text("abc"), + }) + let unstableDoc = unstable.init() + unstableDoc = unstable.merge(unstableDoc, stableDoc) + assert.deepStrictEqual(unstableDoc.text, "abc") + }) + + it("should allow string from stable as Text in unstable", () => { + let unstableDoc = unstable.from({ + text: "abc", + }) + let stableDoc = stable.init() + stableDoc = unstable.merge(stableDoc, unstableDoc) + assert.deepStrictEqual(stableDoc.text, new stable.Text("abc")) + }) + + it("should allow reading strings from stable as RawString in unstable", () => { + let stableDoc = stable.from({ + text: "abc", + }) + let unstableDoc = unstable.init() + unstableDoc = unstable.merge(unstableDoc, stableDoc) + assert.deepStrictEqual(unstableDoc.text, new unstable.RawString("abc")) + }) + + it("should allow reading RawString from unstable as string in stable", () => { + let unstableDoc = unstable.from({ + text: new unstable.RawString("abc"), + }) + let stableDoc = stable.init() + stableDoc = unstable.merge(stableDoc, unstableDoc) + assert.deepStrictEqual(stableDoc.text, "abc") + }) + + it("should show conflicts on text objects", () => { + let doc1 = stable.from({ text: new stable.Text("abc") }, "bb") + let doc2 = stable.from({ text: new stable.Text("def") }, "aa") + doc1 = stable.merge(doc1, doc2) + let conflicts = stable.getConflicts(doc1, "text")! + assert.equal(conflicts["1@bb"]!.toString(), "abc") + assert.equal(conflicts["1@aa"]!.toString(), "def") + + let unstableDoc = unstable.init() + unstableDoc = unstable.merge(unstableDoc, doc1) + let conflicts2 = unstable.getConflicts(unstableDoc, "text")! 
+ assert.equal(conflicts2["1@bb"]!.toString(), "abc") + assert.equal(conflicts2["1@aa"]!.toString(), "def") + }) + + it("should allow filling a list with text in stable", () => { + let doc = stable.from<{ list: Array }>({ + list: [null, null, null], + }) + doc = stable.change(doc, doc => { + doc.list.fill(new stable.Text("abc"), 0, 3) + }) + assert.deepStrictEqual(doc.list, [ + new stable.Text("abc"), + new stable.Text("abc"), + new stable.Text("abc"), + ]) + }) + + it("should allow filling a list with text in unstable", () => { + let doc = unstable.from<{ list: Array }>({ + list: [null, null, null], + }) + doc = stable.change(doc, doc => { + doc.list.fill("abc", 0, 3) + }) + assert.deepStrictEqual(doc.list, ["abc", "abc", "abc"]) + }) + + it("should allow splicing text into a list on stable", () => { + let doc = stable.from<{ list: Array }>({ list: [] }) + doc = stable.change(doc, doc => { + doc.list.splice(0, 0, new stable.Text("abc"), new stable.Text("def")) + }) + assert.deepStrictEqual(doc.list, [ + new stable.Text("abc"), + new stable.Text("def"), + ]) + }) + + it("should allow splicing text into a list on unstable", () => { + let doc = unstable.from<{ list: Array }>({ list: [] }) + doc = unstable.change(doc, doc => { + doc.list.splice(0, 0, "abc", "def") + }) + assert.deepStrictEqual(doc.list, ["abc", "def"]) + }) +}) diff --git a/automerge-js/test/sync_test.ts b/javascript/test/sync_test.ts similarity index 54% rename from automerge-js/test/sync_test.ts rename to javascript/test/sync_test.ts index 7b1e52ef..5724985c 100644 --- a/automerge-js/test/sync_test.ts +++ b/javascript/test/sync_test.ts @@ -1,51 +1,57 @@ -import * as assert from 'assert' -import * as Automerge from '../src' -import { BloomFilter } from './legacy/sync' -import { decodeChangeMeta } from './legacy/columnar' -import { decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState, initSyncState } from "../src" -import * as AutomergeWASM from "automerge-wasm" - 
-Automerge.use(AutomergeWASM) - -function inspect(a) { - const util = require("util"); - return util.inspect(a,false,null,true) -} +import * as assert from "assert" +import * as Automerge from "../src" +import { BloomFilter } from "./legacy/sync" +import { + decodeSyncMessage, + encodeSyncMessage, + decodeSyncState, + encodeSyncState, + initSyncState, +} from "../src" function getHeads(doc) { return Automerge.getHeads(doc) } function getMissingDeps(doc) { - return Automerge.getMissingDeps(doc) + return Automerge.getMissingDeps(doc, []) } -function sync(a, b, aSyncState = initSyncState(), bSyncState = initSyncState()) { +function sync( + a, + b, + aSyncState = initSyncState(), + bSyncState = initSyncState() +) { const MAX_ITER = 10 - let aToBmsg = null, bToAmsg = null, i = 0 + let aToBmsg: Automerge.SyncMessage | null = null, + bToAmsg: Automerge.SyncMessage | null = null, + i = 0 do { - [aSyncState, aToBmsg] = Automerge.generateSyncMessage(a, aSyncState) + ;[aSyncState, aToBmsg] = Automerge.generateSyncMessage(a, aSyncState) ;[bSyncState, bToAmsg] = Automerge.generateSyncMessage(b, bSyncState) if (aToBmsg) { - [b, bSyncState] = Automerge.receiveSyncMessage(b, bSyncState, aToBmsg) + ;[b, bSyncState] = Automerge.receiveSyncMessage(b, bSyncState, aToBmsg) } if (bToAmsg) { - [a, aSyncState] = Automerge.receiveSyncMessage(a, aSyncState, bToAmsg) + ;[a, aSyncState] = Automerge.receiveSyncMessage(a, aSyncState, bToAmsg) } if (i++ > MAX_ITER) { - throw new Error(`Did not synchronize within ${MAX_ITER} iterations. Do you have a bug causing an infinite loop?`) + throw new Error( + `Did not synchronize within ${MAX_ITER} iterations. 
Do you have a bug causing an infinite loop?` + ) } } while (aToBmsg || bToAmsg) return [a, b, aSyncState, bSyncState] } -describe('Data sync protocol', () => { - describe('with docs already in sync', () => { - describe('an empty local doc', () => { - it('should send a sync message implying no local data', () => { +describe("Data sync protocol", () => { + describe("with docs already in sync", () => { + describe("an empty local doc", () => { + it("should send a sync message implying no local data", () => { let n1 = Automerge.init() let s1 = initSyncState() let m1 @@ -59,26 +65,35 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(message.changes, []) }) - it('should not reply if we have no data as well', () => { - let n1 = Automerge.init(), n2 = Automerge.init() - let s1 = initSyncState(), s2 = initSyncState() - let m1 = null, m2 = null + it("should not reply if we have no data as well", () => { + let n1 = Automerge.init(), + n2 = Automerge.init() + let s1 = initSyncState(), + s2 = initSyncState() + let m1: Automerge.SyncMessage | null = null, + m2: Automerge.SyncMessage | null = null ;[s1, m1] = Automerge.generateSyncMessage(n1, s1) - ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, m1) + if (m1 != null) { + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, m1) + } ;[s2, m2] = Automerge.generateSyncMessage(n2, s2) assert.deepStrictEqual(m2, null) }) }) - describe('documents with data', () => { - it('repos with equal heads do not need a reply message', () => { - let n1 = Automerge.init(), n2 = Automerge.init() - let s1 = initSyncState(), s2 = initSyncState() - let m1 = null, m2 = null + describe("documents with data", () => { + it("repos with equal heads do not need a reply message", () => { + let n1 = Automerge.init(), + n2 = Automerge.init() + let s1 = initSyncState(), + s2 = initSyncState() + let m1: Automerge.SyncMessage | null = null, + m2: Automerge.SyncMessage | null = null // make two nodes with the same changes - n1 = Automerge.change(n1, {time: 
0}, doc => doc.n = []) - for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.n.push(i)) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.n = [])) + for (let i = 0; i < 10; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => doc.n.push(i)) ;[n2] = Automerge.applyChanges(n2, Automerge.getAllChanges(n1)) assert.deepStrictEqual(n1, n2) @@ -87,82 +102,96 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(s1.lastSentHeads, getHeads(n1)) // heads are equal so this message should be null - ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, m1) + if (m1 != null) { + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, m1) + } ;[s2, m2] = Automerge.generateSyncMessage(n2, s2) assert.strictEqual(m2, null) }) - it('n1 should offer all changes to n2 when starting from nothing', () => { - let n1 = Automerge.init(), n2 = Automerge.init() + it("n1 should offer all changes to n2 when starting from nothing", () => { + let n1 = Automerge.init(), + n2 = Automerge.init() // make changes for n1 that n2 should request - n1 = Automerge.change(n1, {time: 0}, doc => doc.n = []) - for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.n.push(i)) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.n = [])) + for (let i = 0; i < 10; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => doc.n.push(i)) assert.notDeepStrictEqual(n1, n2) const [after1, after2] = sync(n1, n2) assert.deepStrictEqual(after1, after2) }) - it('should sync peers where one has commits the other does not', () => { - let n1 = Automerge.init(), n2 = Automerge.init() + it("should sync peers where one has commits the other does not", () => { + let n1 = Automerge.init(), + n2 = Automerge.init() // make changes for n1 that n2 should request - n1 = Automerge.change(n1, {time: 0}, doc => doc.n = []) - for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.n.push(i)) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.n = [])) + for (let i = 0; i < 
10; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => doc.n.push(i)) assert.notDeepStrictEqual(n1, n2) ;[n1, n2] = sync(n1, n2) assert.deepStrictEqual(n1, n2) }) - it('should work with prior sync state', () => { + it("should work with prior sync state", () => { // create & synchronize two nodes - let n1 = Automerge.init(), n2 = Automerge.init() - let s1 = initSyncState(), s2 = initSyncState() + let n1 = Automerge.init(), + n2 = Automerge.init() + let s1 = initSyncState(), + s2 = initSyncState() - for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 5; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2) // modify the first node further - for (let i = 5; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 5; i < 10; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) assert.notDeepStrictEqual(n1, n2) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) assert.deepStrictEqual(n1, n2) }) - it('should not generate messages once synced', () => { + it("should not generate messages once synced", () => { // create & synchronize two nodes - let n1 = Automerge.init('abc123'), n2 = Automerge.init('def456') - let s1 = initSyncState(), s2 = initSyncState() + let n1 = Automerge.init("abc123"), + n2 = Automerge.init("def456") + let s1 = initSyncState(), + s2 = initSyncState() - let message, patch - for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) - for (let i = 0; i < 5; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.y = i) + let message + for (let i = 0; i < 5; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 0; i < 5; i++) + n2 = Automerge.change(n2, { time: 0 }, doc => (doc.y = i)) - // n1 reports what it has - ;[s1, message] = Automerge.generateSyncMessage(n1, s1, n1) + // n1 reports what it has + ;[s1, message] = Automerge.generateSyncMessage(n1, s1) // n2 receives that 
message and sends changes along with what it has - ;[n2, s2, patch] = Automerge.receiveSyncMessage(n2, s2, message) + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, message) ;[s2, message] = Automerge.generateSyncMessage(n2, s2) assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) //assert.deepStrictEqual(patch, null) // no changes arrived // n1 receives the changes and replies with the changes it now knows n2 needs - ;[n1, s1, patch] = Automerge.receiveSyncMessage(n1, s1, message) + ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, message) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 5) //assert.deepStrictEqual(patch.diffs.props, {y: {'5@def456': {type: 'value', value: 4, datatype: 'int'}}}) // changes arrived // n2 applies the changes and sends confirmation ending the exchange - ;[n2, s2, patch] = Automerge.receiveSyncMessage(n2, s2, message) + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, message) ;[s2, message] = Automerge.generateSyncMessage(n2, s2) //assert.deepStrictEqual(patch.diffs.props, {x: {'5@abc123': {type: 'value', value: 4, datatype: 'int'}}}) // changes arrived // n1 receives the message and has nothing more to say - ;[n1, s1, patch] = Automerge.receiveSyncMessage(n1, s1, message) + ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, message) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) assert.deepStrictEqual(message, null) //assert.deepStrictEqual(patch, null) // no changes arrived @@ -172,28 +201,38 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(message, null) }) - it('should allow simultaneous messages during synchronization', () => { + it("should allow simultaneous messages during synchronization", () => { // create & synchronize two nodes - let n1 = Automerge.init('abc123'), n2 = Automerge.init('def456') - let s1 = initSyncState(), s2 = initSyncState() - for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc 
=> doc.x = i) - for (let i = 0; i < 5; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.y = i) - const head1 = getHeads(n1)[0], head2 = getHeads(n2)[0] + let n1 = Automerge.init("abc123"), + n2 = Automerge.init("def456") + let s1 = initSyncState(), + s2 = initSyncState() + for (let i = 0; i < 5; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 0; i < 5; i++) + n2 = Automerge.change(n2, { time: 0 }, doc => (doc.y = i)) + const head1 = getHeads(n1)[0], + head2 = getHeads(n2)[0] // both sides report what they have but have no shared peer state let msg1to2, msg2to1 ;[s1, msg1to2] = Automerge.generateSyncMessage(n1, s1) ;[s2, msg2to1] = Automerge.generateSyncMessage(n2, s2) assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) - assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync.length, 0) + assert.deepStrictEqual( + decodeSyncMessage(msg1to2).have[0].lastSync.length, + 0 + ) assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) - assert.deepStrictEqual(decodeSyncMessage(msg2to1).have[0].lastSync.length, 0) + assert.deepStrictEqual( + decodeSyncMessage(msg2to1).have[0].lastSync.length, + 0 + ) // n1 and n2 receives that message and update sync state but make no patch - let patch1, patch2 - ;[n1, s1, patch1] = Automerge.receiveSyncMessage(n1, s1, msg2to1) + ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, msg2to1) //assert.deepStrictEqual(patch1, null) // no changes arrived, so no patch - ;[n2, s2, patch2] = Automerge.receiveSyncMessage(n2, s2, msg1to2) + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, msg1to2) //assert.deepStrictEqual(patch2, null) // no changes arrived, so no patch // now both reply with their local changes the other lacks @@ -204,15 +243,14 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 5) // both should now apply the changes and update the frontend - ;[n1, s1, patch1] = Automerge.receiveSyncMessage(n1, s1, 
msg2to1) + ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, msg2to1) assert.deepStrictEqual(getMissingDeps(n1), []) //assert.notDeepStrictEqual(patch1, null) - assert.deepStrictEqual(n1, {x: 4, y: 4}) - - ;[n2, s2, patch2] = Automerge.receiveSyncMessage(n2, s2, msg1to2) + assert.deepStrictEqual(n1, { x: 4, y: 4 }) + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, msg1to2) assert.deepStrictEqual(getMissingDeps(n2), []) //assert.notDeepStrictEqual(patch2, null) - assert.deepStrictEqual(n2, {x: 4, y: 4}) + assert.deepStrictEqual(n2, { x: 4, y: 4 }) // The response acknowledges the changes received, and sends no further changes ;[s1, msg1to2] = Automerge.generateSyncMessage(n1, s1) @@ -221,8 +259,8 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) // After receiving acknowledgements, their shared heads should be equal - ;[n1, s1, patch1] = Automerge.receiveSyncMessage(n1, s1, msg2to1) - ;[n2, s2, patch2] = Automerge.receiveSyncMessage(n2, s2, msg1to2) + ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, msg2to1) + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, msg1to2) assert.deepStrictEqual(s1.sharedHeads, [head1, head2].sort()) assert.deepStrictEqual(s2.sharedHeads, [head1, head2].sort()) //assert.deepStrictEqual(patch1, null) @@ -235,42 +273,56 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(msg2to1, null) // If we make one more change, and start another sync, its lastSync should be updated - n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 5) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 5)) ;[s1, msg1to2] = Automerge.generateSyncMessage(n1, s1) - assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync, [head1, head2].sort()) + assert.deepStrictEqual( + decodeSyncMessage(msg1to2).have[0].lastSync, + [head1, head2].sort() + ) }) - it('should assume sent changes were recieved until we hear otherwise', () => { - let n1 = Automerge.init('01234567'), n2 = 
Automerge.init('89abcdef') - let s1 = initSyncState(), message = null - let s2 + it("should assume sent changes were recieved until we hear otherwise", () => { + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + message: Automerge.SyncMessage | null = null - n1 = Automerge.change(n1, {time: 0}, doc => doc.items = []) - ;[n1, n2, s1, s2 ] = sync(n1, n2) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.items = [])) + ;[n1, n2, s1] = sync(n1, n2) - n1 = Automerge.change(n1, {time: 0}, doc => doc.items.push('x')) + n1 = Automerge.change(n1, { time: 0 }, doc => doc.items.push("x")) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) - assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + if (message != null) { + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + } - n1 = Automerge.change(n1, {time: 0}, doc => doc.items.push('y')) + n1 = Automerge.change(n1, { time: 0 }, doc => doc.items.push("y")) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) - assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + if (message != null) { + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + } - n1 = Automerge.change(n1, {time: 0}, doc => doc.items.push('z')) + n1 = Automerge.change(n1, { time: 0 }, doc => doc.items.push("z")) ;[s1, message] = Automerge.generateSyncMessage(n1, s1) - assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + if (message != null) { + assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) + } }) - it('should work regardless of who initiates the exchange', () => { + it("should work regardless of who initiates the exchange", () => { // create & synchronize two nodes - let n1 = Automerge.init(), n2 = Automerge.init() - let s1 = initSyncState(), s2 = initSyncState() + let n1 = Automerge.init(), + n2 = Automerge.init() + let s1 = initSyncState(), + s2 = initSyncState() - for (let i = 0; i < 5; i++) 
n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 5; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) // modify the first node further - for (let i = 5; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 5; i < 10; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) assert.notDeepStrictEqual(n1, n2) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) @@ -279,21 +331,24 @@ describe('Data sync protocol', () => { }) }) - describe('with diverged documents', () => { - it('should work without prior sync state', () => { + describe("with diverged documents", () => { + it("should work without prior sync state", () => { // Scenario: ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ // `-- c15 <-- c16 <-- c17 // lastSync is undefined. // create two peers both with divergent commits - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) - + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + for (let i = 0; i < 10; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2] = sync(n1, n2) - for (let i = 10; i < 15; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) - for (let i = 15; i < 18; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.x = i) + for (let i = 10; i < 15; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 15; i < 18; i++) + n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = i)) assert.notDeepStrictEqual(n1, n2) ;[n1, n2] = sync(n1, n2) @@ -301,21 +356,26 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(n1, n2) }) - it('should work with prior sync state', () => { + it("should work with prior sync state", () => { // Scenario: ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 // c0 <-- c1 <-- 
c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ // `-- c15 <-- c16 <-- c17 // lastSync is c9. // create two peers both with divergent commits - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + s2 = initSyncState() - for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 10; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) - for (let i = 10; i < 15; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) - for (let i = 15; i < 18; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.x = i) + for (let i = 10; i < 15; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + for (let i = 15; i < 18; i++) + n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = i)) s1 = decodeSyncState(encodeSyncState(s1)) s2 = decodeSyncState(encodeSyncState(s2)) @@ -325,27 +385,33 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(n1, n2) }) - it('should ensure non-empty state after sync', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + it("should ensure non-empty state after sync", () => { + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + s2 = initSyncState() - for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 3; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) assert.deepStrictEqual(s1.sharedHeads, getHeads(n1)) assert.deepStrictEqual(s2.sharedHeads, getHeads(n1)) }) - it('should re-sync after one node crashed with data loss', () => { + it("should re-sync after one node crashed with data loss", () => { // Scenario: (r) (n2) 
(n1) // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 // n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync is c2. // we want to successfully sync (n1) with (r), even though (n1) believes it's talking to (n2) - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + s2 = initSyncState() // n1 makes three changes, which we sync to n2 - for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 3; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) // save a copy of n2 as "r" to simulate recovering from crash @@ -353,38 +419,43 @@ describe('Data sync protocol', () => { ;[r, rSyncState] = [Automerge.clone(n2), s2] // sync another few commits - for (let i = 3; i < 6; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 3; i < 6; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) // everyone should be on the same page here assert.deepStrictEqual(getHeads(n1), getHeads(n2)) assert.deepStrictEqual(n1, n2) // now make a few more changes, then attempt to sync the fully-up-to-date n1 with the confused r - for (let i = 6; i < 9; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 6; i < 9; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) s1 = decodeSyncState(encodeSyncState(s1)) rSyncState = decodeSyncState(encodeSyncState(rSyncState)) assert.notDeepStrictEqual(getHeads(n1), getHeads(r)) assert.notDeepStrictEqual(n1, r) - assert.deepStrictEqual(n1, {x: 8}) - assert.deepStrictEqual(r, {x: 2}) + assert.deepStrictEqual(n1, { x: 8 }) + assert.deepStrictEqual(r, { x: 2 }) ;[n1, r, s1, rSyncState] = sync(n1, r, s1, rSyncState) assert.deepStrictEqual(getHeads(n1), getHeads(r)) 
assert.deepStrictEqual(n1, r) }) - it('should resync after one node experiences data loss without disconnecting', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + it("should resync after one node experiences data loss without disconnecting", () => { + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + s2 = initSyncState() // n1 makes three changes, which we sync to n2 - for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 3; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) assert.deepStrictEqual(getHeads(n1), getHeads(n2)) assert.deepStrictEqual(n1, n2) - let n2AfterDataLoss = Automerge.init('89abcdef') + let n2AfterDataLoss = Automerge.init("89abcdef") // "n2" now has no data, but n1 still thinks it does. Note we don't do // decodeSyncState(encodeSyncState(s1)) in order to simulate data loss without disconnecting @@ -393,29 +464,35 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(n1, n2) }) - it('should handle changes concurrent to the last sync heads', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('fedcba98') - let s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState() + it("should handle changes concurrent to the last sync heads", () => { + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef"), + n3 = Automerge.init("fedcba98") + let s12 = initSyncState(), + s21 = initSyncState(), + s23 = initSyncState(), + s32 = initSyncState() // Change 1 is known to all three nodes - n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 1) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 1)) ;[n1, n2, s12, s21] = sync(n1, n2, s12, s21) ;[n2, n3, s23, s32] = sync(n2, n3, s23, s32) // Change 2 is known to n1 and n2 
- n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 2) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 2)) ;[n1, n2, s12, s21] = sync(n1, n2, s12, s21) // Each of the three nodes makes one change (changes 3, 4, 5) - n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 3) - n2 = Automerge.change(n2, {time: 0}, doc => doc.x = 4) - n3 = Automerge.change(n3, {time: 0}, doc => doc.x = 5) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 3)) + n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = 4)) + n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = 5)) // Apply n3's latest change to n2. If running in Node, turn the Uint8Array into a Buffer, to // simulate transmission over a network (see https://github.com/automerge/automerge/pull/362) let change = Automerge.getLastLocalChange(n3) - if (typeof Buffer === 'function') change = Buffer.from(change) - ;[n2] = Automerge.applyChanges(n2, [change]) + if (typeof Buffer === "function" && change != null) + change = Buffer.from(change) + ;[n2] = (change && Automerge.applyChanges(n2, [change])) || [n2] // Now sync n1 and n2. 
n3's change is concurrent to n1 and n2's last sync heads ;[n1, n2, s12, s21] = sync(n1, n2, s12, s21) @@ -423,12 +500,14 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(n1, n2) }) - it('should handle histories with lots of branching and merging', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('fedcba98') - n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 0) - ;[n2] = Automerge.applyChanges(n2, [Automerge.getLastLocalChange(n1)]) - ;[n3] = Automerge.applyChanges(n3, [Automerge.getLastLocalChange(n1)]) - n3 = Automerge.change(n3, {time: 0}, doc => doc.x = 1) + it("should handle histories with lots of branching and merging", () => { + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef"), + n3 = Automerge.init("fedcba98") + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 0)) + ;[n2] = Automerge.applyChanges(n2, [Automerge.getLastLocalChange(n1)!]) + ;[n3] = Automerge.applyChanges(n3, [Automerge.getLastLocalChange(n1)!]) + n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = 1)) // - n1c1 <------ n1c2 <------ n1c3 <-- etc. 
<-- n1c20 <------ n1c21 // / \/ \/ \/ @@ -437,29 +516,29 @@ describe('Data sync protocol', () => { // \ / // ---------------------------------------------- n3c1 <----- for (let i = 1; i < 20; i++) { - n1 = Automerge.change(n1, {time: 0}, doc => doc.n1 = i) - n2 = Automerge.change(n2, {time: 0}, doc => doc.n2 = i) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.n1 = i)) + n2 = Automerge.change(n2, { time: 0 }, doc => (doc.n2 = i)) const change1 = Automerge.getLastLocalChange(n1) const change2 = Automerge.getLastLocalChange(n2) - ;[n1] = Automerge.applyChanges(n1, [change2]) - ;[n2] = Automerge.applyChanges(n2, [change1]) + ;[n1] = Automerge.applyChanges(n1, [change2!]) + ;[n2] = Automerge.applyChanges(n2, [change1!]) } - let s1 = initSyncState(), s2 = initSyncState() + let s1 = initSyncState(), + s2 = initSyncState() ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) // Having n3's last change concurrent to the last sync heads forces us into the slower code path - ;[n2] = Automerge.applyChanges(n2, [Automerge.getLastLocalChange(n3)]) - n1 = Automerge.change(n1, {time: 0}, doc => doc.n1 = 'final') - n2 = Automerge.change(n2, {time: 0}, doc => doc.n2 = 'final') - + ;[n2] = Automerge.applyChanges(n2, [Automerge.getLastLocalChange(n3)!]) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.n1 = "final")) + n2 = Automerge.change(n2, { time: 0 }, doc => (doc.n2 = "final")) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) assert.deepStrictEqual(getHeads(n1), getHeads(n2)) assert.deepStrictEqual(n1, n2) }) }) - describe('with false positives', () => { + describe("with false positives", () => { // NOTE: the following tests use brute force to search for Bloom filter false positives. The // tests make change hashes deterministic by fixing the actorId and change timestamp to be // constants. 
The loop that searches for false positives is then initialised such that it finds @@ -468,22 +547,36 @@ describe('Data sync protocol', () => { // then the false positive will no longer be the first loop iteration. The tests should still // pass because the loop will run until a false positive is found, but they will be slower. - it('should handle a false-positive head', () => { + it("should handle a false-positive head", () => { // Scenario: ,-- n1 // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ // `-- n2 // where n2 is a false positive in the Bloom filter containing {n1}. // lastSync is c9. - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + s2 = initSyncState() - for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 10; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2) - for (let i = 1; ; i++) { // search for false positive; see comment above - const n1up = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) - const n2up = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) + for (let i = 1; ; i++) { + // search for false positive; see comment above + const n1up = Automerge.change( + Automerge.clone(n1, { actor: "01234567" }), + { time: 0 }, + doc => (doc.x = `${i} @ n1`) + ) + const n2up = Automerge.change( + Automerge.clone(n2, { actor: "89abcdef" }), + { time: 0 }, + doc => (doc.x = `${i} @ n2`) + ) if (new BloomFilter(getHeads(n1up)).containsHash(getHeads(n2up)[0])) { - n1 = n1up; n2 = n2up; break + n1 = n1up + n2 = n2up + break } } const allHeads = [...getHeads(n1), ...getHeads(n2)].sort() @@ -494,7 +587,7 @@ describe('Data sync protocol', () => { 
assert.deepStrictEqual(getHeads(n2), allHeads) }) - describe('with a false-positive dependency', () => { + describe("with a false-positive dependency", () => { let n1, n2, s1, s2, n1hash2, n2hash2 beforeEach(() => { @@ -503,34 +596,57 @@ describe('Data sync protocol', () => { // `-- n2c1 <-- n2c2 // where n2c1 is a false positive in the Bloom filter containing {n1c1, n1c2}. // lastSync is c9. - n1 = Automerge.init('01234567') - n2 = Automerge.init('89abcdef') + n1 = Automerge.init("01234567") + n2 = Automerge.init("89abcdef") s1 = initSyncState() s2 = initSyncState() - for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 10; i++) + n1 = Automerge.change(n1, { time: 0 }, (doc: any) => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2) let n1hash1, n2hash1 - for (let i = 29; ; i++) { // search for false positive; see comment above - const n1us1 = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) - const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) - n1hash1 = getHeads(n1us1)[0]; n2hash1 = getHeads(n2us1)[0] - const n1us2 = Automerge.change(n1us1, {time: 0}, doc => doc.x = 'final @ n1') - const n2us2 = Automerge.change(n2us1, {time: 0}, doc => doc.x = 'final @ n2') - n1hash2 = getHeads(n1us2)[0]; n2hash2 = getHeads(n2us2)[0] + for (let i = 29; ; i++) { + // search for false positive; see comment above + const n1us1 = Automerge.change( + Automerge.clone(n1, { actor: "01234567" }), + { time: 0 }, + (doc: any) => (doc.x = `${i} @ n1`) + ) + const n2us1 = Automerge.change( + Automerge.clone(n2, { actor: "89abcdef" }), + { time: 0 }, + (doc: any) => (doc.x = `${i} @ n2`) + ) + n1hash1 = getHeads(n1us1)[0] + n2hash1 = getHeads(n2us1)[0] + const n1us2 = Automerge.change( + n1us1, + { time: 0 }, + (doc: any) => (doc.x = "final @ n1") + ) + const n2us2 = Automerge.change( + n2us1, + { time: 0 }, + (doc: any) => (doc.x 
= "final @ n2") + ) + n1hash2 = getHeads(n1us2)[0] + n2hash2 = getHeads(n2us2)[0] if (new BloomFilter([n1hash1, n1hash2]).containsHash(n2hash1)) { - n1 = n1us2; n2 = n2us2; break + n1 = n1us2 + n2 = n2us2 + break } } }) - it('should sync two nodes without connection reset', () => { - [n1, n2, s1, s2] = sync(n1, n2, s1, s2) + it("should sync two nodes without connection reset", () => { + ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) assert.deepStrictEqual(getHeads(n1), [n1hash2, n2hash2].sort()) assert.deepStrictEqual(getHeads(n2), [n1hash2, n2hash2].sort()) }) - it('should sync two nodes with connection reset', () => { + // FIXME - this has a periodic failure + it("should sync two nodes with connection reset", () => { s1 = decodeSyncState(encodeSyncState(s1)) s2 = decodeSyncState(encodeSyncState(s2)) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) @@ -538,7 +654,7 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(getHeads(n2), [n1hash2, n2hash2].sort()) }) - it('should sync three nodes', () => { + it.skip("should sync three nodes", () => { s1 = decodeSyncState(encodeSyncState(s1)) s2 = decodeSyncState(encodeSyncState(s2)) @@ -558,37 +674,73 @@ describe('Data sync protocol', () => { assert.strictEqual(decodeSyncMessage(m2).changes.length, 1) // only n2c2; change n2c1 is not sent // n3 is a node that doesn't have the missing change. 
Nevertheless n1 is going to ask n3 for it - let n3 = Automerge.init('fedcba98'), s13 = initSyncState(), s31 = initSyncState() + let n3 = Automerge.init("fedcba98"), + s13 = initSyncState(), + s31 = initSyncState() ;[n1, n3, s13, s31] = sync(n1, n3, s13, s31) assert.deepStrictEqual(getHeads(n1), [n1hash2]) assert.deepStrictEqual(getHeads(n3), [n1hash2]) }) }) - it('should not require an additional request when a false-positive depends on a true-negative', () => { + it("should not require an additional request when a false-positive depends on a true-negative", () => { // Scenario: ,-- n1c1 <-- n1c2 <-- n1c3 // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+ // `-- n2c1 <-- n2c2 <-- n2c3 // where n2c2 is a false positive in the Bloom filter containing {n1c1, n1c2, n1c3}. // lastSync is c4. - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + s2 = initSyncState() let n1hash3, n2hash3 - for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 5; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2) - for (let i = 86; ; i++) { // search for false positive; see comment above - const n1us1 = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) - const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) + for (let i = 86; ; i++) { + // search for false positive; see comment above + const n1us1 = Automerge.change( + Automerge.clone(n1, { actor: "01234567" }), + { time: 0 }, + doc => (doc.x = `${i} @ n1`) + ) + const n2us1 = Automerge.change( + Automerge.clone(n2, { actor: "89abcdef" }), + { time: 0 }, + doc => (doc.x = `${i} @ n2`) + ) const n1hash1 = getHeads(n1us1)[0] - const n1us2 = Automerge.change(n1us1, {time: 0}, doc => 
doc.x = `${i + 1} @ n1`) - const n2us2 = Automerge.change(n2us1, {time: 0}, doc => doc.x = `${i + 1} @ n2`) - const n1hash2 = getHeads(n1us2)[0], n2hash2 = getHeads(n2us2)[0] - const n1up3 = Automerge.change(n1us2, {time: 0}, doc => doc.x = 'final @ n1') - const n2up3 = Automerge.change(n2us2, {time: 0}, doc => doc.x = 'final @ n2') - n1hash3 = getHeads(n1up3)[0]; n2hash3 = getHeads(n2up3)[0] - if (new BloomFilter([n1hash1, n1hash2, n1hash3]).containsHash(n2hash2)) { - n1 = n1up3; n2 = n2up3; break + const n1us2 = Automerge.change( + n1us1, + { time: 0 }, + doc => (doc.x = `${i + 1} @ n1`) + ) + const n2us2 = Automerge.change( + n2us1, + { time: 0 }, + doc => (doc.x = `${i + 1} @ n2`) + ) + const n1hash2 = getHeads(n1us2)[0], + n2hash2 = getHeads(n2us2)[0] + const n1up3 = Automerge.change( + n1us2, + { time: 0 }, + doc => (doc.x = "final @ n1") + ) + const n2up3 = Automerge.change( + n2us2, + { time: 0 }, + doc => (doc.x = "final @ n2") + ) + n1hash3 = getHeads(n1up3)[0] + n2hash3 = getHeads(n2up3)[0] + if ( + new BloomFilter([n1hash1, n1hash2, n1hash3]).containsHash(n2hash2) + ) { + n1 = n1up3 + n2 = n2up3 + break } } const bothHeads = [n1hash3, n2hash3].sort() @@ -599,31 +751,46 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(getHeads(n2), bothHeads) }) - it('should handle chains of false-positives', () => { + it("should handle chains of false-positives", () => { // Scenario: ,-- c5 // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+ // `-- n2c1 <-- n2c2 <-- n2c3 // where n2c1 and n2c2 are both false positives in the Bloom filter containing {c5}. // lastSync is c4. 
- let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + s2 = initSyncState() - for (let i = 0; i < 5; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 5; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2, s1, s2) - n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 5) - for (let i = 2; ; i++) { // search for false positive; see comment above - const n2us1 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 5)) + for (let i = 2; ; i++) { + // search for false positive; see comment above + const n2us1 = Automerge.change( + Automerge.clone(n2, { actor: "89abcdef" }), + { time: 0 }, + doc => (doc.x = `${i} @ n2`) + ) if (new BloomFilter(getHeads(n1)).containsHash(getHeads(n2us1)[0])) { - n2 = n2us1; break + n2 = n2us1 + break } } - for (let i = 141; ; i++) { // search for false positive; see comment above - const n2us2 = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} again`) + for (let i = 141; ; i++) { + // search for false positive; see comment above + const n2us2 = Automerge.change( + Automerge.clone(n2, { actor: "89abcdef" }), + { time: 0 }, + doc => (doc.x = `${i} again`) + ) if (new BloomFilter(getHeads(n1)).containsHash(getHeads(n2us2)[0])) { - n2 = n2us2; break + n2 = n2us2 + break } } - n2 = Automerge.change(n2, {time: 0}, doc => doc.x = 'final @ n2') + n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = "final @ n2")) const allHeads = [...getHeads(n1), ...getHeads(n2)].sort() s1 = decodeSyncState(encodeSyncState(s1)) @@ -633,32 +800,46 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(getHeads(n2), allHeads) }) - it('should allow the 
false-positive hash to be explicitly requested', () => { + it("should allow the false-positive hash to be explicitly requested", () => { // Scenario: ,-- n1 // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ // `-- n2 // where n2 causes a false positive in the Bloom filter containing {n1}. - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + s2 = initSyncState() let message - for (let i = 0; i < 10; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 10; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2) s1 = decodeSyncState(encodeSyncState(s1)) s2 = decodeSyncState(encodeSyncState(s2)) - for (let i = 1; ; i++) { // brute-force search for false positive; see comment above - const n1up = Automerge.change(Automerge.clone(n1, {actorId: '01234567'}), {time: 0}, doc => doc.x = `${i} @ n1`) - const n2up = Automerge.change(Automerge.clone(n2, {actorId: '89abcdef'}), {time: 0}, doc => doc.x = `${i} @ n2`) + for (let i = 1; ; i++) { + // brute-force search for false positive; see comment above + const n1up = Automerge.change( + Automerge.clone(n1, { actor: "01234567" }), + { time: 0 }, + doc => (doc.x = `${i} @ n1`) + ) + const n2up = Automerge.change( + Automerge.clone(n2, { actor: "89abcdef" }), + { time: 0 }, + doc => (doc.x = `${i} @ n2`) + ) // check if the bloom filter on n2 will believe n1 already has a particular hash // this will mean n2 won't offer that data to n2 by receiving a sync message from n1 if (new BloomFilter(getHeads(n1up)).containsHash(getHeads(n2up)[0])) { - n1 = n1up; n2 = n2up; break + n1 = n1up + n2 = n2up + break } } // n1 creates a sync message for n2 with an ill-fated bloom - [s1, message] = Automerge.generateSyncMessage(n1, s1) + ;[s1, message] = Automerge.generateSyncMessage(n1, 
s1) assert.strictEqual(decodeSyncMessage(message).changes.length, 0) // n2 receives it and DOESN'T send a change back @@ -682,32 +863,42 @@ describe('Data sync protocol', () => { }) }) - describe('protocol features', () => { - it('should allow multiple Bloom filters', () => { + describe("protocol features", () => { + it("should allow multiple Bloom filters", () => { // Scenario: ,-- n1c1 <-- n1c2 <-- n1c3 // c0 <-- c1 <-- c2 <-+--- n2c1 <-- n2c2 <-- n2c3 // `-- n3c1 <-- n3c2 <-- n3c3 // n1 has {c0, c1, c2, n1c1, n1c2, n1c3, n2c1, n2c2}; // n2 has {c0, c1, c2, n1c1, n1c2, n2c1, n2c2, n2c3}; // n3 has {c0, c1, c2, n3c1, n3c2, n3c3}. - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('76543210') - let s13 = initSyncState(), s12 = initSyncState(), s21 = initSyncState() - let s32 = initSyncState(), s31 = initSyncState(), s23 = initSyncState() + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef"), + n3 = Automerge.init("76543210") + let s13 = initSyncState() + let s32 = initSyncState(), + s31 = initSyncState(), + s23 = initSyncState() let message1, message2, message3 - for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) - // sync all 3 nodes - ;[n1, n2, s12, s21] = sync(n1, n2) // eslint-disable-line no-unused-vars -- kept for consistency + for (let i = 0; i < 3; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) + // sync all 3 nodes + ;[n1, n2, ,] = sync(n1, n2) // eslint-disable-line no-unused-vars -- kept for consistency ;[n1, n3, s13, s31] = sync(n1, n3) ;[n3, n2, s32, s23] = sync(n3, n2) - for (let i = 0; i < 2; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = `${i} @ n1`) - for (let i = 0; i < 2; i++) n2 = Automerge.change(n2, {time: 0}, doc => doc.x = `${i} @ n2`) + for (let i = 0; i < 2; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = `${i} @ n1`)) + for (let i = 0; i < 2; i++) + n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = 
`${i} @ n2`)) ;[n1] = Automerge.applyChanges(n1, Automerge.getAllChanges(n2)) ;[n2] = Automerge.applyChanges(n2, Automerge.getAllChanges(n1)) - n1 = Automerge.change(n1, {time: 0}, doc => doc.x = `3 @ n1`) - n2 = Automerge.change(n2, {time: 0}, doc => doc.x = `3 @ n2`) - for (let i = 0; i < 3; i++) n3 = Automerge.change(n3, {time: 0}, doc => doc.x = `${i} @ n3`) - const n1c3 = getHeads(n1)[0], n2c3 = getHeads(n2)[0], n3c3 = getHeads(n3)[0] + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = `3 @ n1`)) + n2 = Automerge.change(n2, { time: 0 }, doc => (doc.x = `3 @ n2`)) + for (let i = 0; i < 3; i++) + n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = `${i} @ n3`)) + const n1c3 = getHeads(n1)[0], + n2c3 = getHeads(n2)[0], + n3c3 = getHeads(n3)[0] s13 = decodeSyncState(encodeSyncState(s13)) s31 = decodeSyncState(encodeSyncState(s31)) s23 = decodeSyncState(encodeSyncState(s23)) @@ -729,7 +920,11 @@ describe('Data sync protocol', () => { const modifiedMessage = decodeSyncMessage(message3) modifiedMessage.have.push(decodeSyncMessage(message1).have[0]) assert.strictEqual(modifiedMessage.changes.length, 0) - ;[n2, s23] = Automerge.receiveSyncMessage(n2, s23, encodeSyncMessage(modifiedMessage)) + ;[n2, s23] = Automerge.receiveSyncMessage( + n2, + s23, + encodeSyncMessage(modifiedMessage) + ) // n2 replies to n3, sending only n2c3 (the one change that n2 has but n1 doesn't) ;[s23, message2] = Automerge.generateSyncMessage(n2, s23) @@ -743,53 +938,76 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(getHeads(n3), [n1c3, n2c3, n3c3].sort()) }) - it('should allow any change to be requested', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() - let message = null + it("should allow any change to be requested", () => { + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + s2 = initSyncState() + let message: Automerge.SyncMessage | 
null = null - for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 3; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) const lastSync = getHeads(n1) - for (let i = 3; i < 6; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) - + for (let i = 3; i < 6; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n1, n2, s1, s2] = sync(n1, n2) s1.lastSentHeads = [] // force generateSyncMessage to return a message even though nothing changed ;[s1, message] = Automerge.generateSyncMessage(n1, s1) - const modMsg = decodeSyncMessage(message) + const modMsg = decodeSyncMessage(message!) modMsg.need = lastSync // re-request change 2 - ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, encodeSyncMessage(modMsg)) + ;[n2, s2] = Automerge.receiveSyncMessage( + n2, + s2, + encodeSyncMessage(modMsg) + ) ;[s1, message] = Automerge.generateSyncMessage(n2, s2) - assert.strictEqual(decodeSyncMessage(message).changes.length, 1) - assert.strictEqual(Automerge.decodeChange(decodeSyncMessage(message).changes[0]).hash, lastSync[0]) + assert.strictEqual(decodeSyncMessage(message!).changes.length, 1) + assert.strictEqual( + Automerge.decodeChange(decodeSyncMessage(message!).changes[0]).hash, + lastSync[0] + ) }) - it('should ignore requests for a nonexistent change', () => { - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef') - let s1 = initSyncState(), s2 = initSyncState() - let message = null + it("should ignore requests for a nonexistent change", () => { + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef") + let s1 = initSyncState(), + s2 = initSyncState() + let message: Automerge.SyncMessage | null = null - for (let i = 0; i < 3; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) + for (let i = 0; i < 3; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) ;[n2] = Automerge.applyChanges(n2, Automerge.getAllChanges(n1)) ;[s1, message] = 
Automerge.generateSyncMessage(n1, s1) - message.need = ['0000000000000000000000000000000000000000000000000000000000000000'] - ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, message) + const decoded = Automerge.decodeSyncMessage(message!) + decoded.need = [ + "0000000000000000000000000000000000000000000000000000000000000000", + ] + message = Automerge.encodeSyncMessage(decoded) + ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, message!) ;[s2, message] = Automerge.generateSyncMessage(n2, s2) assert.strictEqual(message, null) }) - it('should allow a subset of changes to be sent', () => { + it("should allow a subset of changes to be sent", () => { // ,-- c1 <-- c2 // c0 <-+ // `-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 - let n1 = Automerge.init('01234567'), n2 = Automerge.init('89abcdef'), n3 = Automerge.init('76543210') - let s1 = initSyncState(), s2 = initSyncState() + let n1 = Automerge.init("01234567"), + n2 = Automerge.init("89abcdef"), + n3 = Automerge.init("76543210") + let s1 = initSyncState(), + s2 = initSyncState() let msg, decodedMsg - n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 0) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = 0)) n3 = Automerge.merge(n3, n1) - for (let i = 1; i <= 2; i++) n1 = Automerge.change(n1, {time: 0}, doc => doc.x = i) // n1 has {c0, c1, c2} - for (let i = 3; i <= 4; i++) n3 = Automerge.change(n3, {time: 0}, doc => doc.x = i) // n3 has {c0, c3, c4} - const c2 = getHeads(n1)[0], c4 = getHeads(n3)[0] + for (let i = 1; i <= 2; i++) + n1 = Automerge.change(n1, { time: 0 }, doc => (doc.x = i)) // n1 has {c0, c1, c2} + for (let i = 3; i <= 4; i++) + n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = i)) // n3 has {c0, c3, c4} + const c2 = getHeads(n1)[0], + c4 = getHeads(n3)[0] n2 = Automerge.merge(n2, n3) // n2 has {c0, c3, c4} // Sync n1 and n2, so their shared heads are {c2, c4} @@ -800,11 +1018,13 @@ describe('Data sync protocol', () => { assert.deepStrictEqual(s2.sharedHeads, [c2, c4].sort()) // n2 and n3 
apply {c5, c6, c7, c8} - n3 = Automerge.change(n3, {time: 0}, doc => doc.x = 5) + n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = 5)) const change5 = Automerge.getLastLocalChange(n3) - n3 = Automerge.change(n3, {time: 0}, doc => doc.x = 6) - const change6 = Automerge.getLastLocalChange(n3), c6 = getHeads(n3)[0] - for (let i = 7; i <= 8; i++) n3 = Automerge.change(n3, {time: 0}, doc => doc.x = i) + n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = 6)) + const change6 = Automerge.getLastLocalChange(n3), + c6 = getHeads(n3)[0] + for (let i = 7; i <= 8; i++) + n3 = Automerge.change(n3, { time: 0 }, doc => (doc.x = i)) const c8 = getHeads(n3)[0] n2 = Automerge.merge(n2, n3) @@ -815,9 +1035,10 @@ describe('Data sync protocol', () => { decodedMsg = decodeSyncMessage(msg) decodedMsg.changes = [change5, change6] msg = encodeSyncMessage(decodedMsg) - const sentHashes = {} - sentHashes[decodeChangeMeta(change5, true).hash] = true - sentHashes[decodeChangeMeta(change6, true).hash] = true + const sentHashes = [ + Automerge.decodeChange(change5!).hash, + Automerge.decodeChange(change6!).hash, + ] s2.sentHashes = sentHashes ;[n1, s1] = Automerge.receiveSyncMessage(n1, s1, msg) assert.deepStrictEqual(s1.sharedHeads, [c2, c6].sort()) @@ -826,7 +1047,10 @@ describe('Data sync protocol', () => { ;[s1, msg] = Automerge.generateSyncMessage(n1, s1) ;[n2, s2] = Automerge.receiveSyncMessage(n2, s2, msg) assert.deepStrictEqual(decodeSyncMessage(msg).need, [c8]) - assert.deepStrictEqual(decodeSyncMessage(msg).have[0].lastSync, [c2, c6].sort()) + assert.deepStrictEqual( + decodeSyncMessage(msg).have[0].lastSync, + [c2, c6].sort() + ) assert.deepStrictEqual(s1.sharedHeads, [c2, c6].sort()) assert.deepStrictEqual(s2.sharedHeads, [c2, c6].sort()) diff --git a/javascript/test/text_test.ts b/javascript/test/text_test.ts new file mode 100644 index 00000000..518c7d2b --- /dev/null +++ b/javascript/test/text_test.ts @@ -0,0 +1,111 @@ +import * as assert from "assert" +import { unstable 
as Automerge } from "../src" +import { assertEqualsOneOf } from "./helpers" + +type DocType = { + text: string + [key: string]: any +} + +describe("Automerge.Text", () => { + let s1: Automerge.Doc, s2: Automerge.Doc + beforeEach(() => { + s1 = Automerge.change(Automerge.init(), doc => (doc.text = "")) + s2 = Automerge.merge(Automerge.init(), s1) + }) + + it("should support insertion", () => { + s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, "a")) + assert.strictEqual(s1.text.length, 1) + assert.strictEqual(s1.text[0], "a") + assert.strictEqual(s1.text, "a") + //assert.strictEqual(s1.text.getElemId(0), `2@${Automerge.getActorId(s1)}`) + }) + + it("should support deletion", () => { + s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, "abc")) + s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 1, 1)) + assert.strictEqual(s1.text.length, 2) + assert.strictEqual(s1.text[0], "a") + assert.strictEqual(s1.text[1], "c") + assert.strictEqual(s1.text, "ac") + }) + + it("should support implicit and explicit deletion", () => { + s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, "abc")) + s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 1, 1)) + s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 1, 0)) + assert.strictEqual(s1.text.length, 2) + assert.strictEqual(s1.text[0], "a") + assert.strictEqual(s1.text[1], "c") + assert.strictEqual(s1.text, "ac") + }) + + it("should handle concurrent insertion", () => { + s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, "abc")) + s2 = Automerge.change(s2, doc => Automerge.splice(doc, "text", 0, 0, "xyz")) + s1 = Automerge.merge(s1, s2) + assert.strictEqual(s1.text.length, 6) + assertEqualsOneOf(s1.text, "abcxyz", "xyzabc") + }) + + it("should handle text and other ops in the same change", () => { + s1 = Automerge.change(s1, doc => { + doc.foo = "bar" + Automerge.splice(doc, "text", 0, 0, "a") + }) + 
assert.strictEqual(s1.foo, "bar") + assert.strictEqual(s1.text, "a") + assert.strictEqual(s1.text, "a") + }) + + it("should serialize to JSON as a simple string", () => { + s1 = Automerge.change(s1, doc => Automerge.splice(doc, "text", 0, 0, 'a"b')) + assert.strictEqual(JSON.stringify(s1), '{"text":"a\\"b"}') + }) + + it("should allow modification after an object is assigned to a document", () => { + s1 = Automerge.change(Automerge.init(), doc => { + doc.text = "" + Automerge.splice(doc, "text", 0, 0, "abcd") + Automerge.splice(doc, "text", 2, 1) + assert.strictEqual(doc.text, "abd") + }) + assert.strictEqual(s1.text, "abd") + }) + + it("should not allow modification outside of a change callback", () => { + assert.throws( + () => Automerge.splice(s1, "text", 0, 0, "a"), + /object cannot be modified outside of a change block/ + ) + }) + + describe("with initial value", () => { + it("should initialize text in Automerge.from()", () => { + let s1 = Automerge.from({ text: "init" }) + assert.strictEqual(s1.text.length, 4) + assert.strictEqual(s1.text[0], "i") + assert.strictEqual(s1.text[1], "n") + assert.strictEqual(s1.text[2], "i") + assert.strictEqual(s1.text[3], "t") + assert.strictEqual(s1.text, "init") + }) + + it("should encode the initial value as a change", () => { + const s1 = Automerge.from({ text: "init" }) + const changes = Automerge.getAllChanges(s1) + assert.strictEqual(changes.length, 1) + const [s2] = Automerge.applyChanges(Automerge.init(), changes) + assert.strictEqual(s2.text, "init") + assert.strictEqual(s2.text, "init") + }) + }) + + it("should support unicode when creating text", () => { + s1 = Automerge.from({ + text: "🐦", + }) + assert.strictEqual(s1.text, "🐦") + }) +}) diff --git a/javascript/test/text_v1.ts b/javascript/test/text_v1.ts new file mode 100644 index 00000000..b111530f --- /dev/null +++ b/javascript/test/text_v1.ts @@ -0,0 +1,281 @@ +import * as assert from "assert" +import * as Automerge from "../src" +import { assertEqualsOneOf } 
from "./helpers" + +type DocType = { text: Automerge.Text; [key: string]: any } + +describe("Automerge.Text", () => { + let s1: Automerge.Doc, s2: Automerge.Doc + beforeEach(() => { + s1 = Automerge.change( + Automerge.init(), + doc => (doc.text = new Automerge.Text()) + ) + s2 = Automerge.merge(Automerge.init(), s1) + }) + + it("should support insertion", () => { + s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a")) + assert.strictEqual(s1.text.length, 1) + assert.strictEqual(s1.text.get(0), "a") + assert.strictEqual(s1.text.toString(), "a") + //assert.strictEqual(s1.text.getElemId(0), `2@${Automerge.getActorId(s1)}`) + }) + + it("should support deletion", () => { + s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a", "b", "c")) + s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 1)) + assert.strictEqual(s1.text.length, 2) + assert.strictEqual(s1.text.get(0), "a") + assert.strictEqual(s1.text.get(1), "c") + assert.strictEqual(s1.text.toString(), "ac") + }) + + it("should support implicit and explicit deletion", () => { + s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a", "b", "c")) + s1 = Automerge.change(s1, doc => doc.text.deleteAt(1)) + s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 0)) + assert.strictEqual(s1.text.length, 2) + assert.strictEqual(s1.text.get(0), "a") + assert.strictEqual(s1.text.get(1), "c") + assert.strictEqual(s1.text.toString(), "ac") + }) + + it("should handle concurrent insertion", () => { + s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a", "b", "c")) + s2 = Automerge.change(s2, doc => doc.text.insertAt(0, "x", "y", "z")) + s1 = Automerge.merge(s1, s2) + assert.strictEqual(s1.text.length, 6) + assertEqualsOneOf(s1.text.toString(), "abcxyz", "xyzabc") + assertEqualsOneOf(s1.text.join(""), "abcxyz", "xyzabc") + }) + + it("should handle text and other ops in the same change", () => { + s1 = Automerge.change(s1, doc => { + doc.foo = "bar" + doc.text.insertAt(0, "a") + }) + assert.strictEqual(s1.foo, 
"bar") + assert.strictEqual(s1.text.toString(), "a") + assert.strictEqual(s1.text.join(""), "a") + }) + + it("should serialize to JSON as a simple string", () => { + s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a", '"', "b")) + assert.strictEqual(JSON.stringify(s1), '{"text":"a\\"b"}') + }) + + it("should allow modification before an object is assigned to a document", () => { + s1 = Automerge.change(Automerge.init(), doc => { + const text = new Automerge.Text() + text.insertAt(0, "a", "b", "c", "d") + text.deleteAt(2) + doc.text = text + assert.strictEqual(doc.text.toString(), "abd") + assert.strictEqual(doc.text.join(""), "abd") + }) + assert.strictEqual(s1.text.toString(), "abd") + assert.strictEqual(s1.text.join(""), "abd") + }) + + it("should allow modification after an object is assigned to a document", () => { + s1 = Automerge.change(Automerge.init(), doc => { + const text = new Automerge.Text() + doc.text = text + doc.text.insertAt(0, "a", "b", "c", "d") + doc.text.deleteAt(2) + assert.strictEqual(doc.text.toString(), "abd") + assert.strictEqual(doc.text.join(""), "abd") + }) + assert.strictEqual(s1.text.join(""), "abd") + }) + + it("should not allow modification outside of a change callback", () => { + assert.throws( + () => s1.text.insertAt(0, "a"), + /object cannot be modified outside of a change block/ + ) + }) + + describe("with initial value", () => { + it("should accept a string as initial value", () => { + let s1 = Automerge.change( + Automerge.init(), + doc => (doc.text = new Automerge.Text("init")) + ) + assert.strictEqual(s1.text.length, 4) + assert.strictEqual(s1.text.get(0), "i") + assert.strictEqual(s1.text.get(1), "n") + assert.strictEqual(s1.text.get(2), "i") + assert.strictEqual(s1.text.get(3), "t") + assert.strictEqual(s1.text.toString(), "init") + }) + + it("should accept an array as initial value", () => { + let s1 = Automerge.change( + Automerge.init(), + doc => (doc.text = new Automerge.Text(["i", "n", "i", "t"])) + ) + 
assert.strictEqual(s1.text.length, 4) + assert.strictEqual(s1.text.get(0), "i") + assert.strictEqual(s1.text.get(1), "n") + assert.strictEqual(s1.text.get(2), "i") + assert.strictEqual(s1.text.get(3), "t") + assert.strictEqual(s1.text.toString(), "init") + }) + + it("should initialize text in Automerge.from()", () => { + let s1 = Automerge.from({ text: new Automerge.Text("init") }) + assert.strictEqual(s1.text.length, 4) + assert.strictEqual(s1.text.get(0), "i") + assert.strictEqual(s1.text.get(1), "n") + assert.strictEqual(s1.text.get(2), "i") + assert.strictEqual(s1.text.get(3), "t") + assert.strictEqual(s1.text.toString(), "init") + }) + + it("should encode the initial value as a change", () => { + const s1 = Automerge.from({ text: new Automerge.Text("init") }) + const changes = Automerge.getAllChanges(s1) + assert.strictEqual(changes.length, 1) + const [s2] = Automerge.applyChanges(Automerge.init(), changes) + assert.strictEqual(s2.text instanceof Automerge.Text, true) + assert.strictEqual(s2.text.toString(), "init") + assert.strictEqual(s2.text.join(""), "init") + }) + + it("should allow immediate access to the value", () => { + Automerge.change(Automerge.init(), doc => { + const text = new Automerge.Text("init") + assert.strictEqual(text.length, 4) + assert.strictEqual(text.get(0), "i") + assert.strictEqual(text.toString(), "init") + doc.text = text + assert.strictEqual(doc.text.length, 4) + assert.strictEqual(doc.text.get(0), "i") + assert.strictEqual(doc.text.toString(), "init") + }) + }) + + it("should allow pre-assignment modification of the initial value", () => { + let s1 = Automerge.change(Automerge.init(), doc => { + const text = new Automerge.Text("init") + text.deleteAt(3) + assert.strictEqual(text.join(""), "ini") + doc.text = text + assert.strictEqual(doc.text.join(""), "ini") + assert.strictEqual(doc.text.toString(), "ini") + }) + assert.strictEqual(s1.text.toString(), "ini") + assert.strictEqual(s1.text.join(""), "ini") + }) + + it("should allow 
post-assignment modification of the initial value", () => { + let s1 = Automerge.change(Automerge.init(), doc => { + const text = new Automerge.Text("init") + doc.text = text + doc.text.deleteAt(0) + doc.text.insertAt(0, "I") + assert.strictEqual(doc.text.join(""), "Init") + assert.strictEqual(doc.text.toString(), "Init") + }) + assert.strictEqual(s1.text.join(""), "Init") + assert.strictEqual(s1.text.toString(), "Init") + }) + }) + + describe("non-textual control characters", () => { + let s1: Automerge.Doc + beforeEach(() => { + s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text() + doc.text.insertAt(0, "a") + doc.text.insertAt(1, { attribute: "bold" }) + }) + }) + + it("should allow fetching non-textual characters", () => { + assert.deepEqual(s1.text.get(1), { attribute: "bold" }) + //assert.strictEqual(s1.text.getElemId(1), `3@${Automerge.getActorId(s1)}`) + }) + + it("should include control characters in string length", () => { + assert.strictEqual(s1.text.length, 2) + assert.strictEqual(s1.text.get(0), "a") + }) + + it("should replace control characters from toString()", () => { + assert.strictEqual(s1.text.toString(), "a\uFFFC") + }) + + it("should allow control characters to be updated", () => { + const s2 = Automerge.change( + s1, + doc => (doc.text.get(1)!.attribute = "italic") + ) + const s3 = Automerge.load(Automerge.save(s2)) + assert.strictEqual(s1.text.get(1).attribute, "bold") + assert.strictEqual(s2.text.get(1).attribute, "italic") + assert.strictEqual(s3.text.get(1).attribute, "italic") + }) + + describe("spans interface to Text", () => { + it("should return a simple string as a single span", () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text("hello world") + }) + assert.deepEqual(s1.text.toSpans(), ["hello world"]) + }) + it("should return an empty string as an empty array", () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text() + 
}) + assert.deepEqual(s1.text.toSpans(), []) + }) + it("should split a span at a control character", () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text("hello world") + doc.text.insertAt(5, { attributes: { bold: true } }) + }) + assert.deepEqual(s1.text.toSpans(), [ + "hello", + { attributes: { bold: true } }, + " world", + ]) + }) + it("should allow consecutive control characters", () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text("hello world") + doc.text.insertAt(5, { attributes: { bold: true } }) + doc.text.insertAt(6, { attributes: { italic: true } }) + }) + assert.deepEqual(s1.text.toSpans(), [ + "hello", + { attributes: { bold: true } }, + { attributes: { italic: true } }, + " world", + ]) + }) + it("should allow non-consecutive control characters", () => { + let s1 = Automerge.change(Automerge.init(), doc => { + doc.text = new Automerge.Text("hello world") + doc.text.insertAt(5, { attributes: { bold: true } }) + doc.text.insertAt(12, { attributes: { italic: true } }) + }) + assert.deepEqual(s1.text.toSpans(), [ + "hello", + { attributes: { bold: true } }, + " world", + { attributes: { italic: true } }, + ]) + }) + }) + }) + + it("should support unicode when creating text", () => { + s1 = Automerge.from({ + text: new Automerge.Text("🐦"), + }) + assert.strictEqual(s1.text.get(0), "🐦") + }) +}) diff --git a/javascript/test/uuid_test.ts b/javascript/test/uuid_test.ts new file mode 100644 index 00000000..f6a0bde4 --- /dev/null +++ b/javascript/test/uuid_test.ts @@ -0,0 +1,32 @@ +import * as assert from "assert" +import * as Automerge from "../src" + +const uuid = Automerge.uuid + +describe("uuid", () => { + afterEach(() => { + uuid.reset() + }) + + describe("default implementation", () => { + it("generates unique values", () => { + assert.notEqual(uuid(), uuid()) + }) + }) + + describe("custom implementation", () => { + let counter + + function customUuid() { + return 
`custom-uuid-${counter++}` + } + + before(() => uuid.setFactory(customUuid)) + beforeEach(() => (counter = 0)) + + it("invokes the custom factory", () => { + assert.equal(uuid(), "custom-uuid-0") + assert.equal(uuid(), "custom-uuid-1") + }) + }) +}) diff --git a/javascript/tsconfig.json b/javascript/tsconfig.json new file mode 100644 index 00000000..628aea8e --- /dev/null +++ b/javascript/tsconfig.json @@ -0,0 +1,19 @@ +{ + "compilerOptions": { + "target": "es2016", + "sourceMap": false, + "declaration": true, + "resolveJsonModule": true, + "module": "commonjs", + "moduleResolution": "node", + "noImplicitAny": false, + "allowSyntheticDefaultImports": true, + "forceConsistentCasingInFileNames": true, + "strict": true, + "noFallthroughCasesInSwitch": true, + "skipLibCheck": true, + "outDir": "./dist" + }, + "include": ["src/**/*", "test/**/*"], + "exclude": ["./dist/**/*", "./node_modules", "./src/**/*.deno.ts"] +} diff --git a/automerge-js/tslint.json b/javascript/tslint.json similarity index 100% rename from automerge-js/tslint.json rename to javascript/tslint.json diff --git a/rust/.gitignore b/rust/.gitignore new file mode 100644 index 00000000..f859e0a3 --- /dev/null +++ b/rust/.gitignore @@ -0,0 +1,6 @@ +/target +/.direnv +perf.* +/Cargo.lock +build/ +.vim/* diff --git a/Cargo.toml b/rust/Cargo.toml similarity index 78% rename from Cargo.toml rename to rust/Cargo.toml index 9add8e60..5d29fc9f 100644 --- a/Cargo.toml +++ b/rust/Cargo.toml @@ -3,15 +3,15 @@ members = [ "automerge", "automerge-c", "automerge-cli", + "automerge-test", "automerge-wasm", "edit-trace", ] resolver = "2" [profile.release] -debug = true lto = true -opt-level = 3 +codegen-units = 1 [profile.bench] -debug = true +debug = true \ No newline at end of file diff --git a/rust/automerge-c/.clang-format b/rust/automerge-c/.clang-format new file mode 100644 index 00000000..dbf16c21 --- /dev/null +++ b/rust/automerge-c/.clang-format @@ -0,0 +1,250 @@ +--- +Language: Cpp +# BasedOnStyle: Chromium 
+AccessModifierOffset: -1 +AlignAfterOpenBracket: Align +AlignArrayOfStructures: None +AlignConsecutiveAssignments: + Enabled: false + AcrossEmptyLines: false + AcrossComments: false + AlignCompound: false + PadOperators: true +AlignConsecutiveBitFields: + Enabled: false + AcrossEmptyLines: false + AcrossComments: false + AlignCompound: false + PadOperators: false +AlignConsecutiveDeclarations: + Enabled: false + AcrossEmptyLines: false + AcrossComments: false + AlignCompound: false + PadOperators: false +AlignConsecutiveMacros: + Enabled: false + AcrossEmptyLines: false + AcrossComments: false + AlignCompound: false + PadOperators: false +AlignEscapedNewlines: Left +AlignOperands: Align +AlignTrailingComments: true +AllowAllArgumentsOnNextLine: true +AllowAllParametersOfDeclarationOnNextLine: false +AllowShortEnumsOnASingleLine: true +AllowShortBlocksOnASingleLine: Never +AllowShortCaseLabelsOnASingleLine: false +AllowShortFunctionsOnASingleLine: Inline +AllowShortLambdasOnASingleLine: All +AllowShortIfStatementsOnASingleLine: Never +AllowShortLoopsOnASingleLine: false +AlwaysBreakAfterDefinitionReturnType: None +AlwaysBreakAfterReturnType: None +AlwaysBreakBeforeMultilineStrings: true +AlwaysBreakTemplateDeclarations: Yes +AttributeMacros: + - __capability +BinPackArguments: true +BinPackParameters: false +BraceWrapping: + AfterCaseLabel: false + AfterClass: false + AfterControlStatement: Never + AfterEnum: false + AfterFunction: false + AfterNamespace: false + AfterObjCDeclaration: false + AfterStruct: false + AfterUnion: false + AfterExternBlock: false + BeforeCatch: false + BeforeElse: false + BeforeLambdaBody: false + BeforeWhile: false + IndentBraces: false + SplitEmptyFunction: true + SplitEmptyRecord: true + SplitEmptyNamespace: true +BreakBeforeBinaryOperators: None +BreakBeforeConceptDeclarations: Always +BreakBeforeBraces: Attach +BreakBeforeInheritanceComma: false +BreakInheritanceList: BeforeColon +BreakBeforeTernaryOperators: true 
+BreakConstructorInitializersBeforeComma: false +BreakConstructorInitializers: BeforeColon +BreakAfterJavaFieldAnnotations: false +BreakStringLiterals: true +ColumnLimit: 120 +CommentPragmas: '^ IWYU pragma:' +QualifierAlignment: Leave +CompactNamespaces: false +ConstructorInitializerIndentWidth: 4 +ContinuationIndentWidth: 4 +Cpp11BracedListStyle: true +DeriveLineEnding: true +DerivePointerAlignment: false +DisableFormat: false +EmptyLineAfterAccessModifier: Never +EmptyLineBeforeAccessModifier: LogicalBlock +ExperimentalAutoDetectBinPacking: false +PackConstructorInitializers: NextLine +BasedOnStyle: '' +ConstructorInitializerAllOnOneLineOrOnePerLine: false +AllowAllConstructorInitializersOnNextLine: true +FixNamespaceComments: true +ForEachMacros: + - foreach + - Q_FOREACH + - BOOST_FOREACH +IfMacros: + - KJ_IF_MAYBE +IncludeBlocks: Preserve +IncludeCategories: + - Regex: '^' + Priority: 2 + SortPriority: 0 + CaseSensitive: false + - Regex: '^<.*\.h>' + Priority: 1 + SortPriority: 0 + CaseSensitive: false + - Regex: '^<.*' + Priority: 2 + SortPriority: 0 + CaseSensitive: false + - Regex: '.*' + Priority: 3 + SortPriority: 0 + CaseSensitive: false +IncludeIsMainRegex: '([-_](test|unittest))?$' +IncludeIsMainSourceRegex: '' +IndentAccessModifiers: false +IndentCaseLabels: true +IndentCaseBlocks: false +IndentGotoLabels: true +IndentPPDirectives: None +IndentExternBlock: AfterExternBlock +IndentRequiresClause: true +IndentWidth: 4 +IndentWrappedFunctionNames: false +InsertBraces: false +InsertTrailingCommas: None +JavaScriptQuotes: Leave +JavaScriptWrapImports: true +KeepEmptyLinesAtTheStartOfBlocks: false +LambdaBodyIndentation: Signature +MacroBlockBegin: '' +MacroBlockEnd: '' +MaxEmptyLinesToKeep: 1 +NamespaceIndentation: None +ObjCBinPackProtocolList: Never +ObjCBlockIndentWidth: 2 +ObjCBreakBeforeNestedBlockParam: true +ObjCSpaceAfterProperty: false +ObjCSpaceBeforeProtocolList: true +PenaltyBreakAssignment: 2 +PenaltyBreakBeforeFirstCallParameter: 1 
+PenaltyBreakComment: 300 +PenaltyBreakFirstLessLess: 120 +PenaltyBreakOpenParenthesis: 0 +PenaltyBreakString: 1000 +PenaltyBreakTemplateDeclaration: 10 +PenaltyExcessCharacter: 1000000 +PenaltyReturnTypeOnItsOwnLine: 200 +PenaltyIndentedWhitespace: 0 +PointerAlignment: Left +PPIndentWidth: -1 +RawStringFormats: + - Language: Cpp + Delimiters: + - cc + - CC + - cpp + - Cpp + - CPP + - 'c++' + - 'C++' + CanonicalDelimiter: '' + BasedOnStyle: google + - Language: TextProto + Delimiters: + - pb + - PB + - proto + - PROTO + EnclosingFunctions: + - EqualsProto + - EquivToProto + - PARSE_PARTIAL_TEXT_PROTO + - PARSE_TEST_PROTO + - PARSE_TEXT_PROTO + - ParseTextOrDie + - ParseTextProtoOrDie + - ParseTestProto + - ParsePartialTestProto + CanonicalDelimiter: pb + BasedOnStyle: google +ReferenceAlignment: Pointer +ReflowComments: true +RemoveBracesLLVM: false +RequiresClausePosition: OwnLine +SeparateDefinitionBlocks: Leave +ShortNamespaceLines: 1 +SortIncludes: CaseSensitive +SortJavaStaticImport: Before +SortUsingDeclarations: true +SpaceAfterCStyleCast: false +SpaceAfterLogicalNot: false +SpaceAfterTemplateKeyword: true +SpaceBeforeAssignmentOperators: true +SpaceBeforeCaseColon: false +SpaceBeforeCpp11BracedList: false +SpaceBeforeCtorInitializerColon: true +SpaceBeforeInheritanceColon: true +SpaceBeforeParens: ControlStatements +SpaceBeforeParensOptions: + AfterControlStatements: true + AfterForeachMacros: true + AfterFunctionDefinitionName: false + AfterFunctionDeclarationName: false + AfterIfMacros: true + AfterOverloadedOperator: false + AfterRequiresInClause: false + AfterRequiresInExpression: false + BeforeNonEmptyParentheses: false +SpaceAroundPointerQualifiers: Default +SpaceBeforeRangeBasedForLoopColon: true +SpaceInEmptyBlock: false +SpaceInEmptyParentheses: false +SpacesBeforeTrailingComments: 2 +SpacesInAngles: Never +SpacesInConditionalStatement: false +SpacesInContainerLiterals: true +SpacesInCStyleCastParentheses: false +SpacesInLineCommentPrefix: + 
Minimum: 1 + Maximum: -1 +SpacesInParentheses: false +SpacesInSquareBrackets: false +SpaceBeforeSquareBrackets: false +BitFieldColonSpacing: Both +Standard: Auto +StatementAttributeLikeMacros: + - Q_EMIT +StatementMacros: + - Q_UNUSED + - QT_REQUIRE_VERSION +TabWidth: 8 +UseCRLF: false +UseTab: Never +WhitespaceSensitiveMacros: + - STRINGIZE + - PP_STRINGIZE + - BOOST_PP_STRINGIZE + - NS_SWIFT_NAME + - CF_SWIFT_NAME +... + diff --git a/rust/automerge-c/.gitignore b/rust/automerge-c/.gitignore new file mode 100644 index 00000000..14d74973 --- /dev/null +++ b/rust/automerge-c/.gitignore @@ -0,0 +1,10 @@ +automerge +automerge.h +automerge.o +build/ +CMakeCache.txt +CMakeFiles +CMakePresets.json +Makefile +DartConfiguration.tcl +out/ diff --git a/rust/automerge-c/CMakeLists.txt b/rust/automerge-c/CMakeLists.txt new file mode 100644 index 00000000..0c35eebd --- /dev/null +++ b/rust/automerge-c/CMakeLists.txt @@ -0,0 +1,305 @@ +cmake_minimum_required(VERSION 3.23 FATAL_ERROR) + +project(automerge-c VERSION 0.1.0 + LANGUAGES C + DESCRIPTION "C bindings for the Automerge Rust library.") + +set(LIBRARY_NAME "automerge") + +set(CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS ON) + +option(BUILD_SHARED_LIBS "Enable the choice of a shared or static library.") + +include(CTest) + +include(CMakePackageConfigHelpers) + +include(GNUInstallDirs) + +set(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake") + +string(MAKE_C_IDENTIFIER ${PROJECT_NAME} SYMBOL_PREFIX) + +string(TOUPPER ${SYMBOL_PREFIX} SYMBOL_PREFIX) + +set(CARGO_TARGET_DIR "${CMAKE_BINARY_DIR}/Cargo/target") + +set(CBINDGEN_INCLUDEDIR "${CMAKE_BINARY_DIR}/${CMAKE_INSTALL_INCLUDEDIR}") + +set(CBINDGEN_TARGET_DIR "${CBINDGEN_INCLUDEDIR}/${PROJECT_NAME}") + +find_program ( + CARGO_CMD + "cargo" + PATHS "$ENV{CARGO_HOME}/bin" + DOC "The Cargo command" +) + +if(NOT CARGO_CMD) + message(FATAL_ERROR "Cargo (Rust package manager) not found! 
" + "Please install it and/or set the CARGO_HOME " + "environment variable to its path.") +endif() + +string(TOLOWER "${CMAKE_BUILD_TYPE}" BUILD_TYPE_LOWER) + +# In order to build with -Z build-std, we need to pass target explicitly. +# https://doc.rust-lang.org/cargo/reference/unstable.html#build-std +execute_process ( + COMMAND rustc -vV + OUTPUT_VARIABLE RUSTC_VERSION + OUTPUT_STRIP_TRAILING_WHITESPACE +) +string(REGEX REPLACE ".*host: ([^ \n]*).*" "\\1" + CARGO_TARGET + ${RUSTC_VERSION} +) + +if(BUILD_TYPE_LOWER STREQUAL debug) + set(CARGO_BUILD_TYPE "debug") + + set(CARGO_FLAG --target=${CARGO_TARGET}) +else() + set(CARGO_BUILD_TYPE "release") + + if (NOT RUSTC_VERSION MATCHES "nightly") + set(RUSTUP_TOOLCHAIN nightly) + endif() + + set(RUSTFLAGS -C\ panic=abort) + + set(CARGO_FLAG -Z build-std=std,panic_abort --release --target=${CARGO_TARGET}) +endif() + +set(CARGO_FEATURES "") + +set(CARGO_BINARY_DIR "${CARGO_TARGET_DIR}/${CARGO_TARGET}/${CARGO_BUILD_TYPE}") + +set(BINDINGS_NAME "${LIBRARY_NAME}_core") + +configure_file( + ${CMAKE_MODULE_PATH}/Cargo.toml.in + ${CMAKE_SOURCE_DIR}/Cargo.toml + @ONLY + NEWLINE_STYLE LF +) + +set(INCLUDE_GUARD_PREFIX "${SYMBOL_PREFIX}") + +configure_file( + ${CMAKE_MODULE_PATH}/cbindgen.toml.in + ${CMAKE_SOURCE_DIR}/cbindgen.toml + @ONLY + NEWLINE_STYLE LF +) + +set(CARGO_OUTPUT + ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h + ${CARGO_BINARY_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}${BINDINGS_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX} +) + +# \note cbindgen's naming behavior isn't fully configurable and it ignores +# `const fn` calls (https://github.com/eqrion/cbindgen/issues/252). +add_custom_command( + OUTPUT + ${CARGO_OUTPUT} + COMMAND + # \note cbindgen won't regenerate its output header file after it's been removed but it will after its + # configuration file has been updated. 
+ ${CMAKE_COMMAND} -DCONDITION=NOT_EXISTS -P ${CMAKE_SOURCE_DIR}/cmake/file-touch.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h ${CMAKE_SOURCE_DIR}/cbindgen.toml + COMMAND + ${CMAKE_COMMAND} -E env CARGO_TARGET_DIR=${CARGO_TARGET_DIR} CBINDGEN_TARGET_DIR=${CBINDGEN_TARGET_DIR} RUSTUP_TOOLCHAIN=${RUSTUP_TOOLCHAIN} RUSTFLAGS=${RUSTFLAGS} ${CARGO_CMD} build ${CARGO_FLAG} ${CARGO_FEATURES} + COMMAND + # Compensate for cbindgen's translation of consecutive uppercase letters to "ScreamingSnakeCase". + ${CMAKE_COMMAND} -DMATCH_REGEX=A_M\([^_]+\)_ -DREPLACE_EXPR=AM_\\1_ -P ${CMAKE_SOURCE_DIR}/cmake/file-regex-replace.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h + COMMAND + # Compensate for cbindgen ignoring `std:mem::size_of()` calls. + ${CMAKE_COMMAND} -DMATCH_REGEX=USIZE_ -DREPLACE_EXPR=\+${CMAKE_SIZEOF_VOID_P} -P ${CMAKE_SOURCE_DIR}/cmake/file-regex-replace.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h + MAIN_DEPENDENCY + src/lib.rs + DEPENDS + src/actor_id.rs + src/byte_span.rs + src/change.rs + src/doc.rs + src/doc/list.rs + src/doc/map.rs + src/doc/utils.rs + src/index.rs + src/item.rs + src/items.rs + src/obj.rs + src/result.rs + src/sync.rs + src/sync/have.rs + src/sync/message.rs + src/sync/state.rs + ${CMAKE_SOURCE_DIR}/build.rs + ${CMAKE_MODULE_PATH}/Cargo.toml.in + ${CMAKE_MODULE_PATH}/cbindgen.toml.in + WORKING_DIRECTORY + ${CMAKE_SOURCE_DIR} + COMMENT + "Producing the bindings' artifacts with Cargo..." + VERBATIM +) + +add_custom_target(${BINDINGS_NAME}_artifacts ALL + DEPENDS ${CARGO_OUTPUT} +) + +add_library(${BINDINGS_NAME} STATIC IMPORTED GLOBAL) + +target_include_directories(${BINDINGS_NAME} INTERFACE "${CBINDGEN_INCLUDEDIR}") + +set_target_properties( + ${BINDINGS_NAME} + PROPERTIES + # \note Cargo writes a debug build into a nested directory instead of + # decorating its name. 
+ DEBUG_POSTFIX "" + DEFINE_SYMBOL "" + IMPORTED_IMPLIB "" + IMPORTED_LOCATION "${CARGO_BINARY_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}${BINDINGS_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX}" + IMPORTED_NO_SONAME "TRUE" + IMPORTED_SONAME "" + LINKER_LANGUAGE C + PUBLIC_HEADER "${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h" + SOVERSION "${PROJECT_VERSION_MAJOR}" + VERSION "${PROJECT_VERSION}" + # \note Cargo exports all of the symbols automatically. + WINDOWS_EXPORT_ALL_SYMBOLS "TRUE" +) + +target_compile_definitions(${BINDINGS_NAME} INTERFACE $) + +set(UTILS_SUBDIR "utils") + +add_custom_command( + OUTPUT + ${CBINDGEN_TARGET_DIR}/${UTILS_SUBDIR}/enum_string.h + ${CMAKE_BINARY_DIR}/src/${UTILS_SUBDIR}/enum_string.c + COMMAND + ${CMAKE_COMMAND} -DPROJECT_NAME=${PROJECT_NAME} -DLIBRARY_NAME=${LIBRARY_NAME} -DSUBDIR=${UTILS_SUBDIR} -P ${CMAKE_SOURCE_DIR}/cmake/enum-string-functions-gen.cmake -- ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h ${CBINDGEN_TARGET_DIR}/${UTILS_SUBDIR}/enum_string.h ${CMAKE_BINARY_DIR}/src/${UTILS_SUBDIR}/enum_string.c + MAIN_DEPENDENCY + ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h + DEPENDS + ${CMAKE_SOURCE_DIR}/cmake/enum-string-functions-gen.cmake + WORKING_DIRECTORY + ${CMAKE_SOURCE_DIR} + COMMENT + "Generating the enum string functions with CMake..." 
+ VERBATIM +) + +add_custom_target(${LIBRARY_NAME}_utilities + DEPENDS ${CBINDGEN_TARGET_DIR}/${UTILS_SUBDIR}/enum_string.h + ${CMAKE_BINARY_DIR}/src/${UTILS_SUBDIR}/enum_string.c +) + +add_library(${LIBRARY_NAME}) + +target_compile_features(${LIBRARY_NAME} PRIVATE c_std_99) + +set(CMAKE_THREAD_PREFER_PTHREAD TRUE) + +set(THREADS_PREFER_PTHREAD_FLAG TRUE) + +find_package(Threads REQUIRED) + +set(LIBRARY_DEPENDENCIES Threads::Threads ${CMAKE_DL_LIBS}) + +if(WIN32) + list(APPEND LIBRARY_DEPENDENCIES Bcrypt userenv ws2_32) +else() + list(APPEND LIBRARY_DEPENDENCIES m) +endif() + +target_link_libraries(${LIBRARY_NAME} + PUBLIC ${BINDINGS_NAME} + ${LIBRARY_DEPENDENCIES} +) + +# \note An imported library's INTERFACE_INCLUDE_DIRECTORIES property can't +# contain a non-existent path so its build-time include directory +# must be specified for all of its dependent targets instead. +target_include_directories(${LIBRARY_NAME} + PUBLIC "$" + "$" +) + +add_dependencies(${LIBRARY_NAME} ${BINDINGS_NAME}_artifacts) + +# Generate the configuration header. 
+math(EXPR INTEGER_PROJECT_VERSION_MAJOR "${PROJECT_VERSION_MAJOR} * 100000") + +math(EXPR INTEGER_PROJECT_VERSION_MINOR "${PROJECT_VERSION_MINOR} * 100") + +math(EXPR INTEGER_PROJECT_VERSION_PATCH "${PROJECT_VERSION_PATCH}") + +math(EXPR INTEGER_PROJECT_VERSION "${INTEGER_PROJECT_VERSION_MAJOR} + \ + ${INTEGER_PROJECT_VERSION_MINOR} + \ + ${INTEGER_PROJECT_VERSION_PATCH}") + +configure_file( + ${CMAKE_MODULE_PATH}/config.h.in + ${CBINDGEN_TARGET_DIR}/config.h + @ONLY + NEWLINE_STYLE LF +) + +target_sources(${LIBRARY_NAME} + PRIVATE + src/${UTILS_SUBDIR}/result.c + src/${UTILS_SUBDIR}/stack_callback_data.c + src/${UTILS_SUBDIR}/stack.c + src/${UTILS_SUBDIR}/string.c + ${CMAKE_BINARY_DIR}/src/${UTILS_SUBDIR}/enum_string.c + PUBLIC + FILE_SET api TYPE HEADERS + BASE_DIRS + ${CBINDGEN_INCLUDEDIR} + ${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR} + FILES + ${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h + ${CBINDGEN_TARGET_DIR}/${UTILS_SUBDIR}/enum_string.h + ${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/result.h + ${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/stack_callback_data.h + ${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/stack.h + ${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/string.h + INTERFACE + FILE_SET config TYPE HEADERS + BASE_DIRS + ${CBINDGEN_INCLUDEDIR} + FILES + ${CBINDGEN_TARGET_DIR}/config.h +) + +install( + TARGETS ${LIBRARY_NAME} + EXPORT ${PROJECT_NAME}-config + FILE_SET api + FILE_SET config +) + +# \note Install the Cargo-built core bindings to enable direct linkage. 
+install( + FILES $ + DESTINATION ${CMAKE_INSTALL_LIBDIR} +) + +install(EXPORT ${PROJECT_NAME}-config + FILE ${PROJECT_NAME}-config.cmake + NAMESPACE "${PROJECT_NAME}::" + DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/${LIB} +) + +if(BUILD_TESTING) + add_subdirectory(test EXCLUDE_FROM_ALL) + + enable_testing() +endif() + +add_subdirectory(docs) + +add_subdirectory(examples EXCLUDE_FROM_ALL) diff --git a/automerge-c/Cargo.toml b/rust/automerge-c/Cargo.toml similarity index 82% rename from automerge-c/Cargo.toml rename to rust/automerge-c/Cargo.toml index 851a3470..95a3a29c 100644 --- a/automerge-c/Cargo.toml +++ b/rust/automerge-c/Cargo.toml @@ -7,8 +7,8 @@ license = "MIT" rust-version = "1.57.0" [lib] -name = "automerge" -crate-type = ["cdylib", "staticlib"] +name = "automerge_core" +crate-type = ["staticlib"] bench = false doc = false @@ -19,4 +19,4 @@ libc = "^0.2" smol_str = "^0.1.21" [build-dependencies] -cbindgen = "^0.20" +cbindgen = "^0.24" diff --git a/rust/automerge-c/README.md b/rust/automerge-c/README.md new file mode 100644 index 00000000..1fbca3df --- /dev/null +++ b/rust/automerge-c/README.md @@ -0,0 +1,207 @@ +# Overview + +automerge-c exposes a C API that can either be used directly or as the basis +for other language bindings that have good support for calling C functions. + +# Installing + +See the main README for instructions on getting your environment set up and then +you can build the automerge-c library and install its constituent files within +a root directory of your choosing (e.g. 
"/usr/local") like so: +```shell +cmake -E make_directory automerge-c/build +cmake -S automerge-c -B automerge-c/build +cmake --build automerge-c/build +cmake --install automerge-c/build --prefix "/usr/local" +``` +Installation is important because the name, location and structure of CMake's +out-of-source build subdirectory is subject to change based on the platform and +the release version; generated headers like `automerge-c/config.h` and +`automerge-c/utils/enum_string.h` are only sure to be found within their +installed locations. + +It's not obvious because they are versioned but the `Cargo.toml` and +`cbindgen.toml` configuration files are also generated in order to ensure that +the project name, project version and library name that they contain match those +specified within the top-level `CMakeLists.txt` file. + +If you'd like to cross compile the library for different platforms you can do so +using [cross](https://github.com/cross-rs/cross). For example: + +- `cross build --manifest-path rust/automerge-c/Cargo.toml -r --target aarch64-unknown-linux-gnu` + +This will output a shared library in the directory `rust/target/aarch64-unknown-linux-gnu/release/`. + +You can replace `aarch64-unknown-linux-gnu` with any +[cross supported targets](https://github.com/cross-rs/cross#supported-targets). +The targets below are known to work, though other targets are expected to work +too: + +- `x86_64-apple-darwin` +- `aarch64-apple-darwin` +- `x86_64-unknown-linux-gnu` +- `aarch64-unknown-linux-gnu` + +As a caveat, CMake generates the `automerge.h` header file in terms of the +processor architecture of the computer on which it was built so, for example, +don't use a header generated for a 64-bit processor if your target is a 32-bit +processor. 
+ +# Usage + +You can build and view the C API's HTML reference documentation like so: +```shell +cmake -E make_directory automerge-c/build +cmake -S automerge-c -B automerge-c/build +cmake --build automerge-c/build --target automerge_docs +firefox automerge-c/build/src/html/index.html +``` + +To get started quickly, look at the +[examples](https://github.com/automerge/automerge-rs/tree/main/rust/automerge-c/examples). + +Almost all operations in automerge-c act on an Automerge document +(`AMdoc` struct) which is structurally similar to a JSON document. + +You can get a document by calling either `AMcreate()` or `AMload()`. Operations +on a given document are not thread-safe so you must use a mutex or similar to +avoid calling more than one function on the same one concurrently. + +A C API function that could succeed or fail returns a result (`AMresult` struct) +containing a status code (`AMstatus` enum) and either a sequence of at least one +item (`AMitem` struct) or a read-only view onto a UTF-8 error message string +(`AMbyteSpan` struct). +An item contains up to three components: an index within its parent object +(`AMbyteSpan` struct or `size_t`), a unique identifier (`AMobjId` struct) and a +value. +The result of a successful function call that doesn't produce any values will +contain a single item that is void (`AM_VAL_TYPE_VOID`). +A returned result **must** be passed to `AMresultFree()` once the item(s) or +error message it contains is no longer needed in order to avoid a memory leak. +``` +#include +#include +#include +#include + +int main(int argc, char** argv) { + AMresult *docResult = AMcreate(NULL); + + if (AMresultStatus(docResult) != AM_STATUS_OK) { + char* const err_msg = AMstrdup(AMresultError(docResult), NULL); + printf("failed to create doc: %s", err_msg); + free(err_msg); + goto cleanup; + } + + AMdoc *doc; + AMitemToDoc(AMresultItem(docResult), &doc); + + // useful code goes here! 
+ +cleanup: + AMresultFree(docResult); +} +``` + +If you are writing an application in C, the `AMstackItem()`, `AMstackItems()` +and `AMstackResult()` functions enable the lifetimes of anonymous results to be +centrally managed and allow the same validation logic to be reused without +relying upon the `goto` statement (see examples/quickstart.c). + +If you are wrapping automerge-c in another language, particularly one that has a +garbage collector, you can call the `AMresultFree()` function within a finalizer +to ensure that memory is reclaimed when it is no longer needed. + +Automerge documents consist of a mutable root which is always a map from string +keys to values. A value can be one of the following types: + +- A number of type double / int64_t / uint64_t +- An explicit true / false / null +- An immutable UTF-8 string (`AMbyteSpan`). +- An immutable array of arbitrary bytes (`AMbyteSpan`). +- A mutable map from string keys to values. +- A mutable list of values. +- A mutable UTF-8 string. + +If you read from a location in the document with no value, an item with type +`AM_VAL_TYPE_VOID` will be returned, but you cannot write such a value +explicitly. + +Under the hood, automerge references a mutable object by its object identifier +where `AM_ROOT` signifies a document's root map object. + +There are functions to put each type of value into either a map or a list, and +functions to read the current or a historical value from a map or a list. As (in general) collaborators +may edit the document at any time, you cannot guarantee that the type of the +value at a given part of the document will stay the same. As a result, reading +from the document will return an `AMitem` struct that you can inspect to +determine the type of value that it contains. + +Strings in automerge-c are represented using an `AMbyteSpan` which contains a +pointer and a length. Strings must be valid UTF-8 and may contain NUL (`0`) +characters. 
+For your convenience, you can call `AMstr()` to get the `AMbyteSpan` struct +equivalent of a null-terminated byte string or `AMstrdup()` to get the +representation of an `AMbyteSpan` struct as a null-terminated byte string +wherein its NUL characters have been removed/replaced as you choose. + +Putting all of that together, to read and write from the root of the document +you can do this: + +``` +#include +#include +#include +#include + +int main(int argc, char** argv) { + // ...previous example... + AMdoc *doc; + AMitemToDoc(AMresultItem(docResult), &doc); + + AMresult *putResult = AMmapPutStr(doc, AM_ROOT, AMstr("key"), AMstr("value")); + if (AMresultStatus(putResult) != AM_STATUS_OK) { + char* const err_msg = AMstrdup(AMresultError(putResult), NULL); + printf("failed to put: %s", err_msg); + free(err_msg); + goto cleanup; + } + + AMresult *getResult = AMmapGet(doc, AM_ROOT, AMstr("key"), NULL); + if (AMresultStatus(getResult) != AM_STATUS_OK) { + char* const err_msg = AMstrdup(AMresultError(putResult), NULL); + printf("failed to get: %s", err_msg); + free(err_msg); + goto cleanup; + } + + AMbyteSpan got; + if (AMitemToStr(AMresultItem(getResult), &got)) { + char* const c_str = AMstrdup(got, NULL); + printf("Got %zu-character string \"%s\"", got.count, c_str); + free(c_str); + } else { + printf("expected to read a string!"); + goto cleanup; + } + + +cleanup: + AMresultFree(getResult); + AMresultFree(putResult); + AMresultFree(docResult); +} +``` + +Functions that do not return an `AMresult` (for example `AMitemKey()`) do +not allocate memory but rather reference memory that was previously +allocated. It's therefore important to keep the original `AMresult` alive (in +this case the one returned by `AMmapRange()`) until after you are finished with +the items that it contains. However, the memory for an individual `AMitem` can +be shared with a new `AMresult` by calling `AMitemResult()` on it. 
In other +words, a select group of items can be filtered out of a collection and only each +one's corresponding `AMresult` must be kept alive from that point forward; the +originating collection's `AMresult` can be safely freed. + +Beyond that, good luck! diff --git a/automerge-c/build.rs b/rust/automerge-c/build.rs similarity index 90% rename from automerge-c/build.rs rename to rust/automerge-c/build.rs index 00fd0f87..bf12a105 100644 --- a/automerge-c/build.rs +++ b/rust/automerge-c/build.rs @@ -10,7 +10,7 @@ fn main() { let config = cbindgen::Config::from_file("cbindgen.toml") .expect("Unable to find cbindgen.toml configuration file"); - if let Ok(writer) = cbindgen::generate_with_config(&crate_dir, config) { + if let Ok(writer) = cbindgen::generate_with_config(crate_dir, config) { // \note CMake sets this environment variable before invoking Cargo so // that it can direct the generated header file into its // out-of-source build directory for post-processing. diff --git a/automerge-c/cbindgen.toml b/rust/automerge-c/cbindgen.toml similarity index 71% rename from automerge-c/cbindgen.toml rename to rust/automerge-c/cbindgen.toml index ada7f48d..21eaaadd 100644 --- a/automerge-c/cbindgen.toml +++ b/rust/automerge-c/cbindgen.toml @@ -1,7 +1,7 @@ after_includes = """\n /** * \\defgroup enumerations Public Enumerations - Symbolic names for integer constants. + * Symbolic names for integer constants. */ /** @@ -12,21 +12,23 @@ after_includes = """\n #define AM_ROOT NULL /** - * \\memberof AMchangeHash + * \\memberof AMdoc * \\def AM_CHANGE_HASH_SIZE * \\brief The count of bytes in a change hash. */ #define AM_CHANGE_HASH_SIZE 32 """ -autogen_warning = "/* Warning, this file is autogenerated by cbindgen. Don't modify this manually. */" +autogen_warning = """ +/** + * \\file + * \\brief All constants, functions and types in the core Automerge C API. + * + * \\warning This file is auto-generated by cbindgen. 
+ */ +""" documentation = true documentation_style = "doxy" -header = """ -/** \\file - * All constants, functions and types in the Automerge library's C API. - */ - """ -include_guard = "AUTOMERGE_H" +include_guard = "AUTOMERGE_C_H" includes = [] language = "C" line_length = 140 diff --git a/rust/automerge-c/cmake/Cargo.toml.in b/rust/automerge-c/cmake/Cargo.toml.in new file mode 100644 index 00000000..781e2fef --- /dev/null +++ b/rust/automerge-c/cmake/Cargo.toml.in @@ -0,0 +1,22 @@ +[package] +name = "@PROJECT_NAME@" +version = "@PROJECT_VERSION@" +authors = ["Orion Henry ", "Jason Kankiewicz "] +edition = "2021" +license = "MIT" +rust-version = "1.57.0" + +[lib] +name = "@BINDINGS_NAME@" +crate-type = ["staticlib"] +bench = false +doc = false + +[dependencies] +@LIBRARY_NAME@ = { path = "../@LIBRARY_NAME@" } +hex = "^0.4.3" +libc = "^0.2" +smol_str = "^0.1.21" + +[build-dependencies] +cbindgen = "^0.24" diff --git a/automerge-c/cmake/automerge-c-config.cmake.in b/rust/automerge-c/cmake/automerge-c-config.cmake.in similarity index 100% rename from automerge-c/cmake/automerge-c-config.cmake.in rename to rust/automerge-c/cmake/automerge-c-config.cmake.in diff --git a/rust/automerge-c/cmake/cbindgen.toml.in b/rust/automerge-c/cmake/cbindgen.toml.in new file mode 100644 index 00000000..5122b75c --- /dev/null +++ b/rust/automerge-c/cmake/cbindgen.toml.in @@ -0,0 +1,48 @@ +after_includes = """\n +/** + * \\defgroup enumerations Public Enumerations + * Symbolic names for integer constants. + */ + +/** + * \\memberof AMdoc + * \\def AM_ROOT + * \\brief The root object of a document. + */ +#define AM_ROOT NULL + +/** + * \\memberof AMdoc + * \\def AM_CHANGE_HASH_SIZE + * \\brief The count of bytes in a change hash. + */ +#define AM_CHANGE_HASH_SIZE 32 +""" +autogen_warning = """ +/** + * \\file + * \\brief All constants, functions and types in the core Automerge C API. + * + * \\warning This file is auto-generated by cbindgen. 
+ */ +""" +documentation = true +documentation_style = "doxy" +include_guard = "@INCLUDE_GUARD_PREFIX@_H" +includes = [] +language = "C" +line_length = 140 +no_includes = true +style = "both" +sys_includes = ["stdbool.h", "stddef.h", "stdint.h", "time.h"] +usize_is_size_t = true + +[enum] +derive_const_casts = true +enum_class = true +must_use = "MUST_USE_ENUM" +prefix_with_name = true +rename_variants = "ScreamingSnakeCase" + +[export] +item_types = ["constants", "enums", "functions", "opaque", "structs", "typedefs"] diff --git a/rust/automerge-c/cmake/config.h.in b/rust/automerge-c/cmake/config.h.in new file mode 100644 index 00000000..40482cb9 --- /dev/null +++ b/rust/automerge-c/cmake/config.h.in @@ -0,0 +1,35 @@ +#ifndef @INCLUDE_GUARD_PREFIX@_CONFIG_H +#define @INCLUDE_GUARD_PREFIX@_CONFIG_H +/** + * \file + * \brief Configuration pararameters defined by the build system. + * + * \warning This file is auto-generated by CMake. + */ + +/** + * \def @SYMBOL_PREFIX@_VERSION + * \brief Denotes a semantic version of the form {MAJOR}{MINOR}{PATCH} as three, + * two-digit decimal numbers without leading zeros (e.g. 100 is 0.1.0). + */ +#define @SYMBOL_PREFIX@_VERSION @INTEGER_PROJECT_VERSION@ + +/** + * \def @SYMBOL_PREFIX@_MAJOR_VERSION + * \brief Denotes a semantic major version as a decimal number. + */ +#define @SYMBOL_PREFIX@_MAJOR_VERSION (@SYMBOL_PREFIX@_VERSION / 100000) + +/** + * \def @SYMBOL_PREFIX@_MINOR_VERSION + * \brief Denotes a semantic minor version as a decimal number. + */ +#define @SYMBOL_PREFIX@_MINOR_VERSION ((@SYMBOL_PREFIX@_VERSION / 100) % 1000) + +/** + * \def @SYMBOL_PREFIX@_PATCH_VERSION + * \brief Denotes a semantic patch version as a decimal number. 
+ */ +#define @SYMBOL_PREFIX@_PATCH_VERSION (@SYMBOL_PREFIX@_VERSION % 100) + +#endif /* @INCLUDE_GUARD_PREFIX@_CONFIG_H */ diff --git a/rust/automerge-c/cmake/enum-string-functions-gen.cmake b/rust/automerge-c/cmake/enum-string-functions-gen.cmake new file mode 100644 index 00000000..77080e8d --- /dev/null +++ b/rust/automerge-c/cmake/enum-string-functions-gen.cmake @@ -0,0 +1,183 @@ +# This CMake script is used to generate a header and a source file for utility +# functions that convert the tags of generated enum types into strings and +# strings into the tags of generated enum types. +cmake_minimum_required(VERSION 3.23 FATAL_ERROR) + +# Seeks the starting line of the source enum's declaration. +macro(seek_enum_mode) + if (line MATCHES "^(typedef[ \t]+)?enum ") + string(REGEX REPLACE "^enum ([0-9a-zA-Z_]+).*$" "\\1" enum_name "${line}") + set(mode "read_tags") + endif() +endmacro() + +# Scans the input for the current enum's tags. +macro(read_tags_mode) + if(line MATCHES "^}") + set(mode "generate") + elseif(line MATCHES "^[A-Z0-9_]+.*$") + string(REGEX REPLACE "^([A-Za-z0-9_]+).*$" "\\1" tmp "${line}") + list(APPEND enum_tags "${tmp}") + endif() +endmacro() + +macro(write_header_file) + # Generate a to-string function declaration. + list(APPEND header_body + "/**\n" + " * \\ingroup enumerations\n" + " * \\brief Gets the string representation of an `${enum_name}` enum tag.\n" + " *\n" + " * \\param[in] tag An `${enum_name}` enum tag.\n" + " * \\return A null-terminated byte string.\n" + " */\n" + "char const* ${enum_name}ToString(${enum_name} const tag)\;\n" + "\n") + # Generate a from-string function declaration. 
+ list(APPEND header_body + "/**\n" + " * \\ingroup enumerations\n" + " * \\brief Gets an `${enum_name}` enum tag from its string representation.\n" + " *\n" + " * \\param[out] dest An `${enum_name}` enum tag pointer.\n" + " * \\param[in] src A null-terminated byte string.\n" + " * \\return `true` if \\p src matches the string representation of an\n" + " * `${enum_name}` enum tag, `false` otherwise.\n" + " */\n" + "bool ${enum_name}FromString(${enum_name}* dest, char const* const src)\;\n" + "\n") +endmacro() + +macro(write_source_file) + # Generate a to-string function implementation. + list(APPEND source_body + "char const* ${enum_name}ToString(${enum_name} const tag) {\n" + " switch (tag) {\n" + " default:\n" + " return \"???\"\;\n") + foreach(label IN LISTS enum_tags) + list(APPEND source_body + " case ${label}:\n" + " return \"${label}\"\;\n") + endforeach() + list(APPEND source_body + " }\n" + "}\n" + "\n") + # Generate a from-string function implementation. + list(APPEND source_body + "bool ${enum_name}FromString(${enum_name}* dest, char const* const src) {\n") + foreach(label IN LISTS enum_tags) + list(APPEND source_body + " if (!strcmp(src, \"${label}\")) {\n" + " *dest = ${label}\;\n" + " return true\;\n" + " }\n") + endforeach() + list(APPEND source_body + " return false\;\n" + "}\n" + "\n") +endmacro() + +function(main) + set(header_body "") + # File header and includes. + list(APPEND header_body + "#ifndef ${include_guard}\n" + "#define ${include_guard}\n" + "/**\n" + " * \\file\n" + " * \\brief Utility functions for converting enum tags into null-terminated\n" + " * byte strings and vice versa.\n" + " *\n" + " * \\warning This file is auto-generated by CMake.\n" + " */\n" + "\n" + "#include \n" + "\n" + "#include <${library_include}>\n" + "\n") + set(source_body "") + # File includes. + list(APPEND source_body + "/** \\warning This file is auto-generated by CMake. 
*/\n" + "\n" + "#include \"stdio.h\"\n" + "#include \"string.h\"\n" + "\n" + "#include <${header_include}>\n" + "\n") + set(enum_name "") + set(enum_tags "") + set(mode "seek_enum") + file(STRINGS "${input_path}" lines) + foreach(line IN LISTS lines) + string(REGEX REPLACE "^(.+)(//.*)?" "\\1" line "${line}") + string(STRIP "${line}" line) + if(mode STREQUAL "seek_enum") + seek_enum_mode() + elseif(mode STREQUAL "read_tags") + read_tags_mode() + else() + # The end of the enum declaration was reached. + if(NOT enum_name) + # The end of the file was reached. + return() + endif() + if(NOT enum_tags) + message(FATAL_ERROR "No tags found for `${enum_name}`.") + endif() + string(TOLOWER "${enum_name}" output_stem_prefix) + string(CONCAT output_stem "${output_stem_prefix}" "_string") + cmake_path(REPLACE_EXTENSION output_stem "h" OUTPUT_VARIABLE output_header_basename) + write_header_file() + write_source_file() + set(enum_name "") + set(enum_tags "") + set(mode "seek_enum") + endif() + endforeach() + # File footer. 
+ list(APPEND header_body + "#endif /* ${include_guard} */\n") + message(STATUS "Generating header file \"${output_header_path}\"...") + file(WRITE "${output_header_path}" ${header_body}) + message(STATUS "Generating source file \"${output_source_path}\"...") + file(WRITE "${output_source_path}" ${source_body}) +endfunction() + +if(NOT DEFINED PROJECT_NAME) + message(FATAL_ERROR "Variable PROJECT_NAME is not defined.") +elseif(NOT DEFINED LIBRARY_NAME) + message(FATAL_ERROR "Variable LIBRARY_NAME is not defined.") +elseif(NOT DEFINED SUBDIR) + message(FATAL_ERROR "Variable SUBDIR is not defined.") +elseif(${CMAKE_ARGC} LESS 9) + message(FATAL_ERROR "Too few arguments.") +elseif(${CMAKE_ARGC} GREATER 10) + message(FATAL_ERROR "Too many arguments.") +elseif(NOT EXISTS ${CMAKE_ARGV5}) + message(FATAL_ERROR "Input header \"${CMAKE_ARGV7}\" not found.") +endif() +cmake_path(CONVERT "${CMAKE_ARGV7}" TO_CMAKE_PATH_LIST input_path NORMALIZE) +cmake_path(CONVERT "${CMAKE_ARGV8}" TO_CMAKE_PATH_LIST output_header_path NORMALIZE) +cmake_path(CONVERT "${CMAKE_ARGV9}" TO_CMAKE_PATH_LIST output_source_path NORMALIZE) +string(TOLOWER "${PROJECT_NAME}" project_root) +cmake_path(CONVERT "${SUBDIR}" TO_CMAKE_PATH_LIST project_subdir NORMALIZE) +string(TOLOWER "${project_subdir}" project_subdir) +string(TOLOWER "${LIBRARY_NAME}" library_stem) +cmake_path(REPLACE_EXTENSION library_stem "h" OUTPUT_VARIABLE library_basename) +string(JOIN "/" library_include "${project_root}" "${library_basename}") +string(TOUPPER "${PROJECT_NAME}" project_name_upper) +string(TOUPPER "${project_subdir}" include_guard_infix) +string(REGEX REPLACE "/" "_" include_guard_infix "${include_guard_infix}") +string(REGEX REPLACE "-" "_" include_guard_prefix "${project_name_upper}") +string(JOIN "_" include_guard_prefix "${include_guard_prefix}" "${include_guard_infix}") +string(JOIN "/" output_header_prefix "${project_root}" "${project_subdir}") +cmake_path(GET output_header_path STEM output_header_stem) 
+string(TOUPPER "${output_header_stem}" include_guard_stem) +string(JOIN "_" include_guard "${include_guard_prefix}" "${include_guard_stem}" "H") +cmake_path(GET output_header_path FILENAME output_header_basename) +string(JOIN "/" header_include "${output_header_prefix}" "${output_header_basename}") +main() diff --git a/automerge-c/cmake/file_regex_replace.cmake b/rust/automerge-c/cmake/file-regex-replace.cmake similarity index 87% rename from automerge-c/cmake/file_regex_replace.cmake rename to rust/automerge-c/cmake/file-regex-replace.cmake index 27306458..09005bc2 100644 --- a/automerge-c/cmake/file_regex_replace.cmake +++ b/rust/automerge-c/cmake/file-regex-replace.cmake @@ -1,4 +1,6 @@ -cmake_minimum_required(VERSION 3.18 FATAL_ERROR) +# This CMake script is used to perform string substitutions within a generated +# file. +cmake_minimum_required(VERSION 3.23 FATAL_ERROR) if(NOT DEFINED MATCH_REGEX) message(FATAL_ERROR "Variable \"MATCH_REGEX\" is not defined.") diff --git a/automerge-c/cmake/file_touch.cmake b/rust/automerge-c/cmake/file-touch.cmake similarity index 82% rename from automerge-c/cmake/file_touch.cmake rename to rust/automerge-c/cmake/file-touch.cmake index 087d59b6..2c196755 100644 --- a/automerge-c/cmake/file_touch.cmake +++ b/rust/automerge-c/cmake/file-touch.cmake @@ -1,4 +1,6 @@ -cmake_minimum_required(VERSION 3.18 FATAL_ERROR) +# This CMake script is used to force Cargo to regenerate the header file for the +# core bindings after the out-of-source build directory has been cleaned. 
+cmake_minimum_required(VERSION 3.23 FATAL_ERROR) if(NOT DEFINED CONDITION) message(FATAL_ERROR "Variable \"CONDITION\" is not defined.") diff --git a/rust/automerge-c/docs/CMakeLists.txt b/rust/automerge-c/docs/CMakeLists.txt new file mode 100644 index 00000000..1d94c872 --- /dev/null +++ b/rust/automerge-c/docs/CMakeLists.txt @@ -0,0 +1,35 @@ +find_package(Doxygen OPTIONAL_COMPONENTS dot) + +if(DOXYGEN_FOUND) + set(DOXYGEN_ALIASES "installed_headerfile=\\headerfile ${LIBRARY_NAME}.h <${PROJECT_NAME}/${LIBRARY_NAME}.h>") + + set(DOXYGEN_GENERATE_LATEX YES) + + set(DOXYGEN_PDF_HYPERLINKS YES) + + set(DOXYGEN_PROJECT_LOGO "${CMAKE_CURRENT_SOURCE_DIR}/img/brandmark.png") + + set(DOXYGEN_SORT_BRIEF_DOCS YES) + + set(DOXYGEN_USE_MDFILE_AS_MAINPAGE "${CMAKE_SOURCE_DIR}/README.md") + + doxygen_add_docs( + ${LIBRARY_NAME}_docs + "${CBINDGEN_TARGET_DIR}/${LIBRARY_NAME}.h" + "${CBINDGEN_TARGET_DIR}/config.h" + "${CBINDGEN_TARGET_DIR}/${UTILS_SUBDIR}/enum_string.h" + "${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/result.h" + "${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/stack_callback_data.h" + "${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/stack.h" + "${CMAKE_SOURCE_DIR}/${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}/${UTILS_SUBDIR}/string.h" + "${CMAKE_SOURCE_DIR}/README.md" + WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} + COMMENT "Producing documentation with Doxygen..." + ) + + # \note A Doxygen input file isn't a file-level dependency so the Doxygen + # command must instead depend upon a target that either outputs the + # file or depends upon it also or it will just output an error message + # when it can't be found. 
+ add_dependencies(${LIBRARY_NAME}_docs ${BINDINGS_NAME}_artifacts ${LIBRARY_NAME}_utilities) +endif() diff --git a/automerge-c/img/brandmark.png b/rust/automerge-c/docs/img/brandmark.png similarity index 100% rename from automerge-c/img/brandmark.png rename to rust/automerge-c/docs/img/brandmark.png diff --git a/automerge-c/examples/CMakeLists.txt b/rust/automerge-c/examples/CMakeLists.txt similarity index 66% rename from automerge-c/examples/CMakeLists.txt rename to rust/automerge-c/examples/CMakeLists.txt index 3395124c..f080237b 100644 --- a/automerge-c/examples/CMakeLists.txt +++ b/rust/automerge-c/examples/CMakeLists.txt @@ -1,41 +1,39 @@ -cmake_minimum_required(VERSION 3.18 FATAL_ERROR) - add_executable( - example_quickstart + ${LIBRARY_NAME}_quickstart quickstart.c ) -set_target_properties(example_quickstart PROPERTIES LINKER_LANGUAGE C) +set_target_properties(${LIBRARY_NAME}_quickstart PROPERTIES LINKER_LANGUAGE C) # \note An imported library's INTERFACE_INCLUDE_DIRECTORIES property can't # contain a non-existent path so its build-time include directory # must be specified for all of its dependent targets instead. target_include_directories( - example_quickstart + ${LIBRARY_NAME}_quickstart PRIVATE "$" ) -target_link_libraries(example_quickstart PRIVATE ${LIBRARY_NAME}) +target_link_libraries(${LIBRARY_NAME}_quickstart PRIVATE ${LIBRARY_NAME}) -add_dependencies(example_quickstart ${LIBRARY_NAME}_artifacts) +add_dependencies(${LIBRARY_NAME}_quickstart ${BINDINGS_NAME}_artifacts) if(BUILD_SHARED_LIBS AND WIN32) add_custom_command( - TARGET example_quickstart + TARGET ${LIBRARY_NAME}_quickstart POST_BUILD COMMAND ${CMAKE_COMMAND} -E copy_if_different ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_${CMAKE_BUILD_TYPE}_POSTFIX}${CMAKE_SHARED_LIBRARY_SUFFIX} - ${CMAKE_CURRENT_BINARY_DIR} + ${CMAKE_BINARY_DIR} COMMENT "Copying the DLL built by Cargo into the examples directory..." 
VERBATIM ) endif() add_custom_command( - TARGET example_quickstart + TARGET ${LIBRARY_NAME}_quickstart POST_BUILD COMMAND - example_quickstart + ${LIBRARY_NAME}_quickstart COMMENT "Running the example quickstart..." VERBATIM diff --git a/automerge-c/examples/README.md b/rust/automerge-c/examples/README.md similarity index 68% rename from automerge-c/examples/README.md rename to rust/automerge-c/examples/README.md index 17aa2227..17e69412 100644 --- a/automerge-c/examples/README.md +++ b/rust/automerge-c/examples/README.md @@ -5,5 +5,5 @@ ```shell cmake -E make_directory automerge-c/build cmake -S automerge-c -B automerge-c/build -cmake --build automerge-c/build --target example_quickstart +cmake --build automerge-c/build --target automerge_quickstart ``` diff --git a/rust/automerge-c/examples/quickstart.c b/rust/automerge-c/examples/quickstart.c new file mode 100644 index 00000000..ab6769ef --- /dev/null +++ b/rust/automerge-c/examples/quickstart.c @@ -0,0 +1,129 @@ +#include +#include +#include + +#include +#include +#include +#include +#include + +static bool abort_cb(AMstack**, void*); + +/** + * \brief Based on https://automerge.github.io/docs/quickstart + */ +int main(int argc, char** argv) { + AMstack* stack = NULL; + AMdoc* doc1; + AMitemToDoc(AMstackItem(&stack, AMcreate(NULL), abort_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1); + AMobjId const* const cards = + AMitemObjId(AMstackItem(&stack, AMmapPutObject(doc1, AM_ROOT, AMstr("cards"), AM_OBJ_TYPE_LIST), abort_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + AMobjId const* const card1 = + AMitemObjId(AMstackItem(&stack, AMlistPutObject(doc1, cards, SIZE_MAX, true, AM_OBJ_TYPE_MAP), abort_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + AMstackItem(NULL, AMmapPutStr(doc1, card1, AMstr("title"), AMstr("Rewrite everything in Clojure")), abort_cb, + AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutBool(doc1, card1, AMstr("done"), false), abort_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMobjId const* const card2 = + 
AMitemObjId(AMstackItem(&stack, AMlistPutObject(doc1, cards, SIZE_MAX, true, AM_OBJ_TYPE_MAP), abort_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + AMstackItem(NULL, AMmapPutStr(doc1, card2, AMstr("title"), AMstr("Rewrite everything in Haskell")), abort_cb, + AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutBool(doc1, card2, AMstr("done"), false), abort_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc1, AMstr("Add card"), NULL), abort_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + + AMdoc* doc2; + AMitemToDoc(AMstackItem(&stack, AMcreate(NULL), abort_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2); + AMstackItem(NULL, AMmerge(doc2, doc1), abort_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + + AMbyteSpan binary; + AMitemToBytes(AMstackItem(&stack, AMsave(doc1), abort_cb, AMexpect(AM_VAL_TYPE_BYTES)), &binary); + AMitemToDoc(AMstackItem(&stack, AMload(binary.src, binary.count), abort_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2); + + AMstackItem(NULL, AMmapPutBool(doc1, card1, AMstr("done"), true), abort_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc1, AMstr("Mark card as done"), NULL), abort_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + + AMstackItem(NULL, AMlistDelete(doc2, cards, 0), abort_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc2, AMstr("Delete card"), NULL), abort_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + + AMstackItem(NULL, AMmerge(doc1, doc2), abort_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + + AMitems changes = AMstackItems(&stack, AMgetChanges(doc1, NULL), abort_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + AMitem* item = NULL; + while ((item = AMitemsNext(&changes, 1)) != NULL) { + AMchange const* change; + AMitemToChange(item, &change); + AMitems const heads = AMstackItems(&stack, AMitemFromChangeHash(AMchangeHash(change)), abort_cb, + AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + char* const c_msg = AMstrdup(AMchangeMessage(change), NULL); + printf("%s %zu\n", c_msg, AMobjSize(doc1, cards, &heads)); + free(c_msg); + } + AMstackFree(&stack); 
+} + +/** + * \brief Examines the result at the top of the given stack and, if it's + * invalid, prints an error message to `stderr`, deallocates all results + * in the stack and exits. + * + * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. + * \param[in] data A pointer to an owned `AMstackCallbackData` struct or `NULL`. + * \return `true` if the top `AMresult` in \p stack is valid, `false` otherwise. + * \pre \p stack `!= NULL`. + */ +static bool abort_cb(AMstack** stack, void* data) { + static char buffer[512] = {0}; + + char const* suffix = NULL; + if (!stack) { + suffix = "Stack*"; + } else if (!*stack) { + suffix = "Stack"; + } else if (!(*stack)->result) { + suffix = ""; + } + if (suffix) { + fprintf(stderr, "Null `AMresult%s*`.\n", suffix); + AMstackFree(stack); + exit(EXIT_FAILURE); + return false; + } + AMstatus const status = AMresultStatus((*stack)->result); + switch (status) { + case AM_STATUS_ERROR: + strcpy(buffer, "Error"); + break; + case AM_STATUS_INVALID_RESULT: + strcpy(buffer, "Invalid result"); + break; + case AM_STATUS_OK: + break; + default: + sprintf(buffer, "Unknown `AMstatus` tag %d", status); + } + if (buffer[0]) { + char* const c_msg = AMstrdup(AMresultError((*stack)->result), NULL); + fprintf(stderr, "%s; %s.\n", buffer, c_msg); + free(c_msg); + AMstackFree(stack); + exit(EXIT_FAILURE); + return false; + } + if (data) { + AMstackCallbackData* sc_data = (AMstackCallbackData*)data; + AMvalType const tag = AMitemValType(AMresultItem((*stack)->result)); + if (tag != sc_data->bitmask) { + fprintf(stderr, "Unexpected tag `%s` (%d) instead of `%s` at %s:%d.\n", AMvalTypeToString(tag), tag, + AMvalTypeToString(sc_data->bitmask), sc_data->file, sc_data->line); + free(sc_data); + AMstackFree(stack); + exit(EXIT_FAILURE); + return false; + } + } + free(data); + return true; +} diff --git a/rust/automerge-c/include/automerge-c/utils/result.h b/rust/automerge-c/include/automerge-c/utils/result.h new file mode 100644 index 
00000000..ab8a2f93 --- /dev/null +++ b/rust/automerge-c/include/automerge-c/utils/result.h @@ -0,0 +1,30 @@ +#ifndef AUTOMERGE_C_UTILS_RESULT_H +#define AUTOMERGE_C_UTILS_RESULT_H +/** + * \file + * \brief Utility functions for use with `AMresult` structs. + */ + +#include + +#include + +/** + * \brief Transfers the items within an arbitrary list of results into a + * new result in their order of specification. + * \param[in] count The count of subsequent arguments. + * \param[in] ... A \p count list of arguments, each of which is a pointer to + * an `AMresult` struct whose items will be transferred out of it + * and which is subsequently freed. + * \return A pointer to an `AMresult` struct or `NULL`. + * \pre `∀𝑥 ∈` \p ... `, AMresultStatus(𝑥) == AM_STATUS_OK` + * \post `(∃𝑥 ∈` \p ... `, AMresultStatus(𝑥) != AM_STATUS_OK) -> NULL` + * \attention All `AMresult` struct pointer arguments are passed to + * `AMresultFree()` regardless of success; use `AMresultCat()` + * instead if you wish to pass them to `AMresultFree()` yourself. + * \warning The returned `AMresult` struct pointer must be passed to + * `AMresultFree()` in order to avoid a memory leak. + */ +AMresult* AMresultFrom(int count, ...); + +#endif /* AUTOMERGE_C_UTILS_RESULT_H */ diff --git a/rust/automerge-c/include/automerge-c/utils/stack.h b/rust/automerge-c/include/automerge-c/utils/stack.h new file mode 100644 index 00000000..a8e9fd08 --- /dev/null +++ b/rust/automerge-c/include/automerge-c/utils/stack.h @@ -0,0 +1,130 @@ +#ifndef AUTOMERGE_C_UTILS_STACK_H +#define AUTOMERGE_C_UTILS_STACK_H +/** + * \file + * \brief Utility data structures and functions for hiding `AMresult` structs, + * managing their lifetimes, and automatically applying custom + * validation logic to the `AMitem` structs that they contain. 
+ * + * \note The `AMstack` struct and its related functions drastically reduce the + * need for boilerplate code and/or `goto` statement usage within a C + * application but a higher-level programming language offers even better + * ways to do the same things. + */ + +#include + +/** + * \struct AMstack + * \brief A node in a singly-linked list of result pointers. + */ +typedef struct AMstack { + /** A result to be deallocated. */ + AMresult* result; + /** The previous node in the singly-linked list or `NULL`. */ + struct AMstack* prev; +} AMstack; + +/** + * \memberof AMstack + * \brief The prototype of a function that examines the result at the top of + * the given stack in terms of some arbitrary data. + * + * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. + * \param[in] data A pointer to arbitrary data or `NULL`. + * \return `true` if the top `AMresult` struct in \p stack is valid, `false` + * otherwise. + * \pre \p stack `!= NULL`. + */ +typedef bool (*AMstackCallback)(AMstack** stack, void* data); + +/** + * \memberof AMstack + * \brief Deallocates the storage for a stack of results. + * + * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. + * \pre \p stack `!= NULL` + * \post `*stack == NULL` + */ +void AMstackFree(AMstack** stack); + +/** + * \memberof AMstack + * \brief Gets a result from the stack after removing it. + * + * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. + * \param[in] result A pointer to the `AMresult` to be popped or `NULL` to + * select the top result in \p stack. + * \return A pointer to an `AMresult` struct or `NULL`. + * \pre \p stack `!= NULL` + * \warning The returned `AMresult` struct pointer must be passed to + * `AMresultFree()` in order to avoid a memory leak. 
+ */ +AMresult* AMstackPop(AMstack** stack, AMresult const* result); + +/** + * \memberof AMstack + * \brief Pushes the given result onto the given stack, calls the given + * callback with the given data to validate it and then either gets the + * result if it's valid or gets `NULL` instead. + * + * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. + * \param[in] result A pointer to an `AMresult` struct. + * \param[in] callback A pointer to a function with the same signature as + * `AMstackCallback()` or `NULL`. + * \param[in] data A pointer to arbitrary data or `NULL` which is passed to + * \p callback. + * \return \p result or `NULL`. + * \warning If \p stack `== NULL` then \p result is deallocated in order to + * avoid a memory leak. + */ +AMresult* AMstackResult(AMstack** stack, AMresult* result, AMstackCallback callback, void* data); + +/** + * \memberof AMstack + * \brief Pushes the given result onto the given stack, calls the given + * callback with the given data to validate it and then either gets the + * first item in the sequence of items within that result if it's valid + * or gets `NULL` instead. + * + * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. + * \param[in] result A pointer to an `AMresult` struct. + * \param[in] callback A pointer to a function with the same signature as + * `AMstackCallback()` or `NULL`. + * \param[in] data A pointer to arbitrary data or `NULL` which is passed to + * \p callback. + * \return A pointer to an `AMitem` struct or `NULL`. + * \warning If \p stack `== NULL` then \p result is deallocated in order to + * avoid a memory leak. 
+ */ +AMitem* AMstackItem(AMstack** stack, AMresult* result, AMstackCallback callback, void* data); + +/** + * \memberof AMstack + * \brief Pushes the given result onto the given stack, calls the given + * callback with the given data to validate it and then either gets an + * `AMitems` struct over the sequence of items within that result if it's + * valid or gets an empty `AMitems` instead. + * + * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. + * \param[in] result A pointer to an `AMresult` struct. + * \param[in] callback A pointer to a function with the same signature as + * `AMstackCallback()` or `NULL`. + * \param[in] data A pointer to arbitrary data or `NULL` which is passed to + * \p callback. + * \return An `AMitems` struct. + * \warning If \p stack `== NULL` then \p result is deallocated immediately + * in order to avoid a memory leak. + */ +AMitems AMstackItems(AMstack** stack, AMresult* result, AMstackCallback callback, void* data); + +/** + * \memberof AMstack + * \brief Gets the count of results that have been pushed onto the stack. + * + * \param[in,out] stack A pointer to an `AMstack` struct. + * \return A 64-bit unsigned integer. + */ +size_t AMstackSize(AMstack const* const stack); + +#endif /* AUTOMERGE_C_UTILS_STACK_H */ diff --git a/rust/automerge-c/include/automerge-c/utils/stack_callback_data.h b/rust/automerge-c/include/automerge-c/utils/stack_callback_data.h new file mode 100644 index 00000000..6f9f1edb --- /dev/null +++ b/rust/automerge-c/include/automerge-c/utils/stack_callback_data.h @@ -0,0 +1,53 @@ +#ifndef AUTOMERGE_C_UTILS_PUSH_CALLBACK_DATA_H +#define AUTOMERGE_C_UTILS_PUSH_CALLBACK_DATA_H +/** + * \file + * \brief Utility data structures, functions and macros for supplying + * parameters to the custom validation logic applied to `AMitem` + * structs. 
+ */ + +#include + +/** + * \struct AMstackCallbackData + * \brief A data structure for passing the parameters of an item value test + * to an implementation of the `AMstackCallback` function prototype. + */ +typedef struct { + /** A bitmask of `AMvalType` tags. */ + AMvalType bitmask; + /** A null-terminated file path string. */ + char const* file; + /** The ordinal number of a line within a file. */ + int line; +} AMstackCallbackData; + +/** + * \memberof AMstackCallbackData + * \brief Allocates a new `AMstackCallbackData` struct and initializes its + * members from their corresponding arguments. + * + * \param[in] bitmask A bitmask of `AMvalType` tags. + * \param[in] file A null-terminated file path string. + * \param[in] line The ordinal number of a line within a file. + * \return A pointer to a disowned `AMstackCallbackData` struct. + * \warning The returned pointer must be passed to `free()` to avoid a memory + * leak. + */ +AMstackCallbackData* AMstackCallbackDataInit(AMvalType const bitmask, char const* const file, int const line); + +/** + * \memberof AMstackCallbackData + * \def AMexpect + * \brief Allocates a new `AMstackCallbackData` struct and initializes it from + * an `AMvalueType` bitmask. + * + * \param[in] bitmask A bitmask of `AMvalType` tags. + * \return A pointer to a disowned `AMstackCallbackData` struct. + * \warning The returned pointer must be passed to `free()` to avoid a memory + * leak. 
+ */ +#define AMexpect(bitmask) AMstackCallbackDataInit(bitmask, __FILE__, __LINE__) + +#endif /* AUTOMERGE_C_UTILS_PUSH_CALLBACK_DATA_H */ diff --git a/rust/automerge-c/include/automerge-c/utils/string.h b/rust/automerge-c/include/automerge-c/utils/string.h new file mode 100644 index 00000000..4d61c2e9 --- /dev/null +++ b/rust/automerge-c/include/automerge-c/utils/string.h @@ -0,0 +1,29 @@ +#ifndef AUTOMERGE_C_UTILS_STRING_H +#define AUTOMERGE_C_UTILS_STRING_H +/** + * \file + * \brief Utility functions for use with `AMbyteSpan` structs that provide + * UTF-8 string views. + */ + +#include + +/** + * \memberof AMbyteSpan + * \brief Returns a pointer to a null-terminated byte string which is a + * duplicate of the given UTF-8 string view except for the substitution + * of its NUL (0) characters with the specified null-terminated byte + * string. + * + * \param[in] str A UTF-8 string view as an `AMbyteSpan` struct. + * \param[in] nul A null-terminated byte string to substitute for NUL characters + * or `NULL` to substitute `"\\0"` for NUL characters. + * \return A disowned null-terminated byte string. + * \pre \p str.src `!= NULL` + * \pre \p str.count `<= sizeof(`\p str.src `)` + * \warning The returned pointer must be passed to `free()` to avoid a memory + * leak. + */ +char* AMstrdup(AMbyteSpan const str, char const* nul); + +#endif /* AUTOMERGE_C_UTILS_STRING_H */ diff --git a/rust/automerge-c/src/actor_id.rs b/rust/automerge-c/src/actor_id.rs new file mode 100644 index 00000000..5a28959e --- /dev/null +++ b/rust/automerge-c/src/actor_id.rs @@ -0,0 +1,193 @@ +use automerge as am; +use libc::c_int; +use std::cell::RefCell; +use std::cmp::Ordering; +use std::str::FromStr; + +use crate::byte_span::AMbyteSpan; +use crate::result::{to_result, AMresult}; + +macro_rules! 
to_actor_id { + ($handle:expr) => {{ + let handle = $handle.as_ref(); + match handle { + Some(b) => b, + None => return AMresult::error("Invalid `AMactorId*`").into(), + } + }}; +} + +pub(crate) use to_actor_id; + +/// \struct AMactorId +/// \installed_headerfile +/// \brief An actor's unique identifier. +#[derive(Eq, PartialEq)] +pub struct AMactorId { + body: *const am::ActorId, + hex_str: RefCell>>, +} + +impl AMactorId { + pub fn new(actor_id: &am::ActorId) -> Self { + Self { + body: actor_id, + hex_str: Default::default(), + } + } + + pub fn as_hex_str(&self) -> AMbyteSpan { + let mut hex_str = self.hex_str.borrow_mut(); + match hex_str.as_mut() { + None => { + let hex_string = unsafe { (*self.body).to_hex_string() }; + hex_str + .insert(hex_string.into_boxed_str()) + .as_bytes() + .into() + } + Some(hex_str) => hex_str.as_bytes().into(), + } + } +} + +impl AsRef for AMactorId { + fn as_ref(&self) -> &am::ActorId { + unsafe { &*self.body } + } +} + +/// \memberof AMactorId +/// \brief Gets the value of an actor identifier as an array of bytes. +/// +/// \param[in] actor_id A pointer to an `AMactorId` struct. +/// \return An `AMbyteSpan` struct for an array of bytes. +/// \pre \p actor_id `!= NULL` +/// \internal +/// +/// # Safety +/// actor_id must be a valid pointer to an AMactorId +#[no_mangle] +pub unsafe extern "C" fn AMactorIdBytes(actor_id: *const AMactorId) -> AMbyteSpan { + match actor_id.as_ref() { + Some(actor_id) => actor_id.as_ref().into(), + None => Default::default(), + } +} + +/// \memberof AMactorId +/// \brief Compares two actor identifiers. +/// +/// \param[in] actor_id1 A pointer to an `AMactorId` struct. +/// \param[in] actor_id2 A pointer to an `AMactorId` struct. +/// \return `-1` if \p actor_id1 `<` \p actor_id2, `0` if +/// \p actor_id1 `==` \p actor_id2 and `1` if +/// \p actor_id1 `>` \p actor_id2. 
+/// \pre \p actor_id1 `!= NULL` +/// \pre \p actor_id2 `!= NULL` +/// \internal +/// +/// #Safety +/// actor_id1 must be a valid pointer to an AMactorId +/// actor_id2 must be a valid pointer to an AMactorId +#[no_mangle] +pub unsafe extern "C" fn AMactorIdCmp( + actor_id1: *const AMactorId, + actor_id2: *const AMactorId, +) -> c_int { + match (actor_id1.as_ref(), actor_id2.as_ref()) { + (Some(actor_id1), Some(actor_id2)) => match actor_id1.as_ref().cmp(actor_id2.as_ref()) { + Ordering::Less => -1, + Ordering::Equal => 0, + Ordering::Greater => 1, + }, + (None, Some(_)) => -1, + (None, None) => 0, + (Some(_), None) => 1, + } +} + +/// \memberof AMactorId +/// \brief Allocates a new actor identifier and initializes it from a random +/// UUID value. +/// +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_ACTOR_ID` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +#[no_mangle] +pub unsafe extern "C" fn AMactorIdInit() -> *mut AMresult { + to_result(Ok::(am::ActorId::random())) +} + +/// \memberof AMactorId +/// \brief Allocates a new actor identifier and initializes it from an array of +/// bytes value. +/// +/// \param[in] src A pointer to an array of bytes. +/// \param[in] count The count of bytes to copy from the array pointed to by +/// \p src. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_ACTOR_ID` item. +/// \pre \p src `!= NULL` +/// \pre `sizeof(`\p src `) > 0` +/// \pre \p count `<= sizeof(`\p src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// src must be a byte array of length `>= count` +#[no_mangle] +pub unsafe extern "C" fn AMactorIdFromBytes(src: *const u8, count: usize) -> *mut AMresult { + if !src.is_null() { + let value = std::slice::from_raw_parts(src, count); + to_result(Ok::(am::ActorId::from( + value, + ))) + } else { + AMresult::error("Invalid uint8_t*").into() + } +} + +/// \memberof AMactorId +/// \brief Allocates a new actor identifier and initializes it from a +/// hexadecimal UTF-8 string view value. +/// +/// \param[in] value A UTF-8 string view as an `AMbyteSpan` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_ACTOR_ID` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// hex_str must be a valid pointer to an AMbyteSpan +#[no_mangle] +pub unsafe extern "C" fn AMactorIdFromStr(value: AMbyteSpan) -> *mut AMresult { + use am::AutomergeError::InvalidActorId; + + to_result(match (&value).try_into() { + Ok(s) => match am::ActorId::from_str(s) { + Ok(actor_id) => Ok(actor_id), + Err(_) => Err(InvalidActorId(String::from(s))), + }, + Err(e) => Err(e), + }) +} + +/// \memberof AMactorId +/// \brief Gets the value of an actor identifier as a UTF-8 hexadecimal string +/// view. +/// +/// \param[in] actor_id A pointer to an `AMactorId` struct. +/// \return A UTF-8 string view as an `AMbyteSpan` struct. 
+/// \pre \p actor_id `!= NULL` +/// \internal +/// +/// # Safety +/// actor_id must be a valid pointer to an AMactorId +#[no_mangle] +pub unsafe extern "C" fn AMactorIdStr(actor_id: *const AMactorId) -> AMbyteSpan { + match actor_id.as_ref() { + Some(actor_id) => actor_id.as_hex_str(), + None => Default::default(), + } +} diff --git a/rust/automerge-c/src/byte_span.rs b/rust/automerge-c/src/byte_span.rs new file mode 100644 index 00000000..5855cfc7 --- /dev/null +++ b/rust/automerge-c/src/byte_span.rs @@ -0,0 +1,223 @@ +use automerge as am; +use std::cmp::Ordering; +use std::convert::TryFrom; +use std::os::raw::c_char; + +use libc::{c_int, strlen}; +use smol_str::SmolStr; + +macro_rules! to_str { + ($byte_span:expr) => {{ + let result: Result<&str, am::AutomergeError> = (&$byte_span).try_into(); + match result { + Ok(s) => s, + Err(e) => return AMresult::error(&e.to_string()).into(), + } + }}; +} + +pub(crate) use to_str; + +/// \struct AMbyteSpan +/// \installed_headerfile +/// \brief A view onto an array of bytes. +#[repr(C)] +pub struct AMbyteSpan { + /// A pointer to the first byte of an array of bytes. + /// \warning \p src is only valid until the array of bytes to which it + /// points is freed. + /// \note If the `AMbyteSpan` came from within an `AMitem` struct then + /// \p src will be freed when the pointer to the `AMresult` struct + /// containing the `AMitem` struct is passed to `AMresultFree()`. + pub src: *const u8, + /// The count of bytes in the array. 
+ pub count: usize, +} + +impl AMbyteSpan { + pub fn is_null(&self) -> bool { + self.src.is_null() + } +} + +impl Default for AMbyteSpan { + fn default() -> Self { + Self { + src: std::ptr::null(), + count: 0, + } + } +} + +impl PartialEq for AMbyteSpan { + fn eq(&self, other: &Self) -> bool { + if self.count != other.count { + return false; + } else if self.src == other.src { + return true; + } + <&[u8]>::from(self) == <&[u8]>::from(other) + } +} + +impl Eq for AMbyteSpan {} + +impl From<&am::ActorId> for AMbyteSpan { + fn from(actor: &am::ActorId) -> Self { + let slice = actor.to_bytes(); + Self { + src: slice.as_ptr(), + count: slice.len(), + } + } +} + +impl From<&mut am::ActorId> for AMbyteSpan { + fn from(actor: &mut am::ActorId) -> Self { + actor.as_ref().into() + } +} + +impl From<&am::ChangeHash> for AMbyteSpan { + fn from(change_hash: &am::ChangeHash) -> Self { + Self { + src: change_hash.0.as_ptr(), + count: change_hash.0.len(), + } + } +} + +impl From<*const c_char> for AMbyteSpan { + fn from(cs: *const c_char) -> Self { + if !cs.is_null() { + Self { + src: cs as *const u8, + count: unsafe { strlen(cs) }, + } + } else { + Self::default() + } + } +} + +impl From<&SmolStr> for AMbyteSpan { + fn from(smol_str: &SmolStr) -> Self { + smol_str.as_bytes().into() + } +} + +impl From<&[u8]> for AMbyteSpan { + fn from(slice: &[u8]) -> Self { + Self { + src: slice.as_ptr(), + count: slice.len(), + } + } +} + +impl From<&AMbyteSpan> for &[u8] { + fn from(byte_span: &AMbyteSpan) -> Self { + unsafe { std::slice::from_raw_parts(byte_span.src, byte_span.count) } + } +} + +impl From<&AMbyteSpan> for Vec { + fn from(byte_span: &AMbyteSpan) -> Self { + <&[u8]>::from(byte_span).to_vec() + } +} + +impl TryFrom<&AMbyteSpan> for am::ChangeHash { + type Error = am::AutomergeError; + + fn try_from(byte_span: &AMbyteSpan) -> Result { + use am::AutomergeError::InvalidChangeHashBytes; + + let slice: &[u8] = byte_span.into(); + match slice.try_into() { + Ok(change_hash) => 
Ok(change_hash), + Err(e) => Err(InvalidChangeHashBytes(e)), + } + } +} + +impl TryFrom<&AMbyteSpan> for &str { + type Error = am::AutomergeError; + + fn try_from(byte_span: &AMbyteSpan) -> Result { + use am::AutomergeError::InvalidCharacter; + + let slice = byte_span.into(); + match std::str::from_utf8(slice) { + Ok(str_) => Ok(str_), + Err(e) => Err(InvalidCharacter(e.valid_up_to())), + } + } +} + +/// \memberof AMbyteSpan +/// \brief Creates a view onto an array of bytes. +/// +/// \param[in] src A pointer to an array of bytes or `NULL`. +/// \param[in] count The count of bytes to view from the array pointed to by +/// \p src. +/// \return An `AMbyteSpan` struct. +/// \pre \p count `<= sizeof(`\p src `)` +/// \post `(`\p src `== NULL) -> (AMbyteSpan){NULL, 0}` +/// \internal +/// +/// #Safety +/// src must be a byte array of length `>= count` or `std::ptr::null()` +#[no_mangle] +pub unsafe extern "C" fn AMbytes(src: *const u8, count: usize) -> AMbyteSpan { + AMbyteSpan { + src, + count: if src.is_null() { 0 } else { count }, + } +} + +/// \memberof AMbyteSpan +/// \brief Creates a view onto a C string. +/// +/// \param[in] c_str A null-terminated byte string or `NULL`. +/// \return An `AMbyteSpan` struct. +/// \pre Each byte in \p c_str encodes one UTF-8 character. +/// \internal +/// +/// #Safety +/// c_str must be a null-terminated array of `std::os::raw::c_char` or `std::ptr::null()`. +#[no_mangle] +pub unsafe extern "C" fn AMstr(c_str: *const c_char) -> AMbyteSpan { + c_str.into() +} + +/// \memberof AMbyteSpan +/// \brief Compares two UTF-8 string views lexicographically. +/// +/// \param[in] lhs A UTF-8 string view as an `AMbyteSpan` struct. +/// \param[in] rhs A UTF-8 string view as an `AMbyteSpan` struct. +/// \return Negative value if \p lhs appears before \p rhs in lexicographical order. +/// Zero if \p lhs and \p rhs compare equal. +/// Positive value if \p lhs appears after \p rhs in lexicographical order. 
+/// \pre \p lhs.src `!= NULL` +/// \pre \p lhs.count `<= sizeof(`\p lhs.src `)` +/// \pre \p rhs.src `!= NULL` +/// \pre \p rhs.count `<= sizeof(`\p rhs.src `)` +/// \internal +/// +/// #Safety +/// lhs.src must be a byte array of length >= lhs.count +/// rhs.src must be a a byte array of length >= rhs.count +#[no_mangle] +pub unsafe extern "C" fn AMstrCmp(lhs: AMbyteSpan, rhs: AMbyteSpan) -> c_int { + match (<&str>::try_from(&lhs), <&str>::try_from(&rhs)) { + (Ok(lhs), Ok(rhs)) => match lhs.cmp(rhs) { + Ordering::Less => -1, + Ordering::Equal => 0, + Ordering::Greater => 1, + }, + (Err(_), Ok(_)) => -1, + (Err(_), Err(_)) => 0, + (Ok(_), Err(_)) => 1, + } +} diff --git a/automerge-c/src/change.rs b/rust/automerge-c/src/change.rs similarity index 67% rename from automerge-c/src/change.rs rename to rust/automerge-c/src/change.rs index afee98ed..8529ed94 100644 --- a/automerge-c/src/change.rs +++ b/rust/automerge-c/src/change.rs @@ -1,10 +1,7 @@ use automerge as am; use std::cell::RefCell; -use std::ffi::CString; -use std::os::raw::c_char; use crate::byte_span::AMbyteSpan; -use crate::change_hashes::AMchangeHashes; use crate::result::{to_result, AMresult}; macro_rules! to_change { @@ -12,7 +9,7 @@ macro_rules! to_change { let handle = $handle.as_ref(); match handle { Some(b) => b, - None => return AMresult::err("Invalid AMchange pointer").into(), + None => return AMresult::error("Invalid `AMchange*`").into(), } }}; } @@ -23,43 +20,31 @@ macro_rules! 
to_change { #[derive(Eq, PartialEq)] pub struct AMchange { body: *mut am::Change, - c_msg: RefCell>, - c_changehash: RefCell>, + change_hash: RefCell>, } impl AMchange { pub fn new(change: &mut am::Change) -> Self { Self { body: change, - c_msg: Default::default(), - c_changehash: Default::default(), + change_hash: Default::default(), } } - pub fn message(&self) -> *const c_char { - let mut c_msg = self.c_msg.borrow_mut(); - match c_msg.as_mut() { - None => { - if let Some(message) = unsafe { (*self.body).message() } { - return c_msg - .insert(CString::new(message.as_bytes()).unwrap()) - .as_ptr(); - } - } - Some(message) => { - return message.as_ptr(); - } + pub fn message(&self) -> AMbyteSpan { + if let Some(message) = unsafe { (*self.body).message() } { + return message.as_str().as_bytes().into(); } - std::ptr::null() + Default::default() } pub fn hash(&self) -> AMbyteSpan { - let mut c_changehash = self.c_changehash.borrow_mut(); - if let Some(c_changehash) = c_changehash.as_ref() { - c_changehash.into() + let mut change_hash = self.change_hash.borrow_mut(); + if let Some(change_hash) = change_hash.as_ref() { + change_hash.into() } else { let hash = unsafe { (*self.body).hash() }; - let ptr = c_changehash.insert(hash); + let ptr = change_hash.insert(hash); AMbyteSpan { src: ptr.0.as_ptr(), count: hash.as_ref().len(), @@ -84,12 +69,12 @@ impl AsRef for AMchange { /// \brief Gets the first referenced actor identifier in a change. /// /// \param[in] change A pointer to an `AMchange` struct. -/// \pre \p change `!= NULL`. -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMactorId` struct. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_ACTOR_ID` item. 
+/// \pre \p change `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal +/// /// # Safety /// change must be a valid pointer to an AMchange #[no_mangle] @@ -103,8 +88,8 @@ pub unsafe extern "C" fn AMchangeActorId(change: *const AMchange) -> *mut AMresu /// \memberof AMchange /// \brief Compresses the raw bytes of a change. /// -/// \param[in,out] change A pointer to an `AMchange` struct. -/// \pre \p change `!= NULL`. +/// \param[in] change A pointer to an `AMchange` struct. +/// \pre \p change `!= NULL` /// \internal /// /// # Safety @@ -120,18 +105,20 @@ pub unsafe extern "C" fn AMchangeCompress(change: *mut AMchange) { /// \brief Gets the dependencies of a change. /// /// \param[in] change A pointer to an `AMchange` struct. -/// \return A pointer to an `AMchangeHashes` struct or `NULL`. -/// \pre \p change `!= NULL`. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. +/// \pre \p change `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// change must be a valid pointer to an AMchange #[no_mangle] -pub unsafe extern "C" fn AMchangeDeps(change: *const AMchange) -> AMchangeHashes { - match change.as_ref() { - Some(change) => AMchangeHashes::new(change.as_ref().deps()), - None => AMchangeHashes::default(), - } +pub unsafe extern "C" fn AMchangeDeps(change: *const AMchange) -> *mut AMresult { + to_result(match change.as_ref() { + Some(change) => change.as_ref().deps(), + None => Default::default(), + }) } /// \memberof AMchange @@ -139,7 +126,7 @@ pub unsafe extern "C" fn AMchangeDeps(change: *const AMchange) -> AMchangeHashes /// /// \param[in] change A pointer to an `AMchange` struct. /// \return An `AMbyteSpan` struct. -/// \pre \p change `!= NULL`. 
+/// \pre \p change `!= NULL` /// \internal /// /// # Safety @@ -149,36 +136,38 @@ pub unsafe extern "C" fn AMchangeExtraBytes(change: *const AMchange) -> AMbyteSp if let Some(change) = change.as_ref() { change.as_ref().extra_bytes().into() } else { - AMbyteSpan::default() + Default::default() } } /// \memberof AMchange -/// \brief Loads a sequence of bytes into a change. +/// \brief Allocates a new change and initializes it from an array of bytes value. /// /// \param[in] src A pointer to an array of bytes. -/// \param[in] count The number of bytes in \p src to load. -/// \return A pointer to an `AMresult` struct containing an `AMchange` struct. -/// \pre \p src `!= NULL`. -/// \pre `0 <` \p count `<= sizeof(`\p src`)`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] count The count of bytes to load from the array pointed to by +/// \p src. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_CHANGE` item. +/// \pre \p src `!= NULL` +/// \pre `sizeof(`\p src `) > 0` +/// \pre \p count `<= sizeof(`\p src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal +/// /// # Safety -/// src must be a byte array of size `>= count` +/// src must be a byte array of length `>= count` #[no_mangle] pub unsafe extern "C" fn AMchangeFromBytes(src: *const u8, count: usize) -> *mut AMresult { - let mut data = Vec::new(); - data.extend_from_slice(std::slice::from_raw_parts(src, count)); - to_result(am::Change::from_bytes(data)) + let data = std::slice::from_raw_parts(src, count); + to_result(am::Change::from_bytes(data.to_vec())) } /// \memberof AMchange /// \brief Gets the hash of a change. /// /// \param[in] change A pointer to an `AMchange` struct. -/// \return A change hash as an `AMbyteSpan` struct. -/// \pre \p change `!= NULL`. +/// \return An `AMbyteSpan` struct for a change hash. 
+/// \pre \p change `!= NULL` /// \internal /// /// # Safety @@ -187,7 +176,7 @@ pub unsafe extern "C" fn AMchangeFromBytes(src: *const u8, count: usize) -> *mut pub unsafe extern "C" fn AMchangeHash(change: *const AMchange) -> AMbyteSpan { match change.as_ref() { Some(change) => change.hash(), - None => AMbyteSpan::default(), + None => Default::default(), } } @@ -195,8 +184,8 @@ pub unsafe extern "C" fn AMchangeHash(change: *const AMchange) -> AMbyteSpan { /// \brief Tests the emptiness of a change. /// /// \param[in] change A pointer to an `AMchange` struct. -/// \return A boolean. -/// \pre \p change `!= NULL`. +/// \return `true` if \p change is empty, `false` otherwise. +/// \pre \p change `!= NULL` /// \internal /// /// # Safety @@ -210,12 +199,37 @@ pub unsafe extern "C" fn AMchangeIsEmpty(change: *const AMchange) -> bool { } } +/// \memberof AMchange +/// \brief Loads a document into a sequence of changes. +/// +/// \param[in] src A pointer to an array of bytes. +/// \param[in] count The count of bytes to load from the array pointed to by +/// \p src. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE` items. +/// \pre \p src `!= NULL` +/// \pre `sizeof(`\p src `) > 0` +/// \pre \p count `<= sizeof(`\p src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// src must be a byte array of length `>= count` +#[no_mangle] +pub unsafe extern "C" fn AMchangeLoadDocument(src: *const u8, count: usize) -> *mut AMresult { + let data = std::slice::from_raw_parts(src, count); + to_result::, _>>( + am::Automerge::load(data) + .and_then(|d| d.get_changes(&[]).map(|c| c.into_iter().cloned().collect())), + ) +} + /// \memberof AMchange /// \brief Gets the maximum operation index of a change. /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A 64-bit unsigned integer. -/// \pre \p change `!= NULL`. 
+/// \pre \p change `!= NULL` /// \internal /// /// # Safety @@ -233,18 +247,18 @@ pub unsafe extern "C" fn AMchangeMaxOp(change: *const AMchange) -> u64 { /// \brief Gets the message of a change. /// /// \param[in] change A pointer to an `AMchange` struct. -/// \return A UTF-8 string or `NULL`. -/// \pre \p change `!= NULL`. +/// \return An `AMbyteSpan` struct for a UTF-8 string. +/// \pre \p change `!= NULL` /// \internal /// /// # Safety /// change must be a valid pointer to an AMchange #[no_mangle] -pub unsafe extern "C" fn AMchangeMessage(change: *const AMchange) -> *const c_char { +pub unsafe extern "C" fn AMchangeMessage(change: *const AMchange) -> AMbyteSpan { if let Some(change) = change.as_ref() { return change.message(); }; - std::ptr::null() + Default::default() } /// \memberof AMchange @@ -252,7 +266,7 @@ pub unsafe extern "C" fn AMchangeMessage(change: *const AMchange) -> *const c_ch /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A 64-bit unsigned integer. -/// \pre \p change `!= NULL`. +/// \pre \p change `!= NULL` /// \internal /// /// # Safety @@ -271,7 +285,7 @@ pub unsafe extern "C" fn AMchangeSeq(change: *const AMchange) -> u64 { /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A 64-bit unsigned integer. -/// \pre \p change `!= NULL`. +/// \pre \p change `!= NULL` /// \internal /// /// # Safety @@ -279,10 +293,9 @@ pub unsafe extern "C" fn AMchangeSeq(change: *const AMchange) -> u64 { #[no_mangle] pub unsafe extern "C" fn AMchangeSize(change: *const AMchange) -> usize { if let Some(change) = change.as_ref() { - change.as_ref().len() - } else { - 0 + return change.as_ref().len(); } + 0 } /// \memberof AMchange @@ -290,7 +303,7 @@ pub unsafe extern "C" fn AMchangeSize(change: *const AMchange) -> usize { /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A 64-bit unsigned integer. -/// \pre \p change `!= NULL`. 
+/// \pre \p change `!= NULL` /// \internal /// /// # Safety @@ -309,7 +322,7 @@ pub unsafe extern "C" fn AMchangeStartOp(change: *const AMchange) -> u64 { /// /// \param[in] change A pointer to an `AMchange` struct. /// \return A 64-bit signed integer. -/// \pre \p change `!= NULL`. +/// \pre \p change `!= NULL` /// \internal /// /// # Safety @@ -327,8 +340,8 @@ pub unsafe extern "C" fn AMchangeTime(change: *const AMchange) -> i64 { /// \brief Gets the raw bytes of a change. /// /// \param[in] change A pointer to an `AMchange` struct. -/// \return An `AMbyteSpan` struct. -/// \pre \p change `!= NULL`. +/// \return An `AMbyteSpan` struct for an array of bytes. +/// \pre \p change `!= NULL` /// \internal /// /// # Safety @@ -338,30 +351,6 @@ pub unsafe extern "C" fn AMchangeRawBytes(change: *const AMchange) -> AMbyteSpan if let Some(change) = change.as_ref() { change.as_ref().raw_bytes().into() } else { - AMbyteSpan::default() + Default::default() } } - -/// \memberof AMchange -/// \brief Loads a document into a sequence of changes. -/// -/// \param[in] src A pointer to an array of bytes. -/// \param[in] count The number of bytes in \p src to load. -/// \return A pointer to an `AMresult` struct containing a sequence of -/// `AMchange` structs. -/// \pre \p src `!= NULL`. -/// \pre `0 <` \p count `<= sizeof(`\p src`)`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. 
-/// \internal -/// # Safety -/// src must be a byte array of size `>= count` -#[no_mangle] -pub unsafe extern "C" fn AMchangeLoadDocument(src: *const u8, count: usize) -> *mut AMresult { - let mut data = Vec::new(); - data.extend_from_slice(std::slice::from_raw_parts(src, count)); - to_result::, _>>( - am::Automerge::load(&data) - .and_then(|d| d.get_changes(&[]).map(|c| c.into_iter().cloned().collect())), - ) -} diff --git a/rust/automerge-c/src/doc.rs b/rust/automerge-c/src/doc.rs new file mode 100644 index 00000000..82f52bf7 --- /dev/null +++ b/rust/automerge-c/src/doc.rs @@ -0,0 +1,915 @@ +use automerge as am; +use automerge::sync::SyncDoc; +use automerge::transaction::{CommitOptions, Transactable}; +use automerge::ReadDoc; +use std::ops::{Deref, DerefMut}; + +use crate::actor_id::{to_actor_id, AMactorId}; +use crate::byte_span::{to_str, AMbyteSpan}; +use crate::items::AMitems; +use crate::obj::{to_obj_id, AMobjId, AMobjType}; +use crate::result::{to_result, AMresult}; +use crate::sync::{to_sync_message, AMsyncMessage, AMsyncState}; + +pub mod list; +pub mod map; +pub mod utils; + +use crate::doc::utils::{clamp, to_doc, to_doc_mut, to_items}; + +macro_rules! to_sync_state_mut { + ($handle:expr) => {{ + let handle = $handle.as_mut(); + match handle { + Some(b) => b, + None => return AMresult::error("Invalid `AMsyncState*`").into(), + } + }}; +} + +/// \struct AMdoc +/// \installed_headerfile +/// \brief A JSON-like CRDT. 
+#[derive(Clone)] +pub struct AMdoc(am::AutoCommit); + +impl AMdoc { + pub fn new(auto_commit: am::AutoCommit) -> Self { + Self(auto_commit) + } + + pub fn is_equal_to(&mut self, other: &mut Self) -> bool { + self.document().get_heads() == other.document().get_heads() + } +} + +impl AsRef for AMdoc { + fn as_ref(&self) -> &am::AutoCommit { + &self.0 + } +} + +impl Deref for AMdoc { + type Target = am::AutoCommit; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for AMdoc { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +/// \memberof AMdoc +/// \brief Applies a sequence of changes to a document. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] items A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE` +/// items. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p items `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// items must be a valid pointer to an AMitems. +#[no_mangle] +pub unsafe extern "C" fn AMapplyChanges(doc: *mut AMdoc, items: *const AMitems) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let items = to_items!(items); + match Vec::::try_from(items) { + Ok(changes) => to_result(doc.apply_changes(changes)), + Err(e) => AMresult::error(&e.to_string()).into(), + } +} + +/// \memberof AMdoc +/// \brief Allocates storage for a document and initializes it by duplicating +/// the given document. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_DOC` item. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMclone(doc: *const AMdoc) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(doc.as_ref().clone()) +} + +/// \memberof AMdoc +/// \brief Allocates a new document and initializes it with defaults. +/// +/// \param[in] actor_id A pointer to an `AMactorId` struct or `NULL` for a +/// random one. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_DOC` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// actor_id must be a valid pointer to an AMactorId or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMcreate(actor_id: *const AMactorId) -> *mut AMresult { + to_result(match actor_id.as_ref() { + Some(actor_id) => am::AutoCommit::new().with_actor(actor_id.as_ref().clone()), + None => am::AutoCommit::new(), + }) +} + +/// \memberof AMdoc +/// \brief Commits the current operations on a document with an optional +/// message and/or *nix timestamp (milliseconds). +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] message A UTF-8 string view as an `AMbyteSpan` struct. +/// \param[in] timestamp A pointer to a 64-bit integer or `NULL`. +/// \return A pointer to an `AMresult` struct with one `AM_VAL_TYPE_CHANGE_HASH` +/// item if there were operations to commit or an `AM_VAL_TYPE_VOID` item +/// if there were no operations to commit. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMcommit( + doc: *mut AMdoc, + message: AMbyteSpan, + timestamp: *const i64, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let mut options = CommitOptions::default(); + if !message.is_null() { + options.set_message(to_str!(message)); + } + if let Some(timestamp) = timestamp.as_ref() { + options.set_time(*timestamp); + } + to_result(doc.commit_with(options)) +} + +/// \memberof AMdoc +/// \brief Creates an empty change with an optional message and/or *nix +/// timestamp (milliseconds). +/// +/// \details This is useful if you wish to create a "merge commit" which has as +/// its dependents the current heads of the document but you don't have +/// any operations to add to the document. +/// +/// \note If there are outstanding uncommitted changes to the document +/// then two changes will be created: one for creating the outstanding +/// changes and one for the empty change. The empty change will always be +/// the latest change in the document after this call and the returned +/// hash will be the hash of that empty change. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] message A UTF-8 string view as an `AMbyteSpan` struct. +/// \param[in] timestamp A pointer to a 64-bit integer or `NULL`. +/// \return A pointer to an `AMresult` struct with one `AM_VAL_TYPE_CHANGE_HASH` +/// item. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMemptyChange( + doc: *mut AMdoc, + message: AMbyteSpan, + timestamp: *const i64, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let mut options = CommitOptions::default(); + if !message.is_null() { + options.set_message(to_str!(message)); + } + if let Some(timestamp) = timestamp.as_ref() { + options.set_time(*timestamp); + } + to_result(doc.empty_change(options)) +} + +/// \memberof AMdoc +/// \brief Tests the equality of two documents after closing their respective +/// transactions. +/// +/// \param[in] doc1 A pointer to an `AMdoc` struct. +/// \param[in] doc2 A pointer to an `AMdoc` struct. +/// \return `true` if \p doc1 `==` \p doc2 and `false` otherwise. +/// \pre \p doc1 `!= NULL` +/// \pre \p doc2 `!= NULL` +/// \internal +/// +/// #Safety +/// doc1 must be a valid pointer to an AMdoc +/// doc2 must be a valid pointer to an AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMequal(doc1: *mut AMdoc, doc2: *mut AMdoc) -> bool { + match (doc1.as_mut(), doc2.as_mut()) { + (Some(doc1), Some(doc2)) => doc1.is_equal_to(doc2), + (None, None) | (None, Some(_)) | (Some(_), None) => false, + } +} + +/// \memberof AMdoc +/// \brief Forks this document at its current or a historical point for use by +/// a different actor. +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items to select a historical point or `NULL` to select its +/// current point. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// heads must be a valid pointer to an AMitems or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMfork(doc: *mut AMdoc, heads: *const AMitems) -> *mut AMresult { + let doc = to_doc_mut!(doc); + match heads.as_ref() { + None => to_result(doc.fork()), + Some(heads) => match >::try_from(heads) { + Ok(heads) => to_result(doc.fork_at(&heads)), + Err(e) => AMresult::error(&e.to_string()).into(), + }, + } +} + +/// \memberof AMdoc +/// \brief Generates a synchronization message for a peer based upon the given +/// synchronization state. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] sync_state A pointer to an `AMsyncState` struct. +/// \return A pointer to an `AMresult` struct with either an +/// `AM_VAL_TYPE_SYNC_MESSAGE` or `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p sync_state `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// sync_state must be a valid pointer to an AMsyncState +#[no_mangle] +pub unsafe extern "C" fn AMgenerateSyncMessage( + doc: *mut AMdoc, + sync_state: *mut AMsyncState, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let sync_state = to_sync_state_mut!(sync_state); + to_result(doc.sync().generate_sync_message(sync_state.as_mut())) +} + +/// \memberof AMdoc +/// \brief Gets a document's actor identifier. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_ACTOR_ID` item. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMgetActorId(doc: *const AMdoc) -> *mut AMresult { + let doc = to_doc!(doc); + to_result(Ok::( + doc.get_actor().clone(), + )) +} + +/// \memberof AMdoc +/// \brief Gets the change added to a document by its respective hash. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] src A pointer to an array of bytes. +/// \param[in] count The count of bytes to copy from the array pointed to by +/// \p src. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_CHANGE` item. +/// \pre \p doc `!= NULL` +/// \pre \p src `!= NULL` +/// \pre `sizeof(`\p src') >= AM_CHANGE_HASH_SIZE` +/// \pre \p count `<= sizeof(`\p src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// src must be a byte array of length `>= automerge::types::HASH_SIZE` +#[no_mangle] +pub unsafe extern "C" fn AMgetChangeByHash( + doc: *mut AMdoc, + src: *const u8, + count: usize, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let slice = std::slice::from_raw_parts(src, count); + match slice.try_into() { + Ok(change_hash) => to_result(doc.get_change_by_hash(&change_hash)), + Err(e) => AMresult::error(&e.to_string()).into(), + } +} + +/// \memberof AMdoc +/// \brief Gets the changes added to a document by their respective hashes. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] have_deps A pointer to an `AMitems` struct with +/// `AM_VAL_TYPE_CHANGE_HASH` items or `NULL`. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE` items. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMgetChanges(doc: *mut AMdoc, have_deps: *const AMitems) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let have_deps = match have_deps.as_ref() { + Some(have_deps) => match Vec::::try_from(have_deps) { + Ok(change_hashes) => change_hashes, + Err(e) => return AMresult::error(&e.to_string()).into(), + }, + None => Vec::::new(), + }; + to_result(doc.get_changes(&have_deps)) +} + +/// \memberof AMdoc +/// \brief Gets the changes added to a second document that weren't added to +/// a first document. +/// +/// \param[in] doc1 A pointer to an `AMdoc` struct. +/// \param[in] doc2 A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE` items. +/// \pre \p doc1 `!= NULL` +/// \pre \p doc2 `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// doc1 must be a valid pointer to an AMdoc +/// doc2 must be a valid pointer to an AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMgetChangesAdded(doc1: *mut AMdoc, doc2: *mut AMdoc) -> *mut AMresult { + let doc1 = to_doc_mut!(doc1); + let doc2 = to_doc_mut!(doc2); + to_result(doc1.get_changes_added(doc2)) +} + +/// \memberof AMdoc +/// \brief Gets the current heads of a document. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMgetHeads(doc: *mut AMdoc) -> *mut AMresult { + let doc = to_doc_mut!(doc); + to_result(Ok::, am::AutomergeError>( + doc.get_heads(), + )) +} + +/// \memberof AMdoc +/// \brief Gets the hashes of the changes in a document that aren't transitive +/// dependencies of the given hashes of changes. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items or `NULL`. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// heads must be a valid pointer to an AMitems or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMgetMissingDeps(doc: *mut AMdoc, heads: *const AMitems) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let heads = match heads.as_ref() { + None => Vec::::new(), + Some(heads) => match >::try_from(heads) { + Ok(heads) => heads, + Err(e) => { + return AMresult::error(&e.to_string()).into(); + } + }, + }; + to_result(doc.get_missing_deps(heads.as_slice())) +} + +/// \memberof AMdoc +/// \brief Gets the last change made to a document. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct containing either an +/// `AM_VAL_TYPE_CHANGE` or `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMgetLastLocalChange(doc: *mut AMdoc) -> *mut AMresult { + let doc = to_doc_mut!(doc); + to_result(doc.get_last_local_change()) +} + +/// \memberof AMdoc +/// \brief Gets the current or historical keys of a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items to select historical keys or `NULL` to select current +/// keys. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_STR` items. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// heads must be a valid pointer to an AMitems or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMkeys( + doc: *const AMdoc, + obj_id: *const AMobjId, + heads: *const AMitems, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + match heads.as_ref() { + None => to_result(doc.keys(obj_id)), + Some(heads) => match >::try_from(heads) { + Ok(heads) => to_result(doc.keys_at(obj_id, &heads)), + Err(e) => AMresult::error(&e.to_string()).into(), + }, + } +} + +/// \memberof AMdoc +/// \brief Allocates storage for a document and initializes it with the compact +/// form of an incremental save. +/// +/// \param[in] src A pointer to an array of bytes. +/// \param[in] count The count of bytes to load from the array pointed to by +/// \p src. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_DOC` item. 
+/// \pre \p src `!= NULL` +/// \pre `sizeof(`\p src `) > 0` +/// \pre \p count `<= sizeof(`\p src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// src must be a byte array of length `>= count` +#[no_mangle] +pub unsafe extern "C" fn AMload(src: *const u8, count: usize) -> *mut AMresult { + let data = std::slice::from_raw_parts(src, count); + to_result(am::AutoCommit::load(data)) +} + +/// \memberof AMdoc +/// \brief Loads the compact form of an incremental save into a document. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] src A pointer to an array of bytes. +/// \param[in] count The count of bytes to load from the array pointed to by +/// \p src. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_UINT` item. +/// \pre \p doc `!= NULL` +/// \pre \p src `!= NULL` +/// \pre `sizeof(`\p src `) > 0` +/// \pre \p count `<= sizeof(`\p src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// src must be a byte array of length `>= count` +#[no_mangle] +pub unsafe extern "C" fn AMloadIncremental( + doc: *mut AMdoc, + src: *const u8, + count: usize, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let data = std::slice::from_raw_parts(src, count); + to_result(doc.load_incremental(data)) +} + +/// \memberof AMdoc +/// \brief Applies all of the changes in \p src which are not in \p dest to +/// \p dest. +/// +/// \param[in] dest A pointer to an `AMdoc` struct. +/// \param[in] src A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. 
+/// \pre \p dest `!= NULL` +/// \pre \p src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// dest must be a valid pointer to an AMdoc +/// src must be a valid pointer to an AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMmerge(dest: *mut AMdoc, src: *mut AMdoc) -> *mut AMresult { + let dest = to_doc_mut!(dest); + to_result(dest.merge(to_doc_mut!(src))) +} + +/// \memberof AMdoc +/// \brief Gets the current or historical size of an object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items to select a historical size or `NULL` to select its +/// current size. +/// \return The count of items in the object identified by \p obj_id. +/// \pre \p doc `!= NULL` +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// heads must be a valid pointer to an AMitems or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMobjSize( + doc: *const AMdoc, + obj_id: *const AMobjId, + heads: *const AMitems, +) -> usize { + if let Some(doc) = doc.as_ref() { + let obj_id = to_obj_id!(obj_id); + match heads.as_ref() { + None => { + return doc.length(obj_id); + } + Some(heads) => { + if let Ok(heads) = >::try_from(heads) { + return doc.length_at(obj_id, &heads); + } + } + } + } + 0 +} + +/// \memberof AMdoc +/// \brief Gets the type of an object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \return An `AMobjType` tag or `0`. 
+/// \pre \p doc `!= NULL` +/// \pre \p obj_id `!= NULL` +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMobjObjType(doc: *const AMdoc, obj_id: *const AMobjId) -> AMobjType { + if let Some(doc) = doc.as_ref() { + let obj_id = to_obj_id!(obj_id); + if let Ok(obj_type) = doc.object_type(obj_id) { + return (&obj_type).into(); + } + } + Default::default() +} + +/// \memberof AMdoc +/// \brief Gets the current or historical items of an entire object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items to select its historical items or `NULL` to select +/// its current items. +/// \return A pointer to an `AMresult` struct with an `AMitems` struct. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// heads must be a valid pointer to an AMitems or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMobjItems( + doc: *const AMdoc, + obj_id: *const AMobjId, + heads: *const AMitems, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + match heads.as_ref() { + None => to_result(doc.values(obj_id)), + Some(heads) => match >::try_from(heads) { + Ok(heads) => to_result(doc.values_at(obj_id, &heads)), + Err(e) => AMresult::error(&e.to_string()).into(), + }, + } +} + +/// \memberof AMdoc +/// \brief Gets the number of pending operations added during a document's +/// current transaction. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. 
+/// \return The count of pending operations for \p doc. +/// \pre \p doc `!= NULL` +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMpendingOps(doc: *const AMdoc) -> usize { + if let Some(doc) = doc.as_ref() { + return doc.pending_ops(); + } + 0 +} + +/// \memberof AMdoc +/// \brief Receives a synchronization message from a peer based upon a given +/// synchronization state. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] sync_state A pointer to an `AMsyncState` struct. +/// \param[in] sync_message A pointer to an `AMsyncMessage` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p sync_state `!= NULL` +/// \pre \p sync_message `!= NULL` +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// sync_state must be a valid pointer to an AMsyncState +/// sync_message must be a valid pointer to an AMsyncMessage +#[no_mangle] +pub unsafe extern "C" fn AMreceiveSyncMessage( + doc: *mut AMdoc, + sync_state: *mut AMsyncState, + sync_message: *const AMsyncMessage, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let sync_state = to_sync_state_mut!(sync_state); + let sync_message = to_sync_message!(sync_message); + to_result( + doc.sync() + .receive_sync_message(sync_state.as_mut(), sync_message.as_ref().clone()), + ) +} + +/// \memberof AMdoc +/// \brief Cancels the pending operations added during a document's current +/// transaction and gets the number of cancellations. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return The count of pending operations for \p doc that were cancelled. 
+/// \pre \p doc `!= NULL` +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMrollback(doc: *mut AMdoc) -> usize { + if let Some(doc) = doc.as_mut() { + return doc.rollback(); + } + 0 +} + +/// \memberof AMdoc +/// \brief Saves the entirety of a document into a compact form. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_BYTES` item. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMsave(doc: *mut AMdoc) -> *mut AMresult { + let doc = to_doc_mut!(doc); + to_result(Ok(doc.save())) +} + +/// \memberof AMdoc +/// \brief Saves the changes to a document since its last save into a compact +/// form. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_BYTES` item. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +#[no_mangle] +pub unsafe extern "C" fn AMsaveIncremental(doc: *mut AMdoc) -> *mut AMresult { + let doc = to_doc_mut!(doc); + to_result(Ok(doc.save_incremental())) +} + +/// \memberof AMdoc +/// \brief Puts the actor identifier of a document. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] actor_id A pointer to an `AMactorId` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p actor_id `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// actor_id must be a valid pointer to an AMactorId +#[no_mangle] +pub unsafe extern "C" fn AMsetActorId( + doc: *mut AMdoc, + actor_id: *const AMactorId, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let actor_id = to_actor_id!(actor_id); + doc.set_actor(actor_id.as_ref().clone()); + to_result(Ok(())) +} + +/// \memberof AMdoc +/// \brief Splices values into and/or removes values from the identified object +/// at a given position within it. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] pos A position in the object identified by \p obj_id or +/// `SIZE_MAX` to indicate one past its end. +/// \param[in] del The number of values to delete or `SIZE_MAX` to indicate +/// all of them. +/// \param[in] values A copy of an `AMitems` struct from which values will be +/// spliced starting at its current position; call +/// `AMitemsRewound()` on a used `AMitems` first to ensure +/// that all of its values are spliced in. Pass `(AMitems){0}` +/// when zero values should be spliced in. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \pre `0 <=` \p del `<= AMobjSize(`\p obj_id `)` or \p del `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// values must be a valid pointer to an AMitems or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMsplice( + doc: *mut AMdoc, + obj_id: *const AMobjId, + pos: usize, + del: usize, + values: AMitems, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let obj_id = to_obj_id!(obj_id); + let len = doc.length(obj_id); + let pos = clamp!(pos, len, "pos"); + let del = clamp!(del, len, "del"); + match Vec::::try_from(&values) { + Ok(vals) => to_result(doc.splice(obj_id, pos, del, vals)), + Err(e) => AMresult::error(&e.to_string()).into(), + } +} + +/// \memberof AMdoc +/// \brief Splices characters into and/or removes characters from the +/// identified object at a given position within it. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] pos A position in the text object identified by \p obj_id or +/// `SIZE_MAX` to indicate one past its end. +/// \param[in] del The number of characters to delete or `SIZE_MAX` to indicate +/// all of them. +/// \param[in] text A UTF-8 string view as an `AMbyteSpan` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \pre `0 <=` \p del `<= AMobjSize(`\p obj_id `)` or \p del `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMspliceText( + doc: *mut AMdoc, + obj_id: *const AMobjId, + pos: usize, + del: usize, + text: AMbyteSpan, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let obj_id = to_obj_id!(obj_id); + let len = doc.length(obj_id); + let pos = clamp!(pos, len, "pos"); + let del = clamp!(del, len, "del"); + to_result(doc.splice_text(obj_id, pos, del, to_str!(text))) +} + +/// \memberof AMdoc +/// \brief Gets the current or historical string represented by a text object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] heads A pointer to an `AMitems` struct containing +/// `AM_VAL_TYPE_CHANGE_HASH` items to select a historical string +/// or `NULL` to select the current string. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_STR` item. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// heads must be a valid pointer to an AMitems or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMtext( + doc: *const AMdoc, + obj_id: *const AMobjId, + heads: *const AMitems, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + match heads.as_ref() { + None => to_result(doc.text(obj_id)), + Some(heads) => match >::try_from(heads) { + Ok(heads) => to_result(doc.text_at(obj_id, &heads)), + Err(e) => AMresult::error(&e.to_string()).into(), + }, + } +} diff --git a/rust/automerge-c/src/doc/list.rs b/rust/automerge-c/src/doc/list.rs new file mode 100644 index 00000000..c4503322 --- /dev/null +++ b/rust/automerge-c/src/doc/list.rs @@ -0,0 +1,636 @@ +use automerge as am; +use automerge::transaction::Transactable; +use automerge::ReadDoc; + +use crate::byte_span::{to_str, AMbyteSpan}; +use crate::doc::{to_doc, to_doc_mut, AMdoc}; +use crate::items::AMitems; +use crate::obj::{to_obj_id, to_obj_type, AMobjId, AMobjType}; +use crate::result::{to_result, AMresult}; + +macro_rules! adjust { + ($pos:expr, $insert:expr, $len:expr) => {{ + // An empty object can only be inserted into. + let insert = $insert || $len == 0; + let end = if insert { $len } else { $len - 1 }; + if $pos > end && $pos != usize::MAX { + return AMresult::error(&format!("Invalid pos {}", $pos)).into(); + } + (std::cmp::min($pos, end), insert) + }}; +} + +macro_rules! to_range { + ($begin:expr, $end:expr) => {{ + if $begin > $end { + return AMresult::error(&format!("Invalid range [{}-{})", $begin, $end)).into(); + }; + ($begin..$end) + }}; +} + +/// \memberof AMdoc +/// \brief Deletes an item from a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. 
+/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMlistDelete( + doc: *mut AMdoc, + obj_id: *const AMobjId, + pos: usize, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let obj_id = to_obj_id!(obj_id); + let (pos, _) = adjust!(pos, false, doc.length(obj_id)); + to_result(doc.delete(obj_id, pos)) +} + +/// \memberof AMdoc +/// \brief Gets a current or historical item within a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item. +/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items to select a historical item at \p pos or `NULL` +/// to select the current item at \p pos. +/// \return A pointer to an `AMresult` struct with an `AMitem` struct. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// heads must be a valid pointer to an AMitems or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMlistGet( + doc: *const AMdoc, + obj_id: *const AMobjId, + pos: usize, + heads: *const AMitems, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + let (pos, _) = adjust!(pos, false, doc.length(obj_id)); + match heads.as_ref() { + None => to_result(doc.get(obj_id, pos)), + Some(heads) => match >::try_from(heads) { + Ok(heads) => to_result(doc.get_at(obj_id, pos, &heads)), + Err(e) => AMresult::error(&e.to_string()).into(), + }, + } +} + +/// \memberof AMdoc +/// \brief Gets all of the historical items at a position within a list object +/// until its current one or a specific one. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item. +/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items to select a historical last item or `NULL` to select +/// the current last item. +/// \return A pointer to an `AMresult` struct with an `AMitems` struct. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// heads must be a valid pointer to an AMitems or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMlistGetAll( + doc: *const AMdoc, + obj_id: *const AMobjId, + pos: usize, + heads: *const AMitems, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + let (pos, _) = adjust!(pos, false, doc.length(obj_id)); + match heads.as_ref() { + None => to_result(doc.get_all(obj_id, pos)), + Some(heads) => match >::try_from(heads) { + Ok(heads) => to_result(doc.get_all_at(obj_id, pos, &heads)), + Err(e) => AMresult::error(&e.to_string()).into(), + }, + } +} + +/// \memberof AMdoc +/// \brief Increments a counter value in an item within a list object by the +/// given value. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMlistIncrement( + doc: *mut AMdoc, + obj_id: *const AMobjId, + pos: usize, + value: i64, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let obj_id = to_obj_id!(obj_id); + let (pos, _) = adjust!(pos, false, doc.length(obj_id)); + to_result(doc.increment(obj_id, pos, value)) +} + +/// \memberof AMdoc +/// \brief Puts a boolean value into an item within a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item if +/// \p insert `== false` or one past its last item if +/// \p insert `== true`. +/// \param[in] insert A flag for inserting a new item for \p value before +/// \p pos instead of putting \p value into the item at +/// \p pos. +/// \param[in] value A boolean. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMlistPutBool( + doc: *mut AMdoc, + obj_id: *const AMobjId, + pos: usize, + insert: bool, + value: bool, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let obj_id = to_obj_id!(obj_id); + let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); + let value = am::ScalarValue::Boolean(value); + to_result(if insert { + doc.insert(obj_id, pos, value) + } else { + doc.put(obj_id, pos, value) + }) +} + +/// \memberof AMdoc +/// \brief Puts an array of bytes value at a position within a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item if +/// \p insert `== false` or one past its last item if +/// \p insert `== true`. +/// \param[in] insert A flag for inserting a new item for \p value before +/// \p pos instead of putting \p value into the item at +/// \p pos. +/// \param[in] value A view onto the array of bytes to copy from as an +/// `AMbyteSpan` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \pre \p value.src `!= NULL` +/// \pre `0 <` \p value.count `<= sizeof(`\p value.src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// value.src must be a byte array of length >= value.count +#[no_mangle] +pub unsafe extern "C" fn AMlistPutBytes( + doc: *mut AMdoc, + obj_id: *const AMobjId, + pos: usize, + insert: bool, + value: AMbyteSpan, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let obj_id = to_obj_id!(obj_id); + let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); + let value: Vec = (&value).into(); + to_result(if insert { + doc.insert(obj_id, pos, value) + } else { + doc.put(obj_id, pos, value) + }) +} + +/// \memberof AMdoc +/// \brief Puts a CRDT counter value into an item within a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item if +/// \p insert `== false` or one past its last item if +/// \p insert `== true`. +/// \param[in] insert A flag for inserting a new item for \p value before +/// \p pos instead of putting \p value into the item at +/// \p pos. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMlistPutCounter( + doc: *mut AMdoc, + obj_id: *const AMobjId, + pos: usize, + insert: bool, + value: i64, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let obj_id = to_obj_id!(obj_id); + let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); + let value = am::ScalarValue::Counter(value.into()); + to_result(if insert { + doc.insert(obj_id, pos, value) + } else { + doc.put(obj_id, pos, value) + }) +} + +/// \memberof AMdoc +/// \brief Puts a float value into an item within a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item if +/// \p insert `== false` or one past its last item if +/// \p insert `== true`. +/// \param[in] insert A flag for inserting a new item for \p value before +/// \p pos instead of putting \p value into the item at +/// \p pos. +/// \param[in] value A 64-bit float. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMlistPutF64( + doc: *mut AMdoc, + obj_id: *const AMobjId, + pos: usize, + insert: bool, + value: f64, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let obj_id = to_obj_id!(obj_id); + let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); + to_result(if insert { + doc.insert(obj_id, pos, value) + } else { + doc.put(obj_id, pos, value) + }) +} + +/// \memberof AMdoc +/// \brief Puts a signed integer value into an item within a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item if +/// \p insert `== false` or one past its last item if +/// \p insert `== true`. +/// \param[in] insert A flag for inserting a new item for \p value before +/// \p pos instead of putting \p value into the item at +/// \p pos. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMlistPutInt( + doc: *mut AMdoc, + obj_id: *const AMobjId, + pos: usize, + insert: bool, + value: i64, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let obj_id = to_obj_id!(obj_id); + let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); + to_result(if insert { + doc.insert(obj_id, pos, value) + } else { + doc.put(obj_id, pos, value) + }) +} + +/// \memberof AMdoc +/// \brief Puts a null value into an item within a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item if +/// \p insert `== false` or one past its last item if +/// \p insert `== true`. +/// \param[in] insert A flag for inserting a new item for \p value before +/// \p pos instead of putting \p value into the item at +/// \p pos. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMlistPutNull( + doc: *mut AMdoc, + obj_id: *const AMobjId, + pos: usize, + insert: bool, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let obj_id = to_obj_id!(obj_id); + let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); + to_result(if insert { + doc.insert(obj_id, pos, ()) + } else { + doc.put(obj_id, pos, ()) + }) +} + +/// \memberof AMdoc +/// \brief Puts an empty object value into an item within a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item if +/// \p insert `== false` or one past its last item if +/// \p insert `== true`. +/// \param[in] insert A flag for inserting a new item for \p value before +/// \p pos instead of putting \p value into the item at +/// \p pos. +/// \param[in] obj_type An `AMobjIdType` enum tag. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_OBJ_TYPE` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMlistPutObject( + doc: *mut AMdoc, + obj_id: *const AMobjId, + pos: usize, + insert: bool, + obj_type: AMobjType, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let obj_id = to_obj_id!(obj_id); + let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); + let obj_type = to_obj_type!(obj_type); + to_result(if insert { + (doc.insert_object(obj_id, pos, obj_type), obj_type) + } else { + (doc.put_object(obj_id, pos, obj_type), obj_type) + }) +} + +/// \memberof AMdoc +/// \brief Puts a UTF-8 string value into an item within a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item if +/// \p insert `== false` or one past its last item if +/// \p insert `== true`. +/// \param[in] insert A flag for inserting a new item for \p value before +/// \p pos instead of putting \p value into the item at +/// \p pos. +/// \param[in] value A UTF-8 string view as an `AMbyteSpan` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \pre \p value.src `!= NULL` +/// \pre `0 <` \p value.count `<= sizeof(`\p value.src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// value.src must be a byte array of length >= value.count +#[no_mangle] +pub unsafe extern "C" fn AMlistPutStr( + doc: *mut AMdoc, + obj_id: *const AMobjId, + pos: usize, + insert: bool, + value: AMbyteSpan, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let obj_id = to_obj_id!(obj_id); + let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); + let value = to_str!(value); + to_result(if insert { + doc.insert(obj_id, pos, value) + } else { + doc.put(obj_id, pos, value) + }) +} + +/// \memberof AMdoc +/// \brief Puts a *nix timestamp (milliseconds) value into an item within a +/// list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item if +/// \p insert `== false` or one past its last item if +/// \p insert `== true`. +/// \param[in] insert A flag for inserting a new item for \p value before +/// \p pos instead of putting \p value into the item at +/// \p pos. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMlistPutTimestamp( + doc: *mut AMdoc, + obj_id: *const AMobjId, + pos: usize, + insert: bool, + value: i64, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let obj_id = to_obj_id!(obj_id); + let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); + let value = am::ScalarValue::Timestamp(value); + to_result(if insert { + doc.insert(obj_id, pos, value) + } else { + doc.put(obj_id, pos, value) + }) +} + +/// \memberof AMdoc +/// \brief Puts an unsigned integer value into an item within a list object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] pos The position of an item within the list object identified by +/// \p obj_id or `SIZE_MAX` to indicate its last item if +/// \p insert `== false` or one past its last item if +/// \p insert `== true`. +/// \param[in] insert A flag for inserting a new item for \p value before +/// \p pos instead of putting \p value into the item at +/// \p pos. +/// \param[in] value A 64-bit unsigned integer. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre `0 <=` \p pos `<= AMobjSize(`\p obj_id `)` or \p pos `== SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMlistPutUint( + doc: *mut AMdoc, + obj_id: *const AMobjId, + pos: usize, + insert: bool, + value: u64, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let obj_id = to_obj_id!(obj_id); + let (pos, insert) = adjust!(pos, insert, doc.length(obj_id)); + to_result(if insert { + doc.insert(obj_id, pos, value) + } else { + doc.put(obj_id, pos, value) + }) +} + +/// \memberof AMdoc +/// \brief Gets the current or historical items in the list object within the +/// given range. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] begin The first pos in a range of indices. +/// \param[in] end At least one past the last pos in a range of indices. +/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items to select historical items or `NULL` to select +/// current items. +/// \return A pointer to an `AMresult` struct with an `AMitems` struct. +/// \pre \p doc `!= NULL` +/// \pre \p begin `<=` \p end `<= SIZE_MAX` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// heads must be a valid pointer to an AMitems or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMlistRange( + doc: *const AMdoc, + obj_id: *const AMobjId, + begin: usize, + end: usize, + heads: *const AMitems, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + let range = to_range!(begin, end); + match heads.as_ref() { + None => to_result(doc.list_range(obj_id, range)), + Some(heads) => match >::try_from(heads) { + Ok(heads) => to_result(doc.list_range_at(obj_id, range, &heads)), + Err(e) => AMresult::error(&e.to_string()).into(), + }, + } +} diff --git a/rust/automerge-c/src/doc/map.rs b/rust/automerge-c/src/doc/map.rs new file mode 100644 index 00000000..b2f7db02 --- /dev/null +++ b/rust/automerge-c/src/doc/map.rs @@ -0,0 +1,552 @@ +use automerge as am; +use automerge::transaction::Transactable; +use automerge::ReadDoc; + +use crate::byte_span::{to_str, AMbyteSpan}; +use crate::doc::{to_doc, to_doc_mut, AMdoc}; +use crate::items::AMitems; +use crate::obj::{to_obj_id, to_obj_type, AMobjId, AMobjType}; +use crate::result::{to_result, AMresult}; + +/// \memberof AMdoc +/// \brief Deletes an item from a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] key The UTF-8 string view key of an item within the map object +/// identified by \p obj_id as an `AMbyteSpan` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count +#[no_mangle] +pub unsafe extern "C" fn AMmapDelete( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: AMbyteSpan, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let key = to_str!(key); + to_result(doc.delete(to_obj_id!(obj_id), key)) +} + +/// \memberof AMdoc +/// \brief Gets a current or historical item within a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] key The UTF-8 string view key of an item within the map object +/// identified by \p obj_id as an `AMbyteSpan` struct. +/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items to select a historical item at \p key or `NULL` +/// to select the current item at \p key. +/// \return A pointer to an `AMresult` struct with an `AMitem` struct. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count +/// heads must be a valid pointer to an AMitems or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMmapGet( + doc: *const AMdoc, + obj_id: *const AMobjId, + key: AMbyteSpan, + heads: *const AMitems, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + let key = to_str!(key); + match heads.as_ref() { + None => to_result(doc.get(obj_id, key)), + Some(heads) => match >::try_from(heads) { + Ok(heads) => to_result(doc.get_at(obj_id, key, &heads)), + Err(e) => AMresult::error(&e.to_string()).into(), + }, + } +} + +/// \memberof AMdoc +/// \brief Gets all of the historical items at a key within a map object until +/// its current one or a specific one. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] key The UTF-8 string view key of an item within the map object +/// identified by \p obj_id as an `AMbyteSpan` struct. +/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items to select a historical last item or `NULL` to +/// select the current last item. +/// \return A pointer to an `AMresult` struct with an `AMItems` struct. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count +/// heads must be a valid pointer to an AMitems or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMmapGetAll( + doc: *const AMdoc, + obj_id: *const AMobjId, + key: AMbyteSpan, + heads: *const AMitems, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + let key = to_str!(key); + match heads.as_ref() { + None => to_result(doc.get_all(obj_id, key)), + Some(heads) => match >::try_from(heads) { + Ok(heads) => to_result(doc.get_all_at(obj_id, key, &heads)), + Err(e) => AMresult::error(&e.to_string()).into(), + }, + } +} + +/// \memberof AMdoc +/// \brief Increments a counter at a key in a map object by the given value. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] key The UTF-8 string view key of an item within the map object +/// identified by \p obj_id as an `AMbyteSpan` struct. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count +#[no_mangle] +pub unsafe extern "C" fn AMmapIncrement( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: AMbyteSpan, + value: i64, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let key = to_str!(key); + to_result(doc.increment(to_obj_id!(obj_id), key, value)) +} + +/// \memberof AMdoc +/// \brief Puts a boolean as the value of a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] key The UTF-8 string view key of an item within the map object +/// identified by \p obj_id as an `AMbyteSpan` struct. +/// \param[in] value A boolean. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count +#[no_mangle] +pub unsafe extern "C" fn AMmapPutBool( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: AMbyteSpan, + value: bool, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let key = to_str!(key); + to_result(doc.put(to_obj_id!(obj_id), key, value)) +} + +/// \memberof AMdoc +/// \brief Puts an array of bytes value at a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] key The UTF-8 string view key of an item within the map object +/// identified by \p obj_id as an `AMbyteSpan` struct. 
+/// \param[in] value A view onto an array of bytes as an `AMbyteSpan` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \pre \p value.src `!= NULL` +/// \pre `0 <` \p value.count `<= sizeof(`\p value.src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count +/// value.src must be a byte array of length >= value.count +#[no_mangle] +pub unsafe extern "C" fn AMmapPutBytes( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: AMbyteSpan, + value: AMbyteSpan, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let key = to_str!(key); + to_result(doc.put(to_obj_id!(obj_id), key, Vec::::from(&value))) +} + +/// \memberof AMdoc +/// \brief Puts a CRDT counter as the value of a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count +#[no_mangle] +pub unsafe extern "C" fn AMmapPutCounter( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: AMbyteSpan, + value: i64, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let key = to_str!(key); + to_result(doc.put( + to_obj_id!(obj_id), + key, + am::ScalarValue::Counter(value.into()), + )) +} + +/// \memberof AMdoc +/// \brief Puts null as the value of a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count +#[no_mangle] +pub unsafe extern "C" fn AMmapPutNull( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: AMbyteSpan, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let key = to_str!(key); + to_result(doc.put(to_obj_id!(obj_id), key, ())) +} + +/// \memberof AMdoc +/// \brief Puts an empty object as the value of a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. +/// \param[in] obj_type An `AMobjIdType` enum tag. 
+/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_OBJ_TYPE` item. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count +#[no_mangle] +pub unsafe extern "C" fn AMmapPutObject( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: AMbyteSpan, + obj_type: AMobjType, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let key = to_str!(key); + let obj_type = to_obj_type!(obj_type); + to_result((doc.put_object(to_obj_id!(obj_id), key, obj_type), obj_type)) +} + +/// \memberof AMdoc +/// \brief Puts a float as the value of a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. +/// \param[in] value A 64-bit float. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count +#[no_mangle] +pub unsafe extern "C" fn AMmapPutF64( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: AMbyteSpan, + value: f64, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let key = to_str!(key); + to_result(doc.put(to_obj_id!(obj_id), key, value)) +} + +/// \memberof AMdoc +/// \brief Puts a signed integer as the value of a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count +#[no_mangle] +pub unsafe extern "C" fn AMmapPutInt( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: AMbyteSpan, + value: i64, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let key = to_str!(key); + to_result(doc.put(to_obj_id!(obj_id), key, value)) +} + +/// \memberof AMdoc +/// \brief Puts a UTF-8 string as the value of a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. 
+/// \param[in] value A UTF-8 string view as an `AMbyteSpan` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count +#[no_mangle] +pub unsafe extern "C" fn AMmapPutStr( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: AMbyteSpan, + value: AMbyteSpan, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + to_result(doc.put(to_obj_id!(obj_id), to_str!(key), to_str!(value))) +} + +/// \memberof AMdoc +/// \brief Puts a *nix timestamp (milliseconds) as the value of a key in a map +/// object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count +#[no_mangle] +pub unsafe extern "C" fn AMmapPutTimestamp( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: AMbyteSpan, + value: i64, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let key = to_str!(key); + to_result(doc.put(to_obj_id!(obj_id), key, am::ScalarValue::Timestamp(value))) +} + +/// \memberof AMdoc +/// \brief Puts an unsigned integer as the value of a key in a map object. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] key A UTF-8 string view key for the map object identified by +/// \p obj_id as an `AMbyteSpan` struct. +/// \param[in] value A 64-bit unsigned integer. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_VOID` item. +/// \pre \p doc `!= NULL` +/// \pre \p key.src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// key.src must be a byte array of length >= key.count +#[no_mangle] +pub unsafe extern "C" fn AMmapPutUint( + doc: *mut AMdoc, + obj_id: *const AMobjId, + key: AMbyteSpan, + value: u64, +) -> *mut AMresult { + let doc = to_doc_mut!(doc); + let key = to_str!(key); + to_result(doc.put(to_obj_id!(obj_id), key, value)) +} + +/// \memberof AMdoc +/// \brief Gets the current or historical items of the map object within the +/// given range. +/// +/// \param[in] doc A pointer to an `AMdoc` struct. +/// \param[in] obj_id A pointer to an `AMobjId` struct or `AM_ROOT`. +/// \param[in] begin The first key in a subrange or `AMstr(NULL)` to indicate the +/// absolute first key. 
+/// \param[in] end The key one past the last key in a subrange or `AMstr(NULL)` +/// to indicate one past the absolute last key. +/// \param[in] heads A pointer to an `AMitems` struct with `AM_VAL_TYPE_CHANGE_HASH` +/// items to select historical items or `NULL` to select +/// current items. +/// \return A pointer to an `AMresult` struct with an `AMitems` struct. +/// \pre \p doc `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// doc must be a valid pointer to an AMdoc +/// obj_id must be a valid pointer to an AMobjId or std::ptr::null() +/// begin.src must be a byte array of length >= begin.count or std::ptr::null() +/// end.src must be a byte array of length >= end.count or std::ptr::null() +/// heads must be a valid pointer to an AMitems or std::ptr::null() +#[no_mangle] +pub unsafe extern "C" fn AMmapRange( + doc: *const AMdoc, + obj_id: *const AMobjId, + begin: AMbyteSpan, + end: AMbyteSpan, + heads: *const AMitems, +) -> *mut AMresult { + let doc = to_doc!(doc); + let obj_id = to_obj_id!(obj_id); + let heads = match heads.as_ref() { + None => None, + Some(heads) => match >::try_from(heads) { + Ok(heads) => Some(heads), + Err(e) => { + return AMresult::error(&e.to_string()).into(); + } + }, + }; + match (begin.is_null(), end.is_null()) { + (false, false) => { + let (begin, end) = (to_str!(begin).to_string(), to_str!(end).to_string()); + if begin > end { + return AMresult::error(&format!("Invalid range [{}-{})", begin, end)).into(); + }; + let bounds = begin..end; + if let Some(heads) = heads { + to_result(doc.map_range_at(obj_id, bounds, &heads)) + } else { + to_result(doc.map_range(obj_id, bounds)) + } + } + (false, true) => { + let bounds = to_str!(begin).to_string()..; + if let Some(heads) = heads { + to_result(doc.map_range_at(obj_id, bounds, &heads)) + } else { + to_result(doc.map_range(obj_id, bounds)) + } + } + (true, false) => { 
+ let bounds = ..to_str!(end).to_string(); + if let Some(heads) = heads { + to_result(doc.map_range_at(obj_id, bounds, &heads)) + } else { + to_result(doc.map_range(obj_id, bounds)) + } + } + (true, true) => { + let bounds = ..; + if let Some(heads) = heads { + to_result(doc.map_range_at(obj_id, bounds, &heads)) + } else { + to_result(doc.map_range(obj_id, bounds)) + } + } + } +} diff --git a/rust/automerge-c/src/doc/utils.rs b/rust/automerge-c/src/doc/utils.rs new file mode 100644 index 00000000..ce465b84 --- /dev/null +++ b/rust/automerge-c/src/doc/utils.rs @@ -0,0 +1,46 @@ +macro_rules! clamp { + ($index:expr, $len:expr, $param_name:expr) => {{ + if $index > $len && $index != usize::MAX { + return AMresult::error(&format!("Invalid {} {}", $param_name, $index)).into(); + } + std::cmp::min($index, $len) + }}; +} + +pub(crate) use clamp; + +macro_rules! to_doc { + ($handle:expr) => {{ + let handle = $handle.as_ref(); + match handle { + Some(b) => b, + None => return AMresult::error("Invalid `AMdoc*`").into(), + } + }}; +} + +pub(crate) use to_doc; + +macro_rules! to_doc_mut { + ($handle:expr) => {{ + let handle = $handle.as_mut(); + match handle { + Some(b) => b, + None => return AMresult::error("Invalid `AMdoc*`").into(), + } + }}; +} + +pub(crate) use to_doc_mut; + +macro_rules! to_items { + ($handle:expr) => {{ + let handle = $handle.as_ref(); + match handle { + Some(b) => b, + None => return AMresult::error("Invalid `AMitems*`").into(), + } + }}; +} + +pub(crate) use to_items; diff --git a/rust/automerge-c/src/index.rs b/rust/automerge-c/src/index.rs new file mode 100644 index 00000000..f1ea153b --- /dev/null +++ b/rust/automerge-c/src/index.rs @@ -0,0 +1,84 @@ +use automerge as am; + +use std::any::type_name; + +use smol_str::SmolStr; + +use crate::byte_span::AMbyteSpan; + +/// \struct AMindex +/// \installed_headerfile +/// \brief An item index. +#[derive(PartialEq)] +pub enum AMindex { + /// A UTF-8 string key variant. 
+ Key(SmolStr), + /// A 64-bit unsigned integer position variant. + Pos(usize), +} + +impl TryFrom<&AMindex> for AMbyteSpan { + type Error = am::AutomergeError; + + fn try_from(item: &AMindex) -> Result { + use am::AutomergeError::InvalidValueType; + use AMindex::*; + + if let Key(key) = item { + return Ok(key.into()); + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } +} + +impl TryFrom<&AMindex> for usize { + type Error = am::AutomergeError; + + fn try_from(item: &AMindex) -> Result { + use am::AutomergeError::InvalidValueType; + use AMindex::*; + + if let Pos(pos) = item { + return Ok(*pos); + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } +} + +/// \ingroup enumerations +/// \enum AMidxType +/// \installed_headerfile +/// \brief The type of an item's index. +#[derive(PartialEq, Eq)] +#[repr(u8)] +pub enum AMidxType { + /// The default tag, not a type signifier. + Default = 0, + /// A UTF-8 string view key. + Key, + /// A 64-bit unsigned integer position. 
+ Pos, +} + +impl Default for AMidxType { + fn default() -> Self { + Self::Default + } +} + +impl From<&AMindex> for AMidxType { + fn from(index: &AMindex) -> Self { + use AMindex::*; + + match index { + Key(_) => Self::Key, + Pos(_) => Self::Pos, + } + } +} diff --git a/rust/automerge-c/src/item.rs b/rust/automerge-c/src/item.rs new file mode 100644 index 00000000..94735464 --- /dev/null +++ b/rust/automerge-c/src/item.rs @@ -0,0 +1,1963 @@ +use automerge as am; + +use std::any::type_name; +use std::borrow::Cow; +use std::cell::{RefCell, UnsafeCell}; +use std::rc::Rc; + +use crate::actor_id::AMactorId; +use crate::byte_span::{to_str, AMbyteSpan}; +use crate::change::AMchange; +use crate::doc::AMdoc; +use crate::index::{AMidxType, AMindex}; +use crate::obj::AMobjId; +use crate::result::{to_result, AMresult}; +use crate::sync::{AMsyncHave, AMsyncMessage, AMsyncState}; + +/// \struct AMunknownValue +/// \installed_headerfile +/// \brief A value (typically for a `set` operation) whose type is unknown. +#[derive(Default, Eq, PartialEq)] +#[repr(C)] +pub struct AMunknownValue { + /// The value's raw bytes. + bytes: AMbyteSpan, + /// The value's encoded type identifier. 
+ type_code: u8, +} + +pub enum Value { + ActorId(am::ActorId, UnsafeCell>), + Change(Box, UnsafeCell>), + ChangeHash(am::ChangeHash), + Doc(RefCell), + SyncHave(AMsyncHave), + SyncMessage(AMsyncMessage), + SyncState(RefCell), + Value(am::Value<'static>), +} + +impl Value { + pub fn try_into_bytes(&self) -> Result { + use am::AutomergeError::InvalidValueType; + use am::ScalarValue::*; + use am::Value::*; + + if let Self::Value(Scalar(scalar)) = &self { + if let Bytes(vector) = scalar.as_ref() { + return Ok(vector.as_slice().into()); + } + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } + + pub fn try_into_change_hash(&self) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Self::ChangeHash(change_hash) = &self { + return Ok(change_hash.into()); + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } + + pub fn try_into_counter(&self) -> Result { + use am::AutomergeError::InvalidValueType; + use am::ScalarValue::*; + use am::Value::*; + + if let Self::Value(Scalar(scalar)) = &self { + if let Counter(counter) = scalar.as_ref() { + return Ok(counter.into()); + } + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } + + pub fn try_into_int(&self) -> Result { + use am::AutomergeError::InvalidValueType; + use am::ScalarValue::*; + use am::Value::*; + + if let Self::Value(Scalar(scalar)) = &self { + if let Int(int) = scalar.as_ref() { + return Ok(*int); + } + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } + + pub fn try_into_str(&self) -> Result { + use am::AutomergeError::InvalidValueType; + use am::ScalarValue::*; + use am::Value::*; + + if let Self::Value(Scalar(scalar)) = &self { + if let Str(smol_str) = scalar.as_ref() { + return Ok(smol_str.into()); + } + } + Err(InvalidValueType { + 
expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } + + pub fn try_into_timestamp(&self) -> Result { + use am::AutomergeError::InvalidValueType; + use am::ScalarValue::*; + use am::Value::*; + + if let Self::Value(Scalar(scalar)) = &self { + if let Timestamp(timestamp) = scalar.as_ref() { + return Ok(*timestamp); + } + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } +} + +impl From for Value { + fn from(actor_id: am::ActorId) -> Self { + Self::ActorId(actor_id, Default::default()) + } +} + +impl From for Value { + fn from(auto_commit: am::AutoCommit) -> Self { + Self::Doc(RefCell::new(AMdoc::new(auto_commit))) + } +} + +impl From for Value { + fn from(change: am::Change) -> Self { + Self::Change(Box::new(change), Default::default()) + } +} + +impl From for Value { + fn from(change_hash: am::ChangeHash) -> Self { + Self::ChangeHash(change_hash) + } +} + +impl From for Value { + fn from(have: am::sync::Have) -> Self { + Self::SyncHave(AMsyncHave::new(have)) + } +} + +impl From for Value { + fn from(message: am::sync::Message) -> Self { + Self::SyncMessage(AMsyncMessage::new(message)) + } +} + +impl From for Value { + fn from(state: am::sync::State) -> Self { + Self::SyncState(RefCell::new(AMsyncState::new(state))) + } +} + +impl From> for Value { + fn from(value: am::Value<'static>) -> Self { + Self::Value(value) + } +} + +impl From for Value { + fn from(string: String) -> Self { + Self::Value(am::Value::Scalar(Cow::Owned(am::ScalarValue::Str( + string.into(), + )))) + } +} + +impl<'a> TryFrom<&'a Value> for &'a am::Change { + type Error = am::AutomergeError; + + fn try_from(value: &'a Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + + match value { + Change(change, _) => Ok(change), + _ => Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }), + } + } +} + +impl<'a> 
TryFrom<&'a Value> for &'a am::ChangeHash { + type Error = am::AutomergeError; + + fn try_from(value: &'a Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + + match value { + ChangeHash(change_hash) => Ok(change_hash), + _ => Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }), + } + } +} + +impl<'a> TryFrom<&'a Value> for &'a am::ScalarValue { + type Error = am::AutomergeError; + + fn try_from(value: &'a Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + use am::Value::*; + + if let Value(Scalar(scalar)) = value { + return Ok(scalar.as_ref()); + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } +} + +impl<'a> TryFrom<&'a Value> for &'a AMactorId { + type Error = am::AutomergeError; + + fn try_from(value: &'a Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + + match value { + ActorId(actor_id, c_actor_id) => unsafe { + Ok((*c_actor_id.get()).get_or_insert(AMactorId::new(actor_id))) + }, + _ => Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }), + } + } +} + +impl<'a> TryFrom<&'a mut Value> for &'a mut AMchange { + type Error = am::AutomergeError; + + fn try_from(value: &'a mut Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + + match value { + Change(change, c_change) => unsafe { + Ok((*c_change.get()).get_or_insert(AMchange::new(change))) + }, + _ => Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }), + } + } +} + +impl<'a> TryFrom<&'a mut Value> for &'a mut AMdoc { + type Error = am::AutomergeError; + + fn try_from(value: &'a mut Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + + match value { + Doc(doc) => Ok(doc.get_mut()), + _ => 
Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }), + } + } +} + +impl<'a> TryFrom<&'a Value> for &'a AMsyncHave { + type Error = am::AutomergeError; + + fn try_from(value: &'a Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + + match value { + SyncHave(sync_have) => Ok(sync_have), + _ => Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }), + } + } +} + +impl<'a> TryFrom<&'a Value> for &'a AMsyncMessage { + type Error = am::AutomergeError; + + fn try_from(value: &'a Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + + match value { + SyncMessage(sync_message) => Ok(sync_message), + _ => Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }), + } + } +} + +impl<'a> TryFrom<&'a mut Value> for &'a mut AMsyncState { + type Error = am::AutomergeError; + + fn try_from(value: &'a mut Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + + match value { + SyncState(sync_state) => Ok(sync_state.get_mut()), + _ => Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }), + } + } +} + +impl TryFrom<&Value> for bool { + type Error = am::AutomergeError; + + fn try_from(value: &Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + use am::ScalarValue::*; + use am::Value::*; + + if let Value(Scalar(scalar)) = value { + if let Boolean(boolean) = scalar.as_ref() { + return Ok(*boolean); + } + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } +} + +impl TryFrom<&Value> for f64 { + type Error = am::AutomergeError; + + fn try_from(value: &Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + use am::ScalarValue::*; + use am::Value::*; + + 
if let Value(Scalar(scalar)) = value { + if let F64(float) = scalar.as_ref() { + return Ok(*float); + } + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } +} + +impl TryFrom<&Value> for u64 { + type Error = am::AutomergeError; + + fn try_from(value: &Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + use am::ScalarValue::*; + use am::Value::*; + + if let Value(Scalar(scalar)) = value { + if let Uint(uint) = scalar.as_ref() { + return Ok(*uint); + } + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } +} + +impl TryFrom<&Value> for AMunknownValue { + type Error = am::AutomergeError; + + fn try_from(value: &Value) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidValueType; + use am::ScalarValue::*; + use am::Value::*; + + if let Value(Scalar(scalar)) = value { + if let Unknown { bytes, type_code } = scalar.as_ref() { + return Ok(Self { + bytes: bytes.as_slice().into(), + type_code: *type_code, + }); + } + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }) + } +} + +impl PartialEq for Value { + fn eq(&self, other: &Self) -> bool { + use self::Value::*; + + match (self, other) { + (ActorId(lhs, _), ActorId(rhs, _)) => *lhs == *rhs, + (Change(lhs, _), Change(rhs, _)) => lhs == rhs, + (ChangeHash(lhs), ChangeHash(rhs)) => lhs == rhs, + (Doc(lhs), Doc(rhs)) => lhs.as_ptr() == rhs.as_ptr(), + (SyncMessage(lhs), SyncMessage(rhs)) => *lhs == *rhs, + (SyncState(lhs), SyncState(rhs)) => *lhs == *rhs, + (Value(lhs), Value(rhs)) => lhs == rhs, + _ => false, + } + } +} + +#[derive(Default)] +pub struct Item { + /// The item's index. + index: Option, + /// The item's identifier. + obj_id: Option, + /// The item's value. 
+ value: Option, +} + +impl Item { + pub fn try_into_bytes(&self) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &self.value { + return value.try_into_bytes(); + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + + pub fn try_into_change_hash(&self) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &self.value { + return value.try_into_change_hash(); + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + + pub fn try_into_counter(&self) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &self.value { + return value.try_into_counter(); + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + + pub fn try_into_int(&self) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &self.value { + return value.try_into_int(); + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + + pub fn try_into_str(&self) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &self.value { + return value.try_into_str(); + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + + pub fn try_into_timestamp(&self) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &self.value { + return value.try_into_timestamp(); + } + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } +} + +impl From for Item { + fn from(actor_id: am::ActorId) -> Self { + Value::from(actor_id).into() + } +} + +impl From for Item { + fn from(auto_commit: am::AutoCommit) -> Self { + Value::from(auto_commit).into() + } +} + +impl From for Item { + fn from(change: 
am::Change) -> Self { + Value::from(change).into() + } +} + +impl From for Item { + fn from(change_hash: am::ChangeHash) -> Self { + Value::from(change_hash).into() + } +} + +impl From<(am::ObjId, am::ObjType)> for Item { + fn from((obj_id, obj_type): (am::ObjId, am::ObjType)) -> Self { + Self { + index: None, + obj_id: Some(AMobjId::new(obj_id)), + value: Some(am::Value::Object(obj_type).into()), + } + } +} + +impl From for Item { + fn from(have: am::sync::Have) -> Self { + Value::from(have).into() + } +} + +impl From for Item { + fn from(message: am::sync::Message) -> Self { + Value::from(message).into() + } +} + +impl From for Item { + fn from(state: am::sync::State) -> Self { + Value::from(state).into() + } +} + +impl From> for Item { + fn from(value: am::Value<'static>) -> Self { + Value::from(value).into() + } +} + +impl From for Item { + fn from(string: String) -> Self { + Value::from(string).into() + } +} + +impl From for Item { + fn from(value: Value) -> Self { + Self { + index: None, + obj_id: None, + value: Some(value), + } + } +} + +impl PartialEq for Item { + fn eq(&self, other: &Self) -> bool { + self.index == other.index && self.obj_id == other.obj_id && self.value == other.value + } +} + +impl<'a> TryFrom<&'a Item> for &'a am::Change { + type Error = am::AutomergeError; + + fn try_from(item: &'a Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl<'a> TryFrom<&'a Item> for &'a am::ChangeHash { + type Error = am::AutomergeError; + + fn try_from(item: &'a Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl<'a> TryFrom<&'a Item> for &'a 
am::ScalarValue { + type Error = am::AutomergeError; + + fn try_from(item: &'a Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl<'a> TryFrom<&'a Item> for &'a AMactorId { + type Error = am::AutomergeError; + + fn try_from(item: &'a Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl<'a> TryFrom<&'a mut Item> for &'a mut AMchange { + type Error = am::AutomergeError; + + fn try_from(item: &'a mut Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &mut item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl<'a> TryFrom<&'a mut Item> for &'a mut AMdoc { + type Error = am::AutomergeError; + + fn try_from(item: &'a mut Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &mut item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl From<&Item> for AMidxType { + fn from(item: &Item) -> Self { + if let Some(index) = &item.index { + return index.into(); + } + Default::default() + } +} + +impl<'a> TryFrom<&'a Item> for &'a AMsyncHave { + type Error = am::AutomergeError; + + fn try_from(item: &'a Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl<'a> 
TryFrom<&'a Item> for &'a AMsyncMessage { + type Error = am::AutomergeError; + + fn try_from(item: &'a Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl<'a> TryFrom<&'a mut Item> for &'a mut AMsyncState { + type Error = am::AutomergeError; + + fn try_from(item: &'a mut Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &mut item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl TryFrom<&Item> for bool { + type Error = am::AutomergeError; + + fn try_from(item: &Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl TryFrom<&Item> for f64 { + type Error = am::AutomergeError; + + fn try_from(item: &Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl TryFrom<&Item> for u64 { + type Error = am::AutomergeError; + + fn try_from(item: &Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &item.value { + value.try_into() + } else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl TryFrom<&Item> for AMunknownValue { + type Error = am::AutomergeError; + + fn try_from(item: &Item) -> Result { + use am::AutomergeError::InvalidValueType; + + if let Some(value) = &item.value { + value.try_into() + } 
else { + Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::>().to_string(), + }) + } + } +} + +impl TryFrom<&Item> for (am::Value<'static>, am::ObjId) { + type Error = am::AutomergeError; + + fn try_from(item: &Item) -> Result { + use self::Value::*; + use am::AutomergeError::InvalidObjId; + use am::AutomergeError::InvalidValueType; + + let expected = type_name::().to_string(); + match (&item.obj_id, &item.value) { + (None, None) | (None, Some(_)) => Err(InvalidObjId("".to_string())), + (Some(_), None) => Err(InvalidValueType { + expected, + unexpected: type_name::>().to_string(), + }), + (Some(obj_id), Some(value)) => match value { + ActorId(_, _) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + ChangeHash(_) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + Change(_, _) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + Doc(_) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + SyncHave(_) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + SyncMessage(_) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + SyncState(_) => Err(InvalidValueType { + expected, + unexpected: type_name::().to_string(), + }), + Value(v) => Ok((v.clone(), obj_id.as_ref().clone())), + }, + } + } +} + +/// \struct AMitem +/// \installed_headerfile +/// \brief An item within a result. 
+#[derive(Clone)] +pub struct AMitem(Rc); + +impl AMitem { + pub fn exact(obj_id: am::ObjId, value: Value) -> Self { + Self(Rc::new(Item { + index: None, + obj_id: Some(AMobjId::new(obj_id)), + value: Some(value), + })) + } + + pub fn indexed(index: AMindex, obj_id: am::ObjId, value: Value) -> Self { + Self(Rc::new(Item { + index: Some(index), + obj_id: Some(AMobjId::new(obj_id)), + value: Some(value), + })) + } +} + +impl AsRef for AMitem { + fn as_ref(&self) -> &Item { + self.0.as_ref() + } +} + +impl Default for AMitem { + fn default() -> Self { + Self(Rc::new(Item { + index: None, + obj_id: None, + value: None, + })) + } +} + +impl From for AMitem { + fn from(actor_id: am::ActorId) -> Self { + Value::from(actor_id).into() + } +} + +impl From for AMitem { + fn from(auto_commit: am::AutoCommit) -> Self { + Value::from(auto_commit).into() + } +} + +impl From for AMitem { + fn from(change: am::Change) -> Self { + Value::from(change).into() + } +} + +impl From for AMitem { + fn from(change_hash: am::ChangeHash) -> Self { + Value::from(change_hash).into() + } +} + +impl From<(am::ObjId, am::ObjType)> for AMitem { + fn from((obj_id, obj_type): (am::ObjId, am::ObjType)) -> Self { + Self(Rc::new(Item::from((obj_id, obj_type)))) + } +} + +impl From for AMitem { + fn from(have: am::sync::Have) -> Self { + Value::from(have).into() + } +} + +impl From for AMitem { + fn from(message: am::sync::Message) -> Self { + Value::from(message).into() + } +} + +impl From for AMitem { + fn from(state: am::sync::State) -> Self { + Value::from(state).into() + } +} + +impl From> for AMitem { + fn from(value: am::Value<'static>) -> Self { + Value::from(value).into() + } +} + +impl From for AMitem { + fn from(string: String) -> Self { + Value::from(string).into() + } +} + +impl From for AMitem { + fn from(value: Value) -> Self { + Self(Rc::new(Item::from(value))) + } +} + +impl PartialEq for AMitem { + fn eq(&self, other: &Self) -> bool { + self.as_ref() == other.as_ref() + } +} + +impl<'a> 
TryFrom<&'a AMitem> for &'a am::Change { + type Error = am::AutomergeError; + + fn try_from(item: &'a AMitem) -> Result { + item.as_ref().try_into() + } +} + +impl<'a> TryFrom<&'a AMitem> for &'a am::ChangeHash { + type Error = am::AutomergeError; + + fn try_from(item: &'a AMitem) -> Result { + item.as_ref().try_into() + } +} + +impl<'a> TryFrom<&'a AMitem> for &'a am::ScalarValue { + type Error = am::AutomergeError; + + fn try_from(item: &'a AMitem) -> Result { + item.as_ref().try_into() + } +} + +impl<'a> TryFrom<&'a AMitem> for &'a AMactorId { + type Error = am::AutomergeError; + + fn try_from(item: &'a AMitem) -> Result { + item.as_ref().try_into() + } +} + +impl<'a> TryFrom<&'a mut AMitem> for &'a mut AMchange { + type Error = am::AutomergeError; + + fn try_from(item: &'a mut AMitem) -> Result { + if let Some(item) = Rc::get_mut(&mut item.0) { + item.try_into() + } else { + Err(Self::Error::Fail) + } + } +} + +impl<'a> TryFrom<&'a mut AMitem> for &'a mut AMdoc { + type Error = am::AutomergeError; + + fn try_from(item: &'a mut AMitem) -> Result { + if let Some(item) = Rc::get_mut(&mut item.0) { + item.try_into() + } else { + Err(Self::Error::Fail) + } + } +} + +impl<'a> TryFrom<&'a AMitem> for &'a AMsyncHave { + type Error = am::AutomergeError; + + fn try_from(item: &'a AMitem) -> Result { + item.as_ref().try_into() + } +} + +impl<'a> TryFrom<&'a AMitem> for &'a AMsyncMessage { + type Error = am::AutomergeError; + + fn try_from(item: &'a AMitem) -> Result { + item.as_ref().try_into() + } +} + +impl<'a> TryFrom<&'a mut AMitem> for &'a mut AMsyncState { + type Error = am::AutomergeError; + + fn try_from(item: &'a mut AMitem) -> Result { + if let Some(item) = Rc::get_mut(&mut item.0) { + item.try_into() + } else { + Err(Self::Error::Fail) + } + } +} + +impl TryFrom<&AMitem> for bool { + type Error = am::AutomergeError; + + fn try_from(item: &AMitem) -> Result { + item.as_ref().try_into() + } +} + +impl TryFrom<&AMitem> for f64 { + type Error = am::AutomergeError; 
+ + fn try_from(item: &AMitem) -> Result { + item.as_ref().try_into() + } +} + +impl TryFrom<&AMitem> for u64 { + type Error = am::AutomergeError; + + fn try_from(item: &AMitem) -> Result { + item.as_ref().try_into() + } +} + +impl TryFrom<&AMitem> for AMunknownValue { + type Error = am::AutomergeError; + + fn try_from(item: &AMitem) -> Result { + item.as_ref().try_into() + } +} + +impl TryFrom<&AMitem> for (am::Value<'static>, am::ObjId) { + type Error = am::AutomergeError; + + fn try_from(item: &AMitem) -> Result { + item.as_ref().try_into() + } +} + +/// \ingroup enumerations +/// \enum AMvalType +/// \installed_headerfile +/// \brief The type of an item's value. +#[derive(PartialEq, Eq)] +#[repr(u32)] +pub enum AMvalType { + /// An actor identifier value. + ActorId = 1 << 1, + /// A boolean value. + Bool = 1 << 2, + /// A view onto an array of bytes value. + Bytes = 1 << 3, + /// A change value. + Change = 1 << 4, + /// A change hash value. + ChangeHash = 1 << 5, + /// A CRDT counter value. + Counter = 1 << 6, + /// The default tag, not a type signifier. + Default = 0, + /// A document value. + Doc = 1 << 7, + /// A 64-bit float value. + F64 = 1 << 8, + /// A 64-bit signed integer value. + Int = 1 << 9, + /// A null value. + Null = 1 << 10, + /// An object type value. + ObjType = 1 << 11, + /// A UTF-8 string view value. + Str = 1 << 12, + /// A synchronization have value. + SyncHave = 1 << 13, + /// A synchronization message value. + SyncMessage = 1 << 14, + /// A synchronization state value. + SyncState = 1 << 15, + /// A *nix timestamp (milliseconds) value. + Timestamp = 1 << 16, + /// A 64-bit unsigned integer value. + Uint = 1 << 17, + /// An unknown type of value. + Unknown = 1 << 18, + /// A void. 
+ Void = 1 << 0, +} + +impl Default for AMvalType { + fn default() -> Self { + Self::Default + } +} + +impl From<&am::Value<'static>> for AMvalType { + fn from(value: &am::Value<'static>) -> Self { + use am::ScalarValue::*; + use am::Value::*; + + match value { + Object(_) => Self::ObjType, + Scalar(scalar) => match scalar.as_ref() { + Boolean(_) => Self::Bool, + Bytes(_) => Self::Bytes, + Counter(_) => Self::Counter, + F64(_) => Self::F64, + Int(_) => Self::Int, + Null => Self::Null, + Str(_) => Self::Str, + Timestamp(_) => Self::Timestamp, + Uint(_) => Self::Uint, + Unknown { .. } => Self::Unknown, + }, + } + } +} + +impl From<&Value> for AMvalType { + fn from(value: &Value) -> Self { + use self::Value::*; + + match value { + ActorId(_, _) => Self::ActorId, + Change(_, _) => Self::Change, + ChangeHash(_) => Self::ChangeHash, + Doc(_) => Self::Doc, + SyncHave(_) => Self::SyncHave, + SyncMessage(_) => Self::SyncMessage, + SyncState(_) => Self::SyncState, + Value(v) => v.into(), + } + } +} + +impl From<&Item> for AMvalType { + fn from(item: &Item) -> Self { + if let Some(value) = &item.value { + return value.into(); + } + Self::Void + } +} + +/// \memberof AMitem +/// \brief Tests the equality of two items. +/// +/// \param[in] item1 A pointer to an `AMitem` struct. +/// \param[in] item2 A pointer to an `AMitem` struct. +/// \return `true` if \p item1 `==` \p item2 and `false` otherwise. 
+/// \pre \p item1 `!= NULL` +/// \pre \p item2 `!= NULL` +/// \post `!(`\p item1 `&&` \p item2 `) -> false` +/// \internal +/// +/// #Safety +/// item1 must be a valid AMitem pointer +/// item2 must be a valid AMitem pointer +#[no_mangle] +pub unsafe extern "C" fn AMitemEqual(item1: *const AMitem, item2: *const AMitem) -> bool { + match (item1.as_ref(), item2.as_ref()) { + (Some(item1), Some(item2)) => *item1 == *item2, + (None, None) | (None, Some(_)) | (Some(_), None) => false, + } +} + +/// \memberof AMitem +/// \brief Allocates a new item and initializes it from a boolean value. +/// +/// \param[in] value A boolean. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_BOOL` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +#[no_mangle] +pub unsafe extern "C" fn AMitemFromBool(value: bool) -> *mut AMresult { + AMresult::item(am::Value::from(value).into()).into() +} + +/// \memberof AMitem +/// \brief Allocates a new item and initializes it from an array of bytes value. +/// +/// \param[in] src A pointer to an array of bytes. +/// \param[in] count The count of bytes to copy from the array pointed to by +/// \p src. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_BYTES` item. +/// \pre \p src `!= NULL` +/// \pre `sizeof(`\p src `) > 0` +/// \pre \p count `<= sizeof(`\p src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// value.src must be a byte array of length >= value.count +#[no_mangle] +pub unsafe extern "C" fn AMitemFromBytes(src: *const u8, count: usize) -> *mut AMresult { + let value = std::slice::from_raw_parts(src, count); + AMresult::item(am::Value::bytes(value.to_vec()).into()).into() +} + +/// \memberof AMitem +/// \brief Allocates a new item and initializes it from a change hash value. 
+/// +/// \param[in] value A change hash as an `AMbyteSpan` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_CHANGE_HASH` item. +/// \pre \p value.src `!= NULL` +/// \pre `0 <` \p value.count `<= sizeof(`\p value.src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// value.src must be a byte array of length >= value.count +#[no_mangle] +pub unsafe extern "C" fn AMitemFromChangeHash(value: AMbyteSpan) -> *mut AMresult { + to_result(am::ChangeHash::try_from(&value)) +} + +/// \memberof AMitem +/// \brief Allocates a new item and initializes it from a CRDT counter value. +/// +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_COUNTER` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +#[no_mangle] +pub unsafe extern "C" fn AMitemFromCounter(value: i64) -> *mut AMresult { + AMresult::item(am::Value::counter(value).into()).into() +} + +/// \memberof AMitem +/// \brief Allocates a new item and initializes it from a float value. +/// +/// \param[in] value A 64-bit float. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_F64` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +#[no_mangle] +pub unsafe extern "C" fn AMitemFromF64(value: f64) -> *mut AMresult { + AMresult::item(am::Value::f64(value).into()).into() +} + +/// \memberof AMitem +/// \brief Allocates a new item and initializes it from a signed integer value. +/// +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_INT` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+#[no_mangle] +pub unsafe extern "C" fn AMitemFromInt(value: i64) -> *mut AMresult { + AMresult::item(am::Value::int(value).into()).into() +} + +/// \memberof AMitem +/// \brief Allocates a new item and initializes it from a null value. +/// +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_NULL` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +#[no_mangle] +pub unsafe extern "C" fn AMitemFromNull() -> *mut AMresult { + AMresult::item(am::Value::from(()).into()).into() +} + +/// \memberof AMitem +/// \brief Allocates a new item and initializes it from a UTF-8 string value. +/// +/// \param[in] value A UTF-8 string view as an `AMbyteSpan` struct. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_STR` item. +/// \pre \p value.src `!= NULL` +/// \pre `0 <` \p value.count `<= sizeof(`\p value.src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// value.src must be a byte array of length >= value.count +#[no_mangle] +pub unsafe extern "C" fn AMitemFromStr(value: AMbyteSpan) -> *mut AMresult { + AMresult::item(am::Value::str(to_str!(value)).into()).into() +} + +/// \memberof AMitem +/// \brief Allocates a new item and initializes it from a *nix timestamp +/// (milliseconds) value. +/// +/// \param[in] value A 64-bit signed integer. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_TIMESTAMP` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +#[no_mangle] +pub unsafe extern "C" fn AMitemFromTimestamp(value: i64) -> *mut AMresult { + AMresult::item(am::Value::timestamp(value).into()).into() +} + +/// \memberof AMitem +/// \brief Allocates a new item and initializes it from an unsigned integer value. 
+/// +/// \param[in] value A 64-bit unsigned integer. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_UINT` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +#[no_mangle] +pub unsafe extern "C" fn AMitemFromUint(value: u64) -> *mut AMresult { + AMresult::item(am::Value::uint(value).into()).into() +} + +/// \memberof AMitem +/// \brief Gets the type of an item's index. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \return An `AMidxType` enum tag. +/// \pre \p item `!= NULL` +/// \post `(`\p item `== NULL) -> 0` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemIdxType(item: *const AMitem) -> AMidxType { + if let Some(item) = item.as_ref() { + return item.0.as_ref().into(); + } + Default::default() +} + +/// \memberof AMitem +/// \brief Gets the object identifier of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \return A pointer to an `AMobjId` struct. +/// \pre \p item `!= NULL` +/// \post `(`\p item `== NULL) -> NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemObjId(item: *const AMitem) -> *const AMobjId { + if let Some(item) = item.as_ref() { + if let Some(obj_id) = &item.as_ref().obj_id { + return obj_id; + } + } + std::ptr::null() +} + +/// \memberof AMitem +/// \brief Gets the UTF-8 string view key index of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to a UTF-8 string view as an `AMbyteSpan` struct. +/// \return `true` if `AMitemIdxType(`\p item `) == AM_IDX_TYPE_KEY` and +/// \p *value has been reassigned, `false` otherwise. 
+/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemKey(item: *const AMitem, value: *mut AMbyteSpan) -> bool { + if let Some(item) = item.as_ref() { + if let Some(index) = &item.as_ref().index { + if let Ok(key) = index.try_into() { + if !value.is_null() { + *value = key; + return true; + } + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the unsigned integer position index of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to a `size_t`. +/// \return `true` if `AMitemIdxType(`\p item `) == AM_IDX_TYPE_POS` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemPos(item: *const AMitem, value: *mut usize) -> bool { + if let Some(item) = item.as_ref() { + if let Some(index) = &item.as_ref().index { + if let Ok(pos) = index.try_into() { + if !value.is_null() { + *value = pos; + return true; + } + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the reference count of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \return A 64-bit unsigned integer. +/// \pre \p item `!= NULL` +/// \post `(`\p item `== NULL) -> 0` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemRefCount(item: *const AMitem) -> usize { + if let Some(item) = item.as_ref() { + return Rc::strong_count(&item.0); + } + 0 +} + +/// \memberof AMitem +/// \brief Gets a new result for an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \return A pointer to an `AMresult` struct. 
+/// \pre \p item `!= NULL` +/// \post `(`\p item `== NULL) -> NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemResult(item: *const AMitem) -> *mut AMresult { + if let Some(item) = item.as_ref() { + return AMresult::item(item.clone()).into(); + } + std::ptr::null_mut() +} + +/// \memberof AMitem +/// \brief Gets the actor identifier value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to an `AMactorId` struct pointer. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_ACTOR_ID` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToActorId( + item: *const AMitem, + value: *mut *const AMactorId, +) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(actor_id) = <&AMactorId>::try_from(item) { + if !value.is_null() { + *value = actor_id; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the boolean value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to a boolean. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_BOOL` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToBool(item: *const AMitem, value: *mut bool) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(boolean) = item.try_into() { + if !value.is_null() { + *value = boolean; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the array of bytes value of an item. 
+/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to an `AMbyteSpan` struct. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_BYTES` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToBytes(item: *const AMitem, value: *mut AMbyteSpan) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(bytes) = item.as_ref().try_into_bytes() { + if !value.is_null() { + *value = bytes; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the change value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to an `AMchange` struct pointer. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_CHANGE` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToChange(item: *mut AMitem, value: *mut *mut AMchange) -> bool { + if let Some(item) = item.as_mut() { + if let Ok(change) = <&mut AMchange>::try_from(item) { + if !value.is_null() { + *value = change; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the change hash value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to an `AMbyteSpan` struct. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_CHANGE_HASH` and +/// \p *value has been reassigned, `false` otherwise. 
+/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToChangeHash(item: *const AMitem, value: *mut AMbyteSpan) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(change_hash) = item.as_ref().try_into_change_hash() { + if !value.is_null() { + *value = change_hash; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the CRDT counter value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to a signed 64-bit integer. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_COUNTER` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToCounter(item: *const AMitem, value: *mut i64) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(counter) = item.as_ref().try_into_counter() { + if !value.is_null() { + *value = counter; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the document value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to an `AMdoc` struct pointer. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_DOC` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToDoc(item: *mut AMitem, value: *mut *const AMdoc) -> bool { + if let Some(item) = item.as_mut() { + if let Ok(doc) = <&mut AMdoc>::try_from(item) { + if !value.is_null() { + *value = doc; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the float value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. 
+/// \param[out] value A pointer to a 64-bit float. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_F64` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToF64(item: *const AMitem, value: *mut f64) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(float) = item.try_into() { + if !value.is_null() { + *value = float; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the integer value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to a signed 64-bit integer. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_INT` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToInt(item: *const AMitem, value: *mut i64) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(int) = item.as_ref().try_into_int() { + if !value.is_null() { + *value = int; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the UTF-8 string view value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to a UTF-8 string view as an `AMbyteSpan` struct. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_STR` and +/// \p *value has been reassigned, `false` otherwise. 
+/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToStr(item: *const AMitem, value: *mut AMbyteSpan) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(str) = item.as_ref().try_into_str() { + if !value.is_null() { + *value = str; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the synchronization have value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to an `AMsyncHave` struct pointer. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_SYNC_HAVE` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToSyncHave( + item: *const AMitem, + value: *mut *const AMsyncHave, +) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(sync_have) = <&AMsyncHave>::try_from(item) { + if !value.is_null() { + *value = sync_have; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the synchronization message value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to an `AMsyncMessage` struct pointer. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_SYNC_MESSAGE` and +/// \p *value has been reassigned, `false` otherwise. 
+/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToSyncMessage( + item: *const AMitem, + value: *mut *const AMsyncMessage, +) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(sync_message) = <&AMsyncMessage>::try_from(item) { + if !value.is_null() { + *value = sync_message; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the synchronization state value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to an `AMsyncState` struct pointer. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_SYNC_STATE` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToSyncState( + item: *mut AMitem, + value: *mut *mut AMsyncState, +) -> bool { + if let Some(item) = item.as_mut() { + if let Ok(sync_state) = <&mut AMsyncState>::try_from(item) { + if !value.is_null() { + *value = sync_state; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the *nix timestamp (milliseconds) value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to a signed 64-bit integer. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_TIMESTAMP` and +/// \p *value has been reassigned, `false` otherwise. 
+/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToTimestamp(item: *const AMitem, value: *mut i64) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(timestamp) = item.as_ref().try_into_timestamp() { + if !value.is_null() { + *value = timestamp; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the unsigned integer value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to a unsigned 64-bit integer. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_UINT` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToUint(item: *const AMitem, value: *mut u64) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(uint) = item.try_into() { + if !value.is_null() { + *value = uint; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the unknown type of value of an item. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \param[out] value A pointer to an `AMunknownValue` struct. +/// \return `true` if `AMitemValType(`\p item `) == AM_VAL_TYPE_UNKNOWN` and +/// \p *value has been reassigned, `false` otherwise. +/// \pre \p item `!= NULL` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemToUnknown(item: *const AMitem, value: *mut AMunknownValue) -> bool { + if let Some(item) = item.as_ref() { + if let Ok(unknown) = item.try_into() { + if !value.is_null() { + *value = unknown; + return true; + } + } + } + false +} + +/// \memberof AMitem +/// \brief Gets the type of an item's value. +/// +/// \param[in] item A pointer to an `AMitem` struct. +/// \return An `AMvalType` enum tag. 
+/// \pre \p item `!= NULL` +/// \post `(`\p item `== NULL) -> 0` +/// \internal +/// +/// # Safety +/// item must be a valid pointer to an AMitem +#[no_mangle] +pub unsafe extern "C" fn AMitemValType(item: *const AMitem) -> AMvalType { + if let Some(item) = item.as_ref() { + return item.0.as_ref().into(); + } + Default::default() +} diff --git a/rust/automerge-c/src/items.rs b/rust/automerge-c/src/items.rs new file mode 100644 index 00000000..361078b3 --- /dev/null +++ b/rust/automerge-c/src/items.rs @@ -0,0 +1,401 @@ +use automerge as am; + +use std::ffi::c_void; +use std::marker::PhantomData; +use std::mem::size_of; + +use crate::item::AMitem; +use crate::result::AMresult; + +#[repr(C)] +struct Detail { + len: usize, + offset: isize, + ptr: *const c_void, +} + +/// \note cbindgen won't propagate the value of a `std::mem::size_of()` call +/// (https://github.com/eqrion/cbindgen/issues/252) but it will +/// propagate the name of a constant initialized from it so if the +/// constant's name is a symbolic representation of the value it can be +/// converted into a number by post-processing the header it generated. +pub const USIZE_USIZE_USIZE_: usize = size_of::(); + +impl Detail { + fn new(items: &[AMitem], offset: isize) -> Self { + Self { + len: items.len(), + offset, + ptr: items.as_ptr() as *mut c_void, + } + } + + pub fn advance(&mut self, n: isize) { + if n == 0 { + return; + } + let len = self.len as isize; + self.offset = if self.offset < 0 { + // It's reversed. + let unclipped = self.offset.checked_sub(n).unwrap_or(isize::MIN); + if unclipped >= 0 { + // Clip it to the forward stop. + len + } else { + std::cmp::min(std::cmp::max(-(len + 1), unclipped), -1) + } + } else { + let unclipped = self.offset.checked_add(n).unwrap_or(isize::MAX); + if unclipped < 0 { + // Clip it to the reverse stop. 
+ -(len + 1) + } else { + std::cmp::max(0, std::cmp::min(unclipped, len)) + } + } + } + + pub fn get_index(&self) -> usize { + (self.offset + + if self.offset < 0 { + self.len as isize + } else { + 0 + }) as usize + } + + pub fn next(&mut self, n: isize) -> Option<&mut AMitem> { + if self.is_stopped() { + return None; + } + let slice: &mut [AMitem] = + unsafe { std::slice::from_raw_parts_mut(self.ptr as *mut AMitem, self.len) }; + let value = &mut slice[self.get_index()]; + self.advance(n); + Some(value) + } + + pub fn is_stopped(&self) -> bool { + let len = self.len as isize; + self.offset < -len || self.offset == len + } + + pub fn prev(&mut self, n: isize) -> Option<&mut AMitem> { + self.advance(-n); + if self.is_stopped() { + return None; + } + let slice: &mut [AMitem] = + unsafe { std::slice::from_raw_parts_mut(self.ptr as *mut AMitem, self.len) }; + Some(&mut slice[self.get_index()]) + } + + pub fn reversed(&self) -> Self { + Self { + len: self.len, + offset: -(self.offset + 1), + ptr: self.ptr, + } + } + + pub fn rewound(&self) -> Self { + Self { + len: self.len, + offset: if self.offset < 0 { -1 } else { 0 }, + ptr: self.ptr, + } + } +} + +impl From for [u8; USIZE_USIZE_USIZE_] { + fn from(detail: Detail) -> Self { + unsafe { + std::slice::from_raw_parts((&detail as *const Detail) as *const u8, USIZE_USIZE_USIZE_) + .try_into() + .unwrap() + } + } +} + +/// \struct AMitems +/// \installed_headerfile +/// \brief A random-access iterator over a sequence of `AMitem` structs. +#[repr(C)] +#[derive(Eq, PartialEq)] +pub struct AMitems<'a> { + /// An implementation detail that is intentionally opaque. + /// \warning Modifying \p detail will cause undefined behavior. + /// \note The actual size of \p detail will vary by platform, this is just + /// the one for the platform this documentation was built on. 
+ detail: [u8; USIZE_USIZE_USIZE_], + phantom: PhantomData<&'a mut AMresult>, +} + +impl<'a> AMitems<'a> { + pub fn new(items: &[AMitem]) -> Self { + Self { + detail: Detail::new(items, 0).into(), + phantom: PhantomData, + } + } + + pub fn advance(&mut self, n: isize) { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + detail.advance(n); + } + + pub fn len(&self) -> usize { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + detail.len + } + + pub fn next(&mut self, n: isize) -> Option<&mut AMitem> { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + detail.next(n) + } + + pub fn prev(&mut self, n: isize) -> Option<&mut AMitem> { + let detail = unsafe { &mut *(self.detail.as_mut_ptr() as *mut Detail) }; + detail.prev(n) + } + + pub fn reversed(&self) -> Self { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + Self { + detail: detail.reversed().into(), + phantom: PhantomData, + } + } + + pub fn rewound(&self) -> Self { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + Self { + detail: detail.rewound().into(), + phantom: PhantomData, + } + } +} + +impl<'a> AsRef<[AMitem]> for AMitems<'a> { + fn as_ref(&self) -> &[AMitem] { + let detail = unsafe { &*(self.detail.as_ptr() as *const Detail) }; + unsafe { std::slice::from_raw_parts(detail.ptr as *const AMitem, detail.len) } + } +} + +impl<'a> Default for AMitems<'a> { + fn default() -> Self { + Self { + detail: [0; USIZE_USIZE_USIZE_], + phantom: PhantomData, + } + } +} + +impl TryFrom<&AMitems<'_>> for Vec { + type Error = am::AutomergeError; + + fn try_from(items: &AMitems<'_>) -> Result { + let mut changes = Vec::::with_capacity(items.len()); + for item in items.as_ref().iter() { + match <&am::Change>::try_from(item.as_ref()) { + Ok(change) => { + changes.push(change.clone()); + } + Err(e) => { + return Err(e); + } + } + } + Ok(changes) + } +} + +impl TryFrom<&AMitems<'_>> for Vec { + type Error = 
am::AutomergeError; + + fn try_from(items: &AMitems<'_>) -> Result { + let mut change_hashes = Vec::::with_capacity(items.len()); + for item in items.as_ref().iter() { + match <&am::ChangeHash>::try_from(item.as_ref()) { + Ok(change_hash) => { + change_hashes.push(*change_hash); + } + Err(e) => { + return Err(e); + } + } + } + Ok(change_hashes) + } +} + +impl TryFrom<&AMitems<'_>> for Vec { + type Error = am::AutomergeError; + + fn try_from(items: &AMitems<'_>) -> Result { + let mut scalars = Vec::::with_capacity(items.len()); + for item in items.as_ref().iter() { + match <&am::ScalarValue>::try_from(item.as_ref()) { + Ok(scalar) => { + scalars.push(scalar.clone()); + } + Err(e) => { + return Err(e); + } + } + } + Ok(scalars) + } +} + +/// \memberof AMitems +/// \brief Advances an iterator over a sequence of object items by at most +/// \p |n| positions where the sign of \p n is relative to the +/// iterator's direction. +/// +/// \param[in] items A pointer to an `AMitems` struct. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. +/// \pre \p items `!= NULL` +/// \internal +/// +/// #Safety +/// items must be a valid pointer to an AMitems +#[no_mangle] +pub unsafe extern "C" fn AMitemsAdvance(items: *mut AMitems, n: isize) { + if let Some(items) = items.as_mut() { + items.advance(n); + }; +} + +/// \memberof AMitems +/// \brief Tests the equality of two sequences of object items underlying a +/// pair of iterators. +/// +/// \param[in] items1 A pointer to an `AMitems` struct. +/// \param[in] items2 A pointer to an `AMitems` struct. +/// \return `true` if \p items1 `==` \p items2 and `false` otherwise. 
+/// \pre \p items1 `!= NULL` +/// \pre \p items1 `!= NULL` +/// \post `!(`\p items1 `&&` \p items2 `) -> false` +/// \internal +/// +/// #Safety +/// items1 must be a valid pointer to an AMitems +/// items2 must be a valid pointer to an AMitems +#[no_mangle] +pub unsafe extern "C" fn AMitemsEqual(items1: *const AMitems, items2: *const AMitems) -> bool { + match (items1.as_ref(), items2.as_ref()) { + (Some(items1), Some(items2)) => items1.as_ref() == items2.as_ref(), + (None, None) | (None, Some(_)) | (Some(_), None) => false, + } +} + +/// \memberof AMitems +/// \brief Gets the object item at the current position of an iterator over a +/// sequence of object items and then advances it by at most \p |n| +/// positions where the sign of \p n is relative to the iterator's +/// direction. +/// +/// \param[in] items A pointer to an `AMitems` struct. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. +/// \return A pointer to an `AMitem` struct that's `NULL` when \p items +/// was previously advanced past its forward/reverse limit. +/// \pre \p items `!= NULL` +/// \internal +/// +/// #Safety +/// items must be a valid pointer to an AMitems +#[no_mangle] +pub unsafe extern "C" fn AMitemsNext(items: *mut AMitems, n: isize) -> *mut AMitem { + if let Some(items) = items.as_mut() { + if let Some(item) = items.next(n) { + return item; + } + } + std::ptr::null_mut() +} + +/// \memberof AMitems +/// \brief Advances an iterator over a sequence of object items by at most +/// \p |n| positions where the sign of \p n is relative to the +/// iterator's direction and then gets the object item at its new +/// position. +/// +/// \param[in] items A pointer to an `AMitems` struct. +/// \param[in] n The direction (\p -n -> opposite, \p n -> same) and maximum +/// number of positions to advance. +/// \return A pointer to an `AMitem` struct that's `NULL` when \p items +/// is presently advanced past its forward/reverse limit. 
+/// \pre \p items `!= NULL` +/// \internal +/// +/// #Safety +/// items must be a valid pointer to an AMitems +#[no_mangle] +pub unsafe extern "C" fn AMitemsPrev(items: *mut AMitems, n: isize) -> *mut AMitem { + if let Some(items) = items.as_mut() { + if let Some(obj_item) = items.prev(n) { + return obj_item; + } + } + std::ptr::null_mut() +} + +/// \memberof AMitems +/// \brief Gets the size of the sequence underlying an iterator. +/// +/// \param[in] items A pointer to an `AMitems` struct. +/// \return The count of items in \p items. +/// \pre \p items `!= NULL` +/// \internal +/// +/// #Safety +/// items must be a valid pointer to an AMitems +#[no_mangle] +pub unsafe extern "C" fn AMitemsSize(items: *const AMitems) -> usize { + if let Some(items) = items.as_ref() { + return items.len(); + } + 0 +} + +/// \memberof AMitems +/// \brief Creates an iterator over the same sequence of items as the +/// given one but with the opposite position and direction. +/// +/// \param[in] items A pointer to an `AMitems` struct. +/// \return An `AMitems` struct +/// \pre \p items `!= NULL` +/// \internal +/// +/// #Safety +/// items must be a valid pointer to an AMitems +#[no_mangle] +pub unsafe extern "C" fn AMitemsReversed(items: *const AMitems) -> AMitems { + if let Some(items) = items.as_ref() { + return items.reversed(); + } + Default::default() +} + +/// \memberof AMitems +/// \brief Creates an iterator at the starting position over the same sequence +/// of items as the given one. +/// +/// \param[in] items A pointer to an `AMitems` struct. 
+/// \return An `AMitems` struct +/// \pre \p items `!= NULL` +/// \internal +/// +/// #Safety +/// items must be a valid pointer to an AMitems +#[no_mangle] +pub unsafe extern "C" fn AMitemsRewound(items: *const AMitems) -> AMitems { + if let Some(items) = items.as_ref() { + return items.rewound(); + } + Default::default() +} diff --git a/rust/automerge-c/src/lib.rs b/rust/automerge-c/src/lib.rs new file mode 100644 index 00000000..1ee1a85d --- /dev/null +++ b/rust/automerge-c/src/lib.rs @@ -0,0 +1,12 @@ +mod actor_id; +mod byte_span; +mod change; +mod doc; +mod index; +mod item; +mod items; +mod obj; +mod result; +mod sync; + +// include!(concat!(env!("OUT_DIR"), "/enum_string_functions.rs")); diff --git a/automerge-c/src/obj.rs b/rust/automerge-c/src/obj.rs similarity index 61% rename from automerge-c/src/obj.rs rename to rust/automerge-c/src/obj.rs index a674660e..3d52286c 100644 --- a/automerge-c/src/obj.rs +++ b/rust/automerge-c/src/obj.rs @@ -1,11 +1,32 @@ use automerge as am; +use std::any::type_name; use std::cell::RefCell; use std::ops::Deref; use crate::actor_id::AMactorId; -pub mod item; -pub mod items; +macro_rules! to_obj_id { + ($handle:expr) => {{ + match $handle.as_ref() { + Some(obj_id) => obj_id, + None => &automerge::ROOT, + } + }}; +} + +pub(crate) use to_obj_id; + +macro_rules! to_obj_type { + ($c_obj_type:expr) => {{ + let result: Result = (&$c_obj_type).try_into(); + match result { + Ok(obj_type) => obj_type, + Err(e) => return AMresult::error(&e.to_string()).into(), + } + }}; +} + +pub(crate) use to_obj_type; /// \struct AMobjId /// \installed_headerfile @@ -55,11 +76,11 @@ impl Deref for AMobjId { } /// \memberof AMobjId -/// \brief Gets the actor identifier of an object identifier. +/// \brief Gets the actor identifier component of an object identifier. /// /// \param[in] obj_id A pointer to an `AMobjId` struct. /// \return A pointer to an `AMactorId` struct or `NULL`. -/// \pre \p obj_id `!= NULL`. 
+/// \pre \p obj_id `!= NULL` /// \internal /// /// # Safety @@ -73,11 +94,11 @@ pub unsafe extern "C" fn AMobjIdActorId(obj_id: *const AMobjId) -> *const AMacto } /// \memberof AMobjId -/// \brief Gets the counter of an object identifier. +/// \brief Gets the counter component of an object identifier. /// /// \param[in] obj_id A pointer to an `AMobjId` struct. /// \return A 64-bit unsigned integer. -/// \pre \p obj_id `!= NULL`. +/// \pre \p obj_id `!= NULL` /// \internal /// /// # Safety @@ -100,8 +121,9 @@ pub unsafe extern "C" fn AMobjIdCounter(obj_id: *const AMobjId) -> u64 { /// \param[in] obj_id1 A pointer to an `AMobjId` struct. /// \param[in] obj_id2 A pointer to an `AMobjId` struct. /// \return `true` if \p obj_id1 `==` \p obj_id2 and `false` otherwise. -/// \pre \p obj_id1 `!= NULL`. -/// \pre \p obj_id2 `!= NULL`. +/// \pre \p obj_id1 `!= NULL` +/// \pre \p obj_id1 `!= NULL` +/// \post `!(`\p obj_id1 `&&` \p obj_id2 `) -> false` /// \internal /// /// #Safety @@ -111,26 +133,28 @@ pub unsafe extern "C" fn AMobjIdCounter(obj_id: *const AMobjId) -> u64 { pub unsafe extern "C" fn AMobjIdEqual(obj_id1: *const AMobjId, obj_id2: *const AMobjId) -> bool { match (obj_id1.as_ref(), obj_id2.as_ref()) { (Some(obj_id1), Some(obj_id2)) => obj_id1 == obj_id2, - (None, Some(_)) | (Some(_), None) | (None, None) => false, + (None, None) | (None, Some(_)) | (Some(_), None) => false, } } /// \memberof AMobjId -/// \brief Gets the index of an object identifier. +/// \brief Gets the index component of an object identifier. /// /// \param[in] obj_id A pointer to an `AMobjId` struct. /// \return A 64-bit unsigned integer. -/// \pre \p obj_id `!= NULL`. 
+/// \pre \p obj_id `!= NULL` /// \internal /// /// # Safety /// obj_id must be a valid pointer to an AMobjId #[no_mangle] pub unsafe extern "C" fn AMobjIdIndex(obj_id: *const AMobjId) -> usize { + use am::ObjId::*; + if let Some(obj_id) = obj_id.as_ref() { match obj_id.as_ref() { - am::ObjId::Id(_, _, index) => *index, - am::ObjId::Root => 0, + Id(_, _, index) => *index, + Root => 0, } } else { usize::MAX @@ -139,9 +163,13 @@ pub unsafe extern "C" fn AMobjIdIndex(obj_id: *const AMobjId) -> usize { /// \ingroup enumerations /// \enum AMobjType +/// \installed_headerfile /// \brief The type of an object value. +#[derive(PartialEq, Eq)] #[repr(u8)] pub enum AMobjType { + /// The default tag, not a type signifier. + Default = 0, /// A list. List = 1, /// A key-value map. @@ -150,12 +178,39 @@ pub enum AMobjType { Text, } -impl From for am::ObjType { - fn from(o: AMobjType) -> Self { +impl Default for AMobjType { + fn default() -> Self { + Self::Default + } +} + +impl From<&am::ObjType> for AMobjType { + fn from(o: &am::ObjType) -> Self { + use am::ObjType::*; + match o { - AMobjType::Map => am::ObjType::Map, - AMobjType::List => am::ObjType::List, - AMobjType::Text => am::ObjType::Text, + List => Self::List, + Map | Table => Self::Map, + Text => Self::Text, + } + } +} + +impl TryFrom<&AMobjType> for am::ObjType { + type Error = am::AutomergeError; + + fn try_from(c_obj_type: &AMobjType) -> Result { + use am::AutomergeError::InvalidValueType; + use AMobjType::*; + + match c_obj_type { + List => Ok(Self::List), + Map => Ok(Self::Map), + Text => Ok(Self::Text), + _ => Err(InvalidValueType { + expected: type_name::().to_string(), + unexpected: type_name::().to_string(), + }), } } } diff --git a/rust/automerge-c/src/result.rs b/rust/automerge-c/src/result.rs new file mode 100644 index 00000000..2975f38b --- /dev/null +++ b/rust/automerge-c/src/result.rs @@ -0,0 +1,660 @@ +use automerge as am; + +use std::ops::{Range, RangeFrom, RangeFull, RangeTo}; + +use 
crate::byte_span::AMbyteSpan; +use crate::index::AMindex; +use crate::item::AMitem; +use crate::items::AMitems; + +/// \struct AMresult +/// \installed_headerfile +/// \brief A discriminated union of result variants. +pub enum AMresult { + Items(Vec), + Error(String), +} + +impl AMresult { + pub(crate) fn error(s: &str) -> Self { + Self::Error(s.to_string()) + } + + pub(crate) fn item(item: AMitem) -> Self { + Self::Items(vec![item]) + } + + pub(crate) fn items(items: Vec) -> Self { + Self::Items(items) + } +} + +impl Default for AMresult { + fn default() -> Self { + Self::Items(vec![]) + } +} + +impl From for AMresult { + fn from(auto_commit: am::AutoCommit) -> Self { + Self::item(AMitem::exact(am::ROOT, auto_commit.into())) + } +} + +impl From for AMresult { + fn from(change: am::Change) -> Self { + Self::item(change.into()) + } +} + +impl From for AMresult { + fn from(change_hash: am::ChangeHash) -> Self { + Self::item(change_hash.into()) + } +} + +impl From> for AMresult { + fn from(maybe: Option) -> Self { + match maybe { + Some(change_hash) => change_hash.into(), + None => Self::item(Default::default()), + } + } +} + +impl From> for AMresult { + fn from(maybe: Result) -> Self { + match maybe { + Ok(change_hash) => change_hash.into(), + Err(e) => Self::error(&e.to_string()), + } + } +} + +impl From for AMresult { + fn from(state: am::sync::State) -> Self { + Self::item(state.into()) + } +} + +impl From> for AMresult { + fn from(pairs: am::Values<'static>) -> Self { + Self::items(pairs.map(|(v, o)| AMitem::exact(o, v.into())).collect()) + } +} + +impl From for *mut AMresult { + fn from(b: AMresult) -> Self { + Box::into_raw(Box::new(b)) + } +} + +impl From> for AMresult { + fn from(keys: am::Keys<'_, '_>) -> Self { + Self::items(keys.map(|s| s.into()).collect()) + } +} + +impl From> for AMresult { + fn from(keys: am::KeysAt<'_, '_>) -> Self { + Self::items(keys.map(|s| s.into()).collect()) + } +} + +impl From>> for AMresult { + fn from(list_range: 
am::ListRange<'static, Range>) -> Self { + Self::items( + list_range + .map(|(i, v, o)| AMitem::indexed(AMindex::Pos(i), o, v.into())) + .collect(), + ) + } +} + +impl From>> for AMresult { + fn from(list_range: am::ListRangeAt<'static, Range>) -> Self { + Self::items( + list_range + .map(|(i, v, o)| AMitem::indexed(AMindex::Pos(i), o, v.into())) + .collect(), + ) + } +} + +impl From>> for AMresult { + fn from(map_range: am::MapRange<'static, Range>) -> Self { + Self::items( + map_range + .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) + .collect(), + ) + } +} + +impl From>> for AMresult { + fn from(map_range: am::MapRangeAt<'static, Range>) -> Self { + Self::items( + map_range + .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) + .collect(), + ) + } +} + +impl From>> for AMresult { + fn from(map_range: am::MapRange<'static, RangeFrom>) -> Self { + Self::items( + map_range + .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) + .collect(), + ) + } +} + +impl From>> for AMresult { + fn from(map_range: am::MapRangeAt<'static, RangeFrom>) -> Self { + Self::items( + map_range + .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) + .collect(), + ) + } +} + +impl From> for AMresult { + fn from(map_range: am::MapRange<'static, RangeFull>) -> Self { + Self::items( + map_range + .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) + .collect(), + ) + } +} + +impl From> for AMresult { + fn from(map_range: am::MapRangeAt<'static, RangeFull>) -> Self { + Self::items( + map_range + .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) + .collect(), + ) + } +} + +impl From>> for AMresult { + fn from(map_range: am::MapRange<'static, RangeTo>) -> Self { + Self::items( + map_range + .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) + .collect(), + ) + } +} + +impl From>> for AMresult { + fn from(map_range: am::MapRangeAt<'static, RangeTo>) -> 
Self { + Self::items( + map_range + .map(|(k, v, o)| AMitem::indexed(AMindex::Key(k.into()), o, v.into())) + .collect(), + ) + } +} + +impl From> for AMresult { + fn from(maybe: Option<&am::Change>) -> Self { + Self::item(match maybe { + Some(change) => change.clone().into(), + None => Default::default(), + }) + } +} + +impl From> for AMresult { + fn from(maybe: Option) -> Self { + Self::item(match maybe { + Some(message) => message.into(), + None => Default::default(), + }) + } +} + +impl From> for AMresult { + fn from(maybe: Result<(), am::AutomergeError>) -> Self { + match maybe { + Ok(()) => Self::item(Default::default()), + Err(e) => Self::error(&e.to_string()), + } + } +} + +impl From> for AMresult { + fn from(maybe: Result) -> Self { + match maybe { + Ok(actor_id) => Self::item(actor_id.into()), + Err(e) => Self::error(&e.to_string()), + } + } +} + +impl From> for AMresult { + fn from(maybe: Result) -> Self { + match maybe { + Ok(actor_id) => Self::item(actor_id.into()), + Err(e) => Self::error(&e.to_string()), + } + } +} + +impl From> for AMresult { + fn from(maybe: Result) -> Self { + match maybe { + Ok(auto_commit) => Self::item(auto_commit.into()), + Err(e) => Self::error(&e.to_string()), + } + } +} + +impl From> for AMresult { + fn from(maybe: Result) -> Self { + match maybe { + Ok(change) => Self::item(change.into()), + Err(e) => Self::error(&e.to_string()), + } + } +} + +impl From<(Result, am::ObjType)> for AMresult { + fn from(tuple: (Result, am::ObjType)) -> Self { + match tuple { + (Ok(obj_id), obj_type) => Self::item((obj_id, obj_type).into()), + (Err(e), _) => Self::error(&e.to_string()), + } + } +} + +impl From> for AMresult { + fn from(maybe: Result) -> Self { + match maybe { + Ok(message) => Self::item(message.into()), + Err(e) => Self::error(&e.to_string()), + } + } +} + +impl From> for AMresult { + fn from(maybe: Result) -> Self { + match maybe { + Ok(state) => Self::item(state.into()), + Err(e) => Self::error(&e.to_string()), + } + } +} + 
+impl From, am::AutomergeError>> for AMresult { + fn from(maybe: Result, am::AutomergeError>) -> Self { + match maybe { + Ok(value) => Self::item(value.into()), + Err(e) => Self::error(&e.to_string()), + } + } +} + +impl From, am::ObjId)>, am::AutomergeError>> for AMresult { + fn from(maybe: Result, am::ObjId)>, am::AutomergeError>) -> Self { + match maybe { + Ok(Some((value, obj_id))) => Self::item(AMitem::exact(obj_id, value.into())), + Ok(None) => Self::item(Default::default()), + Err(e) => Self::error(&e.to_string()), + } + } +} + +impl From> for AMresult { + fn from(maybe: Result) -> Self { + match maybe { + Ok(string) => Self::item(string.into()), + Err(e) => Self::error(&e.to_string()), + } + } +} + +impl From> for AMresult { + fn from(maybe: Result) -> Self { + match maybe { + Ok(size) => Self::item(am::Value::uint(size as u64).into()), + Err(e) => Self::error(&e.to_string()), + } + } +} + +impl From, am::AutomergeError>> for AMresult { + fn from(maybe: Result, am::AutomergeError>) -> Self { + match maybe { + Ok(changes) => Self::items(changes.into_iter().map(|change| change.into()).collect()), + Err(e) => Self::error(&e.to_string()), + } + } +} + +impl From, am::AutomergeError>> for AMresult { + fn from(maybe: Result, am::AutomergeError>) -> Self { + match maybe { + Ok(changes) => Self::items( + changes + .into_iter() + .map(|change| change.clone().into()) + .collect(), + ), + Err(e) => Self::error(&e.to_string()), + } + } +} + +impl From, am::LoadChangeError>> for AMresult { + fn from(maybe: Result, am::LoadChangeError>) -> Self { + match maybe { + Ok(changes) => Self::items(changes.into_iter().map(|change| change.into()).collect()), + Err(e) => Self::error(&e.to_string()), + } + } +} + +impl From, am::AutomergeError>> for AMresult { + fn from(maybe: Result, am::AutomergeError>) -> Self { + match maybe { + Ok(change_hashes) => Self::items( + change_hashes + .into_iter() + .map(|change_hash| change_hash.into()) + .collect(), + ), + Err(e) => 
Self::error(&e.to_string()), + } + } +} + +impl From, am::InvalidChangeHashSlice>> for AMresult { + fn from(maybe: Result, am::InvalidChangeHashSlice>) -> Self { + match maybe { + Ok(change_hashes) => Self::items( + change_hashes + .into_iter() + .map(|change_hash| change_hash.into()) + .collect(), + ), + Err(e) => Self::error(&e.to_string()), + } + } +} + +impl From, am::ObjId)>, am::AutomergeError>> for AMresult { + fn from(maybe: Result, am::ObjId)>, am::AutomergeError>) -> Self { + match maybe { + Ok(pairs) => Self::items( + pairs + .into_iter() + .map(|(v, o)| AMitem::exact(o, v.into())) + .collect(), + ), + Err(e) => Self::error(&e.to_string()), + } + } +} + +impl From, am::AutomergeError>> for AMresult { + fn from(maybe: Result, am::AutomergeError>) -> Self { + match maybe { + Ok(bytes) => Self::item(am::Value::bytes(bytes).into()), + Err(e) => Self::error(&e.to_string()), + } + } +} + +impl From<&[am::Change]> for AMresult { + fn from(changes: &[am::Change]) -> Self { + Self::items(changes.iter().map(|change| change.clone().into()).collect()) + } +} + +impl From> for AMresult { + fn from(changes: Vec<&am::Change>) -> Self { + Self::items( + changes + .into_iter() + .map(|change| change.clone().into()) + .collect(), + ) + } +} + +impl From<&[am::ChangeHash]> for AMresult { + fn from(change_hashes: &[am::ChangeHash]) -> Self { + Self::items( + change_hashes + .iter() + .map(|change_hash| (*change_hash).into()) + .collect(), + ) + } +} + +impl From<&[am::sync::Have]> for AMresult { + fn from(haves: &[am::sync::Have]) -> Self { + Self::items(haves.iter().map(|have| have.clone().into()).collect()) + } +} + +impl From> for AMresult { + fn from(change_hashes: Vec) -> Self { + Self::items( + change_hashes + .into_iter() + .map(|change_hash| change_hash.into()) + .collect(), + ) + } +} + +impl From> for AMresult { + fn from(haves: Vec) -> Self { + Self::items(haves.into_iter().map(|have| have.into()).collect()) + } +} + +impl From> for AMresult { + fn from(bytes: 
Vec) -> Self { + Self::item(am::Value::bytes(bytes).into()) + } +} + +pub fn to_result>(r: R) -> *mut AMresult { + (r.into()).into() +} + +/// \ingroup enumerations +/// \enum AMstatus +/// \installed_headerfile +/// \brief The status of an API call. +#[derive(PartialEq, Eq)] +#[repr(u8)] +pub enum AMstatus { + /// Success. + /// \note This tag is unalphabetized so that `0` indicates success. + Ok, + /// Failure due to an error. + Error, + /// Failure due to an invalid result. + InvalidResult, +} + +/// \memberof AMresult +/// \brief Concatenates the items from two results. +/// +/// \param[in] dest A pointer to an `AMresult` struct. +/// \param[in] src A pointer to an `AMresult` struct. +/// \return A pointer to an `AMresult` struct with the items from \p dest in +/// their original order followed by the items from \p src in their +/// original order. +/// \pre \p dest `!= NULL` +/// \pre \p src `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. +/// \internal +/// +/// # Safety +/// dest must be a valid pointer to an AMresult +/// src must be a valid pointer to an AMresult +#[no_mangle] +pub unsafe extern "C" fn AMresultCat(dest: *const AMresult, src: *const AMresult) -> *mut AMresult { + use AMresult::*; + + match (dest.as_ref(), src.as_ref()) { + (Some(dest), Some(src)) => match (dest, src) { + (Items(dest_items), Items(src_items)) => { + return AMresult::items( + dest_items + .iter() + .cloned() + .chain(src_items.iter().cloned()) + .collect(), + ) + .into(); + } + (Error(_), Error(_)) | (Error(_), Items(_)) | (Items(_), Error(_)) => { + AMresult::error("Invalid `AMresult`").into() + } + }, + (None, None) | (None, Some(_)) | (Some(_), None) => { + AMresult::error("Invalid `AMresult*`").into() + } + } +} + +/// \memberof AMresult +/// \brief Gets a result's error message string. +/// +/// \param[in] result A pointer to an `AMresult` struct. 
+/// \return A UTF-8 string view as an `AMbyteSpan` struct. +/// \pre \p result `!= NULL` +/// \internal +/// +/// # Safety +/// result must be a valid pointer to an AMresult +#[no_mangle] +pub unsafe extern "C" fn AMresultError(result: *const AMresult) -> AMbyteSpan { + use AMresult::*; + + if let Some(Error(message)) = result.as_ref() { + return message.as_bytes().into(); + } + Default::default() +} + +/// \memberof AMresult +/// \brief Deallocates the storage for a result. +/// +/// \param[in] result A pointer to an `AMresult` struct. +/// \pre \p result `!= NULL` +/// \internal +/// +/// # Safety +/// result must be a valid pointer to an AMresult +#[no_mangle] +pub unsafe extern "C" fn AMresultFree(result: *mut AMresult) { + if !result.is_null() { + let result: AMresult = *Box::from_raw(result); + drop(result) + } +} + +/// \memberof AMresult +/// \brief Gets a result's first item. +/// +/// \param[in] result A pointer to an `AMresult` struct. +/// \return A pointer to an `AMitem` struct. +/// \pre \p result `!= NULL` +/// \internal +/// +/// # Safety +/// result must be a valid pointer to an AMresult +#[no_mangle] +pub unsafe extern "C" fn AMresultItem(result: *mut AMresult) -> *mut AMitem { + use AMresult::*; + + if let Some(Items(items)) = result.as_mut() { + if !items.is_empty() { + return &mut items[0]; + } + } + std::ptr::null_mut() +} + +/// \memberof AMresult +/// \brief Gets a result's items. +/// +/// \param[in] result A pointer to an `AMresult` struct. +/// \return An `AMitems` struct. +/// \pre \p result `!= NULL` +/// \internal +/// +/// # Safety +/// result must be a valid pointer to an AMresult +#[no_mangle] +pub unsafe extern "C" fn AMresultItems<'a>(result: *mut AMresult) -> AMitems<'a> { + use AMresult::*; + + if let Some(Items(items)) = result.as_mut() { + if !items.is_empty() { + return AMitems::new(items); + } + } + Default::default() +} + +/// \memberof AMresult +/// \brief Gets the size of a result. 
+/// +/// \param[in] result A pointer to an `AMresult` struct. +/// \return The count of items within \p result. +/// \pre \p result `!= NULL` +/// \internal +/// +/// # Safety +/// result must be a valid pointer to an AMresult +#[no_mangle] +pub unsafe extern "C" fn AMresultSize(result: *const AMresult) -> usize { + use self::AMresult::*; + + if let Some(Items(items)) = result.as_ref() { + return items.len(); + } + 0 +} + +/// \memberof AMresult +/// \brief Gets the status code of a result. +/// +/// \param[in] result A pointer to an `AMresult` struct. +/// \return An `AMstatus` enum tag. +/// \pre \p result `!= NULL` +/// \internal +/// +/// # Safety +/// result must be a valid pointer to an AMresult +#[no_mangle] +pub unsafe extern "C" fn AMresultStatus(result: *const AMresult) -> AMstatus { + use AMresult::*; + + if let Some(result) = result.as_ref() { + match result { + Error(_) => { + return AMstatus::Error; + } + _ => { + return AMstatus::Ok; + } + } + } + AMstatus::InvalidResult +} diff --git a/automerge-c/src/sync.rs b/rust/automerge-c/src/sync.rs similarity index 79% rename from automerge-c/src/sync.rs rename to rust/automerge-c/src/sync.rs index cfed1af5..fe0332a1 100644 --- a/automerge-c/src/sync.rs +++ b/rust/automerge-c/src/sync.rs @@ -1,7 +1,7 @@ mod have; -mod haves; mod message; mod state; +pub(crate) use have::AMsyncHave; pub(crate) use message::{to_sync_message, AMsyncMessage}; pub(crate) use state::AMsyncState; diff --git a/automerge-c/src/sync/have.rs b/rust/automerge-c/src/sync/have.rs similarity index 53% rename from automerge-c/src/sync/have.rs rename to rust/automerge-c/src/sync/have.rs index f7ff4cb0..37d2031f 100644 --- a/automerge-c/src/sync/have.rs +++ b/rust/automerge-c/src/sync/have.rs @@ -1,23 +1,23 @@ use automerge as am; -use crate::change_hashes::AMchangeHashes; +use crate::result::{to_result, AMresult}; /// \struct AMsyncHave /// \installed_headerfile /// \brief A summary of the changes that the sender of a synchronization /// 
message already has. #[derive(Clone, Eq, PartialEq)] -pub struct AMsyncHave(*const am::sync::Have); +pub struct AMsyncHave(am::sync::Have); impl AMsyncHave { - pub fn new(have: &am::sync::Have) -> Self { + pub fn new(have: am::sync::Have) -> Self { Self(have) } } impl AsRef for AMsyncHave { fn as_ref(&self) -> &am::sync::Have { - unsafe { &*self.0 } + &self.0 } } @@ -25,17 +25,18 @@ impl AsRef for AMsyncHave { /// \brief Gets the heads of the sender. /// /// \param[in] sync_have A pointer to an `AMsyncHave` struct. -/// \return An `AMchangeHashes` struct. -/// \pre \p sync_have `!= NULL`. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. +/// \pre \p sync_have `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// sync_have must be a valid pointer to an AMsyncHave #[no_mangle] -pub unsafe extern "C" fn AMsyncHaveLastSync(sync_have: *const AMsyncHave) -> AMchangeHashes { - if let Some(sync_have) = sync_have.as_ref() { - AMchangeHashes::new(&sync_have.as_ref().last_sync) - } else { - AMchangeHashes::default() - } +pub unsafe extern "C" fn AMsyncHaveLastSync(sync_have: *const AMsyncHave) -> *mut AMresult { + to_result(match sync_have.as_ref() { + Some(sync_have) => sync_have.as_ref().last_sync.as_slice(), + None => Default::default(), + }) } diff --git a/automerge-c/src/sync/message.rs b/rust/automerge-c/src/sync/message.rs similarity index 52% rename from automerge-c/src/sync/message.rs rename to rust/automerge-c/src/sync/message.rs index 7e398f8c..bdb1db34 100644 --- a/automerge-c/src/sync/message.rs +++ b/rust/automerge-c/src/sync/message.rs @@ -3,18 +3,15 @@ use std::cell::RefCell; use std::collections::BTreeMap; use crate::change::AMchange; -use crate::change_hashes::AMchangeHashes; -use crate::changes::AMchanges; use crate::result::{to_result, AMresult}; use crate::sync::have::AMsyncHave; -use 
crate::sync::haves::AMsyncHaves; macro_rules! to_sync_message { ($handle:expr) => {{ let handle = $handle.as_ref(); match handle { Some(b) => b, - None => return AMresult::err("Invalid AMsyncMessage pointer").into(), + None => return AMresult::error("Invalid `AMsyncMessage*`").into(), } }}; } @@ -51,55 +48,54 @@ impl AsRef for AMsyncMessage { /// \brief Gets the changes for the recipient to apply. /// /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. -/// \return An `AMchanges` struct. -/// \pre \p sync_message `!= NULL`. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE` items. +/// \pre \p sync_message `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// sync_message must be a valid pointer to an AMsyncMessage #[no_mangle] -pub unsafe extern "C" fn AMsyncMessageChanges(sync_message: *const AMsyncMessage) -> AMchanges { - if let Some(sync_message) = sync_message.as_ref() { - AMchanges::new( - &sync_message.body.changes, - &mut sync_message.changes_storage.borrow_mut(), - ) - } else { - AMchanges::default() - } +pub unsafe extern "C" fn AMsyncMessageChanges(sync_message: *const AMsyncMessage) -> *mut AMresult { + to_result(match sync_message.as_ref() { + Some(sync_message) => sync_message.body.changes.as_slice(), + None => Default::default(), + }) } /// \memberof AMsyncMessage -/// \brief Decodes a sequence of bytes into a synchronization message. +/// \brief Decodes an array of bytes into a synchronization message. /// /// \param[in] src A pointer to an array of bytes. -/// \param[in] count The number of bytes in \p src to decode. -/// \return A pointer to an `AMresult` struct containing an `AMsyncMessage` -/// struct. -/// \pre \p src `!= NULL`. -/// \pre `0 <` \p count `<= sizeof(`\p src`)`. 
-/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] count The count of bytes to decode from the array pointed to by +/// \p src. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_SYNC_MESSAGE` item. +/// \pre \p src `!= NULL` +/// \pre `sizeof(`\p src `) > 0` +/// \pre \p count `<= sizeof(`\p src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal +/// /// # Safety -/// src must be a byte array of size `>= count` +/// src must be a byte array of length `>= count` #[no_mangle] pub unsafe extern "C" fn AMsyncMessageDecode(src: *const u8, count: usize) -> *mut AMresult { - let mut data = Vec::new(); - data.extend_from_slice(std::slice::from_raw_parts(src, count)); - to_result(am::sync::Message::decode(&data)) + let data = std::slice::from_raw_parts(src, count); + to_result(am::sync::Message::decode(data)) } /// \memberof AMsyncMessage -/// \brief Encodes a synchronization message as a sequence of bytes. +/// \brief Encodes a synchronization message as an array of bytes. /// /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. -/// \return A pointer to an `AMresult` struct containing an array of bytes as -/// an `AMbyteSpan` struct. -/// \pre \p sync_message `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_BYTES` item. +/// \pre \p sync_message `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal +/// /// # Safety /// sync_message must be a valid pointer to an AMsyncMessage #[no_mangle] @@ -112,41 +108,40 @@ pub unsafe extern "C" fn AMsyncMessageEncode(sync_message: *const AMsyncMessage) /// \brief Gets a summary of the changes that the sender already has. /// /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. -/// \return An `AMhaves` struct. -/// \pre \p sync_message `!= NULL`. +/// \return A pointer to an `AMresult` struct with `AM_SYNC_HAVE` items. +/// \pre \p sync_message `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// sync_message must be a valid pointer to an AMsyncMessage #[no_mangle] -pub unsafe extern "C" fn AMsyncMessageHaves(sync_message: *const AMsyncMessage) -> AMsyncHaves { - if let Some(sync_message) = sync_message.as_ref() { - AMsyncHaves::new( - &sync_message.as_ref().have, - &mut sync_message.haves_storage.borrow_mut(), - ) - } else { - AMsyncHaves::default() - } +pub unsafe extern "C" fn AMsyncMessageHaves(sync_message: *const AMsyncMessage) -> *mut AMresult { + to_result(match sync_message.as_ref() { + Some(sync_message) => sync_message.as_ref().have.as_slice(), + None => Default::default(), + }) } /// \memberof AMsyncMessage /// \brief Gets the heads of the sender. /// /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. -/// \return An `AMchangeHashes` struct. -/// \pre \p sync_message `!= NULL`. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. +/// \pre \p sync_message `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety /// sync_message must be a valid pointer to an AMsyncMessage #[no_mangle] -pub unsafe extern "C" fn AMsyncMessageHeads(sync_message: *const AMsyncMessage) -> AMchangeHashes { - if let Some(sync_message) = sync_message.as_ref() { - AMchangeHashes::new(&sync_message.as_ref().heads) - } else { - AMchangeHashes::default() - } +pub unsafe extern "C" fn AMsyncMessageHeads(sync_message: *const AMsyncMessage) -> *mut AMresult { + to_result(match sync_message.as_ref() { + Some(sync_message) => sync_message.as_ref().heads.as_slice(), + None => Default::default(), + }) } /// \memberof AMsyncMessage @@ -154,17 +149,18 @@ pub unsafe extern "C" fn AMsyncMessageHeads(sync_message: *const AMsyncMessage) /// by the recipient. /// /// \param[in] sync_message A pointer to an `AMsyncMessage` struct. -/// \return An `AMchangeHashes` struct. -/// \pre \p sync_message `!= NULL`. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. +/// \pre \p sync_message `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety /// sync_message must be a valid pointer to an AMsyncMessage #[no_mangle] -pub unsafe extern "C" fn AMsyncMessageNeeds(sync_message: *const AMsyncMessage) -> AMchangeHashes { - if let Some(sync_message) = sync_message.as_ref() { - AMchangeHashes::new(&sync_message.as_ref().need) - } else { - AMchangeHashes::default() - } +pub unsafe extern "C" fn AMsyncMessageNeeds(sync_message: *const AMsyncMessage) -> *mut AMresult { + to_result(match sync_message.as_ref() { + Some(sync_message) => sync_message.as_ref().need.as_slice(), + None => Default::default(), + }) } diff --git a/automerge-c/src/sync/state.rs b/rust/automerge-c/src/sync/state.rs similarity index 57% rename from automerge-c/src/sync/state.rs rename to rust/automerge-c/src/sync/state.rs index 54fd5fe4..1d85ed98 100644 --- a/automerge-c/src/sync/state.rs +++ b/rust/automerge-c/src/sync/state.rs @@ -2,17 +2,15 @@ use automerge as am; use std::cell::RefCell; use std::collections::BTreeMap; -use crate::change_hashes::AMchangeHashes; use crate::result::{to_result, AMresult}; use crate::sync::have::AMsyncHave; -use crate::sync::haves::AMsyncHaves; macro_rules! to_sync_state { ($handle:expr) => {{ let handle = $handle.as_ref(); match handle { Some(b) => b, - None => return AMresult::err("Invalid AMsyncState pointer").into(), + None => return AMresult::error("Invalid `AMsyncState*`").into(), } }}; } @@ -56,36 +54,37 @@ impl From for *mut AMsyncState { } /// \memberof AMsyncState -/// \brief Decodes a sequence of bytes into a synchronization state. +/// \brief Decodes an array of bytes into a synchronization state. /// /// \param[in] src A pointer to an array of bytes. -/// \param[in] count The number of bytes in \p src to decode. -/// \return A pointer to an `AMresult` struct containing an `AMsyncState` -/// struct. -/// \pre \p src `!= NULL`. -/// \pre `0 <` \p count `<= sizeof(`\p src`)`. 
-/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \param[in] count The count of bytes to decode from the array pointed to by +/// \p src. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_SYNC_STATE` item. +/// \pre \p src `!= NULL` +/// \pre `sizeof(`\p src `) > 0` +/// \pre \p count `<= sizeof(`\p src `)` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal +/// /// # Safety -/// src must be a byte array of size `>= count` +/// src must be a byte array of length `>= count` #[no_mangle] pub unsafe extern "C" fn AMsyncStateDecode(src: *const u8, count: usize) -> *mut AMresult { - let mut data = Vec::new(); - data.extend_from_slice(std::slice::from_raw_parts(src, count)); - to_result(am::sync::State::decode(&data)) + let data = std::slice::from_raw_parts(src, count); + to_result(am::sync::State::decode(data)) } /// \memberof AMsyncState -/// \brief Encodes a synchronizaton state as a sequence of bytes. +/// \brief Encodes a synchronization state as an array of bytes. /// /// \param[in] sync_state A pointer to an `AMsyncState` struct. -/// \return A pointer to an `AMresult` struct containing an array of bytes as -/// an `AMbyteSpan` struct. -/// \pre \p sync_state `!= NULL`. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_BYTE_SPAN` item. +/// \pre \p sync_state `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal +/// /// # Safety /// sync_state must be a valid pointer to an AMsyncState #[no_mangle] @@ -100,8 +99,9 @@ pub unsafe extern "C" fn AMsyncStateEncode(sync_state: *const AMsyncState) -> *m /// \param[in] sync_state1 A pointer to an `AMsyncState` struct. /// \param[in] sync_state2 A pointer to an `AMsyncState` struct. /// \return `true` if \p sync_state1 `==` \p sync_state2 and `false` otherwise. -/// \pre \p sync_state1 `!= NULL`. -/// \pre \p sync_state2 `!= NULL`. +/// \pre \p sync_state1 `!= NULL` +/// \pre \p sync_state2 `!= NULL` +/// \post `!(`\p sync_state1 `&&` \p sync_state2 `) -> false` /// \internal /// /// #Safety @@ -114,18 +114,17 @@ pub unsafe extern "C" fn AMsyncStateEqual( ) -> bool { match (sync_state1.as_ref(), sync_state2.as_ref()) { (Some(sync_state1), Some(sync_state2)) => sync_state1.as_ref() == sync_state2.as_ref(), - (None, Some(_)) | (Some(_), None) | (None, None) => false, + (None, None) | (None, Some(_)) | (Some(_), None) => false, } } /// \memberof AMsyncState -/// \brief Allocates a new synchronization state and initializes it with -/// defaults. +/// \brief Allocates a new synchronization state and initializes it from +/// default values. /// -/// \return A pointer to an `AMresult` struct containing a pointer to an -/// `AMsyncState` struct. -/// \warning The returned `AMresult` struct must be deallocated with `AMfree()` -/// in order to prevent a memory leak. +/// \return A pointer to an `AMresult` struct with an `AM_VAL_TYPE_SYNC_STATE` item. +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. #[no_mangle] pub extern "C" fn AMsyncStateInit() -> *mut AMresult { to_result(am::sync::State::new()) @@ -135,40 +134,36 @@ pub extern "C" fn AMsyncStateInit() -> *mut AMresult { /// \brief Gets the heads that are shared by both peers. /// /// \param[in] sync_state A pointer to an `AMsyncState` struct. -/// \return An `AMchangeHashes` struct. 
-/// \pre \p sync_state `!= NULL`. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. +/// \pre \p sync_state `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// sync_state must be a valid pointer to an AMsyncState #[no_mangle] -pub unsafe extern "C" fn AMsyncStateSharedHeads(sync_state: *const AMsyncState) -> AMchangeHashes { - if let Some(sync_state) = sync_state.as_ref() { - AMchangeHashes::new(&sync_state.as_ref().shared_heads) - } else { - AMchangeHashes::default() - } +pub unsafe extern "C" fn AMsyncStateSharedHeads(sync_state: *const AMsyncState) -> *mut AMresult { + let sync_state = to_sync_state!(sync_state); + to_result(sync_state.as_ref().shared_heads.as_slice()) } /// \memberof AMsyncState /// \brief Gets the heads that were last sent by this peer. /// /// \param[in] sync_state A pointer to an `AMsyncState` struct. -/// \return An `AMchangeHashes` struct. -/// \pre \p sync_state `!= NULL`. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. +/// \pre \p sync_state `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
/// \internal /// /// # Safety /// sync_state must be a valid pointer to an AMsyncState #[no_mangle] -pub unsafe extern "C" fn AMsyncStateLastSentHeads( - sync_state: *const AMsyncState, -) -> AMchangeHashes { - if let Some(sync_state) = sync_state.as_ref() { - AMchangeHashes::new(&sync_state.as_ref().last_sent_heads) - } else { - AMchangeHashes::default() - } +pub unsafe extern "C" fn AMsyncStateLastSentHeads(sync_state: *const AMsyncState) -> *mut AMresult { + let sync_state = to_sync_state!(sync_state); + to_result(sync_state.as_ref().last_sent_heads.as_slice()) } /// \memberof AMsyncState @@ -176,11 +171,13 @@ pub unsafe extern "C" fn AMsyncStateLastSentHeads( /// /// \param[in] sync_state A pointer to an `AMsyncState` struct. /// \param[out] has_value A pointer to a boolean flag that is set to `true` if -/// the returned `AMhaves` struct is relevant, `false` otherwise. -/// \return An `AMhaves` struct. -/// \pre \p sync_state `!= NULL`. -/// \pre \p has_value `!= NULL`. -/// \internal +/// the returned `AMitems` struct is relevant, `false` otherwise. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_SYNC_HAVE` items. +/// \pre \p sync_state `!= NULL` +/// \pre \p has_value `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. 
+//// \internal /// /// # Safety /// sync_state must be a valid pointer to an AMsyncState @@ -189,15 +186,15 @@ pub unsafe extern "C" fn AMsyncStateLastSentHeads( pub unsafe extern "C" fn AMsyncStateTheirHaves( sync_state: *const AMsyncState, has_value: *mut bool, -) -> AMsyncHaves { +) -> *mut AMresult { if let Some(sync_state) = sync_state.as_ref() { if let Some(haves) = &sync_state.as_ref().their_have { *has_value = true; - return AMsyncHaves::new(haves, &mut sync_state.their_haves_storage.borrow_mut()); - }; + return to_result(haves.as_slice()); + } }; *has_value = false; - AMsyncHaves::default() + to_result(Vec::::new()) } /// \memberof AMsyncState @@ -205,29 +202,31 @@ pub unsafe extern "C" fn AMsyncStateTheirHaves( /// /// \param[in] sync_state A pointer to an `AMsyncState` struct. /// \param[out] has_value A pointer to a boolean flag that is set to `true` if -/// the returned `AMchangeHashes` struct is relevant, `false` -/// otherwise. -/// \return An `AMchangeHashes` struct. -/// \pre \p sync_state `!= NULL`. -/// \pre \p has_value `!= NULL`. +/// the returned `AMitems` struct is relevant, `false` +/// otherwise. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. +/// \pre \p sync_state `!= NULL` +/// \pre \p has_value `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// sync_state must be a valid pointer to an AMsyncState -/// has_value must be a valid pointer to a bool. 
+/// has_value must be a valid pointer to a bool #[no_mangle] pub unsafe extern "C" fn AMsyncStateTheirHeads( sync_state: *const AMsyncState, has_value: *mut bool, -) -> AMchangeHashes { +) -> *mut AMresult { if let Some(sync_state) = sync_state.as_ref() { if let Some(change_hashes) = &sync_state.as_ref().their_heads { *has_value = true; - return AMchangeHashes::new(change_hashes); + return to_result(change_hashes.as_slice()); } }; *has_value = false; - AMchangeHashes::default() + to_result(Vec::::new()) } /// \memberof AMsyncState @@ -235,27 +234,29 @@ pub unsafe extern "C" fn AMsyncStateTheirHeads( /// /// \param[in] sync_state A pointer to an `AMsyncState` struct. /// \param[out] has_value A pointer to a boolean flag that is set to `true` if -/// the returned `AMchangeHashes` struct is relevant, `false` -/// otherwise. -/// \return An `AMchangeHashes` struct. -/// \pre \p sync_state `!= NULL`. -/// \pre \p has_value `!= NULL`. +/// the returned `AMitems` struct is relevant, `false` +/// otherwise. +/// \return A pointer to an `AMresult` struct with `AM_VAL_TYPE_CHANGE_HASH` items. +/// \pre \p sync_state `!= NULL` +/// \pre \p has_value `!= NULL` +/// \warning The returned `AMresult` struct pointer must be passed to +/// `AMresultFree()` in order to avoid a memory leak. /// \internal /// /// # Safety /// sync_state must be a valid pointer to an AMsyncState -/// has_value must be a valid pointer to a bool. 
+/// has_value must be a valid pointer to a bool #[no_mangle] pub unsafe extern "C" fn AMsyncStateTheirNeeds( sync_state: *const AMsyncState, has_value: *mut bool, -) -> AMchangeHashes { +) -> *mut AMresult { if let Some(sync_state) = sync_state.as_ref() { if let Some(change_hashes) = &sync_state.as_ref().their_need { *has_value = true; - return AMchangeHashes::new(change_hashes); + return to_result(change_hashes.as_slice()); } }; *has_value = false; - AMchangeHashes::default() + to_result(Vec::::new()) } diff --git a/rust/automerge-c/src/utils/result.c b/rust/automerge-c/src/utils/result.c new file mode 100644 index 00000000..f922ca31 --- /dev/null +++ b/rust/automerge-c/src/utils/result.c @@ -0,0 +1,33 @@ +#include + +#include + +AMresult* AMresultFrom(int count, ...) { + AMresult* result = NULL; + bool is_ok = true; + va_list args; + va_start(args, count); + for (int i = 0; i != count; ++i) { + AMresult* src = va_arg(args, AMresult*); + AMresult* dest = result; + is_ok = (AMresultStatus(src) == AM_STATUS_OK); + if (is_ok) { + if (dest) { + result = AMresultCat(dest, src); + is_ok = (AMresultStatus(result) == AM_STATUS_OK); + AMresultFree(dest); + AMresultFree(src); + } else { + result = src; + } + } else { + AMresultFree(src); + } + } + va_end(args); + if (!is_ok) { + AMresultFree(result); + result = NULL; + } + return result; +} diff --git a/rust/automerge-c/src/utils/stack.c b/rust/automerge-c/src/utils/stack.c new file mode 100644 index 00000000..2cad7c5c --- /dev/null +++ b/rust/automerge-c/src/utils/stack.c @@ -0,0 +1,106 @@ +#include +#include + +#include +#include + +void AMstackFree(AMstack** stack) { + if (stack) { + while (*stack) { + AMresultFree(AMstackPop(stack, NULL)); + } + } +} + +AMresult* AMstackPop(AMstack** stack, const AMresult* result) { + if (!stack) { + return NULL; + } + AMstack** prev = stack; + if (result) { + while (*prev && ((*prev)->result != result)) { + *prev = (*prev)->prev; + } + } + if (!*prev) { + return NULL; + } + AMstack* 
target = *prev; + *prev = target->prev; + AMresult* popped = target->result; + free(target); + return popped; +} + +AMresult* AMstackResult(AMstack** stack, AMresult* result, AMstackCallback callback, void* data) { + if (!stack) { + if (callback) { + /* Create a local stack so that the callback can still examine the + * result. */ + AMstack node = {.result = result, .prev = NULL}; + AMstack* stack = &node; + callback(&stack, data); + } else { + /* \note There is no reason to call this function when both the + * stack and the callback are null. */ + fprintf(stderr, "ERROR: NULL AMstackCallback!\n"); + } + /* \note Nothing can be returned without a stack regardless of + * whether or not the callback validated the result. */ + AMresultFree(result); + return NULL; + } + /* Always push the result onto the stack, even if it's null, so that the + * callback can examine it. */ + AMstack* next = calloc(1, sizeof(AMstack)); + *next = (AMstack){.result = result, .prev = *stack}; + AMstack* top = next; + *stack = top; + if (callback) { + if (!callback(stack, data)) { + /* The result didn't pass the callback's examination. */ + return NULL; + } + } else { + /* Report an obvious error. */ + if (result) { + AMbyteSpan const err_msg = AMresultError(result); + if (err_msg.src && err_msg.count) { + /* \note The callback may be null because the result is supposed + * to be examined externally so return it despite an + * error. */ + char* const cstr = AMstrdup(err_msg, NULL); + fprintf(stderr, "WARNING: %s.\n", cstr); + free(cstr); + } + } else { + /* \note There's no reason to call this function when both the + * result and the callback are null. 
*/ + fprintf(stderr, "ERROR: NULL AMresult*!\n"); + return NULL; + } + } + return result; +} + +AMitem* AMstackItem(AMstack** stack, AMresult* result, AMstackCallback callback, void* data) { + AMitems items = AMstackItems(stack, result, callback, data); + return AMitemsNext(&items, 1); +} + +AMitems AMstackItems(AMstack** stack, AMresult* result, AMstackCallback callback, void* data) { + return (AMstackResult(stack, result, callback, data)) ? AMresultItems(result) : (AMitems){0}; +} + +size_t AMstackSize(AMstack const* const stack) { + if (!stack) { + return 0; + } + size_t count = 0; + AMstack const* prev = stack; + while (prev) { + ++count; + prev = prev->prev; + } + return count; +} \ No newline at end of file diff --git a/rust/automerge-c/src/utils/stack_callback_data.c b/rust/automerge-c/src/utils/stack_callback_data.c new file mode 100644 index 00000000..f1e988d8 --- /dev/null +++ b/rust/automerge-c/src/utils/stack_callback_data.c @@ -0,0 +1,9 @@ +#include + +#include + +AMstackCallbackData* AMstackCallbackDataInit(AMvalType const bitmask, char const* const file, int const line) { + AMstackCallbackData* data = malloc(sizeof(AMstackCallbackData)); + *data = (AMstackCallbackData){.bitmask = bitmask, .file = file, .line = line}; + return data; +} diff --git a/rust/automerge-c/src/utils/string.c b/rust/automerge-c/src/utils/string.c new file mode 100644 index 00000000..a0d1ebe3 --- /dev/null +++ b/rust/automerge-c/src/utils/string.c @@ -0,0 +1,46 @@ +#include +#include + +#include + +char* AMstrdup(AMbyteSpan const str, char const* nul) { + if (!str.src) { + return NULL; + } else if (!str.count) { + return strdup(""); + } + nul = (nul) ? 
nul : "\\0"; + size_t const nul_len = strlen(nul); + char* dup = NULL; + size_t dup_len = 0; + char const* begin = str.src; + char const* end = begin; + for (size_t i = 0; i != str.count; ++i, ++end) { + if (!*end) { + size_t const len = end - begin; + size_t const alloc_len = dup_len + len + nul_len; + if (dup) { + dup = realloc(dup, alloc_len + 1); + } else { + dup = malloc(alloc_len + 1); + } + memcpy(dup + dup_len, begin, len); + memcpy(dup + dup_len + len, nul, nul_len); + dup[alloc_len] = '\0'; + begin = end + 1; + dup_len = alloc_len; + } + } + if (begin != end) { + size_t const len = end - begin; + size_t const alloc_len = dup_len + len; + if (dup) { + dup = realloc(dup, alloc_len + 1); + } else { + dup = malloc(alloc_len + 1); + } + memcpy(dup + dup_len, begin, len); + dup[alloc_len] = '\0'; + } + return dup; +} diff --git a/rust/automerge-c/test/CMakeLists.txt b/rust/automerge-c/test/CMakeLists.txt new file mode 100644 index 00000000..1759f140 --- /dev/null +++ b/rust/automerge-c/test/CMakeLists.txt @@ -0,0 +1,55 @@ +find_package(cmocka CONFIG REQUIRED) + +add_executable( + ${LIBRARY_NAME}_test + actor_id_tests.c + base_state.c + byte_span_tests.c + cmocka_utils.c + enum_string_tests.c + doc_state.c + doc_tests.c + item_tests.c + list_tests.c + macro_utils.c + main.c + map_tests.c + str_utils.c + ported_wasm/basic_tests.c + ported_wasm/suite.c + ported_wasm/sync_tests.c +) + +set_target_properties(${LIBRARY_NAME}_test PROPERTIES LINKER_LANGUAGE C) + +if(WIN32) + set(CMOCKA "cmocka::cmocka") +else() + set(CMOCKA "cmocka") +endif() + +target_link_libraries(${LIBRARY_NAME}_test PRIVATE ${CMOCKA} ${LIBRARY_NAME}) + +add_dependencies(${LIBRARY_NAME}_test ${BINDINGS_NAME}_artifacts) + +if(BUILD_SHARED_LIBS AND WIN32) + add_custom_command( + TARGET ${LIBRARY_NAME}_test + POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy_if_different $ $ + COMMENT "Copying the DLL into the tests directory..." 
+ VERBATIM + ) +endif() + +add_test(NAME ${LIBRARY_NAME}_test COMMAND ${LIBRARY_NAME}_test) + +add_custom_command( + TARGET ${LIBRARY_NAME}_test + POST_BUILD + COMMAND + ${CMAKE_CTEST_COMMAND} --config $ --output-on-failure + COMMENT + "Running the test(s)..." + VERBATIM +) diff --git a/rust/automerge-c/test/actor_id_tests.c b/rust/automerge-c/test/actor_id_tests.c new file mode 100644 index 00000000..918d6213 --- /dev/null +++ b/rust/automerge-c/test/actor_id_tests.c @@ -0,0 +1,140 @@ +#include +#include +#include +#include +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include +#include "cmocka_utils.h" +#include "str_utils.h" + +/** + * \brief State for a group of cmocka test cases. + */ +typedef struct { + /** An actor ID as an array of bytes. */ + uint8_t* src; + /** The count of bytes in \p src. */ + size_t count; + /** A stack of results. */ + AMstack* stack; + /** An actor ID as a hexadecimal string. */ + AMbyteSpan str; +} DocState; + +static int group_setup(void** state) { + DocState* doc_state = test_calloc(1, sizeof(DocState)); + doc_state->str = AMstr("000102030405060708090a0b0c0d0e0f"); + doc_state->count = doc_state->str.count / 2; + doc_state->src = test_calloc(doc_state->count, sizeof(uint8_t)); + hex_to_bytes(doc_state->str.src, doc_state->src, doc_state->count); + *state = doc_state; + return 0; +} + +static int group_teardown(void** state) { + DocState* doc_state = *state; + test_free(doc_state->src); + AMstackFree(&doc_state->stack); + test_free(doc_state); + return 0; +} + +static void test_AMactorIdFromBytes(void** state) { + DocState* doc_state = *state; + AMstack** stack_ptr = &doc_state->stack; + /* Non-empty string. 
*/ + AMresult* result = AMstackResult(stack_ptr, AMactorIdFromBytes(doc_state->src, doc_state->count), NULL, NULL); + if (AMresultStatus(result) != AM_STATUS_OK) { + fail_msg_view("%s", AMresultError(result)); + } + assert_int_equal(AMresultSize(result), 1); + AMitem* const item = AMresultItem(result); + assert_int_equal(AMitemValType(item), AM_VAL_TYPE_ACTOR_ID); + AMactorId const* actor_id; + assert_true(AMitemToActorId(item, &actor_id)); + AMbyteSpan const bytes = AMactorIdBytes(actor_id); + assert_int_equal(bytes.count, doc_state->count); + assert_memory_equal(bytes.src, doc_state->src, bytes.count); + /* Empty array. */ + /** \todo Find out if this is intentionally allowed. */ + result = AMstackResult(stack_ptr, AMactorIdFromBytes(doc_state->src, 0), NULL, NULL); + if (AMresultStatus(result) != AM_STATUS_OK) { + fail_msg_view("%s", AMresultError(result)); + } + /* NULL array. */ + result = AMstackResult(stack_ptr, AMactorIdFromBytes(NULL, doc_state->count), NULL, NULL); + if (AMresultStatus(result) == AM_STATUS_OK) { + fail_msg("AMactorId from NULL."); + } +} + +static void test_AMactorIdFromStr(void** state) { + DocState* doc_state = *state; + AMstack** stack_ptr = &doc_state->stack; + AMresult* result = AMstackResult(stack_ptr, AMactorIdFromStr(doc_state->str), NULL, NULL); + if (AMresultStatus(result) != AM_STATUS_OK) { + fail_msg_view("%s", AMresultError(result)); + } + assert_int_equal(AMresultSize(result), 1); + AMitem* const item = AMresultItem(result); + assert_int_equal(AMitemValType(item), AM_VAL_TYPE_ACTOR_ID); + /* The hexadecimal string should've been decoded as identical bytes. */ + AMactorId const* actor_id; + assert_true(AMitemToActorId(item, &actor_id)); + AMbyteSpan const bytes = AMactorIdBytes(actor_id); + assert_int_equal(bytes.count, doc_state->count); + assert_memory_equal(bytes.src, doc_state->src, bytes.count); + /* The bytes should've been encoded as an identical hexadecimal string. 
*/ + assert_true(AMitemToActorId(item, &actor_id)); + AMbyteSpan const str = AMactorIdStr(actor_id); + assert_int_equal(str.count, doc_state->str.count); + assert_memory_equal(str.src, doc_state->str.src, str.count); +} + +static void test_AMactorIdInit(void** state) { + DocState* doc_state = *state; + AMstack** stack_ptr = &doc_state->stack; + AMresult* prior_result = NULL; + AMbyteSpan prior_bytes = {NULL, 0}; + AMbyteSpan prior_str = {NULL, 0}; + for (size_t i = 0; i != 11; ++i) { + AMresult* result = AMstackResult(stack_ptr, AMactorIdInit(), NULL, NULL); + if (AMresultStatus(result) != AM_STATUS_OK) { + fail_msg_view("%s", AMresultError(result)); + } + assert_int_equal(AMresultSize(result), 1); + AMitem* const item = AMresultItem(result); + assert_int_equal(AMitemValType(item), AM_VAL_TYPE_ACTOR_ID); + AMactorId const* actor_id; + assert_true(AMitemToActorId(item, &actor_id)); + AMbyteSpan const bytes = AMactorIdBytes(actor_id); + assert_true(AMitemToActorId(item, &actor_id)); + AMbyteSpan const str = AMactorIdStr(actor_id); + if (prior_result) { + size_t const max_byte_count = fmax(bytes.count, prior_bytes.count); + assert_memory_not_equal(bytes.src, prior_bytes.src, max_byte_count); + size_t const max_char_count = fmax(str.count, prior_str.count); + assert_memory_not_equal(str.src, prior_str.src, max_char_count); + } + prior_result = result; + prior_bytes = bytes; + prior_str = str; + } +} + +int run_actor_id_tests(void) { + const struct CMUnitTest tests[] = { + cmocka_unit_test(test_AMactorIdFromBytes), + cmocka_unit_test(test_AMactorIdFromStr), + cmocka_unit_test(test_AMactorIdInit), + }; + + return cmocka_run_group_tests(tests, group_setup, group_teardown); +} diff --git a/rust/automerge-c/test/base_state.c b/rust/automerge-c/test/base_state.c new file mode 100644 index 00000000..53325a99 --- /dev/null +++ b/rust/automerge-c/test/base_state.c @@ -0,0 +1,17 @@ +#include + +/* local */ +#include "base_state.h" + +int setup_base(void** state) { + BaseState* 
base_state = calloc(1, sizeof(BaseState)); + *state = base_state; + return 0; +} + +int teardown_base(void** state) { + BaseState* base_state = *state; + AMstackFree(&base_state->stack); + free(base_state); + return 0; +} diff --git a/rust/automerge-c/test/base_state.h b/rust/automerge-c/test/base_state.h new file mode 100644 index 00000000..3c4ff01b --- /dev/null +++ b/rust/automerge-c/test/base_state.h @@ -0,0 +1,39 @@ +#ifndef TESTS_BASE_STATE_H +#define TESTS_BASE_STATE_H + +#include + +/* local */ +#include +#include + +/** + * \struct BaseState + * \brief The shared state for one or more cmocka test cases. + */ +typedef struct { + /** A stack of results. */ + AMstack* stack; +} BaseState; + +/** + * \memberof BaseState + * \brief Sets up the shared state for one or more cmocka test cases. + * + * \param[in,out] state A pointer to a pointer to a `BaseState` struct. + * \pre \p state `!= NULL`. + * \warning The `BaseState` struct returned through \p state must be + * passed to `teardown_base()` in order to avoid a memory leak. + */ +int setup_base(void** state); + +/** + * \memberof BaseState + * \brief Tears down the shared state for one or more cmocka test cases. + * + * \param[in] state A pointer to a pointer to a `BaseState` struct. + * \pre \p state `!= NULL`. 
+ */ +int teardown_base(void** state); + +#endif /* TESTS_BASE_STATE_H */ diff --git a/rust/automerge-c/test/byte_span_tests.c b/rust/automerge-c/test/byte_span_tests.c new file mode 100644 index 00000000..0b1c86a1 --- /dev/null +++ b/rust/automerge-c/test/byte_span_tests.c @@ -0,0 +1,119 @@ +#include +#include +#include +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include +#include + +static void test_AMbytes(void** state) { + static char const DATA[] = {0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf}; + + AMbyteSpan bytes = AMbytes(DATA, sizeof(DATA)); + assert_int_equal(bytes.count, sizeof(DATA)); + assert_memory_equal(bytes.src, DATA, bytes.count); + assert_ptr_equal(bytes.src, DATA); + /* Empty view */ + bytes = AMbytes(DATA, 0); + assert_int_equal(bytes.count, 0); + assert_ptr_equal(bytes.src, DATA); + /* Invalid array */ + bytes = AMbytes(NULL, SIZE_MAX); + assert_int_not_equal(bytes.count, SIZE_MAX); + assert_int_equal(bytes.count, 0); + assert_ptr_equal(bytes.src, NULL); +} + +static void test_AMstr(void** state) { + AMbyteSpan str = AMstr("abcdefghijkl"); + assert_int_equal(str.count, strlen("abcdefghijkl")); + assert_memory_equal(str.src, "abcdefghijkl", str.count); + /* Empty string */ + static char const* const EMPTY = ""; + + str = AMstr(EMPTY); + assert_int_equal(str.count, 0); + assert_ptr_equal(str.src, EMPTY); + /* Invalid string */ + str = AMstr(NULL); + assert_int_equal(str.count, 0); + assert_ptr_equal(str.src, NULL); +} + +static void test_AMstrCmp(void** state) { + /* Length ordering */ + assert_int_equal(AMstrCmp(AMstr("abcdef"), AMstr("abcdefghijkl")), -1); + assert_int_equal(AMstrCmp(AMstr("abcdefghijkl"), AMstr("abcdefghijkl")), 0); + assert_int_equal(AMstrCmp(AMstr("abcdefghijkl"), AMstr("abcdef")), 1); + /* Lexicographical ordering */ + assert_int_equal(AMstrCmp(AMstr("abcdef"), AMstr("ghijkl")), -1); + assert_int_equal(AMstrCmp(AMstr("ghijkl"), AMstr("abcdef")), 1); + /* 
Case ordering */ + assert_int_equal(AMstrCmp(AMstr("ABCDEFGHIJKL"), AMstr("abcdefghijkl")), -1); + assert_int_equal(AMstrCmp(AMstr("ABCDEFGHIJKL"), AMstr("ABCDEFGHIJKL")), 0); + assert_int_equal(AMstrCmp(AMstr("abcdefghijkl"), AMstr("ABCDEFGHIJKL")), 1); + assert_int_equal(AMstrCmp(AMstr("ABCDEFGHIJKL"), AMstr("abcdef")), -1); + assert_int_equal(AMstrCmp(AMstr("abcdef"), AMstr("ABCDEFGHIJKL")), 1); + assert_int_equal(AMstrCmp(AMstr("GHIJKL"), AMstr("abcdef")), -1); + assert_int_equal(AMstrCmp(AMstr("abcdef"), AMstr("GHIJKL")), 1); + /* NUL character inclusion */ + static char const SRC[] = {'a', 'b', 'c', 'd', 'e', 'f', '\0', 'g', 'h', 'i', 'j', 'k', 'l'}; + static AMbyteSpan const NUL_STR = {.src = SRC, .count = 13}; + + assert_int_equal(AMstrCmp(AMstr("abcdef"), NUL_STR), -1); + assert_int_equal(AMstrCmp(NUL_STR, NUL_STR), 0); + assert_int_equal(AMstrCmp(NUL_STR, AMstr("abcdef")), 1); + /* Empty string */ + assert_int_equal(AMstrCmp(AMstr(""), AMstr("abcdefghijkl")), -1); + assert_int_equal(AMstrCmp(AMstr(""), AMstr("")), 0); + assert_int_equal(AMstrCmp(AMstr("abcdefghijkl"), AMstr("")), 1); + /* Invalid string */ + assert_int_equal(AMstrCmp(AMstr(NULL), AMstr("abcdefghijkl")), -1); + assert_int_equal(AMstrCmp(AMstr(NULL), AMstr(NULL)), 0); + assert_int_equal(AMstrCmp(AMstr("abcdefghijkl"), AMstr(NULL)), 1); +} + +static void test_AMstrdup(void** state) { + static char const SRC[] = {'a', 'b', 'c', '\0', 'd', 'e', 'f', '\0', 'g', 'h', 'i', '\0', 'j', 'k', 'l'}; + static AMbyteSpan const NUL_STR = {.src = SRC, .count = 15}; + + /* Default substitution ("\\0") for NUL */ + char* dup = AMstrdup(NUL_STR, NULL); + assert_int_equal(strlen(dup), 18); + assert_string_equal(dup, "abc\\0def\\0ghi\\0jkl"); + free(dup); + /* Arbitrary substitution for NUL */ + dup = AMstrdup(NUL_STR, ":-O"); + assert_int_equal(strlen(dup), 21); + assert_string_equal(dup, "abc:-Odef:-Oghi:-Ojkl"); + free(dup); + /* Empty substitution for NUL */ + dup = AMstrdup(NUL_STR, ""); + 
assert_int_equal(strlen(dup), 12); + assert_string_equal(dup, "abcdefghijkl"); + free(dup); + /* Empty string */ + dup = AMstrdup(AMstr(""), NULL); + assert_int_equal(strlen(dup), 0); + assert_string_equal(dup, ""); + free(dup); + /* Invalid string */ + assert_null(AMstrdup(AMstr(NULL), NULL)); +} + +int run_byte_span_tests(void) { + const struct CMUnitTest tests[] = { + cmocka_unit_test(test_AMbytes), + cmocka_unit_test(test_AMstr), + cmocka_unit_test(test_AMstrCmp), + cmocka_unit_test(test_AMstrdup), + }; + + return cmocka_run_group_tests(tests, NULL, NULL); +} diff --git a/rust/automerge-c/test/cmocka_utils.c b/rust/automerge-c/test/cmocka_utils.c new file mode 100644 index 00000000..37c57fb1 --- /dev/null +++ b/rust/automerge-c/test/cmocka_utils.c @@ -0,0 +1,88 @@ +#include +#include +#include +#include + +/* third-party */ +#include +#include +#include +#include + +/* local */ +#include "cmocka_utils.h" + +/** + * \brief Assert that the given expression is true and report failure in terms + * of a line number within a file. + * + * \param[in] c An expression. + * \param[in] file A file's full path string. + * \param[in] line A line number. + */ +#define assert_true_where(c, file, line) _assert_true(cast_ptr_to_largest_integral_type(c), #c, file, line) + +/** + * \brief Assert that the given pointer is non-NULL and report failure in terms + * of a line number within a file. + * + * \param[in] c An expression. + * \param[in] file A file's full path string. + * \param[in] line A line number. + */ +#define assert_non_null_where(c, file, line) assert_true_where(c, file, line) + +/** + * \brief Forces the test to fail immediately and quit, printing the reason in + * terms of a line number within a file. + * + * \param[in] msg A message string into which \p str is interpolated. + * \param[in] str An owned string. + * \param[in] file A file's full path string. + * \param[in] line A line number. 
+ */ +#define fail_msg_where(msg, str, file, line) \ + do { \ + print_error("ERROR: " msg "\n", str); \ + _fail(file, line); \ + } while (0) + +/** + * \brief Forces the test to fail immediately and quit, printing the reason in + * terms of a line number within a file. + * + * \param[in] msg A message string into which \p view.src is interpolated. + * \param[in] view A UTF-8 string view as an `AMbyteSpan` struct. + * \param[in] file A file's full path string. + * \param[in] line A line number. + */ +#define fail_msg_view_where(msg, view, file, line) \ + do { \ + char* const str = AMstrdup(view, NULL); \ + print_error("ERROR: " msg "\n", str); \ + free(str); \ + _fail(file, line); \ + } while (0) + +bool cmocka_cb(AMstack** stack, void* data) { + assert_non_null(data); + AMstackCallbackData* const sc_data = (AMstackCallbackData*)data; + assert_non_null_where(stack, sc_data->file, sc_data->line); + assert_non_null_where(*stack, sc_data->file, sc_data->line); + assert_non_null_where((*stack)->result, sc_data->file, sc_data->line); + if (AMresultStatus((*stack)->result) != AM_STATUS_OK) { + fail_msg_view_where("%s", AMresultError((*stack)->result), sc_data->file, sc_data->line); + return false; + } + /* Test that the types of all item values are members of the mask. 
*/ + AMitems items = AMresultItems((*stack)->result); + AMitem* item = NULL; + while ((item = AMitemsNext(&items, 1)) != NULL) { + AMvalType const tag = AMitemValType(item); + if (!(tag & sc_data->bitmask)) { + fail_msg_where("Unexpected value type `%s`.", AMvalTypeToString(tag), sc_data->file, sc_data->line); + return false; + } + } + return true; +} diff --git a/rust/automerge-c/test/cmocka_utils.h b/rust/automerge-c/test/cmocka_utils.h new file mode 100644 index 00000000..b6611bcc --- /dev/null +++ b/rust/automerge-c/test/cmocka_utils.h @@ -0,0 +1,42 @@ +#ifndef TESTS_CMOCKA_UTILS_H +#define TESTS_CMOCKA_UTILS_H + +#include +#include + +/* third-party */ +#include +#include + +/* local */ +#include "base_state.h" + +/** + * \brief Forces the test to fail immediately and quit, printing the reason. + * + * \param[in] msg A message string into which \p view.src is interpolated. + * \param[in] view A UTF-8 string view as an `AMbyteSpan` struct. + */ +#define fail_msg_view(msg, view) \ + do { \ + char* const c_str = AMstrdup(view, NULL); \ + print_error("ERROR: " msg "\n", c_str); \ + free(c_str); \ + fail(); \ + } while (0) + +/** + * \brief Validates the top result in a stack based upon the parameters + * specified within the given data structure and reports violations + * using cmocka assertions. + * + * \param[in,out] stack A pointer to a pointer to an `AMstack` struct. + * \param[in] data A pointer to an owned `AMpushData` struct. + * \return `true` if the top `AMresult` struct in \p stack is valid, `false` + * otherwise. + * \pre \p stack `!= NULL`. + * \pre \p data `!= NULL`. 
+ */ +bool cmocka_cb(AMstack** stack, void* data); + +#endif /* TESTS_CMOCKA_UTILS_H */ diff --git a/rust/automerge-c/test/doc_state.c b/rust/automerge-c/test/doc_state.c new file mode 100644 index 00000000..3cbece50 --- /dev/null +++ b/rust/automerge-c/test/doc_state.c @@ -0,0 +1,27 @@ +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include +#include "cmocka_utils.h" +#include "doc_state.h" + +int setup_doc(void** state) { + DocState* doc_state = test_calloc(1, sizeof(DocState)); + setup_base((void**)&doc_state->base_state); + AMitemToDoc(AMstackItem(&doc_state->base_state->stack, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), + &doc_state->doc); + *state = doc_state; + return 0; +} + +int teardown_doc(void** state) { + DocState* doc_state = *state; + teardown_base((void**)&doc_state->base_state); + test_free(doc_state); + return 0; +} diff --git a/rust/automerge-c/test/doc_state.h b/rust/automerge-c/test/doc_state.h new file mode 100644 index 00000000..525a49fa --- /dev/null +++ b/rust/automerge-c/test/doc_state.h @@ -0,0 +1,17 @@ +#ifndef TESTS_DOC_STATE_H +#define TESTS_DOC_STATE_H + +/* local */ +#include +#include "base_state.h" + +typedef struct { + BaseState* base_state; + AMdoc* doc; +} DocState; + +int setup_doc(void** state); + +int teardown_doc(void** state); + +#endif /* TESTS_DOC_STATE_H */ diff --git a/rust/automerge-c/test/doc_tests.c b/rust/automerge-c/test/doc_tests.c new file mode 100644 index 00000000..c1d21928 --- /dev/null +++ b/rust/automerge-c/test/doc_tests.c @@ -0,0 +1,231 @@ +#include +#include +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include +#include +#include "base_state.h" +#include "cmocka_utils.h" +#include "doc_state.h" +#include "str_utils.h" + +typedef struct { + DocState* doc_state; + AMbyteSpan actor_id_str; + uint8_t* actor_id_bytes; + size_t actor_id_size; +} TestState; + +static int setup(void** state) { + TestState* test_state = test_calloc(1, 
sizeof(TestState)); + setup_doc((void**)&test_state->doc_state); + test_state->actor_id_str.src = "000102030405060708090a0b0c0d0e0f"; + test_state->actor_id_str.count = strlen(test_state->actor_id_str.src); + test_state->actor_id_size = test_state->actor_id_str.count / 2; + test_state->actor_id_bytes = test_malloc(test_state->actor_id_size); + hex_to_bytes(test_state->actor_id_str.src, test_state->actor_id_bytes, test_state->actor_id_size); + *state = test_state; + return 0; +} + +static int teardown(void** state) { + TestState* test_state = *state; + teardown_doc((void**)&test_state->doc_state); + test_free(test_state->actor_id_bytes); + test_free(test_state); + return 0; +} + +static void test_AMkeys_empty(void** state) { + TestState* test_state = *state; + AMstack** stack_ptr = &test_state->doc_state->base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + AMitems forward = AMstackItems(stack_ptr, AMkeys(doc, AM_ROOT, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + assert_int_equal(AMitemsSize(&forward), 0); + AMitems reverse = AMitemsReversed(&forward); + assert_int_equal(AMitemsSize(&reverse), 0); + assert_null(AMitemsNext(&forward, 1)); + assert_null(AMitemsPrev(&forward, 1)); + assert_null(AMitemsNext(&reverse, 1)); + assert_null(AMitemsPrev(&reverse, 1)); +} + +static void test_AMkeys_list(void** state) { + TestState* test_state = *state; + AMstack** stack_ptr = &test_state->doc_state->base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + AMobjId const* const list = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + AMstackItem(NULL, AMlistPutInt(doc, list, 0, true, 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutInt(doc, list, 1, true, 0), cmocka_cb, 
AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutInt(doc, list, 2, true, 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMitems forward = AMstackItems(stack_ptr, AMkeys(doc, list, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&forward), 3); + AMitems reverse = AMitemsReversed(&forward); + assert_int_equal(AMitemsSize(&reverse), 3); + /* Forward iterator forward. */ + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&forward, 1), &str)); + assert_ptr_equal(strstr(str.src, "2@"), str.src); + assert_true(AMitemToStr(AMitemsNext(&forward, 1), &str)); + assert_ptr_equal(strstr(str.src, "3@"), str.src); + assert_true(AMitemToStr(AMitemsNext(&forward, 1), &str)); + assert_ptr_equal(strstr(str.src, "4@"), str.src); + assert_null(AMitemsNext(&forward, 1)); + // /* Forward iterator reverse. */ + assert_true(AMitemToStr(AMitemsPrev(&forward, 1), &str)); + assert_ptr_equal(strstr(str.src, "4@"), str.src); + assert_true(AMitemToStr(AMitemsPrev(&forward, 1), &str)); + assert_ptr_equal(strstr(str.src, "3@"), str.src); + assert_true(AMitemToStr(AMitemsPrev(&forward, 1), &str)); + assert_ptr_equal(strstr(str.src, "2@"), str.src); + assert_null(AMitemsPrev(&forward, 1)); + /* Reverse iterator forward. */ + assert_true(AMitemToStr(AMitemsNext(&reverse, 1), &str)); + assert_ptr_equal(strstr(str.src, "4@"), str.src); + assert_true(AMitemToStr(AMitemsNext(&reverse, 1), &str)); + assert_ptr_equal(strstr(str.src, "3@"), str.src); + assert_true(AMitemToStr(AMitemsNext(&reverse, 1), &str)); + assert_ptr_equal(strstr(str.src, "2@"), str.src); + assert_null(AMitemsNext(&reverse, 1)); + /* Reverse iterator reverse. 
*/ + assert_true(AMitemToStr(AMitemsPrev(&reverse, 1), &str)); + assert_ptr_equal(strstr(str.src, "2@"), str.src); + assert_true(AMitemToStr(AMitemsPrev(&reverse, 1), &str)); + assert_ptr_equal(strstr(str.src, "3@"), str.src); + assert_true(AMitemToStr(AMitemsPrev(&reverse, 1), &str)); + assert_ptr_equal(strstr(str.src, "4@"), str.src); + assert_null(AMitemsPrev(&reverse, 1)); +} + +static void test_AMkeys_map(void** state) { + TestState* test_state = *state; + AMstack** stack_ptr = &test_state->doc_state->base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("one"), 1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("two"), 2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("three"), 3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMitems forward = AMstackItems(stack_ptr, AMkeys(doc, AM_ROOT, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&forward), 3); + AMitems reverse = AMitemsReversed(&forward); + assert_int_equal(AMitemsSize(&reverse), 3); + /* Forward iterator forward. */ + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&forward, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "one", str.count); + assert_true(AMitemToStr(AMitemsNext(&forward, 1), &str)); + assert_int_equal(str.count, 5); + assert_memory_equal(str.src, "three", str.count); + assert_true(AMitemToStr(AMitemsNext(&forward, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "two", str.count); + assert_null(AMitemsNext(&forward, 1)); + /* Forward iterator reverse. 
*/ + assert_true(AMitemToStr(AMitemsPrev(&forward, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "two", str.count); + assert_true(AMitemToStr(AMitemsPrev(&forward, 1), &str)); + assert_int_equal(str.count, 5); + assert_memory_equal(str.src, "three", str.count); + assert_true(AMitemToStr(AMitemsPrev(&forward, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "one", str.count); + assert_null(AMitemsPrev(&forward, 1)); + /* Reverse iterator forward. */ + assert_true(AMitemToStr(AMitemsNext(&reverse, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "two", str.count); + assert_true(AMitemToStr(AMitemsNext(&reverse, 1), &str)); + assert_int_equal(str.count, 5); + assert_memory_equal(str.src, "three", str.count); + assert_true(AMitemToStr(AMitemsNext(&reverse, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "one", str.count); + assert_null(AMitemsNext(&reverse, 1)); + /* Reverse iterator reverse. 
*/ + assert_true(AMitemToStr(AMitemsPrev(&reverse, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "one", str.count); + assert_true(AMitemToStr(AMitemsPrev(&reverse, 1), &str)); + assert_int_equal(str.count, 5); + assert_memory_equal(str.src, "three", str.count); + assert_true(AMitemToStr(AMitemsPrev(&reverse, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "two", str.count); + assert_null(AMitemsPrev(&reverse, 1)); +} + +static void test_AMputActor_bytes(void** state) { + TestState* test_state = *state; + AMstack** stack_ptr = &test_state->doc_state->base_state->stack; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromBytes(test_state->actor_id_bytes, test_state->actor_id_size), cmocka_cb, + AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMstackItem(NULL, AMsetActorId(test_state->doc_state->doc, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMgetActorId(test_state->doc_state->doc), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMbyteSpan const bytes = AMactorIdBytes(actor_id); + assert_int_equal(bytes.count, test_state->actor_id_size); + assert_memory_equal(bytes.src, test_state->actor_id_bytes, bytes.count); +} + +static void test_AMputActor_str(void** state) { + TestState* test_state = *state; + AMstack** stack_ptr = &test_state->doc_state->base_state->stack; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(test_state->actor_id_str), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMstackItem(NULL, AMsetActorId(test_state->doc_state->doc, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMgetActorId(test_state->doc_state->doc), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMbyteSpan const str = AMactorIdStr(actor_id); + 
assert_int_equal(str.count, test_state->actor_id_str.count); + assert_memory_equal(str.src, test_state->actor_id_str.src, str.count); +} + +static void test_AMspliceText(void** state) { + TestState* test_state = *state; + AMstack** stack_ptr = &test_state->doc_state->base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + AMobjId const* const text = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + AMstackItem(NULL, AMspliceText(doc, text, 0, 0, AMstr("one + ")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMspliceText(doc, text, 4, 2, AMstr("two = ")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMspliceText(doc, text, 8, 2, AMstr("three")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMbyteSpan str; + assert_true( + AMitemToStr(AMstackItem(stack_ptr, AMtext(doc, text, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_equal(str.count, strlen("one two three")); + assert_memory_equal(str.src, "one two three", str.count); +} + +int run_doc_tests(void) { + const struct CMUnitTest tests[] = { + cmocka_unit_test_setup_teardown(test_AMkeys_empty, setup, teardown), + cmocka_unit_test_setup_teardown(test_AMkeys_list, setup, teardown), + cmocka_unit_test_setup_teardown(test_AMkeys_map, setup, teardown), + cmocka_unit_test_setup_teardown(test_AMputActor_bytes, setup, teardown), + cmocka_unit_test_setup_teardown(test_AMputActor_str, setup, teardown), + cmocka_unit_test_setup_teardown(test_AMspliceText, setup, teardown), + }; + + return cmocka_run_group_tests(tests, NULL, NULL); +} diff --git a/rust/automerge-c/test/enum_string_tests.c b/rust/automerge-c/test/enum_string_tests.c new file mode 100644 index 00000000..11131e43 --- /dev/null +++ b/rust/automerge-c/test/enum_string_tests.c @@ -0,0 +1,148 @@ +#include +#include +#include +#include 
+#include + +/* third-party */ +#include + +/* local */ +#include +#include + +#define assert_to_string(function, tag) assert_string_equal(function(tag), #tag) + +#define assert_from_string(function, type, tag) \ + do { \ + type out; \ + assert_true(function(&out, #tag)); \ + assert_int_equal(out, tag); \ + } while (0) + +static void test_AMidxTypeToString(void** state) { + assert_to_string(AMidxTypeToString, AM_IDX_TYPE_DEFAULT); + assert_to_string(AMidxTypeToString, AM_IDX_TYPE_KEY); + assert_to_string(AMidxTypeToString, AM_IDX_TYPE_POS); + /* Zero tag */ + assert_string_equal(AMidxTypeToString(0), "AM_IDX_TYPE_DEFAULT"); + /* Invalid tag */ + assert_string_equal(AMidxTypeToString(-1), "???"); +} + +static void test_AMidxTypeFromString(void** state) { + assert_from_string(AMidxTypeFromString, AMidxType, AM_IDX_TYPE_DEFAULT); + assert_from_string(AMidxTypeFromString, AMidxType, AM_IDX_TYPE_KEY); + assert_from_string(AMidxTypeFromString, AMidxType, AM_IDX_TYPE_POS); + /* Invalid tag */ + AMidxType out = -1; + assert_false(AMidxTypeFromString(&out, "???")); + assert_int_equal(out, (AMidxType)-1); +} + +static void test_AMobjTypeToString(void** state) { + assert_to_string(AMobjTypeToString, AM_OBJ_TYPE_DEFAULT); + assert_to_string(AMobjTypeToString, AM_OBJ_TYPE_LIST); + assert_to_string(AMobjTypeToString, AM_OBJ_TYPE_MAP); + assert_to_string(AMobjTypeToString, AM_OBJ_TYPE_TEXT); + /* Zero tag */ + assert_string_equal(AMobjTypeToString(0), "AM_OBJ_TYPE_DEFAULT"); + /* Invalid tag */ + assert_string_equal(AMobjTypeToString(-1), "???"); +} + +static void test_AMobjTypeFromString(void** state) { + assert_from_string(AMobjTypeFromString, AMobjType, AM_OBJ_TYPE_DEFAULT); + assert_from_string(AMobjTypeFromString, AMobjType, AM_OBJ_TYPE_LIST); + assert_from_string(AMobjTypeFromString, AMobjType, AM_OBJ_TYPE_MAP); + assert_from_string(AMobjTypeFromString, AMobjType, AM_OBJ_TYPE_TEXT); + /* Invalid tag */ + AMobjType out = -1; + assert_false(AMobjTypeFromString(&out, "???")); 
+ assert_int_equal(out, (AMobjType)-1); +} + +static void test_AMstatusToString(void** state) { + assert_to_string(AMstatusToString, AM_STATUS_ERROR); + assert_to_string(AMstatusToString, AM_STATUS_INVALID_RESULT); + assert_to_string(AMstatusToString, AM_STATUS_OK); + /* Zero tag */ + assert_string_equal(AMstatusToString(0), "AM_STATUS_OK"); + /* Invalid tag */ + assert_string_equal(AMstatusToString(-1), "???"); +} + +static void test_AMstatusFromString(void** state) { + assert_from_string(AMstatusFromString, AMstatus, AM_STATUS_ERROR); + assert_from_string(AMstatusFromString, AMstatus, AM_STATUS_INVALID_RESULT); + assert_from_string(AMstatusFromString, AMstatus, AM_STATUS_OK); + /* Invalid tag */ + AMstatus out = -1; + assert_false(AMstatusFromString(&out, "???")); + assert_int_equal(out, (AMstatus)-1); +} + +static void test_AMvalTypeToString(void** state) { + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_ACTOR_ID); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_BOOL); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_BYTES); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_CHANGE); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_CHANGE_HASH); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_COUNTER); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_DEFAULT); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_DOC); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_F64); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_INT); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_NULL); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_OBJ_TYPE); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_STR); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_SYNC_HAVE); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_SYNC_MESSAGE); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_SYNC_STATE); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_TIMESTAMP); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_UINT); + assert_to_string(AMvalTypeToString, 
AM_VAL_TYPE_UNKNOWN); + assert_to_string(AMvalTypeToString, AM_VAL_TYPE_VOID); + /* Zero tag */ + assert_string_equal(AMvalTypeToString(0), "AM_VAL_TYPE_DEFAULT"); + /* Invalid tag */ + assert_string_equal(AMvalTypeToString(-1), "???"); +} + +static void test_AMvalTypeFromString(void** state) { + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_ACTOR_ID); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_BOOL); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_BYTES); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_CHANGE); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_CHANGE_HASH); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_COUNTER); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_DEFAULT); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_DOC); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_F64); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_INT); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_NULL); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_OBJ_TYPE); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_STR); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_SYNC_HAVE); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_SYNC_MESSAGE); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_SYNC_STATE); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_TIMESTAMP); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_UINT); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_UNKNOWN); + assert_from_string(AMvalTypeFromString, AMvalType, AM_VAL_TYPE_VOID); + /* Invalid tag */ + AMvalType out = -1; + assert_false(AMvalTypeFromString(&out, "???")); + assert_int_equal(out, (AMvalType)-1); +} + +int run_enum_string_tests(void) { + const struct CMUnitTest tests[] = { + 
cmocka_unit_test(test_AMidxTypeToString), cmocka_unit_test(test_AMidxTypeFromString), + cmocka_unit_test(test_AMobjTypeToString), cmocka_unit_test(test_AMobjTypeFromString), + cmocka_unit_test(test_AMstatusToString), cmocka_unit_test(test_AMstatusFromString), + cmocka_unit_test(test_AMvalTypeToString), cmocka_unit_test(test_AMvalTypeFromString), + }; + + return cmocka_run_group_tests(tests, NULL, NULL); +} diff --git a/rust/automerge-c/test/item_tests.c b/rust/automerge-c/test/item_tests.c new file mode 100644 index 00000000..a30b0556 --- /dev/null +++ b/rust/automerge-c/test/item_tests.c @@ -0,0 +1,94 @@ +#include +#include +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include +#include +#include "cmocka_utils.h" +#include "doc_state.h" + +static void test_AMitemResult(void** state) { + enum { ITEM_COUNT = 1000 }; + + DocState* doc_state = *state; + AMstack** stack_ptr = &doc_state->base_state->stack; + /* Append the strings to a list so that they'll be in numerical order. */ + AMobjId const* const list = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + for (size_t pos = 0; pos != ITEM_COUNT; ++pos) { + size_t const count = snprintf(NULL, 0, "%zu", pos); + char* const src = test_calloc(count + 1, sizeof(char)); + assert_int_equal(sprintf(src, "%zu", pos), count); + AMstackItem(NULL, AMlistPutStr(doc_state->doc, list, pos, true, AMbytes(src, count)), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + test_free(src); + } + /* Get an item iterator. */ + AMitems items = AMstackItems(stack_ptr, AMlistRange(doc_state->doc, list, 0, SIZE_MAX, NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + /* Get the item iterator's result so that it can be freed later. */ + AMresult const* const items_result = (*stack_ptr)->result; + /* Iterate over all of the items and copy their pointers into an array. 
*/ + AMitem* item_ptrs[ITEM_COUNT] = {NULL}; + AMitem* item = NULL; + for (size_t pos = 0; (item = AMitemsNext(&items, 1)) != NULL; ++pos) { + /* The item's reference count should be 1. */ + assert_int_equal(AMitemRefCount(item), 1); + if (pos & 1) { + /* Create a redundant result for an odd item. */ + AMitem* const new_item = AMstackItem(stack_ptr, AMitemResult(item), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + /* The item's old and new pointers will never match. */ + assert_ptr_not_equal(new_item, item); + /* The item's reference count will have been incremented. */ + assert_int_equal(AMitemRefCount(item), 2); + assert_int_equal(AMitemRefCount(new_item), 2); + /* The item's old and new indices should match. */ + assert_int_equal(AMitemIdxType(item), AMitemIdxType(new_item)); + assert_int_equal(AMitemIdxType(item), AM_IDX_TYPE_POS); + size_t pos, new_pos; + assert_true(AMitemPos(item, &pos)); + assert_true(AMitemPos(new_item, &new_pos)); + assert_int_equal(pos, new_pos); + /* The item's old and new object IDs should match. */ + AMobjId const* const obj_id = AMitemObjId(item); + AMobjId const* const new_obj_id = AMitemObjId(new_item); + assert_true(AMobjIdEqual(obj_id, new_obj_id)); + /* The item's old and new value types should match. */ + assert_int_equal(AMitemValType(item), AMitemValType(new_item)); + /* The item's old and new string values should match. */ + AMbyteSpan str; + assert_true(AMitemToStr(item, &str)); + AMbyteSpan new_str; + assert_true(AMitemToStr(new_item, &new_str)); + assert_int_equal(str.count, new_str.count); + assert_memory_equal(str.src, new_str.src, new_str.count); + /* The item's old and new object IDs are one and the same. */ + assert_ptr_equal(obj_id, new_obj_id); + /* The item's old and new string values are one and the same. */ + assert_ptr_equal(str.src, new_str.src); + /* Save the item's new pointer. */ + item_ptrs[pos] = new_item; + } + } + /* Free the item iterator's result. 
*/ + AMresultFree(AMstackPop(stack_ptr, items_result)); + /* An odd item's reference count should be 1 again. */ + for (size_t pos = 1; pos < ITEM_COUNT; pos += 2) { + assert_int_equal(AMitemRefCount(item_ptrs[pos]), 1); + } +} + +int run_item_tests(void) { + const struct CMUnitTest tests[] = { + cmocka_unit_test(test_AMitemResult), + }; + + return cmocka_run_group_tests(tests, setup_doc, teardown_doc); +} diff --git a/rust/automerge-c/test/list_tests.c b/rust/automerge-c/test/list_tests.c new file mode 100644 index 00000000..723dd038 --- /dev/null +++ b/rust/automerge-c/test/list_tests.c @@ -0,0 +1,515 @@ +#include +#include +#include +#include +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include +#include +#include "base_state.h" +#include "cmocka_utils.h" +#include "doc_state.h" +#include "macro_utils.h" + +static void test_AMlistIncrement(void** state) { + DocState* doc_state = *state; + AMstack** stack_ptr = &doc_state->base_state->stack; + AMobjId const* const list = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + AMstackItem(NULL, AMlistPutCounter(doc_state->doc, list, 0, true, 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + int64_t counter; + assert_true(AMitemToCounter( + AMstackItem(stack_ptr, AMlistGet(doc_state->doc, list, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_COUNTER)), + &counter)); + assert_int_equal(counter, 0); + AMresultFree(AMstackPop(stack_ptr, NULL)); + AMstackItem(NULL, AMlistIncrement(doc_state->doc, list, 0, 3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + assert_true(AMitemToCounter( + AMstackItem(stack_ptr, AMlistGet(doc_state->doc, list, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_COUNTER)), + &counter)); + assert_int_equal(counter, 3); + AMresultFree(AMstackPop(stack_ptr, NULL)); +} + +#define test_AMlistPut(suffix, mode) test_AMlistPut##suffix##_##mode + +#define static_void_test_AMlistPut(suffix, 
mode, type, scalar_value) \ + static void test_AMlistPut##suffix##_##mode(void** state) { \ + DocState* doc_state = *state; \ + AMstack** stack_ptr = &doc_state->base_state->stack; \ + AMobjId const* const list = AMitemObjId( \ + AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), \ + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); \ + AMstackItem(NULL, AMlistPut##suffix(doc_state->doc, list, 0, !strcmp(#mode, "insert"), scalar_value), \ + cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); \ + type value; \ + assert_true(AMitemTo##suffix(AMstackItem(stack_ptr, AMlistGet(doc_state->doc, list, 0, NULL), cmocka_cb, \ + AMexpect(suffix_to_val_type(#suffix))), \ + &value)); \ + assert_true(value == scalar_value); \ + AMresultFree(AMstackPop(stack_ptr, NULL)); \ + } + +#define test_AMlistPutBytes(mode) test_AMlistPutBytes##_##mode + +#define static_void_test_AMlistPutBytes(mode, bytes_value) \ + static void test_AMlistPutBytes_##mode(void** state) { \ + static size_t const BYTES_SIZE = sizeof(bytes_value) / sizeof(uint8_t); \ + \ + DocState* doc_state = *state; \ + AMstack** stack_ptr = &doc_state->base_state->stack; \ + AMobjId const* const list = AMitemObjId( \ + AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), \ + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); \ + AMstackItem( \ + NULL, AMlistPutBytes(doc_state->doc, list, 0, !strcmp(#mode, "insert"), AMbytes(bytes_value, BYTES_SIZE)), \ + cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); \ + AMbyteSpan bytes; \ + assert_true(AMitemToBytes( \ + AMstackItem(stack_ptr, AMlistGet(doc_state->doc, list, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), \ + &bytes)); \ + assert_int_equal(bytes.count, BYTES_SIZE); \ + assert_memory_equal(bytes.src, bytes_value, BYTES_SIZE); \ + AMresultFree(AMstackPop(stack_ptr, NULL)); \ + } + +#define test_AMlistPutNull(mode) test_AMlistPutNull_##mode + +#define static_void_test_AMlistPutNull(mode) \ + static void 
test_AMlistPutNull_##mode(void** state) { \ + DocState* doc_state = *state; \ + AMstack** stack_ptr = &doc_state->base_state->stack; \ + AMobjId const* const list = AMitemObjId( \ + AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), \ + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); \ + AMstackItem(NULL, AMlistPutNull(doc_state->doc, list, 0, !strcmp(#mode, "insert")), cmocka_cb, \ + AMexpect(AM_VAL_TYPE_VOID)); \ + AMresult* result = AMstackResult(stack_ptr, AMlistGet(doc_state->doc, list, 0, NULL), NULL, NULL); \ + if (AMresultStatus(result) != AM_STATUS_OK) { \ + fail_msg_view("%s", AMresultError(result)); \ + } \ + assert_int_equal(AMresultSize(result), 1); \ + assert_int_equal(AMitemValType(AMresultItem(result)), AM_VAL_TYPE_NULL); \ + AMresultFree(AMstackPop(stack_ptr, NULL)); \ + } + +#define test_AMlistPutObject(label, mode) test_AMlistPutObject_##label##_##mode + +#define static_void_test_AMlistPutObject(label, mode) \ + static void test_AMlistPutObject_##label##_##mode(void** state) { \ + DocState* doc_state = *state; \ + AMstack** stack_ptr = &doc_state->base_state->stack; \ + AMobjId const* const list = AMitemObjId( \ + AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), \ + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); \ + AMobjType const obj_type = suffix_to_obj_type(#label); \ + AMobjId const* const obj_id = AMitemObjId( \ + AMstackItem(stack_ptr, AMlistPutObject(doc_state->doc, list, 0, !strcmp(#mode, "insert"), obj_type), \ + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); \ + assert_non_null(obj_id); \ + assert_int_equal(AMobjObjType(doc_state->doc, obj_id), obj_type); \ + assert_int_equal(AMobjSize(doc_state->doc, obj_id, NULL), 0); \ + AMresultFree(AMstackPop(stack_ptr, NULL)); \ + } + +#define test_AMlistPutStr(mode) test_AMlistPutStr##_##mode + +#define static_void_test_AMlistPutStr(mode, str_value) \ + static void test_AMlistPutStr_##mode(void** state) { \ + 
DocState* doc_state = *state; \ + AMstack** stack_ptr = &doc_state->base_state->stack; \ + AMobjId const* const list = AMitemObjId( \ + AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), \ + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); \ + AMstackItem(NULL, AMlistPutStr(doc_state->doc, list, 0, !strcmp(#mode, "insert"), AMstr(str_value)), \ + cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); \ + AMbyteSpan str; \ + assert_true(AMitemToStr( \ + AMstackItem(stack_ptr, AMlistGet(doc_state->doc, list, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), \ + &str)); \ + assert_int_equal(str.count, strlen(str_value)); \ + assert_memory_equal(str.src, str_value, str.count); \ + AMresultFree(AMstackPop(stack_ptr, NULL)); \ + } + +static_void_test_AMlistPut(Bool, insert, bool, true); + +static_void_test_AMlistPut(Bool, update, bool, true); + +static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX}; + +static_void_test_AMlistPutBytes(insert, BYTES_VALUE); + +static_void_test_AMlistPutBytes(update, BYTES_VALUE); + +static_void_test_AMlistPut(Counter, insert, int64_t, INT64_MAX); + +static_void_test_AMlistPut(Counter, update, int64_t, INT64_MAX); + +static_void_test_AMlistPut(F64, insert, double, DBL_MAX); + +static_void_test_AMlistPut(F64, update, double, DBL_MAX); + +static_void_test_AMlistPut(Int, insert, int64_t, INT64_MAX); + +static_void_test_AMlistPut(Int, update, int64_t, INT64_MAX); + +static_void_test_AMlistPutNull(insert); + +static_void_test_AMlistPutNull(update); + +static_void_test_AMlistPutObject(List, insert); + +static_void_test_AMlistPutObject(List, update); + +static_void_test_AMlistPutObject(Map, insert); + +static_void_test_AMlistPutObject(Map, update); + +static_void_test_AMlistPutObject(Text, insert); + +static_void_test_AMlistPutObject(Text, update); + +static_void_test_AMlistPutStr(insert, + "Hello, " + "world!"); + +static_void_test_AMlistPutStr(update, + "Hello," + " world" + "!"); + 
+static_void_test_AMlistPut(Timestamp, insert, int64_t, INT64_MAX); + +static_void_test_AMlistPut(Timestamp, update, int64_t, INT64_MAX); + +static_void_test_AMlistPut(Uint, insert, uint64_t, UINT64_MAX); + +static_void_test_AMlistPut(Uint, update, uint64_t, UINT64_MAX); + +static void test_get_range_values(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + AMdoc* doc1; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); + AMobjId const* const list = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc1, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + + /* Insert elements. */ + AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("First")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("Second")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("Third")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("Fourth")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("Fifth")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("Sixth")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("Seventh")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutStr(doc1, list, 0, true, AMstr("Eighth")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc1, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + + AMitems const v1 = AMstackItems(stack_ptr, AMgetHeads(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMdoc* doc2; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMfork(doc1, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); + + AMstackItem(NULL, 
AMlistPutStr(doc1, list, 2, false, AMstr("Third V2")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc1, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + + AMstackItem(NULL, AMlistPutStr(doc2, list, 2, false, AMstr("Third V3")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc2, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + + AMstackItem(NULL, AMmerge(doc1, doc2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + + /* Forward vs. reverse: complete current list range. */ + AMitems range = + AMstackItems(stack_ptr, AMlistRange(doc1, list, 0, SIZE_MAX, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + size_t size = AMitemsSize(&range); + assert_int_equal(size, 8); + AMitems range_back = AMitemsReversed(&range); + assert_int_equal(AMitemsSize(&range_back), size); + size_t pos; + assert_true(AMitemPos(AMitemsNext(&range, 1), &pos)); + assert_int_equal(pos, 0); + assert_true(AMitemPos(AMitemsNext(&range_back, 1), &pos)); + assert_int_equal(pos, 7); + + AMitem *item1, *item_back1; + size_t count, middle = size / 2; + range = AMitemsRewound(&range); + range_back = AMitemsRewound(&range_back); + for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; + item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { + size_t pos1, pos_back1; + assert_true(AMitemPos(item1, &pos1)); + assert_true(AMitemPos(item_back1, &pos_back1)); + if ((count == middle) && (middle & 1)) { + /* The iterators are crossing in the middle. 
*/ + assert_int_equal(pos1, pos_back1); + assert_true(AMitemEqual(item1, item_back1)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); + } else { + assert_int_not_equal(pos1, pos_back1); + } + AMitem* item2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos1, NULL), NULL, NULL); + AMitem* item_back2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos_back1, NULL), NULL, NULL); + /** \note An item returned from an `AM...Get()` call doesn't include the + index used to retrieve it. */ + assert_false(AMitemIdxType(item2)); + assert_false(AMitemIdxType(item_back2)); + assert_true(AMitemEqual(item1, item2)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); + assert_true(AMitemEqual(item_back1, item_back2)); + assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); + AMresultFree(AMstackPop(stack_ptr, NULL)); + } + + /* Forward vs. reverse: partial current list range. */ + range = AMstackItems(stack_ptr, AMlistRange(doc1, list, 1, 6, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + size = AMitemsSize(&range); + assert_int_equal(size, 5); + range_back = AMitemsReversed(&range); + assert_int_equal(AMitemsSize(&range_back), size); + assert_true(AMitemPos(AMitemsNext(&range, 1), &pos)); + assert_int_equal(pos, 1); + assert_true(AMitemPos(AMitemsNext(&range_back, 1), &pos)); + assert_int_equal(pos, 5); + + middle = size / 2; + range = AMitemsRewound(&range); + range_back = AMitemsRewound(&range_back); + for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; + item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { + size_t pos1, pos_back1; + assert_true(AMitemPos(item1, &pos1)); + assert_true(AMitemPos(item_back1, &pos_back1)); + if ((count == middle) && (middle & 1)) { + /* The iterators are crossing in the middle. 
*/ + assert_int_equal(pos1, pos_back1); + assert_true(AMitemEqual(item1, item_back1)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); + } else { + assert_int_not_equal(pos1, pos_back1); + } + AMitem* item2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos1, NULL), NULL, NULL); + AMitem* item_back2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos_back1, NULL), NULL, NULL); + /** \note An item returned from an `AMlistGet()` call doesn't include + the index used to retrieve it. */ + assert_int_equal(AMitemIdxType(item2), 0); + assert_int_equal(AMitemIdxType(item_back2), 0); + assert_true(AMitemEqual(item1, item2)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); + assert_true(AMitemEqual(item_back1, item_back2)); + assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); + AMresultFree(AMstackPop(stack_ptr, NULL)); + } + + /* Forward vs. reverse: complete historical map range. */ + range = AMstackItems(stack_ptr, AMlistRange(doc1, list, 0, SIZE_MAX, &v1), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + size = AMitemsSize(&range); + assert_int_equal(size, 8); + range_back = AMitemsReversed(&range); + assert_int_equal(AMitemsSize(&range_back), size); + assert_true(AMitemPos(AMitemsNext(&range, 1), &pos)); + assert_int_equal(pos, 0); + assert_true(AMitemPos(AMitemsNext(&range_back, 1), &pos)); + assert_int_equal(pos, 7); + + middle = size / 2; + range = AMitemsRewound(&range); + range_back = AMitemsRewound(&range_back); + for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; + item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { + size_t pos1, pos_back1; + assert_true(AMitemPos(item1, &pos1)); + assert_true(AMitemPos(item_back1, &pos_back1)); + if ((count == middle) && (middle & 1)) { + /* The iterators are crossing in the middle. 
*/ + assert_int_equal(pos1, pos_back1); + assert_true(AMitemEqual(item1, item_back1)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); + } else { + assert_int_not_equal(pos1, pos_back1); + } + AMitem* item2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos1, &v1), NULL, NULL); + AMitem* item_back2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos_back1, &v1), NULL, NULL); + /** \note An item returned from an `AM...Get()` call doesn't include the + index used to retrieve it. */ + assert_false(AMitemIdxType(item2)); + assert_false(AMitemIdxType(item_back2)); + assert_true(AMitemEqual(item1, item2)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); + assert_true(AMitemEqual(item_back1, item_back2)); + assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); + AMresultFree(AMstackPop(stack_ptr, NULL)); + } + + /* Forward vs. reverse: partial historical map range. */ + range = AMstackItems(stack_ptr, AMlistRange(doc1, list, 2, 7, &v1), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + size = AMitemsSize(&range); + assert_int_equal(size, 5); + range_back = AMitemsReversed(&range); + assert_int_equal(AMitemsSize(&range_back), size); + assert_true(AMitemPos(AMitemsNext(&range, 1), &pos)); + assert_int_equal(pos, 2); + assert_true(AMitemPos(AMitemsNext(&range_back, 1), &pos)); + assert_int_equal(pos, 6); + + middle = size / 2; + range = AMitemsRewound(&range); + range_back = AMitemsRewound(&range_back); + for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; + item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { + size_t pos1, pos_back1; + assert_true(AMitemPos(item1, &pos1)); + assert_true(AMitemPos(item_back1, &pos_back1)); + if ((count == middle) && (middle & 1)) { + /* The iterators are crossing in the middle. 
*/ + assert_int_equal(pos1, pos_back1); + assert_true(AMitemEqual(item1, item_back1)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); + } else { + assert_int_not_equal(pos1, pos_back1); + } + AMitem* item2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos1, &v1), NULL, NULL); + AMitem* item_back2 = AMstackItem(stack_ptr, AMlistGet(doc1, list, pos_back1, &v1), NULL, NULL); + /** \note An item returned from an `AM...Get()` call doesn't include the + index used to retrieve it. */ + assert_false(AMitemIdxType(item2)); + assert_false(AMitemIdxType(item_back2)); + assert_true(AMitemEqual(item1, item2)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); + assert_true(AMitemEqual(item_back1, item_back2)); + assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); + AMresultFree(AMstackPop(stack_ptr, NULL)); + } + + /* List range vs. object range: complete current. */ + range = AMstackItems(stack_ptr, AMlistRange(doc1, list, 0, SIZE_MAX, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + AMitems obj_items = AMstackItems(stack_ptr, AMobjItems(doc1, list, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&range), AMitemsSize(&obj_items)); + + AMitem *item, *obj_item; + for (item = NULL, obj_item = NULL; item && obj_item; + item = AMitemsNext(&range, 1), obj_item = AMitemsNext(&obj_items, 1)) { + /** \note Object iteration doesn't yield any item indices. */ + assert_true(AMitemIdxType(item)); + assert_false(AMitemIdxType(obj_item)); + assert_true(AMitemEqual(item, obj_item)); + assert_true(AMobjIdEqual(AMitemObjId(item), AMitemObjId(obj_item))); + } + + /* List range vs. object range: complete historical. 
*/ + range = AMstackItems(stack_ptr, AMlistRange(doc1, list, 0, SIZE_MAX, &v1), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + obj_items = AMstackItems(stack_ptr, AMobjItems(doc1, list, &v1), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&range), AMitemsSize(&obj_items)); + + for (item = NULL, obj_item = NULL; item && obj_item; + item = AMitemsNext(&range, 1), obj_item = AMitemsNext(&obj_items, 1)) { + /** \note Object iteration doesn't yield any item indices. */ + assert_true(AMitemIdxType(item)); + assert_false(AMitemIdxType(obj_item)); + assert_true(AMitemEqual(item, obj_item)); + assert_true(AMobjIdEqual(AMitemObjId(item), AMitemObjId(obj_item))); + } +} + +/** + * \brief A JavaScript application can introduce NUL (`\0`) characters into a + * list object's string value which will truncate it in a C application. + */ +static void test_get_NUL_string_value(void** state) { + /* + import * as Automerge from "@automerge/automerge"; + let doc = Automerge.init(); + doc = Automerge.change(doc, doc => { + doc[0] = 'o\0ps'; + }); + const bytes = Automerge.save(doc); + console.log("static uint8_t const SAVED_DOC[] = {" + Array.apply([], + bytes).join(", ") + "};"); + */ + static uint8_t const OOPS_VALUE[] = {'o', '\0', 'p', 's'}; + static size_t const OOPS_SIZE = sizeof(OOPS_VALUE) / sizeof(uint8_t); + + static uint8_t const SAVED_DOC[] = { + 133, 111, 74, 131, 224, 28, 197, 17, 0, 113, 1, 16, 246, 137, 63, 193, 255, 181, 76, 79, 129, + 213, 133, 29, 214, 158, 164, 15, 1, 207, 184, 14, 57, 1, 194, 79, 247, 82, 160, 134, 227, 144, + 5, 241, 136, 205, 238, 250, 251, 54, 34, 250, 210, 96, 204, 132, 153, 203, 110, 109, 6, 6, 1, + 2, 3, 2, 19, 2, 35, 2, 64, 2, 86, 2, 8, 21, 3, 33, 2, 35, 2, 52, 1, 66, + 2, 86, 2, 87, 4, 128, 1, 2, 127, 0, 127, 1, 127, 1, 127, 0, 127, 0, 127, 7, 127, + 1, 48, 127, 0, 127, 1, 1, 127, 1, 127, 70, 111, 0, 112, 115, 127, 0, 0}; + static size_t const SAVED_DOC_SIZE = sizeof(SAVED_DOC) / sizeof(uint8_t); + + BaseState* base_state = 
*state; + AMstack** stack_ptr = &base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(SAVED_DOC, SAVED_DOC_SIZE), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + AMbyteSpan str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMlistGet(doc, AM_ROOT, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_not_equal(str.count, strlen(OOPS_VALUE)); + assert_int_equal(str.count, OOPS_SIZE); + assert_memory_equal(str.src, OOPS_VALUE, str.count); +} + +static void test_insert_at_index(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + AMobjId const* const list = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + /* Insert both at the same index. */ + AMstackItem(NULL, AMlistPutUint(doc, list, 0, true, 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutUint(doc, list, 0, true, 1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + + assert_int_equal(AMobjSize(doc, list, NULL), 2); + AMitems const keys = AMstackItems(stack_ptr, AMkeys(doc, list, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&keys), 2); + AMitems const range = + AMstackItems(stack_ptr, AMlistRange(doc, list, 0, SIZE_MAX, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_UINT)); + assert_int_equal(AMitemsSize(&range), 2); +} + +int run_list_tests(void) { + const struct CMUnitTest tests[] = { + cmocka_unit_test(test_AMlistIncrement), + cmocka_unit_test(test_AMlistPut(Bool, insert)), + cmocka_unit_test(test_AMlistPut(Bool, update)), + cmocka_unit_test(test_AMlistPutBytes(insert)), + cmocka_unit_test(test_AMlistPutBytes(update)), + cmocka_unit_test(test_AMlistPut(Counter, insert)), + cmocka_unit_test(test_AMlistPut(Counter, update)), + 
cmocka_unit_test(test_AMlistPut(F64, insert)), + cmocka_unit_test(test_AMlistPut(F64, update)), + cmocka_unit_test(test_AMlistPut(Int, insert)), + cmocka_unit_test(test_AMlistPut(Int, update)), + cmocka_unit_test(test_AMlistPutNull(insert)), + cmocka_unit_test(test_AMlistPutNull(update)), + cmocka_unit_test(test_AMlistPutObject(List, insert)), + cmocka_unit_test(test_AMlistPutObject(List, update)), + cmocka_unit_test(test_AMlistPutObject(Map, insert)), + cmocka_unit_test(test_AMlistPutObject(Map, update)), + cmocka_unit_test(test_AMlistPutObject(Text, insert)), + cmocka_unit_test(test_AMlistPutObject(Text, update)), + cmocka_unit_test(test_AMlistPutStr(insert)), + cmocka_unit_test(test_AMlistPutStr(update)), + cmocka_unit_test(test_AMlistPut(Timestamp, insert)), + cmocka_unit_test(test_AMlistPut(Timestamp, update)), + cmocka_unit_test(test_AMlistPut(Uint, insert)), + cmocka_unit_test(test_AMlistPut(Uint, update)), + cmocka_unit_test_setup_teardown(test_get_range_values, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_get_NUL_string_value, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_insert_at_index, setup_base, teardown_base), + }; + + return cmocka_run_group_tests(tests, setup_doc, teardown_doc); +} diff --git a/rust/automerge-c/test/macro_utils.c b/rust/automerge-c/test/macro_utils.c new file mode 100644 index 00000000..3a546eb5 --- /dev/null +++ b/rust/automerge-c/test/macro_utils.c @@ -0,0 +1,38 @@ +#include + +/* local */ +#include "macro_utils.h" + +AMobjType suffix_to_obj_type(char const* obj_type_label) { + if (!strcmp(obj_type_label, "List")) + return AM_OBJ_TYPE_LIST; + else if (!strcmp(obj_type_label, "Map")) + return AM_OBJ_TYPE_MAP; + else if (!strcmp(obj_type_label, "Text")) + return AM_OBJ_TYPE_TEXT; + else + return AM_OBJ_TYPE_DEFAULT; +} + +AMvalType suffix_to_val_type(char const* suffix) { + if (!strcmp(suffix, "Bool")) + return AM_VAL_TYPE_BOOL; + else if (!strcmp(suffix, "Bytes")) + return 
AM_VAL_TYPE_BYTES; + else if (!strcmp(suffix, "Counter")) + return AM_VAL_TYPE_COUNTER; + else if (!strcmp(suffix, "F64")) + return AM_VAL_TYPE_F64; + else if (!strcmp(suffix, "Int")) + return AM_VAL_TYPE_INT; + else if (!strcmp(suffix, "Null")) + return AM_VAL_TYPE_NULL; + else if (!strcmp(suffix, "Str")) + return AM_VAL_TYPE_STR; + else if (!strcmp(suffix, "Timestamp")) + return AM_VAL_TYPE_TIMESTAMP; + else if (!strcmp(suffix, "Uint")) + return AM_VAL_TYPE_UINT; + else + return AM_VAL_TYPE_DEFAULT; +} diff --git a/rust/automerge-c/test/macro_utils.h b/rust/automerge-c/test/macro_utils.h new file mode 100644 index 00000000..e4c2c5b9 --- /dev/null +++ b/rust/automerge-c/test/macro_utils.h @@ -0,0 +1,23 @@ +#ifndef TESTS_MACRO_UTILS_H +#define TESTS_MACRO_UTILS_H + +/* local */ +#include + +/** + * \brief Gets the object type tag corresponding to an object type suffix. + * + * \param[in] suffix An object type suffix string. + * \return An `AMobjType` enum tag. + */ +AMobjType suffix_to_obj_type(char const* suffix); + +/** + * \brief Gets the value type tag corresponding to a value type suffix. + * + * \param[in] suffix A value type suffix string. + * \return An `AMvalType` enum tag. 
+ */ +AMvalType suffix_to_val_type(char const* suffix); + +#endif /* TESTS_MACRO_UTILS_H */ diff --git a/automerge-c/test/main.c b/rust/automerge-c/test/main.c similarity index 50% rename from automerge-c/test/main.c rename to rust/automerge-c/test/main.c index 09b71bd5..2996c9b3 100644 --- a/automerge-c/test/main.c +++ b/rust/automerge-c/test/main.c @@ -1,6 +1,6 @@ +#include #include #include -#include #include /* third-party */ @@ -8,8 +8,14 @@ extern int run_actor_id_tests(void); +extern int run_byte_span_tests(void); + extern int run_doc_tests(void); +extern int run_enum_string_tests(void); + +extern int run_item_tests(void); + extern int run_list_tests(void); extern int run_map_tests(void); @@ -17,11 +23,6 @@ extern int run_map_tests(void); extern int run_ported_wasm_suite(void); int main(void) { - return ( - run_actor_id_tests() + - run_doc_tests() + - run_list_tests() + - run_map_tests() + - run_ported_wasm_suite() - ); + return (run_actor_id_tests() + run_byte_span_tests() + run_doc_tests() + run_enum_string_tests() + + run_item_tests() + run_list_tests() + run_map_tests() + run_ported_wasm_suite()); } diff --git a/rust/automerge-c/test/map_tests.c b/rust/automerge-c/test/map_tests.c new file mode 100644 index 00000000..2ee2e69a --- /dev/null +++ b/rust/automerge-c/test/map_tests.c @@ -0,0 +1,1582 @@ +#include +#include +#include +#include +#include +#include +#include + +/* third-party */ +#include + +/* local */ +#include +#include +#include +#include "base_state.h" +#include "cmocka_utils.h" +#include "doc_state.h" +#include "macro_utils.h" + +static void test_AMmapIncrement(void** state) { + DocState* doc_state = *state; + AMstack** stack_ptr = &doc_state->base_state->stack; + AMstackItem(NULL, AMmapPutCounter(doc_state->doc, AM_ROOT, AMstr("Counter"), 0), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + int64_t counter; + assert_true(AMitemToCounter(AMstackItem(stack_ptr, AMmapGet(doc_state->doc, AM_ROOT, AMstr("Counter"), NULL), + cmocka_cb, 
AMexpect(AM_VAL_TYPE_COUNTER)), + &counter)); + assert_int_equal(counter, 0); + AMresultFree(AMstackPop(stack_ptr, NULL)); + AMstackItem(NULL, AMmapIncrement(doc_state->doc, AM_ROOT, AMstr("Counter"), 3), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + assert_true(AMitemToCounter(AMstackItem(stack_ptr, AMmapGet(doc_state->doc, AM_ROOT, AMstr("Counter"), NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_COUNTER)), + &counter)); + assert_int_equal(counter, 3); + AMresultFree(AMstackPop(stack_ptr, NULL)); +} + +#define test_AMmapPut(suffix) test_AMmapPut##suffix + +#define static_void_test_AMmapPut(suffix, type, scalar_value) \ + static void test_AMmapPut##suffix(void** state) { \ + DocState* doc_state = *state; \ + AMstack** stack_ptr = &doc_state->base_state->stack; \ + AMstackItem(NULL, AMmapPut##suffix(doc_state->doc, AM_ROOT, AMstr(#suffix), scalar_value), cmocka_cb, \ + AMexpect(AM_VAL_TYPE_VOID)); \ + type value; \ + assert_true(AMitemTo##suffix(AMstackItem(stack_ptr, AMmapGet(doc_state->doc, AM_ROOT, AMstr(#suffix), NULL), \ + cmocka_cb, AMexpect(suffix_to_val_type(#suffix))), \ + &value)); \ + assert_true(value == scalar_value); \ + AMresultFree(AMstackPop(stack_ptr, NULL)); \ + } + +static void test_AMmapPutBytes(void** state) { + static AMbyteSpan const KEY = {"Bytes", 5}; + static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX}; + static size_t const BYTES_SIZE = sizeof(BYTES_VALUE) / sizeof(uint8_t); + + DocState* doc_state = *state; + AMstack** stack_ptr = &doc_state->base_state->stack; + AMstackItem(NULL, AMmapPutBytes(doc_state->doc, AM_ROOT, KEY, AMbytes(BYTES_VALUE, BYTES_SIZE)), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + AMbyteSpan bytes; + assert_true(AMitemToBytes( + AMstackItem(stack_ptr, AMmapGet(doc_state->doc, AM_ROOT, KEY, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), + &bytes)); + assert_int_equal(bytes.count, BYTES_SIZE); + assert_memory_equal(bytes.src, BYTES_VALUE, BYTES_SIZE); + AMresultFree(AMstackPop(stack_ptr, NULL)); +} + 
+static void test_AMmapPutNull(void** state) { + static AMbyteSpan const KEY = {"Null", 4}; + + DocState* doc_state = *state; + AMstack** stack_ptr = &doc_state->base_state->stack; + AMstackItem(NULL, AMmapPutNull(doc_state->doc, AM_ROOT, KEY), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMresult* result = AMstackResult(stack_ptr, AMmapGet(doc_state->doc, AM_ROOT, KEY, NULL), NULL, NULL); + if (AMresultStatus(result) != AM_STATUS_OK) { + fail_msg_view("%s", AMresultError(result)); + } + assert_int_equal(AMresultSize(result), 1); + AMitem* item = AMresultItem(result); + assert_int_equal(AMitemValType(item), AM_VAL_TYPE_NULL); +} + +#define test_AMmapPutObject(label) test_AMmapPutObject_##label + +#define static_void_test_AMmapPutObject(label) \ + static void test_AMmapPutObject_##label(void** state) { \ + DocState* doc_state = *state; \ + AMstack** stack_ptr = &doc_state->base_state->stack; \ + AMobjType const obj_type = suffix_to_obj_type(#label); \ + AMobjId const* const obj_id = \ + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc_state->doc, AM_ROOT, AMstr(#label), obj_type), \ + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); \ + assert_non_null(obj_id); \ + assert_int_equal(AMobjObjType(doc_state->doc, obj_id), obj_type); \ + assert_int_equal(AMobjSize(doc_state->doc, obj_id, NULL), 0); \ + AMresultFree(AMstackPop(stack_ptr, NULL)); \ + } + +static void test_AMmapPutStr(void** state) { + DocState* doc_state = *state; + AMstack** stack_ptr = &doc_state->base_state->stack; + AMstackItem(NULL, AMmapPutStr(doc_state->doc, AM_ROOT, AMstr("Str"), AMstr("Hello, world!")), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + AMbyteSpan str; + assert_true(AMitemToStr(AMstackItem(stack_ptr, AMmapGet(doc_state->doc, AM_ROOT, AMstr("Str"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)), + &str)); + assert_int_equal(str.count, strlen("Hello, world!")); + assert_memory_equal(str.src, "Hello, world!", str.count); + AMresultFree(AMstackPop(stack_ptr, NULL)); +} + 
+static_void_test_AMmapPut(Bool, bool, true); + +static_void_test_AMmapPut(Counter, int64_t, INT64_MAX); + +static_void_test_AMmapPut(F64, double, DBL_MAX); + +static_void_test_AMmapPut(Int, int64_t, INT64_MAX); + +static_void_test_AMmapPutObject(List); + +static_void_test_AMmapPutObject(Map); + +static_void_test_AMmapPutObject(Text); + +static_void_test_AMmapPut(Timestamp, int64_t, INT64_MAX); + +static_void_test_AMmapPut(Uint, int64_t, UINT64_MAX); + +/** + * \brief A JavaScript application can introduce NUL (`\0`) characters into + * a map object's key which will truncate it in a C application. + */ +static void test_get_NUL_key(void** state) { + /* + import * as Automerge from "@automerge/automerge"; + let doc = Automerge.init(); + doc = Automerge.change(doc, doc => { + doc['o\0ps'] = 'oops'; + }); + const bytes = Automerge.save(doc); + console.log("static uint8_t const SAVED_DOC[] = {" + Array.apply([], + bytes).join(", ") + "};"); + */ + static uint8_t const OOPS_SRC[] = {'o', '\0', 'p', 's'}; + static AMbyteSpan const OOPS_KEY = {.src = OOPS_SRC, .count = sizeof(OOPS_SRC) / sizeof(uint8_t)}; + + static uint8_t const SAVED_DOC[] = { + 133, 111, 74, 131, 233, 150, 60, 244, 0, 116, 1, 16, 223, 253, 146, 193, 58, 122, 66, 134, 151, + 225, 210, 51, 58, 86, 247, 8, 1, 49, 118, 234, 228, 42, 116, 171, 13, 164, 99, 244, 27, 19, + 150, 44, 201, 136, 222, 219, 90, 246, 226, 123, 77, 120, 157, 155, 55, 182, 2, 178, 64, 6, 1, + 2, 3, 2, 19, 2, 35, 2, 64, 2, 86, 2, 8, 21, 6, 33, 2, 35, 2, 52, 1, 66, + 2, 86, 2, 87, 4, 128, 1, 2, 127, 0, 127, 1, 127, 1, 127, 0, 127, 0, 127, 7, 127, + 4, 111, 0, 112, 115, 127, 0, 127, 1, 1, 127, 1, 127, 70, 111, 111, 112, 115, 127, 0, 0}; + static size_t const SAVED_DOC_SIZE = sizeof(SAVED_DOC) / sizeof(uint8_t); + + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(SAVED_DOC, SAVED_DOC_SIZE), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + 
AMbyteSpan str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, OOPS_KEY, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_not_equal(OOPS_KEY.count, strlen(OOPS_KEY.src)); + assert_int_equal(str.count, strlen("oops")); + assert_memory_equal(str.src, "oops", str.count); +} + +/** + * \brief A JavaScript application can introduce NUL (`\0`) characters into a + * map object's string value which will truncate it in a C application. + */ +static void test_get_NUL_string_value(void** state) { + /* + import * as Automerge from "@automerge/automerge"; + let doc = Automerge.init(); + doc = Automerge.change(doc, doc => { + doc.oops = 'o\0ps'; + }); + const bytes = Automerge.save(doc); + console.log("static uint8_t const SAVED_DOC[] = {" + Array.apply([], + bytes).join(", ") + "};"); + */ + static uint8_t const OOPS_VALUE[] = {'o', '\0', 'p', 's'}; + static size_t const OOPS_SIZE = sizeof(OOPS_VALUE) / sizeof(uint8_t); + + static uint8_t const SAVED_DOC[] = { + 133, 111, 74, 131, 63, 94, 151, 29, 0, 116, 1, 16, 156, 159, 189, 12, 125, 55, 71, 154, 136, + 104, 237, 186, 45, 224, 32, 22, 1, 36, 163, 164, 222, 81, 42, 1, 247, 231, 156, 54, 222, 76, + 6, 109, 18, 172, 75, 36, 118, 120, 68, 73, 87, 186, 230, 127, 68, 19, 81, 149, 185, 6, 1, + 2, 3, 2, 19, 2, 35, 2, 64, 2, 86, 2, 8, 21, 6, 33, 2, 35, 2, 52, 1, 66, + 2, 86, 2, 87, 4, 128, 1, 2, 127, 0, 127, 1, 127, 1, 127, 0, 127, 0, 127, 7, 127, + 4, 111, 111, 112, 115, 127, 0, 127, 1, 1, 127, 1, 127, 70, 111, 0, 112, 115, 127, 0, 0}; + static size_t const SAVED_DOC_SIZE = sizeof(SAVED_DOC) / sizeof(uint8_t); + + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(SAVED_DOC, SAVED_DOC_SIZE), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + AMbyteSpan str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("oops"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), + 
&str)); + assert_int_not_equal(str.count, strlen(OOPS_VALUE)); + assert_int_equal(str.count, OOPS_SIZE); + assert_memory_equal(str.src, OOPS_VALUE, str.count); +} + +static void test_range_iter_map(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("a"), 3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("b"), 4), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("c"), 5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("d"), 6), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("a"), 7), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("a"), 8), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("d"), 9), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMactorId const* actor_id; + assert_true(AMitemToActorId(AMstackItem(stack_ptr, AMgetActorId(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMitems map_items = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_UINT)); + assert_int_equal(AMitemsSize(&map_items), 4); + + /* ["b"-"d") */ + AMitems range = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr("b"), AMstr("d"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_UINT)); + /* First */ + AMitem* next = 
AMitemsNext(&range, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "b", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + uint64_t uint; + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 4); + AMobjId const* next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Second */ + next = AMitemsNext(&range, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "c", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 5); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Third */ + assert_null(AMitemsNext(&range, 1)); + + /* ["b"-) */ + range = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr("b"), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_UINT)); + /* First */ + next = AMitemsNext(&range, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "b", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 4); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + 
assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Second */ + next = AMitemsNext(&range, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "c", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 5); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Third */ + next = AMitemsNext(&range, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "d", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 9); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 7); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Fourth */ + assert_null(AMitemsNext(&range, 1)); + + /* [-"d") */ + range = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr("d"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_UINT)); + /* First */ + next = AMitemsNext(&range, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "a", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 8); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 6); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + 
assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Second */ + next = AMitemsNext(&range, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "b", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 4); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Third */ + next = AMitemsNext(&range, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "c", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 5); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Fourth */ + assert_null(AMitemsNext(&range, 1)); + + /* ["a"-) */ + range = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr("a"), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_UINT)); + /* First */ + next = AMitemsNext(&range, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "a", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 8); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 6); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + 
assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Second */ + next = AMitemsNext(&range, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "b", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 4); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Third */ + next = AMitemsNext(&range, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "c", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 5); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Fourth */ + next = AMitemsNext(&range, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "d", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_UINT); + assert_true(AMitemToUint(next, &uint)); + assert_int_equal(uint, 9); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 7); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Fifth */ + assert_null(AMitemsNext(&range, 1)); +} + +static void test_map_range_back_and_forth_single(void** state) { + BaseState* base_state = *state; + AMstack** 
stack_ptr = &base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + AMactorId const* actor_id; + assert_true(AMitemToActorId(AMstackItem(stack_ptr, AMgetActorId(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("1"), AMstr("a")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("2"), AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("3"), AMstr("c")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + + /* Forward, back, back. */ + AMitems range_all = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + /* First */ + AMitem* next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + AMbyteSpan str; + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + AMobjId const* next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Third */ + AMitems range_back_all = AMitemsReversed(&range_all); + range_back_all = AMitemsRewound(&range_back_all); + AMitem* next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + 
AMbyteSpan str_back; + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "c", str_back.count); + AMobjId const* next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + /* Second */ + next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "b", str_back.count); + next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + + /* Forward, back, forward. 
*/ + range_all = AMitemsRewound(&range_all); + range_back_all = AMitemsRewound(&range_back_all); + /* First */ + next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Third */ + next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "c", str_back.count); + next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + /* Second */ + next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + 
assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + + /* Forward, forward, forward. */ + range_all = AMitemsRewound(&range_all); + /* First */ + next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Second */ + next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Third */ + next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "c", str.count); + next_obj_id = AMitemObjId(next); + 
assert_int_equal(AMobjIdCounter(next_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Forward stop */ + assert_null(AMitemsNext(&range_all, 1)); + + /* Back, back, back. */ + range_back_all = AMitemsRewound(&range_back_all); + /* Third */ + next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "c", str_back.count); + next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + /* Second */ + next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "b", str_back.count); + next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + /* First */ + next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + 
assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "a", str_back.count); + next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + /* Back stop */ + assert_null(AMitemsNext(&range_back_all, 1)); +} + +static void test_map_range_back_and_forth_double(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + AMdoc* doc1; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); + AMactorId const* actor_id1; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromBytes("\0", 1), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id1)); + AMstackItem(NULL, AMsetActorId(doc1, actor_id1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("1"), AMstr("a")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("2"), AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("3"), AMstr("c")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + + /* The second actor should win all conflicts here. 
*/ + AMdoc* doc2; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); + AMactorId const* actor_id2; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromBytes("\1", 1), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id2)); + AMstackItem(NULL, AMsetActorId(doc2, actor_id2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("1"), AMstr("aa")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("2"), AMstr("bb")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("3"), AMstr("cc")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + + AMstackItem(NULL, AMmerge(doc1, doc2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + + /* Forward, back, back. */ + AMitems range_all = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + /* First */ + AMitem* next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + AMbyteSpan str; + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "aa", str.count); + AMobjId const* next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + /* Third */ + AMitems range_back_all = AMitemsReversed(&range_all); + range_back_all = AMitemsRewound(&range_back_all); + AMitem* next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + 
assert_true(AMitemKey(next_back, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + AMbyteSpan str_back; + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + assert_memory_equal(str_back.src, "cc", str_back.count); + AMobjId const* next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + /* Second */ + next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + assert_memory_equal(str_back.src, "bb", str_back.count); + next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + + /* Forward, back, forward. 
*/ + range_all = AMitemsRewound(&range_all); + range_back_all = AMitemsRewound(&range_back_all); + /* First */ + next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "aa", str.count); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + /* Third */ + next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + assert_memory_equal(str_back.src, "cc", str_back.count); + next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + /* Second */ + next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "bb", str.count); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + 
assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + + /* Forward, forward, forward. */ + range_all = AMitemsRewound(&range_all); + /* First */ + next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "aa", str.count); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + /* Second */ + next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "bb", str.count); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + /* Third */ + next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "cc", str.count); + next_obj_id = AMitemObjId(next); + 
assert_int_equal(AMobjIdCounter(next_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + /* Forward stop */ + assert_null(AMitemsNext(&range_all, 1)); + + /* Back, back, back. */ + range_back_all = AMitemsRewound(&range_back_all); + /* Third */ + next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + assert_memory_equal(str_back.src, "cc", str_back.count); + next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + /* Second */ + next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + assert_memory_equal(str_back.src, "bb", str_back.count); + next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + /* First */ + next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + 
assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + assert_memory_equal(str_back.src, "aa", str_back.count); + next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + /* Back stop */ + assert_null(AMitemsNext(&range_back_all, 1)); +} + +static void test_map_range_at_back_and_forth_single(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + AMactorId const* actor_id; + assert_true(AMitemToActorId(AMstackItem(stack_ptr, AMgetActorId(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("1"), AMstr("a")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("2"), AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("3"), AMstr("c")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + + AMitems const heads = AMstackItems(stack_ptr, AMgetHeads(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + + /* Forward, back, back. 
*/ + AMitems range_all = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + /* First */ + AMitem* next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + AMbyteSpan str; + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + AMobjId const* next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Third */ + AMitems range_back_all = AMitemsReversed(&range_all); + range_back_all = AMitemsRewound(&range_back_all); + AMitem* next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + AMbyteSpan str_back; + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "c", str_back.count); + AMobjId const* next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + /* Second */ + next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, 
"2", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "b", str_back.count); + next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + + /* Forward, back, forward. */ + range_all = AMitemsRewound(&range_all); + range_back_all = AMitemsRewound(&range_back_all); + /* First */ + next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Third */ + next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "c", str_back.count); + next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + /* Second */ + next = 
AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + + /* Forward, forward, forward. */ + range_all = AMitemsRewound(&range_all); + /* First */ + next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Second */ + next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + 
/* Third */ + next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "c", str.count); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 0); + /* Forward stop */ + assert_null(AMitemsNext(&range_all, 1)); + + /* Back, back, back. */ + range_back_all = AMitemsRewound(&range_back_all); + /* Third */ + next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "c", str_back.count); + next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + /* Second */ + next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "b", 
str_back.count); + next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + /* First */ + next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 1); + assert_memory_equal(str_back.src, "a", str_back.count); + next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 0); + /* Back stop */ + assert_null(AMitemsNext(&range_back_all, 1)); +} + +static void test_map_range_at_back_and_forth_double(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + AMdoc* doc1; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); + AMactorId const* actor_id1; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromBytes("\0", 1), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id1)); + AMstackItem(NULL, AMsetActorId(doc1, actor_id1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("1"), AMstr("a")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("2"), AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("3"), AMstr("c")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + + /* The second actor should win all conflicts here. 
*/ + AMdoc* doc2; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); + AMactorId const* actor_id2; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromBytes("\1", 1), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id2)); + AMstackItem(NULL, AMsetActorId(doc2, actor_id2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("1"), AMstr("aa")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("2"), AMstr("bb")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("3"), AMstr("cc")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + + AMstackItem(NULL, AMmerge(doc1, doc2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMitems const heads = AMstackItems(stack_ptr, AMgetHeads(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + + /* Forward, back, back. */ + AMitems range_all = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + /* First */ + AMitem* next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + AMbyteSpan str; + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "aa", str.count); + AMobjId const* next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + /* Third */ + AMitems range_back_all = AMitemsReversed(&range_all); + range_back_all = AMitemsRewound(&range_back_all); + AMitem* next_back = AMitemsNext(&range_back_all, 
1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + AMbyteSpan str_back; + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + assert_memory_equal(str_back.src, "cc", str_back.count); + AMobjId const* next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + /* Second */ + next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + assert_memory_equal(str_back.src, "bb", str_back.count); + next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + + /* Forward, back, forward. 
*/ + range_all = AMitemsRewound(&range_all); + range_back_all = AMitemsRewound(&range_back_all); + /* First */ + next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "aa", str.count); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + /* Third */ + next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + assert_memory_equal(str_back.src, "cc", str_back.count); + next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + /* Second */ + next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "bb", str.count); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + 
assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + + /* Forward, forward, forward. */ + range_all = AMitemsRewound(&range_all); + /* First */ + next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "aa", str.count); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + /* Second */ + next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "bb", str.count); + next_obj_id = AMitemObjId(next); + assert_int_equal(AMobjIdCounter(next_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + /* Third */ + next = AMitemsNext(&range_all, 1); + assert_non_null(next); + assert_int_equal(AMitemIdxType(next), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); + assert_int_equal(AMitemValType(next), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next, &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "cc", str.count); + next_obj_id = AMitemObjId(next); + 
assert_int_equal(AMobjIdCounter(next_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_obj_id), 1); + /* Forward stop */ + assert_null(AMitemsNext(&range_all, 1)); + + /* Back, back, back. */ + range_back_all = AMitemsRewound(&range_back_all); + /* Third */ + next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "3", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + assert_memory_equal(str_back.src, "cc", str_back.count); + next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 3); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + /* Second */ + next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "2", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + assert_memory_equal(str_back.src, "bb", str_back.count); + next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 2); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + /* First */ + next_back = AMitemsNext(&range_back_all, 1); + assert_non_null(next_back); + assert_int_equal(AMitemIdxType(next_back), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(next_back, &key)); + 
assert_int_equal(key.count, 1); + assert_memory_equal(key.src, "1", key.count); + assert_int_equal(AMitemValType(next_back), AM_VAL_TYPE_STR); + assert_true(AMitemToStr(next_back, &str_back)); + assert_int_equal(str_back.count, 2); + assert_memory_equal(str_back.src, "aa", str_back.count); + next_back_obj_id = AMitemObjId(next_back); + assert_int_equal(AMobjIdCounter(next_back_obj_id), 1); + assert_int_equal(AMactorIdCmp(AMobjIdActorId(next_back_obj_id), actor_id2), 0); + assert_int_equal(AMobjIdIndex(next_back_obj_id), 1); + /* Back stop */ + assert_null(AMitemsNext(&range_back_all, 1)); +} + +static void test_get_range_values(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + AMdoc* doc1; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("aa"), AMstr("aaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("bb"), AMstr("bbb")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("cc"), AMstr("ccc")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("dd"), AMstr("ddd")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc1, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + + AMitems const v1 = AMstackItems(stack_ptr, AMgetHeads(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMdoc* doc2; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMfork(doc1, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); + + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("cc"), AMstr("ccc V2")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc1, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + + AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("cc"), AMstr("ccc V3")), cmocka_cb, 
AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(doc2, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + + AMstackItem(NULL, AMmerge(doc1, doc2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + + /* Forward vs. reverse: complete current map range. */ + AMitems range = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + size_t size = AMitemsSize(&range); + assert_int_equal(size, 4); + AMitems range_back = AMitemsReversed(&range); + assert_int_equal(AMitemsSize(&range_back), size); + AMbyteSpan key; + assert_true(AMitemKey(AMitemsNext(&range, 1), &key)); + assert_memory_equal(key.src, "aa", key.count); + assert_true(AMitemKey(AMitemsNext(&range_back, 1), &key)); + assert_memory_equal(key.src, "dd", key.count); + + AMitem *item1, *item_back1; + size_t count, middle = size / 2; + range = AMitemsRewound(&range); + range_back = AMitemsRewound(&range_back); + for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; + item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { + AMbyteSpan key1, key_back1; + assert_true(AMitemKey(item1, &key1)); + assert_true(AMitemKey(item_back1, &key_back1)); + if ((count == middle) && (middle & 1)) { + /* The iterators are crossing in the middle. */ + assert_int_equal(AMstrCmp(key1, key_back1), 0); + assert_true(AMitemEqual(item1, item_back1)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); + } else { + assert_int_not_equal(AMstrCmp(key1, key_back1), 0); + } + AMitem* item2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key1, NULL), NULL, NULL); + AMitem* item_back2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key_back1, NULL), NULL, NULL); + /** \note An item returned from an `AM...Get()` call doesn't include the + index used to retrieve it. 
*/ + assert_false(AMitemIdxType(item2)); + assert_false(AMitemIdxType(item_back2)); + assert_true(AMitemEqual(item1, item2)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); + assert_true(AMitemEqual(item1, item2)); + assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); + AMresultFree(AMstackPop(stack_ptr, NULL)); + } + + /* Forward vs. reverse: partial current map range. */ + range = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr("aa"), AMstr("dd"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + size = AMitemsSize(&range); + assert_int_equal(size, 3); + range_back = AMitemsReversed(&range); + assert_int_equal(AMitemsSize(&range_back), size); + assert_true(AMitemKey(AMitemsNext(&range, 1), &key)); + assert_memory_equal(key.src, "aa", key.count); + assert_true(AMitemKey(AMitemsNext(&range_back, 1), &key)); + assert_memory_equal(key.src, "cc", key.count); + + middle = size / 2; + range = AMitemsRewound(&range); + range_back = AMitemsRewound(&range_back); + for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; + item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { + AMbyteSpan key1, key_back1; + assert_true(AMitemKey(item1, &key1)); + assert_true(AMitemKey(item_back1, &key_back1)); + if ((count == middle) && (middle & 1)) { + /* The iterators are crossing in the middle. */ + assert_int_equal(AMstrCmp(key1, key_back1), 0); + assert_true(AMitemEqual(item1, item_back1)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); + } else { + assert_int_not_equal(AMstrCmp(key1, key_back1), 0); + } + AMitem* item2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key1, NULL), NULL, NULL); + AMitem* item_back2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key_back1, NULL), NULL, NULL); + /** \note An item returned from an `AM...Get()` call doesn't include the + index used to retrieve it. 
*/ + assert_false(AMitemIdxType(item2)); + assert_false(AMitemIdxType(item_back2)); + assert_true(AMitemEqual(item1, item2)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); + assert_true(AMitemEqual(item_back1, item_back2)); + assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); + AMresultFree(AMstackPop(stack_ptr, NULL)); + } + + /* Forward vs. reverse: complete historical map range. */ + range = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), &v1), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + size = AMitemsSize(&range); + assert_int_equal(size, 4); + range_back = AMitemsReversed(&range); + assert_int_equal(AMitemsSize(&range_back), size); + assert_true(AMitemKey(AMitemsNext(&range, 1), &key)); + assert_memory_equal(key.src, "aa", key.count); + assert_true(AMitemKey(AMitemsNext(&range_back, 1), &key)); + assert_memory_equal(key.src, "dd", key.count); + + middle = size / 2; + range = AMitemsRewound(&range); + range_back = AMitemsRewound(&range_back); + for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; + item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { + AMbyteSpan key1, key_back1; + assert_true(AMitemKey(item1, &key1)); + assert_true(AMitemKey(item_back1, &key_back1)); + if ((count == middle) && (middle & 1)) { + /* The iterators are crossing in the middle. */ + assert_int_equal(AMstrCmp(key1, key_back1), 0); + assert_true(AMitemEqual(item1, item_back1)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); + } else { + assert_int_not_equal(AMstrCmp(key1, key_back1), 0); + } + AMitem* item2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key1, &v1), NULL, NULL); + AMitem* item_back2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key_back1, &v1), NULL, NULL); + /** \note An item returned from an `AM...Get()` call doesn't include the + index used to retrieve it. 
*/ + assert_false(AMitemIdxType(item2)); + assert_false(AMitemIdxType(item_back2)); + assert_true(AMitemEqual(item1, item2)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); + assert_true(AMitemEqual(item_back1, item_back2)); + assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); + AMresultFree(AMstackPop(stack_ptr, NULL)); + } + + /* Forward vs. reverse: partial historical map range. */ + range = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr("bb"), AMstr(NULL), &v1), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + size = AMitemsSize(&range); + assert_int_equal(size, 3); + range_back = AMitemsReversed(&range); + assert_int_equal(AMitemsSize(&range_back), size); + assert_true(AMitemKey(AMitemsNext(&range, 1), &key)); + assert_memory_equal(key.src, "bb", key.count); + assert_true(AMitemKey(AMitemsNext(&range_back, 1), &key)); + assert_memory_equal(key.src, "dd", key.count); + + middle = size / 2; + range = AMitemsRewound(&range); + range_back = AMitemsRewound(&range_back); + for (item1 = NULL, item_back1 = NULL, count = 0; item1 && item_back1; + item1 = AMitemsNext(&range, 1), item_back1 = AMitemsNext(&range_back, 1), ++count) { + AMbyteSpan key1, key_back1; + assert_true(AMitemKey(item1, &key1)); + assert_true(AMitemKey(item_back1, &key_back1)); + if ((count == middle) && (middle & 1)) { + /* The iterators are crossing in the middle. */ + assert_int_equal(AMstrCmp(key1, key_back1), 0); + assert_true(AMitemEqual(item1, item_back1)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item_back1))); + } else { + assert_int_not_equal(AMstrCmp(key1, key_back1), 0); + } + AMitem* item2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key1, &v1), NULL, NULL); + AMitem* item_back2 = AMstackItem(stack_ptr, AMmapGet(doc1, AM_ROOT, key_back1, &v1), NULL, NULL); + /** \note An item returned from an `AM...Get()` call doesn't include the + index used to retrieve it. 
*/ + assert_false(AMitemIdxType(item2)); + assert_false(AMitemIdxType(item_back2)); + assert_true(AMitemEqual(item1, item2)); + assert_true(AMobjIdEqual(AMitemObjId(item1), AMitemObjId(item2))); + assert_true(AMitemEqual(item_back1, item_back2)); + assert_true(AMobjIdEqual(AMitemObjId(item_back1), AMitemObjId(item_back2))); + AMresultFree(AMstackPop(stack_ptr, NULL)); + } + + /* Map range vs. object range: complete current. */ + range = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + AMitems obj_items = AMstackItems(stack_ptr, AMobjItems(doc1, AM_ROOT, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&range), AMitemsSize(&obj_items)); + + AMitem *item, *obj_item; + for (item = NULL, obj_item = NULL; item && obj_item; + item = AMitemsNext(&range, 1), obj_item = AMitemsNext(&obj_items, 1)) { + /** \note Object iteration doesn't yield any item indices. */ + assert_true(AMitemIdxType(item)); + assert_false(AMitemIdxType(obj_item)); + assert_true(AMitemEqual(item, obj_item)); + assert_true(AMobjIdEqual(AMitemObjId(item), AMitemObjId(obj_item))); + } + + /* Map range vs. object range: complete historical. */ + range = AMstackItems(stack_ptr, AMmapRange(doc1, AM_ROOT, AMstr(NULL), AMstr(NULL), &v1), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + obj_items = AMstackItems(stack_ptr, AMobjItems(doc1, AM_ROOT, &v1), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&range), AMitemsSize(&obj_items)); + + for (item = NULL, obj_item = NULL; item && obj_item; + item = AMitemsNext(&range, 1), obj_item = AMitemsNext(&obj_items, 1)) { + /** \note Object iteration doesn't yield any item indices. 
*/ + assert_true(AMitemIdxType(item)); + assert_false(AMitemIdxType(obj_item)); + assert_true(AMitemEqual(item, obj_item)); + assert_true(AMobjIdEqual(AMitemObjId(item), AMitemObjId(obj_item))); + } +} + +int run_map_tests(void) { + const struct CMUnitTest tests[] = { + cmocka_unit_test(test_AMmapIncrement), + cmocka_unit_test(test_AMmapPut(Bool)), + cmocka_unit_test(test_AMmapPutBytes), + cmocka_unit_test(test_AMmapPut(Counter)), + cmocka_unit_test(test_AMmapPut(F64)), + cmocka_unit_test(test_AMmapPut(Int)), + cmocka_unit_test(test_AMmapPutNull), + cmocka_unit_test(test_AMmapPutObject(List)), + cmocka_unit_test(test_AMmapPutObject(Map)), + cmocka_unit_test(test_AMmapPutObject(Text)), + cmocka_unit_test(test_AMmapPutStr), + cmocka_unit_test(test_AMmapPut(Timestamp)), + cmocka_unit_test(test_AMmapPut(Uint)), + cmocka_unit_test_setup_teardown(test_get_NUL_key, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_get_NUL_string_value, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_range_iter_map, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_map_range_back_and_forth_single, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_map_range_back_and_forth_double, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_map_range_at_back_and_forth_single, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_map_range_at_back_and_forth_double, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_get_range_values, setup_base, teardown_base), + }; + + return cmocka_run_group_tests(tests, setup_doc, teardown_doc); +} diff --git a/rust/automerge-c/test/ported_wasm/basic_tests.c b/rust/automerge-c/test/ported_wasm/basic_tests.c new file mode 100644 index 00000000..b83ff132 --- /dev/null +++ b/rust/automerge-c/test/ported_wasm/basic_tests.c @@ -0,0 +1,1642 @@ +#include +#include +#include +#include +#include +#include +#include + +/* third-party */ +#include + +/* local */ 
+#include +#include +#include +#include "../base_state.h" +#include "../cmocka_utils.h" + +/** + * \brief default import init() should return a promise + */ +static void test_default_import_init_should_return_a_promise(void** state); + +/** + * \brief should create, clone and free + */ +static void test_create_clone_and_free(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const doc1 = create() */ + AMdoc* doc1; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); + /* const doc2 = doc1.clone() */ + AMdoc* doc2; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMclone(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); +} + +/** + * \brief should be able to start and commit + */ +static void test_start_and_commit(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const doc = create() */ + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + /* doc.commit() */ + AMstackItems(stack_ptr, AMemptyChange(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); +} + +/** + * \brief getting a nonexistent prop does not throw an error + */ +static void test_getting_a_nonexistent_prop_does_not_throw_an_error(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const doc = create() */ + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + /* const root = "_root" */ + /* const result = doc.getWithType(root, "hello") */ + /* assert.deepEqual(result, undefined) */ + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("hello"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); +} + +/** + * \brief should be able to set and get a simple value + */ +static void test_should_be_able_to_set_and_get_a_simple_value(void** state) 
{ + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const doc: Automerge = create("aabbcc") */ + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aabbcc")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + /* const root = "_root" */ + /* let result */ + /* */ + /* doc.put(root, "hello", "world") */ + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("hello"), AMstr("world")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc.put(root, "number1", 5, "uint") */ + AMstackItem(NULL, AMmapPutUint(doc, AM_ROOT, AMstr("number1"), 5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc.put(root, "number2", 5) */ + AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("number2"), 5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc.put(root, "number3", 5.5) */ + AMstackItem(NULL, AMmapPutF64(doc, AM_ROOT, AMstr("number3"), 5.5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc.put(root, "number4", 5.5, "f64") */ + AMstackItem(NULL, AMmapPutF64(doc, AM_ROOT, AMstr("number4"), 5.5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc.put(root, "number5", 5.5, "int") */ + AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("number5"), 5.5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc.put(root, "bool", true) */ + AMstackItem(NULL, AMmapPutBool(doc, AM_ROOT, AMstr("bool"), true), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc.put(root, "time1", 1000, "timestamp") */ + AMstackItem(NULL, AMmapPutTimestamp(doc, AM_ROOT, AMstr("time1"), 1000), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc.put(root, "time2", new Date(1001)) */ + AMstackItem(NULL, AMmapPutTimestamp(doc, AM_ROOT, AMstr("time2"), 1001), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc.putObject(root, "list", []); */ + AMstackItem(NULL, AMmapPutObject(doc, AM_ROOT, AMstr("list"), 
AM_OBJ_TYPE_LIST), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + /* doc.put(root, "null", null) */ + AMstackItem(NULL, AMmapPutNull(doc, AM_ROOT, AMstr("null")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* */ + /* result = doc.getWithType(root, "hello") */ + /* assert.deepEqual(result, ["str", "world"]) */ + /* assert.deepEqual(doc.get("/", "hello"), "world") */ + AMbyteSpan str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("hello"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), + &str)); + assert_int_equal(str.count, strlen("world")); + assert_memory_equal(str.src, "world", str.count); + /* assert.deepEqual(doc.get("/", "hello"), "world") */ + /* */ + /* result = doc.getWithType(root, "number1") */ + /* assert.deepEqual(result, ["uint", 5]) */ + uint64_t uint; + assert_true(AMitemToUint( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("number1"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_UINT)), + &uint)); + assert_int_equal(uint, 5); + /* assert.deepEqual(doc.get("/", "number1"), 5) */ + /* */ + /* result = doc.getWithType(root, "number2") */ + /* assert.deepEqual(result, ["int", 5]) */ + int64_t int_; + assert_true(AMitemToInt( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("number2"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_INT)), + &int_)); + assert_int_equal(int_, 5); + /* */ + /* result = doc.getWithType(root, "number3") */ + /* assert.deepEqual(result, ["f64", 5.5]) */ + double f64; + assert_true(AMitemToF64( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("number3"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_F64)), + &f64)); + assert_float_equal(f64, 5.5, DBL_EPSILON); + /* */ + /* result = doc.getWithType(root, "number4") */ + /* assert.deepEqual(result, ["f64", 5.5]) */ + assert_true(AMitemToF64( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("number4"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_F64)), + &f64)); + assert_float_equal(f64, 5.5, DBL_EPSILON); + /* */ + /* result = doc.getWithType(root, 
"number5") */ + /* assert.deepEqual(result, ["int", 5]) */ + assert_true(AMitemToInt( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("number5"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_INT)), + &int_)); + assert_int_equal(int_, 5); + /* */ + /* result = doc.getWithType(root, "bool") */ + /* assert.deepEqual(result, ["boolean", true]) */ + bool boolean; + assert_true(AMitemToBool( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("bool"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_BOOL)), + &boolean)); + assert_true(boolean); + /* */ + /* doc.put(root, "bool", false, "boolean") */ + AMstackItem(NULL, AMmapPutBool(doc, AM_ROOT, AMstr("bool"), false), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* */ + /* result = doc.getWithType(root, "bool") */ + /* assert.deepEqual(result, ["boolean", false]) */ + assert_true(AMitemToBool( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("bool"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_BOOL)), + &boolean)); + assert_false(boolean); + /* */ + /* result = doc.getWithType(root, "time1") */ + /* assert.deepEqual(result, ["timestamp", new Date(1000)]) */ + int64_t timestamp; + assert_true(AMitemToTimestamp(AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("time1"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_TIMESTAMP)), + ×tamp)); + assert_int_equal(timestamp, 1000); + /* */ + /* result = doc.getWithType(root, "time2") */ + /* assert.deepEqual(result, ["timestamp", new Date(1001)]) */ + assert_true(AMitemToTimestamp(AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("time2"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_TIMESTAMP)), + ×tamp)); + assert_int_equal(timestamp, 1001); + /* */ + /* result = doc.getWithType(root, "list") */ + /* assert.deepEqual(result, ["list", "10@aabbcc"]); */ + AMobjId const* const list = AMitemObjId( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("list"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + assert_int_equal(AMobjIdCounter(list), 10); + str = 
AMactorIdStr(AMobjIdActorId(list)); + assert_int_equal(str.count, strlen("aabbcc")); + assert_memory_equal(str.src, "aabbcc", str.count); + /* */ + /* result = doc.getWithType(root, "null") */ + /* assert.deepEqual(result, ["null", null]); */ + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("null"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_NULL)); +} + +/** + * \brief should be able to use bytes + */ +static void test_should_be_able_to_use_bytes(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const doc = create() */ + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + /* doc.put("_root", "data1", new Uint8Array([10, 11, 12])); */ + static uint8_t const DATA1[] = {10, 11, 12}; + AMstackItem(NULL, AMmapPutBytes(doc, AM_ROOT, AMstr("data1"), AMbytes(DATA1, sizeof(DATA1))), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + /* doc.put("_root", "data2", new Uint8Array([13, 14, 15]), "bytes"); */ + static uint8_t const DATA2[] = {13, 14, 15}; + AMstackItem(NULL, AMmapPutBytes(doc, AM_ROOT, AMstr("data2"), AMbytes(DATA2, sizeof(DATA2))), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + /* const value1 = doc.getWithType("_root", "data1") */ + AMbyteSpan value1; + assert_true(AMitemToBytes( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("data1"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), + &value1)); + /* assert.deepEqual(value1, ["bytes", new Uint8Array([10, 11, 12])]); */ + assert_int_equal(value1.count, sizeof(DATA1)); + assert_memory_equal(value1.src, DATA1, sizeof(DATA1)); + /* const value2 = doc.getWithType("_root", "data2") */ + AMbyteSpan value2; + assert_true(AMitemToBytes( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("data2"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), + &value2)); + /* assert.deepEqual(value2, ["bytes", new Uint8Array([13, 14, 15])]); */ + assert_int_equal(value2.count, sizeof(DATA2)); + 
assert_memory_equal(value2.src, DATA2, sizeof(DATA2)); +} + +/** + * \brief should be able to make subobjects + */ +static void test_should_be_able_to_make_subobjects(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const doc = create() */ + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + /* const root = "_root" */ + /* let result */ + /* */ + /* const submap = doc.putObject(root, "submap", {}) */ + AMobjId const* const submap = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("submap"), AM_OBJ_TYPE_MAP), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + /* doc.put(submap, "number", 6, "uint") */ + AMstackItem(NULL, AMmapPutUint(doc, submap, AMstr("number"), 6), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* assert.strictEqual(doc.pendingOps(), 2) */ + assert_int_equal(AMpendingOps(doc), 2); + /* */ + /* result = doc.getWithType(root, "submap") */ + /* assert.deepEqual(result, ["map", submap]) */ + assert_true(AMobjIdEqual(AMitemObjId(AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("submap"), NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))), + submap)); + /* */ + /* result = doc.getWithType(submap, "number") */ + /* assert.deepEqual(result, ["uint", 6]) */ + uint64_t uint; + assert_true(AMitemToUint( + AMstackItem(stack_ptr, AMmapGet(doc, submap, AMstr("number"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_UINT)), + &uint)); + assert_int_equal(uint, 6); +} + +/** + * \brief should be able to make lists + */ +static void test_should_be_able_to_make_lists(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const doc = create() */ + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + /* const root = "_root" */ + /* */ + /* const sublist = doc.putObject(root, "numbers", []) */ + AMobjId const* const 
sublist = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("numbers"), AM_OBJ_TYPE_LIST), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + /* doc.insert(sublist, 0, "a"); */ + AMstackItem(NULL, AMlistPutStr(doc, sublist, 0, true, AMstr("a")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc.insert(sublist, 1, "b"); */ + AMstackItem(NULL, AMlistPutStr(doc, sublist, 1, true, AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc.insert(sublist, 2, "c"); */ + AMstackItem(NULL, AMlistPutStr(doc, sublist, 2, true, AMstr("c")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc.insert(sublist, 0, "z"); */ + AMstackItem(NULL, AMlistPutStr(doc, sublist, 0, true, AMstr("z")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* */ + /* assert.deepEqual(doc.getWithType(sublist, 0), ["str", "z"]) */ + AMbyteSpan str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMlistGet(doc, sublist, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "z", str.count); + /* assert.deepEqual(doc.getWithType(sublist, 1), ["str", "a"]) */ + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMlistGet(doc, sublist, 1, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + /* assert.deepEqual(doc.getWithType(sublist, 2), ["str", "b"]) */ + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMlistGet(doc, sublist, 2, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + /* assert.deepEqual(doc.getWithType(sublist, 3), ["str", "c"]) */ + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMlistGet(doc, sublist, 3, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "c", str.count); + /* assert.deepEqual(doc.length(sublist), 4) */ + assert_int_equal(AMobjSize(doc, sublist, 
NULL), 4); + /* */ + /* doc.put(sublist, 2, "b v2"); */ + AMstackItem(NULL, AMlistPutStr(doc, sublist, 2, false, AMstr("b v2")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* */ + /* assert.deepEqual(doc.getWithType(sublist, 2), ["str", "b v2"]) */ + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMlistGet(doc, sublist, 2, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_equal(str.count, 4); + assert_memory_equal(str.src, "b v2", str.count); + /* assert.deepEqual(doc.length(sublist), 4) */ + assert_int_equal(AMobjSize(doc, sublist, NULL), 4); +} + +/** + * \brief lists have insert, set, splice, and push ops + */ +static void test_lists_have_insert_set_splice_and_push_ops(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const doc = create() */ + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + /* const root = "_root" */ + /* */ + /* const sublist = doc.putObject(root, "letters", []) */ + AMobjId const* const sublist = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("letters"), AM_OBJ_TYPE_LIST), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + /* doc.insert(sublist, 0, "a"); */ + AMstackItem(NULL, AMlistPutStr(doc, sublist, 0, true, AMstr("a")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc.insert(sublist, 0, "b"); */ + AMstackItem(NULL, AMlistPutStr(doc, sublist, 0, true, AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* assert.deepEqual(doc.materialize(), { letters: ["b", "a"] }) */ + AMitem* doc_item = AMstackItem(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(doc_item, &key)); + assert_int_equal(key.count, strlen("letters")); + assert_memory_equal(key.src, "letters", key.count); + { + AMitems list_items = 
AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(doc_item), 0, SIZE_MAX, NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&list_items), 2); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + assert_null(AMitemsNext(&list_items, 1)); + } + /* doc.push(sublist, "c"); */ + AMstackItem(NULL, AMlistPutStr(doc, sublist, SIZE_MAX, true, AMstr("c")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* const heads = doc.getHeads() */ + AMitems const heads = AMstackItems(stack_ptr, AMgetHeads(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c"] }) */ + doc_item = AMstackItem(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(doc_item, &key)); + assert_int_equal(key.count, strlen("letters")); + assert_memory_equal(key.src, "letters", key.count); + { + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(doc_item), 0, SIZE_MAX, NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&list_items), 3); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "c", str.count); + assert_null(AMitemsNext(&list_items, 1)); + } + /* doc.push(sublist, 3, "timestamp"); */ + 
AMstackItem(NULL, AMlistPutTimestamp(doc, sublist, SIZE_MAX, true, 3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* assert.deepEqual(doc.materialize(), { letters: ["b", "a", "c", new + * Date(3)] } */ + doc_item = AMstackItem(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(doc_item, &key)); + assert_int_equal(key.count, strlen("letters")); + assert_memory_equal(key.src, "letters", key.count); + { + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(doc_item), 0, SIZE_MAX, NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_STR | AM_VAL_TYPE_TIMESTAMP)); + assert_int_equal(AMitemsSize(&list_items), 4); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "c", str.count); + int64_t timestamp; + assert_true(AMitemToTimestamp(AMitemsNext(&list_items, 1), ×tamp)); + assert_int_equal(timestamp, 3); + assert_null(AMitemsNext(&list_items, 1)); + } + /* doc.splice(sublist, 1, 1, ["d", "e", "f"]); */ + AMresult* data = AMstackResult( + stack_ptr, AMresultFrom(3, AMitemFromStr(AMstr("d")), AMitemFromStr(AMstr("e")), AMitemFromStr(AMstr("f"))), + NULL, NULL); + AMstackItem(NULL, AMsplice(doc, sublist, 1, 1, AMresultItems(data)), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* assert.deepEqual(doc.materialize(), { letters: ["b", "d", "e", "f", "c", + * new Date(3)] } */ + doc_item = AMstackItem(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + 
assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(doc_item, &key)); + assert_int_equal(key.count, strlen("letters")); + assert_memory_equal(key.src, "letters", key.count); + { + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(doc_item), 0, SIZE_MAX, NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_STR | AM_VAL_TYPE_TIMESTAMP)); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "d", str.count); + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "e", str.count); + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "f", str.count); + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "c", str.count); + int64_t timestamp; + assert_true(AMitemToTimestamp(AMitemsNext(&list_items, 1), ×tamp)); + assert_int_equal(timestamp, 3); + assert_null(AMitemsNext(&list_items, 1)); + } + /* doc.put(sublist, 0, "z"); */ + AMstackItem(NULL, AMlistPutStr(doc, sublist, 0, false, AMstr("z")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* assert.deepEqual(doc.materialize(), { letters: ["z", "d", "e", "f", "c", + * new Date(3)] } */ + doc_item = AMstackItem(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(doc_item, &key)); + assert_int_equal(key.count, strlen("letters")); + assert_memory_equal(key.src, "letters", key.count); + { + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(doc_item), 0, SIZE_MAX, 
NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_STR | AM_VAL_TYPE_TIMESTAMP)); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "z", str.count); + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "d", str.count); + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "e", str.count); + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "f", str.count); + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "c", str.count); + int64_t timestamp; + assert_true(AMitemToTimestamp(AMitemsNext(&list_items, 1), ×tamp)); + assert_int_equal(timestamp, 3); + assert_null(AMitemsNext(&list_items, 1)); + } + /* assert.deepEqual(doc.materialize(sublist), ["z", "d", "e", "f", "c", new + * Date(3)] */ + AMitems sublist_items = AMstackItems(stack_ptr, AMlistRange(doc, sublist, 0, SIZE_MAX, NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR | AM_VAL_TYPE_TIMESTAMP)); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&sublist_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "z", str.count); + assert_true(AMitemToStr(AMitemsNext(&sublist_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "d", str.count); + assert_true(AMitemToStr(AMitemsNext(&sublist_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "e", str.count); + assert_true(AMitemToStr(AMitemsNext(&sublist_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "f", str.count); + assert_true(AMitemToStr(AMitemsNext(&sublist_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "c", 
str.count); + int64_t timestamp; + assert_true(AMitemToTimestamp(AMitemsNext(&sublist_items, 1), ×tamp)); + assert_int_equal(timestamp, 3); + assert_null(AMitemsNext(&sublist_items, 1)); + /* assert.deepEqual(doc.length(sublist), 6) */ + assert_int_equal(AMobjSize(doc, sublist, NULL), 6); + /* assert.deepEqual(doc.materialize("/", heads), { letters: ["b", "a", "c"] + * } */ + doc_item = AMstackItem(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(doc_item, &key)); + assert_int_equal(key.count, strlen("letters")); + assert_memory_equal(key.src, "letters", key.count); + { + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(doc_item), 0, SIZE_MAX, &heads), + cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "c", str.count); + assert_null(AMitemsNext(&list_items, 1)); + } +} + +/** + * \brief should be able to delete non-existent props + */ +static void test_should_be_able_to_delete_non_existent_props(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const doc = create() */ + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + /* */ + /* doc.put("_root", "foo", "bar") */ + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("foo"), AMstr("bar")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc.put("_root", "bip", "bap") */ + AMstackItem(NULL, 
AMmapPutStr(doc, AM_ROOT, AMstr("bip"), AMstr("bap")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* const hash1 = doc.commit() */ + AMitems const hash1 = + AMstackItems(stack_ptr, AMcommit(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* */ + /* assert.deepEqual(doc.keys("_root"), ["bip", "foo"]) */ + AMitems keys = AMstackItems(stack_ptr, AMkeys(doc, AM_ROOT, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&keys, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "bip", str.count); + assert_true(AMitemToStr(AMitemsNext(&keys, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "foo", str.count); + /* */ + /* doc.delete("_root", "foo") */ + AMstackItem(NULL, AMmapDelete(doc, AM_ROOT, AMstr("foo")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc.delete("_root", "baz") */ + AMstackItem(NULL, AMmapDelete(doc, AM_ROOT, AMstr("baz")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* const hash2 = doc.commit() */ + AMitems const hash2 = + AMstackItems(stack_ptr, AMcommit(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* */ + /* assert.deepEqual(doc.keys("_root"), ["bip"]) */ + keys = AMstackItems(stack_ptr, AMkeys(doc, AM_ROOT, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_true(AMitemToStr(AMitemsNext(&keys, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "bip", str.count); + /* assert.deepEqual(doc.keys("_root", [hash1]), ["bip", "foo"]) */ + keys = AMstackItems(stack_ptr, AMkeys(doc, AM_ROOT, &hash1), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_true(AMitemToStr(AMitemsNext(&keys, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "bip", str.count); + assert_true(AMitemToStr(AMitemsNext(&keys, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "foo", str.count); + /* assert.deepEqual(doc.keys("_root", [hash2]), ["bip"]) */ 
+ keys = AMstackItems(stack_ptr, AMkeys(doc, AM_ROOT, &hash2), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_true(AMitemToStr(AMitemsNext(&keys, 1), &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "bip", str.count); +} + +/** + * \brief should be able to del + */ +static void test_should_be_able_to_del(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const doc = create() */ + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + /* const root = "_root" */ + /* */ + /* doc.put(root, "xxx", "xxx"); */ + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("xxx"), AMstr("xxx")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* assert.deepEqual(doc.getWithType(root, "xxx"), ["str", "xxx"]) */ + AMbyteSpan str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("xxx"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), + &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "xxx", str.count); + /* doc.delete(root, "xxx"); */ + AMstackItem(NULL, AMmapDelete(doc, AM_ROOT, AMstr("xxx")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* assert.deepEqual(doc.getWithType(root, "xxx"), undefined) */ + AMstackItem(NULL, AMmapGet(doc, AM_ROOT, AMstr("xxx"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); +} + +/** + * \brief should be able to use counters + */ +static void test_should_be_able_to_use_counters(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const doc = create() */ + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + /* const root = "_root" */ + /* */ + /* doc.put(root, "counter", 10, "counter"); */ + AMstackItem(NULL, AMmapPutCounter(doc, AM_ROOT, AMstr("counter"), 10), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* assert.deepEqual(doc.getWithType(root, 
"counter"), ["counter", 10]) */ + int64_t counter; + assert_true(AMitemToCounter(AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("counter"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_COUNTER)), + &counter)); + assert_int_equal(counter, 10); + /* doc.increment(root, "counter", 10); */ + AMstackItem(NULL, AMmapIncrement(doc, AM_ROOT, AMstr("counter"), 10), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 20]) */ + assert_true(AMitemToCounter(AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("counter"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_COUNTER)), + &counter)); + assert_int_equal(counter, 20); + /* doc.increment(root, "counter", -5); */ + AMstackItem(NULL, AMmapIncrement(doc, AM_ROOT, AMstr("counter"), -5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 15]) */ + assert_true(AMitemToCounter(AMstackItem(stack_ptr, AMmapGet(doc, AM_ROOT, AMstr("counter"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_COUNTER)), + &counter)); + assert_int_equal(counter, 15); +} + +/** + * \brief should be able to splice text + */ +static void test_should_be_able_to_splice_text(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const doc = create() */ + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + /* const root = "_root"; */ + /* */ + /* const text = doc.putObject(root, "text", ""); */ + AMobjId const* const text = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + /* doc.splice(text, 0, 0, "hello ") */ + AMstackItem(NULL, AMspliceText(doc, text, 0, 0, AMstr("hello ")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc.splice(text, 6, 0, "world") */ + AMstackItem(NULL, AMspliceText(doc, text, 6, 0, AMstr("world")), cmocka_cb, 
AMexpect(AM_VAL_TYPE_VOID)); + /* doc.splice(text, 11, 0, "!?") */ + AMstackItem(NULL, AMspliceText(doc, text, 11, 0, AMstr("!?")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* assert.deepEqual(doc.getWithType(text, 0), ["str", "h"]) */ + AMbyteSpan str; + assert_true( + AMitemToStr(AMstackItem(stack_ptr, AMlistGet(doc, text, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "h", str.count); + /* assert.deepEqual(doc.getWithType(text, 1), ["str", "e"]) */ + assert_true( + AMitemToStr(AMstackItem(stack_ptr, AMlistGet(doc, text, 1, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "e", str.count); + /* assert.deepEqual(doc.getWithType(text, 9), ["str", "l"]) */ + assert_true( + AMitemToStr(AMstackItem(stack_ptr, AMlistGet(doc, text, 9, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "l", str.count); + /* assert.deepEqual(doc.getWithType(text, 10), ["str", "d"]) */ + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMlistGet(doc, text, 10, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "d", str.count); + /* assert.deepEqual(doc.getWithType(text, 11), ["str", "!"]) */ + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMlistGet(doc, text, 11, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "!", str.count); + /* assert.deepEqual(doc.getWithType(text, 12), ["str", "?"]) */ + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMlistGet(doc, text, 12, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "?", str.count); +} + +/** + * \brief should be able to save all or incrementally + */ +static void test_should_be_able_to_save_all_or_incrementally(void** 
state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const doc = create() */ + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + /* */ + /* doc.put("_root", "foo", 1) */ + AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("foo"), 1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* */ + /* const save1 = doc.save() */ + AMbyteSpan save1; + assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &save1)); + /* */ + /* doc.put("_root", "bar", 2) */ + AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("bar"), 2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* */ + /* const saveMidway = doc.clone().save(); */ + AMdoc* doc_clone; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMclone(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc_clone)); + AMbyteSpan saveMidway; + assert_true( + AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc_clone), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &saveMidway)); + /* */ + /* const save2 = doc.saveIncremental(); */ + AMbyteSpan save2; + assert_true( + AMitemToBytes(AMstackItem(stack_ptr, AMsaveIncremental(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &save2)); + /* */ + /* doc.put("_root", "baz", 3); */ + AMstackItem(NULL, AMmapPutInt(doc, AM_ROOT, AMstr("baz"), 3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* */ + /* const save3 = doc.saveIncremental(); */ + AMbyteSpan save3; + assert_true( + AMitemToBytes(AMstackItem(stack_ptr, AMsaveIncremental(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &save3)); + /* */ + /* const saveA = doc.save(); */ + AMbyteSpan saveA; + assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &saveA)); + /* const saveB = new Uint8Array([...save1, ...save2, ...save3]); */ + size_t const saveB_count = save1.count + save2.count + save3.count; + uint8_t* const saveB_src = test_malloc(saveB_count); + 
memcpy(saveB_src, save1.src, save1.count); + memcpy(saveB_src + save1.count, save2.src, save2.count); + memcpy(saveB_src + save1.count + save2.count, save3.src, save3.count); + /* */ + /* assert.notDeepEqual(saveA, saveB); */ + assert_memory_not_equal(saveA.src, saveB_src, saveA.count); + /* */ + /* const docA = load(saveA); */ + AMdoc* docA; + assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(saveA.src, saveA.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &docA)); + /* const docB = load(saveB); */ + AMdoc* docB; + assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(saveB_src, saveB_count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &docB)); + test_free(saveB_src); + /* const docC = load(saveMidway) */ + AMdoc* docC; + assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(saveMidway.src, saveMidway.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &docC)); + /* docC.loadIncremental(save3) */ + AMstackItem(NULL, AMloadIncremental(docC, save3.src, save3.count), cmocka_cb, AMexpect(AM_VAL_TYPE_UINT)); + /* */ + /* assert.deepEqual(docA.keys("_root"), docB.keys("_root")); */ + AMitems const keysA = AMstackItems(stack_ptr, AMkeys(docA, AM_ROOT, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + AMitems const keysB = AMstackItems(stack_ptr, AMkeys(docB, AM_ROOT, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_true(AMitemsEqual(&keysA, &keysB)); + /* assert.deepEqual(docA.save(), docB.save()); */ + AMbyteSpan docA_save; + assert_true( + AMitemToBytes(AMstackItem(stack_ptr, AMsave(docA), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &docA_save)); + AMbyteSpan docB_save; + assert_true( + AMitemToBytes(AMstackItem(stack_ptr, AMsave(docB), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &docB_save)); + assert_int_equal(docA_save.count, docB_save.count); + assert_memory_equal(docA_save.src, docB_save.src, docA_save.count); + /* assert.deepEqual(docA.save(), docC.save()); */ + AMbyteSpan docC_save; + assert_true( + AMitemToBytes(AMstackItem(stack_ptr, AMsave(docC), cmocka_cb, 
AMexpect(AM_VAL_TYPE_BYTES)), &docC_save)); + assert_int_equal(docA_save.count, docC_save.count); + assert_memory_equal(docA_save.src, docC_save.src, docA_save.count); +} + +/** + * \brief should be able to splice text #2 + */ +static void test_should_be_able_to_splice_text_2(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const doc = create() */ + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + /* const text = doc.putObject("_root", "text", ""); */ + AMobjId const* const text = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + /* doc.splice(text, 0, 0, "hello world"); */ + AMstackItem(NULL, AMspliceText(doc, text, 0, 0, AMstr("hello world")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* const hash1 = doc.commit(); */ + AMitems const hash1 = + AMstackItems(stack_ptr, AMcommit(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* doc.splice(text, 6, 0, "big bad "); */ + AMstackItem(NULL, AMspliceText(doc, text, 6, 0, AMstr("big bad ")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* const hash2 = doc.commit(); */ + AMitems const hash2 = + AMstackItems(stack_ptr, AMcommit(doc, AMstr(NULL), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* assert.strictEqual(doc.text(text), "hello big bad world") */ + AMbyteSpan str; + assert_true( + AMitemToStr(AMstackItem(stack_ptr, AMtext(doc, text, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_equal(str.count, strlen("hello big bad world")); + assert_memory_equal(str.src, "hello big bad world", str.count); + /* assert.strictEqual(doc.length(text), 19) */ + assert_int_equal(AMobjSize(doc, text, NULL), 19); + /* assert.strictEqual(doc.text(text, [hash1]), "hello world") */ + assert_true( + AMitemToStr(AMstackItem(stack_ptr, AMtext(doc, text, &hash1), 
cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_equal(str.count, strlen("hello world")); + assert_memory_equal(str.src, "hello world", str.count); + /* assert.strictEqual(doc.length(text, [hash1]), 11) */ + assert_int_equal(AMobjSize(doc, text, &hash1), 11); + /* assert.strictEqual(doc.text(text, [hash2]), "hello big bad world") */ + assert_true( + AMitemToStr(AMstackItem(stack_ptr, AMtext(doc, text, &hash2), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_equal(str.count, strlen("hello big bad world")); + assert_memory_equal(str.src, "hello big bad world", str.count); + /* assert.strictEqual(doc.length(text, [hash2]), 19) */ + assert_int_equal(AMobjSize(doc, text, &hash2), 19); +} + +/** + * \brief local inc increments all visible counters in a map + */ +static void test_local_inc_increments_all_visible_counters_in_a_map(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const doc1 = create("aaaa") */ + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aaaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + AMdoc* doc1; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); + /* doc1.put("_root", "hello", "world") */ + AMstackItem(NULL, AMmapPutStr(doc1, AM_ROOT, AMstr("hello"), AMstr("world")), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + /* const doc2 = load(doc1.save(), "bbbb"); */ + AMbyteSpan save; + assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &save)); + AMdoc* doc2; + assert_true( + AMitemToDoc(AMstackItem(stack_ptr, AMload(save.src, save.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("bbbb")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + AMstackItem(NULL, AMsetActorId(doc2, actor_id), cmocka_cb, 
AMexpect(AM_VAL_TYPE_VOID)); + /* const doc3 = load(doc1.save(), "cccc"); */ + AMdoc* doc3; + assert_true( + AMitemToDoc(AMstackItem(stack_ptr, AMload(save.src, save.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc3)); + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("cccc")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + AMstackItem(NULL, AMsetActorId(doc3, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* let heads = doc1.getHeads() */ + AMitems const heads1 = AMstackItems(stack_ptr, AMgetHeads(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* doc1.put("_root", "cnt", 20) */ + AMstackItem(NULL, AMmapPutInt(doc1, AM_ROOT, AMstr("cnt"), 20), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc2.put("_root", "cnt", 0, "counter") */ + AMstackItem(NULL, AMmapPutCounter(doc2, AM_ROOT, AMstr("cnt"), 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc3.put("_root", "cnt", 10, "counter") */ + AMstackItem(NULL, AMmapPutCounter(doc3, AM_ROOT, AMstr("cnt"), 10), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc1.applyChanges(doc2.getChanges(heads)) */ + AMitems const changes2 = + AMstackItems(stack_ptr, AMgetChanges(doc2, &heads1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + AMstackItem(NULL, AMapplyChanges(doc1, &changes2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc1.applyChanges(doc3.getChanges(heads)) */ + AMitems const changes3 = + AMstackItems(stack_ptr, AMgetChanges(doc3, &heads1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + AMstackItem(NULL, AMapplyChanges(doc1, &changes3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* let result = doc1.getAll("_root", "cnt") */ + AMitems result = AMstackItems(stack_ptr, AMmapGetAll(doc1, AM_ROOT, AMstr("cnt"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_COUNTER | AM_VAL_TYPE_INT | AM_VAL_TYPE_STR)); + /* assert.deepEqual(result, [ + ['int', 20, '2@aaaa'], + ['counter', 0, '2@bbbb'], + ['counter', 10, '2@cccc'], + ]) */ + AMitem* result_item = AMitemsNext(&result, 1); + 
int64_t int_; + assert_true(AMitemToInt(result_item, &int_)); + assert_int_equal(int_, 20); + assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 2); + AMbyteSpan str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); + assert_int_equal(str.count, 4); + assert_memory_equal(str.src, "aaaa", str.count); + result_item = AMitemsNext(&result, 1); + int64_t counter; + assert_true(AMitemToCounter(result_item, &counter)); + assert_int_equal(counter, 0); + assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 2); + str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); + assert_int_equal(str.count, 4); + assert_memory_equal(str.src, "bbbb", str.count); + result_item = AMitemsNext(&result, 1); + assert_true(AMitemToCounter(result_item, &counter)); + assert_int_equal(counter, 10); + assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 2); + str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); + assert_int_equal(str.count, 4); + assert_memory_equal(str.src, "cccc", str.count); + /* doc1.increment("_root", "cnt", 5) */ + AMstackItem(NULL, AMmapIncrement(doc1, AM_ROOT, AMstr("cnt"), 5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* result = doc1.getAll("_root", "cnt") */ + result = AMstackItems(stack_ptr, AMmapGetAll(doc1, AM_ROOT, AMstr("cnt"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_COUNTER)); + /* assert.deepEqual(result, [ + ['counter', 5, '2@bbbb'], + ['counter', 15, '2@cccc'], + ]) */ + result_item = AMitemsNext(&result, 1); + assert_true(AMitemToCounter(result_item, &counter)); + assert_int_equal(counter, 5); + assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 2); + str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); + assert_int_equal(str.count, 4); + assert_memory_equal(str.src, "bbbb", str.count); + result_item = AMitemsNext(&result, 1); + assert_true(AMitemToCounter(result_item, &counter)); + assert_int_equal(counter, 15); + assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 2); + str = 
AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); + assert_int_equal(str.count, 4); + assert_memory_equal(str.src, "cccc", str.count); + /* */ + /* const save1 = doc1.save() */ + AMbyteSpan save1; + assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &save1)); + /* const doc4 = load(save1) */ + AMdoc* doc4; + assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(save1.src, save1.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc4)); + /* assert.deepEqual(doc4.save(), save1); */ + AMbyteSpan doc4_save; + assert_true( + AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc4), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &doc4_save)); + assert_int_equal(doc4_save.count, save1.count); + assert_memory_equal(doc4_save.src, save1.src, doc4_save.count); +} + +/** + * \brief local inc increments all visible counters in a sequence + */ +static void test_local_inc_increments_all_visible_counters_in_a_sequence(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const doc1 = create("aaaa") */ + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aaaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + AMdoc* doc1; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); + /* const seq = doc1.putObject("_root", "seq", []) */ + AMobjId const* const seq = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc1, AM_ROOT, AMstr("seq"), AM_OBJ_TYPE_LIST), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + /* doc1.insert(seq, 0, "hello") */ + AMstackItem(NULL, AMlistPutStr(doc1, seq, 0, true, AMstr("hello")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* const doc2 = load(doc1.save(), "bbbb"); */ + AMbyteSpan save1; + assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &save1)); + AMdoc* doc2; + 
assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(save1.src, save1.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("bbbb")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + AMstackItem(NULL, AMsetActorId(doc2, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* const doc3 = load(doc1.save(), "cccc"); */ + AMdoc* doc3; + assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(save1.src, save1.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc3)); + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("cccc")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + AMstackItem(NULL, AMsetActorId(doc3, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* let heads = doc1.getHeads() */ + AMitems const heads1 = AMstackItems(stack_ptr, AMgetHeads(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* doc1.put(seq, 0, 20) */ + AMstackItem(NULL, AMlistPutInt(doc1, seq, 0, false, 20), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc2.put(seq, 0, 0, "counter") */ + AMstackItem(NULL, AMlistPutCounter(doc2, seq, 0, false, 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc3.put(seq, 0, 10, "counter") */ + AMstackItem(NULL, AMlistPutCounter(doc3, seq, 0, false, 10), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc1.applyChanges(doc2.getChanges(heads)) */ + AMitems const changes2 = + AMstackItems(stack_ptr, AMgetChanges(doc2, &heads1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + AMstackItem(NULL, AMapplyChanges(doc1, &changes2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc1.applyChanges(doc3.getChanges(heads)) */ + AMitems const changes3 = + AMstackItems(stack_ptr, AMgetChanges(doc3, &heads1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + AMstackItem(NULL, AMapplyChanges(doc1, &changes3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* let result = doc1.getAll(seq, 0) */ + AMitems result = AMstackItems(stack_ptr, AMlistGetAll(doc1, 
seq, 0, NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_COUNTER | AM_VAL_TYPE_INT)); + /* assert.deepEqual(result, [ + ['int', 20, '3@aaaa'], + ['counter', 0, '3@bbbb'], + ['counter', 10, '3@cccc'], + ]) */ + AMitem* result_item = AMitemsNext(&result, 1); + int64_t int_; + assert_true(AMitemToInt(result_item, &int_)); + assert_int_equal(int_, 20); + assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 3); + AMbyteSpan str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); + assert_int_equal(str.count, 4); + assert_memory_equal(str.src, "aaaa", str.count); + result_item = AMitemsNext(&result, 1); + int64_t counter; + assert_true(AMitemToCounter(result_item, &counter)); + assert_int_equal(counter, 0); + assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 3); + str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); + assert_memory_equal(str.src, "bbbb", str.count); + result_item = AMitemsNext(&result, 1); + assert_true(AMitemToCounter(result_item, &counter)); + assert_int_equal(counter, 10); + assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 3); + str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); + assert_int_equal(str.count, 4); + assert_memory_equal(str.src, "cccc", str.count); + /* doc1.increment(seq, 0, 5) */ + AMstackItem(NULL, AMlistIncrement(doc1, seq, 0, 5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* result = doc1.getAll(seq, 0) */ + result = AMstackItems(stack_ptr, AMlistGetAll(doc1, seq, 0, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_COUNTER)); + /* assert.deepEqual(result, [ + ['counter', 5, '3@bbbb'], + ['counter', 15, '3@cccc'], + ]) */ + result_item = AMitemsNext(&result, 1); + assert_true(AMitemToCounter(result_item, &counter)); + assert_int_equal(counter, 5); + assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 3); + str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); + assert_int_equal(str.count, 4); + assert_memory_equal(str.src, "bbbb", str.count); + result_item = AMitemsNext(&result, 1); + 
assert_true(AMitemToCounter(result_item, &counter)); + assert_int_equal(counter, 15); + assert_int_equal(AMobjIdCounter(AMitemObjId(result_item)), 3); + str = AMactorIdStr(AMobjIdActorId(AMitemObjId(result_item))); + assert_memory_equal(str.src, "cccc", str.count); + /* */ + /* const save = doc1.save() */ + AMbyteSpan save; + assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &save)); + /* const doc4 = load(save) */ + AMdoc* doc4; + assert_true( + AMitemToDoc(AMstackItem(stack_ptr, AMload(save.src, save.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc4)); + /* assert.deepEqual(doc4.save(), save); */ + AMbyteSpan doc4_save; + assert_true( + AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc4), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &doc4_save)); + assert_int_equal(doc4_save.count, save.count); + assert_memory_equal(doc4_save.src, save.src, doc4_save.count); +} + +/** + * \brief paths can be used instead of objids + */ +static void test_paths_can_be_used_instead_of_objids(void** state); + +/** + * \brief should be able to fetch changes by hash + */ +static void test_should_be_able_to_fetch_changes_by_hash(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const doc1 = create("aaaa") */ + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aaaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + AMdoc* doc1; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); + /* const doc2 = create("bbbb") */ + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("bbbb")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + AMdoc* doc2; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); + /* doc1.put("/", "a", "b") */ + AMstackItem(NULL, AMmapPutStr(doc1, 
AM_ROOT, AMstr("a"), AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc2.put("/", "b", "c") */ + AMstackItem(NULL, AMmapPutStr(doc2, AM_ROOT, AMstr("b"), AMstr("c")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* const head1 = doc1.getHeads() */ + AMitems head1 = AMstackItems(stack_ptr, AMgetHeads(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* const head2 = doc2.getHeads() */ + AMitems head2 = AMstackItems(stack_ptr, AMgetHeads(doc2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* const change1 = doc1.getChangeByHash(head1[0]) + if (change1 === null) { throw new RangeError("change1 should not be + null") */ + AMbyteSpan change_hash1; + assert_true(AMitemToChangeHash(AMitemsNext(&head1, 1), &change_hash1)); + AMchange const* change1; + assert_true(AMitemToChange(AMstackItem(stack_ptr, AMgetChangeByHash(doc1, change_hash1.src, change_hash1.count), + cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)), + &change1)); + /* const change2 = doc1.getChangeByHash(head2[0]) + assert.deepEqual(change2, null) */ + AMbyteSpan change_hash2; + assert_true(AMitemToChangeHash(AMitemsNext(&head2, 1), &change_hash2)); + AMstackItem(NULL, AMgetChangeByHash(doc1, change_hash2.src, change_hash2.count), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + /* assert.deepEqual(decodeChange(change1).hash, head1[0]) */ + assert_memory_equal(AMchangeHash(change1).src, change_hash1.src, change_hash1.count); +} + +/** + * \brief recursive sets are possible + */ +static void test_recursive_sets_are_possible(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const doc = create("aaaa") */ + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aaaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + /* const l1 = doc.putObject("_root", "list", [{ foo: 
"bar" }, [1, 2, 3]] */ + AMobjId const* const l1 = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + { + AMobjId const* const map = AMitemObjId(AMstackItem( + stack_ptr, AMlistPutObject(doc, l1, 0, true, AM_OBJ_TYPE_MAP), cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + AMstackItem(NULL, AMmapPutStr(doc, map, AMstr("foo"), AMstr("bar")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMobjId const* const list = + AMitemObjId(AMstackItem(stack_ptr, AMlistPutObject(doc, l1, SIZE_MAX, true, AM_OBJ_TYPE_LIST), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + for (int value = 1; value != 4; ++value) { + AMstackItem(NULL, AMlistPutInt(doc, list, SIZE_MAX, true, value), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + } + } + /* const l2 = doc.insertObject(l1, 0, { zip: ["a", "b"] }) */ + AMobjId const* const l2 = AMitemObjId(AMstackItem(stack_ptr, AMlistPutObject(doc, l1, 0, true, AM_OBJ_TYPE_MAP), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + { + AMobjId const* const list = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, l2, AMstr("zip"), AM_OBJ_TYPE_LIST), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + AMstackItem(NULL, AMlistPutStr(doc, list, SIZE_MAX, true, AMstr("a")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMlistPutStr(doc, list, SIZE_MAX, true, AMstr("b")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + } + /* const l3 = doc.putObject("_root", "info1", "hello world") // 'text' + * object */ + AMobjId const* const l3 = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("info1"), AM_OBJ_TYPE_TEXT), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + AMstackItem(NULL, AMspliceText(doc, l3, 0, 0, AMstr("hello world")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* doc.put("_root", "info2", "hello world") // 'str' */ + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("info2"), AMstr("hello world")), cmocka_cb, + 
AMexpect(AM_VAL_TYPE_VOID)); + /* const l4 = doc.putObject("_root", "info3", "hello world") */ + AMobjId const* const l4 = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("info3"), AM_OBJ_TYPE_TEXT), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + AMstackItem(NULL, AMspliceText(doc, l4, 0, 0, AMstr("hello world")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* assert.deepEqual(doc.materialize(), { + "list": [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]], + "info1": "hello world", + "info2": "hello world", + "info3": "hello world", + }) */ + AMitems doc_items = AMstackItems(stack_ptr, AMmapRange(doc, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE | AM_VAL_TYPE_STR)); + AMitem* doc_item = AMitemsNext(&doc_items, 1); + assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(doc_item, &key)); + assert_int_equal(key.count, strlen("info1")); + assert_memory_equal(key.src, "info1", key.count); + AMbyteSpan str; + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMtext(doc, AMitemObjId(doc_item), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_equal(str.count, strlen("hello world")); + assert_memory_equal(str.src, "hello world", str.count); + doc_item = AMitemsNext(&doc_items, 1); + assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(doc_item, &key)); + assert_int_equal(key.count, strlen("info2")); + assert_memory_equal(key.src, "info2", key.count); + assert_true(AMitemToStr(doc_item, &str)); + assert_int_equal(str.count, strlen("hello world")); + assert_memory_equal(str.src, "hello world", str.count); + doc_item = AMitemsNext(&doc_items, 1); + assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(doc_item, &key)); + assert_int_equal(key.count, strlen("info3")); + assert_memory_equal(key.src, "info3", key.count); + assert_true(AMitemToStr( + AMstackItem(stack_ptr, AMtext(doc, 
AMitemObjId(doc_item), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_equal(str.count, strlen("hello world")); + assert_memory_equal(str.src, "hello world", str.count); + doc_item = AMitemsNext(&doc_items, 1); + assert_int_equal(AMitemIdxType(doc_item), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(doc_item, &key)); + assert_int_equal(key.count, strlen("list")); + assert_memory_equal(key.src, "list", key.count); + { + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(doc_item), 0, SIZE_MAX, NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + AMitem const* list_item = AMitemsNext(&list_items, 1); + { + AMitems map_items = + AMstackItems(stack_ptr, AMmapRange(doc, AMitemObjId(list_item), AMstr(NULL), AMstr(NULL), NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + AMitem const* map_item = AMitemsNext(&map_items, 1); + assert_int_equal(AMitemIdxType(map_item), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(map_item, &key)); + assert_int_equal(key.count, strlen("zip")); + assert_memory_equal(key.src, "zip", key.count); + { + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(map_item), 0, SIZE_MAX, NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE | AM_VAL_TYPE_STR)); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + } + } + list_item = AMitemsNext(&list_items, 1); + { + AMitems map_items = + AMstackItems(stack_ptr, AMmapRange(doc, AMitemObjId(list_item), AMstr(NULL), AMstr(NULL), NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE | AM_VAL_TYPE_STR)); + AMitem* map_item = AMitemsNext(&map_items, 1); + assert_int_equal(AMitemIdxType(map_item), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(map_item, &key)); + 
assert_int_equal(key.count, strlen("foo")); + assert_memory_equal(key.src, "foo", key.count); + AMbyteSpan str; + assert_true(AMitemToStr(map_item, &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "bar", str.count); + } + list_item = AMitemsNext(&list_items, 1); + { + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(list_item), 0, SIZE_MAX, NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_INT)); + int64_t int_; + assert_true(AMitemToInt(AMitemsNext(&list_items, 1), &int_)); + assert_int_equal(int_, 1); + assert_true(AMitemToInt(AMitemsNext(&list_items, 1), &int_)); + assert_int_equal(int_, 2); + assert_true(AMitemToInt(AMitemsNext(&list_items, 1), &int_)); + assert_int_equal(int_, 3); + } + } + /* assert.deepEqual(doc.materialize(l2), { zip: ["a", "b"] }) */ + AMitems map_items = AMstackItems(stack_ptr, AMmapRange(doc, l2, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + AMitem const* map_item = AMitemsNext(&map_items, 1); + assert_int_equal(AMitemIdxType(map_item), AM_IDX_TYPE_KEY); + assert_true(AMitemKey(map_item, &key)); + assert_int_equal(key.count, strlen("zip")); + assert_memory_equal(key.src, "zip", key.count); + { + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(map_item), 0, SIZE_MAX, NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + } + /* assert.deepEqual(doc.materialize(l1), [{ zip: ["a", "b"] }, { foo: "bar" + * }, [1, 2, 3]] */ + AMitems list_items = + AMstackItems(stack_ptr, AMlistRange(doc, l1, 0, SIZE_MAX, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + AMitem const* list_item = AMitemsNext(&list_items, 1); + { + AMitems map_items = + 
AMstackItems(stack_ptr, AMmapRange(doc, AMitemObjId(list_item), AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + AMitem const* map_item = AMitemsNext(&map_items, 1); + assert_int_equal(AMitemIdxType(map_item), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(map_item, &key)); + assert_int_equal(key.count, strlen("zip")); + assert_memory_equal(key.src, "zip", key.count); + { + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(map_item), 0, SIZE_MAX, NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "a", str.count); + assert_true(AMitemToStr(AMitemsNext(&list_items, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "b", str.count); + } + } + list_item = AMitemsNext(&list_items, 1); + { + AMitems map_items = + AMstackItems(stack_ptr, AMmapRange(doc, AMitemObjId(list_item), AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + AMitem* map_item = AMitemsNext(&map_items, 1); + assert_int_equal(AMitemIdxType(map_item), AM_IDX_TYPE_KEY); + AMbyteSpan key; + assert_true(AMitemKey(map_item, &key)); + assert_int_equal(key.count, strlen("foo")); + assert_memory_equal(key.src, "foo", key.count); + AMbyteSpan str; + assert_true(AMitemToStr(map_item, &str)); + assert_int_equal(str.count, 3); + assert_memory_equal(str.src, "bar", str.count); + } + list_item = AMitemsNext(&list_items, 1); + { + AMitems list_items = AMstackItems(stack_ptr, AMlistRange(doc, AMitemObjId(list_item), 0, SIZE_MAX, NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_INT)); + int64_t int_; + assert_true(AMitemToInt(AMitemsNext(&list_items, 1), &int_)); + assert_int_equal(int_, 1); + assert_true(AMitemToInt(AMitemsNext(&list_items, 1), &int_)); + assert_int_equal(int_, 2); + assert_true(AMitemToInt(AMitemsNext(&list_items, 1), &int_)); + assert_int_equal(int_, 
3); + } + /* assert.deepEqual(doc.materialize(l4), "hello world") */ + assert_true(AMitemToStr(AMstackItem(stack_ptr, AMtext(doc, l4, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_equal(str.count, strlen("hello world")); + assert_memory_equal(str.src, "hello world", str.count); +} + +/** + * \brief only returns an object id when objects are created + */ +static void test_only_returns_an_object_id_when_objects_are_created(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const doc = create("aaaa") */ + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aaaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + AMdoc* doc; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc)); + /* const r1 = doc.put("_root", "foo", "bar") + assert.deepEqual(r1, null); */ + AMstackItem(NULL, AMmapPutStr(doc, AM_ROOT, AMstr("foo"), AMstr("bar")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* const r2 = doc.putObject("_root", "list", []) */ + AMobjId const* const r2 = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc, AM_ROOT, AMstr("list"), AM_OBJ_TYPE_LIST), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + /* const r3 = doc.put("_root", "counter", 10, "counter") + assert.deepEqual(r3, null); */ + AMstackItem(NULL, AMmapPutCounter(doc, AM_ROOT, AMstr("counter"), 10), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* const r4 = doc.increment("_root", "counter", 1) + assert.deepEqual(r4, null); */ + AMstackItem(NULL, AMmapIncrement(doc, AM_ROOT, AMstr("counter"), 1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* const r5 = doc.delete("_root", "counter") + assert.deepEqual(r5, null); */ + AMstackItem(NULL, AMmapDelete(doc, AM_ROOT, AMstr("counter")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* const r6 = doc.insert(r2, 0, 10); + assert.deepEqual(r6, null); */ + AMstackItem(NULL, 
AMlistPutInt(doc, r2, 0, true, 10), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* const r7 = doc.insertObject(r2, 0, {}); */ + AMobjId const* const r7 = AMitemObjId(AMstackItem(stack_ptr, AMlistPutObject(doc, r2, 0, true, AM_OBJ_TYPE_LIST), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + /* const r8 = doc.splice(r2, 1, 0, ["a", "b", "c"]); */ + AMresult* data = AMstackResult( + stack_ptr, AMresultFrom(3, AMitemFromStr(AMstr("a")), AMitemFromStr(AMstr("b")), AMitemFromStr(AMstr("c"))), + NULL, NULL); + AMstackItem(NULL, AMsplice(doc, r2, 1, 0, AMresultItems(data)), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* assert.deepEqual(r2, "2@aaaa"); */ + assert_int_equal(AMobjIdCounter(r2), 2); + AMbyteSpan str = AMactorIdStr(AMobjIdActorId(r2)); + assert_int_equal(str.count, 4); + assert_memory_equal(str.src, "aaaa", str.count); + /* assert.deepEqual(r7, "7@aaaa"); */ + assert_int_equal(AMobjIdCounter(r7), 7); + str = AMactorIdStr(AMobjIdActorId(r7)); + assert_memory_equal(str.src, "aaaa", str.count); +} + +/** + * \brief objects without properties are preserved + */ +static void test_objects_without_properties_are_preserved(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const doc1 = create("aaaa") */ + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aaaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), &actor_id)); + AMdoc* doc1; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc1)); + /* const a = doc1.putObject("_root", "a", {}); */ + AMobjId const* const a = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc1, AM_ROOT, AMstr("a"), AM_OBJ_TYPE_MAP), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + /* const b = doc1.putObject("_root", "b", {}); */ + AMobjId const* const b = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc1, AM_ROOT, AMstr("b"), AM_OBJ_TYPE_MAP), cmocka_cb, + 
AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + /* const c = doc1.putObject("_root", "c", {}); */ + AMobjId const* const c = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(doc1, AM_ROOT, AMstr("c"), AM_OBJ_TYPE_MAP), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + /* const d = doc1.put(c, "d", "dd"); */ + AMstackItem(NULL, AMmapPutStr(doc1, c, AMstr("d"), AMstr("dd")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* const saved = doc1.save(); */ + AMbyteSpan saved; + assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(doc1), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &saved)); + /* const doc2 = load(saved); */ + AMdoc* doc2; + assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(saved.src, saved.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &doc2)); + /* assert.deepEqual(doc2.getWithType("_root", "a"), ["map", a]) */ + AMitems doc_items = AMstackItems(stack_ptr, AMmapRange(doc2, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE)); + assert_true(AMobjIdEqual(AMitemObjId(AMitemsNext(&doc_items, 1)), a)); + /* assert.deepEqual(doc2.keys(a), []) */ + AMitems keys = AMstackItems(stack_ptr, AMkeys(doc1, a, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&keys), 0); + /* assert.deepEqual(doc2.getWithType("_root", "b"), ["map", b]) */ + assert_true(AMobjIdEqual(AMitemObjId(AMitemsNext(&doc_items, 1)), b)); + /* assert.deepEqual(doc2.keys(b), []) */ + keys = AMstackItems(stack_ptr, AMkeys(doc1, b, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_int_equal(AMitemsSize(&keys), 0); + /* assert.deepEqual(doc2.getWithType("_root", "c"), ["map", c]) */ + assert_true(AMobjIdEqual(AMitemObjId(AMitemsNext(&doc_items, 1)), c)); + /* assert.deepEqual(doc2.keys(c), ["d"]) */ + keys = AMstackItems(stack_ptr, AMkeys(doc1, c, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + AMbyteSpan str; + assert_true(AMitemToStr(AMitemsNext(&keys, 1), &str)); + assert_int_equal(str.count, 1); + assert_memory_equal(str.src, "d", 
str.count); + /* assert.deepEqual(doc2.getWithType(c, "d"), ["str", "dd"]) */ + AMitems obj_items = AMstackItems(stack_ptr, AMobjItems(doc1, c, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + assert_true(AMitemToStr(AMitemsNext(&obj_items, 1), &str)); + assert_int_equal(str.count, 2); + assert_memory_equal(str.src, "dd", str.count); +} + +/** + * \brief should allow you to forkAt a heads + */ +static void test_should_allow_you_to_forkAt_a_heads(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const A = create("aaaaaa") */ + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aaaaaa")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMdoc* A; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &A)); + /* A.put("/", "key1", "val1"); */ + AMstackItem(NULL, AMmapPutStr(A, AM_ROOT, AMstr("key1"), AMstr("val1")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* A.put("/", "key2", "val2"); */ + AMstackItem(NULL, AMmapPutStr(A, AM_ROOT, AMstr("key2"), AMstr("val2")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* const heads1 = A.getHeads(); */ + AMitems const heads1 = AMstackItems(stack_ptr, AMgetHeads(A), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* const B = A.fork("bbbbbb") */ + AMdoc* B; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMfork(A, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &B)); + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("bbbbbb")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMstackItem(NULL, AMsetActorId(B, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* A.put("/", "key3", "val3"); */ + AMstackItem(NULL, AMmapPutStr(A, AM_ROOT, AMstr("key3"), AMstr("val3")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* B.put("/", "key4", "val4"); */ + AMstackItem(NULL, AMmapPutStr(B, AM_ROOT, AMstr("key4"), AMstr("val4")), 
cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* A.merge(B) */ + AMstackItem(NULL, AMmerge(A, B), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* const heads2 = A.getHeads(); */ + AMitems const heads2 = AMstackItems(stack_ptr, AMgetHeads(A), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* A.put("/", "key5", "val5"); */ + AMstackItem(NULL, AMmapPutStr(A, AM_ROOT, AMstr("key5"), AMstr("val5")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* assert.deepEqual(A.forkAt(heads1).materialize("/"), A.materialize("/", + * heads1) */ + AMdoc* A_forkAt1; + assert_true( + AMitemToDoc(AMstackItem(stack_ptr, AMfork(A, &heads1), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &A_forkAt1)); + AMitems AforkAt1_items = AMstackItems(stack_ptr, AMmapRange(A_forkAt1, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + AMitems A1_items = AMstackItems(stack_ptr, AMmapRange(A, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads1), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + assert_true(AMitemsEqual(&AforkAt1_items, &A1_items)); + /* assert.deepEqual(A.forkAt(heads2).materialize("/"), A.materialize("/", + * heads2) */ + AMdoc* A_forkAt2; + assert_true( + AMitemToDoc(AMstackItem(stack_ptr, AMfork(A, &heads2), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &A_forkAt2)); + AMitems AforkAt2_items = AMstackItems(stack_ptr, AMmapRange(A_forkAt2, AM_ROOT, AMstr(NULL), AMstr(NULL), NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_STR)); + AMitems A2_items = AMstackItems(stack_ptr, AMmapRange(A, AM_ROOT, AMstr(NULL), AMstr(NULL), &heads2), cmocka_cb, + AMexpect(AM_VAL_TYPE_STR)); + assert_true(AMitemsEqual(&AforkAt2_items, &A2_items)); +} + +/** + * \brief should handle merging text conflicts then saving & loading + */ +static void test_should_handle_merging_text_conflicts_then_saving_and_loading(void** state) { + BaseState* base_state = *state; + AMstack** stack_ptr = &base_state->stack; + /* const A = create("aabbcc") */ + AMactorId const* actor_id; + assert_true(AMitemToActorId( + 
AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("aabbcc")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMdoc* A; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &A)); + /* const At = A.putObject('_root', 'text', "") */ + AMobjId const* const At = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(A, AM_ROOT, AMstr("text"), AM_OBJ_TYPE_TEXT), cmocka_cb, + AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + /* A.splice(At, 0, 0, 'hello') */ + AMstackItem(NULL, AMspliceText(A, At, 0, 0, AMstr("hello")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* */ + /* const B = A.fork() */ + AMdoc* B; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMfork(A, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &B)); + /* */ + /* assert.deepEqual(B.getWithType("_root", "text"), ["text", At]) */ + AMbyteSpan str; + assert_true( + AMitemToStr(AMstackItem(stack_ptr, + AMtext(B, + AMitemObjId(AMstackItem(stack_ptr, AMmapGet(B, AM_ROOT, AMstr("text"), NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))), + NULL), + cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), + &str)); + AMbyteSpan str2; + assert_true(AMitemToStr(AMstackItem(stack_ptr, AMtext(A, At, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str2)); + assert_int_equal(str.count, str2.count); + assert_memory_equal(str.src, str2.src, str.count); + /* */ + /* B.splice(At, 4, 1) */ + AMstackItem(NULL, AMspliceText(B, At, 4, 1, AMstr(NULL)), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* B.splice(At, 4, 0, '!') */ + AMstackItem(NULL, AMspliceText(B, At, 4, 0, AMstr("!")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* B.splice(At, 5, 0, ' ') */ + AMstackItem(NULL, AMspliceText(B, At, 5, 0, AMstr(" ")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* B.splice(At, 6, 0, 'world') */ + AMstackItem(NULL, AMspliceText(B, At, 6, 0, AMstr("world")), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* */ + /* A.merge(B) */ + AMstackItem(NULL, AMmerge(A, B), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* */ 
+ /* const binary = A.save() */ + AMbyteSpan binary; + assert_true(AMitemToBytes(AMstackItem(stack_ptr, AMsave(A), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &binary)); + /* */ + /* const C = load(binary) */ + AMdoc* C; + assert_true(AMitemToDoc( + AMstackItem(stack_ptr, AMload(binary.src, binary.count), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &C)); + /* */ + /* assert.deepEqual(C.getWithType('_root', 'text'), ['text', '1@aabbcc'] */ + AMobjId const* const C_text = AMitemObjId( + AMstackItem(stack_ptr, AMmapGet(C, AM_ROOT, AMstr("text"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); + assert_int_equal(AMobjIdCounter(C_text), 1); + str = AMactorIdStr(AMobjIdActorId(C_text)); + assert_int_equal(str.count, strlen("aabbcc")); + assert_memory_equal(str.src, "aabbcc", str.count); + /* assert.deepEqual(C.text(At), 'hell! world') */ + assert_true(AMitemToStr(AMstackItem(stack_ptr, AMtext(C, At, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_STR)), &str)); + assert_int_equal(str.count, strlen("hell! world")); + assert_memory_equal(str.src, "hell! 
world", str.count); +} + +int run_ported_wasm_basic_tests(void) { + const struct CMUnitTest tests[] = { + cmocka_unit_test_setup_teardown(test_create_clone_and_free, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_start_and_commit, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_getting_a_nonexistent_prop_does_not_throw_an_error, setup_base, + teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_set_and_get_a_simple_value, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_use_bytes, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_make_subobjects, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_make_lists, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_lists_have_insert_set_splice_and_push_ops, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_delete_non_existent_props, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_del, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_use_counters, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_splice_text, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_save_all_or_incrementally, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_splice_text_2, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_local_inc_increments_all_visible_counters_in_a_map, setup_base, + teardown_base), + cmocka_unit_test_setup_teardown(test_local_inc_increments_all_visible_counters_in_a_sequence, setup_base, + teardown_base), + cmocka_unit_test_setup_teardown(test_should_be_able_to_fetch_changes_by_hash, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_recursive_sets_are_possible, setup_base, teardown_base), + 
cmocka_unit_test_setup_teardown(test_only_returns_an_object_id_when_objects_are_created, setup_base, + teardown_base), + cmocka_unit_test_setup_teardown(test_objects_without_properties_are_preserved, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_allow_you_to_forkAt_a_heads, setup_base, teardown_base), + cmocka_unit_test_setup_teardown(test_should_handle_merging_text_conflicts_then_saving_and_loading, setup_base, + teardown_base)}; + + return cmocka_run_group_tests(tests, NULL, NULL); +} diff --git a/automerge-c/test/ported_wasm/suite.c b/rust/automerge-c/test/ported_wasm/suite.c similarity index 71% rename from automerge-c/test/ported_wasm/suite.c rename to rust/automerge-c/test/ported_wasm/suite.c index fc10fadc..440ed899 100644 --- a/automerge-c/test/ported_wasm/suite.c +++ b/rust/automerge-c/test/ported_wasm/suite.c @@ -1,6 +1,6 @@ +#include #include #include -#include #include /* third-party */ @@ -11,8 +11,5 @@ extern int run_ported_wasm_basic_tests(void); extern int run_ported_wasm_sync_tests(void); int run_ported_wasm_suite(void) { - return ( - run_ported_wasm_basic_tests() + - run_ported_wasm_sync_tests() - ); + return (run_ported_wasm_basic_tests() + run_ported_wasm_sync_tests()); } diff --git a/automerge-c/test/ported_wasm/sync_tests.c b/rust/automerge-c/test/ported_wasm/sync_tests.c similarity index 50% rename from automerge-c/test/ported_wasm/sync_tests.c rename to rust/automerge-c/test/ported_wasm/sync_tests.c index ec5f84a4..099f8dbf 100644 --- a/automerge-c/test/ported_wasm/sync_tests.c +++ b/rust/automerge-c/test/ported_wasm/sync_tests.c @@ -9,10 +9,12 @@ /* local */ #include -#include "../stack_utils.h" +#include +#include "../base_state.h" +#include "../cmocka_utils.h" typedef struct { - AMresultStack* stack; + BaseState* base_state; AMdoc* n1; AMdoc* n2; AMsyncState* s1; @@ -21,43 +23,35 @@ typedef struct { static int setup(void** state) { TestState* test_state = test_calloc(1, sizeof(TestState)); - test_state->n1 = 
AMpush(&test_state->stack, - AMcreate(AMpush(&test_state->stack, - AMactorIdInitStr("01234567"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; - test_state->n2 = AMpush(&test_state->stack, - AMcreate(AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; - test_state->s1 = AMpush(&test_state->stack, - AMsyncStateInit(), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; - test_state->s2 = AMpush(&test_state->stack, - AMsyncStateInit(), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; + setup_base((void**)&test_state->base_state); + AMstack** stack_ptr = &test_state->base_state->stack; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("01234567")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + assert_true( + AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &test_state->n1)); + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("89abcdef")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + assert_true( + AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &test_state->n2)); + assert_true(AMitemToSyncState( + AMstackItem(stack_ptr, AMsyncStateInit(), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_STATE)), &test_state->s1)); + assert_true(AMitemToSyncState( + AMstackItem(stack_ptr, AMsyncStateInit(), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_STATE)), &test_state->s2)); *state = test_state; return 0; } static int teardown(void** state) { TestState* test_state = *state; - AMfreeStack(&test_state->stack); + teardown_base((void**)&test_state->base_state); test_free(test_state); return 0; } -static void sync(AMdoc* a, - AMdoc* b, - AMsyncState* a_sync_state, - AMsyncState* b_sync_state) { +static void sync(AMdoc* a, AMdoc* b, AMsyncState* a_sync_state, AMsyncState* b_sync_state) { 
static size_t const MAX_ITER = 10; AMsyncMessage const* a2b_msg = NULL; @@ -66,29 +60,35 @@ static void sync(AMdoc* a, do { AMresult* a2b_msg_result = AMgenerateSyncMessage(a, a_sync_state); AMresult* b2a_msg_result = AMgenerateSyncMessage(b, b_sync_state); - AMvalue value = AMresultValue(a2b_msg_result); - switch (value.tag) { - case AM_VALUE_SYNC_MESSAGE: { - a2b_msg = value.sync_message; - AMfree(AMreceiveSyncMessage(b, b_sync_state, a2b_msg)); - } - break; - case AM_VALUE_VOID: a2b_msg = NULL; break; + AMitem* item = AMresultItem(a2b_msg_result); + switch (AMitemValType(item)) { + case AM_VAL_TYPE_SYNC_MESSAGE: { + AMitemToSyncMessage(item, &a2b_msg); + AMstackResult(NULL, AMreceiveSyncMessage(b, b_sync_state, a2b_msg), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + } break; + case AM_VAL_TYPE_VOID: + a2b_msg = NULL; + break; } - value = AMresultValue(b2a_msg_result); - switch (value.tag) { - case AM_VALUE_SYNC_MESSAGE: { - b2a_msg = value.sync_message; - AMfree(AMreceiveSyncMessage(a, a_sync_state, b2a_msg)); - } - break; - case AM_VALUE_VOID: b2a_msg = NULL; break; + item = AMresultItem(b2a_msg_result); + switch (AMitemValType(item)) { + case AM_VAL_TYPE_SYNC_MESSAGE: { + AMitemToSyncMessage(item, &b2a_msg); + AMstackResult(NULL, AMreceiveSyncMessage(a, a_sync_state, b2a_msg), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + } break; + case AM_VAL_TYPE_VOID: + b2a_msg = NULL; + break; } if (++iter > MAX_ITER) { - fail_msg("Did not synchronize within %d iterations. " - "Do you have a bug causing an infinite loop?", MAX_ITER); + fail_msg( + "Did not synchronize within %d iterations. 
" + "Do you have a bug causing an infinite loop?", + MAX_ITER); } - } while(a2b_msg || b2a_msg); + } while (a2b_msg || b2a_msg); } static time_t const TIME_0 = 0; @@ -96,151 +96,135 @@ static time_t const TIME_0 = 0; /** * \brief should send a sync message implying no local data */ -static void test_should_send_a_sync_message_implying_no_local_data(void **state) { +static void test_should_send_a_sync_message_implying_no_local_data(void** state) { /* const doc = create() const s1 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* const m1 = doc.generateSyncMessage(s1) if (m1 === null) { throw new RangeError("message should not be null") } const message: DecodedSyncMessage = decodeSyncMessage(m1) */ - AMsyncMessage const* const m1 = AMpush(&test_state->stack, - AMgenerateSyncMessage( - test_state->n1, - test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + AMsyncMessage const* m1; + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &m1)); /* assert.deepStrictEqual(message.heads, []) */ - AMchangeHashes heads = AMsyncMessageHeads(m1); - assert_int_equal(AMchangeHashesSize(&heads), 0); + AMitems heads = AMstackItems(stack_ptr, AMsyncMessageHeads(m1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_int_equal(AMitemsSize(&heads), 0); /* assert.deepStrictEqual(message.need, []) */ - AMchangeHashes needs = AMsyncMessageNeeds(m1); - assert_int_equal(AMchangeHashesSize(&needs), 0); + AMitems needs = AMstackItems(stack_ptr, AMsyncMessageNeeds(m1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_int_equal(AMitemsSize(&needs), 0); /* assert.deepStrictEqual(message.have.length, 1) */ - AMsyncHaves haves = AMsyncMessageHaves(m1); - assert_int_equal(AMsyncHavesSize(&haves), 1); + AMitems haves = AMstackItems(stack_ptr, AMsyncMessageHaves(m1), cmocka_cb, 
AMexpect(AM_VAL_TYPE_SYNC_HAVE)); + assert_int_equal(AMitemsSize(&haves), 1); /* assert.deepStrictEqual(message.have[0].lastSync, []) */ - AMsyncHave const* have0 = AMsyncHavesNext(&haves, 1); - AMchangeHashes last_sync = AMsyncHaveLastSync(have0); - assert_int_equal(AMchangeHashesSize(&last_sync), 0); + AMsyncHave const* have0; + assert_true(AMitemToSyncHave(AMitemsNext(&haves, 1), &have0)); + AMitems last_sync = + AMstackItems(stack_ptr, AMsyncHaveLastSync(have0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_int_equal(AMitemsSize(&last_sync), 0); /* assert.deepStrictEqual(message.have[0].bloom.byteLength, 0) assert.deepStrictEqual(message.changes, []) */ - AMchanges changes = AMsyncMessageChanges(m1); - assert_int_equal(AMchangesSize(&changes), 0); + AMitems changes = AMstackItems(stack_ptr, AMsyncMessageChanges(m1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + assert_int_equal(AMitemsSize(&changes), 0); } /** * \brief should not reply if we have no data as well */ -static void test_should_not_reply_if_we_have_no_data_as_well(void **state) { +static void test_should_not_reply_if_we_have_no_data_as_well(void** state) { /* const n1 = create(), n2 = create() const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* const m1 = n1.generateSyncMessage(s1) - if (m1 === null) { throw new RangeError("message should not be null") }*/ - AMsyncMessage const* const m1 = AMpush(&test_state->stack, - AMgenerateSyncMessage( - test_state->n1, - test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (m1 === null) { throw new RangeError("message should not be null") */ + AMsyncMessage const* m1; + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &m1)); /* n2.receiveSyncMessage(s2, m1) */ - AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, m1)); + 
AMstackItem(NULL, AMreceiveSyncMessage(test_state->n2, test_state->s2, m1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* const m2 = n2.generateSyncMessage(s2) assert.deepStrictEqual(m2, null) */ - AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n2, test_state->s2), - AM_VALUE_VOID, - cmocka_cb); + AMstackItem(NULL, AMgenerateSyncMessage(test_state->n2, test_state->s2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); } /** * \brief repos with equal heads do not need a reply message */ -static void test_repos_with_equal_heads_do_not_need_a_reply_message(void **state) { +static void test_repos_with_equal_heads_do_not_need_a_reply_message(void** state) { /* const n1 = create(), n2 = create() const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* */ /* make two nodes with the same changes */ /* const list = n1.putObject("_root", "n", []) */ - AMobjId const* const list = AMpush(&test_state->stack, - AMmapPutObject(test_state->n1, - AM_ROOT, - "n", - AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const list = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(test_state->n1, AM_ROOT, AMstr("n"), AM_OBJ_TYPE_LIST), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* for (let i = 0; i < 10; i++) { */ for (size_t i = 0; i != 10; ++i) { /* n1.insert(list, i, i) */ - AMfree(AMlistPutUint(test_state->n1, AM_ROOT, i, true, i)); + AMstackItem(NULL, AMlistPutUint(test_state->n1, list, i, true, i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* n2.applyChanges(n1.getChanges([])) */ 
- AMchanges const changes = AMpush(&test_state->stack, - AMgetChanges(test_state->n1, NULL), - AM_VALUE_CHANGES, - cmocka_cb).changes; - AMfree(AMapplyChanges(test_state->n2, &changes)); + AMitems const items = + AMstackItems(stack_ptr, AMgetChanges(test_state->n1, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + AMstackItem(NULL, AMapplyChanges(test_state->n2, &items), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ assert_true(AMequal(test_state->n1, test_state->n2)); /* */ /* generate a naive sync message */ /* const m1 = n1.generateSyncMessage(s1) - if (m1 === null) { throw new RangeError("message should not be null") }*/ - AMsyncMessage const* m1 = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, - test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (m1 === null) { throw new RangeError("message should not be null") */ + AMsyncMessage const* m1; + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &m1)); /* assert.deepStrictEqual(s1.lastSentHeads, n1.getHeads()) */ - AMchangeHashes const last_sent_heads = AMsyncStateLastSentHeads( - test_state->s1 - ); - AMchangeHashes const heads = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&last_sent_heads, &heads), 0); + AMitems const last_sent_heads = + AMstackItems(stack_ptr, AMsyncStateLastSentHeads(test_state->s1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMitems const heads = + AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_true(AMitemsEqual(&last_sent_heads, &heads)); /* */ /* heads are equal so this message should be null */ /* n2.receiveSyncMessage(s2, m1) */ - AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, m1)); + 
AMstackItem(NULL, AMreceiveSyncMessage(test_state->n2, test_state->s2, m1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* const m2 = n2.generateSyncMessage(s2) assert.strictEqual(m2, null) */ - AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n2, test_state->s2), - AM_VALUE_VOID, - cmocka_cb); + AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n2, test_state->s2), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); } /** * \brief n1 should offer all changes to n2 when starting from nothing */ -static void test_n1_should_offer_all_changes_to_n2_when_starting_from_nothing(void **state) { +static void test_n1_should_offer_all_changes_to_n2_when_starting_from_nothing(void** state) { /* const n1 = create(), n2 = create() */ TestState* test_state = *state; - + AMstack** stack_ptr = &test_state->base_state->stack; /* make changes for n1 that n2 should request */ /* const list = n1.putObject("_root", "n", []) */ - AMobjId const* const list = AMpush( - &test_state->stack, - AMmapPutObject(test_state->n1, AM_ROOT, "n", AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const list = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(test_state->n1, AM_ROOT, AMstr("n"), AM_OBJ_TYPE_LIST), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* for (let i = 0; i < 10; i++) { */ for (size_t i = 0; i != 10; ++i) { /* n1.insert(list, i, i) */ - AMfree(AMlistPutUint(test_state->n1, AM_ROOT, i, true, i)); + AMstackItem(NULL, AMlistPutUint(test_state->n1, list, i, true, i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* assert.notDeepStrictEqual(n1.materialize(), 
n2.materialize()) */ @@ -254,26 +238,24 @@ static void test_n1_should_offer_all_changes_to_n2_when_starting_from_nothing(vo /** * \brief should sync peers where one has commits the other does not */ -static void test_should_sync_peers_where_one_has_commits_the_other_does_not(void **state) { +static void test_should_sync_peers_where_one_has_commits_the_other_does_not(void** state) { /* const n1 = create(), n2 = create() */ TestState* test_state = *state; - + AMstack** stack_ptr = &test_state->base_state->stack; /* make changes for n1 that n2 should request */ /* const list = n1.putObject("_root", "n", []) */ - AMobjId const* const list = AMpush( - &test_state->stack, - AMmapPutObject(test_state->n1, AM_ROOT, "n", AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const list = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(test_state->n1, AM_ROOT, AMstr("n"), AM_OBJ_TYPE_LIST), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* for (let i = 0; i < 10; i++) { */ for (size_t i = 0; i != 10; ++i) { /* n1.insert(list, i, i) */ - AMfree(AMlistPutUint(test_state->n1, AM_ROOT, i, true, i)); + AMstackItem(NULL, AMlistPutUint(test_state->n1, list, i, true, i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ @@ -287,19 +269,20 @@ static void test_should_sync_peers_where_one_has_commits_the_other_does_not(void /** * \brief should work with prior sync state */ -static void test_should_work_with_prior_sync_state(void **state) { +static void test_should_work_with_prior_sync_state(void** state) { /* 
create & synchronize two nodes */ /* const n1 = create(), n2 = create() const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* */ /* for (let i = 0; i < 5; i++) { */ for (size_t i = 0; i != 5; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); @@ -308,10 +291,10 @@ static void test_should_work_with_prior_sync_state(void **state) { /* for (let i = 5; i < 10; i++) { */ for (size_t i = 5; i != 10; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ @@ -325,326 +308,333 @@ static void test_should_work_with_prior_sync_state(void **state) { /** * \brief should not generate messages once synced */ -static void test_should_not_generate_messages_once_synced(void **state) { +static void test_should_not_generate_messages_once_synced(void** state) { /* create & synchronize two nodes */ /* const n1 = create('abc123'), n2 = create('def456') const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; - AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, - AMactorIdInitStr("abc123"), - 
AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, - AMactorIdInitStr("def456"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); + AMstack** stack_ptr = &test_state->base_state->stack; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("abc123")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMstackItem(NULL, AMsetActorId(test_state->n1, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("def456")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMstackItem(NULL, AMsetActorId(test_state->n2, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* */ /* let message, patch for (let i = 0; i < 5; i++) { */ for (size_t i = 0; i != 5; ++i) { - // n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); - // n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); - /* { */ + /* n1.put("_root", "x", i) */ + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* n1.commit("", 0) */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* for (let i = 0; i < 5; i++) { */ for (size_t i = 0; i != 5; ++i) { /* n2.put("_root", "y", i) */ - AMfree(AMmapPutUint(test_state->n2, AM_ROOT, "y", i)); + AMstackItem(NULL, AMmapPutUint(test_state->n2, AM_ROOT, AMstr("y"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n2.commit("", 0) */ - AMfree(AMcommit(test_state->n2, "", &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n2, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* n1 reports what it has */ /* message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be null") }*/ - AMsyncMessage const* message 
= AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, - test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (message === null) { throw new RangeError("message should not be + null") */ + AMsyncMessage const* message; + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &message)); /* */ /* n2 receives that message and sends changes along with what it has */ /* n2.receiveSyncMessage(s2, message) */ - AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, message)); + AMstackItem(NULL, AMreceiveSyncMessage(test_state->n2, test_state->s2, message), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* message = n2.generateSyncMessage(s2) - if (message === null) { throw new RangeError("message should not be null") }*/ - message = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n2, test_state->s2), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - AMchanges message_changes = AMsyncMessageChanges(message); - assert_int_equal(AMchangesSize(&message_changes), 5); + if (message === null) { throw new RangeError("message should not be + null") */ + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n2, test_state->s2), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &message)); + AMitems message_changes = + AMstackItems(stack_ptr, AMsyncMessageChanges(message), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + assert_int_equal(AMitemsSize(&message_changes), 5); /* */ /* n1 receives the changes and replies with the changes it now knows that * n2 needs */ /* n1.receiveSyncMessage(s1, message) */ - AMfree(AMreceiveSyncMessage(test_state->n1, test_state->s1, message)); + AMstackItem(NULL, AMreceiveSyncMessage(test_state->n1, test_state->s1, message), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* message = n2.generateSyncMessage(s2) - if (message === null) { throw new 
RangeError("message should not be null") }*/ - message = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - message_changes = AMsyncMessageChanges(message); - assert_int_equal(AMchangesSize(&message_changes), 5); + if (message === null) { throw new RangeError("message should not be + null") */ + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &message)); + message_changes = AMstackItems(stack_ptr, AMsyncMessageChanges(message), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + assert_int_equal(AMitemsSize(&message_changes), 5); /* */ /* n2 applies the changes and sends confirmation ending the exchange */ /* n2.receiveSyncMessage(s2, message) */ - AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, message)); + AMstackItem(NULL, AMreceiveSyncMessage(test_state->n2, test_state->s2, message), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* message = n2.generateSyncMessage(s2) - if (message === null) { throw new RangeError("message should not be null") }*/ - message = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n2, test_state->s2), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (message === null) { throw new RangeError("message should not be + null") */ + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n2, test_state->s2), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &message)); /* */ /* n1 receives the message and has nothing more to say */ /* n1.receiveSyncMessage(s1, message) */ - AMfree(AMreceiveSyncMessage(test_state->n1, test_state->s1, message)); + AMstackItem(NULL, AMreceiveSyncMessage(test_state->n1, test_state->s1, message), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* message = n1.generateSyncMessage(s1) assert.deepStrictEqual(message, null) */ - AMpush(&test_state->stack, - 
AMgenerateSyncMessage(test_state->n1, test_state->s1), - AM_VALUE_VOID, - cmocka_cb); + AMstackItem(NULL, AMgenerateSyncMessage(test_state->n1, test_state->s1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* //assert.deepStrictEqual(patch, null) // no changes arrived */ /* */ /* n2 also has nothing left to say */ /* message = n2.generateSyncMessage(s2) assert.deepStrictEqual(message, null) */ - AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n2, test_state->s2), - AM_VALUE_VOID, - cmocka_cb); + AMstackItem(NULL, AMgenerateSyncMessage(test_state->n2, test_state->s2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); } /** * \brief should allow simultaneous messages during synchronization */ -static void test_should_allow_simultaneous_messages_during_synchronization(void **state) { +static void test_should_allow_simultaneous_messages_during_synchronization(void** state) { /* create & synchronize two nodes */ /* const n1 = create('abc123'), n2 = create('def456') const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; - AMfree(AMsetActorId(test_state->n1, AMpush(&test_state->stack, - AMactorIdInitStr("abc123"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); - AMfree(AMsetActorId(test_state->n2, AMpush(&test_state->stack, - AMactorIdInitStr("def456"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id)); + AMstack** stack_ptr = &test_state->base_state->stack; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("abc123")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMstackItem(NULL, AMsetActorId(test_state->n1, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("def456")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMstackItem(NULL, AMsetActorId(test_state->n2, actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* */ /* for (let i = 0; i < 5; i++) { */ for (size_t i = 0; i != 5; ++i) 
{ /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* for (let i = 0; i < 5; i++) { */ for (size_t i = 0; i != 5; ++i) { /* n2.put("_root", "y", i) */ - AMfree(AMmapPutUint(test_state->n2, AM_ROOT, "y", i)); + AMstackItem(NULL, AMmapPutUint(test_state->n2, AM_ROOT, AMstr("y"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n2.commit("", 0) */ - AMfree(AMcommit(test_state->n2, "", &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n2, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* const head1 = n1.getHeads()[0], head2 = n2.getHeads()[0] */ - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMbyteSpan const head1 = AMchangeHashesNext(&heads1, 1); - AMchangeHashes heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->n2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMbyteSpan const head2 = AMchangeHashesNext(&heads2, 1); + AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMbyteSpan head1; + assert_true(AMitemToChangeHash(AMitemsNext(&heads1, 1), &head1)); + AMitems heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMbyteSpan head2; + assert_true(AMitemToChangeHash(AMitemsNext(&heads2, 1), &head2)); /* */ /* both sides report what they have but have no shared peer state */ /* let msg1to2, msg2to1 msg1to2 = n1.generateSyncMessage(s1) - if (msg1to2 === null) { throw new RangeError("message should not be null") }*/ - AMsyncMessage const* 
msg1to2 = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, - test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (msg1to2 === null) { throw new RangeError("message should not be + null") */ + AMsyncMessage const* msg1to2; + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &msg1to2)); /* msg2to1 = n2.generateSyncMessage(s2) - if (msg2to1 === null) { throw new RangeError("message should not be null") }*/ - AMsyncMessage const* msg2to1 = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n2, - test_state->s2), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (msg2to1 === null) { throw new RangeError("message should not be + null") */ + AMsyncMessage const* msg2to1; + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n2, test_state->s2), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &msg2to1)); /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) */ - AMchanges msg1to2_changes = AMsyncMessageChanges(msg1to2); - assert_int_equal(AMchangesSize(&msg1to2_changes), 0); - /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync.length, 0)*/ - AMsyncHaves msg1to2_haves = AMsyncMessageHaves(msg1to2); - AMsyncHave const* msg1to2_have = AMsyncHavesNext(&msg1to2_haves, 1); - AMchangeHashes msg1to2_last_sync = AMsyncHaveLastSync(msg1to2_have); - assert_int_equal(AMchangeHashesSize(&msg1to2_last_sync), 0); + AMitems msg1to2_changes = + AMstackItems(stack_ptr, AMsyncMessageChanges(msg1to2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + assert_int_equal(AMitemsSize(&msg1to2_changes), 0); + /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync.length, + * 0 */ + AMitems msg1to2_haves = + AMstackItems(stack_ptr, AMsyncMessageHaves(msg1to2), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_HAVE)); + AMsyncHave const* msg1to2_have; + 
assert_true(AMitemToSyncHave(AMitemsNext(&msg1to2_haves, 1), &msg1to2_have)); + AMitems msg1to2_last_sync = + AMstackItems(stack_ptr, AMsyncHaveLastSync(msg1to2_have), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_int_equal(AMitemsSize(&msg1to2_last_sync), 0); /* assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) */ - AMchanges msg2to1_changes = AMsyncMessageChanges(msg2to1); - assert_int_equal(AMchangesSize(&msg2to1_changes), 0); - /* assert.deepStrictEqual(decodeSyncMessage(msg2to1).have[0].lastSync.length, 0)*/ - AMsyncHaves msg2to1_haves = AMsyncMessageHaves(msg2to1); - AMsyncHave const* msg2to1_have = AMsyncHavesNext(&msg2to1_haves, 1); - AMchangeHashes msg2to1_last_sync = AMsyncHaveLastSync(msg2to1_have); - assert_int_equal(AMchangeHashesSize(&msg2to1_last_sync), 0); + AMitems msg2to1_changes = + AMstackItems(stack_ptr, AMsyncMessageChanges(msg2to1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + assert_int_equal(AMitemsSize(&msg2to1_changes), 0); + /* assert.deepStrictEqual(decodeSyncMessage(msg2to1).have[0].lastSync.length, + * 0 */ + AMitems msg2to1_haves = + AMstackItems(stack_ptr, AMsyncMessageHaves(msg2to1), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_HAVE)); + AMsyncHave const* msg2to1_have; + assert_true(AMitemToSyncHave(AMitemsNext(&msg2to1_haves, 1), &msg2to1_have)); + AMitems msg2to1_last_sync = + AMstackItems(stack_ptr, AMsyncHaveLastSync(msg2to1_have), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_int_equal(AMitemsSize(&msg2to1_last_sync), 0); /* */ - /* n1 and n2 receive that message and update sync state but make no patch*/ + /* n1 and n2 receive that message and update sync state but make no patc */ /* n1.receiveSyncMessage(s1, msg2to1) */ - AMfree(AMreceiveSyncMessage(test_state->n1, test_state->s1, msg2to1)); + AMstackItem(NULL, AMreceiveSyncMessage(test_state->n1, test_state->s1, msg2to1), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* n2.receiveSyncMessage(s2, msg1to2) */ - 
AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, msg1to2)); + AMstackItem(NULL, AMreceiveSyncMessage(test_state->n2, test_state->s2, msg1to2), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* */ /* now both reply with their local changes that the other lacks * (standard warning that 1% of the time this will result in a "needs" * message) */ /* msg1to2 = n1.generateSyncMessage(s1) - if (msg1to2 === null) { throw new RangeError("message should not be null") }*/ - msg1to2 = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (msg1to2 === null) { throw new RangeError("message should not be + null") */ + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &msg1to2)); /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 5) */ - msg1to2_changes = AMsyncMessageChanges(msg1to2); - assert_int_equal(AMchangesSize(&msg1to2_changes), 5); + msg1to2_changes = AMstackItems(stack_ptr, AMsyncMessageChanges(msg1to2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + assert_int_equal(AMitemsSize(&msg1to2_changes), 5); /* msg2to1 = n2.generateSyncMessage(s2) - if (msg2to1 === null) { throw new RangeError("message should not be null") }*/ - msg2to1 = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n2, test_state->s2), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (msg2to1 === null) { throw new RangeError("message should not be + null") */ + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n2, test_state->s2), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &msg2to1)); /* assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 5) */ - msg2to1_changes = AMsyncMessageChanges(msg2to1); - assert_int_equal(AMchangesSize(&msg2to1_changes), 5); + msg2to1_changes = AMstackItems(stack_ptr, 
AMsyncMessageChanges(msg2to1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + assert_int_equal(AMitemsSize(&msg2to1_changes), 5); /* */ /* both should now apply the changes and update the frontend */ /* n1.receiveSyncMessage(s1, msg2to1) */ - AMfree(AMreceiveSyncMessage(test_state->n1, - test_state->s1, - msg2to1)); + AMstackItem(NULL, AMreceiveSyncMessage(test_state->n1, test_state->s1, msg2to1), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* assert.deepStrictEqual(n1.getMissingDeps(), []) */ - AMchangeHashes missing_deps = AMpush(&test_state->stack, - AMgetMissingDeps(test_state->n1, NULL), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesSize(&missing_deps), 0); + AMitems missing_deps = + AMstackItems(stack_ptr, AMgetMissingDeps(test_state->n1, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_int_equal(AMitemsSize(&missing_deps), 0); /* //assert.notDeepStrictEqual(patch1, null) assert.deepStrictEqual(n1.materialize(), { x: 4, y: 4 }) */ - assert_int_equal(AMpush(&test_state->stack, - AMmapGet(test_state->n1, AM_ROOT, "x", NULL), - AM_VALUE_UINT, - cmocka_cb).uint, 4); - assert_int_equal(AMpush(&test_state->stack, - AMmapGet(test_state->n1, AM_ROOT, "y", NULL), - AM_VALUE_UINT, - cmocka_cb).uint, 4); + uint64_t uint; + assert_true(AMitemToUint(AMstackItem(stack_ptr, AMmapGet(test_state->n1, AM_ROOT, AMstr("x"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_UINT)), + &uint)); + assert_int_equal(uint, 4); + assert_true(AMitemToUint(AMstackItem(stack_ptr, AMmapGet(test_state->n1, AM_ROOT, AMstr("y"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_UINT)), + &uint)); + assert_int_equal(uint, 4); /* */ /* n2.receiveSyncMessage(s2, msg1to2) */ - AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, msg1to2)); + AMstackItem(NULL, AMreceiveSyncMessage(test_state->n2, test_state->s2, msg1to2), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* assert.deepStrictEqual(n2.getMissingDeps(), []) */ - missing_deps = AMpush(&test_state->stack, 
- AMgetMissingDeps(test_state->n2, NULL), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesSize(&missing_deps), 0); + missing_deps = + AMstackItems(stack_ptr, AMgetMissingDeps(test_state->n2, NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_int_equal(AMitemsSize(&missing_deps), 0); /* //assert.notDeepStrictEqual(patch2, null) assert.deepStrictEqual(n2.materialize(), { x: 4, y: 4 }) */ - assert_int_equal(AMpush(&test_state->stack, - AMmapGet(test_state->n2, AM_ROOT, "x", NULL), - AM_VALUE_UINT, - cmocka_cb).uint, 4); - assert_int_equal(AMpush(&test_state->stack, - AMmapGet(test_state->n2, AM_ROOT, "y", NULL), - AM_VALUE_UINT, - cmocka_cb).uint, 4); + assert_true(AMitemToUint(AMstackItem(stack_ptr, AMmapGet(test_state->n2, AM_ROOT, AMstr("x"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_UINT)), + &uint)); + assert_int_equal(uint, 4); + assert_true(AMitemToUint(AMstackItem(stack_ptr, AMmapGet(test_state->n2, AM_ROOT, AMstr("y"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_UINT)), + &uint)); + assert_int_equal(uint, 4); /* */ /* The response acknowledges the changes received and sends no further * changes */ /* msg1to2 = n1.generateSyncMessage(s1) - if (msg1to2 === null) { throw new RangeError("message should not be null") }*/ - msg1to2 = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (msg1to2 === null) { throw new RangeError("message should not be + null") */ + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &msg1to2)); /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).changes.length, 0) */ - msg1to2_changes = AMsyncMessageChanges(msg1to2); - assert_int_equal(AMchangesSize(&msg1to2_changes), 0); + msg1to2_changes = AMstackItems(stack_ptr, AMsyncMessageChanges(msg1to2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + 
assert_int_equal(AMitemsSize(&msg1to2_changes), 0); /* msg2to1 = n2.generateSyncMessage(s2) - if (msg2to1 === null) { throw new RangeError("message should not be null") }*/ - msg2to1 = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n2, test_state->s2), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (msg2to1 === null) { throw new RangeError("message should not be + null") */ + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n2, test_state->s2), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &msg2to1)); /* assert.deepStrictEqual(decodeSyncMessage(msg2to1).changes.length, 0) */ - msg2to1_changes = AMsyncMessageChanges(msg2to1); - assert_int_equal(AMchangesSize(&msg2to1_changes), 0); + msg2to1_changes = AMstackItems(stack_ptr, AMsyncMessageChanges(msg2to1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + assert_int_equal(AMitemsSize(&msg2to1_changes), 0); /* */ /* After receiving acknowledgements, their shared heads should be equal */ /* n1.receiveSyncMessage(s1, msg2to1) */ - AMfree(AMreceiveSyncMessage(test_state->n1, test_state->s1, msg2to1)); + AMstackItem(NULL, AMreceiveSyncMessage(test_state->n1, test_state->s1, msg2to1), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* n2.receiveSyncMessage(s2, msg1to2) */ - AMfree(AMreceiveSyncMessage(test_state->n2, test_state->s2, msg1to2)); + AMstackItem(NULL, AMreceiveSyncMessage(test_state->n2, test_state->s2, msg1to2), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* assert.deepStrictEqual(s1.sharedHeads, [head1, head2].sort()) */ - AMchangeHashes s1_shared_heads = AMsyncStateSharedHeads(test_state->s1); - assert_memory_equal(AMchangeHashesNext(&s1_shared_heads, 1).src, - head1.src, - head1.count); - assert_memory_equal(AMchangeHashesNext(&s1_shared_heads, 1).src, - head2.src, - head2.count); + AMitems s1_shared_heads = + AMstackItems(stack_ptr, AMsyncStateSharedHeads(test_state->s1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMbyteSpan 
s1_shared_change_hash; + assert_true(AMitemToChangeHash(AMitemsNext(&s1_shared_heads, 1), &s1_shared_change_hash)); + assert_memory_equal(s1_shared_change_hash.src, head1.src, head1.count); + assert_true(AMitemToChangeHash(AMitemsNext(&s1_shared_heads, 1), &s1_shared_change_hash)); + assert_memory_equal(s1_shared_change_hash.src, head2.src, head2.count); /* assert.deepStrictEqual(s2.sharedHeads, [head1, head2].sort()) */ - AMchangeHashes s2_shared_heads = AMsyncStateSharedHeads(test_state->s2); - assert_memory_equal(AMchangeHashesNext(&s2_shared_heads, 1).src, - head1.src, - head1.count); - assert_memory_equal(AMchangeHashesNext(&s2_shared_heads, 1).src, - head2.src, - head2.count); + AMitems s2_shared_heads = + AMstackItems(stack_ptr, AMsyncStateSharedHeads(test_state->s2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMbyteSpan s2_shared_change_hash; + assert_true(AMitemToChangeHash(AMitemsNext(&s2_shared_heads, 1), &s2_shared_change_hash)); + assert_memory_equal(s2_shared_change_hash.src, head1.src, head1.count); + assert_true(AMitemToChangeHash(AMitemsNext(&s2_shared_heads, 1), &s2_shared_change_hash)); + assert_memory_equal(s2_shared_change_hash.src, head2.src, head2.count); /* //assert.deepStrictEqual(patch1, null) //assert.deepStrictEqual(patch2, null) */ /* */ /* We're in sync, no more messages required */ /* msg1to2 = n1.generateSyncMessage(s1) assert.deepStrictEqual(msg1to2, null) */ - AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, test_state->s1), - AM_VALUE_VOID, - cmocka_cb); + AMstackItem(NULL, AMgenerateSyncMessage(test_state->n1, test_state->s1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* msg2to1 = n2.generateSyncMessage(s2) assert.deepStrictEqual(msg2to1, null) */ - AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n2, test_state->s2), - AM_VALUE_VOID, - cmocka_cb); + AMstackItem(NULL, AMgenerateSyncMessage(test_state->n2, test_state->s2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* */ /* If we make one more 
change and start another sync then its lastSync * should be updated */ /* n1.put("_root", "x", 5) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", 5)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* msg1to2 = n1.generateSyncMessage(s1) - if (msg1to2 === null) { throw new RangeError("message should not be null") }*/ - msg1to2 = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; - /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync, [head1, head2].sort()*/ - msg1to2_haves = AMsyncMessageHaves(msg1to2); - msg1to2_have = AMsyncHavesNext(&msg1to2_haves, 1); - msg1to2_last_sync = AMsyncHaveLastSync(msg1to2_have); - AMbyteSpan msg1to2_last_sync_next = AMchangeHashesNext(&msg1to2_last_sync, 1); + if (msg1to2 === null) { throw new RangeError("message should not be + null") */ + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &msg1to2)); + /* assert.deepStrictEqual(decodeSyncMessage(msg1to2).have[0].lastSync, + * [head1, head2].sort( */ + msg1to2_haves = AMstackItems(stack_ptr, AMsyncMessageHaves(msg1to2), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_HAVE)); + assert_true(AMitemToSyncHave(AMitemsNext(&msg1to2_haves, 1), &msg1to2_have)); + msg1to2_last_sync = + AMstackItems(stack_ptr, AMsyncHaveLastSync(msg1to2_have), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMbyteSpan msg1to2_last_sync_next; + assert_true(AMitemToChangeHash(AMitemsNext(&msg1to2_last_sync, 1), &msg1to2_last_sync_next)); assert_int_equal(msg1to2_last_sync_next.count, head1.count); assert_memory_equal(msg1to2_last_sync_next.src, head1.src, head1.count); - msg1to2_last_sync_next = AMchangeHashesNext(&msg1to2_last_sync, 1); + assert_true(AMitemToChangeHash(AMitemsNext(&msg1to2_last_sync, 1), &msg1to2_last_sync_next)); 
assert_int_equal(msg1to2_last_sync_next.count, head2.count); assert_memory_equal(msg1to2_last_sync_next.src, head2.src, head2.count); } @@ -652,87 +642,89 @@ static void test_should_allow_simultaneous_messages_during_synchronization(void /** * \brief should assume sent changes were received until we hear otherwise */ -static void test_should_assume_sent_changes_were_received_until_we_hear_otherwise(void **state) { +static void test_should_assume_sent_changes_were_received_until_we_hear_otherwise(void** state) { /* const n1 = create('01234567'), n2 = create('89abcdef') const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* let message = null */ /* */ /* const items = n1.putObject("_root", "items", []) */ - AMobjId const* items = AMpush(&test_state->stack, - AMmapPutObject(test_state->n1, - AM_ROOT, - "items", - AM_OBJ_TYPE_LIST), - AM_VALUE_OBJ_ID, - cmocka_cb).obj_id; + AMobjId const* const items = + AMitemObjId(AMstackItem(stack_ptr, AMmapPutObject(test_state->n1, AM_ROOT, AMstr("items"), AM_OBJ_TYPE_LIST), + cmocka_cb, AMexpect(AM_VAL_TYPE_OBJ_TYPE))); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* */ /* sync(n1, n2, s1, s2) */ sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); /* */ /* n1.push(items, "x") */ - AMfree(AMlistPutStr(test_state->n1, items, SIZE_MAX, true, "x")); + AMstackItem(NULL, AMlistPutStr(test_state->n1, items, SIZE_MAX, true, AMstr("x")), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be null") }*/ - AMsyncMessage 
const* message = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, - test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (message === null) { throw new RangeError("message should not be null") + */ + AMsyncMessage const* message; + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &message)); /* assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) */ - AMchanges message_changes = AMsyncMessageChanges(message); - assert_int_equal(AMchangesSize(&message_changes), 1); + AMitems message_changes = + AMstackItems(stack_ptr, AMsyncMessageChanges(message), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + assert_int_equal(AMitemsSize(&message_changes), 1); /* */ /* n1.push(items, "y") */ - AMfree(AMlistPutStr(test_state->n1, items, SIZE_MAX, true, "y")); + AMstackItem(NULL, AMlistPutStr(test_state->n1, items, SIZE_MAX, true, AMstr("y")), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be null") }*/ - message = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (message === null) { throw new RangeError("message should not be + null") */ + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &message)); /* assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) */ - message_changes = AMsyncMessageChanges(message); - assert_int_equal(AMchangesSize(&message_changes), 1); + message_changes = AMstackItems(stack_ptr, 
AMsyncMessageChanges(message), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + assert_int_equal(AMitemsSize(&message_changes), 1); /* */ /* n1.push(items, "z") */ - AMfree(AMlistPutStr(test_state->n1, items, SIZE_MAX, true, "z")); + AMstackItem(NULL, AMlistPutStr(test_state->n1, items, SIZE_MAX, true, AMstr("z")), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* */ /* message = n1.generateSyncMessage(s1) - if (message === null) { throw new RangeError("message should not be null") }*/ - message = AMpush(&test_state->stack, - AMgenerateSyncMessage(test_state->n1, test_state->s1), - AM_VALUE_SYNC_MESSAGE, - cmocka_cb).sync_message; + if (message === null) { throw new RangeError("message should not be + null") */ + assert_true(AMitemToSyncMessage(AMstackItem(stack_ptr, AMgenerateSyncMessage(test_state->n1, test_state->s1), + cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_MESSAGE)), + &message)); /* assert.deepStrictEqual(decodeSyncMessage(message).changes.length, 1) */ - message_changes = AMsyncMessageChanges(message); - assert_int_equal(AMchangesSize(&message_changes), 1); + message_changes = AMstackItems(stack_ptr, AMsyncMessageChanges(message), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); + assert_int_equal(AMitemsSize(&message_changes), 1); } /** * \brief should work regardless of who initiates the exchange */ -static void test_should_work_regardless_of_who_initiates_the_exchange(void **state) { +static void test_should_work_regardless_of_who_initiates_the_exchange(void** state) { /* create & synchronize two nodes */ /* const n1 = create(), n2 = create() const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* */ /* for (let i = 0; i < 5; i++) { */ for (size_t i = 0; i != 5; ++i) { /* n1.put("_root", "x", i) */ - 
AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* sync(n1, n2, s1, s2) */ @@ -742,10 +734,10 @@ static void test_should_work_regardless_of_who_initiates_the_exchange(void **sta /* for (let i = 5; i < 10; i++) { */ for (size_t i = 5; i != 10; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ @@ -759,24 +751,26 @@ static void test_should_work_regardless_of_who_initiates_the_exchange(void **sta /** * \brief should work without prior sync state */ -static void test_should_work_without_prior_sync_state(void **state) { - /* Scenario: ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 - * c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ - * `-- c15 <-- c16 <-- c17 - * lastSync is undefined. */ +static void test_should_work_without_prior_sync_state(void** state) { + /* Scenario: ,-- + * c10 <-- c11 <-- c12 <-- c13 <-- c14 c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 + * <-- c6 <-- c7 <-- c8 <-- c9 <-+ + * `-- + * c15 <-- c16 <-- c17 lastSync is undefined. 
*/ /* */ /* create two peers both with divergent commits */ /* const n1 = create('01234567'), n2 = create('89abcdef') const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* */ /* for (let i = 0; i < 10; i++) { */ for (size_t i = 0; i != 10; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* sync(n1, n2) */ @@ -785,19 +779,19 @@ static void test_should_work_without_prior_sync_state(void **state) { /* for (let i = 10; i < 15; i++) { */ for (size_t i = 10; i != 15; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* for (let i = 15; i < 18; i++) { */ for (size_t i = 15; i != 18; ++i) { /* n2.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n2, AM_ROOT, "x", i)); + AMstackItem(NULL, AMmapPutUint(test_state->n2, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n2.commit("", 0) */ - AMfree(AMcommit(test_state->n2, "", &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n2, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ @@ -805,15 +799,9 @@ static void test_should_work_without_prior_sync_state(void **state) { /* 
sync(n1, n2) */ sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->n2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMitems heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_true(AMitemsEqual(&heads1, &heads2)); /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ assert_true(AMequal(test_state->n1, test_state->n2)); } @@ -821,25 +809,27 @@ static void test_should_work_without_prior_sync_state(void **state) { /** * \brief should work with prior sync state */ -static void test_should_work_with_prior_sync_state_2(void **state) { +static void test_should_work_with_prior_sync_state_2(void** state) { /* Scenario: - * ,-- c10 <-- c11 <-- c12 <-- c13 <-- c14 - * c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ - * `-- c15 <-- c16 <-- c17 - * lastSync is c9. */ + * ,-- + * c10 <-- c11 <-- c12 <-- c13 <-- c14 c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 + * <-- c6 <-- c7 <-- c8 <-- c9 <-+ + * `-- + * c15 <-- c16 <-- c17 lastSync is c9. 
*/ /* */ /* create two peers both with divergent commits */ /* const n1 = create('01234567'), n2 = create('89abcdef') let s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* */ /* for (let i = 0; i < 10; i++) { */ for (size_t i = 0; i != 10; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* sync(n1, n2, s1, s2) */ @@ -848,54 +838,44 @@ static void test_should_work_with_prior_sync_state_2(void **state) { /* for (let i = 10; i < 15; i++) { */ for (size_t i = 10; i != 15; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* for (let i = 15; i < 18; i++) { */ for (size_t i = 15; i != 18; ++i) { /* n2.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n2, AM_ROOT, "x", i)); + AMstackItem(NULL, AMmapPutUint(test_state->n2, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n2.commit("", 0) */ - AMfree(AMcommit(test_state->n2, "", &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n2, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* s1 = decodeSyncState(encodeSyncState(s1)) */ - AMbyteSpan encoded = AMpush(&test_state->stack, - AMsyncStateEncode(test_state->s1), - AM_VALUE_BYTES, - 
cmocka_cb).bytes; - AMsyncState* s1 = AMpush(&test_state->stack, - AMsyncStateDecode(encoded.src, encoded.count), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; + AMbyteSpan encoded; + assert_true(AMitemToBytes( + AMstackItem(stack_ptr, AMsyncStateEncode(test_state->s1), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &encoded)); + AMsyncState* s1; + assert_true(AMitemToSyncState(AMstackItem(stack_ptr, AMsyncStateDecode(encoded.src, encoded.count), cmocka_cb, + AMexpect(AM_VAL_TYPE_SYNC_STATE)), + &s1)); /* s2 = decodeSyncState(encodeSyncState(s2)) */ - encoded = AMpush(&test_state->stack, - AMsyncStateEncode(test_state->s2), - AM_VALUE_BYTES, - cmocka_cb).bytes; - AMsyncState* s2 = AMpush(&test_state->stack, - AMsyncStateDecode(encoded.src, - encoded.count), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; + assert_true(AMitemToBytes( + AMstackItem(stack_ptr, AMsyncStateEncode(test_state->s2), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &encoded)); + AMsyncState* s2; + assert_true(AMitemToSyncState(AMstackItem(stack_ptr, AMsyncStateDecode(encoded.src, encoded.count), cmocka_cb, + AMexpect(AM_VAL_TYPE_SYNC_STATE)), + &s2)); /* */ /* assert.notDeepStrictEqual(n1.materialize(), n2.materialize()) */ assert_false(AMequal(test_state->n1, test_state->n2)); /* sync(n1, n2, s1, s2) */ sync(test_state->n1, test_state->n2, s1, s2); /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->n2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMitems heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_true(AMitemsEqual(&heads1, 
&heads2)); /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ assert_true(AMequal(test_state->n1, test_state->n2)); } @@ -903,39 +883,39 @@ static void test_should_work_with_prior_sync_state_2(void **state) { /** * \brief should ensure non-empty state after sync */ -static void test_should_ensure_non_empty_state_after_sync(void **state) { +static void test_should_ensure_non_empty_state_after_sync(void** state) { /* const n1 = create('01234567'), n2 = create('89abcdef') const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* */ /* for (let i = 0; i < 3; i++) { */ for (size_t i = 0; i != 3; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* sync(n1, n2, s1, s2) */ sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); /* */ /* assert.deepStrictEqual(s1.sharedHeads, n1.getHeads()) */ - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes shared_heads1 = AMsyncStateSharedHeads(test_state->s1); - assert_int_equal(AMchangeHashesCmp(&shared_heads1, &heads1), 0); + AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMitems shared_heads1 = + AMstackItems(stack_ptr, AMsyncStateSharedHeads(test_state->s1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_true(AMitemsEqual(&shared_heads1, &heads1)); /* assert.deepStrictEqual(s2.sharedHeads, n1.getHeads()) */ - AMchangeHashes shared_heads2 = 
AMsyncStateSharedHeads(test_state->s2); - assert_int_equal(AMchangeHashesCmp(&shared_heads2, &heads1), 0); + AMitems shared_heads2 = + AMstackItems(stack_ptr, AMsyncStateSharedHeads(test_state->s2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_true(AMitemsEqual(&shared_heads2, &heads1)); } /** * \brief should re-sync after one node crashed with data loss */ -static void test_should_resync_after_one_node_crashed_with_data_loss(void **state) { +static void test_should_resync_after_one_node_crashed_with_data_loss(void** state) { /* Scenario: (r) (n2) (n1) * c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 * n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync @@ -946,15 +926,16 @@ static void test_should_resync_after_one_node_crashed_with_data_loss(void **stat let s1 = initSyncState() const s2 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* */ /* n1 makes three changes, which we sync to n2 */ /* for (let i = 0; i < 3; i++) { */ for (size_t i = 0; i != 3; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); @@ -963,28 +944,25 @@ static void test_should_resync_after_one_node_crashed_with_data_loss(void **stat /* let r let rSyncState ;[r, rSyncState] = [n2.clone(), s2.clone()] */ - AMdoc* r = AMpush(&test_state->stack, - AMclone(test_state->n2), - AM_VALUE_DOC, - cmocka_cb).doc; - AMbyteSpan const encoded_s2 = AMpush(&test_state->stack, - AMsyncStateEncode(test_state->s2), - AM_VALUE_BYTES, - cmocka_cb).bytes; - AMsyncState* sync_state_r = 
AMpush(&test_state->stack, - AMsyncStateDecode(encoded_s2.src, - encoded_s2.count), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; + AMdoc* r; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMclone(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &r)); + AMbyteSpan encoded_s2; + assert_true( + AMitemToBytes(AMstackItem(stack_ptr, AMsyncStateEncode(test_state->s2), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), + &encoded_s2)); + AMsyncState* sync_state_r; + assert_true(AMitemToSyncState(AMstackItem(stack_ptr, AMsyncStateDecode(encoded_s2.src, encoded_s2.count), cmocka_cb, + AMexpect(AM_VAL_TYPE_SYNC_STATE)), + &sync_state_r)); /* */ /* sync another few commits */ /* for (let i = 3; i < 6; i++) { */ for (size_t i = 3; i != 6; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* sync(n1, n2, s1, s2) */ @@ -992,15 +970,9 @@ static void test_should_resync_after_one_node_crashed_with_data_loss(void **stat /* */ /* everyone should be on the same page here */ /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->n2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMitems heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + 
assert_true(AMitemsEqual(&heads1, &heads2)); /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ assert_true(AMequal(test_state->n1, test_state->n2)); /* */ @@ -1009,132 +981,106 @@ static void test_should_resync_after_one_node_crashed_with_data_loss(void **stat /* for (let i = 6; i < 9; i++) { */ for (size_t i = 6; i != 9; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* s1 = decodeSyncState(encodeSyncState(s1)) */ - AMbyteSpan const encoded_s1 = AMpush(&test_state->stack, - AMsyncStateEncode(test_state->s1), - AM_VALUE_BYTES, - cmocka_cb).bytes; - AMsyncState* const s1 = AMpush(&test_state->stack, - AMsyncStateDecode(encoded_s1.src, - encoded_s1.count), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; + AMbyteSpan encoded_s1; + assert_true( + AMitemToBytes(AMstackItem(stack_ptr, AMsyncStateEncode(test_state->s1), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), + &encoded_s1)); + AMsyncState* s1; + assert_true(AMitemToSyncState(AMstackItem(stack_ptr, AMsyncStateDecode(encoded_s1.src, encoded_s1.count), cmocka_cb, + AMexpect(AM_VAL_TYPE_SYNC_STATE)), + &s1)); /* rSyncState = decodeSyncState(encodeSyncState(rSyncState)) */ - AMbyteSpan const encoded_r = AMpush(&test_state->stack, - AMsyncStateEncode(sync_state_r), - AM_VALUE_BYTES, - cmocka_cb).bytes; - sync_state_r = AMpush(&test_state->stack, - AMsyncStateDecode(encoded_r.src, encoded_r.count), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; + AMbyteSpan encoded_r; + assert_true(AMitemToBytes( + AMstackItem(stack_ptr, AMsyncStateEncode(sync_state_r), cmocka_cb, AMexpect(AM_VAL_TYPE_BYTES)), &encoded_r)); + 
assert_true(AMitemToSyncState(AMstackItem(stack_ptr, AMsyncStateDecode(encoded_r.src, encoded_r.count), cmocka_cb, + AMexpect(AM_VAL_TYPE_SYNC_STATE)), + &sync_state_r)); /* */ /* assert.notDeepStrictEqual(n1.getHeads(), r.getHeads()) */ - heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes heads_r = AMpush(&test_state->stack, - AMgetHeads(r), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_not_equal(AMchangeHashesCmp(&heads1, &heads_r), 0); + heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMitems heads_r = AMstackItems(stack_ptr, AMgetHeads(r), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_false(AMitemsEqual(&heads1, &heads_r)); /* assert.notDeepStrictEqual(n1.materialize(), r.materialize()) */ assert_false(AMequal(test_state->n1, r)); /* assert.deepStrictEqual(n1.materialize(), { x: 8 }) */ - assert_int_equal(AMpush(&test_state->stack, - AMmapGet(test_state->n1, AM_ROOT, "x", NULL), - AM_VALUE_UINT, - cmocka_cb).uint, 8); + uint64_t uint; + assert_true(AMitemToUint(AMstackItem(stack_ptr, AMmapGet(test_state->n1, AM_ROOT, AMstr("x"), NULL), cmocka_cb, + AMexpect(AM_VAL_TYPE_UINT)), + &uint)); + assert_int_equal(uint, 8); /* assert.deepStrictEqual(r.materialize(), { x: 2 }) */ - assert_int_equal(AMpush(&test_state->stack, - AMmapGet(r, AM_ROOT, "x", NULL), - AM_VALUE_UINT, - cmocka_cb).uint, 2); + assert_true(AMitemToUint( + AMstackItem(stack_ptr, AMmapGet(r, AM_ROOT, AMstr("x"), NULL), cmocka_cb, AMexpect(AM_VAL_TYPE_UINT)), &uint)); + assert_int_equal(uint, 2); /* sync(n1, r, s1, rSyncState) */ sync(test_state->n1, r, test_state->s1, sync_state_r); /* assert.deepStrictEqual(n1.getHeads(), r.getHeads()) */ - heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - heads_r = AMpush(&test_state->stack, - AMgetHeads(r), - 
AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads_r), 0); + heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + heads_r = AMstackItems(stack_ptr, AMgetHeads(r), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_true(AMitemsEqual(&heads1, &heads_r)); /* assert.deepStrictEqual(n1.materialize(), r.materialize()) */ assert_true(AMequal(test_state->n1, r)); } /** - * \brief should re-sync after one node experiences data loss without disconnecting + * \brief should re-sync after one node experiences data loss without + * disconnecting */ -static void test_should_resync_after_one_node_experiences_data_loss_without_disconnecting(void **state) { +static void test_should_resync_after_one_node_experiences_data_loss_without_disconnecting(void** state) { /* const n1 = create('01234567'), n2 = create('89abcdef') const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; + AMstack** stack_ptr = &test_state->base_state->stack; /* */ /* n1 makes three changes which we sync to n2 */ /* for (let i = 0; i < 3; i++) { */ for (size_t i = 0; i != 3; ++i) { /* n1.put("_root", "x", i) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", i)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.commit("", 0) */ - AMfree(AMcommit(test_state->n1, "", &TIME_0)); - /* { */ + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + /* { */ } /* */ /* sync(n1, n2, s1, s2) */ sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); /* */ /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->n2), - 
AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMitems heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_true(AMitemsEqual(&heads1, &heads2)); /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ assert_true(AMequal(test_state->n1, test_state->n2)); /* */ /* const n2AfterDataLoss = create('89abcdef') */ - AMdoc* n2_after_data_loss = AMpush(&test_state->stack, - AMcreate(AMpush(&test_state->stack, - AMactorIdInitStr("89abcdef"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("89abcdef")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMdoc* n2_after_data_loss; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), + &n2_after_data_loss)); /* */ /* "n2" now has no data, but n1 still thinks it does. 
Note we don't do * decodeSyncState(encodeSyncState(s1)) in order to simulate data loss * without disconnecting */ /* sync(n1, n2AfterDataLoss, s1, initSyncState()) */ - AMsyncState* s2_after_data_loss = AMpush(&test_state->stack, - AMsyncStateInit(), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; + AMsyncState* s2_after_data_loss; + assert_true(AMitemToSyncState( + AMstackItem(stack_ptr, AMsyncStateInit(), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_STATE)), &s2_after_data_loss)); sync(test_state->n1, n2_after_data_loss, test_state->s1, s2_after_data_loss); /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ - heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->n2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_true(AMitemsEqual(&heads1, &heads2)); /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ assert_true(AMequal(test_state->n1, test_state->n2)); } @@ -1142,33 +1088,33 @@ static void test_should_resync_after_one_node_experiences_data_loss_without_disc /** * \brief should handle changes concurrent to the last sync heads */ -static void test_should_handle_changes_concurrrent_to_the_last_sync_heads(void **state) { - /* const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98')*/ +static void test_should_handle_changes_concurrrent_to_the_last_sync_heads(void** state) { + /* const n1 = create('01234567'), n2 = create('89abcdef'), n3 = + * create('fedcba98' */ TestState* test_state = *state; - AMdoc* n3 = AMpush(&test_state->stack, - AMcreate(AMpush(&test_state->stack, - AMactorIdInitStr("fedcba98"), - 
AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; - /* const s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState()*/ + AMstack** stack_ptr = &test_state->base_state->stack; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("fedcba98")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMdoc* n3; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &n3)); + /* const s12 = initSyncState(), s21 = initSyncState(), s23 = + * initSyncState(), s32 = initSyncState( */ AMsyncState* s12 = test_state->s1; AMsyncState* s21 = test_state->s2; - AMsyncState* s23 = AMpush(&test_state->stack, - AMsyncStateInit(), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; - AMsyncState* s32 = AMpush(&test_state->stack, - AMsyncStateInit(), - AM_VALUE_SYNC_STATE, - cmocka_cb).sync_state; + AMsyncState* s23; + assert_true(AMitemToSyncState( + AMstackItem(stack_ptr, AMsyncStateInit(), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_STATE)), &s23)); + AMsyncState* s32; + assert_true(AMitemToSyncState( + AMstackItem(stack_ptr, AMsyncStateInit(), cmocka_cb, AMexpect(AM_VAL_TYPE_SYNC_STATE)), &s32)); /* */ /* Change 1 is known to all three nodes */ /* //n1 = Automerge.change(n1, {time: 0}, doc => doc.x = 1) */ /* n1.put("_root", "x", 1); n1.commit("", 0) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", 1)); - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* */ /* sync(n1, n2, s12, s21) */ sync(test_state->n1, test_state->n2, s12, s21); @@ -1177,47 +1123,38 @@ static void test_should_handle_changes_concurrrent_to_the_last_sync_heads(void * /* */ /* Change 2 is known to n1 and n2 */ /* 
n1.put("_root", "x", 2); n1.commit("", 0) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", 2)); - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* */ /* sync(n1, n2, s12, s21) */ sync(test_state->n1, test_state->n2, s12, s21); /* */ /* Each of the three nodes makes one change (changes 3, 4, 5) */ /* n1.put("_root", "x", 3); n1.commit("", 0) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", 3)); - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* n2.put("_root", "x", 4); n2.commit("", 0) */ - AMfree(AMmapPutUint(test_state->n2, AM_ROOT, "x", 4)); - AMfree(AMcommit(test_state->n2, "", &TIME_0)); + AMstackItem(NULL, AMmapPutUint(test_state->n2, AM_ROOT, AMstr("x"), 4), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(test_state->n2, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* n3.put("_root", "x", 5); n3.commit("", 0) */ - AMfree(AMmapPutUint(n3, AM_ROOT, "x", 5)); - AMfree(AMcommit(n3, "", &TIME_0)); + AMstackItem(NULL, AMmapPutUint(n3, AM_ROOT, AMstr("x"), 5), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(n3, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* */ /* Apply n3's latest change to n2. 
*/ /* let change = n3.getLastLocalChange() if (change === null) throw new RangeError("no local change") */ - AMchanges changes = AMpush(&test_state->stack, - AMgetLastLocalChange(n3), - AM_VALUE_CHANGES, - cmocka_cb).changes; + AMitems changes = AMstackItems(stack_ptr, AMgetLastLocalChange(n3), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); /* n2.applyChanges([change]) */ - AMfree(AMapplyChanges(test_state->n2, &changes)); + AMstackItem(NULL, AMapplyChanges(test_state->n2, &changes), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* */ /* Now sync n1 and n2. n3's change is concurrent to n1 and n2's last sync * heads */ /* sync(n1, n2, s12, s21) */ sync(test_state->n1, test_state->n2, s12, s21); /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->n2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMitems heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_true(AMitemsEqual(&heads1, &heads2)); /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ assert_true(AMequal(test_state->n1, test_state->n2)); } @@ -1225,39 +1162,35 @@ static void test_should_handle_changes_concurrrent_to_the_last_sync_heads(void * /** * \brief should handle histories with lots of branching and merging */ -static void test_should_handle_histories_with_lots_of_branching_and_merging(void **state) { - /* const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') - const s1 = initSyncState(), s2 = initSyncState() */ +static void test_should_handle_histories_with_lots_of_branching_and_merging(void** state) { + /* 
const n1 = create('01234567'), n2 = create('89abcdef'), n3 = + create('fedcba98') const s1 = initSyncState(), s2 = initSyncState() */ TestState* test_state = *state; - AMdoc* n3 = AMpush(&test_state->stack, - AMcreate(AMpush(&test_state->stack, - AMactorIdInitStr("fedcba98"), - AM_VALUE_ACTOR_ID, - cmocka_cb).actor_id), - AM_VALUE_DOC, - cmocka_cb).doc; + AMstack** stack_ptr = &test_state->base_state->stack; + AMactorId const* actor_id; + assert_true(AMitemToActorId( + AMstackItem(stack_ptr, AMactorIdFromStr(AMstr("fedcba98")), cmocka_cb, AMexpect(AM_VAL_TYPE_ACTOR_ID)), + &actor_id)); + AMdoc* n3; + assert_true(AMitemToDoc(AMstackItem(stack_ptr, AMcreate(actor_id), cmocka_cb, AMexpect(AM_VAL_TYPE_DOC)), &n3)); /* n1.put("_root", "x", 0); n1.commit("", 0) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "x", 0)); - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("x"), 0), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* let change1 = n1.getLastLocalChange() if (change1 === null) throw new RangeError("no local change") */ - AMchanges change1 = AMpush(&test_state->stack, - AMgetLastLocalChange(test_state->n1), - AM_VALUE_CHANGES, - cmocka_cb).changes; + AMitems change1 = + AMstackItems(stack_ptr, AMgetLastLocalChange(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); /* n2.applyChanges([change1]) */ - AMfree(AMapplyChanges(test_state->n2, &change1)); + AMstackItem(NULL, AMapplyChanges(test_state->n2, &change1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* let change2 = n1.getLastLocalChange() if (change2 === null) throw new RangeError("no local change") */ - AMchanges change2 = AMpush(&test_state->stack, - AMgetLastLocalChange(test_state->n1), - AM_VALUE_CHANGES, - cmocka_cb).changes; + AMitems change2 = + AMstackItems(stack_ptr, AMgetLastLocalChange(test_state->n1), cmocka_cb, 
AMexpect(AM_VAL_TYPE_CHANGE)); /* n3.applyChanges([change2]) */ - AMfree(AMapplyChanges(n3, &change2)); + AMstackItem(NULL, AMapplyChanges(n3, &change2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n3.put("_root", "x", 1); n3.commit("", 0) */ - AMfree(AMmapPutUint(n3, AM_ROOT, "x", 1)); - AMfree(AMcommit(n3, "", &TIME_0)); + AMstackItem(NULL, AMmapPutUint(n3, AM_ROOT, AMstr("x"), 1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(n3, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* */ /* - n1c1 <------ n1c2 <------ n1c3 <-- etc. <-- n1c20 <------ n1c21 * / \/ \/ \/ @@ -1269,28 +1202,24 @@ static void test_should_handle_histories_with_lots_of_branching_and_merging(void /* for (let i = 1; i < 20; i++) { */ for (size_t i = 1; i != 20; ++i) { /* n1.put("_root", "n1", i); n1.commit("", 0) */ - AMfree(AMmapPutUint(test_state->n1, AM_ROOT, "n1", i)); - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMstackItem(NULL, AMmapPutUint(test_state->n1, AM_ROOT, AMstr("n1"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* n2.put("_root", "n2", i); n2.commit("", 0) */ - AMfree(AMmapPutUint(test_state->n2, AM_ROOT, "n2", i)); - AMfree(AMcommit(test_state->n2, "", &TIME_0)); + AMstackItem(NULL, AMmapPutUint(test_state->n2, AM_ROOT, AMstr("n2"), i), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(test_state->n2, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* const change1 = n1.getLastLocalChange() if (change1 === null) throw new RangeError("no local change") */ - AMchanges change1 = AMpush(&test_state->stack, - AMgetLastLocalChange(test_state->n1), - AM_VALUE_CHANGES, - cmocka_cb).changes; + AMitems change1 = + AMstackItems(stack_ptr, AMgetLastLocalChange(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); /* const change2 = n2.getLastLocalChange() if (change2 === null) throw new 
RangeError("no local change") */ - AMchanges change2 = AMpush(&test_state->stack, - AMgetLastLocalChange(test_state->n2), - AM_VALUE_CHANGES, - cmocka_cb).changes; + AMitems change2 = + AMstackItems(stack_ptr, AMgetLastLocalChange(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); /* n1.applyChanges([change2]) */ - AMfree(AMapplyChanges(test_state->n1, &change2)); + AMstackItem(NULL, AMapplyChanges(test_state->n1, &change2), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n2.applyChanges([change1]) */ - AMfree(AMapplyChanges(test_state->n2, &change1)); - /* { */ + AMstackItem(NULL, AMapplyChanges(test_state->n2, &change1), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); + /* { */ } /* */ /* sync(n1, n2, s1, s2) */ @@ -1300,31 +1229,24 @@ static void test_should_handle_histories_with_lots_of_branching_and_merging(void * the slower code path */ /* const change3 = n2.getLastLocalChange() if (change3 === null) throw new RangeError("no local change") */ - AMchanges change3 = AMpush(&test_state->stack, - AMgetLastLocalChange(n3), - AM_VALUE_CHANGES, - cmocka_cb).changes; + AMitems change3 = AMstackItems(stack_ptr, AMgetLastLocalChange(n3), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE)); /* n2.applyChanges([change3]) */ - AMfree(AMapplyChanges(test_state->n2, &change3)); + AMstackItem(NULL, AMapplyChanges(test_state->n2, &change3), cmocka_cb, AMexpect(AM_VAL_TYPE_VOID)); /* n1.put("_root", "n1", "final"); n1.commit("", 0) */ - AMfree(AMmapPutStr(test_state->n1, AM_ROOT, "n1", "final")); - AMfree(AMcommit(test_state->n1, "", &TIME_0)); + AMstackItem(NULL, AMmapPutStr(test_state->n1, AM_ROOT, AMstr("n1"), AMstr("final")), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(test_state->n1, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* n2.put("_root", "n2", "final"); n2.commit("", 0) */ - AMfree(AMmapPutStr(test_state->n2, AM_ROOT, "n2", "final")); - AMfree(AMcommit(test_state->n2, "", &TIME_0)); + AMstackItem(NULL, AMmapPutStr(test_state->n2, 
AM_ROOT, AMstr("n2"), AMstr("final")), cmocka_cb, + AMexpect(AM_VAL_TYPE_VOID)); + AMstackItem(NULL, AMcommit(test_state->n2, AMstr(""), &TIME_0), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); /* */ /* sync(n1, n2, s1, s2) */ sync(test_state->n1, test_state->n2, test_state->s1, test_state->s2); /* assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) */ - AMchangeHashes heads1 = AMpush(&test_state->stack, - AMgetHeads(test_state->n1), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - AMchangeHashes heads2 = AMpush(&test_state->stack, - AMgetHeads(test_state->n2), - AM_VALUE_CHANGE_HASHES, - cmocka_cb).change_hashes; - assert_int_equal(AMchangeHashesCmp(&heads1, &heads2), 0); + AMitems heads1 = AMstackItems(stack_ptr, AMgetHeads(test_state->n1), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + AMitems heads2 = AMstackItems(stack_ptr, AMgetHeads(test_state->n2), cmocka_cb, AMexpect(AM_VAL_TYPE_CHANGE_HASH)); + assert_true(AMitemsEqual(&heads1, &heads2)); /* assert.deepStrictEqual(n1.materialize(), n2.materialize()) */ assert_true(AMequal(test_state->n1, test_state->n2)); } @@ -1334,20 +1256,26 @@ int run_ported_wasm_sync_tests(void) { cmocka_unit_test_setup_teardown(test_should_send_a_sync_message_implying_no_local_data, setup, teardown), cmocka_unit_test_setup_teardown(test_should_not_reply_if_we_have_no_data_as_well, setup, teardown), cmocka_unit_test_setup_teardown(test_repos_with_equal_heads_do_not_need_a_reply_message, setup, teardown), - cmocka_unit_test_setup_teardown(test_n1_should_offer_all_changes_to_n2_when_starting_from_nothing, setup, teardown), - cmocka_unit_test_setup_teardown(test_should_sync_peers_where_one_has_commits_the_other_does_not, setup, teardown), + cmocka_unit_test_setup_teardown(test_n1_should_offer_all_changes_to_n2_when_starting_from_nothing, setup, + teardown), + cmocka_unit_test_setup_teardown(test_should_sync_peers_where_one_has_commits_the_other_does_not, setup, + teardown), 
cmocka_unit_test_setup_teardown(test_should_work_with_prior_sync_state, setup, teardown), cmocka_unit_test_setup_teardown(test_should_not_generate_messages_once_synced, setup, teardown), - cmocka_unit_test_setup_teardown(test_should_allow_simultaneous_messages_during_synchronization, setup, teardown), - cmocka_unit_test_setup_teardown(test_should_assume_sent_changes_were_received_until_we_hear_otherwise, setup, teardown), + cmocka_unit_test_setup_teardown(test_should_allow_simultaneous_messages_during_synchronization, setup, + teardown), + cmocka_unit_test_setup_teardown(test_should_assume_sent_changes_were_received_until_we_hear_otherwise, setup, + teardown), cmocka_unit_test_setup_teardown(test_should_work_regardless_of_who_initiates_the_exchange, setup, teardown), cmocka_unit_test_setup_teardown(test_should_work_without_prior_sync_state, setup, teardown), cmocka_unit_test_setup_teardown(test_should_work_with_prior_sync_state_2, setup, teardown), cmocka_unit_test_setup_teardown(test_should_ensure_non_empty_state_after_sync, setup, teardown), cmocka_unit_test_setup_teardown(test_should_resync_after_one_node_crashed_with_data_loss, setup, teardown), - cmocka_unit_test_setup_teardown(test_should_resync_after_one_node_experiences_data_loss_without_disconnecting, setup, teardown), + cmocka_unit_test_setup_teardown(test_should_resync_after_one_node_experiences_data_loss_without_disconnecting, + setup, teardown), cmocka_unit_test_setup_teardown(test_should_handle_changes_concurrrent_to_the_last_sync_heads, setup, teardown), - cmocka_unit_test_setup_teardown(test_should_handle_histories_with_lots_of_branching_and_merging, setup, teardown), + cmocka_unit_test_setup_teardown(test_should_handle_histories_with_lots_of_branching_and_merging, setup, + teardown), }; return cmocka_run_group_tests(tests, NULL, NULL); diff --git a/automerge-c/test/str_utils.c b/rust/automerge-c/test/str_utils.c similarity index 100% rename from automerge-c/test/str_utils.c rename to 
rust/automerge-c/test/str_utils.c index cc923cb4..2937217a 100644 --- a/automerge-c/test/str_utils.c +++ b/rust/automerge-c/test/str_utils.c @@ -1,5 +1,5 @@ -#include #include +#include /* local */ #include "str_utils.h" diff --git a/rust/automerge-c/test/str_utils.h b/rust/automerge-c/test/str_utils.h new file mode 100644 index 00000000..14a4af73 --- /dev/null +++ b/rust/automerge-c/test/str_utils.h @@ -0,0 +1,17 @@ +#ifndef TESTS_STR_UTILS_H +#define TESTS_STR_UTILS_H + +/** + * \brief Converts a hexadecimal string into an array of bytes. + * + * \param[in] hex_str A hexadecimal string. + * \param[in] src A pointer to an array of bytes. + * \param[in] count The count of bytes to copy into the array pointed to by + * \p src. + * \pre \p src `!= NULL` + * \pre `sizeof(`\p src `) > 0` + * \pre \p count `<= sizeof(`\p src `)` + */ +void hex_to_bytes(char const* hex_str, uint8_t* src, size_t const count); + +#endif /* TESTS_STR_UTILS_H */ diff --git a/automerge-cli/.gitignore b/rust/automerge-cli/.gitignore similarity index 100% rename from automerge-cli/.gitignore rename to rust/automerge-cli/.gitignore diff --git a/automerge-cli/Cargo.toml b/rust/automerge-cli/Cargo.toml similarity index 80% rename from automerge-cli/Cargo.toml rename to rust/automerge-cli/Cargo.toml index f434bc69..430090a6 100644 --- a/automerge-cli/Cargo.toml +++ b/rust/automerge-cli/Cargo.toml @@ -13,17 +13,18 @@ bench = false doc = false [dependencies] -clap = {version = "~3.1", features = ["derive"]} +clap = {version = "~4", features = ["derive"]} serde_json = "^1.0" anyhow = "1.0" -atty = "^0.2" thiserror = "^1.0" combine = "^4.5" maplit = "^1.0" -colored_json = "^2.1" tracing-subscriber = "~0.3" automerge = { path = "../automerge" } +is-terminal = "0.4.1" +termcolor = "1.1.3" +serde = "1.0.150" [dev-dependencies] duct = "^0.13" diff --git a/automerge-cli/IDEAS.md b/rust/automerge-cli/IDEAS.md similarity index 100% rename from automerge-cli/IDEAS.md rename to rust/automerge-cli/IDEAS.md diff 
--git a/automerge-cli/src/change.rs b/rust/automerge-cli/src/change.rs similarity index 100% rename from automerge-cli/src/change.rs rename to rust/automerge-cli/src/change.rs diff --git a/rust/automerge-cli/src/color_json.rs b/rust/automerge-cli/src/color_json.rs new file mode 100644 index 00000000..9514da22 --- /dev/null +++ b/rust/automerge-cli/src/color_json.rs @@ -0,0 +1,370 @@ +use std::io::Write; + +use serde::Serialize; +use serde_json::ser::Formatter; +use termcolor::{Buffer, BufferWriter, Color, ColorSpec, WriteColor}; + +struct Style { + /// style of object brackets + object_brackets: ColorSpec, + /// style of array brackets + array_brackets: ColorSpec, + /// style of object + key: ColorSpec, + /// style of string values + string_value: ColorSpec, + /// style of integer values + integer_value: ColorSpec, + /// style of float values + float_value: ColorSpec, + /// style of bool values + bool_value: ColorSpec, + /// style of the `nil` value + nil_value: ColorSpec, + /// should the quotation get the style of the inner string/key? 
+ string_include_quotation: bool, +} + +impl Default for Style { + fn default() -> Self { + Self { + object_brackets: ColorSpec::new().set_bold(true).clone(), + array_brackets: ColorSpec::new().set_bold(true).clone(), + key: ColorSpec::new() + .set_fg(Some(Color::Blue)) + .set_bold(true) + .clone(), + string_value: ColorSpec::new().set_fg(Some(Color::Green)).clone(), + integer_value: ColorSpec::new(), + float_value: ColorSpec::new(), + bool_value: ColorSpec::new(), + nil_value: ColorSpec::new(), + string_include_quotation: true, + } + } +} + +/// Write pretty printed, colored json to stdout +pub(crate) fn print_colored_json(value: &serde_json::Value) -> std::io::Result<()> { + let formatter = ColoredFormatter { + formatter: serde_json::ser::PrettyFormatter::new(), + style: Style::default(), + in_object_key: false, + }; + let mut ignored_writer = Vec::new(); + let mut ser = serde_json::Serializer::with_formatter(&mut ignored_writer, formatter); + value + .serialize(&mut ser) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string())) +} + +struct ColoredFormatter { + formatter: F, + style: Style, + in_object_key: bool, +} + +fn write_colored(color: ColorSpec, handler: H) -> std::io::Result<()> +where + H: FnOnce(&mut Buffer) -> std::io::Result<()>, +{ + let buf = BufferWriter::stdout(termcolor::ColorChoice::Auto); + let mut buffer = buf.buffer(); + buffer.set_color(&color)?; + handler(&mut buffer)?; + buffer.reset()?; + buf.print(&buffer)?; + Ok(()) +} + +impl Formatter for ColoredFormatter { + fn write_null(&mut self, _writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.nil_value.clone(), |w| { + self.formatter.write_null(w) + }) + } + + fn write_bool(&mut self, _writer: &mut W, value: bool) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.bool_value.clone(), |w| { + self.formatter.write_bool(w, value) + }) + } + + fn write_i8(&mut self, _writer: 
&mut W, value: i8) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + self.formatter.write_i8(w, value) + }) + } + + fn write_i16(&mut self, _writer: &mut W, value: i16) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + self.formatter.write_i16(w, value) + }) + } + + fn write_i32(&mut self, _writer: &mut W, value: i32) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + self.formatter.write_i32(w, value) + }) + } + + fn write_i64(&mut self, _writer: &mut W, value: i64) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + self.formatter.write_i64(w, value) + }) + } + + fn write_i128(&mut self, _writer: &mut W, value: i128) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + self.formatter.write_i128(w, value) + }) + } + + fn write_u8(&mut self, _writer: &mut W, value: u8) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + self.formatter.write_u8(w, value) + }) + } + + fn write_u16(&mut self, _writer: &mut W, value: u16) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + self.formatter.write_u16(w, value) + }) + } + + fn write_u32(&mut self, _writer: &mut W, value: u32) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + self.formatter.write_u32(w, value) + }) + } + + fn write_u64(&mut self, _writer: &mut W, value: u64) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + self.formatter.write_u64(w, value) + }) + } + + fn write_u128(&mut 
self, _writer: &mut W, value: u128) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + self.formatter.write_u128(w, value) + }) + } + + fn write_f32(&mut self, _writer: &mut W, value: f32) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.float_value.clone(), |w| { + self.formatter.write_f32(w, value) + }) + } + + fn write_f64(&mut self, _writer: &mut W, value: f64) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.float_value.clone(), |w| { + self.formatter.write_f64(w, value) + }) + } + + fn write_number_str(&mut self, _writer: &mut W, value: &str) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.integer_value.clone(), |w| { + self.formatter.write_number_str(w, value) + }) + } + + fn begin_string(&mut self, _writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + let style = if self.style.string_include_quotation { + if self.in_object_key { + self.style.key.clone() + } else { + self.style.string_value.clone() + } + } else { + ColorSpec::new() + }; + write_colored(style, |w| self.formatter.begin_string(w)) + } + + fn end_string(&mut self, _writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + let style = if self.style.string_include_quotation { + if self.in_object_key { + self.style.key.clone() + } else { + self.style.string_value.clone() + } + } else { + ColorSpec::new() + }; + write_colored(style, |w| self.formatter.end_string(w)) + } + + fn write_string_fragment(&mut self, _writer: &mut W, fragment: &str) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + let style = if self.in_object_key { + self.style.key.clone() + } else { + self.style.string_value.clone() + }; + write_colored(style, |w| w.write_all(fragment.as_bytes())) + } + + fn write_char_escape( + &mut self, + _writer: &mut W, + 
char_escape: serde_json::ser::CharEscape, + ) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + let style = if self.in_object_key { + self.style.key.clone() + } else { + self.style.string_value.clone() + }; + write_colored(style, |w| self.formatter.write_char_escape(w, char_escape)) + } + + fn begin_array(&mut self, _writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.array_brackets.clone(), |w| { + self.formatter.begin_array(w) + }) + } + + fn end_array(&mut self, _writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.array_brackets.clone(), |w| { + self.formatter.end_array(w) + }) + } + + fn begin_array_value(&mut self, _writer: &mut W, first: bool) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(ColorSpec::new(), |w| { + self.formatter.begin_array_value(w, first) + }) + } + + fn end_array_value(&mut self, _writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(ColorSpec::new(), |w| self.formatter.end_array_value(w)) + } + + fn begin_object(&mut self, _writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.object_brackets.clone(), |w| { + self.formatter.begin_object(w) + }) + } + + fn end_object(&mut self, _writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(self.style.object_brackets.clone(), |w| { + self.formatter.end_object(w) + }) + } + + fn begin_object_key(&mut self, _writer: &mut W, first: bool) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + self.in_object_key = true; + write_colored(ColorSpec::new(), |w| { + self.formatter.begin_object_key(w, first) + }) + } + + fn end_object_key(&mut self, _writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + self.in_object_key = false; + write_colored(ColorSpec::new(), |w| 
self.formatter.end_object_key(w)) + } + + fn begin_object_value(&mut self, _writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + self.in_object_key = false; + write_colored(ColorSpec::new(), |w| self.formatter.begin_object_value(w)) + } + + fn end_object_value(&mut self, _writer: &mut W) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + self.in_object_key = false; + write_colored(ColorSpec::new(), |w| self.formatter.end_object_value(w)) + } + + fn write_raw_fragment(&mut self, _writer: &mut W, fragment: &str) -> std::io::Result<()> + where + W: ?Sized + std::io::Write, + { + write_colored(ColorSpec::new(), |w| { + self.formatter.write_raw_fragment(w, fragment) + }) + } +} diff --git a/automerge-cli/src/examine.rs b/rust/automerge-cli/src/examine.rs similarity index 87% rename from automerge-cli/src/examine.rs rename to rust/automerge-cli/src/examine.rs index 847abd4f..0ee102fb 100644 --- a/automerge-cli/src/examine.rs +++ b/rust/automerge-cli/src/examine.rs @@ -1,6 +1,8 @@ use automerge as am; use thiserror::Error; +use crate::{color_json::print_colored_json, SkipVerifyFlag}; + #[derive(Error, Debug)] pub enum ExamineError { #[error("Error reading change file: {:?}", source)] @@ -20,16 +22,18 @@ pub enum ExamineError { }, } -pub fn examine( +pub(crate) fn examine( mut input: impl std::io::Read, mut output: impl std::io::Write, + skip: SkipVerifyFlag, is_tty: bool, ) -> Result<(), ExamineError> { let mut buf: Vec = Vec::new(); input .read_to_end(&mut buf) .map_err(|e| ExamineError::ReadingChanges { source: e })?; - let doc = am::Automerge::load(&buf) + let doc = skip + .load(&buf) .map_err(|e| ExamineError::ApplyingInitialChanges { source: e })?; let uncompressed_changes: Vec<_> = doc .get_changes(&[]) @@ -39,7 +43,7 @@ pub fn examine( .collect(); if is_tty { let json_changes = serde_json::to_value(uncompressed_changes).unwrap(); - colored_json::write_colored_json(&json_changes, &mut output).unwrap(); + 
print_colored_json(&json_changes).unwrap(); writeln!(output).unwrap(); } else { let json_changes = serde_json::to_string_pretty(&uncompressed_changes).unwrap(); diff --git a/rust/automerge-cli/src/examine_sync.rs b/rust/automerge-cli/src/examine_sync.rs new file mode 100644 index 00000000..c0d5df97 --- /dev/null +++ b/rust/automerge-cli/src/examine_sync.rs @@ -0,0 +1,38 @@ +use automerge::sync::ReadMessageError; + +use crate::color_json::print_colored_json; + +#[derive(Debug, thiserror::Error)] +pub enum ExamineSyncError { + #[error("Error reading message: {0}")] + ReadMessage(#[source] std::io::Error), + + #[error("error writing message: {0}")] + WriteMessage(#[source] std::io::Error), + + #[error("error writing json to output: {0}")] + WriteJson(#[source] serde_json::Error), + + #[error("Error parsing message: {0}")] + ParseMessage(#[from] ReadMessageError), +} + +pub(crate) fn examine_sync( + mut input: Box, + output: W, + is_tty: bool, +) -> Result<(), ExamineSyncError> { + let mut buf: Vec = Vec::new(); + input + .read_to_end(&mut buf) + .map_err(ExamineSyncError::ReadMessage)?; + + let message = automerge::sync::Message::decode(&buf)?; + let json = serde_json::to_value(message).unwrap(); + if is_tty { + print_colored_json(&json).map_err(ExamineSyncError::WriteMessage)?; + } else { + serde_json::to_writer(output, &json).map_err(ExamineSyncError::WriteJson)?; + } + Ok(()) +} diff --git a/automerge-cli/src/export.rs b/rust/automerge-cli/src/export.rs similarity index 87% rename from automerge-cli/src/export.rs rename to rust/automerge-cli/src/export.rs index 49cded8f..45f39101 100644 --- a/automerge-cli/src/export.rs +++ b/rust/automerge-cli/src/export.rs @@ -1,5 +1,8 @@ use anyhow::Result; use automerge as am; +use automerge::ReadDoc; + +use crate::{color_json::print_colored_json, SkipVerifyFlag}; pub(crate) fn map_to_json(doc: &am::Automerge, obj: &am::ObjId) -> serde_json::Value { let keys = doc.keys(obj); @@ -28,7 +31,7 @@ fn list_to_json(doc: 
&am::Automerge, obj: &am::ObjId) -> serde_json::Value { let len = doc.length(obj); let mut array = Vec::new(); for i in 0..len { - let val = doc.get(obj, i as usize); + let val = doc.get(obj, i); match val { Ok(Some((am::Value::Object(o), exid))) if o == am::ObjType::Map || o == am::ObjType::Table => @@ -69,22 +72,23 @@ fn scalar_to_json(val: &am::ScalarValue) -> serde_json::Value { } } -fn get_state_json(input_data: Vec) -> Result { - let doc = am::Automerge::load(&input_data).unwrap(); // FIXME +fn get_state_json(input_data: Vec, skip: SkipVerifyFlag) -> Result { + let doc = skip.load(&input_data).unwrap(); // FIXME Ok(map_to_json(&doc, &am::ObjId::Root)) } -pub fn export_json( +pub(crate) fn export_json( mut changes_reader: impl std::io::Read, mut writer: impl std::io::Write, + skip: SkipVerifyFlag, is_tty: bool, ) -> Result<()> { let mut input_data = vec![]; changes_reader.read_to_end(&mut input_data)?; - let state_json = get_state_json(input_data)?; + let state_json = get_state_json(input_data, skip)?; if is_tty { - colored_json::write_colored_json(&state_json, &mut writer).unwrap(); + print_colored_json(&state_json).unwrap(); writeln!(writer).unwrap(); } else { writeln!( @@ -103,7 +107,10 @@ mod tests { #[test] fn cli_export_with_empty_input() { - assert_eq!(get_state_json(vec![]).unwrap(), serde_json::json!({})) + assert_eq!( + get_state_json(vec![], Default::default()).unwrap(), + serde_json::json!({}) + ) } #[test] @@ -117,7 +124,7 @@ mod tests { let mut backend = initialize_from_json(&initial_state_json).unwrap(); let change_bytes = backend.save(); assert_eq!( - get_state_json(change_bytes).unwrap(), + get_state_json(change_bytes, Default::default()).unwrap(), serde_json::json!({"sparrows": 15.0}) ) } @@ -144,7 +151,7 @@ mod tests { */ let change_bytes = backend.save(); assert_eq!( - get_state_json(change_bytes).unwrap(), + get_state_json(change_bytes, Default::default()).unwrap(), serde_json::json!({ "birds": { "wrens": 3.0, diff --git 
a/automerge-cli/src/import.rs b/rust/automerge-cli/src/import.rs similarity index 100% rename from automerge-cli/src/import.rs rename to rust/automerge-cli/src/import.rs diff --git a/automerge-cli/src/main.rs b/rust/automerge-cli/src/main.rs similarity index 60% rename from automerge-cli/src/main.rs rename to rust/automerge-cli/src/main.rs index ffc13012..8f3f816d 100644 --- a/automerge-cli/src/main.rs +++ b/rust/automerge-cli/src/main.rs @@ -1,10 +1,15 @@ use std::{fs::File, path::PathBuf, str::FromStr}; use anyhow::{anyhow, Result}; -use clap::Parser; +use clap::{ + builder::{BoolishValueParser, TypedValueParser, ValueParserFactory}, + Parser, +}; +use is_terminal::IsTerminal; -//mod change; +mod color_json; mod examine; +mod examine_sync; mod export; mod import; mod merge; @@ -16,12 +21,50 @@ struct Opts { cmd: Command, } -#[derive(Debug)] +#[derive(clap::ValueEnum, Clone, Debug)] enum ExportFormat { Json, Toml, } +#[derive(Copy, Clone, Default, Debug)] +pub(crate) struct SkipVerifyFlag(bool); + +impl SkipVerifyFlag { + fn load(&self, buf: &[u8]) -> Result { + if self.0 { + automerge::Automerge::load(buf) + } else { + automerge::Automerge::load_unverified_heads(buf) + } + } +} + +#[derive(Clone)] +struct SkipVerifyFlagParser; +impl ValueParserFactory for SkipVerifyFlag { + type Parser = SkipVerifyFlagParser; + + fn value_parser() -> Self::Parser { + SkipVerifyFlagParser + } +} + +impl TypedValueParser for SkipVerifyFlagParser { + type Value = SkipVerifyFlag; + + fn parse_ref( + &self, + cmd: &clap::Command, + arg: Option<&clap::Arg>, + value: &std::ffi::OsStr, + ) -> Result { + BoolishValueParser::new() + .parse_ref(cmd, arg, value) + .map(SkipVerifyFlag) + } +} + impl FromStr for ExportFormat { type Err = anyhow::Error; @@ -43,12 +86,15 @@ enum Command { format: ExportFormat, /// Path that contains Automerge changes - #[clap(parse(from_os_str))] changes_file: Option, /// The file to write to. 
If omitted assumes stdout - #[clap(parse(from_os_str), long("out"), short('o'))] + #[clap(long("out"), short('o'))] output_file: Option, + + /// Whether to verify the head hashes of a compressed document + #[clap(long, action = clap::ArgAction::SetFalse)] + skip_verifying_heads: SkipVerifyFlag, }, Import { @@ -56,69 +102,37 @@ enum Command { #[clap(long, short, default_value = "json")] format: ExportFormat, - #[clap(parse(from_os_str))] input_file: Option, /// Path to write Automerge changes to - #[clap(parse(from_os_str), long("out"), short('o'))] + #[clap(long("out"), short('o'))] changes_file: Option, }, - /// Read an automerge document from a file or stdin, perform a change on it and write a new - /// document to stdout or the specified output file. - Change { - /// The change script to perform. Change scripts have the form []. - /// The possible commands are 'set', 'insert', 'delete', and 'increment'. - /// - /// Paths look like this: $["mapkey"][0]. They always lways start with a '$', then each - /// subsequent segment of the path is either a string in double quotes to index a key in a - /// map, or an integer index to address an array element. 
- /// - /// Examples - /// - /// ## set - /// - /// > automerge change 'set $["someobject"] {"items": []}' somefile - /// - /// ## insert - /// - /// > automerge change 'insert $["someobject"]["items"][0] "item1"' somefile - /// - /// ## increment - /// - /// > automerge change 'increment $["mycounter"]' - /// - /// ## delete - /// - /// > automerge change 'delete $["someobject"]["items"]' somefile - script: String, - - /// The file to change, if omitted will assume stdin - #[clap(parse(from_os_str))] + /// Read an automerge document and print a JSON representation of the changes in it to stdout + Examine { input_file: Option, - - /// Path to write Automerge changes to, if omitted will write to stdout - #[clap(parse(from_os_str), long("out"), short('o'))] - output_file: Option, + skip_verifying_heads: SkipVerifyFlag, }, - /// Read an automerge document and print a JSON representation of the changes in it to stdout - Examine { input_file: Option }, + /// Read an automerge sync messaage and print a JSON representation of it + ExamineSync { input_file: Option }, /// Read one or more automerge documents and output a merged, compacted version of them Merge { /// The file to write to. If omitted assumes stdout - #[clap(parse(from_os_str), long("out"), short('o'))] + #[clap(long("out"), short('o'))] output_file: Option, + /// The file(s) to compact. 
If empty assumes stdin input: Vec, }, } fn open_file_or_stdin(maybe_path: Option) -> Result> { - if atty::is(atty::Stream::Stdin) { + if std::io::stdin().is_terminal() { if let Some(path) = maybe_path { - Ok(Box::new(File::open(&path).unwrap())) + Ok(Box::new(File::open(path).unwrap())) } else { Err(anyhow!( "Must provide file path if not providing input via stdin" @@ -130,9 +144,9 @@ fn open_file_or_stdin(maybe_path: Option) -> Result) -> Result> { - if atty::is(atty::Stream::Stdout) { + if std::io::stdout().is_terminal() { if let Some(path) = maybe_path { - Ok(Box::new(File::create(&path).unwrap())) + Ok(Box::new(File::create(path).unwrap())) } else { Err(anyhow!("Must provide file path if not piping to stdout")) } @@ -149,16 +163,22 @@ fn main() -> Result<()> { changes_file, format, output_file, + skip_verifying_heads, } => { let output: Box = if let Some(output_file) = output_file { - Box::new(File::create(&output_file)?) + Box::new(File::create(output_file)?) } else { Box::new(std::io::stdout()) }; match format { ExportFormat::Json => { let mut in_buffer = open_file_or_stdin(changes_file)?; - export::export_json(&mut in_buffer, output, atty::is(atty::Stream::Stdout)) + export::export_json( + &mut in_buffer, + output, + skip_verifying_heads, + std::io::stdout().is_terminal(), + ) } ExportFormat::Toml => unimplemented!(), } @@ -175,23 +195,30 @@ fn main() -> Result<()> { } ExportFormat::Toml => unimplemented!(), }, - Command::Change { .. 
- //input_file, - //output_file, - //script, + Command::Examine { + input_file, + skip_verifying_heads, } => { - unimplemented!() -/* - let in_buffer = open_file_or_stdin(input_file)?; - let mut out_buffer = create_file_or_stdout(output_file)?; - change::change(in_buffer, &mut out_buffer, script.as_str()) - .map_err(|e| anyhow::format_err!("Unable to make changes: {:?}", e)) -*/ - } - Command::Examine { input_file } => { let in_buffer = open_file_or_stdin(input_file)?; let out_buffer = std::io::stdout(); - match examine::examine(in_buffer, out_buffer, atty::is(atty::Stream::Stdout)) { + match examine::examine( + in_buffer, + out_buffer, + skip_verifying_heads, + std::io::stdout().is_terminal(), + ) { + Ok(()) => {} + Err(e) => { + eprintln!("Error: {:?}", e); + } + } + Ok(()) + } + Command::ExamineSync { input_file } => { + let in_buffer = open_file_or_stdin(input_file)?; + let out_buffer = std::io::stdout(); + match examine_sync::examine_sync(in_buffer, out_buffer, std::io::stdout().is_terminal()) + { Ok(()) => {} Err(e) => { eprintln!("Error: {:?}", e); diff --git a/automerge-cli/src/merge.rs b/rust/automerge-cli/src/merge.rs similarity index 100% rename from automerge-cli/src/merge.rs rename to rust/automerge-cli/src/merge.rs diff --git a/automerge-cli/tests/integration.rs b/rust/automerge-cli/tests/integration.rs similarity index 100% rename from automerge-cli/tests/integration.rs rename to rust/automerge-cli/tests/integration.rs diff --git a/rust/automerge-test/Cargo.toml b/rust/automerge-test/Cargo.toml new file mode 100644 index 00000000..9290d7ac --- /dev/null +++ b/rust/automerge-test/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "automerge-test" +version = "0.2.0" +edition = "2021" +license = "MIT" +repository = "https://github.com/automerge/automerge-rs" +rust-version = "1.57.0" +description = "Utilities for testing automerge libraries" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] 
+automerge = { version = "^0.3", path = "../automerge" } +smol_str = { version = "^0.1.21", features=["serde"] } +serde = { version = "^1.0", features=["derive"] } +decorum = "0.3.1" +serde_json = { version = "^1.0.73", features=["float_roundtrip"], default-features=true } diff --git a/rust/automerge-test/README.md b/rust/automerge-test/README.md new file mode 100644 index 00000000..2cadabbb --- /dev/null +++ b/rust/automerge-test/README.md @@ -0,0 +1,3 @@ +# `automerge-test` + +Utilities for making assertions about automerge documents diff --git a/automerge/tests/helpers/mod.rs b/rust/automerge-test/src/lib.rs similarity index 88% rename from automerge/tests/helpers/mod.rs rename to rust/automerge-test/src/lib.rs index 38706d37..a1d4ea89 100644 --- a/automerge/tests/helpers/mod.rs +++ b/rust/automerge-test/src/lib.rs @@ -4,6 +4,8 @@ use std::{ hash::Hash, }; +use automerge::ReadDoc; + use serde::ser::{SerializeMap, SerializeSeq}; pub fn new_doc() -> automerge::AutoCommit { @@ -40,17 +42,19 @@ pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { /// ## Constructing documents /// /// ```rust -/// let mut doc = automerge::Automerge::new(); -/// let todos = doc.set(automerge::ROOT, "todos", automerge::Value::map()).unwrap().unwrap(); -/// let todo = doc.insert(todos, 0, automerge::Value::map()).unwrap(); -/// let title = doc.set(todo, "title", "water plants").unwrap().unwrap(); +/// # use automerge::transaction::Transactable; +/// # use automerge_test::{assert_doc, map, list}; +/// let mut doc = automerge::AutoCommit::new(); +/// let todos = doc.put_object(automerge::ROOT, "todos", automerge::ObjType::List).unwrap(); +/// let todo = doc.insert_object(todos, 0, automerge::ObjType::Map).unwrap(); +/// let title = doc.put(todo, "title", "water plants").unwrap(); /// /// assert_doc!( /// &doc, /// map!{ /// "todos" => { /// list![ -/// { map!{ title = "water plants" } } +/// { map!{ "title" => { "water plants" } } } /// ] /// } /// } @@ -63,13 +67,17 @@ 
pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { /// conflicting values we must capture all of these. /// /// ```rust -/// let mut doc1 = automerge::Automerge::new(); -/// let mut doc2 = automerge::Automerge::new(); -/// let op1 = doc1.set(automerge::ROOT, "field", "one").unwrap().unwrap(); -/// let op2 = doc2.set(automerge::ROOT, "field", "two").unwrap().unwrap(); +/// # use automerge_test::{assert_doc, map}; +/// # use automerge::transaction::Transactable; +/// # use automerge::ReadDoc; +/// +/// let mut doc1 = automerge::AutoCommit::new(); +/// let mut doc2 = automerge::AutoCommit::new(); +/// doc1.put(automerge::ROOT, "field", "one").unwrap(); +/// doc2.put(automerge::ROOT, "field", "two").unwrap(); /// doc1.merge(&mut doc2); /// assert_doc!( -/// &doc1, +/// doc1.document(), /// map!{ /// "field" => { /// "one", @@ -81,16 +89,11 @@ pub fn sorted_actors() -> (automerge::ActorId, automerge::ActorId) { #[macro_export] macro_rules! assert_doc { ($doc: expr, $expected: expr) => {{ - use $crate::helpers::realize; + use $crate::realize; let realized = realize($doc); let expected_obj = $expected.into(); if realized != expected_obj { - let serde_right = serde_json::to_string_pretty(&realized).unwrap(); - let serde_left = serde_json::to_string_pretty(&expected_obj).unwrap(); - panic!( - "documents didn't match\n expected\n{}\n got\n{}", - &serde_left, &serde_right - ); + $crate::pretty_panic(expected_obj, realized) } }}; } @@ -100,16 +103,11 @@ macro_rules! assert_doc { #[macro_export] macro_rules! 
assert_obj { ($doc: expr, $obj_id: expr, $prop: expr, $expected: expr) => {{ - use $crate::helpers::realize_prop; + use $crate::realize_prop; let realized = realize_prop($doc, $obj_id, $prop); let expected_obj = $expected.into(); if realized != expected_obj { - let serde_right = serde_json::to_string_pretty(&realized).unwrap(); - let serde_left = serde_json::to_string_pretty(&expected_obj).unwrap(); - panic!( - "documents didn't match\n expected\n{}\n got\n{}", - &serde_left, &serde_right - ); + $crate::pretty_panic(expected_obj, realized) } }}; } @@ -118,12 +116,13 @@ macro_rules! assert_obj { /// the keys of the map, the inner set is the set of values for that key: /// /// ``` +/// # use automerge_test::map; /// map!{ /// "key" => { /// "value1", /// "value2", /// } -/// } +/// }; /// ``` /// /// The map above would represent a map with a conflict on the "key" property. The values can be @@ -134,6 +133,7 @@ macro_rules! map { (@inner { $($value:expr),* }) => { { use std::collections::BTreeSet; + use $crate::RealizedObject; let mut inner: BTreeSet = BTreeSet::new(); $( let _ = inner.insert($value.into()); @@ -145,6 +145,7 @@ macro_rules! map { ($($key:expr => $inner:tt),*) => { { use std::collections::{BTreeMap, BTreeSet}; + use $crate::RealizedObject; let mut _map: BTreeMap> = ::std::collections::BTreeMap::new(); $( let inner = map!(@inner $inner); @@ -158,12 +159,13 @@ macro_rules! map { /// Construct `RealizedObject::Sequence`. This macro represents a sequence of values /// /// ``` +/// # use automerge_test::{list, RealizedObject}; /// list![ /// { /// "value1", /// "value2", /// } -/// ] +/// ]; /// ``` /// /// The list above would represent a list with a conflict on the 0 index. The values can be @@ -178,6 +180,7 @@ macro_rules! 
list { (@inner { $($value:expr),* }) => { { use std::collections::BTreeSet; + use $crate::RealizedObject; let mut inner: BTreeSet = BTreeSet::new(); $( let _ = inner.insert($value.into()); @@ -330,12 +333,12 @@ impl serde::Serialize for RealizedObject { } } -pub fn realize(doc: &automerge::Automerge) -> RealizedObject { +pub fn realize(doc: &R) -> RealizedObject { realize_obj(doc, &automerge::ROOT, automerge::ObjType::Map) } -pub fn realize_prop>( - doc: &automerge::Automerge, +pub fn realize_prop>( + doc: &R, obj_id: &automerge::ObjId, prop: P, ) -> RealizedObject { @@ -346,8 +349,8 @@ pub fn realize_prop>( } } -pub fn realize_obj( - doc: &automerge::Automerge, +pub fn realize_obj( + doc: &R, obj_id: &automerge::ObjId, objtype: automerge::ObjType, ) -> RealizedObject { @@ -370,8 +373,8 @@ pub fn realize_obj( } } -fn realize_values>( - doc: &automerge::Automerge, +fn realize_values>( + doc: &R, obj_id: &automerge::ObjId, key: K, ) -> BTreeSet { @@ -458,6 +461,12 @@ impl From<&str> for RealizedObject { } } +impl From for RealizedObject { + fn from(f: f64) -> Self { + RealizedObject::Value(OrdScalarValue::F64(f.into())) + } +} + impl From> for RealizedObject { fn from(vals: Vec) -> Self { RealizedObject::Sequence( @@ -473,7 +482,15 @@ impl From> for RealizedObject { } /// Pretty print the contents of a document -#[allow(dead_code)] pub fn pretty_print(doc: &automerge::Automerge) { println!("{}", serde_json::to_string_pretty(&realize(doc)).unwrap()) } + +pub fn pretty_panic(expected_obj: RealizedObject, realized: RealizedObject) { + let serde_right = serde_json::to_string_pretty(&realized).unwrap(); + let serde_left = serde_json::to_string_pretty(&expected_obj).unwrap(); + panic!( + "documents didn't match\n expected\n{}\n got\n{}", + &serde_left, &serde_right + ); +} diff --git a/automerge-wasm/.eslintignore b/rust/automerge-wasm/.eslintignore similarity index 100% rename from automerge-wasm/.eslintignore rename to rust/automerge-wasm/.eslintignore diff --git 
a/automerge-js/.eslintrc.cjs b/rust/automerge-wasm/.eslintrc.cjs similarity index 100% rename from automerge-js/.eslintrc.cjs rename to rust/automerge-wasm/.eslintrc.cjs diff --git a/automerge-wasm/.gitignore b/rust/automerge-wasm/.gitignore similarity index 60% rename from automerge-wasm/.gitignore rename to rust/automerge-wasm/.gitignore index a5ef445c..77c11e08 100644 --- a/automerge-wasm/.gitignore +++ b/rust/automerge-wasm/.gitignore @@ -1,5 +1,6 @@ /node_modules -/dev -/target +/bundler +/nodejs +/deno Cargo.lock yarn.lock diff --git a/automerge-wasm/Cargo.toml b/rust/automerge-wasm/Cargo.toml similarity index 87% rename from automerge-wasm/Cargo.toml rename to rust/automerge-wasm/Cargo.toml index 38fe3dab..b6055a7d 100644 --- a/automerge-wasm/Cargo.toml +++ b/rust/automerge-wasm/Cargo.toml @@ -28,14 +28,16 @@ serde = "^1.0" serde_json = "^1.0" rand = { version = "^0.8.4" } getrandom = { version = "^0.2.2", features=["js"] } -uuid = { version = "^0.8.2", features=["v4", "wasm-bindgen", "serde"] } +uuid = { version = "^1.2.1", features=["v4", "js", "serde"] } serde-wasm-bindgen = "0.4.3" serde_bytes = "0.11.5" hex = "^0.4.3" regex = "^1.5" +itertools = "^0.10.3" +thiserror = "^1.0.16" [dependencies.wasm-bindgen] -version = "^0.2" +version = "^0.2.83" #features = ["std"] features = ["serde-serialize", "std"] @@ -55,5 +57,6 @@ features = ["console"] [dev-dependencies] futures = "^0.1" +proptest = { version = "^1.0.0", default-features = false, features = ["std"] } wasm-bindgen-futures = "^0.4" wasm-bindgen-test = "^0.3" diff --git a/automerge-wasm/LICENSE b/rust/automerge-wasm/LICENSE similarity index 100% rename from automerge-wasm/LICENSE rename to rust/automerge-wasm/LICENSE diff --git a/automerge-wasm/README.md b/rust/automerge-wasm/README.md similarity index 91% rename from automerge-wasm/README.md rename to rust/automerge-wasm/README.md index 2fb6a2f0..20256313 100644 --- a/automerge-wasm/README.md +++ b/rust/automerge-wasm/README.md @@ -18,34 +18,6 @@ An 
Object id uniquely identifies a Map, List or Text object within a document. Heads refers to a set of hashes that uniquely identifies a point in time in a document's history. Heads are useful for comparing documents state or retrieving past states from the document. -### Using the Library and Creating a Document - -This is a rust/wasm package and will work in a node or web environment. Node is able to load wasm synchronously but a web environment is not. The 'init' export of the package is a function that returns a promise that resolves once the wasm is loaded. - -This creates a document in node. The memory allocated is handled by wasm and isn't managed by the javascript garbage collector and thus needs to be manually freed. - -```javascript - import { create } from "automerge-wasm" - - let doc = create() - - doc.free() -``` - -While this will work in both node and in a web context - -```javascript - import { init, create } from "automerge-wasm" - - init().then(_ => { - let doc = create() - doc.free() - }) - -``` - -The examples below will assume a node context for brevity. - ### Automerge Scalar Types Automerge has many scalar types. Methods like `put()` and `insert()` take an optional data type parameter. Normally the type can be inferred but in some cases, such as telling the difference between int, uint and a counter, it cannot. @@ -53,7 +25,7 @@ Automerge has many scalar types. Methods like `put()` and `insert()` take an op These are puts without a data type ```javascript - import { create } from "automerge-wasm" + import { create } from "@automerge/automerge-wasm" let doc = create() doc.put("/", "prop1", 100) // int @@ -63,7 +35,6 @@ These are puts without a data type doc.put("/", "prop5", new Uint8Array([1,2,3])) doc.put("/", "prop6", true) doc.put("/", "prop7", null) - doc.free() ``` Put's with a data type and examples of all the supported data types. @@ -71,7 +42,7 @@ Put's with a data type and examples of all the supported data types. 
While int vs uint vs f64 matters little in javascript, Automerge is a cross platform library where these distinctions matter. ```javascript - import { create } from "automerge-wasm" + import { create } from "@automerge/automerge-wasm" let doc = create() doc.put("/", "prop1", 100, "int") @@ -84,7 +55,6 @@ While int vs uint vs f64 matters little in javascript, Automerge is a cross plat doc.put("/", "prop8", new Uint8Array([1,2,3]), "bytes") doc.put("/", "prop9", true, "boolean") doc.put("/", "prop10", null, "null") - doc.free() ``` ### Automerge Object Types @@ -92,7 +62,7 @@ While int vs uint vs f64 matters little in javascript, Automerge is a cross plat Automerge WASM supports 3 object types. Maps, lists, and text. Maps are key value stores where the values can be any scalar type or any object type. Lists are numerically indexed sets of data that can hold any scalar or any object type. ```javascript - import { create } from "automerge-wasm" + import { create } from "@automerge/automerge-wasm" let doc = create() @@ -111,14 +81,12 @@ Automerge WASM supports 3 object types. Maps, lists, and text. Maps are key va // text is initialized with a string let notes = doc.putObject("/", "notes", "Hello world!") - - doc.free() ``` You can access objects by passing the object id as the first parameter for a call. ```javascript - import { create } from "automerge-wasm" + import { create } from "@automerge/automerge-wasm" let doc = create() @@ -142,8 +110,6 @@ You can access objects by passing the object id as the first parameter for a cal // use a path instead doc.put("/config", "align", "right") - - doc.free() ``` Using the id directly is always faster (as it prevents the path to id conversion internally) so it is preferred for performance critical code. @@ -165,7 +131,6 @@ Maps are key/value stores. The root object is always a map. 
The keys are alway doc.keys(mymap) // returns ["bytes","foo","sub"] doc.materialize("_root") // returns { mymap: { bytes: new Uint8Array([1,2,3]), foo: "bar", sub: {}}} - doc.free() ``` ### Lists @@ -185,12 +150,11 @@ Lists are index addressable sets of values. These values can be any scalar or o doc.materialize(items) // returns [ "bat", [1,2], { hello : "world" }, true, "bag", "brick"] doc.length(items) // returns 6 - doc.free() ``` ### Text -Text is a specialized list type intended for modifying a text document. The primary way to interact with a text document is via the `splice()` method. Spliced strings will be indexable by character (important to note for platforms that index by graphmeme cluster). Non text can be inserted into a text document and will be represented with the unicode object replacement character. +Text is a specialized list type intended for modifying a text document. The primary way to interact with a text document is via the `splice()` method. Spliced strings will be indexable by character (important to note for platforms that index by graphmeme cluster). ```javascript let doc = create("aaaaaa") @@ -198,13 +162,6 @@ Text is a specialized list type intended for modifying a text document. The pri doc.splice(notes, 6, 5, "everyone") doc.text(notes) // returns "Hello everyone" - - let obj = doc.insertObject(notes, 6, { hi: "there" }) - - doc.text(notes) // returns "Hello \ufffceveryone" - doc.getWithType(notes, 6) // returns ["map", obj] - doc.get(obj, "hi") // returns "there" - doc.free() ``` ### Tables @@ -234,7 +191,6 @@ When querying maps use the `get()` method with the object in question and the pr doc1.get("_root","key3") // returns "doc2val" doc1.getAll("_root","key3") // returns [[ "str", "doc1val"], ["str", "doc2val"]] - doc1.free(); doc2.free() ``` ### Counters @@ -256,8 +212,6 @@ Counters are 64 bit ints that support the increment operation. 
Frequently diffe doc1.merge(doc2) doc1.materialize("_root") // returns { number: 10, total: 33 } - - doc1.free(); doc2.free() ``` ### Transactions @@ -285,8 +239,6 @@ Generally speaking you don't need to think about transactions when using Automer doc.get("_root", "key") // returns "val2" doc.pendingOps() // returns 0 - - doc.free() ``` ### Viewing Old Versions of the Document @@ -308,8 +260,6 @@ All query functions can take an optional argument of `heads` which allow you to doc.get("_root","key",heads2) // returns "val2" doc.get("_root","key",heads1) // returns "val1" doc.get("_root","key",[]) // returns undefined - - doc.free() ``` This works for `get()`, `getAll()`, `keys()`, `length()`, `text()`, and `materialize()` @@ -335,8 +285,6 @@ The `merge()` command applies all changes in the argument doc into the calling d doc1.materialize("_root") // returns { key1: "val1", key2: "val2", key3: "val3" } doc2.materialize("_root") // returns { key1: "val1", key3: "val3" } - - doc1.free(); doc2.free() ``` Note that calling `a.merge(a)` will produce an unrecoverable error from the wasm-bindgen layer which (as of this writing) there is no workaround for. @@ -350,7 +298,7 @@ If you wish to incrementally update a saved Automerge doc you can call `saveIncr The `load()` function takes a `Uint8Array()` of bytes produced in this way and constitutes a new document. The `loadIncremental()` method is available if you wish to consume the result of a `saveIncremental()` with an already instanciated document. 
```javascript - import { create, load } from "automerge-wasm" + import { create, load } from "@automerge/automerge-wasm" let doc1 = create() @@ -382,14 +330,12 @@ The `load()` function takes a `Uint8Array()` of bytes produced in this way and c doc2.materialize("_root") // returns { key1: "value1", key2: "value2" } doc3.materialize("_root") // returns { key1: "value1", key2: "value2" } doc4.materialize("_root") // returns { key1: "value1", key2: "value2" } - - doc1.free(); doc2.free(); doc3.free(); doc4.free() ``` One interesting feature of automerge binary saves is that they can be concatenated together in any order and can still be loaded into a coherent merged document. ```javascript -import { load } from "automerge-wasm" +import { load } from "@automerge/automerge-wasm" import * as fs from "fs" let file1 = fs.readFileSync("automerge_save_1"); @@ -409,7 +355,7 @@ When syncing a document the `generateSyncMessage()` and `receiveSyncMessage()` m A very simple sync implementation might look like this. ```javascript - import { encodeSyncState, decodeSyncState, initSyncState } from "automerge-wasm" + import { encodeSyncState, decodeSyncState, initSyncState } from "@automerge/automerge-wasm" let states = {} @@ -457,7 +403,7 @@ Actors are ids that need to be unique to each process writing to a document. Th Methods that create new documents will generate random actors automatically - if you wish to supply your own it is always taken as an optional argument. This is true for the following functions. 
```javascript - import { create, load } from "automerge-wasm" + import { create, load } from "@automerge/automerge-wasm" let doc1 = create() // random actorid let doc2 = create("aabbccdd") @@ -467,8 +413,6 @@ Methods that create new documents will generate random actors automatically - if let doc6 = load(doc4.save(), "00aabb11") let actor = doc1.getActor() - - doc1.free(); doc2.free(); doc3.free(); doc4.free(); doc5.free(); doc6.free() ``` ### Glossary: Object Id's @@ -491,7 +435,35 @@ Object Ids uniquely identify an object within a document. They are represented doc.put(o1v2, "x", "y") // modifying the new "o1" object assert.deepEqual(doc.materialize("_root"), { "o1": { x: "y" }, "o2": {} }) - - doc.free() ``` +### Appendix: Building + + The following steps should allow you to build the package + + ``` + $ rustup target add wasm32-unknown-unknown + $ cargo install wasm-bindgen-cli + $ cargo install wasm-opt + $ yarn + $ yarn release + $ yarn pack + ``` + +### Appendix: WASM and Memory Allocation + +Allocated memory in rust will be freed automatically on platforms that support `FinalizationRegistry`. + +This is currently supported in [all major browsers and nodejs](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/FinalizationRegistry). + +On unsupported platforms you can free memory explicitly. 
+ +```javascript + import { create, initSyncState } from "@automerge/automerge-wasm" + + let doc = create() + let sync = initSyncState() + + doc.free() + sync.free() +``` diff --git a/rust/automerge-wasm/deno-tests/deno.ts b/rust/automerge-wasm/deno-tests/deno.ts new file mode 100644 index 00000000..b346435a --- /dev/null +++ b/rust/automerge-wasm/deno-tests/deno.ts @@ -0,0 +1,8 @@ +// @deno-types="../index.d.ts" +import { create } from '../deno/automerge_wasm.js' + +Deno.test("It should create, clone and free", () => { + const doc1 = create(false) + const doc2 = doc1.clone() + doc2.free() +}); diff --git a/automerge-wasm/examples/cra/.gitignore b/rust/automerge-wasm/examples/cra/.gitignore similarity index 100% rename from automerge-wasm/examples/cra/.gitignore rename to rust/automerge-wasm/examples/cra/.gitignore diff --git a/automerge-wasm/examples/cra/README.md b/rust/automerge-wasm/examples/cra/README.md similarity index 100% rename from automerge-wasm/examples/cra/README.md rename to rust/automerge-wasm/examples/cra/README.md diff --git a/automerge-wasm/examples/cra/package.json b/rust/automerge-wasm/examples/cra/package.json similarity index 100% rename from automerge-wasm/examples/cra/package.json rename to rust/automerge-wasm/examples/cra/package.json diff --git a/rust/automerge-wasm/examples/cra/public/favicon.ico b/rust/automerge-wasm/examples/cra/public/favicon.ico new file mode 100644 index 00000000..a11777cc Binary files /dev/null and b/rust/automerge-wasm/examples/cra/public/favicon.ico differ diff --git a/rust/automerge-wasm/examples/cra/public/index.html b/rust/automerge-wasm/examples/cra/public/index.html new file mode 100644 index 00000000..aa069f27 --- /dev/null +++ b/rust/automerge-wasm/examples/cra/public/index.html @@ -0,0 +1,43 @@ + + + + + + + + + + + + + React App + + + +
+ + + diff --git a/rust/automerge-wasm/examples/cra/public/logo192.png b/rust/automerge-wasm/examples/cra/public/logo192.png new file mode 100644 index 00000000..fc44b0a3 Binary files /dev/null and b/rust/automerge-wasm/examples/cra/public/logo192.png differ diff --git a/rust/automerge-wasm/examples/cra/public/logo512.png b/rust/automerge-wasm/examples/cra/public/logo512.png new file mode 100644 index 00000000..a4e47a65 Binary files /dev/null and b/rust/automerge-wasm/examples/cra/public/logo512.png differ diff --git a/rust/automerge-wasm/examples/cra/public/manifest.json b/rust/automerge-wasm/examples/cra/public/manifest.json new file mode 100644 index 00000000..080d6c77 --- /dev/null +++ b/rust/automerge-wasm/examples/cra/public/manifest.json @@ -0,0 +1,25 @@ +{ + "short_name": "React App", + "name": "Create React App Sample", + "icons": [ + { + "src": "favicon.ico", + "sizes": "64x64 32x32 24x24 16x16", + "type": "image/x-icon" + }, + { + "src": "logo192.png", + "type": "image/png", + "sizes": "192x192" + }, + { + "src": "logo512.png", + "type": "image/png", + "sizes": "512x512" + } + ], + "start_url": ".", + "display": "standalone", + "theme_color": "#000000", + "background_color": "#ffffff" +} diff --git a/rust/automerge-wasm/examples/cra/public/robots.txt b/rust/automerge-wasm/examples/cra/public/robots.txt new file mode 100644 index 00000000..e9e57dc4 --- /dev/null +++ b/rust/automerge-wasm/examples/cra/public/robots.txt @@ -0,0 +1,3 @@ +# https://www.robotstxt.org/robotstxt.html +User-agent: * +Disallow: diff --git a/rust/automerge-wasm/examples/cra/src/App.css b/rust/automerge-wasm/examples/cra/src/App.css new file mode 100644 index 00000000..74b5e053 --- /dev/null +++ b/rust/automerge-wasm/examples/cra/src/App.css @@ -0,0 +1,38 @@ +.App { + text-align: center; +} + +.App-logo { + height: 40vmin; + pointer-events: none; +} + +@media (prefers-reduced-motion: no-preference) { + .App-logo { + animation: App-logo-spin infinite 20s linear; + } +} + +.App-header 
{ + background-color: #282c34; + min-height: 100vh; + display: flex; + flex-direction: column; + align-items: center; + justify-content: center; + font-size: calc(10px + 2vmin); + color: white; +} + +.App-link { + color: #61dafb; +} + +@keyframes App-logo-spin { + from { + transform: rotate(0deg); + } + to { + transform: rotate(360deg); + } +} diff --git a/automerge-wasm/examples/cra/src/App.test.tsx b/rust/automerge-wasm/examples/cra/src/App.test.tsx similarity index 100% rename from automerge-wasm/examples/cra/src/App.test.tsx rename to rust/automerge-wasm/examples/cra/src/App.test.tsx diff --git a/automerge-wasm/examples/cra/src/App.tsx b/rust/automerge-wasm/examples/cra/src/App.tsx similarity index 100% rename from automerge-wasm/examples/cra/src/App.tsx rename to rust/automerge-wasm/examples/cra/src/App.tsx diff --git a/automerge-wasm/examples/cra/src/index.css b/rust/automerge-wasm/examples/cra/src/index.css similarity index 100% rename from automerge-wasm/examples/cra/src/index.css rename to rust/automerge-wasm/examples/cra/src/index.css diff --git a/automerge-wasm/examples/cra/src/index.tsx b/rust/automerge-wasm/examples/cra/src/index.tsx similarity index 100% rename from automerge-wasm/examples/cra/src/index.tsx rename to rust/automerge-wasm/examples/cra/src/index.tsx diff --git a/rust/automerge-wasm/examples/cra/src/logo.svg b/rust/automerge-wasm/examples/cra/src/logo.svg new file mode 100644 index 00000000..9dfc1c05 --- /dev/null +++ b/rust/automerge-wasm/examples/cra/src/logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/automerge-wasm/examples/cra/src/react-app-env.d.ts b/rust/automerge-wasm/examples/cra/src/react-app-env.d.ts similarity index 100% rename from automerge-wasm/examples/cra/src/react-app-env.d.ts rename to rust/automerge-wasm/examples/cra/src/react-app-env.d.ts diff --git a/automerge-wasm/examples/cra/src/reportWebVitals.ts b/rust/automerge-wasm/examples/cra/src/reportWebVitals.ts similarity index 100% rename from 
automerge-wasm/examples/cra/src/reportWebVitals.ts rename to rust/automerge-wasm/examples/cra/src/reportWebVitals.ts diff --git a/automerge-wasm/examples/cra/src/setupTests.ts b/rust/automerge-wasm/examples/cra/src/setupTests.ts similarity index 100% rename from automerge-wasm/examples/cra/src/setupTests.ts rename to rust/automerge-wasm/examples/cra/src/setupTests.ts diff --git a/automerge-wasm/examples/cra/tsconfig.json b/rust/automerge-wasm/examples/cra/tsconfig.json similarity index 100% rename from automerge-wasm/examples/cra/tsconfig.json rename to rust/automerge-wasm/examples/cra/tsconfig.json diff --git a/automerge-wasm/examples/webpack/.gitignore b/rust/automerge-wasm/examples/webpack/.gitignore similarity index 100% rename from automerge-wasm/examples/webpack/.gitignore rename to rust/automerge-wasm/examples/webpack/.gitignore diff --git a/automerge-wasm/examples/webpack/package.json b/rust/automerge-wasm/examples/webpack/package.json similarity index 100% rename from automerge-wasm/examples/webpack/package.json rename to rust/automerge-wasm/examples/webpack/package.json diff --git a/automerge-wasm/examples/webpack/public/index.html b/rust/automerge-wasm/examples/webpack/public/index.html similarity index 100% rename from automerge-wasm/examples/webpack/public/index.html rename to rust/automerge-wasm/examples/webpack/public/index.html diff --git a/automerge-wasm/examples/webpack/src/index.js b/rust/automerge-wasm/examples/webpack/src/index.js similarity index 100% rename from automerge-wasm/examples/webpack/src/index.js rename to rust/automerge-wasm/examples/webpack/src/index.js diff --git a/automerge-js/examples/webpack/webpack.config.js b/rust/automerge-wasm/examples/webpack/webpack.config.js similarity index 100% rename from automerge-js/examples/webpack/webpack.config.js rename to rust/automerge-wasm/examples/webpack/webpack.config.js diff --git a/automerge-wasm/types/index.d.ts b/rust/automerge-wasm/index.d.ts similarity index 73% rename from 
automerge-wasm/types/index.d.ts rename to rust/automerge-wasm/index.d.ts index ea57f9c2..be12e4c1 100644 --- a/automerge-wasm/types/index.d.ts +++ b/rust/automerge-wasm/index.d.ts @@ -1,4 +1,3 @@ - export type Actor = string; export type ObjID = string; export type Change = Uint8Array; @@ -83,6 +82,9 @@ export type DecodedChange = { ops: Op[] } +type PartialBy = Omit & Partial> +export type ChangeToEncode = PartialBy + export type Op = { action: string, obj: ObjID, @@ -92,18 +94,42 @@ export type Op = { pred: string[], } -export type Patch = { - obj: ObjID - action: 'assign' | 'insert' | 'delete' - key: Prop +export type Patch = PutPatch | DelPatch | SpliceTextPatch | IncPatch | InsertPatch; + +export type PutPatch = { + action: 'put' + path: Prop[], value: Value - datatype: Datatype conflict: boolean } -export function create(actor?: Actor): Automerge; -export function load(data: Uint8Array, actor?: Actor): Automerge; -export function encodeChange(change: DecodedChange): Change; +export type IncPatch = { + action: 'inc' + path: Prop[], + value: number +} + +export type DelPatch = { + action: 'del' + path: Prop[], + length?: number, +} + +export type SpliceTextPatch = { + action: 'splice' + path: Prop[], + value: string, +} + +export type InsertPatch = { + action: 'insert' + path: Prop[], + values: Value[], +} + +export function encodeChange(change: ChangeToEncode): Change; +export function create(text_v2: boolean, actor?: Actor): Automerge; +export function load(data: Uint8Array, text_v2: boolean, actor?: Actor): Automerge; export function decodeChange(change: Change): DecodedChange; export function initSyncState(): SyncState; export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; @@ -113,10 +139,10 @@ export function decodeSyncState(data: Uint8Array): SyncState; export function exportSyncState(state: SyncState): JsSyncState; export function importSyncState(state: JsSyncState): SyncState; -export class API { - create(actor?: Actor): 
Automerge; - load(data: Uint8Array, actor?: Actor): Automerge; - encodeChange(change: DecodedChange): Change; +export interface API { + create(text_v2: boolean, actor?: Actor): Automerge; + load(data: Uint8Array, text_v2: boolean, actor?: Actor): Automerge; + encodeChange(change: ChangeToEncode): Change; decodeChange(change: Change): DecodedChange; initSyncState(): SyncState; encodeSyncMessage(message: DecodedSyncMessage): SyncMessage; @@ -147,17 +173,21 @@ export class Automerge { keys(obj: ObjID, heads?: Heads): string[]; text(obj: ObjID, heads?: Heads): string; length(obj: ObjID, heads?: Heads): number; - materialize(obj?: ObjID, heads?: Heads): MaterializeValue; + materialize(obj?: ObjID, heads?: Heads, metadata?: unknown): MaterializeValue; + toJS(): MaterializeValue; // transactions - commit(message?: string, time?: number): Hash; + commit(message?: string, time?: number): Hash | null; + emptyChange(message?: string, time?: number): Hash; merge(other: Automerge): Heads; getActorId(): Actor; pendingOps(): number; rollback(): number; // patches - enablePatches(enable: boolean): void; + enablePatches(enable: boolean): boolean; + enableFreeze(enable: boolean): boolean; + registerDatatype(datatype: string, callback: Function): void; popPatches(): Patch[]; // save and load to local store @@ -179,16 +209,18 @@ export class Automerge { getMissingDeps(heads?: Heads): Heads; // memory management - free(): void; - clone(actor?: string): Automerge; - fork(actor?: string): Automerge; - forkAt(heads: Heads, actor?: string): Automerge; + free(): void; // only needed if weak-refs are unsupported + clone(actor?: string): Automerge; // TODO - remove, this is dangerous + fork(actor?: string, heads?: Heads): Automerge; - // dump internal state to console.log + // dump internal state to console.log - for debugging dump(): void; + + // experimental api can go here + applyPatches(obj: Doc, meta?: unknown, callback?: (patch: Array, before: Doc, after: Doc) => void): Doc; } -export 
class JsSyncState { +export interface JsSyncState { sharedHeads: Heads; lastSentHeads: Heads; theirHeads: Heads | undefined; @@ -204,6 +236,3 @@ export class SyncState { sentHashes: Heads; readonly sharedHeads: Heads; } - -export function init (): Promise; - diff --git a/rust/automerge-wasm/package.json b/rust/automerge-wasm/package.json new file mode 100644 index 00000000..80b39fd4 --- /dev/null +++ b/rust/automerge-wasm/package.json @@ -0,0 +1,60 @@ +{ + "collaborators": [ + "Orion Henry ", + "Alex Good ", + "Martin Kleppmann" + ], + "name": "@automerge/automerge-wasm", + "description": "wasm-bindgen bindings to the automerge rust implementation", + "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm", + "repository": "github:automerge/automerge-rs", + "version": "0.1.25", + "license": "MIT", + "files": [ + "README.md", + "LICENSE", + "package.json", + "index.d.ts", + "nodejs/automerge_wasm.js", + "nodejs/automerge_wasm_bg.wasm", + "deno/automerge_wasm.js", + "deno/automerge_wasm_bg.wasm", + "bundler/automerge_wasm.js", + "bundler/automerge_wasm_bg.js", + "bundler/automerge_wasm_bg.wasm" + ], + "private": false, + "types": "index.d.ts", + "module": "./bundler/automerge_wasm.js", + "main": "./nodejs/automerge_wasm.js", + "scripts": { + "lint": "eslint test/*.ts index.d.ts", + "debug": "cross-env PROFILE=dev TARGET_DIR=debug yarn buildall", + "build": "cross-env PROFILE=dev TARGET_DIR=debug FEATURES='' yarn buildall", + "release": "cross-env PROFILE=release TARGET_DIR=release yarn buildall", + "buildall": "cross-env TARGET=nodejs yarn target && cross-env TARGET=bundler yarn target && cross-env TARGET=deno yarn target", + "target": "rimraf ./$TARGET && yarn compile && yarn bindgen && yarn opt", + "compile": "cargo build --target wasm32-unknown-unknown --profile $PROFILE", + "bindgen": "wasm-bindgen --no-typescript --weak-refs --target $TARGET --out-dir $TARGET ../target/wasm32-unknown-unknown/$TARGET_DIR/automerge_wasm.wasm", + "opt": 
"wasm-opt -O4 $TARGET/automerge_wasm_bg.wasm -o $TARGET/automerge_wasm_bg.wasm", + "test": "ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts" + }, + "devDependencies": { + "@types/mocha": "^10.0.1", + "@types/node": "^18.11.13", + "@typescript-eslint/eslint-plugin": "^5.46.0", + "@typescript-eslint/parser": "^5.46.0", + "cross-env": "^7.0.3", + "eslint": "^8.29.0", + "fast-sha256": "^1.3.0", + "mocha": "^10.2.0", + "pako": "^2.1.0", + "rimraf": "^3.0.2", + "ts-mocha": "^10.0.0", + "typescript": "^4.9.4" + }, + "exports": { + "browser": "./bundler/automerge_wasm.js", + "require": "./nodejs/automerge_wasm.js" + } +} diff --git a/rust/automerge-wasm/src/interop.rs b/rust/automerge-wasm/src/interop.rs new file mode 100644 index 00000000..1546ff10 --- /dev/null +++ b/rust/automerge-wasm/src/interop.rs @@ -0,0 +1,1478 @@ +use crate::error::InsertObject; +use crate::value::Datatype; +use crate::{Automerge, TextRepresentation}; +use automerge as am; +use automerge::ReadDoc; +use automerge::ROOT; +use automerge::{Change, ChangeHash, ObjType, Prop}; +use js_sys::{Array, Function, JsString, Object, Reflect, Symbol, Uint8Array}; +use std::borrow::Cow; +use std::collections::{BTreeSet, HashSet}; +use std::fmt::Display; +use wasm_bindgen::prelude::*; +use wasm_bindgen::JsCast; + +use crate::{observer::Patch, ObjId, Value}; + +const RAW_DATA_SYMBOL: &str = "_am_raw_value_"; +const DATATYPE_SYMBOL: &str = "_am_datatype_"; +const RAW_OBJECT_SYMBOL: &str = "_am_objectId"; +const META_SYMBOL: &str = "_am_meta"; + +pub(crate) struct JS(pub(crate) JsValue); +pub(crate) struct AR(pub(crate) Array); + +impl From for JsValue { + fn from(ar: AR) -> Self { + ar.0.into() + } +} + +impl From for JsValue { + fn from(js: JS) -> Self { + js.0 + } +} + +impl From for JS { + fn from(state: am::sync::State) -> Self { + let shared_heads: JS = state.shared_heads.into(); + let last_sent_heads: JS = state.last_sent_heads.into(); + let their_heads: JS = state.their_heads.into(); + 
let their_need: JS = state.their_need.into(); + let sent_hashes: JS = state.sent_hashes.into(); + let their_have = if let Some(have) = &state.their_have { + JsValue::from(AR::from(have.as_slice()).0) + } else { + JsValue::null() + }; + let result: JsValue = Object::new().into(); + // we can unwrap here b/c we made the object and know its not frozen + Reflect::set(&result, &"sharedHeads".into(), &shared_heads.0).unwrap(); + Reflect::set(&result, &"lastSentHeads".into(), &last_sent_heads.0).unwrap(); + Reflect::set(&result, &"theirHeads".into(), &their_heads.0).unwrap(); + Reflect::set(&result, &"theirNeed".into(), &their_need.0).unwrap(); + Reflect::set(&result, &"theirHave".into(), &their_have).unwrap(); + Reflect::set(&result, &"sentHashes".into(), &sent_hashes.0).unwrap(); + Reflect::set(&result, &"inFlight".into(), &state.in_flight.into()).unwrap(); + JS(result) + } +} + +impl From> for JS { + fn from(heads: Vec) -> Self { + JS(heads + .iter() + .map(|h| JsValue::from_str(&h.to_string())) + .collect::() + .into()) + } +} + +impl From> for JS { + fn from(heads: HashSet) -> Self { + let result: JsValue = Object::new().into(); + for key in &heads { + Reflect::set(&result, &key.to_string().into(), &true.into()).unwrap(); + } + JS(result) + } +} + +impl From> for JS { + fn from(heads: BTreeSet) -> Self { + let result: JsValue = Object::new().into(); + for key in &heads { + Reflect::set(&result, &key.to_string().into(), &true.into()).unwrap(); + } + JS(result) + } +} + +impl From>> for JS { + fn from(heads: Option>) -> Self { + if let Some(v) = heads { + let v: Array = v + .iter() + .map(|h| JsValue::from_str(&h.to_string())) + .collect(); + JS(v.into()) + } else { + JS(JsValue::null()) + } + } +} + +impl TryFrom for HashSet { + type Error = error::BadChangeHashSet; + + fn try_from(value: JS) -> Result { + let result = HashSet::new(); + fold_hash_set(result, &value.0, |mut set, hash| { + set.insert(hash); + set + }) + } +} + +impl TryFrom for BTreeSet { + type Error = 
error::BadChangeHashSet; + + fn try_from(value: JS) -> Result { + let result = BTreeSet::new(); + fold_hash_set(result, &value.0, |mut set, hash| { + set.insert(hash); + set + }) + } +} + +fn fold_hash_set(init: O, val: &JsValue, f: F) -> Result +where + F: Fn(O, ChangeHash) -> O, +{ + let mut result = init; + for key in Reflect::own_keys(val) + .map_err(|_| error::BadChangeHashSet::ListProp)? + .iter() + { + if let Some(true) = js_get(val, &key)?.0.as_bool() { + let hash = ChangeHash::try_from(JS(key.clone())) + .map_err(|e| error::BadChangeHashSet::BadHash(key, e))?; + result = f(result, hash); + } + } + Ok(result) +} + +impl TryFrom for ChangeHash { + type Error = error::BadChangeHash; + + fn try_from(value: JS) -> Result { + if let Some(s) = value.0.as_string() { + Ok(s.parse()?) + } else { + Err(error::BadChangeHash::NotString) + } + } +} + +impl TryFrom for Option> { + type Error = error::BadChangeHashes; + + fn try_from(value: JS) -> Result { + if value.0.is_null() { + Ok(None) + } else { + Vec::::try_from(value).map(Some) + } + } +} + +impl TryFrom for Vec { + type Error = error::BadChangeHashes; + + fn try_from(value: JS) -> Result { + let value = value + .0 + .dyn_into::() + .map_err(|_| error::BadChangeHashes::NotArray)?; + let value = value + .iter() + .enumerate() + .map(|(i, v)| { + ChangeHash::try_from(JS(v)).map_err(|e| error::BadChangeHashes::BadElem(i, e)) + }) + .collect::, _>>()?; + Ok(value) + } +} + +impl TryFrom for Vec { + type Error = error::BadJSChanges; + + fn try_from(value: JS) -> Result { + let value = value + .0 + .dyn_into::() + .map_err(|_| error::BadJSChanges::ChangesNotArray)?; + let changes = value + .iter() + .enumerate() + .map(|(i, j)| { + j.dyn_into().map_err::(|_| { + error::BadJSChanges::ElemNotUint8Array(i) + }) + }) + .collect::, _>>()?; + let changes = changes + .iter() + .enumerate() + .map(|(i, arr)| { + automerge::Change::try_from(arr.to_vec().as_slice()) + .map_err(|e| error::BadJSChanges::BadChange(i, e)) + }) + 
.collect::, _>>()?; + Ok(changes) + } +} + +impl TryFrom for am::sync::State { + type Error = error::BadSyncState; + + fn try_from(value: JS) -> Result { + let value = value.0; + let shared_heads = js_get(&value, "sharedHeads")? + .try_into() + .map_err(error::BadSyncState::BadSharedHeads)?; + let last_sent_heads = js_get(&value, "lastSentHeads")? + .try_into() + .map_err(error::BadSyncState::BadLastSentHeads)?; + let their_heads = js_get(&value, "theirHeads")? + .try_into() + .map_err(error::BadSyncState::BadTheirHeads)?; + let their_need = js_get(&value, "theirNeed")? + .try_into() + .map_err(error::BadSyncState::BadTheirNeed)?; + let their_have = js_get(&value, "theirHave")? + .try_into() + .map_err(error::BadSyncState::BadTheirHave)?; + let sent_hashes = js_get(&value, "sentHashes")? + .try_into() + .map_err(error::BadSyncState::BadSentHashes)?; + let in_flight = js_get(&value, "inFlight")? + .0 + .as_bool() + .ok_or(error::BadSyncState::InFlightNotBoolean)?; + Ok(am::sync::State { + shared_heads, + last_sent_heads, + their_heads, + their_need, + their_have, + sent_hashes, + in_flight, + }) + } +} + +impl TryFrom for am::sync::Have { + type Error = error::BadHave; + + fn try_from(value: JS) -> Result { + let last_sync = js_get(&value.0, "lastSync")? + .try_into() + .map_err(error::BadHave::BadLastSync)?; + let bloom = js_get(&value.0, "bloom")? 
+ .try_into() + .map_err(error::BadHave::BadBloom)?; + Ok(am::sync::Have { last_sync, bloom }) + } +} + +impl TryFrom for Option> { + type Error = error::BadHaves; + + fn try_from(value: JS) -> Result { + if value.0.is_null() { + Ok(None) + } else { + Ok(Some(value.try_into()?)) + } + } +} + +impl TryFrom for Vec { + type Error = error::BadHaves; + + fn try_from(value: JS) -> Result { + let value = value + .0 + .dyn_into::() + .map_err(|_| error::BadHaves::NotArray)?; + let have = value + .iter() + .enumerate() + .map(|(i, s)| JS(s).try_into().map_err(|e| error::BadHaves::BadElem(i, e))) + .collect::, _>>()?; + Ok(have) + } +} + +impl TryFrom for am::sync::BloomFilter { + type Error = error::BadBloom; + + fn try_from(value: JS) -> Result { + let value: Uint8Array = value + .0 + .dyn_into() + .map_err(|_| error::BadBloom::NotU8Array)?; + let value = value.to_vec(); + let value = value.as_slice().try_into()?; + Ok(value) + } +} + +impl TryFrom for am::sync::Message { + type Error = error::BadSyncMessage; + + fn try_from(value: JS) -> Result { + let heads = js_get(&value.0, "heads")? + .try_into() + .map_err(error::BadSyncMessage::BadHeads)?; + let need = js_get(&value.0, "need")? 
+ .try_into() + .map_err(error::BadSyncMessage::BadNeed)?; + let changes = js_get(&value.0, "changes")?.try_into()?; + let have = js_get(&value.0, "have")?.try_into()?; + Ok(am::sync::Message { + heads, + need, + have, + changes, + }) + } +} + +impl From<&[ChangeHash]> for AR { + fn from(value: &[ChangeHash]) -> Self { + AR(value + .iter() + .map(|h| JsValue::from_str(&hex::encode(h.0))) + .collect()) + } +} + +impl From<&[Change]> for AR { + fn from(value: &[Change]) -> Self { + let changes: Array = value + .iter() + .map(|c| Uint8Array::from(c.raw_bytes())) + .collect(); + AR(changes) + } +} + +impl From<&[am::sync::Have]> for AR { + fn from(value: &[am::sync::Have]) -> Self { + AR(value + .iter() + .map(|have| { + let last_sync: Array = have + .last_sync + .iter() + .map(|h| JsValue::from_str(&hex::encode(h.0))) + .collect(); + // FIXME - the clone and the unwrap here shouldnt be needed - look at into_bytes() + let bloom = Uint8Array::from(have.bloom.to_bytes().as_slice()); + let obj: JsValue = Object::new().into(); + // we can unwrap here b/c we created the object and know its not frozen + Reflect::set(&obj, &"lastSync".into(), &last_sync.into()).unwrap(); + Reflect::set(&obj, &"bloom".into(), &bloom.into()).unwrap(); + obj + }) + .collect()) + } +} + +pub(crate) fn to_js_err(err: T) -> JsValue { + js_sys::Error::new(&std::format!("{}", err)).into() +} + +pub(crate) fn js_get, S: std::fmt::Debug + Into>( + obj: J, + prop: S, +) -> Result { + let prop = prop.into(); + Ok(JS(Reflect::get(&obj.into(), &prop).map_err(|e| { + error::GetProp { + property: format!("{:?}", prop), + error: e, + } + })?)) +} + +pub(crate) fn js_set, S: std::fmt::Debug + Into>( + obj: &JsValue, + prop: S, + val: V, +) -> Result { + let prop = prop.into(); + Reflect::set(obj, &prop, &val.into()).map_err(|e| error::SetProp { + property: prop, + error: e, + }) +} + +pub(crate) fn js_get_symbol>(obj: J, prop: &Symbol) -> Result { + Ok(JS(Reflect::get(&obj.into(), &prop.into()).map_err( + |e| 
error::GetProp { + property: format!("{}", prop.to_string()), + error: e, + }, + )?)) +} + +pub(crate) fn to_prop(p: JsValue) -> Result { + if let Some(s) = p.as_string() { + Ok(Prop::Map(s)) + } else if let Some(n) = p.as_f64() { + Ok(Prop::Seq(n as usize)) + } else { + Err(error::InvalidProp) + } +} + +pub(crate) enum JsObjType { + Text(String), + Map(Vec<(Prop, JsValue)>), + List(Vec<(Prop, JsValue)>), +} + +impl JsObjType { + pub(crate) fn objtype(&self) -> ObjType { + match self { + Self::Text(_) => ObjType::Text, + Self::Map(_) => ObjType::Map, + Self::List(_) => ObjType::List, + } + } + + pub(crate) fn text(&self) -> Option<&str> { + match self { + Self::Text(s) => Some(s.as_ref()), + Self::Map(_) => None, + Self::List(_) => None, + } + } + + pub(crate) fn subvals(&self) -> impl Iterator, JsValue)> + '_ + Clone { + match self { + Self::Text(s) => SubValIter::Str(s.chars().enumerate()), + Self::Map(sub) => SubValIter::Slice(sub.as_slice().iter()), + Self::List(sub) => SubValIter::Slice(sub.as_slice().iter()), + } + } +} + +#[derive(Debug, Clone)] +pub(crate) enum SubValIter<'a> { + Slice(std::slice::Iter<'a, (Prop, JsValue)>), + Str(std::iter::Enumerate>), +} + +impl<'a> Iterator for SubValIter<'a> { + type Item = (std::borrow::Cow<'a, Prop>, JsValue); + + fn next(&mut self) -> Option { + match self { + Self::Slice(i) => i + .next() + .map(|(p, v)| (std::borrow::Cow::Borrowed(p), v.clone())), + Self::Str(i) => i + .next() + .map(|(n, c)| (std::borrow::Cow::Owned(Prop::Seq(n)), c.to_string().into())), + } + } +} + +pub(crate) fn import_obj( + value: &JsValue, + datatype: &Option, +) -> Result { + match datatype.as_deref() { + Some("map") => { + let map = value + .clone() + .dyn_into::() + .map_err(|_| InsertObject::ValueNotObject)?; + let map = js_sys::Object::keys(&map) + .iter() + .zip(js_sys::Object::values(&map).iter()) + .map(|(key, val)| (key.as_string().unwrap().into(), val)) + .collect(); + Ok(JsObjType::Map(map)) + } + Some("list") => { + let list = 
value + .clone() + .dyn_into::() + .map_err(|_| InsertObject::ValueNotObject)?; + let list = list + .iter() + .enumerate() + .map(|(i, e)| (i.into(), e)) + .collect(); + Ok(JsObjType::List(list)) + } + Some("text") => { + let text = value.as_string().ok_or(InsertObject::ValueNotObject)?; + Ok(JsObjType::Text(text)) + } + Some(_) => Err(InsertObject::ValueNotObject), + None => { + if let Ok(list) = value.clone().dyn_into::() { + let list = list + .iter() + .enumerate() + .map(|(i, e)| (i.into(), e)) + .collect(); + Ok(JsObjType::List(list)) + } else if let Ok(map) = value.clone().dyn_into::() { + let map = js_sys::Object::keys(&map) + .iter() + .zip(js_sys::Object::values(&map).iter()) + .map(|(key, val)| (key.as_string().unwrap().into(), val)) + .collect(); + Ok(JsObjType::Map(map)) + } else if let Some(s) = value.as_string() { + Ok(JsObjType::Text(s)) + } else { + Err(InsertObject::ValueNotObject) + } + } + } +} + +pub(crate) fn get_heads( + heads: Option, +) -> Result>, error::BadChangeHashes> { + heads + .map(|h| { + h.iter() + .enumerate() + .map(|(i, v)| { + ChangeHash::try_from(JS(v)).map_err(|e| error::BadChangeHashes::BadElem(i, e)) + }) + .collect() + }) + .transpose() +} + +impl Automerge { + pub(crate) fn export_object( + &self, + obj: &ObjId, + datatype: Datatype, + heads: Option<&Vec>, + meta: &JsValue, + ) -> Result { + let result = match datatype { + Datatype::Text => match self.text_rep { + TextRepresentation::String => { + if let Some(heads) = heads { + self.doc.text_at(obj, heads)?.into() + } else { + self.doc.text(obj)?.into() + } + } + TextRepresentation::Array => self + .wrap_object(self.export_list(obj, heads, meta)?, datatype, obj, meta)? + .into(), + }, + Datatype::List => self + .wrap_object(self.export_list(obj, heads, meta)?, datatype, obj, meta)? + .into(), + _ => self + .wrap_object(self.export_map(obj, heads, meta)?, datatype, obj, meta)? 
+ .into(), + }; + Ok(result) + } + + pub(crate) fn export_map( + &self, + obj: &ObjId, + heads: Option<&Vec>, + meta: &JsValue, + ) -> Result { + let keys = self.doc.keys(obj); + let map = Object::new(); + for k in keys { + let val_and_id = if let Some(heads) = heads { + self.doc.get_at(obj, &k, heads) + } else { + self.doc.get(obj, &k) + }; + if let Ok(Some((val, id))) = val_and_id { + let subval = match val { + Value::Object(o) => self.export_object(&id, o.into(), heads, meta)?, + Value::Scalar(_) => self.export_value(alloc(&val, self.text_rep))?, + }; + js_set(&map, &k, &subval)?; + }; + } + + Ok(map) + } + + pub(crate) fn export_list( + &self, + obj: &ObjId, + heads: Option<&Vec>, + meta: &JsValue, + ) -> Result { + let len = self.doc.length(obj); + let array = Array::new(); + for i in 0..len { + let val_and_id = if let Some(heads) = heads { + self.doc.get_at(obj, i, heads) + } else { + self.doc.get(obj, i) + }; + if let Ok(Some((val, id))) = val_and_id { + let subval = match val { + Value::Object(o) => self.export_object(&id, o.into(), heads, meta)?, + Value::Scalar(_) => self.export_value(alloc(&val, self.text_rep))?, + }; + array.push(&subval); + }; + } + + Ok(array.into()) + } + + pub(crate) fn export_value( + &self, + (datatype, raw_value): (Datatype, JsValue), + ) -> Result { + if let Some(function) = self.external_types.get(&datatype) { + let wrapped_value = function + .call1(&JsValue::undefined(), &raw_value) + .map_err(|e| error::Export::CallDataHandler(datatype.to_string(), e))?; + if let Ok(o) = wrapped_value.dyn_into::() { + let key = Symbol::for_(RAW_DATA_SYMBOL); + set_hidden_value(&o, &key, &raw_value)?; + let key = Symbol::for_(DATATYPE_SYMBOL); + set_hidden_value(&o, &key, datatype)?; + Ok(o.into()) + } else { + Err(error::Export::InvalidDataHandler(datatype.to_string())) + } + } else { + Ok(raw_value) + } + } + + pub(crate) fn unwrap_object( + &self, + ext_val: &Object, + ) -> Result<(Object, Datatype, ObjId), error::Export> { + let inner = 
js_get_symbol(ext_val, &Symbol::for_(RAW_DATA_SYMBOL))?.0; + + let datatype = js_get_symbol(ext_val, &Symbol::for_(DATATYPE_SYMBOL))? + .0 + .try_into(); + + let id_val = js_get_symbol(ext_val, &Symbol::for_(RAW_OBJECT_SYMBOL))?.0; + let id = if id_val.is_undefined() { + am::ROOT + } else { + self.doc.import(&id_val.as_string().unwrap_or_default())?.0 + }; + + let inner = inner + .dyn_into::() + .unwrap_or_else(|_| ext_val.clone()); + let datatype = datatype.unwrap_or_else(|_| { + if Array::is_array(&inner) { + Datatype::List + } else { + Datatype::Map + } + }); + Ok((inner, datatype, id)) + } + + pub(crate) fn unwrap_scalar(&self, ext_val: JsValue) -> Result { + let inner = js_get_symbol(&ext_val, &Symbol::for_(RAW_DATA_SYMBOL))?.0; + if !inner.is_undefined() { + Ok(inner) + } else { + Ok(ext_val) + } + } + + fn maybe_wrap_object( + &self, + (datatype, raw_value): (Datatype, JsValue), + id: &ObjId, + meta: &JsValue, + ) -> Result { + if let Ok(obj) = raw_value.clone().dyn_into::() { + let result = self.wrap_object(obj, datatype, id, meta)?; + Ok(result.into()) + } else { + self.export_value((datatype, raw_value)) + } + } + + pub(crate) fn wrap_object( + &self, + value: Object, + datatype: Datatype, + id: &ObjId, + meta: &JsValue, + ) -> Result { + let value = if let Some(function) = self.external_types.get(&datatype) { + let wrapped_value = function + .call1(&JsValue::undefined(), &value) + .map_err(|e| error::Export::CallDataHandler(datatype.to_string(), e))?; + let wrapped_object = wrapped_value + .dyn_into::() + .map_err(|_| error::Export::InvalidDataHandler(datatype.to_string()))?; + set_hidden_value(&wrapped_object, &Symbol::for_(RAW_DATA_SYMBOL), value)?; + wrapped_object + } else { + value + }; + if matches!(datatype, Datatype::Map | Datatype::List) + || (datatype == Datatype::Text && self.text_rep == TextRepresentation::Array) + { + set_hidden_value( + &value, + &Symbol::for_(RAW_OBJECT_SYMBOL), + &JsValue::from(&id.to_string()), + )?; + } + 
set_hidden_value(&value, &Symbol::for_(DATATYPE_SYMBOL), datatype)?; + set_hidden_value(&value, &Symbol::for_(META_SYMBOL), meta)?; + if self.freeze { + Object::freeze(&value); + } + Ok(value) + } + + pub(crate) fn apply_patch_to_array( + &self, + array: &Object, + patch: &Patch, + meta: &JsValue, + exposed: &mut HashSet, + ) -> Result { + let result = Array::from(array); // shallow copy + match patch { + Patch::PutSeq { + index, + value, + expose, + .. + } => { + if *expose && value.0.is_object() { + exposed.insert(value.1.clone()); + js_set(&result, *index as f64, &JsValue::null())?; + } else { + let sub_val = + self.maybe_wrap_object(alloc(&value.0, self.text_rep), &value.1, meta)?; + js_set(&result, *index as f64, &sub_val)?; + } + Ok(result.into()) + } + Patch::DeleteSeq { index, length, .. } => { + Ok(self.sub_splice(result, *index, *length, vec![], meta)?) + } + Patch::Insert { index, values, .. } => { + Ok(self.sub_splice(result, *index, 0, values, meta)?) + } + Patch::Increment { prop, value, .. } => { + if let Prop::Seq(index) = prop { + let index = *index as f64; + let old_val = js_get(&result, index)?.0; + let old_val = self.unwrap_scalar(old_val)?; + if let Some(old) = old_val.as_f64() { + let new_value: Value<'_> = + am::ScalarValue::counter(old as i64 + *value).into(); + js_set( + &result, + index, + &self.export_value(alloc(&new_value, self.text_rep))?, + )?; + Ok(result.into()) + } else { + Err(error::ApplyPatch::IncrementNonNumeric) + } + } else { + Err(error::ApplyPatch::IncrementKeyInSeq) + } + } + Patch::DeleteMap { .. } => Err(error::ApplyPatch::DeleteKeyFromSeq), + Patch::PutMap { .. } => Err(error::ApplyPatch::PutKeyInSeq), + Patch::SpliceText { index, value, .. 
} => { + match self.text_rep { + TextRepresentation::String => Err(error::ApplyPatch::SpliceTextInSeq), + TextRepresentation::Array => { + let bytes: Vec = value.iter().cloned().collect(); + let val = String::from_utf16_lossy(bytes.as_slice()); + let elems = val + .chars() + .map(|c| { + ( + Value::Scalar(std::borrow::Cow::Owned(am::ScalarValue::Str( + c.to_string().into(), + ))), + ObjId::Root, // Using ROOT is okay because this ID is never used as + // we're producing ScalarValue::Str + ) + }) + .collect::>(); + Ok(self.sub_splice(result, *index, 0, &elems, meta)?) + } + } + } + } + } + + pub(crate) fn apply_patch_to_map( + &self, + map: &Object, + patch: &Patch, + meta: &JsValue, + exposed: &mut HashSet, + ) -> Result { + let result = Object::assign(&Object::new(), map); // shallow copy + match patch { + Patch::PutMap { + key, value, expose, .. + } => { + if *expose && value.0.is_object() { + exposed.insert(value.1.clone()); + js_set(&result, key, &JsValue::null())?; + } else { + let sub_val = + self.maybe_wrap_object(alloc(&value.0, self.text_rep), &value.1, meta)?; + js_set(&result, key, &sub_val)?; + } + Ok(result) + } + Patch::DeleteMap { key, .. } => { + Reflect::delete_property(&result, &key.into()).map_err(|e| { + error::Export::Delete { + prop: key.to_string(), + err: e, + } + })?; + Ok(result) + } + Patch::Increment { prop, value, .. } => { + if let Prop::Map(key) = prop { + let old_val = js_get(&result, key)?.0; + let old_val = self.unwrap_scalar(old_val)?; + if let Some(old) = old_val.as_f64() { + let new_value: Value<'_> = + am::ScalarValue::counter(old as i64 + *value).into(); + js_set( + &result, + key, + &self.export_value(alloc(&new_value, self.text_rep))?, + )?; + Ok(result) + } else { + Err(error::ApplyPatch::IncrementNonNumeric) + } + } else { + Err(error::ApplyPatch::IncrementIndexInMap) + } + } + Patch::Insert { .. } => Err(error::ApplyPatch::InsertInMap), + Patch::DeleteSeq { .. 
} => Err(error::ApplyPatch::SpliceInMap), + //Patch::SpliceText { .. } => Err(to_js_err("cannot Splice into map")), + Patch::SpliceText { .. } => Err(error::ApplyPatch::SpliceTextInMap), + Patch::PutSeq { .. } => Err(error::ApplyPatch::PutIdxInMap), + } + } + + pub(crate) fn apply_patch( + &self, + obj: Object, + patch: &Patch, + depth: usize, + meta: &JsValue, + exposed: &mut HashSet, + ) -> Result { + let (inner, datatype, id) = self.unwrap_object(&obj)?; + let prop = patch.path().get(depth).map(|p| prop_to_js(&p.1)); + let result = if let Some(prop) = prop { + let subval = js_get(&inner, &prop)?.0; + if subval.is_string() && patch.path().len() - 1 == depth { + if let Ok(s) = subval.dyn_into::() { + let new_value = self.apply_patch_to_text(&s, patch)?; + let result = shallow_copy(&inner); + js_set(&result, &prop, &new_value)?; + Ok(result) + } else { + // bad patch - short circuit + Ok(obj) + } + } else if let Ok(sub_obj) = js_get(&inner, &prop)?.0.dyn_into::() { + let new_value = self.apply_patch(sub_obj, patch, depth + 1, meta, exposed)?; + let result = shallow_copy(&inner); + js_set(&result, &prop, &new_value)?; + Ok(result) + } else { + // if a patch is trying to access a deleted object make no change + // short circuit the wrap process + return Ok(obj); + } + } else if Array::is_array(&inner) { + if &id == patch.obj() { + self.apply_patch_to_array(&inner, patch, meta, exposed) + } else { + Ok(Array::from(&inner).into()) + } + } else if &id == patch.obj() { + self.apply_patch_to_map(&inner, patch, meta, exposed) + } else { + Ok(Object::assign(&Object::new(), &inner)) + }?; + + self.wrap_object(result, datatype, &id, meta) + .map_err(|e| e.into()) + } + + fn apply_patch_to_text( + &self, + string: &JsString, + patch: &Patch, + ) -> Result { + match patch { + Patch::DeleteSeq { index, length, .. 
} => { + let index = *index as u32; + let before = string.slice(0, index); + let after = string.slice(index + *length as u32, string.length()); + let result = before.concat(&after); + Ok(result.into()) + } + Patch::SpliceText { index, value, .. } => { + let index = *index as u32; + let length = string.length(); + let before = string.slice(0, index); + let after = string.slice(index, length); + let bytes: Vec = value.iter().cloned().collect(); + let result = before + .concat(&String::from_utf16_lossy(bytes.as_slice()).into()) + .concat(&after); + Ok(result.into()) + } + _ => Ok(string.into()), + } + } + + fn sub_splice<'a, I: IntoIterator, ObjId)>>( + &self, + o: Array, + index: usize, + num_del: usize, + values: I, + meta: &JsValue, + ) -> Result { + let args: Array = values + .into_iter() + .map(|v| self.maybe_wrap_object(alloc(&v.0, self.text_rep), &v.1, meta)) + .collect::>()?; + args.unshift(&(num_del as u32).into()); + args.unshift(&(index as u32).into()); + let method = js_get(&o, "splice")? + .0 + .dyn_into::() + .map_err(error::Export::GetSplice)?; + Reflect::apply(&method, &o, &args).map_err(error::Export::CallSplice)?; + Ok(o.into()) + } + + pub(crate) fn import(&self, id: JsValue) -> Result<(ObjId, am::ObjType), error::ImportObj> { + if let Some(s) = id.as_string() { + // valid formats are + // 123@aabbcc + // 123@aabccc/prop1/prop2/prop3 + // /prop1/prop2/prop3 + let mut components = s.split('/'); + let obj = components.next(); + let (id, obj_type) = if obj == Some("") { + (ROOT, am::ObjType::Map) + } else { + self.doc + .import(obj.unwrap_or_default()) + .map_err(error::ImportObj::BadImport)? 
+ }; + self.import_path(id, obj_type, components) + .map_err(|e| error::ImportObj::InvalidPath(s.to_string(), e)) + } else { + Err(error::ImportObj::NotString) + } + } + + fn import_path<'a, I: Iterator>( + &self, + mut obj: ObjId, + mut obj_type: am::ObjType, + components: I, + ) -> Result<(ObjId, am::ObjType), error::ImportPath> { + for (i, prop) in components.enumerate() { + if prop.is_empty() { + break; + } + let is_map = matches!(obj_type, am::ObjType::Map | am::ObjType::Table); + let val = if is_map { + self.doc.get(obj, prop)? + } else { + let idx = prop + .parse() + .map_err(|_| error::ImportPath::IndexNotInteger(i, prop.to_string()))?; + self.doc.get(obj, am::Prop::Seq(idx))? + }; + match val { + Some((am::Value::Object(am::ObjType::Map), id)) => { + obj_type = am::ObjType::Map; + obj = id; + } + Some((am::Value::Object(am::ObjType::Table), id)) => { + obj_type = am::ObjType::Table; + obj = id; + } + Some((am::Value::Object(am::ObjType::List), id)) => { + obj_type = am::ObjType::List; + obj = id; + } + Some((am::Value::Object(am::ObjType::Text), id)) => { + obj_type = am::ObjType::Text; + obj = id; + } + None => return Err(error::ImportPath::NonExistentObject(i, prop.to_string())), + _ => return Err(error::ImportPath::NotAnObject), + }; + } + Ok((obj, obj_type)) + } + + pub(crate) fn import_prop(&self, prop: JsValue) -> Result { + if let Some(s) = prop.as_string() { + Ok(s.into()) + } else if let Some(n) = prop.as_f64() { + Ok((n as usize).into()) + } else { + Err(error::InvalidProp) + } + } + + pub(crate) fn import_scalar( + &self, + value: &JsValue, + datatype: &Option, + ) -> Option { + match datatype.as_deref() { + Some("boolean") => value.as_bool().map(am::ScalarValue::Boolean), + Some("int") => value.as_f64().map(|v| am::ScalarValue::Int(v as i64)), + Some("uint") => value.as_f64().map(|v| am::ScalarValue::Uint(v as u64)), + Some("str") => value.as_string().map(|v| am::ScalarValue::Str(v.into())), + Some("f64") => 
value.as_f64().map(am::ScalarValue::F64), + Some("bytes") => Some(am::ScalarValue::Bytes( + value.clone().dyn_into::().unwrap().to_vec(), + )), + Some("counter") => value.as_f64().map(|v| am::ScalarValue::counter(v as i64)), + Some("timestamp") => { + if let Some(v) = value.as_f64() { + Some(am::ScalarValue::Timestamp(v as i64)) + } else if let Ok(d) = value.clone().dyn_into::() { + Some(am::ScalarValue::Timestamp(d.get_time() as i64)) + } else { + None + } + } + Some("null") => Some(am::ScalarValue::Null), + Some(_) => None, + None => { + if value.is_null() { + Some(am::ScalarValue::Null) + } else if let Some(b) = value.as_bool() { + Some(am::ScalarValue::Boolean(b)) + } else if let Some(s) = value.as_string() { + Some(am::ScalarValue::Str(s.into())) + } else if let Some(n) = value.as_f64() { + if (n.round() - n).abs() < f64::EPSILON { + Some(am::ScalarValue::Int(n as i64)) + } else { + Some(am::ScalarValue::F64(n)) + } + } else if let Ok(d) = value.clone().dyn_into::() { + Some(am::ScalarValue::Timestamp(d.get_time() as i64)) + } else if let Ok(o) = &value.clone().dyn_into::() { + Some(am::ScalarValue::Bytes(o.to_vec())) + } else { + None + } + } + } + } + + pub(crate) fn import_value( + &self, + value: &JsValue, + datatype: Option, + ) -> Result<(Value<'static>, Vec<(Prop, JsValue)>), error::InvalidValue> { + match self.import_scalar(value, &datatype) { + Some(val) => Ok((val.into(), vec![])), + None => { + if let Ok(js_obj) = import_obj(value, &datatype) { + Ok(( + js_obj.objtype().into(), + js_obj + .subvals() + .map(|(p, v)| (p.into_owned(), v)) + .collect::>(), + )) + } else { + web_sys::console::log_2(&"Invalid value".into(), value); + Err(error::InvalidValue) + } + } + } + } + + pub(crate) fn finalize_exposed( + &self, + object: &JsValue, + exposed: HashSet, + meta: &JsValue, + ) -> Result<(), error::ApplyPatch> { + for obj in exposed { + let mut pointer = object.clone(); + if let Ok(obj_type) = self.doc.object_type(&obj) { + // only valid obj's should 
make it to this point ... + let path: Vec<_> = self + .doc + .path_to_object(&obj)? + .iter() + .map(|p| prop_to_js(&p.1)) + .collect(); + let value = self.export_object(&obj, obj_type.into(), None, meta)?; + for (i, prop) in path.iter().enumerate() { + if i + 1 < path.len() { + pointer = js_get(&pointer, prop)?.0; + } else { + js_set(&pointer, prop, &value)?; + } + } + } + } + Ok(()) + } +} + +pub(crate) fn alloc(value: &Value<'_>, text_rep: TextRepresentation) -> (Datatype, JsValue) { + match value { + am::Value::Object(o) => match o { + ObjType::Map => (Datatype::Map, Object::new().into()), + ObjType::Table => (Datatype::Table, Object::new().into()), + ObjType::List => (Datatype::List, Array::new().into()), + ObjType::Text => match text_rep { + TextRepresentation::String => (Datatype::Text, "".into()), + TextRepresentation::Array => (Datatype::Text, Array::new().into()), + }, + }, + am::Value::Scalar(s) => match s.as_ref() { + am::ScalarValue::Bytes(v) => (Datatype::Bytes, Uint8Array::from(v.as_slice()).into()), + am::ScalarValue::Str(v) => (Datatype::Str, v.to_string().into()), + am::ScalarValue::Int(v) => (Datatype::Int, (*v as f64).into()), + am::ScalarValue::Uint(v) => (Datatype::Uint, (*v as f64).into()), + am::ScalarValue::F64(v) => (Datatype::F64, (*v).into()), + am::ScalarValue::Counter(v) => (Datatype::Counter, (f64::from(v)).into()), + am::ScalarValue::Timestamp(v) => ( + Datatype::Timestamp, + js_sys::Date::new(&(*v as f64).into()).into(), + ), + am::ScalarValue::Boolean(v) => (Datatype::Boolean, (*v).into()), + am::ScalarValue::Null => (Datatype::Null, JsValue::null()), + am::ScalarValue::Unknown { bytes, type_code } => ( + Datatype::Unknown(*type_code), + Uint8Array::from(bytes.as_slice()).into(), + ), + }, + } +} + +fn set_hidden_value>( + o: &Object, + key: &Symbol, + value: V, +) -> Result<(), error::Export> { + let definition = Object::new(); + js_set(&definition, "value", &value.into()).map_err(|_| error::Export::SetHidden("value"))?; + 
js_set(&definition, "writable", false).map_err(|_| error::Export::SetHidden("writable"))?; + js_set(&definition, "enumerable", false).map_err(|_| error::Export::SetHidden("enumerable"))?; + js_set(&definition, "configurable", false) + .map_err(|_| error::Export::SetHidden("configurable"))?; + Object::define_property(o, &key.into(), &definition); + Ok(()) +} + +fn shallow_copy(obj: &Object) -> Object { + if Array::is_array(obj) { + Array::from(obj).into() + } else { + Object::assign(&Object::new(), obj) + } +} + +fn prop_to_js(prop: &Prop) -> JsValue { + match prop { + Prop::Map(key) => key.into(), + Prop::Seq(index) => (*index as f64).into(), + } +} + +pub(crate) mod error { + use automerge::{AutomergeError, LoadChangeError}; + use wasm_bindgen::JsValue; + + #[derive(Debug, thiserror::Error)] + pub enum BadJSChanges { + #[error("the changes were not an array of Uint8Array")] + ChangesNotArray, + #[error("change {0} was not a Uint8Array")] + ElemNotUint8Array(usize), + #[error("error loading change {0}: {1}")] + BadChange(usize, LoadChangeError), + } + + #[derive(Debug, thiserror::Error)] + pub enum BadChangeHashes { + #[error("the change hashes were not an array of strings")] + NotArray, + #[error("could not decode hash {0}: {1}")] + BadElem(usize, BadChangeHash), + } + + impl From for JsValue { + fn from(e: BadChangeHashes) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum BadChangeHashSet { + #[error("not an object")] + NotObject, + #[error(transparent)] + GetProp(#[from] GetProp), + #[error("unable to getOwnProperties")] + ListProp, + #[error("unable to parse hash from {0:?}: {1}")] + BadHash(wasm_bindgen::JsValue, BadChangeHash), + } + + #[derive(Debug, thiserror::Error)] + pub enum BadChangeHash { + #[error("change hash was not a string")] + NotString, + #[error(transparent)] + Parse(#[from] automerge::ParseChangeHashError), + } + + impl From for JsValue { + fn from(e: BadChangeHash) -> Self { + 
JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum BadSyncState { + #[error(transparent)] + GetProp(#[from] GetProp), + #[error("bad sharedHeads: {0}")] + BadSharedHeads(BadChangeHashes), + #[error("bad lastSentHeads: {0}")] + BadLastSentHeads(BadChangeHashes), + #[error("bad theirHeads: {0}")] + BadTheirHeads(BadChangeHashes), + #[error("bad theirNeed: {0}")] + BadTheirNeed(BadChangeHashes), + #[error("bad theirHave: {0}")] + BadTheirHave(BadHaves), + #[error("bad sentHashes: {0}")] + BadSentHashes(BadChangeHashSet), + #[error("inFlight not a boolean")] + InFlightNotBoolean, + } + + impl From for JsValue { + fn from(e: BadSyncState) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + #[error("unable to get property {property}: {error:?}")] + pub struct GetProp { + pub(super) property: String, + pub(super) error: wasm_bindgen::JsValue, + } + + impl From for JsValue { + fn from(e: GetProp) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + #[error("error setting property {property:?} on JS value: {error:?}")] + pub struct SetProp { + pub(super) property: JsValue, + pub(super) error: JsValue, + } + + impl From for JsValue { + fn from(e: SetProp) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum BadHave { + #[error("bad lastSync: {0}")] + BadLastSync(BadChangeHashes), + #[error("bad bloom: {0}")] + BadBloom(BadBloom), + #[error(transparent)] + GetHaveProp(#[from] GetProp), + } + + #[derive(Debug, thiserror::Error)] + pub enum BadHaves { + #[error("value was not an array")] + NotArray, + #[error("error loading have at index {0}: {1}")] + BadElem(usize, BadHave), + } + + #[derive(Debug, thiserror::Error)] + pub enum BadBloom { + #[error("the value was not a Uint8Array")] + NotU8Array, + #[error("unable to decode: {0}")] + Decode(#[from] automerge::sync::DecodeBloomError), + } + + #[derive(Debug, 
thiserror::Error)] + pub enum Export { + #[error(transparent)] + Set(#[from] SetProp), + #[error("unable to delete prop {prop}: {err:?}")] + Delete { prop: String, err: JsValue }, + #[error("unable to set hidden property {0}")] + SetHidden(&'static str), + #[error("data handler for type {0} did not return a valid object")] + InvalidDataHandler(String), + #[error("error calling data handler for type {0}: {1:?}")] + CallDataHandler(String, JsValue), + #[error(transparent)] + GetProp(#[from] GetProp), + #[error(transparent)] + InvalidDatatype(#[from] crate::value::InvalidDatatype), + #[error("unable to get the splice function: {0:?}")] + GetSplice(JsValue), + #[error("error calling splice: {0:?}")] + CallSplice(JsValue), + #[error(transparent)] + Automerge(#[from] AutomergeError), + } + + impl From for JsValue { + fn from(e: Export) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum ApplyPatch { + #[error(transparent)] + Export(#[from] Export), + #[error("cannot delete from a seq")] + DeleteKeyFromSeq, + #[error("cannot put key in seq")] + PutKeyInSeq, + #[error("cannot increment a non-numeric value")] + IncrementNonNumeric, + #[error("cannot increment a key in a seq")] + IncrementKeyInSeq, + #[error("cannot increment index in a map")] + IncrementIndexInMap, + #[error("cannot insert into a map")] + InsertInMap, + #[error("cannot splice into a map")] + SpliceInMap, + #[error("cannot splice text into a seq")] + SpliceTextInSeq, + #[error("cannot splice text into a map")] + SpliceTextInMap, + #[error("cannot put a seq index in a map")] + PutIdxInMap, + #[error(transparent)] + GetProp(#[from] GetProp), + #[error(transparent)] + SetProp(#[from] SetProp), + #[error(transparent)] + Automerge(#[from] AutomergeError), + } + + impl From for JsValue { + fn from(e: ApplyPatch) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum BadSyncMessage { + #[error(transparent)] + GetProp(#[from] 
GetProp), + #[error("unable to read haves: {0}")] + BadHaves(#[from] BadHaves), + #[error("could not read changes: {0}")] + BadJSChanges(#[from] BadJSChanges), + #[error("could not read heads: {0}")] + BadHeads(BadChangeHashes), + #[error("could not read need: {0}")] + BadNeed(BadChangeHashes), + } + + impl From for JsValue { + fn from(e: BadSyncMessage) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum ImportObj { + #[error("obj id was not a string")] + NotString, + #[error("invalid path {0}: {1}")] + InvalidPath(String, ImportPath), + #[error("unable to import object id: {0}")] + BadImport(AutomergeError), + } + + impl From for JsValue { + fn from(e: ImportObj) -> Self { + JsValue::from(format!("invalid object ID: {}", e)) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum ImportPath { + #[error(transparent)] + Automerge(#[from] AutomergeError), + #[error("path component {0} ({1}) should be an integer to index a sequence")] + IndexNotInteger(usize, String), + #[error("path component {0} ({1}) referenced a nonexistent object")] + NonExistentObject(usize, String), + #[error("path did not refer to an object")] + NotAnObject, + } + + #[derive(Debug, thiserror::Error)] + #[error("given property was not a string or integer")] + pub struct InvalidProp; + + #[derive(Debug, thiserror::Error)] + #[error("given property was not a string or integer")] + pub struct InvalidValue; +} diff --git a/rust/automerge-wasm/src/lib.rs b/rust/automerge-wasm/src/lib.rs new file mode 100644 index 00000000..09072ca7 --- /dev/null +++ b/rust/automerge-wasm/src/lib.rs @@ -0,0 +1,1155 @@ +#![doc( + html_logo_url = "https://raw.githubusercontent.com/automerge/automerge-rs/main/img/brandmark.svg", + html_favicon_url = "https:///raw.githubusercontent.com/automerge/automerge-rs/main/img/favicon.ico" +)] +#![warn( + missing_debug_implementations, + // missing_docs, // TODO: add documentation! 
+ rust_2021_compatibility, + rust_2018_idioms, + unreachable_pub, + bad_style, + dead_code, + improper_ctypes, + non_shorthand_field_patterns, + no_mangle_generic_items, + overflowing_literals, + path_statements, + patterns_in_fns_without_body, + private_in_public, + unconditional_recursion, + unused, + unused_allocation, + unused_comparisons, + unused_parens, + while_true +)] +#![allow(clippy::unused_unit)] +use am::transaction::CommitOptions; +use am::transaction::{Observed, Transactable, UnObserved}; +use am::ScalarValue; +use automerge as am; +use automerge::{sync::SyncDoc, Change, ObjId, Prop, ReadDoc, TextEncoding, Value, ROOT}; +use js_sys::{Array, Function, Object, Uint8Array}; +use serde::ser::Serialize; +use std::borrow::Cow; +use std::collections::HashMap; +use std::collections::HashSet; +use std::convert::TryInto; +use wasm_bindgen::prelude::*; +use wasm_bindgen::JsCast; + +mod interop; +mod observer; +mod sequence_tree; +mod sync; +mod value; + +use observer::Observer; + +use interop::{alloc, get_heads, import_obj, js_set, to_js_err, to_prop, AR, JS}; +use sync::SyncState; +use value::Datatype; + +use crate::interop::SubValIter; + +#[allow(unused_macros)] +macro_rules! 
log { + ( $( $t:tt )* ) => { + web_sys::console::log_1(&format!( $( $t )* ).into()); + }; +} + +type AutoCommit = am::AutoCommitWithObs>; + +#[cfg(feature = "wee_alloc")] +#[global_allocator] +static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; + +/// How text is represented in materialized objects on the JS side +#[derive(Debug, Eq, PartialEq, Clone, Copy)] +#[wasm_bindgen] +pub enum TextRepresentation { + /// As an array of characters and objects + Array, + /// As a single JS string + String, +} + +impl std::default::Default for TextRepresentation { + fn default() -> Self { + TextRepresentation::Array + } +} + +#[wasm_bindgen] +#[derive(Debug)] +pub struct Automerge { + doc: AutoCommit, + freeze: bool, + external_types: HashMap, + text_rep: TextRepresentation, +} + +#[wasm_bindgen] +impl Automerge { + pub fn new( + actor: Option, + text_rep: TextRepresentation, + ) -> Result { + let mut doc = AutoCommit::default().with_encoding(TextEncoding::Utf16); + if let Some(a) = actor { + let a = automerge::ActorId::from(hex::decode(a)?.to_vec()); + doc.set_actor(a); + } + Ok(Automerge { + doc, + freeze: false, + external_types: HashMap::default(), + text_rep, + }) + } + + #[allow(clippy::should_implement_trait)] + pub fn clone(&mut self, actor: Option) -> Result { + let mut automerge = Automerge { + doc: self.doc.clone(), + freeze: self.freeze, + external_types: self.external_types.clone(), + text_rep: self.text_rep, + }; + if let Some(s) = actor { + let actor = automerge::ActorId::from(hex::decode(s)?.to_vec()); + automerge.doc.set_actor(actor); + } + Ok(automerge) + } + + pub fn fork( + &mut self, + actor: Option, + heads: JsValue, + ) -> Result { + let heads: Result, _> = JS(heads).try_into(); + let doc = if let Ok(heads) = heads { + self.doc.fork_at(&heads)? 
+ } else { + self.doc.fork() + }; + let mut automerge = Automerge { + doc, + freeze: self.freeze, + external_types: self.external_types.clone(), + text_rep: self.text_rep, + }; + if let Some(s) = actor { + let actor = + automerge::ActorId::from(hex::decode(s).map_err(error::BadActorId::from)?.to_vec()); + automerge.doc.set_actor(actor); + } + Ok(automerge) + } + + #[wasm_bindgen(js_name = pendingOps)] + pub fn pending_ops(&self) -> JsValue { + (self.doc.pending_ops() as u32).into() + } + + pub fn commit(&mut self, message: Option, time: Option) -> JsValue { + let mut commit_opts = CommitOptions::default(); + if let Some(message) = message { + commit_opts.set_message(message); + } + if let Some(time) = time { + commit_opts.set_time(time as i64); + } + let hash = self.doc.commit_with(commit_opts); + match hash { + Some(h) => JsValue::from_str(&hex::encode(h.0)), + None => JsValue::NULL, + } + } + + pub fn merge(&mut self, other: &mut Automerge) -> Result { + let heads = self.doc.merge(&mut other.doc)?; + let heads: Array = heads + .iter() + .map(|h| JsValue::from_str(&hex::encode(h.0))) + .collect(); + Ok(heads) + } + + pub fn rollback(&mut self) -> f64 { + self.doc.rollback() as f64 + } + + pub fn keys(&self, obj: JsValue, heads: Option) -> Result { + let (obj, _) = self.import(obj)?; + let result = if let Some(heads) = get_heads(heads)? { + self.doc + .keys_at(&obj, &heads) + .map(|s| JsValue::from_str(&s)) + .collect() + } else { + self.doc.keys(&obj).map(|s| JsValue::from_str(&s)).collect() + }; + Ok(result) + } + + pub fn text(&self, obj: JsValue, heads: Option) -> Result { + let (obj, _) = self.import(obj)?; + if let Some(heads) = get_heads(heads)? { + Ok(self.doc.text_at(&obj, &heads)?) + } else { + Ok(self.doc.text(&obj)?) 
+ } + } + + pub fn splice( + &mut self, + obj: JsValue, + start: f64, + delete_count: f64, + text: JsValue, + ) -> Result<(), error::Splice> { + let (obj, obj_type) = self.import(obj)?; + let start = start as usize; + let delete_count = delete_count as usize; + let vals = if let Some(t) = text.as_string() { + if obj_type == am::ObjType::Text && self.text_rep == TextRepresentation::String { + self.doc.splice_text(&obj, start, delete_count, &t)?; + return Ok(()); + } else { + t.chars() + .map(|c| ScalarValue::Str(c.to_string().into())) + .collect::>() + } + } else { + let mut vals = vec![]; + if let Ok(array) = text.dyn_into::() { + for (index, i) in array.iter().enumerate() { + let value = self + .import_scalar(&i, &None) + .ok_or(error::Splice::ValueNotPrimitive(index))?; + vals.push(value); + } + } + vals + }; + if !vals.is_empty() { + self.doc.splice(&obj, start, delete_count, vals)?; + } else { + // no vals given but we still need to call the text vs splice + // bc utf16 + match obj_type { + am::ObjType::List => { + self.doc.splice(&obj, start, delete_count, vals)?; + } + am::ObjType::Text => match self.text_rep { + TextRepresentation::String => { + self.doc.splice_text(&obj, start, delete_count, "")?; + } + TextRepresentation::Array => { + self.doc.splice(&obj, start, delete_count, vals)?; + } + }, + _ => {} + } + } + Ok(()) + } + + pub fn push( + &mut self, + obj: JsValue, + value: JsValue, + datatype: JsValue, + ) -> Result<(), error::Insert> { + let (obj, _) = self.import(obj)?; + let value = self + .import_scalar(&value, &datatype.as_string()) + .ok_or(error::Insert::ValueNotPrimitive)?; + let index = self.doc.length(&obj); + self.doc.insert(&obj, index, value)?; + Ok(()) + } + + #[wasm_bindgen(js_name = pushObject)] + pub fn push_object( + &mut self, + obj: JsValue, + value: JsValue, + ) -> Result, error::InsertObject> { + let (obj, _) = self.import(obj)?; + let imported_obj = import_obj(&value, &None)?; + let index = self.doc.length(&obj); + let opid = 
self + .doc + .insert_object(&obj, index, imported_obj.objtype())?; + if let Some(s) = imported_obj.text() { + match self.text_rep { + TextRepresentation::String => { + self.doc.splice_text(&opid, 0, 0, s)?; + } + TextRepresentation::Array => { + self.subset::(&opid, imported_obj.subvals())?; + } + } + } else { + self.subset::(&opid, imported_obj.subvals())?; + } + Ok(opid.to_string().into()) + } + + pub fn insert( + &mut self, + obj: JsValue, + index: f64, + value: JsValue, + datatype: JsValue, + ) -> Result<(), error::Insert> { + let (obj, _) = self.import(obj)?; + let value = self + .import_scalar(&value, &datatype.as_string()) + .ok_or(error::Insert::ValueNotPrimitive)?; + self.doc.insert(&obj, index as usize, value)?; + Ok(()) + } + + #[wasm_bindgen(js_name = insertObject)] + pub fn insert_object( + &mut self, + obj: JsValue, + index: f64, + value: JsValue, + ) -> Result, error::InsertObject> { + let (obj, _) = self.import(obj)?; + let imported_obj = import_obj(&value, &None)?; + let opid = self + .doc + .insert_object(&obj, index as usize, imported_obj.objtype())?; + if let Some(s) = imported_obj.text() { + match self.text_rep { + TextRepresentation::String => { + self.doc.splice_text(&opid, 0, 0, s)?; + } + TextRepresentation::Array => { + self.subset::(&opid, imported_obj.subvals())?; + } + } + } else { + self.subset::(&opid, imported_obj.subvals())?; + } + Ok(opid.to_string().into()) + } + + pub fn put( + &mut self, + obj: JsValue, + prop: JsValue, + value: JsValue, + datatype: JsValue, + ) -> Result<(), error::Insert> { + let (obj, _) = self.import(obj)?; + let prop = self.import_prop(prop)?; + let value = self + .import_scalar(&value, &datatype.as_string()) + .ok_or(error::Insert::ValueNotPrimitive)?; + self.doc.put(&obj, prop, value)?; + Ok(()) + } + + #[wasm_bindgen(js_name = putObject)] + pub fn put_object( + &mut self, + obj: JsValue, + prop: JsValue, + value: JsValue, + ) -> Result { + let (obj, _) = self.import(obj)?; + let prop = 
self.import_prop(prop)?; + let imported_obj = import_obj(&value, &None)?; + let opid = self.doc.put_object(&obj, prop, imported_obj.objtype())?; + if let Some(s) = imported_obj.text() { + match self.text_rep { + TextRepresentation::String => { + self.doc.splice_text(&opid, 0, 0, s)?; + } + TextRepresentation::Array => { + self.subset::(&opid, imported_obj.subvals())?; + } + } + } else { + self.subset::(&opid, imported_obj.subvals())?; + } + Ok(opid.to_string().into()) + } + + fn subset<'a, E, I>(&mut self, obj: &am::ObjId, vals: I) -> Result<(), E> + where + I: IntoIterator, JsValue)>, + E: From + + From + + From, + { + for (p, v) in vals { + let (value, subvals) = self.import_value(v.as_ref(), None)?; + //let opid = self.0.set(id, p, value)?; + let opid = match (p.as_ref(), value) { + (Prop::Map(s), Value::Object(objtype)) => { + Some(self.doc.put_object(obj, s, objtype)?) + } + (Prop::Map(s), Value::Scalar(scalar)) => { + self.doc.put(obj, s, scalar.into_owned())?; + None + } + (Prop::Seq(i), Value::Object(objtype)) => { + Some(self.doc.insert_object(obj, *i, objtype)?) + } + (Prop::Seq(i), Value::Scalar(scalar)) => { + self.doc.insert(obj, *i, scalar.into_owned())?; + None + } + }; + if let Some(opid) = opid { + self.subset::(&opid, SubValIter::Slice(subvals.as_slice().iter()))?; + } + } + Ok(()) + } + + pub fn increment( + &mut self, + obj: JsValue, + prop: JsValue, + value: JsValue, + ) -> Result<(), error::Increment> { + let (obj, _) = self.import(obj)?; + let prop = self.import_prop(prop)?; + let value: f64 = value.as_f64().ok_or(error::Increment::ValueNotNumeric)?; + self.doc.increment(&obj, prop, value as i64)?; + Ok(()) + } + + #[wasm_bindgen(js_name = get)] + pub fn get( + &self, + obj: JsValue, + prop: JsValue, + heads: Option, + ) -> Result { + let (obj, _) = self.import(obj)?; + let prop = to_prop(prop); + let heads = get_heads(heads)?; + if let Ok(prop) = prop { + let value = if let Some(h) = heads { + self.doc.get_at(&obj, prop, &h)? 
+ } else { + self.doc.get(&obj, prop)? + }; + if let Some((value, id)) = value { + match alloc(&value, self.text_rep) { + (datatype, js_value) if datatype.is_scalar() => Ok(js_value), + _ => Ok(id.to_string().into()), + } + } else { + Ok(JsValue::undefined()) + } + } else { + Ok(JsValue::undefined()) + } + } + + #[wasm_bindgen(js_name = getWithType)] + pub fn get_with_type( + &self, + obj: JsValue, + prop: JsValue, + heads: Option, + ) -> Result { + let (obj, _) = self.import(obj)?; + let prop = to_prop(prop); + let heads = get_heads(heads)?; + if let Ok(prop) = prop { + let value = if let Some(h) = heads { + self.doc.get_at(&obj, prop, &h)? + } else { + self.doc.get(&obj, prop)? + }; + if let Some(value) = value { + match &value { + (Value::Object(obj_type), obj_id) => { + let result = Array::new(); + result.push(&obj_type.to_string().into()); + result.push(&obj_id.to_string().into()); + Ok(result.into()) + } + (Value::Scalar(_), _) => { + let result = Array::new(); + let (datatype, value) = alloc(&value.0, self.text_rep); + result.push(&datatype.into()); + result.push(&value); + Ok(result.into()) + } + } + } else { + Ok(JsValue::null()) + } + } else { + Ok(JsValue::null()) + } + } + + #[wasm_bindgen(js_name = getAll)] + pub fn get_all( + &self, + obj: JsValue, + arg: JsValue, + heads: Option, + ) -> Result { + let (obj, _) = self.import(obj)?; + let result = Array::new(); + let prop = to_prop(arg); + if let Ok(prop) = prop { + let values = if let Some(heads) = get_heads(heads)? 
{ + self.doc.get_all_at(&obj, prop, &heads) + } else { + self.doc.get_all(&obj, prop) + }?; + for (value, id) in values { + let sub = Array::new(); + let (datatype, js_value) = alloc(&value, self.text_rep); + sub.push(&datatype.into()); + if value.is_scalar() { + sub.push(&js_value); + } + sub.push(&id.to_string().into()); + result.push(&JsValue::from(&sub)); + } + } + Ok(result) + } + + #[wasm_bindgen(js_name = enableFreeze)] + pub fn enable_freeze(&mut self, enable: JsValue) -> Result { + let enable = enable + .as_bool() + .ok_or_else(|| to_js_err("must pass a bool to enableFreeze"))?; + let old_freeze = self.freeze; + self.freeze = enable; + Ok(old_freeze.into()) + } + + #[wasm_bindgen(js_name = enablePatches)] + pub fn enable_patches(&mut self, enable: JsValue) -> Result { + let enable = enable + .as_bool() + .ok_or_else(|| to_js_err("must pass a bool to enablePatches"))?; + let old_enabled = self.doc.observer().enable(enable); + self.doc.observer().set_text_rep(self.text_rep); + Ok(old_enabled.into()) + } + + #[wasm_bindgen(js_name = registerDatatype)] + pub fn register_datatype( + &mut self, + datatype: JsValue, + function: JsValue, + ) -> Result<(), value::InvalidDatatype> { + let datatype = Datatype::try_from(datatype)?; + if let Ok(function) = function.dyn_into::() { + self.external_types.insert(datatype, function); + } else { + self.external_types.remove(&datatype); + } + Ok(()) + } + + #[wasm_bindgen(js_name = applyPatches)] + pub fn apply_patches( + &mut self, + object: JsValue, + meta: JsValue, + callback: JsValue, + ) -> Result { + let mut object = object + .dyn_into::() + .map_err(|_| error::ApplyPatch::NotObjectd)?; + let patches = self.doc.observer().take_patches(); + let callback = callback.dyn_into::().ok(); + + // even if there are no patches we may need to update the meta object + // which requires that we update the object too + if patches.is_empty() && !meta.is_undefined() { + let (obj, datatype, id) = self.unwrap_object(&object)?; + object = 
Object::assign(&Object::new(), &obj); + object = self.wrap_object(object, datatype, &id, &meta)?; + } + + let mut exposed = HashSet::default(); + + let before = object.clone(); + + for p in &patches { + object = self.apply_patch(object, p, 0, &meta, &mut exposed)?; + } + + if let Some(c) = &callback { + if !patches.is_empty() { + let patches: Array = patches + .into_iter() + .map(JsValue::try_from) + .collect::>()?; + c.call3(&JsValue::undefined(), &patches.into(), &before, &object) + .map_err(error::ApplyPatch::PatchCallback)?; + } + } + + self.finalize_exposed(&object, exposed, &meta)?; + + Ok(object.into()) + } + + #[wasm_bindgen(js_name = popPatches)] + pub fn pop_patches(&mut self) -> Result { + // transactions send out observer updates as they occur, not waiting for them to be + // committed. + // If we pop the patches then we won't be able to revert them. + + let patches = self.doc.observer().take_patches(); + let result = Array::new(); + for p in patches { + result.push(&p.try_into()?); + } + Ok(result) + } + + pub fn length(&self, obj: JsValue, heads: Option) -> Result { + let (obj, _) = self.import(obj)?; + if let Some(heads) = get_heads(heads)? 
{ + Ok(self.doc.length_at(&obj, &heads) as f64) + } else { + Ok(self.doc.length(&obj) as f64) + } + } + + pub fn delete(&mut self, obj: JsValue, prop: JsValue) -> Result<(), error::Get> { + let (obj, _) = self.import(obj)?; + let prop = to_prop(prop)?; + self.doc.delete(&obj, prop)?; + Ok(()) + } + + pub fn save(&mut self) -> Uint8Array { + Uint8Array::from(self.doc.save().as_slice()) + } + + #[wasm_bindgen(js_name = saveIncremental)] + pub fn save_incremental(&mut self) -> Uint8Array { + let bytes = self.doc.save_incremental(); + Uint8Array::from(bytes.as_slice()) + } + + #[wasm_bindgen(js_name = loadIncremental)] + pub fn load_incremental(&mut self, data: Uint8Array) -> Result { + let data = data.to_vec(); + let len = self.doc.load_incremental(&data)?; + Ok(len as f64) + } + + #[wasm_bindgen(js_name = applyChanges)] + pub fn apply_changes(&mut self, changes: JsValue) -> Result<(), error::ApplyChangesError> { + let changes: Vec<_> = JS(changes).try_into()?; + self.doc.apply_changes(changes)?; + Ok(()) + } + + #[wasm_bindgen(js_name = getChanges)] + pub fn get_changes(&mut self, have_deps: JsValue) -> Result { + let deps: Vec<_> = JS(have_deps).try_into()?; + let changes = self.doc.get_changes(&deps)?; + let changes: Array = changes + .iter() + .map(|c| Uint8Array::from(c.raw_bytes())) + .collect(); + Ok(changes) + } + + #[wasm_bindgen(js_name = getChangeByHash)] + pub fn get_change_by_hash( + &mut self, + hash: JsValue, + ) -> Result { + let hash = JS(hash).try_into()?; + let change = self.doc.get_change_by_hash(&hash); + if let Some(c) = change { + Ok(Uint8Array::from(c.raw_bytes()).into()) + } else { + Ok(JsValue::null()) + } + } + + #[wasm_bindgen(js_name = getChangesAdded)] + pub fn get_changes_added(&mut self, other: &mut Automerge) -> Array { + let changes = self.doc.get_changes_added(&mut other.doc); + let changes: Array = changes + .iter() + .map(|c| Uint8Array::from(c.raw_bytes())) + .collect(); + changes + } + + #[wasm_bindgen(js_name = getHeads)] + pub 
fn get_heads(&mut self) -> Array { + let heads = self.doc.get_heads(); + let heads: Array = heads + .iter() + .map(|h| JsValue::from_str(&hex::encode(h.0))) + .collect(); + heads + } + + #[wasm_bindgen(js_name = getActorId)] + pub fn get_actor_id(&self) -> String { + let actor = self.doc.get_actor(); + actor.to_string() + } + + #[wasm_bindgen(js_name = getLastLocalChange)] + pub fn get_last_local_change(&mut self) -> JsValue { + if let Some(change) = self.doc.get_last_local_change() { + Uint8Array::from(change.raw_bytes()).into() + } else { + JsValue::null() + } + } + + pub fn dump(&mut self) { + self.doc.dump() + } + + #[wasm_bindgen(js_name = getMissingDeps)] + pub fn get_missing_deps(&mut self, heads: Option) -> Result { + let heads = get_heads(heads)?.unwrap_or_default(); + let deps = self.doc.get_missing_deps(&heads); + let deps: Array = deps + .iter() + .map(|h| JsValue::from_str(&hex::encode(h.0))) + .collect(); + Ok(deps) + } + + #[wasm_bindgen(js_name = receiveSyncMessage)] + pub fn receive_sync_message( + &mut self, + state: &mut SyncState, + message: Uint8Array, + ) -> Result<(), error::ReceiveSyncMessage> { + let message = message.to_vec(); + let message = am::sync::Message::decode(message.as_slice())?; + self.doc + .sync() + .receive_sync_message(&mut state.0, message)?; + Ok(()) + } + + #[wasm_bindgen(js_name = generateSyncMessage)] + pub fn generate_sync_message(&mut self, state: &mut SyncState) -> JsValue { + if let Some(message) = self.doc.sync().generate_sync_message(&mut state.0) { + Uint8Array::from(message.encode().as_slice()).into() + } else { + JsValue::null() + } + } + + #[wasm_bindgen(js_name = toJS)] + pub fn to_js(&mut self, meta: JsValue) -> Result { + self.export_object(&ROOT, Datatype::Map, None, &meta) + } + + pub fn materialize( + &mut self, + obj: JsValue, + heads: Option, + meta: JsValue, + ) -> Result { + let (obj, obj_type) = self.import(obj).unwrap_or((ROOT, am::ObjType::Map)); + let heads = get_heads(heads)?; + let _patches = 
self.doc.observer().take_patches(); // throw away patches + Ok(self.export_object(&obj, obj_type.into(), heads.as_ref(), &meta)?) + } + + #[wasm_bindgen(js_name = emptyChange)] + pub fn empty_change(&mut self, message: Option, time: Option) -> JsValue { + let time = time.map(|f| f as i64); + let options = CommitOptions { message, time }; + let hash = self.doc.empty_change(options); + JsValue::from_str(&hex::encode(hash)) + } +} + +#[wasm_bindgen(js_name = create)] +pub fn init(text_v2: bool, actor: Option) -> Result { + console_error_panic_hook::set_once(); + let text_rep = if text_v2 { + TextRepresentation::String + } else { + TextRepresentation::Array + }; + Automerge::new(actor, text_rep) +} + +#[wasm_bindgen(js_name = load)] +pub fn load( + data: Uint8Array, + text_v2: bool, + actor: Option, +) -> Result { + let data = data.to_vec(); + let text_rep = if text_v2 { + TextRepresentation::String + } else { + TextRepresentation::Array + }; + let mut doc = am::AutoCommitWithObs::::load(&data)? + .with_observer(Observer::default().with_text_rep(text_rep)) + .with_encoding(TextEncoding::Utf16); + if let Some(s) = actor { + let actor = + automerge::ActorId::from(hex::decode(s).map_err(error::BadActorId::from)?.to_vec()); + doc.set_actor(actor); + } + Ok(Automerge { + doc, + freeze: false, + external_types: HashMap::default(), + text_rep, + }) +} + +#[wasm_bindgen(js_name = encodeChange)] +pub fn encode_change(change: JsValue) -> Result { + // Alex: Technically we should be using serde_wasm_bindgen::from_value instead of into_serde. + // Unfortunately serde_wasm_bindgen::from_value fails for some inscrutable reason, so instead + // we use into_serde (sorry to future me). 
+ #[allow(deprecated)] + let change: am::ExpandedChange = change.into_serde()?; + let change: Change = change.into(); + Ok(Uint8Array::from(change.raw_bytes())) +} + +#[wasm_bindgen(js_name = decodeChange)] +pub fn decode_change(change: Uint8Array) -> Result { + let change = Change::from_bytes(change.to_vec())?; + let change: am::ExpandedChange = change.decode(); + let serializer = serde_wasm_bindgen::Serializer::json_compatible(); + Ok(change.serialize(&serializer)?) +} + +#[wasm_bindgen(js_name = initSyncState)] +pub fn init_sync_state() -> SyncState { + SyncState(am::sync::State::new()) +} + +// this is needed to be compatible with the automerge-js api +#[wasm_bindgen(js_name = importSyncState)] +pub fn import_sync_state(state: JsValue) -> Result { + Ok(SyncState(JS(state).try_into()?)) +} + +// this is needed to be compatible with the automerge-js api +#[wasm_bindgen(js_name = exportSyncState)] +pub fn export_sync_state(state: &SyncState) -> JsValue { + JS::from(state.0.clone()).into() +} + +#[wasm_bindgen(js_name = encodeSyncMessage)] +pub fn encode_sync_message(message: JsValue) -> Result { + let message: am::sync::Message = JS(message).try_into()?; + Ok(Uint8Array::from(message.encode().as_slice())) +} + +#[wasm_bindgen(js_name = decodeSyncMessage)] +pub fn decode_sync_message(msg: Uint8Array) -> Result { + let data = msg.to_vec(); + let msg = am::sync::Message::decode(&data)?; + let heads = AR::from(msg.heads.as_slice()); + let need = AR::from(msg.need.as_slice()); + let changes = AR::from(msg.changes.as_slice()); + let have = AR::from(msg.have.as_slice()); + let obj = Object::new().into(); + // SAFETY: we just created this object + js_set(&obj, "heads", heads).unwrap(); + js_set(&obj, "need", need).unwrap(); + js_set(&obj, "have", have).unwrap(); + js_set(&obj, "changes", changes).unwrap(); + Ok(obj) +} + +#[wasm_bindgen(js_name = encodeSyncState)] +pub fn encode_sync_state(state: &SyncState) -> Uint8Array { + Uint8Array::from(state.0.encode().as_slice()) 
+} + +#[wasm_bindgen(js_name = decodeSyncState)] +pub fn decode_sync_state(data: Uint8Array) -> Result { + SyncState::decode(data) +} + +pub mod error { + use automerge::AutomergeError; + use wasm_bindgen::JsValue; + + use crate::interop::{ + self, + error::{BadChangeHashes, BadJSChanges}, + }; + + #[derive(Debug, thiserror::Error)] + #[error("could not parse Actor ID as a hex string: {0}")] + pub struct BadActorId(#[from] hex::FromHexError); + + impl From for JsValue { + fn from(s: BadActorId) -> Self { + JsValue::from(s.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum ApplyChangesError { + #[error(transparent)] + DecodeChanges(#[from] BadJSChanges), + #[error("error applying changes: {0}")] + Apply(#[from] AutomergeError), + } + + impl From for JsValue { + fn from(e: ApplyChangesError) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum Fork { + #[error(transparent)] + BadActor(#[from] BadActorId), + #[error(transparent)] + Automerge(#[from] AutomergeError), + #[error(transparent)] + BadChangeHashes(#[from] BadChangeHashes), + } + + impl From for JsValue { + fn from(f: Fork) -> Self { + JsValue::from(f.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + #[error(transparent)] + pub struct Merge(#[from] AutomergeError); + + impl From for JsValue { + fn from(e: Merge) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum Get { + #[error("invalid object ID: {0}")] + ImportObj(#[from] interop::error::ImportObj), + #[error(transparent)] + Automerge(#[from] AutomergeError), + #[error("bad heads: {0}")] + BadHeads(#[from] interop::error::BadChangeHashes), + #[error(transparent)] + InvalidProp(#[from] interop::error::InvalidProp), + } + + impl From for JsValue { + fn from(e: Get) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum Splice { + #[error("invalid object ID: {0}")] + ImportObj(#[from] 
interop::error::ImportObj), + #[error(transparent)] + Automerge(#[from] AutomergeError), + #[error("value at {0} in values to insert was not a primitive")] + ValueNotPrimitive(usize), + } + + impl From for JsValue { + fn from(e: Splice) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum Insert { + #[error("invalid object id: {0}")] + ImportObj(#[from] interop::error::ImportObj), + #[error("the value to insert was not a primitive")] + ValueNotPrimitive, + #[error(transparent)] + Automerge(#[from] AutomergeError), + #[error(transparent)] + InvalidProp(#[from] interop::error::InvalidProp), + #[error(transparent)] + InvalidValue(#[from] interop::error::InvalidValue), + } + + impl From for JsValue { + fn from(e: Insert) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum InsertObject { + #[error("invalid object id: {0}")] + ImportObj(#[from] interop::error::ImportObj), + #[error("the value to insert must be an object")] + ValueNotObject, + #[error(transparent)] + Automerge(#[from] AutomergeError), + #[error(transparent)] + InvalidProp(#[from] interop::error::InvalidProp), + #[error(transparent)] + InvalidValue(#[from] interop::error::InvalidValue), + } + + impl From for JsValue { + fn from(e: InsertObject) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum Increment { + #[error("invalid object id: {0}")] + ImportObj(#[from] interop::error::ImportObj), + #[error(transparent)] + InvalidProp(#[from] interop::error::InvalidProp), + #[error("value was not numeric")] + ValueNotNumeric, + #[error(transparent)] + Automerge(#[from] AutomergeError), + } + + impl From for JsValue { + fn from(e: Increment) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum BadSyncMessage { + #[error("could not decode sync message: {0}")] + ReadMessage(#[from] automerge::sync::ReadMessageError), + } + + impl 
From for JsValue { + fn from(e: BadSyncMessage) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum ApplyPatch { + #[error(transparent)] + Interop(#[from] interop::error::ApplyPatch), + #[error(transparent)] + Export(#[from] interop::error::Export), + #[error("patch was not an object")] + NotObjectd, + #[error("error calling patch callback: {0:?}")] + PatchCallback(JsValue), + } + + impl From for JsValue { + fn from(e: ApplyPatch) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + #[error("unable to build patches: {0}")] + pub struct PopPatches(#[from] interop::error::Export); + + impl From for JsValue { + fn from(e: PopPatches) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum Materialize { + #[error(transparent)] + Export(#[from] interop::error::Export), + #[error("bad heads: {0}")] + Heads(#[from] interop::error::BadChangeHashes), + } + + impl From for JsValue { + fn from(e: Materialize) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum ReceiveSyncMessage { + #[error(transparent)] + Decode(#[from] automerge::sync::ReadMessageError), + #[error(transparent)] + Automerge(#[from] AutomergeError), + } + + impl From for JsValue { + fn from(e: ReceiveSyncMessage) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum Load { + #[error(transparent)] + Automerge(#[from] AutomergeError), + #[error(transparent)] + BadActor(#[from] BadActorId), + } + + impl From for JsValue { + fn from(e: Load) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + #[error("Unable to read JS change: {0}")] + pub struct EncodeChange(#[from] serde_json::Error); + + impl From for JsValue { + fn from(e: EncodeChange) -> Self { + JsValue::from(e.to_string()) + } + } + + #[derive(Debug, thiserror::Error)] + pub enum DecodeChange { + 
#[error(transparent)] + Load(#[from] automerge::LoadChangeError), + #[error(transparent)] + Serialize(#[from] serde_wasm_bindgen::Error), + } + + impl From for JsValue { + fn from(e: DecodeChange) -> Self { + JsValue::from(e.to_string()) + } + } +} diff --git a/rust/automerge-wasm/src/observer.rs b/rust/automerge-wasm/src/observer.rs new file mode 100644 index 00000000..2351c762 --- /dev/null +++ b/rust/automerge-wasm/src/observer.rs @@ -0,0 +1,518 @@ +#![allow(dead_code)] + +use std::borrow::Cow; + +use crate::{ + interop::{self, alloc, js_set}, + TextRepresentation, +}; +use automerge::{ObjId, OpObserver, Prop, ReadDoc, ScalarValue, Value}; +use js_sys::{Array, Object}; +use wasm_bindgen::prelude::*; + +use crate::sequence_tree::SequenceTree; + +#[derive(Debug, Clone, Default)] +pub(crate) struct Observer { + enabled: bool, + patches: Vec, + text_rep: TextRepresentation, +} + +impl Observer { + pub(crate) fn take_patches(&mut self) -> Vec { + std::mem::take(&mut self.patches) + } + pub(crate) fn enable(&mut self, enable: bool) -> bool { + if self.enabled && !enable { + self.patches.truncate(0) + } + let old_enabled = self.enabled; + self.enabled = enable; + old_enabled + } + + fn get_path(&mut self, doc: &R, obj: &ObjId) -> Option> { + match doc.parents(obj) { + Ok(parents) => parents.visible_path(), + Err(e) => { + automerge::log!("error generating patch : {:?}", e); + None + } + } + } + + pub(crate) fn with_text_rep(mut self, text_rep: TextRepresentation) -> Self { + self.text_rep = text_rep; + self + } + + pub(crate) fn set_text_rep(&mut self, text_rep: TextRepresentation) { + self.text_rep = text_rep; + } +} + +#[derive(Debug, Clone)] +pub(crate) enum Patch { + PutMap { + obj: ObjId, + path: Vec<(ObjId, Prop)>, + key: String, + value: (Value<'static>, ObjId), + expose: bool, + }, + PutSeq { + obj: ObjId, + path: Vec<(ObjId, Prop)>, + index: usize, + value: (Value<'static>, ObjId), + expose: bool, + }, + Insert { + obj: ObjId, + path: Vec<(ObjId, Prop)>, + 
index: usize, + values: SequenceTree<(Value<'static>, ObjId)>, + }, + SpliceText { + obj: ObjId, + path: Vec<(ObjId, Prop)>, + index: usize, + value: SequenceTree, + }, + Increment { + obj: ObjId, + path: Vec<(ObjId, Prop)>, + prop: Prop, + value: i64, + }, + DeleteMap { + obj: ObjId, + path: Vec<(ObjId, Prop)>, + key: String, + }, + DeleteSeq { + obj: ObjId, + path: Vec<(ObjId, Prop)>, + index: usize, + length: usize, + }, +} + +impl OpObserver for Observer { + fn insert( + &mut self, + doc: &R, + obj: ObjId, + index: usize, + tagged_value: (Value<'_>, ObjId), + ) { + if self.enabled { + let value = (tagged_value.0.to_owned(), tagged_value.1); + if let Some(Patch::Insert { + obj: tail_obj, + index: tail_index, + values, + .. + }) = self.patches.last_mut() + { + let range = *tail_index..=*tail_index + values.len(); + if tail_obj == &obj && range.contains(&index) { + values.insert(index - *tail_index, value); + return; + } + } + if let Some(path) = self.get_path(doc, &obj) { + let mut values = SequenceTree::new(); + values.push(value); + let patch = Patch::Insert { + path, + obj, + index, + values, + }; + self.patches.push(patch); + } + } + } + + fn splice_text(&mut self, doc: &R, obj: ObjId, index: usize, value: &str) { + if self.enabled { + if self.text_rep == TextRepresentation::Array { + for (i, c) in value.chars().enumerate() { + self.insert( + doc, + obj.clone(), + index + i, + ( + Value::Scalar(Cow::Owned(ScalarValue::Str(c.to_string().into()))), + ObjId::Root, // We hope this is okay + ), + ); + } + return; + } + if let Some(Patch::SpliceText { + obj: tail_obj, + index: tail_index, + value: prev_value, + .. 
+ }) = self.patches.last_mut() + { + let range = *tail_index..=*tail_index + prev_value.len(); + if tail_obj == &obj && range.contains(&index) { + let i = index - *tail_index; + for (n, ch) in value.encode_utf16().enumerate() { + prev_value.insert(i + n, ch) + } + return; + } + } + if let Some(path) = self.get_path(doc, &obj) { + let mut v = SequenceTree::new(); + for ch in value.encode_utf16() { + v.push(ch) + } + let patch = Patch::SpliceText { + path, + obj, + index, + value: v, + }; + self.patches.push(patch); + } + } + } + + fn delete_seq(&mut self, doc: &R, obj: ObjId, index: usize, length: usize) { + if self.enabled { + match self.patches.last_mut() { + Some(Patch::SpliceText { + obj: tail_obj, + index: tail_index, + value, + .. + }) => { + let range = *tail_index..*tail_index + value.len(); + if tail_obj == &obj + && range.contains(&index) + && range.contains(&(index + length - 1)) + { + for _ in 0..length { + value.remove(index - *tail_index); + } + return; + } + } + Some(Patch::Insert { + obj: tail_obj, + index: tail_index, + values, + .. + }) => { + let range = *tail_index..*tail_index + values.len(); + if tail_obj == &obj + && range.contains(&index) + && range.contains(&(index + length - 1)) + { + for _ in 0..length { + values.remove(index - *tail_index); + } + return; + } + } + Some(Patch::DeleteSeq { + obj: tail_obj, + index: tail_index, + length: tail_length, + .. 
+ }) => { + if tail_obj == &obj && index == *tail_index { + *tail_length += length; + return; + } + } + _ => {} + } + if let Some(path) = self.get_path(doc, &obj) { + let patch = Patch::DeleteSeq { + path, + obj, + index, + length, + }; + self.patches.push(patch) + } + } + } + + fn delete_map(&mut self, doc: &R, obj: ObjId, key: &str) { + if self.enabled { + if let Some(path) = self.get_path(doc, &obj) { + let patch = Patch::DeleteMap { + path, + obj, + key: key.to_owned(), + }; + self.patches.push(patch) + } + } + } + + fn put( + &mut self, + doc: &R, + obj: ObjId, + prop: Prop, + tagged_value: (Value<'_>, ObjId), + _conflict: bool, + ) { + if self.enabled { + let expose = false; + if let Some(path) = self.get_path(doc, &obj) { + let value = (tagged_value.0.to_owned(), tagged_value.1); + let patch = match prop { + Prop::Map(key) => Patch::PutMap { + path, + obj, + key, + value, + expose, + }, + Prop::Seq(index) => Patch::PutSeq { + path, + obj, + index, + value, + expose, + }, + }; + self.patches.push(patch); + } + } + } + + fn expose( + &mut self, + doc: &R, + obj: ObjId, + prop: Prop, + tagged_value: (Value<'_>, ObjId), + _conflict: bool, + ) { + if self.enabled { + let expose = true; + if let Some(path) = self.get_path(doc, &obj) { + let value = (tagged_value.0.to_owned(), tagged_value.1); + let patch = match prop { + Prop::Map(key) => Patch::PutMap { + path, + obj, + key, + value, + expose, + }, + Prop::Seq(index) => Patch::PutSeq { + path, + obj, + index, + value, + expose, + }, + }; + self.patches.push(patch); + } + } + } + + fn increment( + &mut self, + doc: &R, + obj: ObjId, + prop: Prop, + tagged_value: (i64, ObjId), + ) { + if self.enabled { + if let Some(path) = self.get_path(doc, &obj) { + let value = tagged_value.0; + self.patches.push(Patch::Increment { + path, + obj, + prop, + value, + }) + } + } + } + + fn text_as_seq(&self) -> bool { + self.text_rep == TextRepresentation::Array + } +} + +impl automerge::op_observer::BranchableObserver for Observer 
{ + fn merge(&mut self, other: &Self) { + self.patches.extend_from_slice(other.patches.as_slice()) + } + + fn branch(&self) -> Self { + Observer { + patches: vec![], + enabled: self.enabled, + text_rep: self.text_rep, + } + } +} + +fn prop_to_js(p: &Prop) -> JsValue { + match p { + Prop::Map(key) => JsValue::from_str(key), + Prop::Seq(index) => JsValue::from_f64(*index as f64), + } +} + +fn export_path(path: &[(ObjId, Prop)], end: &Prop) -> Array { + let result = Array::new(); + for p in path { + result.push(&prop_to_js(&p.1)); + } + result.push(&prop_to_js(end)); + result +} + +impl Patch { + pub(crate) fn path(&self) -> &[(ObjId, Prop)] { + match &self { + Self::PutMap { path, .. } => path.as_slice(), + Self::PutSeq { path, .. } => path.as_slice(), + Self::Increment { path, .. } => path.as_slice(), + Self::Insert { path, .. } => path.as_slice(), + Self::SpliceText { path, .. } => path.as_slice(), + Self::DeleteMap { path, .. } => path.as_slice(), + Self::DeleteSeq { path, .. } => path.as_slice(), + } + } + + pub(crate) fn obj(&self) -> &ObjId { + match &self { + Self::PutMap { obj, .. } => obj, + Self::PutSeq { obj, .. } => obj, + Self::Increment { obj, .. } => obj, + Self::Insert { obj, .. } => obj, + Self::SpliceText { obj, .. } => obj, + Self::DeleteMap { obj, .. } => obj, + Self::DeleteSeq { obj, .. } => obj, + } + } +} + +impl TryFrom for JsValue { + type Error = interop::error::Export; + + fn try_from(p: Patch) -> Result { + let result = Object::new(); + match p { + Patch::PutMap { + path, key, value, .. + } => { + js_set(&result, "action", "put")?; + js_set( + &result, + "path", + export_path(path.as_slice(), &Prop::Map(key)), + )?; + js_set( + &result, + "value", + alloc(&value.0, TextRepresentation::String).1, + )?; + Ok(result.into()) + } + Patch::PutSeq { + path, index, value, .. 
+ } => { + js_set(&result, "action", "put")?; + js_set( + &result, + "path", + export_path(path.as_slice(), &Prop::Seq(index)), + )?; + js_set( + &result, + "value", + alloc(&value.0, TextRepresentation::String).1, + )?; + Ok(result.into()) + } + Patch::Insert { + path, + index, + values, + .. + } => { + js_set(&result, "action", "insert")?; + js_set( + &result, + "path", + export_path(path.as_slice(), &Prop::Seq(index)), + )?; + js_set( + &result, + "values", + values + .iter() + .map(|v| alloc(&v.0, TextRepresentation::String).1) + .collect::(), + )?; + Ok(result.into()) + } + Patch::SpliceText { + path, index, value, .. + } => { + js_set(&result, "action", "splice")?; + js_set( + &result, + "path", + export_path(path.as_slice(), &Prop::Seq(index)), + )?; + let bytes: Vec = value.iter().cloned().collect(); + js_set(&result, "value", String::from_utf16_lossy(bytes.as_slice()))?; + Ok(result.into()) + } + Patch::Increment { + path, prop, value, .. + } => { + js_set(&result, "action", "inc")?; + js_set(&result, "path", export_path(path.as_slice(), &prop))?; + js_set(&result, "value", &JsValue::from_f64(value as f64))?; + Ok(result.into()) + } + Patch::DeleteMap { path, key, .. } => { + js_set(&result, "action", "del")?; + js_set( + &result, + "path", + export_path(path.as_slice(), &Prop::Map(key)), + )?; + Ok(result.into()) + } + Patch::DeleteSeq { + path, + index, + length, + .. 
+ } => { + js_set(&result, "action", "del")?; + js_set( + &result, + "path", + export_path(path.as_slice(), &Prop::Seq(index)), + )?; + if length > 1 { + js_set(&result, "length", length)?; + } + Ok(result.into()) + } + } + } +} diff --git a/automerge/src/sequence_tree.rs b/rust/automerge-wasm/src/sequence_tree.rs similarity index 72% rename from automerge/src/sequence_tree.rs rename to rust/automerge-wasm/src/sequence_tree.rs index ba5c7ff6..91b183a2 100644 --- a/automerge/src/sequence_tree.rs +++ b/rust/automerge-wasm/src/sequence_tree.rs @@ -4,41 +4,37 @@ use std::{ mem, }; -pub type SequenceTree = SequenceTreeInternal; +pub(crate) const B: usize = 16; +pub(crate) type SequenceTree = SequenceTreeInternal; #[derive(Clone, Debug)] -pub struct SequenceTreeInternal { - root_node: Option>, +pub(crate) struct SequenceTreeInternal { + root_node: Option>, } #[derive(Clone, Debug, PartialEq)] struct SequenceTreeNode { elements: Vec, - children: Vec>, + children: Vec>, length: usize, } -impl SequenceTreeInternal +impl SequenceTreeInternal where T: Clone + Debug, { /// Construct a new, empty, sequence. - pub fn new() -> Self { + pub(crate) fn new() -> Self { Self { root_node: None } } /// Get the length of the sequence. - pub fn len(&self) -> usize { + pub(crate) fn len(&self) -> usize { self.root_node.as_ref().map_or(0, |n| n.len()) } - /// Check if the sequence is empty. - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - /// Create an iterator through the sequence. - pub fn iter(&self) -> Iter<'_, T, B> { + pub(crate) fn iter(&self) -> Iter<'_, T> { Iter { inner: self, index: 0, @@ -50,7 +46,7 @@ where /// # Panics /// /// Panics if `index > len`. - pub fn insert(&mut self, index: usize, element: T) { + pub(crate) fn insert(&mut self, index: usize, element: T) { let old_len = self.len(); if let Some(root) = self.root_node.as_mut() { #[cfg(debug_assertions)] @@ -93,27 +89,22 @@ where } /// Push the `element` onto the back of the sequence. 
- pub fn push(&mut self, element: T) { + pub(crate) fn push(&mut self, element: T) { let l = self.len(); self.insert(l, element) } /// Get the `element` at `index` in the sequence. - pub fn get(&self, index: usize) -> Option<&T> { + pub(crate) fn get(&self, index: usize) -> Option<&T> { self.root_node.as_ref().and_then(|n| n.get(index)) } - /// Get the `element` at `index` in the sequence. - pub fn get_mut(&mut self, index: usize) -> Option<&mut T> { - self.root_node.as_mut().and_then(|n| n.get_mut(index)) - } - /// Removes the element at `index` from the sequence. /// /// # Panics /// /// Panics if `index` is out of bounds. - pub fn remove(&mut self, index: usize) -> T { + pub(crate) fn remove(&mut self, index: usize) -> T { if let Some(root) = self.root_node.as_mut() { #[cfg(debug_assertions)] let len = root.check(); @@ -134,18 +125,9 @@ where panic!("remove from empty tree") } } - - /// Update the `element` at `index` in the sequence, returning the old value. - /// - /// # Panics - /// - /// Panics if `index > len` - pub fn set(&mut self, index: usize, element: T) -> T { - self.root_node.as_mut().unwrap().set(index, element) - } } -impl SequenceTreeNode +impl SequenceTreeNode where T: Clone + Debug, { @@ -157,7 +139,7 @@ where } } - pub fn len(&self) -> usize { + pub(crate) fn len(&self) -> usize { self.length } @@ -380,7 +362,7 @@ where l } - pub fn remove(&mut self, index: usize) -> T { + pub(crate) fn remove(&mut self, index: usize) -> T { let original_len = self.len(); if self.is_leaf() { let v = self.remove_from_leaf(index); @@ -423,7 +405,7 @@ where } } - fn merge(&mut self, middle: T, successor_sibling: SequenceTreeNode) { + fn merge(&mut self, middle: T, successor_sibling: SequenceTreeNode) { self.elements.push(middle); self.elements.extend(successor_sibling.elements); self.children.extend(successor_sibling.children); @@ -431,31 +413,7 @@ where assert!(self.is_full()); } - pub fn set(&mut self, index: usize, element: T) -> T { - if self.is_leaf() { - let 
old_element = self.elements.get_mut(index).unwrap(); - mem::replace(old_element, element) - } else { - let mut cumulative_len = 0; - for (child_index, child) in self.children.iter_mut().enumerate() { - match (cumulative_len + child.len()).cmp(&index) { - Ordering::Less => { - cumulative_len += child.len() + 1; - } - Ordering::Equal => { - let old_element = self.elements.get_mut(child_index).unwrap(); - return mem::replace(old_element, element); - } - Ordering::Greater => { - return child.set(index - cumulative_len, element); - } - } - } - panic!("Invalid index to set: {} but len was {}", index, self.len()) - } - } - - pub fn get(&self, index: usize) -> Option<&T> { + pub(crate) fn get(&self, index: usize) -> Option<&T> { if self.is_leaf() { return self.elements.get(index); } else { @@ -474,29 +432,9 @@ where } None } - - pub fn get_mut(&mut self, index: usize) -> Option<&mut T> { - if self.is_leaf() { - return self.elements.get_mut(index); - } else { - let mut cumulative_len = 0; - for (child_index, child) in self.children.iter_mut().enumerate() { - match (cumulative_len + child.len()).cmp(&index) { - Ordering::Less => { - cumulative_len += child.len() + 1; - } - Ordering::Equal => return self.elements.get_mut(child_index), - Ordering::Greater => { - return child.get_mut(index - cumulative_len); - } - } - } - } - None - } } -impl Default for SequenceTreeInternal +impl Default for SequenceTreeInternal where T: Clone + Debug, { @@ -505,7 +443,7 @@ where } } -impl PartialEq for SequenceTreeInternal +impl PartialEq for SequenceTreeInternal where T: Clone + Debug + PartialEq, { @@ -514,13 +452,13 @@ where } } -impl<'a, T> IntoIterator for &'a SequenceTreeInternal +impl<'a, T> IntoIterator for &'a SequenceTreeInternal where T: Clone + Debug, { type Item = &'a T; - type IntoIter = Iter<'a, T, B>; + type IntoIter = Iter<'a, T>; fn into_iter(self) -> Self::IntoIter { Iter { @@ -530,12 +468,13 @@ where } } +#[derive(Debug)] pub struct Iter<'a, T> { - inner: &'a 
SequenceTreeInternal, + inner: &'a SequenceTreeInternal, index: usize, } -impl<'a, T> Iterator for Iter<'a, T, B> +impl<'a, T> Iterator for Iter<'a, T> where T: Clone + Debug, { @@ -554,37 +493,35 @@ where #[cfg(test)] mod tests { - use crate::ActorId; + use proptest::prelude::*; use super::*; #[test] fn push_back() { let mut t = SequenceTree::new(); - let actor = ActorId::random(); - t.push(actor.op_id_at(1)); - t.push(actor.op_id_at(2)); - t.push(actor.op_id_at(3)); - t.push(actor.op_id_at(4)); - t.push(actor.op_id_at(5)); - t.push(actor.op_id_at(6)); - t.push(actor.op_id_at(8)); - t.push(actor.op_id_at(100)); + t.push(1); + t.push(2); + t.push(3); + t.push(4); + t.push(5); + t.push(6); + t.push(8); + t.push(100); } #[test] fn insert() { let mut t = SequenceTree::new(); - let actor = ActorId::random(); - t.insert(0, actor.op_id_at(1)); - t.insert(1, actor.op_id_at(1)); - t.insert(0, actor.op_id_at(1)); - t.insert(0, actor.op_id_at(1)); - t.insert(0, actor.op_id_at(1)); - t.insert(3, actor.op_id_at(1)); - t.insert(4, actor.op_id_at(1)); + t.insert(0, 1); + t.insert(1, 1); + t.insert(0, 1); + t.insert(0, 1); + t.insert(0, 1); + t.insert(3, 1); + t.insert(4, 1); } #[test] @@ -609,79 +546,72 @@ mod tests { } } - /* - fn arb_indices() -> impl Strategy> { - proptest::collection::vec(any::(), 0..1000).prop_map(|v| { - let mut len = 0; - v.into_iter() - .map(|i| { - len += 1; - i % len - }) - .collect::>() - }) - } - */ + fn arb_indices() -> impl Strategy> { + proptest::collection::vec(any::(), 0..1000).prop_map(|v| { + let mut len = 0; + v.into_iter() + .map(|i| { + len += 1; + i % len + }) + .collect::>() + }) + } - // use proptest::prelude::*; + proptest! { - /* - proptest! 
{ + #[test] + fn proptest_insert(indices in arb_indices()) { + let mut t = SequenceTreeInternal::::new(); + let mut v = Vec::new(); - #[test] - fn proptest_insert(indices in arb_indices()) { - let mut t = SequenceTreeInternal::::new(); - let actor = ActorId::random(); - let mut v = Vec::new(); - - for i in indices{ - if i <= v.len() { - t.insert(i % 3, i); - v.insert(i % 3, i); - } else { - return Err(proptest::test_runner::TestCaseError::reject("index out of bounds")) - } - - assert_eq!(v, t.iter().copied().collect::>()) + for i in indices{ + if i <= v.len() { + t.insert(i % 3, i); + v.insert(i % 3, i); + } else { + return Err(proptest::test_runner::TestCaseError::reject("index out of bounds")) } + + assert_eq!(v, t.iter().copied().collect::>()) + } + } + + } + + proptest! { + + // This is a really slow test due to all the copying of the Vecs (i.e. not due to the + // sequencetree) so we only do a few runs + #![proptest_config(ProptestConfig::with_cases(20))] + #[test] + fn proptest_remove(inserts in arb_indices(), removes in arb_indices()) { + let mut t = SequenceTreeInternal::::new(); + let mut v = Vec::new(); + + for i in inserts { + if i <= v.len() { + t.insert(i , i); + v.insert(i , i); + } else { + return Err(proptest::test_runner::TestCaseError::reject("index out of bounds")) + } + + assert_eq!(v, t.iter().copied().collect::>()) } - } - */ - - /* - proptest! 
{ - - #[test] - fn proptest_remove(inserts in arb_indices(), removes in arb_indices()) { - let mut t = SequenceTreeInternal::::new(); - let actor = ActorId::random(); - let mut v = Vec::new(); - - for i in inserts { - if i <= v.len() { - t.insert(i , i); - v.insert(i , i); - } else { - return Err(proptest::test_runner::TestCaseError::reject("index out of bounds")) - } - - assert_eq!(v, t.iter().copied().collect::>()) + for i in removes { + if i < v.len() { + let tr = t.remove(i); + let vr = v.remove(i); + assert_eq!(tr, vr); + } else { + return Err(proptest::test_runner::TestCaseError::reject("index out of bounds")) } - for i in removes { - if i < v.len() { - let tr = t.remove(i); - let vr = v.remove(i); - assert_eq!(tr, vr); - } else { - return Err(proptest::test_runner::TestCaseError::reject("index out of bounds")) - } - - assert_eq!(v, t.iter().copied().collect::>()) - } + assert_eq!(v, t.iter().copied().collect::>()) } - } - */ + + } } diff --git a/automerge-wasm/src/sync.rs b/rust/automerge-wasm/src/sync.rs similarity index 73% rename from automerge-wasm/src/sync.rs rename to rust/automerge-wasm/src/sync.rs index 94f65041..c4fd4a86 100644 --- a/automerge-wasm/src/sync.rs +++ b/rust/automerge-wasm/src/sync.rs @@ -5,7 +5,7 @@ use std::collections::{BTreeSet, HashMap}; use std::convert::TryInto; use wasm_bindgen::prelude::*; -use crate::interop::{to_js_err, AR, JS}; +use crate::interop::{self, to_js_err, AR, JS}; #[wasm_bindgen] #[derive(Debug)] @@ -24,7 +24,10 @@ impl SyncState { } #[wasm_bindgen(setter, js_name = lastSentHeads)] - pub fn set_last_sent_heads(&mut self, heads: JsValue) -> Result<(), JsValue> { + pub fn set_last_sent_heads( + &mut self, + heads: JsValue, + ) -> Result<(), interop::error::BadChangeHashes> { let heads: Vec = JS(heads).try_into()?; self.0.last_sent_heads = heads; Ok(()) @@ -44,10 +47,19 @@ impl SyncState { SyncState(self.0.clone()) } - pub(crate) fn decode(data: Uint8Array) -> Result { + pub(crate) fn decode(data: Uint8Array) -> 
Result { let data = data.to_vec(); - let s = am::sync::State::decode(&data); - let s = s.map_err(to_js_err)?; + let s = am::sync::State::decode(&data)?; Ok(SyncState(s)) } } + +#[derive(Debug, thiserror::Error)] +#[error(transparent)] +pub struct DecodeSyncStateErr(#[from] automerge::sync::DecodeStateError); + +impl From for JsValue { + fn from(e: DecodeSyncStateErr) -> Self { + JsValue::from(e.to_string()) + } +} diff --git a/rust/automerge-wasm/src/value.rs b/rust/automerge-wasm/src/value.rs new file mode 100644 index 00000000..643e2881 --- /dev/null +++ b/rust/automerge-wasm/src/value.rs @@ -0,0 +1,161 @@ +use automerge::{ObjType, ScalarValue, Value}; +use wasm_bindgen::prelude::*; + +#[derive(Debug, Clone, Hash, Eq, PartialEq)] +pub(crate) enum Datatype { + Map, + Table, + List, + Text, + Bytes, + Str, + Int, + Uint, + F64, + Counter, + Timestamp, + Boolean, + Null, + Unknown(u8), +} + +impl Datatype { + pub(crate) fn is_scalar(&self) -> bool { + !matches!(self, Self::Map | Self::Table | Self::List | Self::Text) + } +} + +impl From<&ObjType> for Datatype { + fn from(o: &ObjType) -> Self { + (*o).into() + } +} + +impl From for Datatype { + fn from(o: ObjType) -> Self { + match o { + ObjType::Map => Self::Map, + ObjType::List => Self::List, + ObjType::Table => Self::Table, + ObjType::Text => Self::Text, + } + } +} + +impl std::fmt::Display for Datatype { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { + write!(f, "{}", String::from(self.clone())) + } +} + +impl From<&ScalarValue> for Datatype { + fn from(s: &ScalarValue) -> Self { + match s { + ScalarValue::Bytes(_) => Self::Bytes, + ScalarValue::Str(_) => Self::Str, + ScalarValue::Int(_) => Self::Int, + ScalarValue::Uint(_) => Self::Uint, + ScalarValue::F64(_) => Self::F64, + ScalarValue::Counter(_) => Self::Counter, + ScalarValue::Timestamp(_) => Self::Timestamp, + ScalarValue::Boolean(_) => Self::Boolean, + ScalarValue::Null => Self::Null, + ScalarValue::Unknown { type_code, 
.. } => Self::Unknown(*type_code), + } + } +} + +impl From<&Value<'_>> for Datatype { + fn from(v: &Value<'_>) -> Self { + match v { + Value::Object(o) => o.into(), + Value::Scalar(s) => s.as_ref().into(), + /* + ScalarValue::Bytes(_) => Self::Bytes, + ScalarValue::Str(_) => Self::Str, + ScalarValue::Int(_) => Self::Int, + ScalarValue::Uint(_) => Self::Uint, + ScalarValue::F64(_) => Self::F64, + ScalarValue::Counter(_) => Self::Counter, + ScalarValue::Timestamp(_) => Self::Timestamp, + ScalarValue::Boolean(_) => Self::Boolean, + ScalarValue::Null => Self::Null, + ScalarValue::Unknown { type_code, .. } => Self::Unknown(*type_code), + */ + } + } +} + +impl From for String { + fn from(d: Datatype) -> Self { + match d { + Datatype::Map => "map".into(), + Datatype::Table => "table".into(), + Datatype::List => "list".into(), + Datatype::Text => "text".into(), + Datatype::Bytes => "bytes".into(), + Datatype::Str => "str".into(), + Datatype::Int => "int".into(), + Datatype::Uint => "uint".into(), + Datatype::F64 => "f64".into(), + Datatype::Counter => "counter".into(), + Datatype::Timestamp => "timestamp".into(), + Datatype::Boolean => "boolean".into(), + Datatype::Null => "null".into(), + Datatype::Unknown(type_code) => format!("unknown{}", type_code), + } + } +} + +impl TryFrom for Datatype { + type Error = InvalidDatatype; + + fn try_from(datatype: JsValue) -> Result { + let datatype = datatype.as_string().ok_or(InvalidDatatype::NotString)?; + match datatype.as_str() { + "map" => Ok(Datatype::Map), + "table" => Ok(Datatype::Table), + "list" => Ok(Datatype::List), + "text" => Ok(Datatype::Text), + "bytes" => Ok(Datatype::Bytes), + "str" => Ok(Datatype::Str), + "int" => Ok(Datatype::Int), + "uint" => Ok(Datatype::Uint), + "f64" => Ok(Datatype::F64), + "counter" => Ok(Datatype::Counter), + "timestamp" => Ok(Datatype::Timestamp), + "boolean" => Ok(Datatype::Boolean), + "null" => Ok(Datatype::Null), + d => { + if d.starts_with("unknown") { + // TODO: handle "unknown{}", + 
Err(InvalidDatatype::UnknownNotImplemented) + } else { + Err(InvalidDatatype::Unknown(d.to_string())) + } + } + } + } +} + +impl From for JsValue { + fn from(d: Datatype) -> Self { + String::from(d).into() + } +} + +#[derive(Debug, thiserror::Error)] +pub enum InvalidDatatype { + #[error("unknown datatype")] + Unknown(String), + #[error("datatype is not a string")] + NotString, + #[error("cannot handle unknown datatype")] + UnknownNotImplemented, +} + +impl From for JsValue { + fn from(e: InvalidDatatype) -> Self { + JsValue::from(e.to_string()) + } +} diff --git a/rust/automerge-wasm/test/apply.ts b/rust/automerge-wasm/test/apply.ts new file mode 100644 index 00000000..453b4c26 --- /dev/null +++ b/rust/automerge-wasm/test/apply.ts @@ -0,0 +1,229 @@ + +import { describe, it } from 'mocha'; +import assert from 'assert' +import { create, Value } from '..' + +export const OBJECT_ID = Symbol.for('_am_objectId') // object containing metadata about current + +// @ts-ignore +function _obj(doc: any) : any { + if (typeof doc === 'object' && doc !== null) { + return doc[OBJECT_ID] + } +} + +// sample classes for testing +class Counter { + value: number; + constructor(n: number) { + this.value = n + } +} + +describe('Automerge', () => { + describe('Patch Apply', () => { + it('apply nested sets on maps', () => { + const start = { hello: { mellow: { yellow: "world", x: 1 }, y : 2 } } + const doc1 = create(true) + doc1.putObject("/", "hello", start.hello); + let mat = doc1.materialize("/") + const doc2 = create(true) + doc2.enablePatches(true) + doc2.merge(doc1) + + let base = doc2.applyPatches({}) + assert.deepEqual(mat, start) + assert.deepEqual(base, start) + + doc2.delete("/hello/mellow", "yellow"); + // @ts-ignore + delete start.hello.mellow.yellow; + base = doc2.applyPatches(base) + mat = doc2.materialize("/") + + assert.deepEqual(mat, start) + assert.deepEqual(base, start) + }) + + it('apply patches on lists', () => { + const start = { list: [1,2,3,4] } + const doc1 = 
create(true) + doc1.putObject("/", "list", start.list); + let mat = doc1.materialize("/") + const doc2 = create(true) + doc2.enablePatches(true) + doc2.merge(doc1) + mat = doc1.materialize("/") + let base = doc2.applyPatches({}) + assert.deepEqual(mat, start) + assert.deepEqual(base, start) + + doc2.delete("/list", 3); + start.list.splice(3,1) + base = doc2.applyPatches(base) + + assert.deepEqual(base, start) + }) + + it('apply patches on lists of lists of lists', () => { + const start = { list: + [ + [ + [ 1, 2, 3, 4, 5, 6], + [ 7, 8, 9,10,11,12], + ], + [ + [ 7, 8, 9,10,11,12], + [ 1, 2, 3, 4, 5, 6], + ] + ] + } + const doc1 = create(true) + doc1.enablePatches(true) + doc1.putObject("/", "list", start.list); + let base = doc1.applyPatches({}) + let mat = doc1.clone().materialize("/") + assert.deepEqual(mat, start) + assert.deepEqual(base, start) + + doc1.delete("/list/0/1", 3) + start.list[0][1].splice(3,1) + + doc1.delete("/list/0", 0) + start.list[0].splice(0,1) + + mat = doc1.clone().materialize("/") + base = doc1.applyPatches(base) + assert.deepEqual(mat, start) + assert.deepEqual(base, start) + }) + + it('large inserts should make one splice patch', () => { + const doc1 = create(true) + doc1.enablePatches(true) + doc1.putObject("/", "list", "abc"); + const patches = doc1.popPatches() + assert.deepEqual( patches, [ + { action: 'put', path: [ 'list' ], value: "" }, + { action: 'splice', path: [ 'list', 0 ], value: 'abc' }]) + }) + + it('it should allow registering type wrappers', () => { + const doc1 = create(true) + doc1.enablePatches(true) + doc1.registerDatatype("counter", (n: number) => new Counter(n)) + const doc2 = doc1.fork() + doc1.put("/", "n", 10, "counter") + doc1.put("/", "m", 10, "int") + + let mat = doc1.materialize("/") + assert.deepEqual( mat, { n: new Counter(10), m: 10 } ) + + doc2.merge(doc1) + let apply = doc2.applyPatches({}) + assert.deepEqual( apply, { n: new Counter(10), m: 10 } ) + + doc1.increment("/","n", 5) + mat = 
doc1.materialize("/") + assert.deepEqual( mat, { n: new Counter(15), m: 10 } ) + + doc2.merge(doc1) + apply = doc2.applyPatches(apply) + assert.deepEqual( apply, { n: new Counter(15), m: 10 } ) + }) + + it('text can be managed as an array or a string', () => { + const doc1 = create(true, "aaaa") + doc1.enablePatches(true) + + doc1.putObject("/", "notes", "hello world") + + let mat = doc1.materialize("/") + + assert.deepEqual( mat, { notes: "hello world" } ) + + const doc2 = create(true) + let apply : any = doc2.materialize("/") + doc2.enablePatches(true) + apply = doc2.applyPatches(apply) + + doc2.merge(doc1); + apply = doc2.applyPatches(apply) + assert.deepEqual(_obj(apply), "_root") + assert.deepEqual( apply, { notes: "hello world" } ) + + doc2.splice("/notes", 6, 5, "everyone"); + apply = doc2.applyPatches(apply) + assert.deepEqual( apply, { notes: "hello everyone" } ) + + mat = doc2.materialize("/") + assert.deepEqual(_obj(mat), "_root") + // @ts-ignore + assert.deepEqual( mat, { notes: "hello everyone" } ) + }) + + it('should set the OBJECT_ID property on lists, maps, and text objects and not on scalars', () => { + const doc1 = create(true, 'aaaa') + const mat: any = doc1.materialize("/") + doc1.enablePatches(true) + doc1.registerDatatype("counter", (n: number) => new Counter(n)) + doc1.put("/", "string", "string", "str") + doc1.put("/", "uint", 2, "uint") + doc1.put("/", "int", 2, "int") + doc1.put("/", "float", 2.3, "f64") + doc1.put("/", "bytes", new Uint8Array(), "bytes") + doc1.put("/", "counter", 1, "counter") + doc1.put("/", "date", new Date(), "timestamp") + doc1.putObject("/", "text", "text") + doc1.putObject("/", "list", []) + doc1.putObject("/", "map", {}) + const applied = doc1.applyPatches(mat) + + assert.equal(_obj(applied.string), null) + assert.equal(_obj(applied.uint), null) + assert.equal(_obj(applied.int), null) + assert.equal(_obj(applied.float), null) + assert.equal(_obj(applied.bytes), null) + assert.equal(_obj(applied.counter), null) + 
assert.equal(_obj(applied.date), null) + assert.equal(_obj(applied.text), null) + + assert.notEqual(_obj(applied.list), null) + assert.notEqual(_obj(applied.map), null) + }) + + it('should set the root OBJECT_ID to "_root"', () => { + const doc1 = create(true, 'aaaa') + const mat: any = doc1.materialize("/") + assert.equal(_obj(mat), "_root") + doc1.enablePatches(true) + doc1.put("/", "key", "value") + const applied = doc1.applyPatches(mat) + assert.equal(_obj(applied), "_root") + }) + + it.skip('it can patch quickly', () => { +/* + console.time("init") + let doc1 = create() + doc1.enablePatches(true) + doc1.putObject("/", "notes", ""); + let mat = doc1.materialize("/") + let doc2 = doc1.fork() + let testData = new Array( 100000 ).join("x") + console.timeEnd("init") + console.time("splice") + doc2.splice("/notes", 0, 0, testData); + console.timeEnd("splice") + console.time("merge") + doc1.merge(doc2) + console.timeEnd("merge") + console.time("patch") + mat = doc1.applyPatches(mat) + console.timeEnd("patch") +*/ + }) + }) +}) + +// TODO: squash puts & deletes diff --git a/automerge-wasm/test/helpers/columnar.js b/rust/automerge-wasm/test/helpers/columnar.js similarity index 100% rename from automerge-wasm/test/helpers/columnar.js rename to rust/automerge-wasm/test/helpers/columnar.js diff --git a/automerge-wasm/test/helpers/common.js b/rust/automerge-wasm/test/helpers/common.js similarity index 100% rename from automerge-wasm/test/helpers/common.js rename to rust/automerge-wasm/test/helpers/common.js diff --git a/automerge-js/test/legacy/encoding.js b/rust/automerge-wasm/test/helpers/encoding.js similarity index 100% rename from automerge-js/test/legacy/encoding.js rename to rust/automerge-wasm/test/helpers/encoding.js diff --git a/automerge-wasm/test/helpers/sync.js b/rust/automerge-wasm/test/helpers/sync.js similarity index 100% rename from automerge-wasm/test/helpers/sync.js rename to rust/automerge-wasm/test/helpers/sync.js diff --git 
a/automerge-wasm/test/readme.ts b/rust/automerge-wasm/test/readme.ts similarity index 84% rename from automerge-wasm/test/readme.ts rename to rust/automerge-wasm/test/readme.ts index 5dcff10e..e5823556 100644 --- a/automerge-wasm/test/readme.ts +++ b/rust/automerge-wasm/test/readme.ts @@ -1,23 +1,18 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ import { describe, it } from 'mocha'; import * as assert from 'assert' -//@ts-ignore -import { init, create, load } from '..' +import { create, load, initSyncState } from '..' describe('Automerge', () => { describe('Readme Examples', () => { - it('Using the Library and Creating a Document (1)', () => { - const doc = create() + it('Using the Library and Creating a Document', () => { + const doc = create(true) + const sync = initSyncState() doc.free() - }) - it('Using the Library and Creating a Document (2)', (done) => { - init().then((_:any) => { - const doc = create() - doc.free() - done() - }) + sync.free() }) it('Automerge Scalar Types (1)', () => { - const doc = create() + const doc = create(true) doc.put("/", "prop1", 100) // int doc.put("/", "prop2", 3.14) // f64 doc.put("/", "prop3", "hello world") @@ -35,11 +30,9 @@ describe('Automerge', () => { prop6: true, prop7: null }) - - doc.free() }) it('Automerge Scalar Types (2)', () => { - const doc = create() + const doc = create(true) doc.put("/", "prop1", 100, "int") doc.put("/", "prop2", 100, "uint") doc.put("/", "prop3", 100.5, "f64") @@ -50,10 +43,9 @@ describe('Automerge', () => { doc.put("/", "prop8", new Uint8Array([1,2,3]), "bytes") doc.put("/", "prop9", true, "boolean") doc.put("/", "prop10", null, "null") - doc.free() }) it('Automerge Object Types (1)', () => { - const doc = create() + const doc = create(true) // you can create an object by passing in the inital state - if blank pass in `{}` // the return value is the Object Id @@ -70,11 +62,9 @@ describe('Automerge', () => { // text is initialized with a string const notes = doc.putObject("/", 
"notes", "Hello world!") - - doc.free() }) it('Automerge Object Types (2)', () => { - const doc = create() + const doc = create(true) const config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] }) @@ -93,11 +83,9 @@ describe('Automerge', () => { assert.deepEqual(doc.materialize("/"), { config: { align: "right", archived: false, cycles: [ 10, 19, 21 ] } }) - - doc.free() }) it('Maps (1)', () => { - const doc = create() + const doc = create(true) const mymap = doc.putObject("_root", "mymap", { foo: "bar"}) // make a new map with the foo key @@ -109,11 +97,9 @@ describe('Automerge', () => { assert.deepEqual(doc.keys(mymap),["bytes","foo","sub"]) assert.deepEqual(doc.materialize("_root"), { mymap: { bytes: new Uint8Array([1,2,3]), foo: "bar", sub: {} }}) - - doc.free() }) it('Lists (1)', () => { - const doc = create() + const doc = create(true) const items = doc.putObject("_root", "items", [10,"box"]) // init a new list with two elements doc.push(items, true) // push `true` to the end of the list @@ -125,26 +111,16 @@ describe('Automerge', () => { assert.deepEqual(doc.materialize(items),[ "bat", [ 1 ,2 ], { hello : "world" }, true, "bag", "brick" ]) assert.deepEqual(doc.length(items),6) - - doc.free() }) it('Text (1)', () => { - const doc = create("aaaaaa") + const doc = create(true, "aaaaaa") const notes = doc.putObject("_root", "notes", "Hello world") doc.splice(notes, 6, 5, "everyone") assert.deepEqual(doc.text(notes), "Hello everyone") - - const obj = doc.insertObject(notes, 6, { hi: "there" }) - - assert.deepEqual(doc.text(notes), "Hello \ufffceveryone") - assert.deepEqual(doc.get(notes, 6), obj) - assert.deepEqual(doc.get(obj, "hi"), "there") - - doc.free() }) it('Querying Data (1)', () => { - const doc1 = create("aabbcc") + const doc1 = create(true, "aabbcc") doc1.put("_root", "key1", "val1") const key2 = doc1.putObject("_root", "key2", []) @@ -162,11 +138,9 @@ describe('Automerge', () => { 
assert.deepEqual(doc1.get("_root","key3"), "doc2val") assert.deepEqual(doc1.getAll("_root","key3"),[[ "str", "doc1val", "3@aabbcc"], ["str", "doc2val", "3@ffaaff"]]) - - doc1.free(); doc2.free() }) it('Counters (1)', () => { - const doc1 = create("aaaaaa") + const doc1 = create(true, "aaaaaa") doc1.put("_root", "number", 0) doc1.put("_root", "total", 0, "counter") @@ -180,11 +154,9 @@ describe('Automerge', () => { doc1.merge(doc2) assert.deepEqual(doc1.materialize("_root"), { number: 10, total: 33 }) - - doc1.free(); doc2.free() }) it('Transactions (1)', () => { - const doc = create() + const doc = create(true) doc.put("_root", "key", "val1") @@ -204,11 +176,9 @@ describe('Automerge', () => { assert.deepEqual(doc.get("_root", "key"),"val2") assert.deepEqual(doc.pendingOps(),0) - - doc.free() }) it('Viewing Old Versions of the Document (1)', () => { - const doc = create() + const doc = create(true) doc.put("_root", "key", "val1") const heads1 = doc.getHeads() @@ -222,11 +192,9 @@ describe('Automerge', () => { assert.deepEqual(doc.get("_root","key",heads2), "val2") assert.deepEqual(doc.get("_root","key",heads1), "val1") assert.deepEqual(doc.get("_root","key",[]), undefined) - - doc.free() }) it('Forking And Merging (1)', () => { - const doc1 = create() + const doc1 = create(true) doc1.put("_root", "key1", "val1") const doc2 = doc1.fork() @@ -238,17 +206,15 @@ describe('Automerge', () => { assert.deepEqual(doc1.materialize("_root"), { key1: "val1", key2: "val2", key3: "val3" }) assert.deepEqual(doc2.materialize("_root"), { key1: "val1", key3: "val3" }) - - doc1.free(); doc2.free() }) it('Saving And Loading (1)', () => { - const doc1 = create() + const doc1 = create(true) doc1.put("_root", "key1", "value1") const save1 = doc1.save() - const doc2 = load(save1) + const doc2 = load(save1, true) doc2.materialize("_root") // returns { key1: "value1" } @@ -264,17 +230,15 @@ describe('Automerge', () => { doc2.loadIncremental(saveIncremental) - const doc3 = load(save2) + const 
doc3 = load(save2, true) - const doc4 = load(save3) + const doc4 = load(save3, true) assert.deepEqual(doc1.materialize("_root"), { key1: "value1", key2: "value2" }) assert.deepEqual(doc2.materialize("_root"), { key1: "value1", key2: "value2" }) assert.deepEqual(doc3.materialize("_root"), { key1: "value1", key2: "value2" }) assert.deepEqual(doc4.materialize("_root"), { key1: "value1", key2: "value2" }) - - doc1.free(); doc2.free(); doc3.free(); doc4.free() }) - it.skip('Syncing (1)', () => { }) + //it.skip('Syncing (1)', () => { }) }) }) diff --git a/automerge-wasm/test/test.ts b/rust/automerge-wasm/test/test.ts similarity index 78% rename from automerge-wasm/test/test.ts rename to rust/automerge-wasm/test/test.ts index 852ec2cc..bb4f71e3 100644 --- a/automerge-wasm/test/test.ts +++ b/rust/automerge-wasm/test/test.ts @@ -1,10 +1,10 @@ import { describe, it } from 'mocha'; -//@ts-ignore import assert from 'assert' -//@ts-ignore +// @ts-ignore import { BloomFilter } from './helpers/sync' -import { init, create, load, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' -import { DecodedSyncMessage, Hash } from '..'; +import { create, load, SyncState, Automerge, encodeChange, decodeChange, initSyncState, decodeSyncMessage, decodeSyncState, encodeSyncState, encodeSyncMessage } from '..' 
+import { Value, DecodedSyncMessage, Hash } from '..'; +import {kill} from 'process'; function sync(a: Automerge, b: Automerge, aSyncState = initSyncState(), bSyncState = initSyncState()) { const MAX_ITER = 10 @@ -28,33 +28,27 @@ function sync(a: Automerge, b: Automerge, aSyncState = initSyncState(), bSyncSta describe('Automerge', () => { describe('basics', () => { - it('default import init() should return a promise', () => { - assert(init() instanceof Promise) - }) it('should create, clone and free', () => { - const doc1 = create() + const doc1 = create(true) const doc2 = doc1.clone() - doc1.free() doc2.free() }) it('should be able to start and commit', () => { - const doc = create() + const doc = create(true) doc.commit() - doc.free() }) it('getting a nonexistent prop does not throw an error', () => { - const doc = create() + const doc = create(true) const root = "_root" const result = doc.getWithType(root, "hello") assert.deepEqual(result, undefined) - doc.free() }) it('should be able to set and get a simple value', () => { - const doc: Automerge = create("aabbcc") + const doc: Automerge = create(true, "aabbcc") const root = "_root" let result @@ -109,23 +103,20 @@ describe('Automerge', () => { result = doc.getWithType(root, "null") assert.deepEqual(result, ["null", null]); - - doc.free() }) it('should be able to use bytes', () => { - const doc = create() + const doc = create(true) doc.put("_root", "data1", new Uint8Array([10, 11, 12])); doc.put("_root", "data2", new Uint8Array([13, 14, 15]), "bytes"); const value1 = doc.getWithType("_root", "data1") assert.deepEqual(value1, ["bytes", new Uint8Array([10, 11, 12])]); const value2 = doc.getWithType("_root", "data2") assert.deepEqual(value2, ["bytes", new Uint8Array([13, 14, 15])]); - doc.free() }) it('should be able to make subobjects', () => { - const doc = create() + const doc = create(true) const root = "_root" let result @@ -138,11 +129,10 @@ describe('Automerge', () => { result = doc.getWithType(submap, 
"number") assert.deepEqual(result, ["uint", 6]) - doc.free() }) it('should be able to make lists', () => { - const doc = create() + const doc = create(true) const root = "_root" const sublist = doc.putObject(root, "numbers", []) @@ -161,11 +151,10 @@ describe('Automerge', () => { assert.deepEqual(doc.getWithType(sublist, 2), ["str", "b v2"]) assert.deepEqual(doc.length(sublist), 4) - doc.free() }) it('lists have insert, set, splice, and push ops', () => { - const doc = create() + const doc = create(true) const root = "_root" const sublist = doc.putObject(root, "letters", []) @@ -184,12 +173,10 @@ describe('Automerge', () => { assert.deepEqual(doc.materialize(sublist), ["z", "d", "e", "f", "c", new Date(3)]) assert.deepEqual(doc.length(sublist), 6) assert.deepEqual(doc.materialize("/", heads), { letters: ["b", "a", "c"] }) - - doc.free() }) it('should be able delete non-existent props', () => { - const doc = create() + const doc = create(true) doc.put("_root", "foo", "bar") doc.put("_root", "bip", "bap") @@ -202,24 +189,24 @@ describe('Automerge', () => { const hash2 = doc.commit() assert.deepEqual(doc.keys("_root"), ["bip"]) + assert.ok(hash1) assert.deepEqual(doc.keys("_root", [hash1]), ["bip", "foo"]) + assert.ok(hash2) assert.deepEqual(doc.keys("_root", [hash2]), ["bip"]) - doc.free() }) it('should be able to del', () => { - const doc = create() + const doc = create(true) const root = "_root" doc.put(root, "xxx", "xxx"); assert.deepEqual(doc.getWithType(root, "xxx"), ["str", "xxx"]) doc.delete(root, "xxx"); assert.deepEqual(doc.getWithType(root, "xxx"), undefined) - doc.free() }) it('should be able to use counters', () => { - const doc = create() + const doc = create(true) const root = "_root" doc.put(root, "counter", 10, "counter"); @@ -228,37 +215,34 @@ describe('Automerge', () => { assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 20]) doc.increment(root, "counter", -5); assert.deepEqual(doc.getWithType(root, "counter"), ["counter", 15]) - 
doc.free() }) it('should be able to splice text', () => { - const doc = create() + const doc = create(true) const root = "_root"; const text = doc.putObject(root, "text", ""); doc.splice(text, 0, 0, "hello ") - doc.splice(text, 6, 0, ["w", "o", "r", "l", "d"]) - doc.splice(text, 11, 0, ["!", "?"]) + doc.splice(text, 6, 0, "world") + doc.splice(text, 11, 0, "!?") assert.deepEqual(doc.getWithType(text, 0), ["str", "h"]) assert.deepEqual(doc.getWithType(text, 1), ["str", "e"]) assert.deepEqual(doc.getWithType(text, 9), ["str", "l"]) assert.deepEqual(doc.getWithType(text, 10), ["str", "d"]) assert.deepEqual(doc.getWithType(text, 11), ["str", "!"]) assert.deepEqual(doc.getWithType(text, 12), ["str", "?"]) - doc.free() }) - it('should be able to insert objects into text', () => { - const doc = create() + it.skip('should NOT be able to insert objects into text', () => { + const doc = create(true) const text = doc.putObject("/", "text", "Hello world"); - const obj = doc.insertObject(text, 6, { hello: "world" }); - assert.deepEqual(doc.text(text), "Hello \ufffcworld"); - assert.deepEqual(doc.getWithType(text, 6), ["map", obj]); - assert.deepEqual(doc.getWithType(obj, "hello"), ["str", "world"]); + assert.throws(() => { + doc.insertObject(text, 6, { hello: "world" }); + }) }) it('should be able save all or incrementally', () => { - const doc = create() + const doc = create(true) doc.put("_root", "foo", 1) @@ -279,22 +263,18 @@ describe('Automerge', () => { assert.notDeepEqual(saveA, saveB); - const docA = load(saveA); - const docB = load(saveB); - const docC = load(saveMidway) + const docA = load(saveA, true); + const docB = load(saveB, true); + const docC = load(saveMidway, true) docC.loadIncremental(save3) assert.deepEqual(docA.keys("_root"), docB.keys("_root")); assert.deepEqual(docA.save(), docB.save()); assert.deepEqual(docA.save(), docC.save()); - doc.free() - docA.free() - docB.free() - docC.free() }) it('should be able to splice text', () => { - const doc = create() 
+ const doc = create(true) const text = doc.putObject("_root", "text", ""); doc.splice(text, 0, 0, "hello world"); const hash1 = doc.commit(); @@ -302,19 +282,21 @@ describe('Automerge', () => { const hash2 = doc.commit(); assert.strictEqual(doc.text(text), "hello big bad world") assert.strictEqual(doc.length(text), 19) + assert.ok(hash1) assert.strictEqual(doc.text(text, [hash1]), "hello world") assert.strictEqual(doc.length(text, [hash1]), 11) + assert.ok(hash2) assert.strictEqual(doc.text(text, [hash2]), "hello big bad world") + assert.ok(hash2) assert.strictEqual(doc.length(text, [hash2]), 19) - doc.free() }) it('local inc increments all visible counters in a map', () => { - const doc1 = create("aaaa") + const doc1 = create(true, "aaaa") doc1.put("_root", "hello", "world") - const doc2 = load(doc1.save(), "bbbb"); - const doc3 = load(doc1.save(), "cccc"); - let heads = doc1.getHeads() + const doc2 = load(doc1.save(), true, "bbbb"); + const doc3 = load(doc1.save(), true, "cccc"); + const heads = doc1.getHeads() doc1.put("_root", "cnt", 20) doc2.put("_root", "cnt", 0, "counter") doc3.put("_root", "cnt", 10, "counter") @@ -334,21 +316,17 @@ describe('Automerge', () => { ]) const save1 = doc1.save() - const doc4 = load(save1) + const doc4 = load(save1, true) assert.deepEqual(doc4.save(), save1); - doc1.free() - doc2.free() - doc3.free() - doc4.free() }) it('local inc increments all visible counters in a sequence', () => { - const doc1 = create("aaaa") + const doc1 = create(true, "aaaa") const seq = doc1.putObject("_root", "seq", []) doc1.insert(seq, 0, "hello") - const doc2 = load(doc1.save(), "bbbb"); - const doc3 = load(doc1.save(), "cccc"); - let heads = doc1.getHeads() + const doc2 = load(doc1.save(), true, "bbbb"); + const doc3 = load(doc1.save(), true, "cccc"); + const heads = doc1.getHeads() doc1.put(seq, 0, 20) doc2.put(seq, 0, 0, "counter") doc3.put(seq, 0, 10, "counter") @@ -368,16 +346,12 @@ describe('Automerge', () => { ]) const save = doc1.save() - 
const doc4 = load(save) + const doc4 = load(save, true) assert.deepEqual(doc4.save(), save); - doc1.free() - doc2.free() - doc3.free() - doc4.free() }) it('paths can be used instead of objids', () => { - const doc = create("aaaa") + const doc = create(true, "aaaa") doc.putObject("_root", "list", [{ foo: "bar" }, [1, 2, 3]]) assert.deepEqual(doc.materialize("/"), { list: [{ foo: "bar" }, [1, 2, 3]] }) assert.deepEqual(doc.materialize("/list"), [{ foo: "bar" }, [1, 2, 3]]) @@ -385,8 +359,8 @@ describe('Automerge', () => { }) it('should be able to fetch changes by hash', () => { - const doc1 = create("aaaa") - const doc2 = create("bbbb") + const doc1 = create(true, "aaaa") + const doc2 = create(true, "bbbb") doc1.put("/", "a", "b") doc2.put("/", "b", "c") const head1 = doc1.getHeads() @@ -399,10 +373,10 @@ describe('Automerge', () => { }) it('recursive sets are possible', () => { - const doc = create("aaaa") + const doc = create(true, "aaaa") const l1 = doc.putObject("_root", "list", [{ foo: "bar" }, [1, 2, 3]]) const l2 = doc.insertObject(l1, 0, { zip: ["a", "b"] }) - const l3 = doc.putObject("_root", "info1", "hello world") // 'text' object + doc.putObject("_root", "info1", "hello world") // 'text' object doc.put("_root", "info2", "hello world") // 'str' const l4 = doc.putObject("_root", "info3", "hello world") assert.deepEqual(doc.materialize(), { @@ -414,11 +388,10 @@ describe('Automerge', () => { assert.deepEqual(doc.materialize(l2), { zip: ["a", "b"] }) assert.deepEqual(doc.materialize(l1), [{ zip: ["a", "b"] }, { foo: "bar" }, [1, 2, 3]]) assert.deepEqual(doc.materialize(l4), "hello world") - doc.free() }) it('only returns an object id when objects are created', () => { - const doc = create("aaaa") + const doc = create(true, "aaaa") const r1 = doc.put("_root", "foo", "bar") const r2 = doc.putObject("_root", "list", []) const r3 = doc.put("_root", "counter", 10, "counter") @@ -437,17 +410,16 @@ describe('Automerge', () => { assert.deepEqual(r7, "7@aaaa"); 
assert.deepEqual(r8, null); //assert.deepEqual(r9,["12@aaaa","13@aaaa"]); - doc.free() }) it('objects without properties are preserved', () => { - const doc1 = create("aaaa") + const doc1 = create(true, "aaaa") const a = doc1.putObject("_root", "a", {}); const b = doc1.putObject("_root", "b", {}); const c = doc1.putObject("_root", "c", {}); - const d = doc1.put(c, "d", "dd"); + doc1.put(c, "d", "dd"); const saved = doc1.save(); - const doc2 = load(saved); + const doc2 = load(saved, true); assert.deepEqual(doc2.getWithType("_root", "a"), ["map", a]) assert.deepEqual(doc2.keys(a), []) assert.deepEqual(doc2.getWithType("_root", "b"), ["map", b]) @@ -455,12 +427,10 @@ describe('Automerge', () => { assert.deepEqual(doc2.getWithType("_root", "c"), ["map", c]) assert.deepEqual(doc2.keys(c), ["d"]) assert.deepEqual(doc2.getWithType(c, "d"), ["str", "dd"]) - doc1.free() - doc2.free() }) - it('should allow you to forkAt a heads', () => { - const A = create("aaaaaa") + it('should allow you to fork at a heads', () => { + const A = create(true, "aaaaaa") A.put("/", "key1", "val1"); A.put("/", "key2", "val2"); const heads1 = A.getHeads(); @@ -470,12 +440,12 @@ describe('Automerge', () => { A.merge(B) const heads2 = A.getHeads(); A.put("/", "key5", "val5"); - assert.deepEqual(A.forkAt(heads1).materialize("/"), A.materialize("/", heads1)) - assert.deepEqual(A.forkAt(heads2).materialize("/"), A.materialize("/", heads2)) + assert.deepEqual(A.fork(undefined, heads1).materialize("/"), A.materialize("/", heads1)) + assert.deepEqual(A.fork(undefined, heads2).materialize("/"), A.materialize("/", heads2)) }) it('should handle merging text conflicts then saving & loading', () => { - const A = create("aabbcc") + const A = create(true, "aabbcc") const At = A.putObject('_root', 'text', "") A.splice(At, 0, 0, 'hello') @@ -492,7 +462,7 @@ describe('Automerge', () => { const binary = A.save() - const C = load(binary) + const C = load(binary, true) assert.deepEqual(C.getWithType('_root', 'text'), 
['text', '1@aabbcc']) assert.deepEqual(C.text(At), 'hell! world') @@ -501,77 +471,67 @@ describe('Automerge', () => { describe('patch generation', () => { it('should include root object key updates', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') doc1.put('_root', 'hello', 'world') doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: ['hello'], value: 'world', conflict: false } + { action: 'put', path: ['hello'], value: 'world' } ]) - doc1.free() - doc2.free() }) it('should include nested object creation', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') doc1.putObject('_root', 'birds', { friday: { robins: 3 } }) doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: [ 'birds' ], value: {}, conflict: false }, - { action: 'put', path: [ 'birds', 'friday' ], value: {}, conflict: false }, - { action: 'put', path: [ 'birds', 'friday', 'robins' ], value: 3, conflict: false}, + { action: 'put', path: [ 'birds' ], value: {} }, + { action: 'put', path: [ 'birds', 'friday' ], value: {} }, + { action: 'put', path: [ 'birds', 'friday', 'robins' ], value: 3}, ]) - doc1.free() - doc2.free() }) it('should delete map keys', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') doc1.put('_root', 'favouriteBird', 'Robin') doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) doc1.delete('_root', 'favouriteBird') doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: [ 'favouriteBird' ], value: 'Robin', conflict: false }, + { action: 'put', path: [ 'favouriteBird' ], value: 'Robin' }, { action: 'del', path: [ 'favouriteBird' ] } ]) - 
doc1.free() - doc2.free() }) it('should include list element insertion', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') doc1.putObject('_root', 'birds', ['Goldfinch', 'Chaffinch']) doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: [ 'birds' ], value: [], conflict: false }, - { action: 'splice', path: [ 'birds', 0 ], values: ['Goldfinch', 'Chaffinch'] }, + { action: 'put', path: [ 'birds' ], value: [] }, + { action: 'insert', path: [ 'birds', 0 ], values: ['Goldfinch', 'Chaffinch'] }, ]) - doc1.free() - doc2.free() }) it('should insert nested maps into a list', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') doc1.putObject('_root', 'birds', []) doc2.loadIncremental(doc1.saveIncremental()) doc1.insertObject('1@aaaa', 0, { species: 'Goldfinch', count: 3 }) doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - { action: 'splice', path: [ 'birds', 0 ], values: [{}] }, - { action: 'put', path: [ 'birds', 0, 'species' ], value: 'Goldfinch', conflict: false }, - { action: 'put', path: [ 'birds', 0, 'count', ], value: 3, conflict: false } + { action: 'insert', path: [ 'birds', 0 ], values: [{}] }, + { action: 'put', path: [ 'birds', 0, 'species' ], value: 'Goldfinch' }, + { action: 'put', path: [ 'birds', 0, 'count', ], value: 3 } ]) - doc1.free() - doc2.free() }) it('should calculate list indexes based on visible elements', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') doc1.putObject('_root', 'birds', ['Goldfinch', 'Chaffinch']) doc2.loadIncremental(doc1.saveIncremental()) doc1.delete('1@aaaa', 0) @@ -582,14 +542,12 @@ describe('Automerge', () => { assert.deepEqual(doc1.getWithType('1@aaaa', 1), ['str', 
'Greenfinch']) assert.deepEqual(doc2.popPatches(), [ { action: 'del', path: ['birds', 0] }, - { action: 'splice', path: ['birds', 1], values: ['Greenfinch'] } + { action: 'insert', path: ['birds', 1], values: ['Greenfinch'] } ]) - doc1.free() - doc2.free() }) it('should handle concurrent insertions at the head of a list', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc'), doc4 = create(true, 'dddd') doc1.putObject('_root', 'values', []) const change1 = doc1.saveIncremental() doc2.loadIncremental(change1) @@ -607,17 +565,15 @@ describe('Automerge', () => { assert.deepEqual([0, 1, 2, 3].map(i => (doc3.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) assert.deepEqual([0, 1, 2, 3].map(i => (doc4.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd']) assert.deepEqual(doc3.popPatches(), [ - { action: 'splice', path: ['values', 0], values:['c','d'] }, - { action: 'splice', path: ['values', 0], values:['a','b'] }, + { action: 'insert', path: ['values', 0], values:['a','b','c','d'] }, ]) assert.deepEqual(doc4.popPatches(), [ - { action: 'splice', path: ['values',0], values:['a','b','c','d'] }, + { action: 'insert', path: ['values',0], values:['a','b','c','d'] }, ]) - doc1.free(); doc2.free(); doc3.free(); doc4.free() }) it('should handle concurrent insertions beyond the head', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc'), doc4 = create(true, 'dddd') doc1.putObject('_root', 'values', ['a', 'b']) const change1 = doc1.saveIncremental() doc2.loadIncremental(change1) @@ -635,17 +591,15 @@ describe('Automerge', () => { assert.deepEqual([0, 1, 2, 3, 4, 5].map(i => (doc3.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f']) assert.deepEqual([0, 1, 2, 
3, 4, 5].map(i => (doc4.getWithType('1@aaaa', i) || [])[1]), ['a', 'b', 'c', 'd', 'e', 'f']) assert.deepEqual(doc3.popPatches(), [ - { action: 'splice', path: ['values', 2], values: ['e','f'] }, - { action: 'splice', path: ['values', 2], values: ['c','d'] }, + { action: 'insert', path: ['values', 2], values: ['c','d','e','f'] }, ]) assert.deepEqual(doc4.popPatches(), [ - { action: 'splice', path: ['values', 2], values: ['c','d','e','f'] }, + { action: 'insert', path: ['values', 2], values: ['c','d','e','f'] }, ]) - doc1.free(); doc2.free(); doc3.free(); doc4.free() }) it('should handle conflicts on root object keys', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc'), doc4 = create(true, 'dddd') doc1.put('_root', 'bird', 'Greenfinch') doc2.put('_root', 'bird', 'Goldfinch') const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() @@ -658,18 +612,16 @@ describe('Automerge', () => { assert.deepEqual(doc4.getWithType('_root', 'bird'), ['str', 'Goldfinch']) assert.deepEqual(doc4.getAll('_root', 'bird'), [['str', 'Greenfinch', '1@aaaa'], ['str', 'Goldfinch', '1@bbbb']]) assert.deepEqual(doc3.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Greenfinch', conflict: false }, - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true }, + { action: 'put', path: ['bird'], value: 'Greenfinch' }, + { action: 'put', path: ['bird'], value: 'Goldfinch' }, ]) assert.deepEqual(doc4.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: false }, - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true }, + { action: 'put', path: ['bird'], value: 'Goldfinch' }, ]) - doc1.free(); doc2.free(); doc3.free(); doc4.free() }) it('should handle three-way conflicts', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') + const doc1 = 
create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc') doc1.put('_root', 'bird', 'Greenfinch') doc2.put('_root', 'bird', 'Chaffinch') doc3.put('_root', 'bird', 'Goldfinch') @@ -693,22 +645,17 @@ describe('Automerge', () => { ['str', 'Greenfinch', '1@aaaa'], ['str', 'Chaffinch', '1@bbbb'], ['str', 'Goldfinch', '1@cccc'] ]) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Chaffinch', conflict: true }, - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true } + { action: 'put', path: ['bird'], value: 'Chaffinch' }, + { action: 'put', path: ['bird'], value: 'Goldfinch' } ]) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true }, - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true } + { action: 'put', path: ['bird'], value: 'Goldfinch' }, ]) - assert.deepEqual(doc3.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true }, - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: true } - ]) - doc1.free(); doc2.free(); doc3.free() + assert.deepEqual(doc3.popPatches(), [ ]) }) it('should allow a conflict to be resolved', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc') doc1.put('_root', 'bird', 'Greenfinch') doc2.put('_root', 'bird', 'Chaffinch') doc3.enablePatches(true) @@ -719,15 +666,14 @@ describe('Automerge', () => { doc3.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) assert.deepEqual(doc3.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Greenfinch', conflict: false }, - { action: 'put', path: ['bird'], value: 'Chaffinch', conflict: true }, - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: false } + { action: 'put', path: ['bird'], value: 'Greenfinch' }, + { action: 
'put', path: ['bird'], value: 'Chaffinch' }, + { action: 'put', path: ['bird'], value: 'Goldfinch' } ]) - doc1.free(); doc2.free(); doc3.free() }) it('should handle a concurrent map key overwrite and delete', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') doc1.put('_root', 'bird', 'Greenfinch') doc2.loadIncremental(doc1.saveIncremental()) doc1.put('_root', 'bird', 'Goldfinch') @@ -742,16 +688,15 @@ describe('Automerge', () => { assert.deepEqual(doc2.getWithType('_root', 'bird'), ['str', 'Goldfinch']) assert.deepEqual(doc2.getAll('_root', 'bird'), [['str', 'Goldfinch', '2@aaaa']]) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: false } + { action: 'put', path: ['bird'], value: 'Goldfinch' } ]) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Goldfinch', conflict: false } + { action: 'put', path: ['bird'], value: 'Goldfinch' } ]) - doc1.free(); doc2.free() }) it('should handle a conflict on a list element', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc'), doc4 = create(true, 'dddd') doc1.putObject('_root', 'birds', ['Thrush', 'Magpie']) const change1 = doc1.saveIncremental() doc2.loadIncremental(change1) @@ -769,18 +714,16 @@ describe('Automerge', () => { assert.deepEqual(doc4.getWithType('1@aaaa', 0), ['str', 'Redwing']) assert.deepEqual(doc4.getAll('1@aaaa', 0), [['str', 'Song Thrush', '4@aaaa'], ['str', 'Redwing', '4@bbbb']]) assert.deepEqual(doc3.popPatches(), [ - { action: 'put', path: ['birds',0], value: 'Song Thrush', conflict: false }, - { action: 'put', path: ['birds',0], value: 'Redwing', conflict: true } + { action: 'put', path: ['birds',0], value: 'Song Thrush' }, + { action: 'put', path: ['birds',0], value: 'Redwing' } ]) 
assert.deepEqual(doc4.popPatches(), [ - { action: 'put', path: ['birds',0], value: 'Redwing', conflict: false }, - { action: 'put', path: ['birds',0], value: 'Redwing', conflict: true } + { action: 'put', path: ['birds',0], value: 'Redwing' }, ]) - doc1.free(); doc2.free(); doc3.free(); doc4.free() }) it('should handle a concurrent list element overwrite and delete', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc'), doc4 = create('dddd') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc'), doc4 = create(true, 'dddd') doc1.putObject('_root', 'birds', ['Parakeet', 'Magpie', 'Thrush']) const change1 = doc1.saveIncremental() doc2.loadIncremental(change1) @@ -801,21 +744,19 @@ describe('Automerge', () => { assert.deepEqual(doc4.getAll('1@aaaa', 2), [['str', 'Song Thrush', '6@aaaa'], ['str', 'Redwing', '6@bbbb']]) assert.deepEqual(doc3.popPatches(), [ { action: 'del', path: ['birds',0], }, - { action: 'put', path: ['birds',1], value: 'Song Thrush', conflict: false }, - { action: 'splice', path: ['birds',0], values: ['Ring-necked parakeet'] }, - { action: 'put', path: ['birds',2], value: 'Redwing', conflict: true } + { action: 'put', path: ['birds',1], value: 'Song Thrush' }, + { action: 'insert', path: ['birds',0], values: ['Ring-necked parakeet'] }, + { action: 'put', path: ['birds',2], value: 'Redwing' } ]) assert.deepEqual(doc4.popPatches(), [ - { action: 'put', path: ['birds',0], value: 'Ring-necked parakeet', conflict: false }, - { action: 'put', path: ['birds',2], value: 'Redwing', conflict: false }, - { action: 'put', path: ['birds',0], value: 'Ring-necked parakeet', conflict: false }, - { action: 'put', path: ['birds',2], value: 'Redwing', conflict: true } + { action: 'put', path: ['birds',0], value: 'Ring-necked parakeet' }, + { action: 'put', path: ['birds',2], value: 'Redwing' }, + { action: 'put', path: ['birds',0], value: 'Ring-necked parakeet' }, ]) - doc1.free(); doc2.free(); 
doc3.free(); doc4.free() }) it('should handle deletion of a conflict value', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb'), doc3 = create('cccc') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), doc3 = create(true, 'cccc') doc1.put('_root', 'bird', 'Robin') doc2.put('_root', 'bird', 'Wren') const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() @@ -826,20 +767,19 @@ describe('Automerge', () => { doc3.loadIncremental(change2) assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Robin', '1@aaaa'], ['str', 'Wren', '1@bbbb']]) assert.deepEqual(doc3.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Robin', conflict: false }, - { action: 'put', path: ['bird'], value: 'Wren', conflict: true } + { action: 'put', path: ['bird'], value: 'Robin' }, + { action: 'put', path: ['bird'], value: 'Wren' } ]) doc3.loadIncremental(change3) assert.deepEqual(doc3.getWithType('_root', 'bird'), ['str', 'Robin']) assert.deepEqual(doc3.getAll('_root', 'bird'), [['str', 'Robin', '1@aaaa']]) assert.deepEqual(doc3.popPatches(), [ - { action: 'put', path: ['bird'], value: 'Robin', conflict: false } + { action: 'put', path: ['bird'], value: 'Robin' } ]) - doc1.free(); doc2.free(); doc3.free() }) it('should handle conflicting nested objects', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') doc1.putObject('_root', 'birds', ['Parakeet']) doc2.putObject('_root', 'birds', { 'Sparrowhawk': 1 }) const change1 = doc1.saveIncremental(), change2 = doc2.saveIncremental() @@ -849,132 +789,115 @@ describe('Automerge', () => { doc2.loadIncremental(change1) assert.deepEqual(doc1.getAll('_root', 'birds'), [['list', '1@aaaa'], ['map', '1@bbbb']]) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['birds'], value: {}, conflict: true }, - { action: 'put', path: ['birds', 'Sparrowhawk'], value: 1, conflict: false } + { action: 'put', path: ['birds'], value: {} }, 
+ { action: 'put', path: ['birds', 'Sparrowhawk'], value: 1 } ]) assert.deepEqual(doc2.getAll('_root', 'birds'), [['list', '1@aaaa'], ['map', '1@bbbb']]) - assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: ['birds'], value: {}, conflict: true }, - { action: 'splice', path: ['birds',0], values: ['Parakeet'] } - ]) - doc1.free(); doc2.free() + assert.deepEqual(doc2.popPatches(), []) }) it('should support date objects', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb'), now = new Date() + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb'), now = new Date() doc1.put('_root', 'createdAt', now) doc2.enablePatches(true) doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.getWithType('_root', 'createdAt'), ['timestamp', now]) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: ['createdAt'], value: now, conflict: false } + { action: 'put', path: ['createdAt'], value: now } ]) - doc1.free(); doc2.free() }) it('should capture local put ops', () => { - const doc1 = create('aaaa') + const doc1 = create(true, 'aaaa') doc1.enablePatches(true) doc1.put('_root', 'key1', 1) doc1.put('_root', 'key1', 2) doc1.put('_root', 'key2', 3) - const map = doc1.putObject('_root', 'map', {}) - const list = doc1.putObject('_root', 'list', []) + doc1.putObject('_root', 'map', {}) + doc1.putObject('_root', 'list', []) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['key1'], value: 1, conflict: false }, - { action: 'put', path: ['key1'], value: 2, conflict: false }, - { action: 'put', path: ['key2'], value: 3, conflict: false }, - { action: 'put', path: ['map'], value: {}, conflict: false }, - { action: 'put', path: ['list'], value: [], conflict: false }, + { action: 'put', path: ['key1'], value: 1 }, + { action: 'put', path: ['key1'], value: 2 }, + { action: 'put', path: ['key2'], value: 3 }, + { action: 'put', path: ['map'], value: {} }, + { action: 'put', path: ['list'], value: [] }, ]) - doc1.free() }) it('should 
capture local insert ops', () => { - const doc1 = create('aaaa') + const doc1 = create(true, 'aaaa') doc1.enablePatches(true) const list = doc1.putObject('_root', 'list', []) doc1.insert(list, 0, 1) doc1.insert(list, 0, 2) doc1.insert(list, 2, 3) - const map = doc1.insertObject(list, 2, {}) - const list2 = doc1.insertObject(list, 2, []) + doc1.insertObject(list, 2, {}) + doc1.insertObject(list, 2, []) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['list'], value: [], conflict: false }, - { action: 'splice', path: ['list', 0], values: [1] }, - { action: 'splice', path: ['list', 0], values: [2] }, - { action: 'splice', path: ['list', 2], values: [3] }, - { action: 'splice', path: ['list', 2], values: [{}] }, - { action: 'splice', path: ['list', 2], values: [[]] }, + { action: 'put', path: ['list'], value: [] }, + { action: 'insert', path: ['list', 0], values: [2,1,[],{},3] }, ]) - doc1.free() }) it('should capture local push ops', () => { - const doc1 = create('aaaa') + const doc1 = create(true, 'aaaa') doc1.enablePatches(true) const list = doc1.putObject('_root', 'list', []) doc1.push(list, 1) - const map = doc1.pushObject(list, {}) - const list2 = doc1.pushObject(list, []) + doc1.pushObject(list, {}) + doc1.pushObject(list, []) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['list'], value: [], conflict: false }, - { action: 'splice', path: ['list',0], values: [1,{},[]] }, + { action: 'put', path: ['list'], value: [] }, + { action: 'insert', path: ['list',0], values: [1,{},[]] }, ]) - doc1.free() }) it('should capture local splice ops', () => { - const doc1 = create('aaaa') + const doc1 = create(true, 'aaaa') doc1.enablePatches(true) const list = doc1.putObject('_root', 'list', []) doc1.splice(list, 0, 0, [1, 2, 3, 4]) doc1.splice(list, 1, 2) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['list'], value: [], conflict: false }, - { action: 'splice', path: ['list',0], values: [1,2,3,4] }, - { action: 'del', path: 
['list',1] }, - { action: 'del', path: ['list',1] }, + { action: 'put', path: ['list'], value: [] }, + { action: 'insert', path: ['list',0], values: [1,4] }, ]) - doc1.free() }) it('should capture local increment ops', () => { - const doc1 = create('aaaa') + const doc1 = create(true, 'aaaa') doc1.enablePatches(true) doc1.put('_root', 'counter', 2, 'counter') doc1.increment('_root', 'counter', 4) assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['counter'], value: 2, conflict: false }, + { action: 'put', path: ['counter'], value: 2 }, { action: 'inc', path: ['counter'], value: 4 }, ]) - doc1.free() }) it('should capture local delete ops', () => { - const doc1 = create('aaaa') + const doc1 = create(true, 'aaaa') doc1.enablePatches(true) doc1.put('_root', 'key1', 1) doc1.put('_root', 'key2', 2) doc1.delete('_root', 'key1') doc1.delete('_root', 'key2') assert.deepEqual(doc1.popPatches(), [ - { action: 'put', path: ['key1'], value: 1, conflict: false }, - { action: 'put', path: ['key2'], value: 2, conflict: false }, + { action: 'put', path: ['key1'], value: 1 }, + { action: 'put', path: ['key2'], value: 2 }, { action: 'del', path: ['key1'], }, { action: 'del', path: ['key2'], }, ]) - doc1.free() }) it('should support counters in a map', () => { - const doc1 = create('aaaa'), doc2 = create('bbbb') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') doc2.enablePatches(true) doc1.put('_root', 'starlings', 2, 'counter') doc2.loadIncremental(doc1.saveIncremental()) @@ -982,14 +905,13 @@ describe('Automerge', () => { doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.getWithType('_root', 'starlings'), ['counter', 3]) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: ['starlings'], value: 2, conflict: false }, + { action: 'put', path: ['starlings'], value: 2 }, { action: 'inc', path: ['starlings'], value: 1 } ]) - doc1.free(); doc2.free() }) it('should support counters in a list', () => { - const doc1 = create('aaaa'), 
doc2 = create('bbbb') + const doc1 = create(true, 'aaaa'), doc2 = create(true, 'bbbb') doc2.enablePatches(true) const list = doc1.putObject('_root', 'list', []) doc2.loadIncremental(doc1.saveIncremental()) @@ -1001,12 +923,11 @@ describe('Automerge', () => { doc2.loadIncremental(doc1.saveIncremental()) assert.deepEqual(doc2.popPatches(), [ - { action: 'put', path: ['list'], value: [], conflict: false }, - { action: 'splice', path: ['list',0], values: [1] }, + { action: 'put', path: ['list'], value: [] }, + { action: 'insert', path: ['list',0], values: [1] }, { action: 'inc', path: ['list',0], value: 2 }, { action: 'inc', path: ['list',0], value: -5 }, ]) - doc1.free(); doc2.free() }) it('should delete a counter from a map') // TODO @@ -1014,7 +935,7 @@ describe('Automerge', () => { describe('sync', () => { it('should send a sync message implying no local data', () => { - const doc = create() + const doc = create(true) const s1 = initSyncState() const m1 = doc.generateSyncMessage(s1) if (m1 === null) { throw new RangeError("message should not be null") } @@ -1028,7 +949,7 @@ describe('Automerge', () => { }) it('should not reply if we have no data as well', () => { - const n1 = create(), n2 = create() + const n1 = create(true), n2 = create(true) const s1 = initSyncState(), s2 = initSyncState() const m1 = n1.generateSyncMessage(s1) if (m1 === null) { throw new RangeError("message should not be null") } @@ -1038,7 +959,7 @@ describe('Automerge', () => { }) it('repos with equal heads do not need a reply message', () => { - const n1 = create(), n2 = create() + const n1 = create(true), n2 = create(true) const s1 = initSyncState(), s2 = initSyncState() // make two nodes with the same changes @@ -1063,7 +984,7 @@ describe('Automerge', () => { }) it('n1 should offer all changes to n2 when starting from nothing', () => { - const n1 = create(), n2 = create() + const n1 = create(true), n2 = create(true) // make changes for n1 that n2 should request const list = 
n1.putObject("_root", "n", []) @@ -1079,7 +1000,7 @@ describe('Automerge', () => { }) it('should sync peers where one has commits the other does not', () => { - const n1 = create(), n2 = create() + const n1 = create(true), n2 = create(true) // make changes for n1 that n2 should request const list = n1.putObject("_root", "n", []) @@ -1096,7 +1017,7 @@ describe('Automerge', () => { it('should work with prior sync state', () => { // create & synchronize two nodes - const n1 = create(), n2 = create() + const n1 = create(true), n2 = create(true) const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { @@ -1119,10 +1040,10 @@ describe('Automerge', () => { it('should not generate messages once synced', () => { // create & synchronize two nodes - const n1 = create('abc123'), n2 = create('def456') + const n1 = create(true, 'abc123'), n2 = create(true, 'def456') const s1 = initSyncState(), s2 = initSyncState() - let message, patch + let message for (let i = 0; i < 5; i++) { n1.put("_root", "x", i) n1.commit("", 0) @@ -1167,7 +1088,7 @@ describe('Automerge', () => { it('should allow simultaneous messages during synchronization', () => { // create & synchronize two nodes - const n1 = create('abc123'), n2 = create('def456') + const n1 = create(true, 'abc123'), n2 = create(true, 'def456') const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { @@ -1246,7 +1167,7 @@ describe('Automerge', () => { }) it('should assume sent changes were received until we hear otherwise', () => { - const n1 = create('01234567'), n2 = create('89abcdef') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') const s1 = initSyncState(), s2 = initSyncState() let message = null @@ -1277,7 +1198,7 @@ describe('Automerge', () => { it('should work regardless of who initiates the exchange', () => { // create & synchronize two nodes - const n1 = create(), n2 = create() + const n1 = create(true), n2 = create(true) const s1 = initSyncState(), s2 = 
initSyncState() for (let i = 0; i < 5; i++) { @@ -1305,8 +1226,8 @@ describe('Automerge', () => { // lastSync is undefined. // create two peers both with divergent commits - const n1 = create('01234567'), n2 = create('89abcdef') - const s1 = initSyncState(), s2 = initSyncState() + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') + //const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { n1.put("_root", "x", i) @@ -1338,7 +1259,7 @@ describe('Automerge', () => { // lastSync is c9. // create two peers both with divergent commits - const n1 = create('01234567'), n2 = create('89abcdef') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { @@ -1367,7 +1288,7 @@ describe('Automerge', () => { }) it('should ensure non-empty state after sync', () => { - const n1 = create('01234567'), n2 = create('89abcdef') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') const s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 3; i++) { @@ -1386,7 +1307,7 @@ describe('Automerge', () => { // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 // n2 has changes {c0, c1, c2}, n1's lastSync is c5, and n2's lastSync is c2. 
// we want to successfully sync (n1) with (r), even though (n1) believes it's talking to (n2) - const n1 = create('01234567'), n2 = create('89abcdef') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') let s1 = initSyncState() const s2 = initSyncState() @@ -1431,10 +1352,11 @@ describe('Automerge', () => { sync(n1, r, s1, rSyncState) assert.deepStrictEqual(n1.getHeads(), r.getHeads()) assert.deepStrictEqual(n1.materialize(), r.materialize()) + r = null }) it('should re-sync after one node experiences data loss without disconnecting', () => { - const n1 = create('01234567'), n2 = create('89abcdef') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') const s1 = initSyncState(), s2 = initSyncState() // n1 makes three changes, which we sync to n2 @@ -1448,7 +1370,7 @@ describe('Automerge', () => { assert.deepStrictEqual(n1.getHeads(), n2.getHeads()) assert.deepStrictEqual(n1.materialize(), n2.materialize()) - const n2AfterDataLoss = create('89abcdef') + const n2AfterDataLoss = create(true, '89abcdef') // "n2" now has no data, but n1 still thinks it does. 
Note we don't do // decodeSyncState(encodeSyncState(s1)) in order to simulate data loss without disconnecting @@ -1458,7 +1380,7 @@ describe('Automerge', () => { }) it('should handle changes concurrent to the last sync heads', () => { - const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef'), n3 = create(true, 'fedcba98') const s12 = initSyncState(), s21 = initSyncState(), s23 = initSyncState(), s32 = initSyncState() // Change 1 is known to all three nodes @@ -1482,7 +1404,7 @@ describe('Automerge', () => { // simulate transmission over a network (see https://github.com/automerge/automerge/pull/362) let change = n3.getLastLocalChange() if (change === null) throw new RangeError("no local change") - //@ts-ignore + //ts-ignore if (typeof Buffer === 'function') change = Buffer.from(change) if (change === undefined) { throw new RangeError("last local change failed") } n2.applyChanges([change]) @@ -1494,12 +1416,12 @@ describe('Automerge', () => { }) it('should handle histories with lots of branching and merging', () => { - const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('fedcba98') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef'), n3 = create(true, 'fedcba98') n1.put("_root", "x", 0); n1.commit("", 0) - let change1 = n1.getLastLocalChange() + const change1 = n1.getLastLocalChange() if (change1 === null) throw new RangeError("no local change") n2.applyChanges([change1]) - let change2 = n1.getLastLocalChange() + const change2 = n1.getLastLocalChange() if (change2 === null) throw new RangeError("no local change") n3.applyChanges([change2]) n3.put("_root", "x", 1); n3.commit("", 0) @@ -1525,7 +1447,7 @@ describe('Automerge', () => { sync(n1, n2, s1, s2) // Having n3's last change concurrent to the last sync heads forces us into the slower code path - const change3 = n2.getLastLocalChange() + const change3 = n3.getLastLocalChange() if (change3 
=== null) throw new RangeError("no local change") n2.applyChanges([change3]) n1.put("_root", "n1", "final"); n1.commit("", 0) @@ -1542,7 +1464,7 @@ describe('Automerge', () => { // `-- n2 // where n2 is a false positive in the Bloom filter containing {n1}. // lastSync is c9. - let n1 = create('01234567'), n2 = create('89abcdef') + let n1 = create(true, '01234567'), n2 = create(true, '89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 10; i++) { @@ -1556,7 +1478,6 @@ describe('Automerge', () => { const n2up = n2.clone('89abcdef'); n2up.put("_root", "x", `${i} @ n2`); n2up.commit("", 0) if (new BloomFilter(n1up.getHeads()).containsHash(n2up.getHeads()[0])) { - n1.free(); n2.free() n1 = n1up; n2 = n2up; break } } @@ -1578,8 +1499,8 @@ describe('Automerge', () => { // `-- n2c1 <-- n2c2 // where n2c1 is a false positive in the Bloom filter containing {n1c1, n1c2}. // lastSync is c9. - n1 = create('01234567') - n2 = create('89abcdef') + n1 = create(true, '01234567') + n2 = create(true, '89abcdef') s1 = initSyncState() s2 = initSyncState() for (let i = 0; i < 10; i++) { @@ -1605,7 +1526,6 @@ describe('Automerge', () => { n1hash2 = n1us2.getHeads()[0]; n2hash2 = n2us2.getHeads()[0] if (new BloomFilter([n1hash1, n1hash2]).containsHash(n2hash1)) { - n1.free(); n2.free() n1 = n1us2; n2 = n2us2; break } } @@ -1649,7 +1569,7 @@ describe('Automerge', () => { assert.strictEqual(decodeSyncMessage(m2).changes.length, 1) // only n2c2; change n2c1 is not sent // n3 is a node that doesn't have the missing change. 
Nevertheless n1 is going to ask n3 for it - const n3 = create('fedcba98'), s13 = initSyncState(), s31 = initSyncState() + const n3 = create(true, 'fedcba98'), s13 = initSyncState(), s31 = initSyncState() sync(n1, n3, s13, s31) assert.deepStrictEqual(n1.getHeads(), [n1hash2]) assert.deepStrictEqual(n3.getHeads(), [n1hash2]) @@ -1662,7 +1582,7 @@ describe('Automerge', () => { // `-- n2c1 <-- n2c2 <-- n2c3 // where n2c2 is a false positive in the Bloom filter containing {n1c1, n1c2, n1c3}. // lastSync is c4. - let n1 = create('01234567'), n2 = create('89abcdef') + let n1 = create(true, '01234567'), n2 = create(true, '89abcdef') let s1 = initSyncState(), s2 = initSyncState() let n1hash3, n2hash3 @@ -1698,7 +1618,6 @@ describe('Automerge', () => { n1hash3 = n1us3.getHeads()[0]; n2hash3 = n2us3.getHeads()[0] if (new BloomFilter([n1hash1, n1hash2, n1hash3]).containsHash(n2hash2)) { - n1.free(); n2.free(); n1 = n1us3; n2 = n2us3; break } } @@ -1716,7 +1635,8 @@ describe('Automerge', () => { // `-- n2c1 <-- n2c2 <-- n2c3 // where n2c1 and n2c2 are both false positives in the Bloom filter containing {c5}. // lastSync is c4. - let n1 = create('01234567'), n2 = create('89abcdef') + const n1 = create(true, '01234567') + let n2 = create(true, '89abcdef') let s1 = initSyncState(), s2 = initSyncState() for (let i = 0; i < 5; i++) { @@ -1756,7 +1676,7 @@ describe('Automerge', () => { // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ // `-- n2 // where n2 causes a false positive in the Bloom filter containing {n1}. - let n1 = create('01234567'), n2 = create('89abcdef') + let n1 = create(true, '01234567'), n2 = create(true, '89abcdef') let s1 = initSyncState(), s2 = initSyncState() let message @@ -1816,10 +1736,12 @@ describe('Automerge', () => { // n1 has {c0, c1, c2, n1c1, n1c2, n1c3, n2c1, n2c2}; // n2 has {c0, c1, c2, n1c1, n1c2, n2c1, n2c2, n2c3}; // n3 has {c0, c1, c2, n3c1, n3c2, n3c3}. 
- const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210') - let s13 = initSyncState(), s12 = initSyncState(), s21 = initSyncState() + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef'), n3 = create(true, '76543210') + let s13 = initSyncState() + const s12 = initSyncState() + const s21 = initSyncState() let s32 = initSyncState(), s31 = initSyncState(), s23 = initSyncState() - let message1, message2, message3 + let message1, message3 for (let i = 0; i < 3; i++) { n1.put("_root", "x", i); n1.commit("", 0) @@ -1872,7 +1794,7 @@ describe('Automerge', () => { n2.receiveSyncMessage(s23, encodeSyncMessage(modifiedMessage)) // n2 replies to n3, sending only n2c3 (the one change that n2 has but n1 doesn't) - message2 = n2.generateSyncMessage(s23) + const message2 = n2.generateSyncMessage(s23) if (message2 === null) { throw new RangeError("message should not be null") } assert.strictEqual(decodeSyncMessage(message2).changes.length, 1) // {n2c3} n3.receiveSyncMessage(s32, message2) @@ -1886,7 +1808,7 @@ describe('Automerge', () => { }) it('should allow any change to be requested', () => { - const n1 = create('01234567'), n2 = create('89abcdef') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') const s1 = initSyncState(), s2 = initSyncState() let message = null @@ -1914,7 +1836,7 @@ describe('Automerge', () => { }) it('should ignore requests for a nonexistent change', () => { - const n1 = create('01234567'), n2 = create('89abcdef') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef') const s1 = initSyncState(), s2 = initSyncState() let message = null @@ -1937,9 +1859,9 @@ describe('Automerge', () => { // ,-- c1 <-- c2 // c0 <-+ // `-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 - const n1 = create('01234567'), n2 = create('89abcdef'), n3 = create('76543210') + const n1 = create(true, '01234567'), n2 = create(true, '89abcdef'), n3 = create(true, '76543210') let s1 = initSyncState(), s2 = initSyncState() - let 
msg, decodedMsg + let msg n1.put("_root", "x", 0); n1.commit("", 0) n3.applyChanges(n3.getChangesAdded(n1)) // merge() @@ -1978,13 +1900,14 @@ describe('Automerge', () => { n2.receiveSyncMessage(s2, msg) msg = n2.generateSyncMessage(s2) if (msg === null) { throw new RangeError("message should not be null") } - decodedMsg = decodeSyncMessage(msg) + const decodedMsg = decodeSyncMessage(msg) decodedMsg.changes = [change5, change6] msg = encodeSyncMessage(decodedMsg) const sentHashes: any = {} sentHashes[decodeChange(change5).hash] = true sentHashes[decodeChange(change6).hash] = true + s2.sentHashes = sentHashes n1.receiveSyncMessage(s1, msg) assert.deepStrictEqual(s1.sharedHeads, [c2, c6].sort()) @@ -2006,5 +1929,245 @@ describe('Automerge', () => { assert.deepStrictEqual(s1.sharedHeads, [c2, c8].sort()) }) }) + + it('can handle overlappying splices', () => { + const doc = create(true) + doc.enablePatches(true) + let mat : any = doc.materialize("/") + doc.putObject("/", "text", "abcdefghij") + doc.splice("/text", 2, 2, "00") + doc.splice("/text", 3, 5, "11") + mat = doc.applyPatches(mat) + assert.deepEqual(mat.text, "ab011ij") + }) + + it('can handle utf16 text', () => { + const doc = create(true) + doc.enablePatches(true) + let mat : any = doc.materialize("/") + + doc.putObject("/", "width1", "AAAAAA") + doc.putObject("/", "width2", "🐻🐻🐻🐻🐻🐻") + doc.putObject("/", "mixed", "A🐻A🐻A🐻") + + assert.deepEqual(doc.length("/width1"), 6); + assert.deepEqual(doc.length("/width2"), 12); + assert.deepEqual(doc.length("/mixed"), 9); + + const heads1 = doc.getHeads(); + + mat = doc.applyPatches(mat) + + const remote = load(doc.save(), true) + remote.enablePatches(true) + let r_mat : any = remote.materialize("/") + + assert.deepEqual(mat, { width1: "AAAAAA", width2: "🐻🐻🐻🐻🐻🐻", mixed: "A🐻A🐻A🐻" }) + assert.deepEqual(mat.width1.slice(2,4), "AA") + assert.deepEqual(mat.width2.slice(2,4), "🐻") + assert.deepEqual(mat.mixed.slice(1,4), "🐻A") + + assert.deepEqual(r_mat, { width1: "AAAAAA", 
width2: "🐻🐻🐻🐻🐻🐻", mixed: "A🐻A🐻A🐻" }) + assert.deepEqual(r_mat.width1.slice(2,4), "AA") + assert.deepEqual(r_mat.width2.slice(2,4), "🐻") + assert.deepEqual(r_mat.mixed.slice(1,4), "🐻A") + + doc.splice("/width1", 2, 2, "🐻") + doc.splice("/width2", 2, 2, "A🐻A") + doc.splice("/mixed", 3, 3, "X") + + mat = doc.applyPatches(mat) + remote.loadIncremental(doc.saveIncremental()); + r_mat = remote.applyPatches(r_mat) + + assert.deepEqual(mat.width1, "AA🐻AA") + assert.deepEqual(mat.width2, "🐻A🐻A🐻🐻🐻🐻") + assert.deepEqual(mat.mixed, "A🐻XA🐻") + + assert.deepEqual(r_mat.width1, "AA🐻AA") + assert.deepEqual(r_mat.width2, "🐻A🐻A🐻🐻🐻🐻") + assert.deepEqual(r_mat.mixed, "A🐻XA🐻") + assert.deepEqual(remote.length("/width1"), 6); + assert.deepEqual(remote.length("/width2"), 14); + assert.deepEqual(remote.length("/mixed"), 7); + + // when indexing in the middle of a multibyte char it indexes at the char before + doc.splice("/width2", 4, 1, "X") + mat = doc.applyPatches(mat) + remote.loadIncremental(doc.saveIncremental()); + r_mat = remote.applyPatches(r_mat) + + assert.deepEqual(mat.width2, "🐻AXA🐻🐻🐻🐻") + + assert.deepEqual(doc.length("/width1", heads1), 6); + assert.deepEqual(doc.length("/width2", heads1), 12); + assert.deepEqual(doc.length("/mixed", heads1), 9); + + assert.deepEqual(doc.get("/mixed", 0), 'A'); + assert.deepEqual(doc.get("/mixed", 1), '🐻'); + assert.deepEqual(doc.get("/mixed", 2), '🐻'); + assert.deepEqual(doc.get("/mixed", 3), 'X'); + assert.deepEqual(doc.get("/mixed", 1, heads1), '🐻'); + assert.deepEqual(doc.get("/mixed", 2, heads1), '🐻'); + assert.deepEqual(doc.get("/mixed", 3, heads1), 'A'); + assert.deepEqual(doc.get("/mixed", 4, heads1), '🐻'); + }) + + it('can handle non-characters embedded in text', () => { + const change : any = { + ops: [ + { action: 'makeText', obj: '_root', key: 'bad_text', pred: [] }, + { action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'A', pred: [] }, + { action: 'set', obj: '1@aaaa', elemId: '2@aaaa', insert: true, value: 
'BBBBB', pred: [] }, + { action: 'makeMap', obj: '1@aaaa', elemId: '3@aaaa', insert: true, pred: [] }, + { action: 'set', obj: '1@aaaa', elemId: '4@aaaa', insert: true, value: 'C', pred: [] } + ], + actor: 'aaaa', + seq: 1, + startOp: 1, + time: 0, + message: null, + deps: [] + } + const doc = load(encodeChange(change), true); + doc.enablePatches(true) + const mat : any = doc.materialize("/") + + // multi - char strings appear as a span of strings + // non strings appear as an object replacement unicode char + assert.deepEqual(mat.bad_text, 'ABBBBBC') + assert.deepEqual(doc.text("/bad_text"), 'ABBBBBC') + assert.deepEqual(doc.materialize("/bad_text"), 'ABBBBBC') + + // deleting in the middle of a multi-byte character will delete the whole thing + const doc1 = doc.fork() + doc1.splice("/bad_text", 3, 3, "X"); + assert.deepEqual(doc1.text("/bad_text"), 'AXC') + + // deleting in the middle of a multi-byte character will delete the whole thing + // and characters past its end + const doc2 = doc.fork() + doc2.splice("/bad_text", 3, 4, "X"); + assert.deepEqual(doc2.text("/bad_text"), 'AXC') + + const doc3 = doc.fork() + doc3.splice("/bad_text", 3, 5, "X"); + assert.deepEqual(doc3.text("/bad_text"), 'AX') + + // inserting in the middle of a mutli-bytes span inserts after + const doc4 = doc.fork() + doc4.splice("/bad_text", 3, 0, "X"); + assert.deepEqual(doc4.text("/bad_text"), 'ABBBBBXC') + + // deleting into the middle of a multi-byte span deletes the whole thing + const doc5 = doc.fork() + doc5.splice("/bad_text", 0, 2, "X"); + assert.deepEqual(doc5.text("/bad_text"), 'XC') + + // you can access elements in the text by text index + assert.deepEqual(doc5.getAll("/bad_text", 1), [['map', '4@aaaa' ]]) + assert.deepEqual(doc5.getAll("/bad_text", 2, doc.getHeads()), [['str', 'BBBBB', '3@aaaa' ]]) + }) + }) + + describe("the legacy text implementation", () => { + const root = "_root" + class FakeText { + elems: Array + constructor(elems: string | Array) { + if (typeof elems 
=== "string") { + this.elems = Array.from(elems) + } else { + this.elems = elems + } + } + } + it("should materialize old style text", () => { + let doc = create(false); + doc.registerDatatype("text", (e: any) => new FakeText(e)) + doc.enablePatches(true) + let txt = doc.putObject(root, "text", "") + doc.splice(txt, 0, 0, "hello") + let mat: any = doc.materialize() + assert.deepEqual(mat.text, new FakeText("hello")) + }) + + it("should apply patches to old style text", () => { + let doc = create(false); + doc.registerDatatype("text", (e: any) => new FakeText(e)) + doc.enablePatches(true) + let mat : any = doc.materialize("/") + doc.putObject("/", "text", "abcdefghij") + doc.splice("/text", 2, 2, "00") + doc.splice("/text", 3, 5, "11") + mat = doc.applyPatches(mat) + assert.deepEqual(mat.text, new FakeText("ab011ij")) + }) + + it("should apply list patches to old style text", () => { + let doc = create(false); + doc.registerDatatype("text", (e: any) => new FakeText(e)) + doc.enablePatches(true) + let mat : any = doc.materialize("/") + doc.putObject("/", "text", "abc") + doc.insert("/text", 0, "0") + doc.insert("/text", 1, "1") + mat = doc.applyPatches(mat) + assert.deepEqual(mat.text, new FakeText("01abc")) + }) + + it("should allow inserting using list methods", () => { + let doc = create(false); + doc.registerDatatype("text", (e: any) => new FakeText(e)) + doc.enablePatches(true) + let mat : any = doc.materialize("/") + const txt = doc.putObject("/", "text", "abc") + doc.insert(txt, 3, "d") + doc.insert(txt, 0, "0") + mat = doc.applyPatches(mat) + assert.deepEqual(mat.text, new FakeText("0abcd")) + }) + + it("should allow inserting objects in old style text", () => { + let doc = create(false); + doc.registerDatatype("text", (e: any) => new FakeText(e)) + doc.enablePatches(true) + let mat : any = doc.materialize("/") + const txt = doc.putObject("/", "text", "abc") + doc.insertObject(txt, 0, {"key": "value"}) + doc.insertObject(txt, 2, ["elem"]) + doc.insert(txt, 2, 
"m") + mat = doc.applyPatches(mat) + assert.deepEqual(mat.text, new FakeText([ + {"key": "value"}, "a", "m", ["elem"], "b", "c" + ])) + }) + + class RawString { + val: string; + constructor(s: string) { + this.val = s + } + } + + it("should allow registering a different type for strings", () => { + let doc = create(false); + doc.registerDatatype("str", (e: any) => new RawString(e)) + doc.enablePatches(true) + doc.put("/", "key", "value") + let mat: any = doc.materialize() + assert.deepStrictEqual(mat.key, new RawString("value")) + }) + + it("should generate patches correctly for raw strings", () => { + let doc = create(false); + doc.registerDatatype("str", (e: any) => new RawString(e)) + doc.enablePatches(true) + let mat: any = doc.materialize() + doc.put("/", "key", "value") + mat = doc.applyPatches(mat) + assert.deepStrictEqual(mat.key, new RawString("value")) + }) + }) }) diff --git a/automerge-wasm/tsconfig.json b/rust/automerge-wasm/tsconfig.json similarity index 100% rename from automerge-wasm/tsconfig.json rename to rust/automerge-wasm/tsconfig.json diff --git a/automerge/.gitignore b/rust/automerge/.gitignore similarity index 100% rename from automerge/.gitignore rename to rust/automerge/.gitignore diff --git a/automerge/Cargo.toml b/rust/automerge/Cargo.toml similarity index 86% rename from automerge/Cargo.toml rename to rust/automerge/Cargo.toml index 959ce37b..0c10cc2b 100644 --- a/automerge/Cargo.toml +++ b/rust/automerge/Cargo.toml @@ -1,17 +1,17 @@ [package] name = "automerge" -version = "0.1.0" +version = "0.3.0" edition = "2021" license = "MIT" repository = "https://github.com/automerge/automerge-rs" documentation = "https://automerge.org/automerge-rs/automerge/" rust-version = "1.57.0" description = "A JSON-like data structure (a CRDT) that can be modified concurrently by different users, and merged again automatically" -readme = "../README.md" +readme = "./README.md" [features] optree-visualisation = ["dot", "rand"] -wasm = ["js-sys", 
"wasm-bindgen", "web-sys", "uuid/wasm-bindgen"] +wasm = ["js-sys", "wasm-bindgen", "web-sys", "uuid/js"] [dependencies] hex = "^0.4.3" @@ -20,7 +20,7 @@ sha2 = "^0.10.0" thiserror = "^1.0.16" itertools = "^0.10.3" flate2 = "^1.0.22" -uuid = { version = "^0.8.2", features=["v4", "serde"] } +uuid = { version = "^1.2.1", features=["v4", "serde"] } smol_str = { version = "^0.1.21", features=["serde"] } tracing = { version = "^0.1.29" } fxhash = "^0.2.1" @@ -43,10 +43,11 @@ pretty_assertions = "1.0.0" proptest = { version = "^1.0.0", default-features = false, features = ["std"] } serde_json = { version = "^1.0.73", features=["float_roundtrip"], default-features=true } maplit = { version = "^1.0" } -decorum = "0.3.1" -criterion = "0.3.5" +criterion = "0.4.0" test-log = { version = "0.2.10", features=["trace"], default-features = false} tracing-subscriber = {version = "0.3.9", features = ["fmt", "env-filter"] } +automerge-test = { path = "../automerge-test" } +prettytable = "0.10.0" [[bench]] name = "range" diff --git a/rust/automerge/README.md b/rust/automerge/README.md new file mode 100644 index 00000000..97dbe4f8 --- /dev/null +++ b/rust/automerge/README.md @@ -0,0 +1,5 @@ +# Automerge + +Automerge is a library of data structures for building collaborative +[local-first](https://www.inkandswitch.com/local-first/) applications. This is +the Rust implementation. 
See [automerge.org](https://automerge.org/) diff --git a/automerge/benches/map.rs b/rust/automerge/benches/map.rs similarity index 100% rename from automerge/benches/map.rs rename to rust/automerge/benches/map.rs diff --git a/automerge/benches/range.rs b/rust/automerge/benches/range.rs similarity index 62% rename from automerge/benches/range.rs rename to rust/automerge/benches/range.rs index aec5c293..008ae159 100644 --- a/automerge/benches/range.rs +++ b/rust/automerge/benches/range.rs @@ -1,4 +1,4 @@ -use automerge::{transaction::Transactable, Automerge, ROOT}; +use automerge::{transaction::Transactable, Automerge, ReadDoc, ROOT}; use criterion::{black_box, criterion_group, criterion_main, Criterion}; fn doc(n: u64) -> Automerge { @@ -16,36 +16,20 @@ fn range(doc: &Automerge) { range.for_each(drop); } -fn range_rev(doc: &Automerge) { - let range = doc.values(ROOT).rev(); - range.for_each(drop); -} - fn range_at(doc: &Automerge) { let range = doc.values_at(ROOT, &doc.get_heads()); range.for_each(drop); } -fn range_at_rev(doc: &Automerge) { - let range = doc.values_at(ROOT, &doc.get_heads()).rev(); - range.for_each(drop); -} - fn criterion_benchmark(c: &mut Criterion) { let n = 100_000; let doc = doc(n); c.bench_function(&format!("range {}", n), |b| { b.iter(|| range(black_box(&doc))) }); - c.bench_function(&format!("range rev {}", n), |b| { - b.iter(|| range_rev(black_box(&doc))) - }); c.bench_function(&format!("range_at {}", n), |b| { b.iter(|| range_at(black_box(&doc))) }); - c.bench_function(&format!("range_at rev {}", n), |b| { - b.iter(|| range_at_rev(black_box(&doc))) - }); } criterion_group!(benches, criterion_benchmark); diff --git a/automerge/benches/sync.rs b/rust/automerge/benches/sync.rs similarity index 96% rename from automerge/benches/sync.rs rename to rust/automerge/benches/sync.rs index 483fd2b4..13965792 100644 --- a/automerge/benches/sync.rs +++ b/rust/automerge/benches/sync.rs @@ -1,4 +1,8 @@ -use automerge::{sync, transaction::Transactable, 
Automerge, ROOT}; +use automerge::{ + sync::{self, SyncDoc}, + transaction::Transactable, + Automerge, ROOT, +}; use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; #[derive(Default)] diff --git a/automerge/examples/README.md b/rust/automerge/examples/README.md similarity index 100% rename from automerge/examples/README.md rename to rust/automerge/examples/README.md diff --git a/automerge/examples/quickstart.rs b/rust/automerge/examples/quickstart.rs similarity index 89% rename from automerge/examples/quickstart.rs rename to rust/automerge/examples/quickstart.rs index 56d24858..fcb23d5e 100644 --- a/automerge/examples/quickstart.rs +++ b/rust/automerge/examples/quickstart.rs @@ -2,13 +2,13 @@ use automerge::transaction::CommitOptions; use automerge::transaction::Transactable; use automerge::AutomergeError; use automerge::ObjType; -use automerge::{Automerge, ROOT}; +use automerge::{Automerge, ReadDoc, ROOT}; // Based on https://automerge.github.io/docs/quickstart fn main() { let mut doc1 = Automerge::new(); let (cards, card1) = doc1 - .transact_with::<_, _, AutomergeError, _, ()>( + .transact_with::<_, _, AutomergeError, _>( |_| CommitOptions::default().with_message("Add card".to_owned()), |tx| { let cards = tx.put_object(ROOT, "cards", ObjType::List).unwrap(); @@ -30,7 +30,7 @@ fn main() { let binary = doc1.save(); let mut doc2 = Automerge::load(&binary).unwrap(); - doc1.transact_with::<_, _, AutomergeError, _, ()>( + doc1.transact_with::<_, _, AutomergeError, _>( |_| CommitOptions::default().with_message("Mark card as done".to_owned()), |tx| { tx.put(&card1, "done", true)?; @@ -39,7 +39,7 @@ fn main() { ) .unwrap(); - doc2.transact_with::<_, _, AutomergeError, _, ()>( + doc2.transact_with::<_, _, AutomergeError, _>( |_| CommitOptions::default().with_message("Delete card".to_owned()), |tx| { tx.delete(&cards, 0)?; diff --git a/automerge/examples/watch.rs b/rust/automerge/examples/watch.rs similarity index 79% rename from 
automerge/examples/watch.rs rename to rust/automerge/examples/watch.rs index ccc480e6..4cd8f4ea 100644 --- a/automerge/examples/watch.rs +++ b/rust/automerge/examples/watch.rs @@ -3,6 +3,7 @@ use automerge::transaction::Transactable; use automerge::Automerge; use automerge::AutomergeError; use automerge::Patch; +use automerge::ReadDoc; use automerge::VecOpObserver; use automerge::ROOT; @@ -11,7 +12,7 @@ fn main() { // a simple scalar change in the root object let mut result = doc - .transact_with::<_, _, AutomergeError, _, VecOpObserver>( + .transact_observed_with::<_, _, AutomergeError, _, VecOpObserver>( |_result| CommitOptions::default(), |tx| { tx.put(ROOT, "hello", "world").unwrap(); @@ -36,7 +37,7 @@ fn main() { tx.insert(&list, 1, "woo").unwrap(); let m = tx.insert_object(&list, 2, automerge::ObjType::Map).unwrap(); tx.put(&m, "hi", 2).unwrap(); - let patches = tx.op_observer.take_patches(); + let patches = tx.observer().take_patches(); let _heads3 = tx.commit_with(CommitOptions::default()); get_changes(&doc, patches); } @@ -66,6 +67,17 @@ fn get_changes(doc: &Automerge, patches: Vec) { doc.path_to_object(&obj) ) } + Patch::Splice { + obj, index, value, .. + } => { + println!( + "splice '{:?}' at {:?} in obj {:?}, object path {:?}", + value, + index, + obj, + doc.path_to_object(&obj) + ) + } Patch::Increment { obj, prop, value, .. } => { @@ -83,6 +95,12 @@ fn get_changes(doc: &Automerge, patches: Vec) { obj, doc.path_to_object(&obj) ), + Patch::Expose { obj, prop, .. 
} => println!( + "expose {:?} in obj {:?}, object path {:?}", + prop, + obj, + doc.path_to_object(&obj) + ), } } } diff --git a/rust/automerge/fuzz/.gitignore b/rust/automerge/fuzz/.gitignore new file mode 100644 index 00000000..2eb15f8e --- /dev/null +++ b/rust/automerge/fuzz/.gitignore @@ -0,0 +1,3 @@ +target +corpus +coverage diff --git a/rust/automerge/fuzz/Cargo.toml b/rust/automerge/fuzz/Cargo.toml new file mode 100644 index 00000000..3461e9f3 --- /dev/null +++ b/rust/automerge/fuzz/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "automerge-fuzz" +version = "0.0.0" +publish = false +edition = "2021" + +[package.metadata] +cargo-fuzz = true + +[dependencies] +libfuzzer-sys = "0.4" +leb128 = "^0.2.5" +sha2 = "^0.10.0" + +[dependencies.automerge] +path = ".." + +# Prevent this from interfering with workspaces +[workspace] +members = ["."] + +[profile.release] +debug = 1 + +[[bin]] +name = "load" +path = "fuzz_targets/load.rs" +test = false +doc = false \ No newline at end of file diff --git a/rust/automerge/fuzz/fuzz_targets/load.rs b/rust/automerge/fuzz/fuzz_targets/load.rs new file mode 100644 index 00000000..0dea2624 --- /dev/null +++ b/rust/automerge/fuzz/fuzz_targets/load.rs @@ -0,0 +1,37 @@ +#![no_main] + +use sha2::{Sha256, Digest}; +use automerge::{Automerge}; +use libfuzzer_sys::arbitrary::{Arbitrary, Result, Unstructured}; +use libfuzzer_sys::fuzz_target; + +#[derive(Debug)] +struct DocumentChunk { + bytes: Vec, +} + +fn add_header(typ: u8, data: &[u8]) -> Vec { + let mut input = vec![u8::from(typ)]; + leb128::write::unsigned(&mut input, data.len() as u64).unwrap(); + input.extend(data.as_ref()); + let hash_result = Sha256::digest(input.clone()); + let array: [u8; 32] = hash_result.into(); + + let mut out = vec![133, 111, 74, 131, array[0], array[1], array[2], array[3]]; + out.extend(input); + out +} + +impl<'a> Arbitrary<'a> for DocumentChunk +{ + fn arbitrary(u: &mut Unstructured<'a>) -> Result { + let input = u.bytes(u.len())?; + let contents = 
add_header(0, input); + + return Ok(DocumentChunk{bytes: contents}) + } +} + +fuzz_target!(|doc: DocumentChunk| { + Automerge::load(&doc.bytes); +}); diff --git a/automerge/src/autocommit.rs b/rust/automerge/src/autocommit.rs similarity index 53% rename from automerge/src/autocommit.rs rename to rust/automerge/src/autocommit.rs index 4520c67d..ae28596e 100644 --- a/automerge/src/autocommit.rs +++ b/rust/automerge/src/autocommit.rs @@ -1,33 +1,73 @@ use std::ops::RangeBounds; use crate::exid::ExId; -use crate::op_observer::OpObserver; +use crate::op_observer::{BranchableObserver, OpObserver}; +use crate::sync::SyncDoc; use crate::transaction::{CommitOptions, Transactable}; use crate::{ - sync, Keys, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType, Parents, ScalarValue, + sync, Keys, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType, Parents, ReadDoc, + ScalarValue, }; use crate::{ - transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, - Value, Values, + transaction::{Observation, Observed, TransactionInner, UnObserved}, + ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop, TextEncoding, Value, Values, }; /// An automerge document that automatically manages transactions. +/// +/// An `AutoCommit` can optionally manage an [`OpObserver`]. This observer will be notified of all +/// changes made by both remote and local changes. The type parameter `O` tracks whether this +/// document is observed or not. +/// +/// ## Creating, loading, merging and forking documents +/// +/// A new document can be created with [`Self::new`], which will create a document with a random +/// [`ActorId`]. Existing documents can be loaded with [`Self::load`]. +/// +/// If you have two documents and you want to merge the changes from one into the other you can use +/// [`Self::merge`]. +/// +/// If you have a document you want to split into two concurrent threads of execution you can use +/// [`Self::fork`]. 
If you want to split a document from ealier in its history you can use +/// [`Self::fork_at`]. +/// +/// ## Reading values +/// +/// [`Self`] implements [`ReadDoc`], which provides methods for reading values from the document. +/// +/// ## Modifying a document +/// +/// This type implements [`Transactable`] directly, so you can modify it using methods from [`Transactable`]. +/// +/// ## Synchronization +/// +/// To synchronise call [`Self::sync`] which returns an implementation of [`SyncDoc`] +/// +/// ## Observers +/// +/// An `AutoCommit` can optionally manage an [`OpObserver`]. [`Self::new`] will return a document +/// with no observer but you can set an observer using [`Self::with_observer`]. The observer must +/// implement both [`OpObserver`] and [`BranchableObserver`]. If you have an observed autocommit +/// then you can obtain a mutable reference to the observer with [`Self::observer`] #[derive(Debug, Clone)] -pub struct AutoCommitWithObs { +pub struct AutoCommitWithObs { doc: Automerge, transaction: Option<(Obs, TransactionInner)>, - op_observer: Obs, + observation: Obs, } -pub type AutoCommit = AutoCommitWithObs<()>; +/// An autocommit document with no observer +/// +/// See [`AutoCommitWithObs`] +pub type AutoCommit = AutoCommitWithObs; -impl Default for AutoCommitWithObs { +impl Default for AutoCommitWithObs> { fn default() -> Self { let op_observer = O::default(); AutoCommitWithObs { doc: Automerge::new(), transaction: None, - op_observer, + observation: Observed::new(op_observer), } } } @@ -37,22 +77,58 @@ impl AutoCommit { AutoCommitWithObs { doc: Automerge::new(), transaction: None, - op_observer: (), + observation: UnObserved, } } + + pub fn load(data: &[u8]) -> Result { + let doc = Automerge::load(data)?; + Ok(Self { + doc, + transaction: None, + observation: UnObserved, + }) + } } -impl AutoCommitWithObs { +impl AutoCommitWithObs> { pub fn observer(&mut self) -> &mut Obs { self.ensure_transaction_closed(); - &mut self.op_observer + 
self.observation.observer() + } +} + +impl AutoCommitWithObs { + pub fn fork(&mut self) -> Self { + self.ensure_transaction_closed(); + Self { + doc: self.doc.fork(), + transaction: self.transaction.clone(), + observation: self.observation.clone(), + } } - pub fn with_observer(self, op_observer: Obs2) -> AutoCommitWithObs { + pub fn fork_at(&mut self, heads: &[ChangeHash]) -> Result { + self.ensure_transaction_closed(); + Ok(Self { + doc: self.doc.fork_at(heads)?, + transaction: self.transaction.clone(), + observation: self.observation.clone(), + }) + } +} + +impl AutoCommitWithObs { + pub fn with_observer( + self, + op_observer: Obs2, + ) -> AutoCommitWithObs> { AutoCommitWithObs { doc: self.doc, - transaction: self.transaction.map(|(_, t)| (op_observer.branch(), t)), - op_observer, + transaction: self + .transaction + .map(|(_, t)| (Observed::new(op_observer.branch()), t)), + observation: Observed::new(op_observer), } } @@ -79,55 +155,44 @@ impl AutoCommitWithObs { self.doc.get_actor() } + /// Change the text encoding of this view of the document + /// + /// This is a cheap operation, it just changes the way indexes are calculated + pub fn with_encoding(mut self, encoding: TextEncoding) -> Self { + self.doc = self.doc.with_encoding(encoding); + self + } + fn ensure_transaction_open(&mut self) { if self.transaction.is_none() { - self.transaction = Some((self.op_observer.branch(), self.doc.transaction_inner())); + let args = self.doc.transaction_args(); + let inner = TransactionInner::new(args); + self.transaction = Some((self.observation.branch(), inner)) } } - pub fn fork(&mut self) -> Self { - self.ensure_transaction_closed(); - Self { - doc: self.doc.fork(), - transaction: self.transaction.clone(), - op_observer: self.op_observer.clone(), - } - } - - pub fn fork_at(&mut self, heads: &[ChangeHash]) -> Result { - self.ensure_transaction_closed(); - Ok(Self { - doc: self.doc.fork_at(heads)?, - transaction: self.transaction.clone(), - op_observer: 
self.op_observer.clone(), - }) - } - fn ensure_transaction_closed(&mut self) { if let Some((current, tx)) = self.transaction.take() { - self.op_observer.merge(¤t); + self.observation.merge(¤t); tx.commit(&mut self.doc, None, None); } } - pub fn load(data: &[u8]) -> Result { - // passing a () observer here has performance implications on all loads - // if we want an autocommit::load() method that can be observered we need to make a new method - // fn observed_load() ? - let doc = Automerge::load(data)?; - let op_observer = Obs::default(); - Ok(Self { - doc, - transaction: None, - op_observer, - }) - } - + /// Load an incremental save of a document. + /// + /// Unlike `load` this imports changes into an existing document. It will work with both the + /// output of [`Self::save`] and [`Self::save_incremental`] + /// + /// The return value is the number of ops which were applied, this is not useful and will + /// change in future. pub fn load_incremental(&mut self, data: &[u8]) -> Result { self.ensure_transaction_closed(); // TODO - would be nice to pass None here instead of &mut () - self.doc - .load_incremental_with(data, Some(&mut self.op_observer)) + if let Some(observer) = self.observation.observer() { + self.doc.load_incremental_with(data, Some(observer)) + } else { + self.doc.load_incremental(data) + } } pub fn apply_changes( @@ -135,32 +200,45 @@ impl AutoCommitWithObs { changes: impl IntoIterator, ) -> Result<(), AutomergeError> { self.ensure_transaction_closed(); - self.doc - .apply_changes_with(changes, Some(&mut self.op_observer)) + if let Some(observer) = self.observation.observer() { + self.doc.apply_changes_with(changes, Some(observer)) + } else { + self.doc.apply_changes(changes) + } } /// Takes all the changes in `other` which are not in `self` and applies them - pub fn merge( + pub fn merge( &mut self, other: &mut AutoCommitWithObs, ) -> Result, AutomergeError> { self.ensure_transaction_closed(); other.ensure_transaction_closed(); - self.doc - 
.merge_with(&mut other.doc, Some(&mut self.op_observer)) + if let Some(observer) = self.observation.observer() { + self.doc.merge_with(&mut other.doc, Some(observer)) + } else { + self.doc.merge(&mut other.doc) + } } + /// Save the entirety of this document in a compact form. pub fn save(&mut self) -> Vec { self.ensure_transaction_closed(); self.doc.save() } + /// Save this document, but don't run it through DEFLATE afterwards pub fn save_nocompress(&mut self) -> Vec { self.ensure_transaction_closed(); self.doc.save_nocompress() } - // should this return an empty vec instead of None? + /// Save the changes since the last call to [Self::save`] + /// + /// The output of this will not be a compressed document format, but a series of individual + /// changes. This is useful if you know you have only made a small change since the last `save` + /// and you want to immediately send it somewhere (e.g. you've inserted a single character in a + /// text object). pub fn save_incremental(&mut self) -> Vec { self.ensure_transaction_closed(); self.doc.save_incremental() @@ -171,6 +249,7 @@ impl AutoCommitWithObs { self.doc.get_missing_deps(heads) } + /// Get the last change made by this documents actor ID pub fn get_last_local_change(&mut self) -> Option<&Change> { self.ensure_transaction_closed(); self.doc.get_last_local_change() @@ -189,35 +268,24 @@ impl AutoCommitWithObs { self.doc.get_change_by_hash(hash) } + /// Get changes in `other` that are not in `self pub fn get_changes_added<'a>(&mut self, other: &'a mut Self) -> Vec<&'a Change> { self.ensure_transaction_closed(); other.ensure_transaction_closed(); self.doc.get_changes_added(&other.doc) } - pub fn import(&self, s: &str) -> Result { + #[doc(hidden)] + pub fn import(&self, s: &str) -> Result<(ExId, ObjType), AutomergeError> { self.doc.import(s) } + #[doc(hidden)] pub fn dump(&mut self) { self.ensure_transaction_closed(); self.doc.dump() } - pub fn generate_sync_message(&mut self, sync_state: &mut sync::State) -> Option 
{ - self.ensure_transaction_closed(); - self.doc.generate_sync_message(sync_state) - } - - pub fn receive_sync_message( - &mut self, - sync_state: &mut sync::State, - message: sync::Message, - ) -> Result<(), AutomergeError> { - self.ensure_transaction_closed(); - self.doc.receive_sync_message(sync_state, message) - } - /// Return a graphviz representation of the opset. /// /// # Arguments @@ -237,12 +305,17 @@ impl AutoCommitWithObs { self.doc.get_heads() } - pub fn commit(&mut self) -> ChangeHash { + /// Commit any uncommitted changes + /// + /// Returns `None` if there were no operations to commit + pub fn commit(&mut self) -> Option { self.commit_with(CommitOptions::default()) } /// Commit the current operations with some options. /// + /// Returns `None` if there were no operations to commit + /// /// ``` /// # use automerge::transaction::CommitOptions; /// # use automerge::transaction::Transactable; @@ -256,34 +329,55 @@ impl AutoCommitWithObs { /// i64; /// doc.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); /// ``` - pub fn commit_with(&mut self, options: CommitOptions) -> ChangeHash { + pub fn commit_with(&mut self, options: CommitOptions) -> Option { // ensure that even no changes triggers a change self.ensure_transaction_open(); let (current, tx) = self.transaction.take().unwrap(); - self.op_observer.merge(¤t); + self.observation.merge(¤t); tx.commit(&mut self.doc, options.message, options.time) } + /// Remove any changes that have been made in the current transaction from the document pub fn rollback(&mut self) -> usize { self.transaction .take() .map(|(_, tx)| tx.rollback(&mut self.doc)) .unwrap_or(0) } -} -impl Transactable for AutoCommitWithObs { - fn pending_ops(&self) -> usize { - self.transaction - .as_ref() - .map(|(_, t)| t.pending_ops()) - .unwrap_or(0) + /// Generate an empty change + /// + /// The main reason to do this is if you wish to create a "merge commit" which has all the + /// current heads of 
the documents as dependencies but you have no new operations to create. + /// + /// Because this structure is an "autocommit" there may actually be outstanding operations to + /// submit. If this is the case this function will create two changes, one with the outstanding + /// operations and a new one with no operations. The returned `ChangeHash` will always be the + /// hash of the empty change. + pub fn empty_change(&mut self, options: CommitOptions) -> ChangeHash { + self.ensure_transaction_closed(); + let args = self.doc.transaction_args(); + TransactionInner::empty(&mut self.doc, args, options.message, options.time) } - // KeysAt::() - // LenAt::() - // PropAt::() - // NthAt::() + /// An implementation of [`crate::sync::SyncDoc`] for this autocommit + /// + /// This ensures that any outstanding transactions for this document are committed before + /// taking part in the sync protocol + pub fn sync(&mut self) -> impl SyncDoc + '_ { + self.ensure_transaction_closed(); + SyncWrapper { inner: self } + } +} + +impl ReadDoc for AutoCommitWithObs { + fn parents>(&self, obj: O) -> Result, AutomergeError> { + self.doc.parents(obj) + } + + fn path_to_object>(&self, obj: O) -> Result, AutomergeError> { + self.doc.path_to_object(obj) + } fn keys>(&self, obj: O) -> Keys<'_, '_> { self.doc.keys(obj) @@ -343,107 +437,10 @@ impl Transactable for AutoCommitWithObs { self.doc.length_at(obj, heads) } - fn object_type>(&self, obj: O) -> Option { + fn object_type>(&self, obj: O) -> Result { self.doc.object_type(obj) } - // set(obj, prop, value) - value can be scalar or objtype - // del(obj, prop) - // inc(obj, prop, value) - // insert(obj, index, value) - - /// Set the value of property `P` to value `V` in object `obj`. - /// - /// # Returns - /// - /// The opid of the operation which was created, or None if this operation doesn't change the - /// document or create a new object. 
- /// - /// # Errors - /// - /// This will return an error if - /// - The object does not exist - /// - The key is the wrong type for the object - /// - The key does not exist in the object - fn put, P: Into, V: Into>( - &mut self, - obj: O, - prop: P, - value: V, - ) -> Result<(), AutomergeError> { - self.ensure_transaction_open(); - let (current, tx) = self.transaction.as_mut().unwrap(); - tx.put(&mut self.doc, current, obj.as_ref(), prop, value) - } - - fn put_object, P: Into>( - &mut self, - obj: O, - prop: P, - value: ObjType, - ) -> Result { - self.ensure_transaction_open(); - let (current, tx) = self.transaction.as_mut().unwrap(); - tx.put_object(&mut self.doc, current, obj.as_ref(), prop, value) - } - - fn insert, V: Into>( - &mut self, - obj: O, - index: usize, - value: V, - ) -> Result<(), AutomergeError> { - self.ensure_transaction_open(); - let (current, tx) = self.transaction.as_mut().unwrap(); - tx.insert(&mut self.doc, current, obj.as_ref(), index, value) - } - - fn insert_object>( - &mut self, - obj: O, - index: usize, - value: ObjType, - ) -> Result { - self.ensure_transaction_open(); - let (current, tx) = self.transaction.as_mut().unwrap(); - tx.insert_object(&mut self.doc, current, obj.as_ref(), index, value) - } - - fn increment, P: Into>( - &mut self, - obj: O, - prop: P, - value: i64, - ) -> Result<(), AutomergeError> { - self.ensure_transaction_open(); - let (current, tx) = self.transaction.as_mut().unwrap(); - tx.increment(&mut self.doc, current, obj.as_ref(), prop, value) - } - - fn delete, P: Into>( - &mut self, - obj: O, - prop: P, - ) -> Result<(), AutomergeError> { - self.ensure_transaction_open(); - let (current, tx) = self.transaction.as_mut().unwrap(); - tx.delete(&mut self.doc, current, obj.as_ref(), prop) - } - - /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert - /// the new elements - fn splice, V: IntoIterator>( - &mut self, - obj: O, - pos: usize, - del: usize, - vals: V, - ) -> Result<(), AutomergeError> { - self.ensure_transaction_open(); - let (current, tx) = self.transaction.as_mut().unwrap(); - tx.splice(&mut self.doc, current, obj.as_ref(), pos, del, vals) - } - fn text>(&self, obj: O) -> Result { self.doc.text(obj) } @@ -456,9 +453,6 @@ impl Transactable for AutoCommitWithObs { self.doc.text_at(obj, heads) } - // TODO - I need to return these OpId's here **only** to get - // the legacy conflicts format of { [opid]: value } - // Something better? fn get, P: Into>( &self, obj: O, @@ -493,7 +487,186 @@ impl Transactable for AutoCommitWithObs { self.doc.get_all_at(obj, prop, heads) } - fn parents>(&self, obj: O) -> Result, AutomergeError> { - self.doc.parents(obj) + fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec { + self.doc.get_missing_deps(heads) + } + + fn get_change_by_hash(&self, hash: &ChangeHash) -> Option<&Change> { + self.doc.get_change_by_hash(hash) + } +} + +impl Transactable for AutoCommitWithObs { + fn pending_ops(&self) -> usize { + self.transaction + .as_ref() + .map(|(_, t)| t.pending_ops()) + .unwrap_or(0) + } + + fn put, P: Into, V: Into>( + &mut self, + obj: O, + prop: P, + value: V, + ) -> Result<(), AutomergeError> { + self.ensure_transaction_open(); + let (current, tx) = self.transaction.as_mut().unwrap(); + tx.put(&mut self.doc, current.observer(), obj.as_ref(), prop, value) + } + + fn put_object, P: Into>( + &mut self, + obj: O, + prop: P, + value: ObjType, + ) -> Result { + self.ensure_transaction_open(); + let (current, tx) = self.transaction.as_mut().unwrap(); + tx.put_object(&mut self.doc, current.observer(), obj.as_ref(), prop, value) + } + + fn insert, V: Into>( + &mut self, + obj: O, + index: usize, + value: V, + ) -> Result<(), AutomergeError> { + self.ensure_transaction_open(); + let (current, tx) = 
self.transaction.as_mut().unwrap(); + tx.insert( + &mut self.doc, + current.observer(), + obj.as_ref(), + index, + value, + ) + } + + fn insert_object>( + &mut self, + obj: O, + index: usize, + value: ObjType, + ) -> Result { + self.ensure_transaction_open(); + let (current, tx) = self.transaction.as_mut().unwrap(); + tx.insert_object( + &mut self.doc, + current.observer(), + obj.as_ref(), + index, + value, + ) + } + + fn increment, P: Into>( + &mut self, + obj: O, + prop: P, + value: i64, + ) -> Result<(), AutomergeError> { + self.ensure_transaction_open(); + let (current, tx) = self.transaction.as_mut().unwrap(); + tx.increment(&mut self.doc, current.observer(), obj.as_ref(), prop, value) + } + + fn delete, P: Into>( + &mut self, + obj: O, + prop: P, + ) -> Result<(), AutomergeError> { + self.ensure_transaction_open(); + let (current, tx) = self.transaction.as_mut().unwrap(); + tx.delete(&mut self.doc, current.observer(), obj.as_ref(), prop) + } + + /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert + /// the new elements + fn splice, V: IntoIterator>( + &mut self, + obj: O, + pos: usize, + del: usize, + vals: V, + ) -> Result<(), AutomergeError> { + self.ensure_transaction_open(); + let (current, tx) = self.transaction.as_mut().unwrap(); + tx.splice( + &mut self.doc, + current.observer(), + obj.as_ref(), + pos, + del, + vals, + ) + } + + fn splice_text>( + &mut self, + obj: O, + pos: usize, + del: usize, + text: &str, + ) -> Result<(), AutomergeError> { + self.ensure_transaction_open(); + let (current, tx) = self.transaction.as_mut().unwrap(); + tx.splice_text( + &mut self.doc, + current.observer(), + obj.as_ref(), + pos, + del, + text, + ) + } + + fn base_heads(&self) -> Vec { + self.doc.get_heads() + } +} + +// A wrapper we return from `AutoCommit::sync` to ensure that transactions are closed before we +// start syncing +struct SyncWrapper<'a, Obs: Observation> { + inner: &'a mut AutoCommitWithObs, +} + +impl<'a, Obs: Observation> SyncDoc for SyncWrapper<'a, Obs> { + fn generate_sync_message(&self, sync_state: &mut sync::State) -> Option { + self.inner.doc.generate_sync_message(sync_state) + } + + fn receive_sync_message( + &mut self, + sync_state: &mut sync::State, + message: sync::Message, + ) -> Result<(), AutomergeError> { + self.inner.ensure_transaction_closed(); + if let Some(observer) = self.inner.observation.observer() { + self.inner + .doc + .receive_sync_message_with(sync_state, message, observer) + } else { + self.inner.doc.receive_sync_message(sync_state, message) + } + } + + fn receive_sync_message_with( + &mut self, + sync_state: &mut sync::State, + message: sync::Message, + op_observer: &mut Obs2, + ) -> Result<(), AutomergeError> { + if let Some(our_observer) = self.inner.observation.observer() { + let mut composed = crate::op_observer::compose(our_observer, op_observer); + self.inner + .doc + .receive_sync_message_with(sync_state, message, &mut composed) + } else { + self.inner + .doc + 
.receive_sync_message_with(sync_state, message, op_observer) + } } } diff --git a/automerge/src/automerge.rs b/rust/automerge/src/automerge.rs similarity index 54% rename from automerge/src/automerge.rs rename to rust/automerge/src/automerge.rs index 0ca12934..0dd82253 100644 --- a/automerge/src/automerge.rs +++ b/rust/automerge/src/automerge.rs @@ -4,26 +4,29 @@ use std::fmt::Debug; use std::num::NonZeroU64; use std::ops::RangeBounds; -use crate::clock::ClockData; -use crate::clocks::Clocks; +use crate::change_graph::ChangeGraph; use crate::columnar::Key as EncodedKey; use crate::exid::ExId; use crate::keys::Keys; -use crate::op_observer::OpObserver; +use crate::op_observer::{BranchableObserver, OpObserver}; use crate::op_set::OpSet; use crate::parents::Parents; -use crate::storage::{self, load, CompressConfig}; -use crate::transaction::{self, CommitOptions, Failure, Success, Transaction, TransactionInner}; +use crate::storage::{self, load, CompressConfig, VerificationMode}; +use crate::transaction::{ + self, CommitOptions, Failure, Observed, Success, Transaction, TransactionArgs, UnObserved, +}; use crate::types::{ - ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType, - ScalarValue, Value, + ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ListEncoding, ObjId, Op, OpId, + OpType, ScalarValue, TextEncoding, Value, }; use crate::{ query, AutomergeError, Change, KeysAt, ListRange, ListRangeAt, MapRange, MapRangeAt, ObjType, - Prop, Values, + Prop, ReadDoc, Values, }; use serde::Serialize; +mod current_state; + #[cfg(test)] mod tests; @@ -33,29 +36,71 @@ pub(crate) enum Actor { Cached(usize), } -/// An automerge document. +/// What to do when loading a document partially succeeds +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum OnPartialLoad { + /// Ignore the error and return the loaded changes + Ignore, + /// Fail the entire load + Error, +} + +/// An automerge document which does not manage transactions for you. 
+/// +/// ## Creating, loading, merging and forking documents +/// +/// A new document can be created with [`Self::new`], which will create a document with a random +/// [`ActorId`]. Existing documents can be loaded with [`Self::load`], or [`Self::load_with`]. +/// +/// If you have two documents and you want to merge the changes from one into the other you can use +/// [`Self::merge`] or [`Self::merge_with`]. +/// +/// If you have a document you want to split into two concurrent threads of execution you can use +/// [`Self::fork`]. If you want to split a document from ealier in its history you can use +/// [`Self::fork_at`]. +/// +/// ## Reading values +/// +/// [`Self`] implements [`ReadDoc`], which provides methods for reading values from the document. +/// +/// ## Modifying a document (Transactions) +/// +/// [`Automerge`] provides an interface for viewing and modifying automerge documents which does +/// not manage transactions for you. To create changes you use either [`Automerge::transaction`] or +/// [`Automerge::transact`] (or the `_with` variants). +/// +/// ## Sync +/// +/// This type implements [`crate::sync::SyncDoc`] +/// +/// ## Observers +/// +/// Many of the methods on this type have an `_with` or `_observed` variant +/// which allow you to pass in an [`OpObserver`] to observe any changes which +/// occur. #[derive(Debug, Clone)] pub struct Automerge { /// The list of unapplied changes that are not causally ready. - pub(crate) queue: Vec, + queue: Vec, /// The history of changes that form this document, topologically sorted too. - pub(crate) history: Vec, + history: Vec, /// Mapping from change hash to index into the history list. - pub(crate) history_index: HashMap, - /// Mapping from change hash to vector clock at this state. - pub(crate) clocks: HashMap, + history_index: HashMap, + /// Graph of changes + change_graph: ChangeGraph, /// Mapping from actor index to list of seqs seen for them. 
- pub(crate) states: HashMap>, + states: HashMap>, /// Current dependencies of this document (heads hashes). - pub(crate) deps: HashSet, + deps: HashSet, /// Heads at the last save. - pub(crate) saved: Vec, + saved: Vec, /// The set of operations that form this document. - pub(crate) ops: OpSet, + ops: OpSet, /// The current actor. - pub(crate) actor: Actor, + actor: Actor, /// The maximum operation counter this document has seen. - pub(crate) max_op: u64, + max_op: u64, + text_encoding: TextEncoding, } impl Automerge { @@ -65,16 +110,69 @@ impl Automerge { queue: vec![], history: vec![], history_index: HashMap::new(), - clocks: HashMap::new(), + change_graph: ChangeGraph::new(), states: HashMap::new(), ops: Default::default(), deps: Default::default(), saved: Default::default(), actor: Actor::Unused(ActorId::random()), max_op: 0, + text_encoding: Default::default(), } } + pub(crate) fn ops_mut(&mut self) -> &mut OpSet { + &mut self.ops + } + + pub(crate) fn ops(&self) -> &OpSet { + &self.ops + } + + /// Whether this document has any operations + pub fn is_empty(&self) -> bool { + self.history.is_empty() && self.queue.is_empty() + } + + pub(crate) fn actor_id(&self) -> ActorId { + match &self.actor { + Actor::Unused(id) => id.clone(), + Actor::Cached(idx) => self.ops.m.actors[*idx].clone(), + } + } + + /// Remove the current actor from the opset if it has no ops + /// + /// If the current actor ID has no ops in the opset then remove it from the cache of actor IDs. + /// This us used when rolling back a transaction. If the rolled back ops are the only ops for + /// the current actor then we want to remove that actor from the opset so it doesn't end up in + /// any saved version of the document. 
+ /// + /// # Panics + /// + /// If the last actor in the OpSet is not the actor ID of this document + pub(crate) fn rollback_last_actor(&mut self) { + if let Actor::Cached(actor_idx) = self.actor { + if self.states.get(&actor_idx).is_none() && self.ops.m.actors.len() > 0 { + assert!(self.ops.m.actors.len() == actor_idx + 1); + let actor = self.ops.m.actors.remove_last(); + self.actor = Actor::Unused(actor); + } + } + } + + pub(crate) fn text_encoding(&self) -> TextEncoding { + self.text_encoding + } + + /// Change the text encoding of this view of the document + /// + /// This is a cheap operation, it just changes the way indexes are calculated + pub fn with_encoding(mut self, encoding: TextEncoding) -> Self { + self.text_encoding = encoding; + self + } + /// Set the actor id for this document. pub fn with_actor(mut self, actor: ActorId) -> Self { self.actor = Actor::Unused(actor); @@ -111,26 +209,21 @@ impl Automerge { } /// Start a transaction. - pub fn transaction(&mut self) -> Transaction<'_, ()> { - Transaction { - inner: Some(self.transaction_inner()), - doc: self, - op_observer: (), - } + pub fn transaction(&mut self) -> Transaction<'_, UnObserved> { + let args = self.transaction_args(); + Transaction::new(self, args, UnObserved) } - pub fn transaction_with_observer( + /// Start a transaction with an observer + pub fn transaction_with_observer( &mut self, op_observer: Obs, - ) -> Transaction<'_, Obs> { - Transaction { - inner: Some(self.transaction_inner()), - doc: self, - op_observer, - } + ) -> Transaction<'_, Observed> { + let args = self.transaction_args(); + Transaction::new(self, args, Observed::new(op_observer)) } - pub(crate) fn transaction_inner(&mut self) -> TransactionInner { + pub(crate) fn transaction_args(&mut self) -> TransactionArgs { let actor = self.get_actor_index(); let seq = self.states.get(&actor).map_or(0, |v| v.len()) as u64 + 1; let mut deps = self.get_heads(); @@ -140,15 +233,13 @@ impl Automerge { deps.push(last_hash); } } + // 
SAFETY: this unwrap is safe as we always add 1 + let start_op = NonZeroU64::new(self.max_op + 1).unwrap(); - TransactionInner { - actor, + TransactionArgs { + actor_index: actor, seq, - // SAFETY: this unwrap is safe as we always add 1 - start_op: NonZeroU64::new(self.max_op + 1).unwrap(), - time: 0, - message: None, - operations: vec![], + start_op, deps, } } @@ -157,42 +248,43 @@ impl Automerge { /// afterwards. pub fn transact(&mut self, f: F) -> transaction::Result where - F: FnOnce(&mut Transaction<'_, ()>) -> Result, + F: FnOnce(&mut Transaction<'_, UnObserved>) -> Result, + { + self.transact_with_impl(None::<&dyn Fn(&O) -> CommitOptions>, f) + } + + /// Like [`Self::transact`] but with a function for generating the commit options. + pub fn transact_with(&mut self, c: C, f: F) -> transaction::Result + where + F: FnOnce(&mut Transaction<'_, UnObserved>) -> Result, + C: FnOnce(&O) -> CommitOptions, + { + self.transact_with_impl(Some(c), f) + } + + fn transact_with_impl( + &mut self, + c: Option, + f: F, + ) -> transaction::Result + where + F: FnOnce(&mut Transaction<'_, UnObserved>) -> Result, + C: FnOnce(&O) -> CommitOptions, { let mut tx = self.transaction(); let result = f(&mut tx); - match result { - Ok(result) => Ok(Success { - result, - op_observer: (), - hash: tx.commit(), - }), - Err(error) => Err(Failure { - error, - cancelled: tx.rollback(), - }), - } - } - - /// Like [`Self::transact`] but with a function for generating the commit options. 
- pub fn transact_with(&mut self, c: C, f: F) -> transaction::Result - where - F: FnOnce(&mut Transaction<'_, Obs>) -> Result, - C: FnOnce(&O) -> CommitOptions, - Obs: OpObserver, - { - let mut op_observer = Obs::default(); - let mut tx = self.transaction_with_observer(Default::default()); - let result = f(&mut tx); match result { Ok(result) => { - let commit_options = c(&result); - std::mem::swap(&mut op_observer, &mut tx.op_observer); - let hash = tx.commit_with(commit_options); + let hash = if let Some(c) = c { + let commit_options = c(&result); + tx.commit_with(commit_options) + } else { + tx.commit() + }; Ok(Success { result, hash, - op_observer, + op_observer: (), }) } Err(error) => Err(Failure { @@ -202,14 +294,85 @@ impl Automerge { } } + /// Run a transaction on this document in a closure, observing ops with `Obs`, automatically handling commit or rollback + /// afterwards. + pub fn transact_observed(&mut self, f: F) -> transaction::Result + where + F: FnOnce(&mut Transaction<'_, Observed>) -> Result, + Obs: OpObserver + BranchableObserver + Default, + { + self.transact_observed_with_impl(None::<&dyn Fn(&O) -> CommitOptions>, f) + } + + /// Like [`Self::transact_observed`] but with a function for generating the commit options + pub fn transact_observed_with( + &mut self, + c: C, + f: F, + ) -> transaction::Result + where + F: FnOnce(&mut Transaction<'_, Observed>) -> Result, + C: FnOnce(&O) -> CommitOptions, + Obs: OpObserver + BranchableObserver + Default, + { + self.transact_observed_with_impl(Some(c), f) + } + + fn transact_observed_with_impl( + &mut self, + c: Option, + f: F, + ) -> transaction::Result + where + F: FnOnce(&mut Transaction<'_, Observed>) -> Result, + C: FnOnce(&O) -> CommitOptions, + Obs: OpObserver + BranchableObserver + Default, + { + let observer = Obs::default(); + let mut tx = self.transaction_with_observer(observer); + let result = f(&mut tx); + match result { + Ok(result) => { + let (obs, hash) = if let Some(c) = c { + let 
commit_options = c(&result); + tx.commit_with(commit_options) + } else { + tx.commit() + }; + Ok(Success { + result, + hash, + op_observer: obs, + }) + } + Err(error) => Err(Failure { + error, + cancelled: tx.rollback(), + }), + } + } + + /// Generate an empty change + /// + /// The main reason to do this is if you want to create a "merge commit", which is a change + /// that has all the current heads of the document as dependencies. + pub fn empty_commit(&mut self, opts: CommitOptions) -> ChangeHash { + let args = self.transaction_args(); + Transaction::empty(self, args, opts) + } + /// Fork this document at the current point for use by a different actor. + /// + /// This will create a new actor ID for the forked document pub fn fork(&self) -> Self { let mut f = self.clone(); f.set_actor(ActorId::random()); f } - /// Fork this document at the give heads + /// Fork this document at the given heads + /// + /// This will create a new actor ID for the forked document pub fn fork_at(&self, heads: &[ChangeHash]) -> Result { let mut seen = heads.iter().cloned().collect::>(); let mut heads = heads.to_vec(); @@ -234,193 +397,14 @@ impl Automerge { Ok(f) } - // KeysAt::() - // LenAt::() - // PropAt::() - // NthAt::() - - /// Get the parents of an object in the document tree. - /// - /// ### Errors - /// - /// Returns an error when the id given is not the id of an object in this document. - /// This function does not get the parents of scalar values contained within objects. - /// - /// ### Experimental - /// - /// This function may in future be changed to allow getting the parents from the id of a scalar - /// value. 
- pub fn parents>(&self, obj: O) -> Result, AutomergeError> { - let obj_id = self.exid_to_obj(obj.as_ref())?; - Ok(self.ops.parents(obj_id)) - } - - pub fn path_to_object>( - &self, - obj: O, - ) -> Result, AutomergeError> { - let mut path = self.parents(obj.as_ref().clone())?.collect::>(); - path.reverse(); - Ok(path) - } - - /// Get the keys of the object `obj`. - /// - /// For a map this returns the keys of the map. - /// For a list this returns the element ids (opids) encoded as strings. - pub fn keys>(&self, obj: O) -> Keys<'_, '_> { - if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { - let iter_keys = self.ops.keys(obj); - Keys::new(self, iter_keys) - } else { - Keys::new(self, None) - } - } - - /// Historical version of [`keys`](Self::keys). - pub fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_> { - if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { - if let Ok(clock) = self.clock_at(heads) { - return KeysAt::new(self, self.ops.keys_at(obj, clock)); - } - } - KeysAt::new(self, None) - } - - /// Iterate over the keys and values of the map `obj` in the given range. - pub fn map_range, R: RangeBounds>( - &self, - obj: O, - range: R, - ) -> MapRange<'_, R> { - if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { - MapRange::new(self, self.ops.map_range(obj, range)) - } else { - MapRange::new(self, None) - } - } - - /// Historical version of [`map_range`](Self::map_range). - pub fn map_range_at, R: RangeBounds>( - &self, - obj: O, - range: R, - heads: &[ChangeHash], - ) -> MapRangeAt<'_, R> { - if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { - if let Ok(clock) = self.clock_at(heads) { - let iter_range = self.ops.map_range_at(obj, range, clock); - return MapRangeAt::new(self, iter_range); - } - } - MapRangeAt::new(self, None) - } - - /// Iterate over the indexes and values of the list `obj` in the given range. 
- pub fn list_range, R: RangeBounds>( - &self, - obj: O, - range: R, - ) -> ListRange<'_, R> { - if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { - ListRange::new(self, self.ops.list_range(obj, range)) - } else { - ListRange::new(self, None) - } - } - - /// Historical version of [`list_range`](Self::list_range). - pub fn list_range_at, R: RangeBounds>( - &self, - obj: O, - range: R, - heads: &[ChangeHash], - ) -> ListRangeAt<'_, R> { - if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { - if let Ok(clock) = self.clock_at(heads) { - let iter_range = self.ops.list_range_at(obj, range, clock); - return ListRangeAt::new(self, iter_range); - } - } - ListRangeAt::new(self, None) - } - - pub fn values>(&self, obj: O) -> Values<'_> { - if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { - match self.ops.object_type(&obj) { - Some(t) if t.is_sequence() => Values::new(self, self.ops.list_range(obj, ..)), - Some(_) => Values::new(self, self.ops.map_range(obj, ..)), - None => Values::empty(self), - } - } else { - Values::empty(self) - } - } - - pub fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_> { - if let Ok(obj) = self.exid_to_obj(obj.as_ref()) { - if let Ok(clock) = self.clock_at(heads) { - return match self.ops.object_type(&obj) { - Some(ObjType::Map) | Some(ObjType::Table) => { - let iter_range = self.ops.map_range_at(obj, .., clock); - Values::new(self, iter_range) - } - Some(ObjType::List) | Some(ObjType::Text) => { - let iter_range = self.ops.list_range_at(obj, .., clock); - Values::new(self, iter_range) - } - None => Values::empty(self), - }; - } - } - Values::empty(self) - } - - /// Get the length of the given object. 
- pub fn length>(&self, obj: O) -> usize { - if let Ok(inner_obj) = self.exid_to_obj(obj.as_ref()) { - match self.ops.object_type(&inner_obj) { - Some(ObjType::Map) | Some(ObjType::Table) => self.keys(obj).count(), - Some(ObjType::List) | Some(ObjType::Text) => { - self.ops.search(&inner_obj, query::Len::new()).len - } - None => 0, - } - } else { - 0 - } - } - - /// Historical version of [`length`](Self::length). - pub fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { - if let Ok(inner_obj) = self.exid_to_obj(obj.as_ref()) { - if let Ok(clock) = self.clock_at(heads) { - return match self.ops.object_type(&inner_obj) { - Some(ObjType::Map) | Some(ObjType::Table) => self.keys_at(obj, heads).count(), - Some(ObjType::List) | Some(ObjType::Text) => { - self.ops.search(&inner_obj, query::LenAt::new(clock)).len - } - None => 0, - }; - } - } - 0 - } - - /// Get the type of this object, if it is an object. - pub fn object_type>(&self, obj: O) -> Option { - let obj = self.exid_to_obj(obj.as_ref()).ok()?; - self.ops.object_type(&obj) - } - - pub(crate) fn exid_to_obj(&self, id: &ExId) -> Result { + pub(crate) fn exid_to_obj(&self, id: &ExId) -> Result<(ObjId, ObjType), AutomergeError> { match id { - ExId::Root => Ok(ObjId::root()), + ExId::Root => Ok((ObjId::root(), ObjType::Map)), ExId::Id(ctr, actor, idx) => { // do a direct get here b/c this could be foriegn and not be within the array // bounds let obj = if self.ops.m.actors.cache.get(*idx) == Some(actor) { - ObjId(OpId(*ctr, *idx)) + ObjId(OpId::new(*ctr, *idx)) } else { // FIXME - make a real error let idx = self @@ -429,10 +413,10 @@ impl Automerge { .actors .lookup(actor) .ok_or(AutomergeError::Fail)?; - ObjId(OpId(*ctr, idx)) + ObjId(OpId::new(*ctr, idx)) }; - if self.ops.object_type(&obj).is_some() { - Ok(obj) + if let Some(obj_type) = self.ops.object_type(&obj) { + Ok((obj, obj_type)) } else { Err(AutomergeError::NotAnObject) } @@ -444,146 +428,29 @@ impl Automerge { self.ops.id_to_exid(id) } - /// Get 
the string represented by the given text object. - pub fn text>(&self, obj: O) -> Result { - let obj = self.exid_to_obj(obj.as_ref())?; - let query = self.ops.search(&obj, query::ListVals::new()); - let mut buffer = String::new(); - for q in &query.ops { - if let OpType::Put(ScalarValue::Str(s)) = &q.action { - buffer.push_str(s); - } else { - buffer.push('\u{fffc}'); - } - } - Ok(buffer) - } - - /// Historical version of [`text`](Self::text). - pub fn text_at>( - &self, - obj: O, - heads: &[ChangeHash], - ) -> Result { - let obj = self.exid_to_obj(obj.as_ref())?; - let clock = self.clock_at(heads)?; - let query = self.ops.search(&obj, query::ListValsAt::new(clock)); - let mut buffer = String::new(); - for q in &query.ops { - if let OpType::Put(ScalarValue::Str(s)) = &q.action { - buffer.push_str(s); - } else { - buffer.push('\u{fffc}'); - } - } - Ok(buffer) - } - - // TODO - I need to return these OpId's here **only** to get - // the legacy conflicts format of { [opid]: value } - // Something better? - /// Get a value out of the document. - /// - /// Returns both the value and the id of the operation that created it, useful for handling - /// conflicts and serves as the object id if the value is an object. - pub fn get, P: Into>( - &self, - obj: O, - prop: P, - ) -> Result, ExId)>, AutomergeError> { - Ok(self.get_all(obj, prop.into())?.last().cloned()) - } - - /// Historical version of [`get`](Self::get). - pub fn get_at, P: Into>( - &self, - obj: O, - prop: P, - heads: &[ChangeHash], - ) -> Result, ExId)>, AutomergeError> { - Ok(self.get_all_at(obj, prop, heads)?.last().cloned()) - } - - /// Get all conflicting values out of the document at this prop that conflict. - /// - /// Returns both the value and the id of the operation that created it, useful for handling - /// conflicts and serves as the object id if the value is an object. 
- pub fn get_all, P: Into>( - &self, - obj: O, - prop: P, - ) -> Result, ExId)>, AutomergeError> { - let obj = self.exid_to_obj(obj.as_ref())?; - let mut result = match prop.into() { - Prop::Map(p) => { - let prop = self.ops.m.props.lookup(&p); - if let Some(p) = prop { - self.ops - .search(&obj, query::Prop::new(p)) - .ops - .into_iter() - .map(|o| (o.value(), self.id_to_exid(o.id))) - .collect() - } else { - vec![] - } - } - Prop::Seq(n) => self - .ops - .search(&obj, query::Nth::new(n)) - .ops - .into_iter() - .map(|o| (o.value(), self.id_to_exid(o.id))) - .collect(), - }; - result.sort_by(|a, b| b.1.cmp(&a.1)); - Ok(result) - } - - /// Historical version of [`get_all`](Self::get_all). - pub fn get_all_at, P: Into>( - &self, - obj: O, - prop: P, - heads: &[ChangeHash], - ) -> Result, ExId)>, AutomergeError> { - let prop = prop.into(); - let obj = self.exid_to_obj(obj.as_ref())?; - let clock = self.clock_at(heads)?; - let result = match prop { - Prop::Map(p) => { - let prop = self.ops.m.props.lookup(&p); - if let Some(p) = prop { - self.ops - .search(&obj, query::PropAt::new(p, clock)) - .ops - .into_iter() - .map(|o| (o.clone_value(), self.id_to_exid(o.id))) - .collect() - } else { - vec![] - } - } - Prop::Seq(n) => self - .ops - .search(&obj, query::NthAt::new(n, clock)) - .ops - .into_iter() - .map(|o| (o.clone_value(), self.id_to_exid(o.id))) - .collect(), - }; - Ok(result) - } - /// Load a document. pub fn load(data: &[u8]) -> Result { - Self::load_with::<()>(data, None) + Self::load_with::<()>(data, OnPartialLoad::Error, VerificationMode::Check, None) } - /// Load a document. + /// Load a document without verifying the head hashes + /// + /// This is useful for debugging as it allows you to examine a corrupted document. 
+ pub fn load_unverified_heads(data: &[u8]) -> Result { + Self::load_with::<()>( + data, + OnPartialLoad::Error, + VerificationMode::DontCheck, + None, + ) + } + + /// Load a document with an observer #[tracing::instrument(skip(data, observer), err)] pub fn load_with( data: &[u8], + on_error: OnPartialLoad, + mode: VerificationMode, mut observer: Option<&mut Obs>, ) -> Result { if data.is_empty() { @@ -597,6 +464,7 @@ impl Automerge { return Err(load::Error::BadChecksum.into()); } + let mut change: Option = None; let mut am = match first_chunk { storage::Chunk::Document(d) => { tracing::trace!("first chunk is document chunk, inflating"); @@ -605,21 +473,18 @@ impl Automerge { result: op_set, changes, heads, - } = match &mut observer { - Some(o) => storage::load::reconstruct_document(&d, OpSet::observed_builder(*o)), - None => storage::load::reconstruct_document(&d, OpSet::builder()), - } - .map_err(|e| load::Error::InflateDocument(Box::new(e)))?; + } = storage::load::reconstruct_document(&d, mode, OpSet::builder()) + .map_err(|e| load::Error::InflateDocument(Box::new(e)))?; let mut hashes_by_index = HashMap::new(); let mut actor_to_history: HashMap> = HashMap::new(); - let mut clocks = Clocks::new(); + let mut change_graph = ChangeGraph::new(); for (index, change) in changes.iter().enumerate() { // SAFETY: This should be fine because we just constructed an opset containing // all the changes let actor_index = op_set.m.actors.lookup(change.actor_id()).unwrap(); actor_to_history.entry(actor_index).or_default().push(index); hashes_by_index.insert(index, change.hash()); - clocks.add_change(change, actor_index)?; + change_graph.add_change(change, actor_index)?; } let history_index = hashes_by_index.into_iter().map(|(k, v)| (v, k)).collect(); Self { @@ -627,57 +492,84 @@ impl Automerge { history: changes, history_index, states: actor_to_history, - clocks: clocks.into(), + change_graph, ops: op_set, deps: heads.into_iter().collect(), saved: Default::default(), actor: 
Actor::Unused(ActorId::random()), max_op, + text_encoding: Default::default(), } } storage::Chunk::Change(stored_change) => { - tracing::trace!("first chunk is change chunk, applying"); - let change = Change::new_from_unverified(stored_change.into_owned(), None) - .map_err(|e| load::Error::InvalidChangeColumns(Box::new(e)))?; - let mut am = Self::new(); - am.apply_change(change, &mut observer); - am + tracing::trace!("first chunk is change chunk"); + change = Some( + Change::new_from_unverified(stored_change.into_owned(), None) + .map_err(|e| load::Error::InvalidChangeColumns(Box::new(e)))?, + ); + Self::new() } storage::Chunk::CompressedChange(stored_change, compressed) => { - tracing::trace!("first chunk is compressed change, decompressing and applying"); - let change = Change::new_from_unverified( - stored_change.into_owned(), - Some(compressed.into_owned()), - ) - .map_err(|e| load::Error::InvalidChangeColumns(Box::new(e)))?; - let mut am = Self::new(); - am.apply_change(change, &mut observer); - am + tracing::trace!("first chunk is compressed change"); + change = Some( + Change::new_from_unverified( + stored_change.into_owned(), + Some(compressed.into_owned()), + ) + .map_err(|e| load::Error::InvalidChangeColumns(Box::new(e)))?, + ); + Self::new() } }; - tracing::trace!("first chunk loaded, loading remaining chunks"); + tracing::trace!("loading change chunks"); match load::load_changes(remaining.reset()) { load::LoadedChanges::Complete(c) => { - for change in c { - am.apply_change(change, &mut observer); + am.apply_changes(change.into_iter().chain(c))?; + if !am.queue.is_empty() { + return Err(AutomergeError::MissingDeps); } } - load::LoadedChanges::Partial { error, .. } => return Err(error.into()), + load::LoadedChanges::Partial { error, .. 
} => { + if on_error == OnPartialLoad::Error { + return Err(error.into()); + } + } + } + if let Some(observer) = &mut observer { + current_state::observe_current_state(&am, *observer); } Ok(am) } /// Load an incremental save of a document. + /// + /// Unlike `load` this imports changes into an existing document. It will work with both the + /// output of [`Self::save`] and [`Self::save_incremental`] + /// + /// The return value is the number of ops which were applied, this is not useful and will + /// change in future. pub fn load_incremental(&mut self, data: &[u8]) -> Result { self.load_incremental_with::<()>(data, None) } - /// Load an incremental save of a document. + /// Like [`Self::load_incremental`] but with an observer pub fn load_incremental_with( &mut self, data: &[u8], op_observer: Option<&mut Obs>, ) -> Result { + if self.is_empty() { + let mut doc = + Self::load_with::<()>(data, OnPartialLoad::Ignore, VerificationMode::Check, None)?; + doc = doc + .with_encoding(self.text_encoding) + .with_actor(self.actor_id()); + if let Some(obs) = op_observer { + current_state::observe_current_state(&doc, obs); + } + *self = doc; + return Ok(self.ops.len()); + } let changes = match load::load_changes(storage::parse::Input::new(data)) { load::LoadedChanges::Complete(c) => c, load::LoadedChanges::Partial { error, loaded, .. } => { @@ -702,6 +594,9 @@ impl Automerge { } /// Apply changes to this document. + /// + /// This is idemptotent in the sense that if a change has already been applied it will be + /// ignored. pub fn apply_changes( &mut self, changes: impl IntoIterator, @@ -709,12 +604,17 @@ impl Automerge { self.apply_changes_with::<_, ()>(changes, None) } - /// Apply changes to this document. 
+ /// Like [`Self::apply_changes`] but with an observer pub fn apply_changes_with, Obs: OpObserver>( &mut self, changes: I, mut op_observer: Option<&mut Obs>, ) -> Result<(), AutomergeError> { + // Record this so we can avoid observing each individual change and instead just observe + // the final state after all the changes have been applied. We can only do this for an + // empty document right now, once we have logic to produce the diffs between arbitrary + // states of the OpSet we can make this cleaner. + let empty_at_start = self.is_empty(); for c in changes { if !self.history_index.contains_key(&c.hash()) { if self.duplicate_seq(&c) { @@ -724,7 +624,11 @@ impl Automerge { )); } if self.is_causally_ready(&c) { - self.apply_change(c, &mut op_observer); + if empty_at_start { + self.apply_change::<()>(c, &mut None); + } else { + self.apply_change(c, &mut op_observer); + } } else { self.queue.push(c); } @@ -732,7 +636,16 @@ impl Automerge { } while let Some(c) = self.pop_next_causally_ready_change() { if !self.history_index.contains_key(&c.hash()) { - self.apply_change(c, &mut op_observer); + if empty_at_start { + self.apply_change::<()>(c, &mut None); + } else { + self.apply_change(c, &mut op_observer); + } + } + } + if empty_at_start { + if let Some(observer) = &mut op_observer { + current_state::observe_current_state(self, *observer); } } Ok(()) @@ -743,11 +656,11 @@ impl Automerge { self.update_history(change, ops.len()); if let Some(observer) = observer { for (obj, op) in ops { - self.ops.insert_op_with_observer(&obj, op, *observer); + self.insert_op_with_observer(&obj, op, *observer); } } else { for (obj, op) in ops { - self.ops.insert_op(&obj, op); + self.insert_op(&obj, op); } } } @@ -785,29 +698,32 @@ impl Automerge { .iter_ops() .enumerate() .map(|(i, c)| { - let id = OpId(change.start_op().get() + i as u64, actor); + let id = OpId::new(change.start_op().get() + i as u64, actor); let key = match &c.key { EncodedKey::Prop(n) => 
Key::Map(self.ops.m.props.cache(n.to_string())), EncodedKey::Elem(e) if e.is_head() => Key::Seq(ElemId::head()), EncodedKey::Elem(ElemId(o)) => { - Key::Seq(ElemId(OpId::new(actors[o.actor()], o.counter()))) + Key::Seq(ElemId(OpId::new(o.counter(), actors[o.actor()]))) } }; let obj = if c.obj.is_root() { ObjId::root() } else { - ObjId(OpId(c.obj.opid().counter(), actors[c.obj.opid().actor()])) + ObjId(OpId::new( + c.obj.opid().counter(), + actors[c.obj.opid().actor()], + )) }; let pred = c .pred .iter() - .map(|p| OpId::new(actors[p.actor()], p.counter())); + .map(|p| OpId::new(p.counter(), actors[p.actor()])); let pred = self.ops.m.sorted_opids(pred); ( obj, Op { id, - action: OpType::from_index_and_value(c.action, c.val).unwrap(), + action: OpType::from_action_and_value(c.action, c.val), key, succ: Default::default(), pred, @@ -841,12 +757,16 @@ impl Automerge { } /// Save the entirety of this document in a compact form. + /// + /// This takes a mutable reference to self because it saves the heads of the last save so that + /// `save_incremental` can be used to produce only the changes since the last `save`. This API + /// will be changing in future. pub fn save(&mut self) -> Vec { let heads = self.get_heads(); let c = self.history.iter(); let bytes = crate::storage::save::save_document( c, - self.ops.iter(), + self.ops.iter().map(|(objid, _, op)| (objid, op)), &self.ops.m.actors, &self.ops.m.props, &heads, @@ -856,12 +776,13 @@ impl Automerge { bytes } + /// Save this document, but don't run it through DEFLATE afterwards pub fn save_nocompress(&mut self) -> Vec { let heads = self.get_heads(); let c = self.history.iter(); let bytes = crate::storage::save::save_document( c, - self.ops.iter(), + self.ops.iter().map(|(objid, _, op)| (objid, op)), &self.ops.m.actors, &self.ops.m.props, &heads, @@ -871,7 +792,12 @@ impl Automerge { bytes } - /// Save the changes since last save in a compact form. 
+ /// Save the changes since the last call to [Self::save`] + /// + /// The output of this will not be a compressed document format, but a series of individual + /// changes. This is useful if you know you have only made a small change since the last `save` + /// and you want to immediately send it somewhere (e.g. you've inserted a single character in a + /// text object). pub fn save_incremental(&mut self) -> Vec { let changes = self .get_changes(self.saved.as_slice()) @@ -899,51 +825,16 @@ impl Automerge { .filter(|hash| self.history_index.contains_key(hash)) .copied() .collect::>(); - let heads_clock = self.clock_at(&heads)?; - // keep the hashes that are concurrent or after the heads - changes.retain(|hash| { - self.clocks - .get(hash) - .unwrap() - .partial_cmp(&heads_clock) - .map_or(true, |o| o == Ordering::Greater) - }); + self.change_graph.remove_ancestors(changes, &heads); Ok(()) } - /// Get the hashes of the changes in this document that aren't transitive dependencies of the - /// given `heads`. - pub fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec { - let in_queue: HashSet<_> = self.queue.iter().map(|change| change.hash()).collect(); - let mut missing = HashSet::new(); - - for head in self.queue.iter().flat_map(|change| change.deps()) { - if !self.history_index.contains_key(head) { - missing.insert(head); - } - } - - for head in heads { - if !self.history_index.contains_key(head) { - missing.insert(head); - } - } - - let mut missing = missing - .into_iter() - .filter(|hash| !in_queue.contains(hash)) - .copied() - .collect::>(); - missing.sort(); - missing - } - /// Get the changes since `have_deps` in this document using a clock internally. 
fn get_changes_clock(&self, have_deps: &[ChangeHash]) -> Result, AutomergeError> { // get the clock for the given deps - let clock = self.clock_at(have_deps)?; + let clock = self.clock_at(have_deps); // get the documents current clock @@ -968,10 +859,6 @@ impl Automerge { .collect()) } - pub fn get_changes(&self, have_deps: &[ChangeHash]) -> Result, AutomergeError> { - self.get_changes_clock(have_deps) - } - /// Get the last change this actor made to the document. pub fn get_last_local_change(&self) -> Option<&Change> { return self @@ -981,67 +868,8 @@ impl Automerge { .find(|c| c.actor_id() == self.get_actor()); } - fn clock_at(&self, heads: &[ChangeHash]) -> Result { - if let Some(first_hash) = heads.first() { - let mut clock = self - .clocks - .get(first_hash) - .ok_or(AutomergeError::MissingHash(*first_hash))? - .clone(); - - for hash in &heads[1..] { - let c = self - .clocks - .get(hash) - .ok_or(AutomergeError::MissingHash(*hash))?; - clock.merge(c); - } - - Ok(clock) - } else { - Ok(Clock::new()) - } - } - - /// Get a change by its hash. - pub fn get_change_by_hash(&self, hash: &ChangeHash) -> Option<&Change> { - self.history_index - .get(hash) - .and_then(|index| self.history.get(*index)) - } - - /// Get the changes that the other document added compared to this document. 
- #[tracing::instrument(skip(self, other))] - pub fn get_changes_added<'a>(&self, other: &'a Self) -> Vec<&'a Change> { - // Depth-first traversal from the heads through the dependency graph, - // until we reach a change that is already present in other - let mut stack: Vec<_> = other.get_heads(); - tracing::trace!(their_heads=?stack, "finding changes to merge"); - let mut seen_hashes = HashSet::new(); - let mut added_change_hashes = Vec::new(); - while let Some(hash) = stack.pop() { - if !seen_hashes.contains(&hash) && self.get_change_by_hash(&hash).is_none() { - seen_hashes.insert(hash); - added_change_hashes.push(hash); - if let Some(change) = other.get_change_by_hash(&hash) { - stack.extend(change.deps()); - } - } - } - // Return those changes in the reverse of the order in which the depth-first search - // found them. This is not necessarily a topological sort, but should usually be close. - added_change_hashes.reverse(); - added_change_hashes - .into_iter() - .filter_map(|h| other.get_change_by_hash(&h)) - .collect() - } - - /// Get the heads of this document. 
- pub fn get_heads(&self) -> Vec { - let mut deps: Vec<_> = self.deps.iter().copied().collect(); - deps.sort_unstable(); - deps + fn clock_at(&self, heads: &[ChangeHash]) -> Clock { + self.change_graph.clock_for_heads(heads) } fn get_hash(&self, actor: usize, seq: u64) -> Result { @@ -1067,24 +895,10 @@ impl Automerge { .push(history_index); self.history_index.insert(change.hash(), history_index); - let mut clock = Clock::new(); - for hash in change.deps() { - let c = self - .clocks - .get(hash) - .expect("Change's deps should already be in the document"); - clock.merge(c); - } - clock.include( - actor_index, - ClockData { - max_op: change.max_op(), - seq: change.seq(), - }, - ); - self.clocks.insert(change.hash(), clock); + self.change_graph + .add_change(&change, actor_index) + .expect("Change's deps should already be in the document"); - self.history_index.insert(change.hash(), history_index); self.history.push(change); history_index @@ -1097,9 +911,10 @@ impl Automerge { self.deps.insert(change.hash()); } - pub fn import(&self, s: &str) -> Result { + #[doc(hidden)] + pub fn import(&self, s: &str) -> Result<(ExId, ObjType), AutomergeError> { if s == "_root" { - Ok(ExId::Root) + Ok((ExId::Root, ObjType::Map)) } else { let n = s .find('@') @@ -1114,11 +929,11 @@ impl Automerge { .actors .lookup(&actor) .ok_or_else(|| AutomergeError::InvalidObjId(s.to_owned()))?; - Ok(ExId::Id( - counter, - self.ops.m.actors.cache[actor].clone(), - actor, - )) + let obj = ExId::Id(counter, self.ops.m.actors.cache[actor].clone(), actor); + let obj_type = self + .object_type(&obj) + .map_err(|_| AutomergeError::InvalidObjId(s.to_owned()))?; + Ok((obj, obj_type)) } } @@ -1140,7 +955,7 @@ impl Automerge { "pred", "succ" ); - for (obj, op) in self.ops.iter() { + for (obj, _, op) in self.ops.iter() { let id = self.to_string(op.id); let obj = self.to_string(obj); let key = match op.key { @@ -1175,10 +990,448 @@ impl Automerge { /// visualised #[cfg(feature = "optree-visualisation")] pub 
fn visualise_optree(&self, objects: Option>) -> String { - let objects = - objects.map(|os| os.iter().filter_map(|o| self.exid_to_obj(o).ok()).collect()); + let objects = objects.map(|os| { + os.iter() + .filter_map(|o| self.exid_to_obj(o).ok()) + .map(|o| o.0) + .collect() + }); self.ops.visualise(objects) } + + pub(crate) fn insert_op(&mut self, obj: &ObjId, op: Op) -> Op { + let q = self.ops.search(obj, query::SeekOp::new(&op)); + + let succ = q.succ; + let pos = q.pos; + + self.ops.add_succ(obj, &succ, &op); + + if !op.is_delete() { + self.ops.insert(pos, obj, op.clone()); + } + op + } + + pub(crate) fn insert_op_with_observer( + &mut self, + obj: &ObjId, + op: Op, + observer: &mut Obs, + ) -> Op { + let obj_type = self.ops.object_type(obj); + let encoding = obj_type + .map(|o| ListEncoding::new(o, self.text_encoding)) + .unwrap_or_default(); + let q = self + .ops + .search(obj, query::SeekOpWithPatch::new(&op, encoding)); + + let query::SeekOpWithPatch { + pos, + succ, + seen, + last_width, + values, + had_value_before, + .. + } = q; + + let ex_obj = self.ops.id_to_exid(obj.0); + + let key = match op.key { + Key::Map(index) => self.ops.m.props[index].clone().into(), + Key::Seq(_) => seen.into(), + }; + + if op.insert { + if obj_type == Some(ObjType::Text) { + observer.splice_text(self, ex_obj, seen, op.to_str()); + } else { + let value = (op.value(), self.ops.id_to_exid(op.id)); + observer.insert(self, ex_obj, seen, value); + } + } else if op.is_delete() { + if let Some(winner) = &values.last() { + let value = (winner.value(), self.ops.id_to_exid(winner.id)); + let conflict = values.len() > 1; + observer.expose(self, ex_obj, key, value, conflict); + } else if had_value_before { + match key { + Prop::Map(k) => observer.delete_map(self, ex_obj, &k), + Prop::Seq(index) => observer.delete_seq(self, ex_obj, index, last_width), + } + } + } else if let Some(value) = op.get_increment_value() { + // only observe this increment if the counter is visible, i.e. 
the counter's + // create op is in the values + //if values.iter().any(|value| op.pred.contains(&value.id)) { + if values + .last() + .map(|value| op.pred.contains(&value.id)) + .unwrap_or_default() + { + // we have observed the value + observer.increment(self, ex_obj, key, (value, self.ops.id_to_exid(op.id))); + } + } else { + let just_conflict = values + .last() + .map(|value| self.ops.m.lamport_cmp(op.id, value.id) != Ordering::Greater) + .unwrap_or(false); + let value = (op.value(), self.ops.id_to_exid(op.id)); + if op.is_list_op() && !had_value_before { + observer.insert(self, ex_obj, seen, value); + } else if just_conflict { + observer.flag_conflict(self, ex_obj, key); + } else { + let conflict = !values.is_empty(); + observer.put(self, ex_obj, key, value, conflict); + } + } + + self.ops.add_succ(obj, &succ, &op); + + if !op.is_delete() { + self.ops.insert(pos, obj, op.clone()); + } + + op + } + + /// Get the heads of this document. + pub fn get_heads(&self) -> Vec { + let mut deps: Vec<_> = self.deps.iter().copied().collect(); + deps.sort_unstable(); + deps + } + + pub fn get_changes(&self, have_deps: &[ChangeHash]) -> Result, AutomergeError> { + self.get_changes_clock(have_deps) + } + + /// Get changes in `other` that are not in `self + pub fn get_changes_added<'a>(&self, other: &'a Self) -> Vec<&'a Change> { + // Depth-first traversal from the heads through the dependency graph, + // until we reach a change that is already present in other + let mut stack: Vec<_> = other.get_heads(); + tracing::trace!(their_heads=?stack, "finding changes to merge"); + let mut seen_hashes = HashSet::new(); + let mut added_change_hashes = Vec::new(); + while let Some(hash) = stack.pop() { + if !seen_hashes.contains(&hash) && self.get_change_by_hash(&hash).is_none() { + seen_hashes.insert(hash); + added_change_hashes.push(hash); + if let Some(change) = other.get_change_by_hash(&hash) { + stack.extend(change.deps()); + } + } + } + // Return those changes in the reverse of the 
order in which the depth-first search + // found them. This is not necessarily a topological sort, but should usually be close. + added_change_hashes.reverse(); + added_change_hashes + .into_iter() + .filter_map(|h| other.get_change_by_hash(&h)) + .collect() + } +} + +impl ReadDoc for Automerge { + fn parents>(&self, obj: O) -> Result, AutomergeError> { + let (obj_id, _) = self.exid_to_obj(obj.as_ref())?; + Ok(self.ops.parents(obj_id)) + } + + fn path_to_object>(&self, obj: O) -> Result, AutomergeError> { + Ok(self.parents(obj.as_ref().clone())?.path()) + } + + fn keys>(&self, obj: O) -> Keys<'_, '_> { + if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { + let iter_keys = self.ops.keys(obj); + Keys::new(self, iter_keys) + } else { + Keys::new(self, None) + } + } + + fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_> { + if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { + let clock = self.clock_at(heads); + return KeysAt::new(self, self.ops.keys_at(obj, clock)); + } + KeysAt::new(self, None) + } + + fn map_range, R: RangeBounds>( + &self, + obj: O, + range: R, + ) -> MapRange<'_, R> { + if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { + MapRange::new(self, self.ops.map_range(obj, range)) + } else { + MapRange::new(self, None) + } + } + + fn map_range_at, R: RangeBounds>( + &self, + obj: O, + range: R, + heads: &[ChangeHash], + ) -> MapRangeAt<'_, R> { + if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { + let clock = self.clock_at(heads); + let iter_range = self.ops.map_range_at(obj, range, clock); + return MapRangeAt::new(self, iter_range); + } + MapRangeAt::new(self, None) + } + + fn list_range, R: RangeBounds>( + &self, + obj: O, + range: R, + ) -> ListRange<'_, R> { + if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { + ListRange::new(self, self.ops.list_range(obj, range)) + } else { + ListRange::new(self, None) + } + } + + fn list_range_at, R: RangeBounds>( + &self, + obj: O, + range: R, + heads: &[ChangeHash], + ) -> 
ListRangeAt<'_, R> { + if let Ok((obj, _)) = self.exid_to_obj(obj.as_ref()) { + let clock = self.clock_at(heads); + let iter_range = self.ops.list_range_at(obj, range, clock); + return ListRangeAt::new(self, iter_range); + } + ListRangeAt::new(self, None) + } + + fn values>(&self, obj: O) -> Values<'_> { + if let Ok((obj, obj_type)) = self.exid_to_obj(obj.as_ref()) { + if obj_type.is_sequence() { + Values::new(self, self.ops.list_range(obj, ..)) + } else { + Values::new(self, self.ops.map_range(obj, ..)) + } + } else { + Values::empty(self) + } + } + + fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_> { + if let Ok((obj, obj_type)) = self.exid_to_obj(obj.as_ref()) { + let clock = self.clock_at(heads); + match obj_type { + ObjType::Map | ObjType::Table => { + let iter_range = self.ops.map_range_at(obj, .., clock); + Values::new(self, iter_range) + } + ObjType::List | ObjType::Text => { + let iter_range = self.ops.list_range_at(obj, .., clock); + Values::new(self, iter_range) + } + } + } else { + Values::empty(self) + } + } + + fn length>(&self, obj: O) -> usize { + if let Ok((inner_obj, obj_type)) = self.exid_to_obj(obj.as_ref()) { + if obj_type == ObjType::Map || obj_type == ObjType::Table { + self.keys(obj).count() + } else { + let encoding = ListEncoding::new(obj_type, self.text_encoding); + self.ops.search(&inner_obj, query::Len::new(encoding)).len + } + } else { + 0 + } + } + + fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize { + if let Ok((inner_obj, obj_type)) = self.exid_to_obj(obj.as_ref()) { + let clock = self.clock_at(heads); + if obj_type == ObjType::Map || obj_type == ObjType::Table { + self.keys_at(obj, heads).count() + } else { + let encoding = ListEncoding::new(obj_type, self.text_encoding); + self.ops + .search(&inner_obj, query::LenAt::new(clock, encoding)) + .len + } + } else { + 0 + } + } + + fn object_type>(&self, obj: O) -> Result { + let (_, obj_type) = self.exid_to_obj(obj.as_ref())?; + Ok(obj_type) + } + + fn 
text>(&self, obj: O) -> Result { + let obj = self.exid_to_obj(obj.as_ref())?.0; + let query = self.ops.search(&obj, query::ListVals::new()); + let mut buffer = String::new(); + for q in &query.ops { + buffer.push_str(q.to_str()); + } + Ok(buffer) + } + + fn text_at>( + &self, + obj: O, + heads: &[ChangeHash], + ) -> Result { + let obj = self.exid_to_obj(obj.as_ref())?.0; + let clock = self.clock_at(heads); + let query = self.ops.search(&obj, query::ListValsAt::new(clock)); + let mut buffer = String::new(); + for q in &query.ops { + if let OpType::Put(ScalarValue::Str(s)) = &q.action { + buffer.push_str(s); + } else { + buffer.push('\u{fffc}'); + } + } + Ok(buffer) + } + + fn get, P: Into>( + &self, + obj: O, + prop: P, + ) -> Result, ExId)>, AutomergeError> { + Ok(self.get_all(obj, prop.into())?.last().cloned()) + } + + fn get_at, P: Into>( + &self, + obj: O, + prop: P, + heads: &[ChangeHash], + ) -> Result, ExId)>, AutomergeError> { + Ok(self.get_all_at(obj, prop, heads)?.last().cloned()) + } + + fn get_all, P: Into>( + &self, + obj: O, + prop: P, + ) -> Result, ExId)>, AutomergeError> { + let obj = self.exid_to_obj(obj.as_ref())?.0; + let mut result = match prop.into() { + Prop::Map(p) => { + let prop = self.ops.m.props.lookup(&p); + if let Some(p) = prop { + self.ops + .search(&obj, query::Prop::new(p)) + .ops + .into_iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect() + } else { + vec![] + } + } + Prop::Seq(n) => { + let obj_type = self.ops.object_type(&obj); + let encoding = obj_type + .map(|o| ListEncoding::new(o, self.text_encoding)) + .unwrap_or_default(); + self.ops + .search(&obj, query::Nth::new(n, encoding)) + .ops + .into_iter() + .map(|o| (o.value(), self.id_to_exid(o.id))) + .collect() + } + }; + result.sort_by(|a, b| b.1.cmp(&a.1)); + Ok(result) + } + + fn get_all_at, P: Into>( + &self, + obj: O, + prop: P, + heads: &[ChangeHash], + ) -> Result, ExId)>, AutomergeError> { + let prop = prop.into(); + let obj = 
self.exid_to_obj(obj.as_ref())?.0; + let clock = self.clock_at(heads); + let result = match prop { + Prop::Map(p) => { + let prop = self.ops.m.props.lookup(&p); + if let Some(p) = prop { + self.ops + .search(&obj, query::PropAt::new(p, clock)) + .ops + .into_iter() + .map(|o| (o.clone_value(), self.id_to_exid(o.id))) + .collect() + } else { + vec![] + } + } + Prop::Seq(n) => { + let obj_type = self.ops.object_type(&obj); + let encoding = obj_type + .map(|o| ListEncoding::new(o, self.text_encoding)) + .unwrap_or_default(); + self.ops + .search(&obj, query::NthAt::new(n, clock, encoding)) + .ops + .into_iter() + .map(|o| (o.clone_value(), self.id_to_exid(o.id))) + .collect() + } + }; + Ok(result) + } + + fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec { + let in_queue: HashSet<_> = self.queue.iter().map(|change| change.hash()).collect(); + let mut missing = HashSet::new(); + + for head in self.queue.iter().flat_map(|change| change.deps()) { + if !self.history_index.contains_key(head) { + missing.insert(head); + } + } + + for head in heads { + if !self.history_index.contains_key(head) { + missing.insert(head); + } + } + + let mut missing = missing + .into_iter() + .filter(|hash| !in_queue.contains(hash)) + .copied() + .collect::>(); + missing.sort(); + missing + } + + fn get_change_by_hash(&self, hash: &ChangeHash) -> Option<&Change> { + self.history_index + .get(hash) + .and_then(|index| self.history.get(*index)) + } } impl Default for Automerge { diff --git a/rust/automerge/src/automerge/current_state.rs b/rust/automerge/src/automerge/current_state.rs new file mode 100644 index 00000000..3f7f4afc --- /dev/null +++ b/rust/automerge/src/automerge/current_state.rs @@ -0,0 +1,915 @@ +use std::{borrow::Cow, collections::HashSet, iter::Peekable}; + +use itertools::Itertools; + +use crate::{ + types::{ElemId, Key, ListEncoding, ObjId, Op, OpId}, + ObjType, OpObserver, OpType, ScalarValue, Value, +}; + +/// Traverse the "current" state of the document, notifying 
`observer` +/// +/// The "current" state of the document is the set of visible operations. This function will +/// traverse that set of operations and call the corresponding methods on the `observer` as it +/// encounters values. The `observer` methods will be called in the order in which they appear in +/// the document. That is to say that the observer will be notified of parent objects before the +/// objects they contain and elements of a sequence will be notified in the order they occur. +/// +/// Due to only notifying of visible operations the observer will only be called with `put`, +/// `insert`, and `splice`, operations. +pub(super) fn observe_current_state(doc: &crate::Automerge, observer: &mut O) { + // The OpSet already exposes operations in the order they appear in the document. + // `OpSet::iter_objs` iterates over the objects in causal order, this means that parent objects + // will always appear before their children. Furthermore, the operations within each object are + // ordered by key (which means by their position in a sequence for sequences). + // + // Effectively then we iterate over each object, then we group the operations in the object by + // key and for each key find the visible operations for that key. Then we notify the observer + // for each of those visible operations. 
+ let mut visible_objs = HashSet::new(); + visible_objs.insert(ObjId::root()); + for (obj, typ, ops) in doc.ops().iter_objs() { + if !visible_objs.contains(obj) { + continue; + } + let ops_by_key = ops.group_by(|o| o.key); + let actions = ops_by_key + .into_iter() + .flat_map(|(key, key_ops)| key_actions(key, key_ops)); + if typ == ObjType::Text && !observer.text_as_seq() { + track_new_objs_and_notify( + &mut visible_objs, + doc, + obj, + typ, + observer, + text_actions(actions), + ) + } else if typ == ObjType::List { + track_new_objs_and_notify( + &mut visible_objs, + doc, + obj, + typ, + observer, + list_actions(actions), + ) + } else { + track_new_objs_and_notify(&mut visible_objs, doc, obj, typ, observer, actions) + } + } +} + +fn track_new_objs_and_notify, O: OpObserver>( + visible_objs: &mut HashSet, + doc: &crate::Automerge, + obj: &ObjId, + typ: ObjType, + observer: &mut O, + actions: I, +) { + let exid = doc.id_to_exid(obj.0); + for action in actions { + if let Some(obj) = action.made_object() { + visible_objs.insert(obj); + } + action.notify_observer(doc, &exid, obj, typ, observer); + } +} + +trait Action { + /// Notify an observer of whatever this action does + fn notify_observer( + self, + doc: &crate::Automerge, + exid: &crate::ObjId, + obj: &ObjId, + typ: ObjType, + observer: &mut O, + ); + + /// If this action created an object, return the ID of that object + fn made_object(&self) -> Option; +} + +fn key_actions<'a, I: Iterator>( + key: Key, + key_ops: I, +) -> impl Iterator> { + #[derive(Clone)] + enum CurrentOp<'a> { + Put { + value: Value<'a>, + id: OpId, + conflicted: bool, + }, + Insert(Value<'a>, OpId), + } + let current_ops = key_ops + .filter(|o| o.visible()) + .filter_map(|o| match o.action { + OpType::Make(obj_type) => { + let value = Value::Object(obj_type); + if o.insert { + Some(CurrentOp::Insert(value, o.id)) + } else { + Some(CurrentOp::Put { + value, + id: o.id, + conflicted: false, + }) + } + } + OpType::Put(ref value) => { + let 
value = Value::Scalar(Cow::Borrowed(value)); + if o.insert { + Some(CurrentOp::Insert(value, o.id)) + } else { + Some(CurrentOp::Put { + value, + id: o.id, + conflicted: false, + }) + } + } + _ => None, + }); + current_ops + .coalesce(|previous, current| match (previous, current) { + (CurrentOp::Put { .. }, CurrentOp::Put { value, id, .. }) => Ok(CurrentOp::Put { + value, + id, + conflicted: true, + }), + (previous, current) => Err((previous, current)), + }) + .map(move |op| match op { + CurrentOp::Put { + value, + id, + conflicted, + } => SimpleAction::Put { + prop: key, + tagged_value: (value, id), + conflict: conflicted, + }, + CurrentOp::Insert(val, id) => SimpleAction::Insert { + elem_id: ElemId(id), + tagged_value: (val, id), + }, + }) +} + +/// Either a "put" or "insert" action. i.e. not splicing for text values +enum SimpleAction<'a> { + Put { + prop: Key, + tagged_value: (Value<'a>, OpId), + conflict: bool, + }, + Insert { + elem_id: ElemId, + tagged_value: (Value<'a>, OpId), + }, +} + +impl<'a> Action for SimpleAction<'a> { + fn notify_observer( + self, + doc: &crate::Automerge, + exid: &crate::ObjId, + obj: &ObjId, + typ: ObjType, + observer: &mut O, + ) { + let encoding = match typ { + ObjType::Text => ListEncoding::Text(doc.text_encoding()), + _ => ListEncoding::List, + }; + match self { + Self::Put { + prop, + tagged_value, + conflict, + } => { + let tagged_value = (tagged_value.0, doc.id_to_exid(tagged_value.1)); + let prop = doc.ops().export_key(*obj, prop, encoding).unwrap(); + observer.put(doc, exid.clone(), prop, tagged_value, conflict); + } + Self::Insert { + elem_id, + tagged_value: (value, opid), + } => { + let index = doc + .ops() + .search(obj, crate::query::ElemIdPos::new(elem_id, encoding)) + .index() + .unwrap(); + let tagged_value = (value, doc.id_to_exid(opid)); + observer.insert(doc, doc.id_to_exid(obj.0), index, tagged_value); + } + } + } + + fn made_object(&self) -> Option { + match self { + Self::Put { + tagged_value: 
(Value::Object(_), id), + .. + } => Some((*id).into()), + Self::Insert { + tagged_value: (Value::Object(_), id), + .. + } => Some((*id).into()), + _ => None, + } + } +} + +/// An `Action` which splices for text values +enum TextAction<'a> { + Action(SimpleAction<'a>), + Splice { start: ElemId, chars: String }, +} + +impl<'a> Action for TextAction<'a> { + fn notify_observer( + self, + doc: &crate::Automerge, + exid: &crate::ObjId, + obj: &ObjId, + typ: ObjType, + observer: &mut O, + ) { + match self { + Self::Action(action) => action.notify_observer(doc, exid, obj, typ, observer), + Self::Splice { start, chars } => { + let index = doc + .ops() + .search( + obj, + crate::query::ElemIdPos::new( + start, + ListEncoding::Text(doc.text_encoding()), + ), + ) + .index() + .unwrap(); + observer.splice_text(doc, doc.id_to_exid(obj.0), index, chars.as_str()); + } + } + } + + fn made_object(&self) -> Option { + match self { + Self::Action(action) => action.made_object(), + _ => None, + } + } +} + +fn list_actions<'a, I: Iterator>>( + actions: I, +) -> impl Iterator> { + actions.map(|a| match a { + SimpleAction::Put { + prop: Key::Seq(elem_id), + tagged_value, + .. + } => SimpleAction::Insert { + elem_id, + tagged_value, + }, + a => a, + }) +} + +/// Condense consecutive `SimpleAction::Insert` actions into one `TextAction::Splice` +fn text_actions<'a, I>(actions: I) -> impl Iterator> +where + I: Iterator>, +{ + TextActions { + ops: actions.peekable(), + } +} + +struct TextActions<'a, I: Iterator>> { + ops: Peekable, +} + +impl<'a, I: Iterator>> Iterator for TextActions<'a, I> { + type Item = TextAction<'a>; + + fn next(&mut self) -> Option { + if let Some(SimpleAction::Insert { .. }) = self.ops.peek() { + let (start, value) = match self.ops.next() { + Some(SimpleAction::Insert { + tagged_value: (value, opid), + .. 
+ }) => (opid, value), + _ => unreachable!(), + }; + let mut chars = match value { + Value::Scalar(Cow::Borrowed(ScalarValue::Str(s))) => s.to_string(), + _ => "\u{fffc}".to_string(), + }; + while let Some(SimpleAction::Insert { .. }) = self.ops.peek() { + if let Some(SimpleAction::Insert { + tagged_value: (value, _), + .. + }) = self.ops.next() + { + match value { + Value::Scalar(Cow::Borrowed(ScalarValue::Str(s))) => chars.push_str(s), + _ => chars.push('\u{fffc}'), + } + } + } + Some(TextAction::Splice { + start: ElemId(start), + chars, + }) + } else { + self.ops.next().map(TextAction::Action) + } + } +} + +#[cfg(test)] +mod tests { + use std::{borrow::Cow, fs}; + + use crate::{transaction::Transactable, Automerge, ObjType, OpObserver, Prop, ReadDoc, Value}; + + // Observer ops often carry a "tagged value", which is a value and the OpID of the op which + // created that value. For a lot of values (i.e. any scalar value) we don't care about the + // opid. This type implements `PartialEq` for the `Untagged` variant by ignoring the tag, which + // allows us to express tests which don't care about the tag. 
+ #[derive(Clone, Debug)] + enum ObservedValue { + Tagged(crate::Value<'static>, crate::ObjId), + Untagged(crate::Value<'static>), + } + + impl<'a> From<(Value<'a>, crate::ObjId)> for ObservedValue { + fn from(value: (Value<'a>, crate::ObjId)) -> Self { + Self::Tagged(value.0.into_owned(), value.1) + } + } + + impl PartialEq for ObservedValue { + fn eq(&self, other: &ObservedValue) -> bool { + match (self, other) { + (Self::Tagged(v1, o1), Self::Tagged(v2, o2)) => equal_vals(v1, v2) && o1 == o2, + (Self::Untagged(v1), Self::Untagged(v2)) => equal_vals(v1, v2), + (Self::Tagged(v1, _), Self::Untagged(v2)) => equal_vals(v1, v2), + (Self::Untagged(v1), Self::Tagged(v2, _)) => equal_vals(v1, v2), + } + } + } + + /// Consider counters equal if they have the same current value + fn equal_vals(v1: &Value<'_>, v2: &Value<'_>) -> bool { + match (v1, v2) { + (Value::Scalar(v1), Value::Scalar(v2)) => match (v1.as_ref(), v2.as_ref()) { + (crate::ScalarValue::Counter(c1), crate::ScalarValue::Counter(c2)) => { + c1.current == c2.current + } + _ => v1 == v2, + }, + _ => v1 == v2, + } + } + + #[derive(Debug, Clone, PartialEq)] + enum ObserverCall { + Put { + obj: crate::ObjId, + prop: Prop, + value: ObservedValue, + conflict: bool, + }, + Insert { + obj: crate::ObjId, + index: usize, + value: ObservedValue, + }, + SpliceText { + obj: crate::ObjId, + index: usize, + chars: String, + }, + } + + // A Vec is pretty hard to look at in a test failure. 
This wrapper prints the + // calls out in a nice table so it's easier to see what's different + #[derive(Clone, PartialEq)] + struct Calls(Vec); + + impl std::fmt::Debug for Calls { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut table = prettytable::Table::new(); + table.set_format(*prettytable::format::consts::FORMAT_NO_BORDER_LINE_SEPARATOR); + table.set_titles(prettytable::row![ + "Op", "Object", "Property", "Value", "Conflict" + ]); + for call in &self.0 { + match call { + ObserverCall::Put { + obj, + prop, + value, + conflict, + } => { + table.add_row(prettytable::row![ + "Put", + format!("{}", obj), + prop, + match value { + ObservedValue::Tagged(v, o) => format!("{} ({})", v, o), + ObservedValue::Untagged(v) => format!("{}", v), + }, + conflict + ]); + } + ObserverCall::Insert { obj, index, value } => { + table.add_row(prettytable::row![ + "Insert", + format!("{}", obj), + index, + match value { + ObservedValue::Tagged(v, o) => format!("{} ({})", v, o), + ObservedValue::Untagged(v) => format!("{}", v), + }, + "" + ]); + } + ObserverCall::SpliceText { obj, index, chars } => { + table.add_row(prettytable::row![ + "SpliceText", + format!("{}", obj), + index, + chars, + "" + ]); + } + } + } + let mut out = Vec::new(); + table.print(&mut out).unwrap(); + write!(f, "\n{}\n", String::from_utf8(out).unwrap()) + } + } + + struct ObserverStub { + ops: Vec, + text_as_seq: bool, + } + + impl ObserverStub { + fn new() -> Self { + Self { + ops: Vec::new(), + text_as_seq: true, + } + } + + fn new_text_v2() -> Self { + Self { + ops: Vec::new(), + text_as_seq: false, + } + } + } + + impl OpObserver for ObserverStub { + fn insert( + &mut self, + _doc: &R, + objid: crate::ObjId, + index: usize, + tagged_value: (crate::Value<'_>, crate::ObjId), + ) { + self.ops.push(ObserverCall::Insert { + obj: objid, + index, + value: tagged_value.into(), + }); + } + + fn splice_text( + &mut self, + _doc: &R, + objid: crate::ObjId, + index: usize, + value: 
&str, + ) { + self.ops.push(ObserverCall::SpliceText { + obj: objid, + index, + chars: value.to_string(), + }); + } + + fn put( + &mut self, + _doc: &R, + objid: crate::ObjId, + prop: crate::Prop, + tagged_value: (crate::Value<'_>, crate::ObjId), + conflict: bool, + ) { + self.ops.push(ObserverCall::Put { + obj: objid, + prop, + value: tagged_value.into(), + conflict, + }); + } + + fn expose( + &mut self, + _doc: &R, + _objid: crate::ObjId, + _prop: crate::Prop, + _tagged_value: (crate::Value<'_>, crate::ObjId), + _conflict: bool, + ) { + panic!("expose not expected"); + } + + fn increment( + &mut self, + _doc: &R, + _objid: crate::ObjId, + _prop: crate::Prop, + _tagged_value: (i64, crate::ObjId), + ) { + panic!("increment not expected"); + } + + fn delete_map(&mut self, _doc: &R, _objid: crate::ObjId, _key: &str) { + panic!("delete not expected"); + } + + fn delete_seq( + &mut self, + _doc: &R, + _objid: crate::ObjId, + _index: usize, + _num: usize, + ) { + panic!("delete not expected"); + } + + fn text_as_seq(&self) -> bool { + self.text_as_seq + } + } + + #[test] + fn basic_test() { + let mut doc = crate::AutoCommit::new(); + doc.put(crate::ROOT, "key", "value").unwrap(); + let map = doc.put_object(crate::ROOT, "map", ObjType::Map).unwrap(); + doc.put(&map, "nested_key", "value").unwrap(); + let list = doc.put_object(crate::ROOT, "list", ObjType::List).unwrap(); + doc.insert(&list, 0, "value").unwrap(); + let text = doc.put_object(crate::ROOT, "text", ObjType::Text).unwrap(); + doc.insert(&text, 0, "a").unwrap(); + + let mut obs = ObserverStub::new(); + super::observe_current_state(doc.document(), &mut obs); + + assert_eq!( + Calls(obs.ops), + Calls(vec![ + ObserverCall::Put { + obj: crate::ROOT, + prop: "key".into(), + value: ObservedValue::Untagged("value".into()), + conflict: false, + }, + ObserverCall::Put { + obj: crate::ROOT, + prop: "list".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::List), list.clone()), + conflict: false, + }, + 
ObserverCall::Put { + obj: crate::ROOT, + prop: "map".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::Map), map.clone()), + conflict: false, + }, + ObserverCall::Put { + obj: crate::ROOT, + prop: "text".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::Text), text.clone()), + conflict: false, + }, + ObserverCall::Put { + obj: map.clone(), + prop: "nested_key".into(), + value: ObservedValue::Untagged("value".into()), + conflict: false, + }, + ObserverCall::Insert { + obj: list, + index: 0, + value: ObservedValue::Untagged("value".into()), + }, + ObserverCall::Insert { + obj: text, + index: 0, + value: ObservedValue::Untagged("a".into()), + }, + ]) + ); + } + + #[test] + fn test_deleted_ops_omitted() { + let mut doc = crate::AutoCommit::new(); + doc.put(crate::ROOT, "key", "value").unwrap(); + doc.delete(crate::ROOT, "key").unwrap(); + let map = doc.put_object(crate::ROOT, "map", ObjType::Map).unwrap(); + doc.put(&map, "nested_key", "value").unwrap(); + doc.delete(&map, "nested_key").unwrap(); + let list = doc.put_object(crate::ROOT, "list", ObjType::List).unwrap(); + doc.insert(&list, 0, "value").unwrap(); + doc.delete(&list, 0).unwrap(); + let text = doc.put_object(crate::ROOT, "text", ObjType::Text).unwrap(); + doc.insert(&text, 0, "a").unwrap(); + doc.delete(&text, 0).unwrap(); + + doc.put_object(crate::ROOT, "deleted_map", ObjType::Map) + .unwrap(); + doc.delete(crate::ROOT, "deleted_map").unwrap(); + doc.put_object(crate::ROOT, "deleted_list", ObjType::List) + .unwrap(); + doc.delete(crate::ROOT, "deleted_list").unwrap(); + doc.put_object(crate::ROOT, "deleted_text", ObjType::Text) + .unwrap(); + doc.delete(crate::ROOT, "deleted_text").unwrap(); + + let mut obs = ObserverStub::new(); + super::observe_current_state(doc.document(), &mut obs); + + assert_eq!( + Calls(obs.ops), + Calls(vec![ + ObserverCall::Put { + obj: crate::ROOT, + prop: "list".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::List), list.clone()), + 
conflict: false, + }, + ObserverCall::Put { + obj: crate::ROOT, + prop: "map".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::Map), map.clone()), + conflict: false, + }, + ObserverCall::Put { + obj: crate::ROOT, + prop: "text".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::Text), text.clone()), + conflict: false, + }, + ]) + ); + } + + #[test] + fn test_text_spliced() { + let mut doc = crate::AutoCommit::new(); + let text = doc.put_object(crate::ROOT, "text", ObjType::Text).unwrap(); + doc.insert(&text, 0, "a").unwrap(); + doc.splice_text(&text, 1, 0, "bcdef").unwrap(); + doc.splice_text(&text, 2, 2, "g").unwrap(); + + let mut obs = ObserverStub::new_text_v2(); + super::observe_current_state(doc.document(), &mut obs); + + assert_eq!( + Calls(obs.ops), + Calls(vec![ + ObserverCall::Put { + obj: crate::ROOT, + prop: "text".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::Text), text.clone()), + conflict: false, + }, + ObserverCall::SpliceText { + obj: text, + index: 0, + chars: "abgef".to_string() + } + ]) + ); + } + + #[test] + fn test_counters() { + let actor1 = crate::ActorId::from("aa".as_bytes()); + let actor2 = crate::ActorId::from("bb".as_bytes()); + let mut doc = crate::AutoCommit::new().with_actor(actor2); + + let mut doc2 = doc.fork().with_actor(actor1); + doc2.put(crate::ROOT, "key", "someval").unwrap(); + + doc.put(crate::ROOT, "key", crate::ScalarValue::Counter(1.into())) + .unwrap(); + doc.increment(crate::ROOT, "key", 2).unwrap(); + doc.increment(crate::ROOT, "key", 3).unwrap(); + + doc.merge(&mut doc2).unwrap(); + + let mut obs = ObserverStub::new_text_v2(); + super::observe_current_state(doc.document(), &mut obs); + + assert_eq!( + Calls(obs.ops), + Calls(vec![ObserverCall::Put { + obj: crate::ROOT, + prop: "key".into(), + value: ObservedValue::Untagged(Value::Scalar(Cow::Owned( + crate::ScalarValue::Counter(6.into()) + ))), + conflict: true, + },]) + ); + } + + #[test] + fn test_multiple_list_insertions() { 
+ let mut doc = crate::AutoCommit::new(); + + let list = doc.put_object(crate::ROOT, "list", ObjType::List).unwrap(); + doc.insert(&list, 0, 1).unwrap(); + doc.insert(&list, 1, 2).unwrap(); + + let mut obs = ObserverStub::new_text_v2(); + super::observe_current_state(doc.document(), &mut obs); + + assert_eq!( + Calls(obs.ops), + Calls(vec![ + ObserverCall::Put { + obj: crate::ROOT, + prop: "list".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::List), list.clone()), + conflict: false, + }, + ObserverCall::Insert { + obj: list.clone(), + index: 0, + value: ObservedValue::Untagged(1.into()), + }, + ObserverCall::Insert { + obj: list, + index: 1, + value: ObservedValue::Untagged(2.into()), + }, + ]) + ); + } + + #[test] + fn test_concurrent_insertions_at_same_index() { + let mut doc = crate::AutoCommit::new().with_actor(crate::ActorId::from("aa".as_bytes())); + + let list = doc.put_object(crate::ROOT, "list", ObjType::List).unwrap(); + + let mut doc2 = doc.fork().with_actor(crate::ActorId::from("bb".as_bytes())); + + doc.insert(&list, 0, 1).unwrap(); + doc2.insert(&list, 0, 2).unwrap(); + doc.merge(&mut doc2).unwrap(); + + let mut obs = ObserverStub::new_text_v2(); + super::observe_current_state(doc.document(), &mut obs); + + assert_eq!( + Calls(obs.ops), + Calls(vec![ + ObserverCall::Put { + obj: crate::ROOT, + prop: "list".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::List), list.clone()), + conflict: false, + }, + ObserverCall::Insert { + obj: list.clone(), + index: 0, + value: ObservedValue::Untagged(2.into()), + }, + ObserverCall::Insert { + obj: list, + index: 1, + value: ObservedValue::Untagged(1.into()), + }, + ]) + ); + } + + #[test] + fn test_insert_objects() { + let mut doc = crate::AutoCommit::new().with_actor(crate::ActorId::from("aa".as_bytes())); + + let list = doc.put_object(crate::ROOT, "list", ObjType::List).unwrap(); + + let map = doc.insert_object(&list, 0, ObjType::Map).unwrap(); + doc.put(&map, "key", 
"value").unwrap(); + + let mut obs = ObserverStub::new_text_v2(); + super::observe_current_state(doc.document(), &mut obs); + + assert_eq!( + Calls(obs.ops), + Calls(vec![ + ObserverCall::Put { + obj: crate::ROOT, + prop: "list".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::List), list.clone()), + conflict: false, + }, + ObserverCall::Insert { + obj: list.clone(), + index: 0, + value: ObservedValue::Tagged(Value::Object(ObjType::Map), map.clone()), + }, + ObserverCall::Put { + obj: map, + prop: "key".into(), + value: ObservedValue::Untagged("value".into()), + conflict: false + }, + ]) + ); + } + + #[test] + fn test_insert_and_update() { + let mut doc = crate::AutoCommit::new(); + + let list = doc.put_object(crate::ROOT, "list", ObjType::List).unwrap(); + + doc.insert(&list, 0, "one").unwrap(); + doc.insert(&list, 1, "two").unwrap(); + doc.put(&list, 0, "three").unwrap(); + doc.put(&list, 1, "four").unwrap(); + + let mut obs = ObserverStub::new_text_v2(); + super::observe_current_state(doc.document(), &mut obs); + + assert_eq!( + Calls(obs.ops), + Calls(vec![ + ObserverCall::Put { + obj: crate::ROOT, + prop: "list".into(), + value: ObservedValue::Tagged(Value::Object(ObjType::List), list.clone()), + conflict: false, + }, + ObserverCall::Insert { + obj: list.clone(), + index: 0, + value: ObservedValue::Untagged("three".into()), + }, + ObserverCall::Insert { + obj: list.clone(), + index: 1, + value: ObservedValue::Untagged("four".into()), + }, + ]) + ); + } + + #[test] + fn test_load_changes() { + fn fixture(name: &str) -> Vec { + fs::read("./tests/fixtures/".to_owned() + name).unwrap() + } + + let mut obs = ObserverStub::new(); + let _doc = Automerge::load_with( + &fixture("counter_value_is_ok.automerge"), + crate::OnPartialLoad::Error, + crate::storage::VerificationMode::Check, + Some(&mut obs), + ); + + assert_eq!( + Calls(obs.ops), + Calls(vec![ObserverCall::Put { + obj: crate::ROOT, + prop: "a".into(), + value: 
ObservedValue::Untagged(crate::ScalarValue::Counter(2000.into()).into()), + conflict: false, + },]) + ); + } +} diff --git a/automerge/src/automerge/tests.rs b/rust/automerge/src/automerge/tests.rs similarity index 94% rename from automerge/src/automerge/tests.rs rename to rust/automerge/src/automerge/tests.rs index 9c1a1ff7..3511c4ed 100644 --- a/automerge/src/automerge/tests.rs +++ b/rust/automerge/src/automerge/tests.rs @@ -192,14 +192,14 @@ fn test_props_vals_at() -> Result<(), AutomergeError> { assert!(doc.keys_at(ROOT, &heads1).collect_vec() == vec!["prop1".to_owned()]); assert_eq!(doc.length_at(ROOT, &heads1), 1); assert!(doc.get_at(ROOT, "prop1", &heads1)?.unwrap().0 == Value::str("val1")); - assert!(doc.get_at(ROOT, "prop2", &heads1)? == None); - assert!(doc.get_at(ROOT, "prop3", &heads1)? == None); + assert!(doc.get_at(ROOT, "prop2", &heads1)?.is_none()); + assert!(doc.get_at(ROOT, "prop3", &heads1)?.is_none()); assert!(doc.keys_at(ROOT, &heads2).collect_vec() == vec!["prop1".to_owned()]); assert_eq!(doc.length_at(ROOT, &heads2), 1); assert!(doc.get_at(ROOT, "prop1", &heads2)?.unwrap().0 == Value::str("val2")); - assert!(doc.get_at(ROOT, "prop2", &heads2)? == None); - assert!(doc.get_at(ROOT, "prop3", &heads2)? == None); + assert!(doc.get_at(ROOT, "prop2", &heads2)?.is_none()); + assert!(doc.get_at(ROOT, "prop3", &heads2)?.is_none()); assert!( doc.keys_at(ROOT, &heads3).collect_vec() == vec!["prop1".to_owned(), "prop2".to_owned()] @@ -207,28 +207,28 @@ fn test_props_vals_at() -> Result<(), AutomergeError> { assert_eq!(doc.length_at(ROOT, &heads3), 2); assert!(doc.get_at(ROOT, "prop1", &heads3)?.unwrap().0 == Value::str("val2")); assert!(doc.get_at(ROOT, "prop2", &heads3)?.unwrap().0 == Value::str("val3")); - assert!(doc.get_at(ROOT, "prop3", &heads3)? 
== None); + assert!(doc.get_at(ROOT, "prop3", &heads3)?.is_none()); assert!(doc.keys_at(ROOT, &heads4).collect_vec() == vec!["prop2".to_owned()]); assert_eq!(doc.length_at(ROOT, &heads4), 1); - assert!(doc.get_at(ROOT, "prop1", &heads4)? == None); + assert!(doc.get_at(ROOT, "prop1", &heads4)?.is_none()); assert!(doc.get_at(ROOT, "prop2", &heads4)?.unwrap().0 == Value::str("val3")); - assert!(doc.get_at(ROOT, "prop3", &heads4)? == None); + assert!(doc.get_at(ROOT, "prop3", &heads4)?.is_none()); assert!( doc.keys_at(ROOT, &heads5).collect_vec() == vec!["prop2".to_owned(), "prop3".to_owned()] ); assert_eq!(doc.length_at(ROOT, &heads5), 2); assert_eq!(doc.length(ROOT), 2); - assert!(doc.get_at(ROOT, "prop1", &heads5)? == None); + assert!(doc.get_at(ROOT, "prop1", &heads5)?.is_none()); assert!(doc.get_at(ROOT, "prop2", &heads5)?.unwrap().0 == Value::str("val3")); assert!(doc.get_at(ROOT, "prop3", &heads5)?.unwrap().0 == Value::str("val4")); assert_eq!(doc.keys_at(ROOT, &[]).count(), 0); assert_eq!(doc.length_at(ROOT, &[]), 0); - assert!(doc.get_at(ROOT, "prop1", &[])? == None); - assert!(doc.get_at(ROOT, "prop2", &[])? == None); - assert!(doc.get_at(ROOT, "prop3", &[])? 
== None); + assert!(doc.get_at(ROOT, "prop1", &[])?.is_none()); + assert!(doc.get_at(ROOT, "prop2", &[])?.is_none()); + assert!(doc.get_at(ROOT, "prop3", &[])?.is_none()); Ok(()) } @@ -1080,8 +1080,8 @@ fn delete_nothing_in_map_is_noop() { // deleting a missing key in a map should just be a noop assert!(tx.delete(ROOT, "a",).is_ok()); tx.commit(); - let last_change = doc.get_last_local_change().unwrap(); - assert_eq!(last_change.len(), 0); + let last_change = doc.get_last_local_change(); + assert!(last_change.is_none()); let bytes = doc.save(); assert!(Automerge::load(&bytes,).is_ok()); @@ -1318,21 +1318,33 @@ fn compute_list_indexes_correctly_when_list_element_is_split_across_tree_nodes() fn get_parent_objects() { let mut doc = AutoCommit::new(); let map = doc.put_object(ROOT, "a", ObjType::Map).unwrap(); - let list = doc.insert_object(&map, 0, ObjType::List).unwrap(); + let list = doc.put_object(&map, "b", ObjType::List).unwrap(); doc.insert(&list, 0, 2).unwrap(); let text = doc.put_object(&list, 0, ObjType::Text).unwrap(); assert_eq!( doc.parents(&map).unwrap().next(), - Some((ROOT, Prop::Map("a".into()))) + Some(Parent { + obj: ROOT, + prop: Prop::Map("a".into()), + visible: true + }) ); assert_eq!( doc.parents(&list).unwrap().next(), - Some((map, Prop::Seq(0))) + Some(Parent { + obj: map, + prop: Prop::Map("b".into()), + visible: true + }) ); assert_eq!( doc.parents(&text).unwrap().next(), - Some((list, Prop::Seq(0))) + Some(Parent { + obj: list, + prop: Prop::Seq(0), + visible: true + }) ); } @@ -1340,7 +1352,7 @@ fn get_parent_objects() { fn get_path_to_object() { let mut doc = AutoCommit::new(); let map = doc.put_object(ROOT, "a", ObjType::Map).unwrap(); - let list = doc.insert_object(&map, 0, ObjType::List).unwrap(); + let list = doc.put_object(&map, "b", ObjType::List).unwrap(); doc.insert(&list, 0, 2).unwrap(); let text = doc.put_object(&list, 0, ObjType::Text).unwrap(); @@ -1350,13 +1362,16 @@ fn get_path_to_object() { ); assert_eq!( 
doc.path_to_object(&list).unwrap(), - vec![(ROOT, Prop::Map("a".into())), (map.clone(), Prop::Seq(0)),] - ); - assert_eq!( - doc.path_to_object(&text).unwrap(), vec![ (ROOT, Prop::Map("a".into())), - (map, Prop::Seq(0)), + (map.clone(), Prop::Map("b".into())), + ] + ); + assert_eq!( + doc.path_to_object(text).unwrap(), + vec![ + (ROOT, Prop::Map("a".into())), + (map, Prop::Map("b".into())), (list, Prop::Seq(0)), ] ); @@ -1366,14 +1381,35 @@ fn get_path_to_object() { fn parents_iterator() { let mut doc = AutoCommit::new(); let map = doc.put_object(ROOT, "a", ObjType::Map).unwrap(); - let list = doc.insert_object(&map, 0, ObjType::List).unwrap(); + let list = doc.put_object(&map, "b", ObjType::List).unwrap(); doc.insert(&list, 0, 2).unwrap(); let text = doc.put_object(&list, 0, ObjType::Text).unwrap(); let mut parents = doc.parents(text).unwrap(); - assert_eq!(parents.next(), Some((list, Prop::Seq(0)))); - assert_eq!(parents.next(), Some((map, Prop::Seq(0)))); - assert_eq!(parents.next(), Some((ROOT, Prop::Map("a".into())))); + assert_eq!( + parents.next(), + Some(Parent { + obj: list, + prop: Prop::Seq(0), + visible: true + }) + ); + assert_eq!( + parents.next(), + Some(Parent { + obj: map, + prop: Prop::Map("b".into()), + visible: true + }) + ); + assert_eq!( + parents.next(), + Some(Parent { + obj: ROOT, + prop: Prop::Map("a".into()), + visible: true + }) + ); assert_eq!(parents.next(), None); } @@ -1383,27 +1419,28 @@ fn can_insert_a_grapheme_into_text() { let mut tx = doc.transaction(); let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap(); let polar_bear = "🐻‍❄️"; - tx.insert(&text, 0, polar_bear).unwrap(); + tx.splice_text(&text, 0, 0, polar_bear).unwrap(); tx.commit(); let s = doc.text(&text).unwrap(); assert_eq!(s, polar_bear); let len = doc.length(&text); - assert_eq!(len, 1); // just one grapheme + assert_eq!(len, 4); // 4 utf8 chars } #[test] -fn can_insert_long_string_into_text() { +fn long_strings_spliced_into_text_get_segmented_by_utf8_chars() 
{ let mut doc = Automerge::new(); let mut tx = doc.transaction(); let text = tx.put_object(ROOT, "text", ObjType::Text).unwrap(); let polar_bear = "🐻‍❄️"; let polar_bear_army = polar_bear.repeat(100); - tx.insert(&text, 0, &polar_bear_army).unwrap(); + tx.splice_text(&text, 0, 0, &polar_bear_army).unwrap(); tx.commit(); let s = doc.text(&text).unwrap(); assert_eq!(s, polar_bear_army); let len = doc.length(&text); - assert_eq!(len, 1); // many graphemes + assert_eq!(len, polar_bear.chars().count() * 100); + assert_eq!(len, 400); } #[test] @@ -1470,6 +1507,11 @@ fn observe_counter_change_application() { let changes = doc.get_changes(&[]).unwrap().into_iter().cloned(); let mut new_doc = AutoCommit::new().with_observer(VecOpObserver::default()); + // make a new change to the doc to stop the empty doc logic from skipping the intermediate + // patches. The is probably not really necessary, we could update this test to just test that + // the correct final state is emitted. For now though, we leave it as is. + new_doc.put(ROOT, "foo", "bar").unwrap(); + new_doc.observer().take_patches(); new_doc.apply_changes(changes).unwrap(); assert_eq!( new_doc.observer().take_patches(), @@ -1502,7 +1544,7 @@ fn observe_counter_change_application() { #[test] fn get_changes_heads_empty() { - let mut doc = AutoCommit::default(); + let mut doc = AutoCommit::new(); doc.put(ROOT, "key1", 1).unwrap(); doc.commit(); doc.put(ROOT, "key2", 1).unwrap(); diff --git a/automerge/src/autoserde.rs b/rust/automerge/src/autoserde.rs similarity index 69% rename from automerge/src/autoserde.rs rename to rust/automerge/src/autoserde.rs index 63b0848a..ccfc6ae6 100644 --- a/automerge/src/autoserde.rs +++ b/rust/automerge/src/autoserde.rs @@ -1,18 +1,33 @@ use serde::ser::{SerializeMap, SerializeSeq}; -use crate::{Automerge, ObjId, ObjType, Value}; +use crate::{ObjId, ObjType, ReadDoc, Value}; -/// A wrapper type which implements [`serde::Serialize`] for an [`Automerge`]. 
+/// A wrapper type which implements [`serde::Serialize`] for a [`ReadDoc`]. +/// +/// # Example +/// +/// ``` +/// # fn main() -> Result<(), Box> { +/// use automerge::{AutoCommit, AutomergeError, Value, transaction::Transactable}; +/// let mut doc = AutoCommit::new(); +/// doc.put(automerge::ROOT, "key", "value")?; +/// +/// let serialized = serde_json::to_string(&automerge::AutoSerde::from(&doc)).unwrap(); +/// +/// assert_eq!(serialized, r#"{"key":"value"}"#); +/// # Ok(()) +/// # } +/// ``` #[derive(Debug)] -pub struct AutoSerde<'a>(&'a Automerge); +pub struct AutoSerde<'a, R: crate::ReadDoc>(&'a R); -impl<'a> From<&'a Automerge> for AutoSerde<'a> { - fn from(a: &'a Automerge) -> Self { +impl<'a, R: ReadDoc> From<&'a R> for AutoSerde<'a, R> { + fn from(a: &'a R) -> Self { AutoSerde(a) } } -impl<'a> serde::Serialize for AutoSerde<'a> { +impl<'a, R: crate::ReadDoc> serde::Serialize for AutoSerde<'a, R> { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, @@ -25,12 +40,12 @@ impl<'a> serde::Serialize for AutoSerde<'a> { } } -struct AutoSerdeMap<'a> { - doc: &'a Automerge, +struct AutoSerdeMap<'a, R> { + doc: &'a R, obj: ObjId, } -impl<'a> serde::Serialize for AutoSerdeMap<'a> { +impl<'a, R: crate::ReadDoc> serde::Serialize for AutoSerdeMap<'a, R> { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, @@ -51,12 +66,12 @@ impl<'a> serde::Serialize for AutoSerdeMap<'a> { } } -struct AutoSerdeSeq<'a> { - doc: &'a Automerge, +struct AutoSerdeSeq<'a, R> { + doc: &'a R, obj: ObjId, } -impl<'a> serde::Serialize for AutoSerdeSeq<'a> { +impl<'a, R: crate::ReadDoc> serde::Serialize for AutoSerdeSeq<'a, R> { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, @@ -77,13 +92,13 @@ impl<'a> serde::Serialize for AutoSerdeSeq<'a> { } } -struct AutoSerdeVal<'a> { - doc: &'a Automerge, +struct AutoSerdeVal<'a, R> { + doc: &'a R, val: Value<'a>, obj: ObjId, } -impl<'a> serde::Serialize for AutoSerdeVal<'a> { +impl<'a, R: 
crate::ReadDoc> serde::Serialize for AutoSerdeVal<'a, R> { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, diff --git a/automerge/src/change.rs b/rust/automerge/src/change.rs similarity index 75% rename from automerge/src/change.rs rename to rust/automerge/src/change.rs index 3c45a524..be467a84 100644 --- a/automerge/src/change.rs +++ b/rust/automerge/src/change.rs @@ -142,6 +142,12 @@ impl AsRef> for Change { } } +impl From for StoredChange<'static, Verified> { + fn from(c: Change) -> Self { + c.stored + } +} + #[derive(thiserror::Error, Debug)] pub enum LoadError { #[error("unable to parse change: {0}")] @@ -272,7 +278,7 @@ impl From<&Change> for crate::ExpandedChange { let operations = c .iter_ops() .map(|o| crate::legacy::Op { - action: crate::types::OpType::from_index_and_value(o.action, o.val).unwrap(), + action: crate::types::OpType::from_action_and_value(o.action, o.val), insert: o.insert, key: match o.key { StoredKey::Elem(e) if e.is_head() => { @@ -313,3 +319,84 @@ impl From<&Change> for crate::ExpandedChange { } } } + +#[cfg(test)] +pub(crate) mod gen { + use super::Change; + use crate::{ + op_tree::OpSetMetadata, + storage::{change::ChangeBuilder, convert::op_as_actor_id}, + types::{ + gen::{gen_hash, gen_op}, + ObjId, Op, OpId, + }, + ActorId, + }; + use proptest::prelude::*; + + fn gen_actor() -> impl Strategy { + proptest::array::uniform32(proptest::bits::u8::ANY).prop_map(ActorId::from) + } + + prop_compose! 
{ + fn gen_actors()(this_actor in gen_actor(), other_actors in proptest::collection::vec(gen_actor(), 0..10)) -> (ActorId, Vec) { + (this_actor, other_actors) + } + } + + fn gen_ops( + this_actor: ActorId, + other_actors: Vec, + ) -> impl Strategy, OpSetMetadata)> { + let mut all_actors = vec![this_actor]; + all_actors.extend(other_actors); + let mut m = OpSetMetadata::from_actors(all_actors); + m.props.cache("someprop".to_string()); + let root_id = ObjId::root(); + (0_u64..10) + .prop_map(|num_ops| { + (0..num_ops) + .map(|counter| OpId::new(counter, 0)) + .collect::>() + }) + .prop_flat_map(move |opids| { + let mut strat = Just(Vec::new()).boxed(); + for opid in opids { + strat = (gen_op(opid, vec![0]), strat) + .prop_map(move |(op, ops)| { + let mut result = Vec::with_capacity(ops.len() + 1); + result.extend(ops); + result.push((root_id, op)); + result + }) + .boxed(); + } + strat + }) + .prop_map(move |ops| (ops, m.clone())) + } + + prop_compose! { + pub(crate) fn gen_change()((this_actor, other_actors) in gen_actors())( + (ops, metadata) in gen_ops(this_actor.clone(), other_actors), + start_op in 1_u64..200000, + seq in 0_u64..200000, + timestamp in 0..i64::MAX, + deps in proptest::collection::vec(gen_hash(), 0..100), + message in proptest::option::of("[a-z]{200}"), + this_actor in Just(this_actor), + ) -> Change { + let ops = ops.iter().map(|(obj, op)| op_as_actor_id(obj, op, &metadata)); + Change::new(ChangeBuilder::new() + .with_dependencies(deps) + .with_start_op(start_op.try_into().unwrap()) + .with_message(message) + .with_actor(this_actor) + .with_seq(seq) + .with_timestamp(timestamp) + .build(ops.into_iter()) + .unwrap()) + } + + } +} diff --git a/rust/automerge/src/change_graph.rs b/rust/automerge/src/change_graph.rs new file mode 100644 index 00000000..01d269d8 --- /dev/null +++ b/rust/automerge/src/change_graph.rs @@ -0,0 +1,344 @@ +use std::collections::{BTreeMap, BTreeSet}; + +use crate::{ + clock::{Clock, ClockData}, + Change, ChangeHash, +}; + 
+/// The graph of changes +/// +/// This is a sort of adjacency list based representation, except that instead of using linked +/// lists, we keep all the edges and nodes in two vecs and reference them by index which plays nice +/// with the cache +#[derive(Debug, Clone)] +pub(crate) struct ChangeGraph { + nodes: Vec, + edges: Vec, + hashes: Vec, + nodes_by_hash: BTreeMap, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] +struct NodeIdx(u32); + +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] +struct EdgeIdx(u32); + +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] +struct HashIdx(u32); + +#[derive(Debug, Clone)] +struct Edge { + // Edges are always child -> parent so we only store the target, the child is implicit + // as you get the edge from the child + target: NodeIdx, + next: Option, +} + +#[derive(Debug, Clone)] +struct ChangeNode { + hash_idx: HashIdx, + actor_index: usize, + seq: u64, + max_op: u64, + parents: Option, +} + +impl ChangeGraph { + pub(crate) fn new() -> Self { + Self { + nodes: Vec::new(), + edges: Vec::new(), + nodes_by_hash: BTreeMap::new(), + hashes: Vec::new(), + } + } + + pub(crate) fn add_change( + &mut self, + change: &Change, + actor_idx: usize, + ) -> Result<(), MissingDep> { + let hash = change.hash(); + if self.nodes_by_hash.contains_key(&hash) { + return Ok(()); + } + let parent_indices = change + .deps() + .iter() + .map(|h| self.nodes_by_hash.get(h).copied().ok_or(MissingDep(*h))) + .collect::, _>>()?; + let node_idx = self.add_node(actor_idx, change); + self.nodes_by_hash.insert(hash, node_idx); + for parent_idx in parent_indices { + self.add_parent(node_idx, parent_idx); + } + Ok(()) + } + + fn add_node(&mut self, actor_index: usize, change: &Change) -> NodeIdx { + let idx = NodeIdx(self.nodes.len() as u32); + let hash_idx = self.add_hash(change.hash()); + self.nodes.push(ChangeNode { + hash_idx, + actor_index, + seq: change.seq(), + max_op: change.max_op(), + parents: None, + }); + 
idx + } + + fn add_hash(&mut self, hash: ChangeHash) -> HashIdx { + let idx = HashIdx(self.hashes.len() as u32); + self.hashes.push(hash); + idx + } + + fn add_parent(&mut self, child_idx: NodeIdx, parent_idx: NodeIdx) { + let new_edge_idx = EdgeIdx(self.edges.len() as u32); + let new_edge = Edge { + target: parent_idx, + next: None, + }; + self.edges.push(new_edge); + + let child = &mut self.nodes[child_idx.0 as usize]; + if let Some(edge_idx) = child.parents { + let mut edge = &mut self.edges[edge_idx.0 as usize]; + while let Some(next) = edge.next { + edge = &mut self.edges[next.0 as usize]; + } + edge.next = Some(new_edge_idx); + } else { + child.parents = Some(new_edge_idx); + } + } + + fn parents(&self, node_idx: NodeIdx) -> impl Iterator + '_ { + let mut edge_idx = self.nodes[node_idx.0 as usize].parents; + std::iter::from_fn(move || { + let this_edge_idx = edge_idx?; + let edge = &self.edges[this_edge_idx.0 as usize]; + edge_idx = edge.next; + Some(edge.target) + }) + } + + pub(crate) fn clock_for_heads(&self, heads: &[ChangeHash]) -> Clock { + let mut clock = Clock::new(); + + self.traverse_ancestors(heads, |node, _hash| { + clock.include( + node.actor_index, + ClockData { + max_op: node.max_op, + seq: node.seq, + }, + ); + }); + + clock + } + + pub(crate) fn remove_ancestors( + &self, + changes: &mut BTreeSet, + heads: &[ChangeHash], + ) { + self.traverse_ancestors(heads, |_node, hash| { + changes.remove(hash); + }); + } + + /// Call `f` for each (node, hash) in the graph, starting from the given heads + /// + /// No guarantees are made about the order of traversal but each node will only be visited + /// once. 
+ fn traverse_ancestors( + &self, + heads: &[ChangeHash], + mut f: F, + ) { + let mut to_visit = heads + .iter() + .filter_map(|h| self.nodes_by_hash.get(h)) + .copied() + .collect::>(); + + let mut visited = BTreeSet::new(); + + while let Some(idx) = to_visit.pop() { + if visited.contains(&idx) { + continue; + } else { + visited.insert(idx); + } + let node = &self.nodes[idx.0 as usize]; + let hash = &self.hashes[node.hash_idx.0 as usize]; + f(node, hash); + to_visit.extend(self.parents(idx)); + } + } +} + +#[derive(Debug, thiserror::Error)] +#[error("attempted to derive a clock for a change with dependencies we don't have")] +pub struct MissingDep(ChangeHash); + +#[cfg(test)] +mod tests { + use std::{ + num::NonZeroU64, + time::{SystemTime, UNIX_EPOCH}, + }; + + use crate::{ + clock::ClockData, + op_tree::OpSetMetadata, + storage::{change::ChangeBuilder, convert::op_as_actor_id}, + types::{Key, ObjId, Op, OpId, OpIds}, + ActorId, + }; + + use super::*; + + #[test] + fn clock_by_heads() { + let mut builder = TestGraphBuilder::new(); + let actor1 = builder.actor(); + let actor2 = builder.actor(); + let actor3 = builder.actor(); + let change1 = builder.change(&actor1, 10, &[]); + let change2 = builder.change(&actor2, 20, &[change1]); + let change3 = builder.change(&actor3, 30, &[change1]); + let change4 = builder.change(&actor1, 10, &[change2, change3]); + let graph = builder.build(); + + let mut expected_clock = Clock::new(); + expected_clock.include(builder.index(&actor1), ClockData { max_op: 50, seq: 2 }); + expected_clock.include(builder.index(&actor2), ClockData { max_op: 30, seq: 1 }); + expected_clock.include(builder.index(&actor3), ClockData { max_op: 40, seq: 1 }); + + let clock = graph.clock_for_heads(&[change4]); + assert_eq!(clock, expected_clock); + } + + #[test] + fn remove_ancestors() { + let mut builder = TestGraphBuilder::new(); + let actor1 = builder.actor(); + let actor2 = builder.actor(); + let actor3 = builder.actor(); + let change1 = 
builder.change(&actor1, 10, &[]); + let change2 = builder.change(&actor2, 20, &[change1]); + let change3 = builder.change(&actor3, 30, &[change1]); + let change4 = builder.change(&actor1, 10, &[change2, change3]); + let graph = builder.build(); + + let mut changes = vec![change1, change2, change3, change4] + .into_iter() + .collect::>(); + let heads = vec![change2]; + graph.remove_ancestors(&mut changes, &heads); + + let expected_changes = vec![change3, change4].into_iter().collect::>(); + + assert_eq!(changes, expected_changes); + } + + struct TestGraphBuilder { + actors: Vec, + changes: Vec, + seqs_by_actor: BTreeMap, + } + + impl TestGraphBuilder { + fn new() -> Self { + TestGraphBuilder { + actors: Vec::new(), + changes: Vec::new(), + seqs_by_actor: BTreeMap::new(), + } + } + + fn actor(&mut self) -> ActorId { + let actor = ActorId::random(); + self.actors.push(actor.clone()); + actor + } + + fn index(&self, actor: &ActorId) -> usize { + self.actors.iter().position(|a| a == actor).unwrap() + } + + /// Create a change with `num_new_ops` and `parents` for `actor` + /// + /// The `start_op` and `seq` of the change will be computed from the + /// previous changes for the same actor. 
+ fn change( + &mut self, + actor: &ActorId, + num_new_ops: usize, + parents: &[ChangeHash], + ) -> ChangeHash { + let mut meta = OpSetMetadata::from_actors(self.actors.clone()); + let key = meta.props.cache("key".to_string()); + + let start_op = parents + .iter() + .map(|c| { + self.changes + .iter() + .find(|change| change.hash() == *c) + .unwrap() + .max_op() + }) + .max() + .unwrap_or(0) + + 1; + + let actor_idx = self.index(actor); + let ops = (0..num_new_ops) + .map(|opnum| Op { + id: OpId::new(start_op + opnum as u64, actor_idx), + action: crate::OpType::Put("value".into()), + key: Key::Map(key), + succ: OpIds::empty(), + pred: OpIds::empty(), + insert: false, + }) + .collect::>(); + + let root = ObjId::root(); + let timestamp = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_millis() as i64; + let seq = self.seqs_by_actor.entry(actor.clone()).or_insert(1); + let change = Change::new( + ChangeBuilder::new() + .with_dependencies(parents.to_vec()) + .with_start_op(NonZeroU64::new(start_op).unwrap()) + .with_actor(actor.clone()) + .with_seq(*seq) + .with_timestamp(timestamp) + .build(ops.iter().map(|op| op_as_actor_id(&root, op, &meta))) + .unwrap(), + ); + *seq = seq.checked_add(1).unwrap(); + let hash = change.hash(); + self.changes.push(change); + hash + } + + fn build(&self) -> ChangeGraph { + let mut graph = ChangeGraph::new(); + for change in &self.changes { + let actor_idx = self.index(change.actor_id()); + graph.add_change(change, actor_idx).unwrap(); + } + graph + } + } +} diff --git a/automerge/src/clock.rs b/rust/automerge/src/clock.rs similarity index 88% rename from automerge/src/clock.rs rename to rust/automerge/src/clock.rs index 11890ffb..64d00fcf 100644 --- a/automerge/src/clock.rs +++ b/rust/automerge/src/clock.rs @@ -59,8 +59,8 @@ impl Clock { } pub(crate) fn covers(&self, id: &OpId) -> bool { - if let Some(data) = self.0.get(&id.1) { - data.max_op >= id.0 + if let Some(data) = self.0.get(&id.actor()) { + data.max_op >= 
id.counter() } else { false } @@ -71,12 +71,6 @@ impl Clock { self.0.get(actor_index) } - pub(crate) fn merge(&mut self, other: &Self) { - for (actor, data) in &other.0 { - self.include(*actor, *data); - } - } - fn is_greater(&self, other: &Self) -> bool { let mut has_greater = false; @@ -123,16 +117,16 @@ mod tests { clock.include(1, ClockData { max_op: 20, seq: 1 }); clock.include(2, ClockData { max_op: 10, seq: 2 }); - assert!(clock.covers(&OpId(10, 1))); - assert!(clock.covers(&OpId(20, 1))); - assert!(!clock.covers(&OpId(30, 1))); + assert!(clock.covers(&OpId::new(10, 1))); + assert!(clock.covers(&OpId::new(20, 1))); + assert!(!clock.covers(&OpId::new(30, 1))); - assert!(clock.covers(&OpId(5, 2))); - assert!(clock.covers(&OpId(10, 2))); - assert!(!clock.covers(&OpId(15, 2))); + assert!(clock.covers(&OpId::new(5, 2))); + assert!(clock.covers(&OpId::new(10, 2))); + assert!(!clock.covers(&OpId::new(15, 2))); - assert!(!clock.covers(&OpId(1, 3))); - assert!(!clock.covers(&OpId(100, 3))); + assert!(!clock.covers(&OpId::new(1, 3))); + assert!(!clock.covers(&OpId::new(100, 3))); } #[test] diff --git a/automerge/src/columnar.rs b/rust/automerge/src/columnar.rs similarity index 100% rename from automerge/src/columnar.rs rename to rust/automerge/src/columnar.rs diff --git a/automerge/src/columnar/column_range.rs b/rust/automerge/src/columnar/column_range.rs similarity index 100% rename from automerge/src/columnar/column_range.rs rename to rust/automerge/src/columnar/column_range.rs diff --git a/automerge/src/columnar/column_range/boolean.rs b/rust/automerge/src/columnar/column_range/boolean.rs similarity index 100% rename from automerge/src/columnar/column_range/boolean.rs rename to rust/automerge/src/columnar/column_range/boolean.rs diff --git a/automerge/src/columnar/column_range/delta.rs b/rust/automerge/src/columnar/column_range/delta.rs similarity index 100% rename from automerge/src/columnar/column_range/delta.rs rename to 
rust/automerge/src/columnar/column_range/delta.rs diff --git a/automerge/src/columnar/column_range/deps.rs b/rust/automerge/src/columnar/column_range/deps.rs similarity index 91% rename from automerge/src/columnar/column_range/deps.rs rename to rust/automerge/src/columnar/column_range/deps.rs index df49192a..1956acd1 100644 --- a/automerge/src/columnar/column_range/deps.rs +++ b/rust/automerge/src/columnar/column_range/deps.rs @@ -62,7 +62,11 @@ impl<'a> DepsIter<'a> { } None => return Ok(None), }; - let mut result = Vec::with_capacity(num); + // We cannot trust `num` because it is provided over the network, + // but in the common case it will be correct and small (so we + // use with_capacity to make sure the vector is precisely the right + // size). + let mut result = Vec::with_capacity(std::cmp::min(num, 100)); while result.len() < num { match self .deps diff --git a/automerge/src/columnar/column_range/generic.rs b/rust/automerge/src/columnar/column_range/generic.rs similarity index 100% rename from automerge/src/columnar/column_range/generic.rs rename to rust/automerge/src/columnar/column_range/generic.rs diff --git a/automerge/src/columnar/column_range/generic/group.rs b/rust/automerge/src/columnar/column_range/generic/group.rs similarity index 100% rename from automerge/src/columnar/column_range/generic/group.rs rename to rust/automerge/src/columnar/column_range/generic/group.rs diff --git a/automerge/src/columnar/column_range/generic/simple.rs b/rust/automerge/src/columnar/column_range/generic/simple.rs similarity index 100% rename from automerge/src/columnar/column_range/generic/simple.rs rename to rust/automerge/src/columnar/column_range/generic/simple.rs diff --git a/automerge/src/columnar/column_range/key.rs b/rust/automerge/src/columnar/column_range/key.rs similarity index 98% rename from automerge/src/columnar/column_range/key.rs rename to rust/automerge/src/columnar/column_range/key.rs index 5283fc39..70ea8e1e 100644 --- 
a/automerge/src/columnar/column_range/key.rs +++ b/rust/automerge/src/columnar/column_range/key.rs @@ -167,11 +167,11 @@ impl<'a> KeyIter<'a> { Ok(Some(Key::Prop(string))) } (Some(None) | None, Some(Some(0)), Some(None) | None) => { - Ok(Some(Key::Elem(ElemId(OpId(0, 0))))) + Ok(Some(Key::Elem(ElemId(OpId::new(0, 0))))) } (Some(Some(actor)), Some(Some(ctr)), Some(None) | None) => match ctr.try_into() { //Ok(ctr) => Some(Ok(Key::Elem(ElemId(OpId(ctr, actor as usize))))), - Ok(ctr) => Ok(Some(Key::Elem(ElemId(OpId::new(actor as usize, ctr))))), + Ok(ctr) => Ok(Some(Key::Elem(ElemId(OpId::new(ctr, actor as usize))))), Err(_) => Err(DecodeColumnError::invalid_value( "counter", "negative value for counter", diff --git a/automerge/src/columnar/column_range/obj_id.rs b/rust/automerge/src/columnar/column_range/obj_id.rs similarity index 98% rename from automerge/src/columnar/column_range/obj_id.rs rename to rust/automerge/src/columnar/column_range/obj_id.rs index f6525b44..d282563e 100644 --- a/automerge/src/columnar/column_range/obj_id.rs +++ b/rust/automerge/src/columnar/column_range/obj_id.rs @@ -133,7 +133,7 @@ impl<'a> ObjIdIter<'a> { .map_err(|e| DecodeColumnError::decode_raw("counter", e))?; match (actor, counter) { (None | Some(None), None | Some(None)) => Ok(Some(ObjId::root())), - (Some(Some(a)), Some(Some(c))) => Ok(Some(ObjId(OpId(c, a as usize)))), + (Some(Some(a)), Some(Some(c))) => Ok(Some(ObjId(OpId::new(c, a as usize)))), (_, Some(Some(0))) => Ok(Some(ObjId::root())), (Some(None) | None, _) => Err(DecodeColumnError::unexpected_null("actor")), (_, Some(None) | None) => Err(DecodeColumnError::unexpected_null("counter")), @@ -166,7 +166,7 @@ impl ObjIdEncoder { } convert::ObjId::Op(o) => { self.actor.append_value(o.actor() as u64); - self.counter.append_value(o.counter() as u64); + self.counter.append_value(o.counter()); } } } diff --git a/automerge/src/columnar/column_range/opid.rs b/rust/automerge/src/columnar/column_range/opid.rs similarity index 96% 
rename from automerge/src/columnar/column_range/opid.rs rename to rust/automerge/src/columnar/column_range/opid.rs index 592f6041..d2cdce79 100644 --- a/automerge/src/columnar/column_range/opid.rs +++ b/rust/automerge/src/columnar/column_range/opid.rs @@ -104,11 +104,11 @@ impl<'a> OpIdIter<'a> { .transpose() .map_err(|e| DecodeColumnError::decode_raw("counter", e))?; match (actor, counter) { - (Some(Some(a)), Some(Some(c))) => match c.try_into() { - Ok(c) => Ok(Some(OpId(c, a as usize))), + (Some(Some(a)), Some(Some(c))) => match u32::try_from(c) { + Ok(c) => Ok(Some(OpId::new(c as u64, a as usize))), Err(_) => Err(DecodeColumnError::invalid_value( "counter", - "negative value encountered", + "negative or large value encountered", )), }, (Some(None), _) => Err(DecodeColumnError::unexpected_null("actor")), diff --git a/automerge/src/columnar/column_range/opid_list.rs b/rust/automerge/src/columnar/column_range/opid_list.rs similarity index 96% rename from automerge/src/columnar/column_range/opid_list.rs rename to rust/automerge/src/columnar/column_range/opid_list.rs index 03b92ccf..6a9c8a38 100644 --- a/automerge/src/columnar/column_range/opid_list.rs +++ b/rust/automerge/src/columnar/column_range/opid_list.rs @@ -189,7 +189,12 @@ impl<'a> OpIdListIter<'a> { Some(None) => return Err(DecodeColumnError::unexpected_null("num")), None => return Ok(None), }; - let mut p = Vec::with_capacity(num as usize); + + // We cannot trust `num` because it is provided over the network, + // but in the common case it will be correct and small (so we + // use with_capacity to make sure the vector is precisely the right + // size). 
+ let mut p = Vec::with_capacity(std::cmp::min(num, 100) as usize); for _ in 0..num { let actor = self .actor @@ -203,7 +208,7 @@ impl<'a> OpIdListIter<'a> { .map_err(|e| DecodeColumnError::decode_raw("counter", e))?; match (actor, counter) { (Some(Some(a)), Some(Some(ctr))) => match ctr.try_into() { - Ok(ctr) => p.push(OpId(ctr, a as usize)), + Ok(ctr) => p.push(OpId::new(ctr, a as usize)), Err(_e) => { return Err(DecodeColumnError::invalid_value( "counter", diff --git a/automerge/src/columnar/column_range/raw.rs b/rust/automerge/src/columnar/column_range/raw.rs similarity index 100% rename from automerge/src/columnar/column_range/raw.rs rename to rust/automerge/src/columnar/column_range/raw.rs diff --git a/automerge/src/columnar/column_range/rle.rs b/rust/automerge/src/columnar/column_range/rle.rs similarity index 98% rename from automerge/src/columnar/column_range/rle.rs rename to rust/automerge/src/columnar/column_range/rle.rs index 63c0b123..c500a7f4 100644 --- a/automerge/src/columnar/column_range/rle.rs +++ b/rust/automerge/src/columnar/column_range/rle.rs @@ -147,7 +147,7 @@ mod tests { let mut buf = Vec::with_capacity(vals.len() * 3); let mut encoder: RleEncoder<_, u64> = RleEncoder::new(&mut buf); for val in vals { - encoder.append_value(&val) + encoder.append_value(val) } let (_, total_slice_len) = encoder.finish(); let mut decoder: RleDecoder<'_, u64> = @@ -167,7 +167,7 @@ mod tests { for val in vals.iter().take(4) { encoder.append_value(val) } - encoder.append_value(&5); + encoder.append_value(5); for val in vals.iter().skip(4) { encoder.append_value(val); } diff --git a/automerge/src/columnar/column_range/value.rs b/rust/automerge/src/columnar/column_range/value.rs similarity index 91% rename from automerge/src/columnar/column_range/value.rs rename to rust/automerge/src/columnar/column_range/value.rs index 43f63437..03a5aa60 100644 --- a/automerge/src/columnar/column_range/value.rs +++ b/rust/automerge/src/columnar/column_range/value.rs @@ -4,10 +4,15 
@@ use crate::{ columnar::{ encoding::{ leb128::{lebsize, ulebsize}, - raw, DecodeColumnError, RawBytes, RawDecoder, RawEncoder, RleDecoder, RleEncoder, Sink, + raw, DecodeColumnError, DecodeError, RawBytes, RawDecoder, RawEncoder, RleDecoder, + RleEncoder, Sink, }, SpliceError, }, + storage::parse::{ + leb128::{leb128_i64, leb128_u64}, + Input, ParseResult, + }, ScalarValue, }; @@ -217,18 +222,8 @@ impl<'a> Iterator for ValueIter<'a> { ValueType::Null => Some(Ok(ScalarValue::Null)), ValueType::True => Some(Ok(ScalarValue::Boolean(true))), ValueType::False => Some(Ok(ScalarValue::Boolean(false))), - ValueType::Uleb => self.parse_raw(val_meta, |mut bytes| { - let val = leb128::read::unsigned(&mut bytes).map_err(|e| { - DecodeColumnError::invalid_value("value", e.to_string()) - })?; - Ok(ScalarValue::Uint(val)) - }), - ValueType::Leb => self.parse_raw(val_meta, |mut bytes| { - let val = leb128::read::signed(&mut bytes).map_err(|e| { - DecodeColumnError::invalid_value("value", e.to_string()) - })?; - Ok(ScalarValue::Int(val)) - }), + ValueType::Uleb => self.parse_input(val_meta, leb128_u64), + ValueType::Leb => self.parse_input(val_meta, leb128_i64), ValueType::String => self.parse_raw(val_meta, |bytes| { let val = std::str::from_utf8(bytes) .map_err(|e| DecodeColumnError::invalid_value("value", e.to_string()))? 
@@ -250,17 +245,11 @@ impl<'a> Iterator for ValueIter<'a> { let val = f64::from_le_bytes(raw); Ok(ScalarValue::F64(val)) }), - ValueType::Counter => self.parse_raw(val_meta, |mut bytes| { - let val = leb128::read::signed(&mut bytes).map_err(|e| { - DecodeColumnError::invalid_value("value", e.to_string()) - })?; - Ok(ScalarValue::Counter(val.into())) + ValueType::Counter => self.parse_input(val_meta, |input| { + leb128_i64(input).map(|(i, n)| (i, ScalarValue::Counter(n.into()))) }), - ValueType::Timestamp => self.parse_raw(val_meta, |mut bytes| { - let val = leb128::read::signed(&mut bytes).map_err(|e| { - DecodeColumnError::invalid_value("value", e.to_string()) - })?; - Ok(ScalarValue::Timestamp(val)) + ValueType::Timestamp => self.parse_input(val_meta, |input| { + leb128_i64(input).map(|(i, n)| (i, ScalarValue::Timestamp(n))) }), ValueType::Unknown(code) => self.parse_raw(val_meta, |bytes| { Ok(ScalarValue::Unknown { @@ -284,8 +273,8 @@ impl<'a> Iterator for ValueIter<'a> { } impl<'a> ValueIter<'a> { - fn parse_raw Result>( - &mut self, + fn parse_raw<'b, R, F: Fn(&'b [u8]) -> Result>( + &'b mut self, meta: ValueMeta, f: F, ) -> Option> { @@ -298,11 +287,24 @@ impl<'a> ValueIter<'a> { } Ok(bytes) => bytes, }; - let val = match f(raw) { - Ok(v) => v, - Err(e) => return Some(Err(e)), - }; - Some(Ok(val)) + Some(f(raw)) + } + + fn parse_input<'b, R, F: Fn(Input<'b>) -> ParseResult<'b, R, DecodeError>>( + &'b mut self, + meta: ValueMeta, + f: F, + ) -> Option> + where + R: Into, + { + self.parse_raw(meta, |raw| match f(Input::new(raw)) { + Err(e) => Err(DecodeColumnError::invalid_value("value", e.to_string())), + Ok((i, _)) if !i.is_empty() => { + Err(DecodeColumnError::invalid_value("value", "extra bytes")) + } + Ok((_, v)) => Ok(v.into()), + }) } pub(crate) fn done(&self) -> bool { diff --git a/automerge/src/columnar/encoding.rs b/rust/automerge/src/columnar/encoding.rs similarity index 94% rename from automerge/src/columnar/encoding.rs rename to 
rust/automerge/src/columnar/encoding.rs index bbdb34a8..c9435448 100644 --- a/automerge/src/columnar/encoding.rs +++ b/rust/automerge/src/columnar/encoding.rs @@ -46,6 +46,8 @@ pub(crate) enum DecodeError { FromInt(#[from] std::num::TryFromIntError), #[error("bad leb128")] BadLeb(#[from] ::leb128::read::Error), + #[error(transparent)] + BadLeb128(#[from] crate::storage::parse::leb128::Error), #[error("attempted to allocate {attempted} which is larger than the maximum of {maximum}")] OverlargeAllocation { attempted: usize, maximum: usize }, #[error("invalid string encoding")] diff --git a/automerge/src/columnar/encoding/boolean.rs b/rust/automerge/src/columnar/encoding/boolean.rs similarity index 100% rename from automerge/src/columnar/encoding/boolean.rs rename to rust/automerge/src/columnar/encoding/boolean.rs diff --git a/automerge/src/columnar/encoding/col_error.rs b/rust/automerge/src/columnar/encoding/col_error.rs similarity index 98% rename from automerge/src/columnar/encoding/col_error.rs rename to rust/automerge/src/columnar/encoding/col_error.rs index c8d5c5c0..089556b6 100644 --- a/automerge/src/columnar/encoding/col_error.rs +++ b/rust/automerge/src/columnar/encoding/col_error.rs @@ -1,5 +1,5 @@ #[derive(Clone, Debug)] -pub(crate) struct DecodeColumnError { +pub struct DecodeColumnError { path: Path, error: DecodeColErrorKind, } diff --git a/automerge/src/columnar/encoding/column_decoder.rs b/rust/automerge/src/columnar/encoding/column_decoder.rs similarity index 100% rename from automerge/src/columnar/encoding/column_decoder.rs rename to rust/automerge/src/columnar/encoding/column_decoder.rs diff --git a/automerge/src/columnar/encoding/decodable_impls.rs b/rust/automerge/src/columnar/encoding/decodable_impls.rs similarity index 100% rename from automerge/src/columnar/encoding/decodable_impls.rs rename to rust/automerge/src/columnar/encoding/decodable_impls.rs diff --git a/automerge/src/columnar/encoding/delta.rs 
b/rust/automerge/src/columnar/encoding/delta.rs similarity index 96% rename from automerge/src/columnar/encoding/delta.rs rename to rust/automerge/src/columnar/encoding/delta.rs index 049bb6fb..6234875b 100644 --- a/automerge/src/columnar/encoding/delta.rs +++ b/rust/automerge/src/columnar/encoding/delta.rs @@ -22,7 +22,7 @@ impl DeltaEncoder { pub(crate) fn append_value(&mut self, value: i64) { self.rle - .append_value(&(value.saturating_sub(self.absolute_value))); + .append_value(value.saturating_sub(self.absolute_value)); self.absolute_value = value; } diff --git a/automerge/src/columnar/encoding/encodable_impls.rs b/rust/automerge/src/columnar/encoding/encodable_impls.rs similarity index 100% rename from automerge/src/columnar/encoding/encodable_impls.rs rename to rust/automerge/src/columnar/encoding/encodable_impls.rs diff --git a/automerge/src/columnar/encoding/leb128.rs b/rust/automerge/src/columnar/encoding/leb128.rs similarity index 66% rename from automerge/src/columnar/encoding/leb128.rs rename to rust/automerge/src/columnar/encoding/leb128.rs index 036cfba8..cbb82c31 100644 --- a/automerge/src/columnar/encoding/leb128.rs +++ b/rust/automerge/src/columnar/encoding/leb128.rs @@ -1,29 +1,22 @@ /// The number of bytes required to encode `val` as a LEB128 integer -pub(crate) fn lebsize(val: i64) -> u64 { - let numbits = numbits_i64(val); - (numbits as f64 / 7.0).floor() as u64 + 1 +pub(crate) fn lebsize(mut val: i64) -> u64 { + if val < 0 { + val = !val + } + // 1 extra for the sign bit + leb_bytes(1 + 64 - val.leading_zeros() as u64) } /// The number of bytes required to encode `val` as a uLEB128 integer pub(crate) fn ulebsize(val: u64) -> u64 { - if val <= 1 { + if val == 0 { return 1; } - let numbits = numbits_u64(val); - let mut numblocks = (numbits as f64 / 7.0).floor() as u64; - if numbits % 7 != 0 { - numblocks += 1; - } - numblocks + leb_bytes(64 - val.leading_zeros() as u64) } -fn numbits_i64(val: i64) -> u64 { - // Is this right? 
This feels like it's not right - (std::mem::size_of::() as u32 * 8 - val.abs().leading_zeros()) as u64 -} - -fn numbits_u64(val: u64) -> u64 { - (std::mem::size_of::() as u32 * 8 - val.leading_zeros()) as u64 +fn leb_bytes(bits: u64) -> u64 { + (bits + 6) / 7 } #[cfg(test)] @@ -51,7 +44,7 @@ mod tests { #[test] fn ulebsize_examples() { - let scenarios = vec![0, 1, 127, 128, 129, 169]; + let scenarios = vec![0, 1, 127, 128, 129, 169, u64::MAX]; for val in scenarios { let mut out = Vec::new(); leb128::write::unsigned(&mut out, val).unwrap(); @@ -62,7 +55,23 @@ mod tests { #[test] fn lebsize_examples() { - let scenarios = vec![0, 1, -1, 127, 128, -127, -128, -2097152, 169]; + let scenarios = vec![ + 0, + 1, + -1, + 63, + 64, + -64, + -65, + 127, + 128, + -127, + -128, + -2097152, + 169, + i64::MIN, + i64::MAX, + ]; for val in scenarios { let mut out = Vec::new(); leb128::write::signed(&mut out, val).unwrap(); diff --git a/automerge/src/columnar/encoding/properties.rs b/rust/automerge/src/columnar/encoding/properties.rs similarity index 98% rename from automerge/src/columnar/encoding/properties.rs rename to rust/automerge/src/columnar/encoding/properties.rs index a6345cad..30f1169d 100644 --- a/automerge/src/columnar/encoding/properties.rs +++ b/rust/automerge/src/columnar/encoding/properties.rs @@ -139,7 +139,7 @@ pub(crate) fn option_splice_scenario< } pub(crate) fn opid() -> impl Strategy + Clone { - (0..(i64::MAX as usize), 0..(i64::MAX as u64)).prop_map(|(actor, ctr)| OpId(ctr, actor)) + (0..(u32::MAX as usize), 0..(u32::MAX as u64)).prop_map(|(actor, ctr)| OpId::new(ctr, actor)) } pub(crate) fn elemid() -> impl Strategy + Clone { diff --git a/automerge/src/columnar/encoding/raw.rs b/rust/automerge/src/columnar/encoding/raw.rs similarity index 100% rename from automerge/src/columnar/encoding/raw.rs rename to rust/automerge/src/columnar/encoding/raw.rs diff --git a/automerge/src/columnar/encoding/rle.rs b/rust/automerge/src/columnar/encoding/rle.rs similarity index 
100% rename from automerge/src/columnar/encoding/rle.rs rename to rust/automerge/src/columnar/encoding/rle.rs diff --git a/automerge/src/columnar/splice_error.rs b/rust/automerge/src/columnar/splice_error.rs similarity index 100% rename from automerge/src/columnar/splice_error.rs rename to rust/automerge/src/columnar/splice_error.rs diff --git a/automerge/src/convert.rs b/rust/automerge/src/convert.rs similarity index 100% rename from automerge/src/convert.rs rename to rust/automerge/src/convert.rs diff --git a/automerge/src/decoding.rs b/rust/automerge/src/decoding.rs similarity index 100% rename from automerge/src/decoding.rs rename to rust/automerge/src/decoding.rs diff --git a/automerge/src/error.rs b/rust/automerge/src/error.rs similarity index 75% rename from automerge/src/error.rs rename to rust/automerge/src/error.rs index 406b5d2b..62a7b72f 100644 --- a/automerge/src/error.rs +++ b/rust/automerge/src/error.rs @@ -1,46 +1,65 @@ +use crate::change::LoadError as LoadChangeError; use crate::storage::load::Error as LoadError; use crate::types::{ActorId, ScalarValue}; use crate::value::DataType; -use crate::ChangeHash; +use crate::{ChangeHash, ObjType}; use thiserror::Error; #[derive(Error, Debug)] pub enum AutomergeError { - #[error("id was not an object id")] - NotAnObject, - #[error("invalid obj id format `{0}`")] - InvalidObjIdFormat(String), - #[error("invalid obj id `{0}`")] - InvalidObjId(String), - #[error("key must not be an empty string")] - EmptyStringKey, - #[error("invalid seq {0}")] - InvalidSeq(u64), - #[error("index {0} is out of bounds")] - InvalidIndex(usize), + #[error(transparent)] + ChangeGraph(#[from] crate::change_graph::MissingDep), + #[error("failed to load compressed data: {0}")] + Deflate(#[source] std::io::Error), #[error("duplicate seq {0} found for actor {1}")] DuplicateSeqNumber(u64, ActorId), + #[error("key must not be an empty string")] + EmptyStringKey, + #[error("general failure")] + Fail, + #[error("invalid actor ID `{0}`")] + 
InvalidActorId(String), + #[error(transparent)] + InvalidChangeHashBytes(#[from] InvalidChangeHashSlice), + #[error("invalid UTF-8 character at {0}")] + InvalidCharacter(usize), #[error("invalid hash {0}")] InvalidHash(ChangeHash), - #[error("hash {0} does not correspond to a change in this document")] - MissingHash(ChangeHash), - #[error("increment operations must be against a counter value")] - MissingCounter, + #[error("index {0} is out of bounds")] + InvalidIndex(usize), + #[error("invalid obj id `{0}`")] + InvalidObjId(String), + #[error("invalid obj id format `{0}`")] + InvalidObjIdFormat(String), + #[error("invalid op for object of type `{0}`")] + InvalidOp(ObjType), + #[error("seq {0} is out of bounds")] + InvalidSeq(u64), #[error("invalid type of value, expected `{expected}` but received `{unexpected}`")] InvalidValueType { expected: String, unexpected: String, }, - #[error("general failure")] - Fail, #[error(transparent)] Load(#[from] LoadError), - #[error("failed to load compressed data: {0}")] - Deflate(#[source] std::io::Error), + #[error(transparent)] + LoadChangeError(#[from] LoadChangeError), + #[error("increment operations must be against a counter value")] + MissingCounter, + #[error("hash {0} does not correspond to a change in this document")] + MissingHash(ChangeHash), + #[error("change's deps should already be in the document")] + MissingDeps, #[error("compressed chunk was not a change")] NonChangeCompressed, - #[error(transparent)] - Clocks(#[from] crate::clocks::MissingDep), + #[error("id was not an object id")] + NotAnObject, +} + +impl PartialEq for AutomergeError { + fn eq(&self, other: &Self) -> bool { + std::mem::discriminant(self) == std::mem::discriminant(other) + } } #[cfg(feature = "wasm")] @@ -80,7 +99,7 @@ pub struct InvalidElementId(pub String); pub struct InvalidOpId(pub String); #[derive(Error, Debug)] -pub(crate) enum InvalidOpType { +pub enum InvalidOpType { #[error("unrecognized action index {0}")] UnknownAction(u64), 
#[error("non numeric argument for inc op")] diff --git a/rust/automerge/src/exid.rs b/rust/automerge/src/exid.rs new file mode 100644 index 00000000..3a5a2ca2 --- /dev/null +++ b/rust/automerge/src/exid.rs @@ -0,0 +1,224 @@ +use crate::storage::parse; +use crate::ActorId; +use serde::Serialize; +use serde::Serializer; +use std::cmp::{Ord, Ordering}; +use std::fmt; +use std::hash::{Hash, Hasher}; + +/// An identifier for an object in a document +/// +/// This can be persisted using `to_bytes` and `TryFrom<&[u8]>` breaking changes to the +/// serialization format will be considered breaking changes for this library version. +#[derive(Debug, Clone)] +pub enum ExId { + Root, + Id(u64, ActorId, usize), +} + +const SERIALIZATION_VERSION_TAG: u8 = 0; +const TYPE_ROOT: u8 = 0; +const TYPE_ID: u8 = 1; + +impl ExId { + /// Serialize this object ID to a byte array. + /// + /// This serialization format is versioned and incompatible changes to it will be considered a + /// breaking change for the version of this library. + pub fn to_bytes(&self) -> Vec { + // The serialized format is + // + // .--------------------------------. + // | version | type | data | + // +--------------------------------+ + // | 4 bytes |4 bytes | variable | + // '--------------------------------' + // + // Version is currently always `0` + // + // `data` depends on the type + // + // * If the type is `TYPE_ROOT` (0) then there is no data + // * If the type is `TYPE_ID` (1) then the data is + // + // .-------------------------------------------------------. + // | actor ID len | actor ID bytes | counter | actor index | + // '-------------------------------------------------------' + // + // Where the actor ID len, counter, and actor index are all uLEB encoded + // integers. The actor ID bytes is just an array of bytes. 
+ // + match self { + ExId::Root => { + let val: u8 = SERIALIZATION_VERSION_TAG | (TYPE_ROOT << 4); + vec![val] + } + ExId::Id(id, actor, counter) => { + let actor_bytes = actor.to_bytes(); + let mut bytes = Vec::with_capacity(actor_bytes.len() + 4 + 4); + let tag = SERIALIZATION_VERSION_TAG | (TYPE_ID << 4); + bytes.push(tag); + leb128::write::unsigned(&mut bytes, actor_bytes.len() as u64).unwrap(); + bytes.extend_from_slice(actor_bytes); + leb128::write::unsigned(&mut bytes, *counter as u64).unwrap(); + leb128::write::unsigned(&mut bytes, *id).unwrap(); + bytes + } + } + } +} + +#[derive(Debug, thiserror::Error)] +pub enum ObjIdFromBytesError { + #[error("no version tag")] + NoVersion, + #[error("invalid version tag")] + InvalidVersion(u8), + #[error("invalid type tag")] + InvalidType(u8), + #[error("invalid Actor ID length: {0}")] + ParseActorLen(String), + #[error("Not enough bytes in actor ID")] + ParseActor, + #[error("invalid counter: {0}")] + ParseCounter(String), + #[error("invalid actor index hint: {0}")] + ParseActorIdxHint(String), +} + +impl<'a> TryFrom<&'a [u8]> for ExId { + type Error = ObjIdFromBytesError; + + fn try_from(value: &'a [u8]) -> Result { + let i = parse::Input::new(value); + let (i, tag) = parse::take1::<()>(i).map_err(|_| ObjIdFromBytesError::NoVersion)?; + let version = tag & 0b1111; + if version != SERIALIZATION_VERSION_TAG { + return Err(ObjIdFromBytesError::InvalidVersion(version)); + } + let type_tag = tag >> 4; + match type_tag { + TYPE_ROOT => Ok(ExId::Root), + TYPE_ID => { + let (i, len) = parse::leb128_u64::(i) + .map_err(|e| ObjIdFromBytesError::ParseActorLen(e.to_string()))?; + let (i, actor) = parse::take_n::<()>(len as usize, i) + .map_err(|_| ObjIdFromBytesError::ParseActor)?; + let (i, counter) = parse::leb128_u64::(i) + .map_err(|e| ObjIdFromBytesError::ParseCounter(e.to_string()))?; + let (_i, actor_idx_hint) = parse::leb128_u64::(i) + .map_err(|e| ObjIdFromBytesError::ParseActorIdxHint(e.to_string()))?; + 
Ok(Self::Id(actor_idx_hint, actor.into(), counter as usize)) + } + other => Err(ObjIdFromBytesError::InvalidType(other)), + } + } +} + +impl PartialEq for ExId { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (ExId::Root, ExId::Root) => true, + (ExId::Id(ctr1, actor1, _), ExId::Id(ctr2, actor2, _)) + if ctr1 == ctr2 && actor1 == actor2 => + { + true + } + _ => false, + } + } +} + +impl Eq for ExId {} + +impl fmt::Display for ExId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ExId::Root => write!(f, "_root"), + ExId::Id(ctr, actor, _) => write!(f, "{}@{}", ctr, actor), + } + } +} + +impl Hash for ExId { + fn hash(&self, state: &mut H) { + match self { + ExId::Root => 0.hash(state), + ExId::Id(ctr, actor, _) => { + ctr.hash(state); + actor.hash(state); + } + } + } +} + +impl Ord for ExId { + fn cmp(&self, other: &Self) -> Ordering { + match (self, other) { + (ExId::Root, ExId::Root) => Ordering::Equal, + (ExId::Root, _) => Ordering::Less, + (_, ExId::Root) => Ordering::Greater, + (ExId::Id(c1, a1, _), ExId::Id(c2, a2, _)) if c1 == c2 => a2.cmp(a1), + (ExId::Id(c1, _, _), ExId::Id(c2, _, _)) => c1.cmp(c2), + } + } +} + +impl PartialOrd for ExId { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Serialize for ExId { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_str(self.to_string().as_str()) + } +} + +impl AsRef for ExId { + fn as_ref(&self) -> &ExId { + self + } +} + +#[cfg(test)] +mod tests { + use super::ExId; + use proptest::prelude::*; + + use crate::ActorId; + + fn gen_actorid() -> impl Strategy { + proptest::collection::vec(any::(), 0..100).prop_map(ActorId::from) + } + + prop_compose! 
{ + fn gen_non_root_objid()(actor in gen_actorid(), counter in any::(), idx in any::()) -> ExId { + ExId::Id(idx as u64, actor, counter) + } + } + + fn gen_obji() -> impl Strategy { + prop_oneof![Just(ExId::Root), gen_non_root_objid()] + } + + proptest! { + #[test] + fn objid_roundtrip(objid in gen_obji()) { + let bytes = objid.to_bytes(); + let objid2 = ExId::try_from(&bytes[..]).unwrap(); + assert_eq!(objid, objid2); + } + } + + #[test] + fn test_root_roundtrip() { + let bytes = ExId::Root.to_bytes(); + let objid2 = ExId::try_from(&bytes[..]).unwrap(); + assert_eq!(ExId::Root, objid2); + } +} diff --git a/automerge/src/indexed_cache.rs b/rust/automerge/src/indexed_cache.rs similarity index 100% rename from automerge/src/indexed_cache.rs rename to rust/automerge/src/indexed_cache.rs diff --git a/automerge/src/keys.rs b/rust/automerge/src/keys.rs similarity index 78% rename from automerge/src/keys.rs rename to rust/automerge/src/keys.rs index f8e0c676..838015ef 100644 --- a/automerge/src/keys.rs +++ b/rust/automerge/src/keys.rs @@ -1,5 +1,9 @@ use crate::{query, Automerge}; +/// An iterator over the keys of an object +/// +/// This is returned by [`crate::ReadDoc::keys`] and method. The returned item is either +/// the keys of a map, or the encoded element IDs of a sequence. #[derive(Debug)] pub struct Keys<'a, 'k> { keys: Option>, diff --git a/automerge/src/keys_at.rs b/rust/automerge/src/keys_at.rs similarity index 76% rename from automerge/src/keys_at.rs rename to rust/automerge/src/keys_at.rs index c957e175..fd747bbc 100644 --- a/automerge/src/keys_at.rs +++ b/rust/automerge/src/keys_at.rs @@ -1,5 +1,9 @@ use crate::{query, Automerge}; +/// An iterator over the keys of an object at a particular point in history +/// +/// This is returned by [`crate::ReadDoc::keys_at`] method. The returned item is either the keys of a map, +/// or the encoded element IDs of a sequence. 
#[derive(Debug)] pub struct KeysAt<'a, 'k> { keys: Option>, diff --git a/automerge/src/legacy/mod.rs b/rust/automerge/src/legacy/mod.rs similarity index 100% rename from automerge/src/legacy/mod.rs rename to rust/automerge/src/legacy/mod.rs diff --git a/automerge/src/legacy/serde_impls/actor_id.rs b/rust/automerge/src/legacy/serde_impls/actor_id.rs similarity index 100% rename from automerge/src/legacy/serde_impls/actor_id.rs rename to rust/automerge/src/legacy/serde_impls/actor_id.rs diff --git a/automerge/src/legacy/serde_impls/change_hash.rs b/rust/automerge/src/legacy/serde_impls/change_hash.rs similarity index 93% rename from automerge/src/legacy/serde_impls/change_hash.rs rename to rust/automerge/src/legacy/serde_impls/change_hash.rs index 4d637909..04b876af 100644 --- a/automerge/src/legacy/serde_impls/change_hash.rs +++ b/rust/automerge/src/legacy/serde_impls/change_hash.rs @@ -9,7 +9,7 @@ impl Serialize for ChangeHash { where S: Serializer, { - hex::encode(&self.0).serialize(serializer) + hex::encode(self.0).serialize(serializer) } } diff --git a/automerge/src/legacy/serde_impls/element_id.rs b/rust/automerge/src/legacy/serde_impls/element_id.rs similarity index 100% rename from automerge/src/legacy/serde_impls/element_id.rs rename to rust/automerge/src/legacy/serde_impls/element_id.rs diff --git a/automerge/src/legacy/serde_impls/mod.rs b/rust/automerge/src/legacy/serde_impls/mod.rs similarity index 100% rename from automerge/src/legacy/serde_impls/mod.rs rename to rust/automerge/src/legacy/serde_impls/mod.rs diff --git a/automerge/src/legacy/serde_impls/object_id.rs b/rust/automerge/src/legacy/serde_impls/object_id.rs similarity index 100% rename from automerge/src/legacy/serde_impls/object_id.rs rename to rust/automerge/src/legacy/serde_impls/object_id.rs diff --git a/automerge/src/legacy/serde_impls/op.rs b/rust/automerge/src/legacy/serde_impls/op.rs similarity index 100% rename from automerge/src/legacy/serde_impls/op.rs rename to 
rust/automerge/src/legacy/serde_impls/op.rs diff --git a/automerge/src/legacy/serde_impls/op_type.rs b/rust/automerge/src/legacy/serde_impls/op_type.rs similarity index 100% rename from automerge/src/legacy/serde_impls/op_type.rs rename to rust/automerge/src/legacy/serde_impls/op_type.rs diff --git a/automerge/src/legacy/serde_impls/opid.rs b/rust/automerge/src/legacy/serde_impls/opid.rs similarity index 100% rename from automerge/src/legacy/serde_impls/opid.rs rename to rust/automerge/src/legacy/serde_impls/opid.rs diff --git a/automerge/src/legacy/serde_impls/scalar_value.rs b/rust/automerge/src/legacy/serde_impls/scalar_value.rs similarity index 100% rename from automerge/src/legacy/serde_impls/scalar_value.rs rename to rust/automerge/src/legacy/serde_impls/scalar_value.rs diff --git a/automerge/src/legacy/utility_impls/element_id.rs b/rust/automerge/src/legacy/utility_impls/element_id.rs similarity index 100% rename from automerge/src/legacy/utility_impls/element_id.rs rename to rust/automerge/src/legacy/utility_impls/element_id.rs diff --git a/automerge/src/legacy/utility_impls/key.rs b/rust/automerge/src/legacy/utility_impls/key.rs similarity index 100% rename from automerge/src/legacy/utility_impls/key.rs rename to rust/automerge/src/legacy/utility_impls/key.rs diff --git a/automerge/src/legacy/utility_impls/mod.rs b/rust/automerge/src/legacy/utility_impls/mod.rs similarity index 100% rename from automerge/src/legacy/utility_impls/mod.rs rename to rust/automerge/src/legacy/utility_impls/mod.rs diff --git a/automerge/src/legacy/utility_impls/object_id.rs b/rust/automerge/src/legacy/utility_impls/object_id.rs similarity index 100% rename from automerge/src/legacy/utility_impls/object_id.rs rename to rust/automerge/src/legacy/utility_impls/object_id.rs diff --git a/automerge/src/legacy/utility_impls/opid.rs b/rust/automerge/src/legacy/utility_impls/opid.rs similarity index 100% rename from automerge/src/legacy/utility_impls/opid.rs rename to 
rust/automerge/src/legacy/utility_impls/opid.rs diff --git a/rust/automerge/src/lib.rs b/rust/automerge/src/lib.rs new file mode 100644 index 00000000..cbb535af --- /dev/null +++ b/rust/automerge/src/lib.rs @@ -0,0 +1,301 @@ +//! # Automerge +//! +//! Automerge is a library of data structures for building collaborative, +//! [local-first](https://www.inkandswitch.com/local-first/) applications. The +//! idea of automerge is to provide a data structure which is quite general, +//! \- consisting of nested key/value maps and/or lists - which can be modified +//! entirely locally but which can at any time be merged with other instances of +//! the same data structure. +//! +//! In addition to the core data structure (which we generally refer to as a +//! "document"), we also provide an implementation of a sync protocol (in +//! [`crate::sync`]) which can be used over any reliable in-order transport; and +//! an efficient binary storage format. +//! +//! This crate is organised around two representations of a document - +//! [`Automerge`] and [`AutoCommit`]. The difference between the two is that +//! [`AutoCommit`] manages transactions for you. Both of these representations +//! implement [`ReadDoc`] for reading values from a document and +//! [`sync::SyncDoc`] for taking part in the sync protocol. [`AutoCommit`] +//! directly implements [`transaction::Transactable`] for making changes to a +//! document, whilst [`Automerge`] requires you to explicitly create a +//! [`transaction::Transaction`]. +//! +//! NOTE: The API this library provides for modifying data is quite low level +//! (somewhat analogous to directly creating JSON values rather than using +//! `serde` derive macros or equivalent). If you're writing a Rust application which uses automerge +//! you may want to look at [autosurgeon](https://github.com/automerge/autosurgeon). +//! +//! ## Data Model +//! +//! An automerge document is a map from strings to values +//! 
([`Value`]) where values can be either +//! +//! * A nested composite value which is either +//! * A map from strings to values ([`ObjType::Map`]) +//! * A list of values ([`ObjType::List`]) +//! * A text object (a sequence of unicode characters) ([`ObjType::Text`]) +//! * A primitive value ([`ScalarValue`]) which is one of +//! * A string +//! * A 64 bit floating point number +//! * A signed 64 bit integer +//! * An unsigned 64 bit integer +//! * A boolean +//! * A counter object (a 64 bit integer which merges by addition) +//! ([`ScalarValue::Counter`]) +//! * A timestamp (a 64 bit integer which is milliseconds since the unix epoch) +//! +//! All composite values have an ID ([`ObjId`]) which is created when the value +//! is inserted into the document or is the root object ID [`ROOT`]. Values in +//! the document are then referred to by the pair (`object ID`, `key`). The +//! `key` is represented by the [`Prop`] type and is either a string for a maps, +//! or an index for sequences. +//! +//! ### Conflicts +//! +//! There are some things automerge cannot merge sensibly. For example, two +//! actors concurrently setting the key "name" to different values. In this case +//! automerge will pick a winning value in a random but deterministic way, but +//! the conflicting value is still available via the [`ReadDoc::get_all`] method. +//! +//! ### Change hashes and historical values +//! +//! Like git, points in the history of a document are identified by hash. Unlike +//! git there can be multiple hashes representing a particular point (because +//! automerge supports concurrent changes). These hashes can be obtained using +//! either [`Automerge::get_heads`] or [`AutoCommit::get_heads`] (note these +//! methods are not part of [`ReadDoc`] because in the case of [`AutoCommit`] it +//! requires a mutable reference to the document). +//! +//! These hashes can be used to read values from the document at a particular +//! 
point in history using the various `*_at` methods on [`ReadDoc`] which take a +//! slice of [`ChangeHash`] as an argument. +//! +//! ### Actor IDs +//! +//! Any change to an automerge document is made by an actor, represented by an +//! [`ActorId`]. An actor ID is any random sequence of bytes but each change by +//! the same actor ID must be sequential. This often means you will want to +//! maintain at least one actor ID per device. It is fine to generate a new +//! actor ID for each change, but be aware that each actor ID takes up space in +//! a document so if you expect a document to be long lived and/or to have many +//! changes then you should try to reuse actor IDs where possible. +//! +//! ### Text Encoding +//! +//! Both [`Automerge`] and [`AutoCommit`] provide a `with_encoding` method which +//! allows you to specify the [`crate::TextEncoding`] which is used for +//! interpreting the indexes passed to methods like [`ReadDoc::list_range`] or +//! [`transaction::Transactable::splice`]. The default encoding is UTF-8, but +//! you can switch to UTF-16. +//! +//! ## Sync Protocol +//! +//! See the [`sync`] module. +//! +//! ## Serde serialization +//! +//! Sometimes you just want to get the JSON value of an automerge document. For +//! this you can use [`AutoSerde`], which implements `serde::Serialize` for an +//! automerge document. +//! +//! ## Example +//! +//! Let's create a document representing an address book. +//! +//! ``` +//! use automerge::{ObjType, AutoCommit, transaction::Transactable, ReadDoc}; +//! +//! # fn main() -> Result<(), Box> { +//! let mut doc = AutoCommit::new(); +//! +//! // `put_object` creates a nested object in the root key/value map and +//! // returns the ID of the new object, in this case a list. +//! let contacts = doc.put_object(automerge::ROOT, "contacts", ObjType::List)?; +//! +//! // Now we can insert objects into the list +//! let alice = doc.insert_object(&contacts, 0, ObjType::Map)?; +//! +//! 
// Finally we can set keys in the "alice" map +//! doc.put(&alice, "name", "Alice")?; +//! doc.put(&alice, "email", "alice@example.com")?; +//! +//! // Create another contact +//! let bob = doc.insert_object(&contacts, 1, ObjType::Map)?; +//! doc.put(&bob, "name", "Bob")?; +//! doc.put(&bob, "email", "bob@example.com")?; +//! +//! // Now we save the address book, we can put this in a file +//! let data: Vec = doc.save(); +//! # Ok(()) +//! # } +//! ``` +//! +//! Now modify this document on two separate devices and merge the modifications. +//! +//! ``` +//! use std::borrow::Cow; +//! use automerge::{ObjType, AutoCommit, transaction::Transactable, ReadDoc}; +//! +//! # fn main() -> Result<(), Box> { +//! # let mut doc = AutoCommit::new(); +//! # let contacts = doc.put_object(automerge::ROOT, "contacts", ObjType::List)?; +//! # let alice = doc.insert_object(&contacts, 0, ObjType::Map)?; +//! # doc.put(&alice, "name", "Alice")?; +//! # doc.put(&alice, "email", "alice@example.com")?; +//! # let bob = doc.insert_object(&contacts, 1, ObjType::Map)?; +//! # doc.put(&bob, "name", "Bob")?; +//! # doc.put(&bob, "email", "bob@example.com")?; +//! # let saved: Vec = doc.save(); +//! +//! // Load the document on the first device and change alices email +//! let mut doc1 = AutoCommit::load(&saved)?; +//! let contacts = match doc1.get(automerge::ROOT, "contacts")? { +//! Some((automerge::Value::Object(ObjType::List), contacts)) => contacts, +//! _ => panic!("contacts should be a list"), +//! }; +//! let alice = match doc1.get(&contacts, 0)? { +//! Some((automerge::Value::Object(ObjType::Map), alice)) => alice, +//! _ => panic!("alice should be a map"), +//! }; +//! doc1.put(&alice, "email", "alicesnewemail@example.com")?; +//! +//! +//! // Load the document on the second device and change bobs name +//! let mut doc2 = AutoCommit::load(&saved)?; +//! let contacts = match doc2.get(automerge::ROOT, "contacts")? { +//! 
Some((automerge::Value::Object(ObjType::List), contacts)) => contacts, +//! _ => panic!("contacts should be a list"), +//! }; +//! let bob = match doc2.get(&contacts, 1)? { +//! Some((automerge::Value::Object(ObjType::Map), bob)) => bob, +//! _ => panic!("bob should be a map"), +//! }; +//! doc2.put(&bob, "name", "Robert")?; +//! +//! // Finally, we can merge the changes from the two devices +//! doc1.merge(&mut doc2)?; +//! let bobsname: Option = doc1.get(&bob, "name")?.map(|(v, _)| v); +//! assert_eq!(bobsname, Some(automerge::Value::Scalar(Cow::Owned("Robert".into())))); +//! +//! let alices_email: Option = doc1.get(&alice, "email")?.map(|(v, _)| v); +//! assert_eq!(alices_email, Some(automerge::Value::Scalar(Cow::Owned("alicesnewemail@example.com".into())))); +//! # Ok(()) +//! # } +//! ``` +//! + +#![doc( + html_logo_url = "https://raw.githubusercontent.com/automerge/automerge-rs/main/img/brandmark.svg", + html_favicon_url = "https:///raw.githubusercontent.com/automerge/automerge-rs/main/img/favicon.ico" +)] +#![warn( + missing_debug_implementations, + // missing_docs, // TODO: add documentation! + rust_2018_idioms, + unreachable_pub, + bad_style, + dead_code, + improper_ctypes, + non_shorthand_field_patterns, + no_mangle_generic_items, + overflowing_literals, + path_statements, + patterns_in_fns_without_body, + private_in_public, + unconditional_recursion, + unused, + unused_allocation, + unused_comparisons, + unused_parens, + while_true +)] + +#[doc(hidden)] +#[macro_export] +macro_rules! log { + ( $( $t:tt )* ) => { + { + use $crate::__log; + __log!( $( $t )* ); + } + } + } + +#[cfg(all(feature = "wasm", target_family = "wasm"))] +#[doc(hidden)] +#[macro_export] +macro_rules! __log { + ( $( $t:tt )* ) => { + web_sys::console::log_1(&format!( $( $t )* ).into()); + } + } + +#[cfg(not(all(feature = "wasm", target_family = "wasm")))] +#[doc(hidden)] +#[macro_export] +macro_rules! 
__log { + ( $( $t:tt )* ) => { + println!( $( $t )* ); + } + } + +mod autocommit; +mod automerge; +mod autoserde; +mod change; +mod change_graph; +mod clock; +mod columnar; +mod convert; +mod error; +mod exid; +mod indexed_cache; +mod keys; +mod keys_at; +mod legacy; +mod list_range; +mod list_range_at; +mod map_range; +mod map_range_at; +pub mod op_observer; +mod op_set; +mod op_tree; +mod parents; +mod query; +mod read; +mod storage; +pub mod sync; +pub mod transaction; +mod types; +mod value; +mod values; +#[cfg(feature = "optree-visualisation")] +mod visualisation; + +pub use crate::automerge::{Automerge, OnPartialLoad}; +pub use autocommit::{AutoCommit, AutoCommitWithObs}; +pub use autoserde::AutoSerde; +pub use change::{Change, LoadError as LoadChangeError}; +pub use error::AutomergeError; +pub use error::InvalidActorId; +pub use error::InvalidChangeHashSlice; +pub use exid::{ExId as ObjId, ObjIdFromBytesError}; +pub use keys::Keys; +pub use keys_at::KeysAt; +pub use legacy::Change as ExpandedChange; +pub use list_range::ListRange; +pub use list_range_at::ListRangeAt; +pub use map_range::MapRange; +pub use map_range_at::MapRangeAt; +pub use op_observer::OpObserver; +pub use op_observer::Patch; +pub use op_observer::VecOpObserver; +pub use parents::{Parent, Parents}; +pub use read::ReadDoc; +pub use types::{ActorId, ChangeHash, ObjType, OpType, ParseChangeHashError, Prop, TextEncoding}; +pub use value::{ScalarValue, Value}; +pub use values::Values; + +/// The object ID for the root map of a document +pub const ROOT: ObjId = ObjId::Root; diff --git a/automerge/src/list_range.rs b/rust/automerge/src/list_range.rs similarity index 85% rename from automerge/src/list_range.rs rename to rust/automerge/src/list_range.rs index ae7b2aa5..a043da72 100644 --- a/automerge/src/list_range.rs +++ b/rust/automerge/src/list_range.rs @@ -3,6 +3,9 @@ use crate::{exid::ExId, Value}; use crate::{query, Automerge}; use std::ops::RangeBounds; +/// An iterator over the elements of a 
list object +/// +/// This is returned by the [`crate::ReadDoc::list_range`] method #[derive(Debug)] pub struct ListRange<'a, R: RangeBounds> { range: Option>, diff --git a/automerge/src/list_range_at.rs b/rust/automerge/src/list_range_at.rs similarity index 82% rename from automerge/src/list_range_at.rs rename to rust/automerge/src/list_range_at.rs index 37db9677..ce8f5a46 100644 --- a/automerge/src/list_range_at.rs +++ b/rust/automerge/src/list_range_at.rs @@ -3,6 +3,9 @@ use std::ops::RangeBounds; use crate::{query, Automerge}; +/// An iterator over the elements of a list object at a particular set of heads +/// +/// This is returned by the [`crate::ReadDoc::list_range_at`] method #[derive(Debug)] pub struct ListRangeAt<'a, R: RangeBounds> { range: Option>, diff --git a/automerge/src/map_range.rs b/rust/automerge/src/map_range.rs similarity index 88% rename from automerge/src/map_range.rs rename to rust/automerge/src/map_range.rs index 8029b84d..ad33ebf5 100644 --- a/automerge/src/map_range.rs +++ b/rust/automerge/src/map_range.rs @@ -3,6 +3,9 @@ use std::ops::RangeBounds; use crate::{query, Automerge}; +/// An iterator over the keys and values of a map object +/// +/// This is returned by the [`crate::ReadDoc::map_range`] method #[derive(Debug)] pub struct MapRange<'a, R: RangeBounds> { range: Option>, diff --git a/automerge/src/map_range_at.rs b/rust/automerge/src/map_range_at.rs similarity index 86% rename from automerge/src/map_range_at.rs rename to rust/automerge/src/map_range_at.rs index b2eb3fb2..8d008e89 100644 --- a/automerge/src/map_range_at.rs +++ b/rust/automerge/src/map_range_at.rs @@ -3,6 +3,9 @@ use std::ops::RangeBounds; use crate::{query, Automerge}; +/// An iterator over the keys and values of a map object as at a particuar heads +/// +/// This is returned by the [`crate::ReadDoc::map_range_at`] method #[derive(Debug)] pub struct MapRangeAt<'a, R: RangeBounds> { range: Option>, diff --git a/rust/automerge/src/op_observer.rs 
b/rust/automerge/src/op_observer.rs new file mode 100644 index 00000000..5b33c21f --- /dev/null +++ b/rust/automerge/src/op_observer.rs @@ -0,0 +1,392 @@ +use crate::exid::ExId; +use crate::Prop; +use crate::ReadDoc; +use crate::Value; + +mod compose; +pub use compose::compose; + +/// An observer of operations applied to the document. +pub trait OpObserver { + /// A new value has been inserted into the given object. + /// + /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information + /// - `objid`: the object that has been inserted into. + /// - `index`: the index the new value has been inserted at. + /// - `tagged_value`: the value that has been inserted and the id of the operation that did the + /// insert. + fn insert( + &mut self, + doc: &R, + objid: ExId, + index: usize, + tagged_value: (Value<'_>, ExId), + ); + + /// Some text has been spliced into a text object + fn splice_text(&mut self, _doc: &R, _objid: ExId, _index: usize, _value: &str); + + /// A new value has been put into the given object. + /// + /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information + /// - `objid`: the object that has been put into. + /// - `prop`: the prop that the value as been put at. + /// - `tagged_value`: the value that has been put into the object and the id of the operation + /// that did the put. + /// - `conflict`: whether this put conflicts with other operations. + fn put( + &mut self, + doc: &R, + objid: ExId, + prop: Prop, + tagged_value: (Value<'_>, ExId), + conflict: bool, + ); + + /// When a delete op exposes a previously conflicted value + /// Similar to a put op - except for maps, lists and text, edits + /// may already exist and need to be queried + /// + /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information + /// - `objid`: the object that has been put into. + /// - `prop`: the prop that the value as been put at. 
+ /// - `tagged_value`: the value that has been put into the object and the id of the operation + /// that did the put. + /// - `conflict`: whether this put conflicts with other operations. + fn expose( + &mut self, + doc: &R, + objid: ExId, + prop: Prop, + tagged_value: (Value<'_>, ExId), + conflict: bool, + ); + + /// Flag a new conflict on a value without changing it + /// + /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information + /// - `objid`: the object that has been put into. + /// - `prop`: the prop that the value as been put at. + fn flag_conflict(&mut self, _doc: &R, _objid: ExId, _prop: Prop) {} + + /// A counter has been incremented. + /// + /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information + /// - `objid`: the object that contains the counter. + /// - `prop`: they prop that the chounter is at. + /// - `tagged_value`: the amount the counter has been incremented by, and the the id of the + /// increment operation. + fn increment( + &mut self, + doc: &R, + objid: ExId, + prop: Prop, + tagged_value: (i64, ExId), + ); + + /// A map value has beeen deleted. + /// + /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information + /// - `objid`: the object that has been deleted in. + /// - `prop`: the prop to be deleted + fn delete(&mut self, doc: &R, objid: ExId, prop: Prop) { + match prop { + Prop::Map(k) => self.delete_map(doc, objid, &k), + Prop::Seq(i) => self.delete_seq(doc, objid, i, 1), + } + } + + /// A map value has beeen deleted. + /// + /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information + /// - `objid`: the object that has been deleted in. + /// - `key`: the map key to be deleted + fn delete_map(&mut self, doc: &R, objid: ExId, key: &str); + + /// A one or more list values have beeen deleted. 
+ /// + /// - `doc`: a handle to the doc after the op has been inserted, can be used to query information + /// - `objid`: the object that has been deleted in. + /// - `index`: the index of the deletion + /// - `num`: the number of sequential elements deleted + fn delete_seq(&mut self, doc: &R, objid: ExId, index: usize, num: usize); + + /// Whether to call sequence methods or `splice_text` when encountering changes in text + /// + /// Returns `false` by default + fn text_as_seq(&self) -> bool { + false + } +} + +/// An observer which can be branched +/// +/// This is used when observing operations in a transaction. In this case `branch` will be called +/// at the beginning of the transaction to return a new observer and then `merge` will be called +/// with the branched observer as `other` when the transaction is comitted. +pub trait BranchableObserver { + /// Branch of a new op_observer later to be merged + /// + /// Called when creating a new transaction. Observer branch will be merged on `commit()` or + /// thrown away on `rollback()` + fn branch(&self) -> Self; + + /// Merge observed information from a transaction. 
+ /// + /// Called by AutoCommit on `commit()` + /// + /// - `other`: Another Op Observer of the same type + fn merge(&mut self, other: &Self); +} + +impl OpObserver for () { + fn insert( + &mut self, + _doc: &R, + _objid: ExId, + _index: usize, + _tagged_value: (Value<'_>, ExId), + ) { + } + + fn splice_text(&mut self, _doc: &R, _objid: ExId, _index: usize, _value: &str) {} + + fn put( + &mut self, + _doc: &R, + _objid: ExId, + _prop: Prop, + _tagged_value: (Value<'_>, ExId), + _conflict: bool, + ) { + } + + fn expose( + &mut self, + _doc: &R, + _objid: ExId, + _prop: Prop, + _tagged_value: (Value<'_>, ExId), + _conflict: bool, + ) { + } + + fn increment( + &mut self, + _doc: &R, + _objid: ExId, + _prop: Prop, + _tagged_value: (i64, ExId), + ) { + } + + fn delete_map(&mut self, _doc: &R, _objid: ExId, _key: &str) {} + + fn delete_seq(&mut self, _doc: &R, _objid: ExId, _index: usize, _num: usize) {} +} + +impl BranchableObserver for () { + fn merge(&mut self, _other: &Self) {} + fn branch(&self) -> Self {} +} + +/// Capture operations into a [`Vec`] and store them as patches. +#[derive(Default, Debug, Clone)] +pub struct VecOpObserver { + patches: Vec, +} + +impl VecOpObserver { + /// Take the current list of patches, leaving the internal list empty and ready for new + /// patches. 
+ pub fn take_patches(&mut self) -> Vec { + std::mem::take(&mut self.patches) + } +} + +impl OpObserver for VecOpObserver { + fn insert( + &mut self, + doc: &R, + obj: ExId, + index: usize, + (value, id): (Value<'_>, ExId), + ) { + if let Ok(p) = doc.parents(&obj) { + self.patches.push(Patch::Insert { + obj, + path: p.path(), + index, + value: (value.into_owned(), id), + }); + } + } + + fn splice_text(&mut self, doc: &R, obj: ExId, index: usize, value: &str) { + if let Ok(p) = doc.parents(&obj) { + self.patches.push(Patch::Splice { + obj, + path: p.path(), + index, + value: value.to_string(), + }) + } + } + + fn put( + &mut self, + doc: &R, + obj: ExId, + prop: Prop, + (value, id): (Value<'_>, ExId), + conflict: bool, + ) { + if let Ok(p) = doc.parents(&obj) { + self.patches.push(Patch::Put { + obj, + path: p.path(), + prop, + value: (value.into_owned(), id), + conflict, + }); + } + } + + fn expose( + &mut self, + doc: &R, + obj: ExId, + prop: Prop, + (value, id): (Value<'_>, ExId), + conflict: bool, + ) { + if let Ok(p) = doc.parents(&obj) { + self.patches.push(Patch::Expose { + obj, + path: p.path(), + prop, + value: (value.into_owned(), id), + conflict, + }); + } + } + + fn increment(&mut self, doc: &R, obj: ExId, prop: Prop, tagged_value: (i64, ExId)) { + if let Ok(p) = doc.parents(&obj) { + self.patches.push(Patch::Increment { + obj, + path: p.path(), + prop, + value: tagged_value, + }); + } + } + + fn delete_map(&mut self, doc: &R, obj: ExId, key: &str) { + if let Ok(p) = doc.parents(&obj) { + self.patches.push(Patch::Delete { + obj, + path: p.path(), + prop: Prop::Map(key.to_owned()), + num: 1, + }) + } + } + + fn delete_seq(&mut self, doc: &R, obj: ExId, index: usize, num: usize) { + if let Ok(p) = doc.parents(&obj) { + self.patches.push(Patch::Delete { + obj, + path: p.path(), + prop: Prop::Seq(index), + num, + }) + } + } +} + +impl BranchableObserver for VecOpObserver { + fn merge(&mut self, other: &Self) { + 
self.patches.extend_from_slice(other.patches.as_slice()) + } + + fn branch(&self) -> Self { + Self::default() + } +} + +/// A notification to the application that something has changed in a document. +#[derive(Debug, Clone, PartialEq)] +pub enum Patch { + /// Associating a new value with a prop in a map, or an existing list element + Put { + /// path to the object + path: Vec<(ExId, Prop)>, + /// The object that was put into. + obj: ExId, + /// The prop that the new value was put at. + prop: Prop, + /// The value that was put, and the id of the operation that put it there. + value: (Value<'static>, ExId), + /// Whether this put conflicts with another. + conflict: bool, + }, + /// Exposing (via delete) an old but conflicted value with a prop in a map, or a list element + Expose { + /// path to the object + path: Vec<(ExId, Prop)>, + /// The object that was put into. + obj: ExId, + /// The prop that the new value was put at. + prop: Prop, + /// The value that was put, and the id of the operation that put it there. + value: (Value<'static>, ExId), + /// Whether this put conflicts with another. + conflict: bool, + }, + /// Inserting a new element into a list + Insert { + /// path to the object + path: Vec<(ExId, Prop)>, + /// The object that was inserted into. + obj: ExId, + /// The index that the new value was inserted at. + index: usize, + /// The value that was inserted, and the id of the operation that inserted it there. + value: (Value<'static>, ExId), + }, + /// Splicing a text object + Splice { + /// path to the object + path: Vec<(ExId, Prop)>, + /// The object that was inserted into. + obj: ExId, + /// The index that the new value was inserted at. + index: usize, + /// The value that was spliced + value: String, + }, + /// Incrementing a counter. + Increment { + /// path to the object + path: Vec<(ExId, Prop)>, + /// The object that was incremented in. + obj: ExId, + /// The prop that was incremented. 
+ prop: Prop, + /// The amount that the counter was incremented by, and the id of the operation that + /// did the increment. + value: (i64, ExId), + }, + /// Deleting an element from a list/text + Delete { + /// path to the object + path: Vec<(ExId, Prop)>, + /// The object that was deleted from. + obj: ExId, + /// The prop that was deleted. + prop: Prop, + /// number of items deleted (for seq) + num: usize, + }, +} diff --git a/rust/automerge/src/op_observer/compose.rs b/rust/automerge/src/op_observer/compose.rs new file mode 100644 index 00000000..92fe3b1e --- /dev/null +++ b/rust/automerge/src/op_observer/compose.rs @@ -0,0 +1,102 @@ +use super::OpObserver; + +pub fn compose<'a, O1: OpObserver, O2: OpObserver>( + obs1: &'a mut O1, + obs2: &'a mut O2, +) -> impl OpObserver + 'a { + ComposeObservers { obs1, obs2 } +} + +struct ComposeObservers<'a, O1: OpObserver, O2: OpObserver> { + obs1: &'a mut O1, + obs2: &'a mut O2, +} + +impl<'a, O1: OpObserver, O2: OpObserver> OpObserver for ComposeObservers<'a, O1, O2> { + fn insert( + &mut self, + doc: &R, + objid: crate::ObjId, + index: usize, + tagged_value: (crate::Value<'_>, crate::ObjId), + ) { + self.obs1 + .insert(doc, objid.clone(), index, tagged_value.clone()); + self.obs2.insert(doc, objid, index, tagged_value); + } + + fn splice_text( + &mut self, + doc: &R, + objid: crate::ObjId, + index: usize, + value: &str, + ) { + self.obs1.splice_text(doc, objid.clone(), index, value); + self.obs2.splice_text(doc, objid, index, value); + } + + fn put( + &mut self, + doc: &R, + objid: crate::ObjId, + prop: crate::Prop, + tagged_value: (crate::Value<'_>, crate::ObjId), + conflict: bool, + ) { + self.obs1.put( + doc, + objid.clone(), + prop.clone(), + tagged_value.clone(), + conflict, + ); + self.obs2.put(doc, objid, prop, tagged_value, conflict); + } + + fn expose( + &mut self, + doc: &R, + objid: crate::ObjId, + prop: crate::Prop, + tagged_value: (crate::Value<'_>, crate::ObjId), + conflict: bool, + ) { + self.obs1.expose( 
+ doc, + objid.clone(), + prop.clone(), + tagged_value.clone(), + conflict, + ); + self.obs2.expose(doc, objid, prop, tagged_value, conflict); + } + + fn increment( + &mut self, + doc: &R, + objid: crate::ObjId, + prop: crate::Prop, + tagged_value: (i64, crate::ObjId), + ) { + self.obs1 + .increment(doc, objid.clone(), prop.clone(), tagged_value.clone()); + self.obs2.increment(doc, objid, prop, tagged_value); + } + + fn delete_map(&mut self, doc: &R, objid: crate::ObjId, key: &str) { + self.obs1.delete_map(doc, objid.clone(), key); + self.obs2.delete_map(doc, objid, key); + } + + fn delete_seq( + &mut self, + doc: &R, + objid: crate::ObjId, + index: usize, + num: usize, + ) { + self.obs2.delete_seq(doc, objid.clone(), index, num); + self.obs2.delete_seq(doc, objid, index, num); + } +} diff --git a/automerge/src/op_set.rs b/rust/automerge/src/op_set.rs similarity index 62% rename from automerge/src/op_set.rs rename to rust/automerge/src/op_set.rs index 8f08b211..aab8ce74 100644 --- a/automerge/src/op_set.rs +++ b/rust/automerge/src/op_set.rs @@ -3,9 +3,9 @@ use crate::exid::ExId; use crate::indexed_cache::IndexedCache; use crate::op_tree::{self, OpTree}; use crate::parents::Parents; -use crate::query::{self, OpIdSearch, TreeQuery}; -use crate::types::{self, ActorId, Key, ObjId, Op, OpId, OpIds, OpType, Prop}; -use crate::{ObjType, OpObserver}; +use crate::query::{self, OpIdVisSearch, TreeQuery}; +use crate::types::{self, ActorId, Key, ListEncoding, ObjId, Op, OpId, OpIds, OpType, Prop}; +use crate::ObjType; use fxhash::FxBuildHasher; use std::borrow::Borrow; use std::cmp::Ordering; @@ -13,7 +13,7 @@ use std::collections::HashMap; use std::ops::RangeBounds; mod load; -pub(crate) use load::{ObservedOpSetBuilder, OpSetBuilder}; +pub(crate) use load::OpSetBuilder; pub(crate) type OpSet = OpSetInternal; @@ -32,12 +32,6 @@ impl OpSetInternal { OpSetBuilder::new() } - /// Create a builder which passes each operation to `observer`. 
This will be significantly - /// slower than `OpSetBuilder` - pub(crate) fn observed_builder(observer: &mut O) -> ObservedOpSetBuilder<'_, O> { - ObservedOpSetBuilder::new(observer) - } - pub(crate) fn new() -> Self { let mut trees: HashMap<_, _, _> = Default::default(); trees.insert(ObjId::root(), OpTree::new()); @@ -55,12 +49,16 @@ impl OpSetInternal { if id == types::ROOT { ExId::Root } else { - ExId::Id(id.0, self.m.actors.cache[id.1].clone(), id.1) + ExId::Id( + id.counter(), + self.m.actors.cache[id.actor()].clone(), + id.actor(), + ) } } pub(crate) fn iter(&self) -> Iter<'_> { - let mut objs: Vec<_> = self.trees.iter().collect(); + let mut objs: Vec<_> = self.trees.iter().map(|t| (t.0, t.1.objtype, t.1)).collect(); objs.sort_by(|a, b| self.m.lamport_cmp((a.0).0, (b.0).0)); Iter { opset: self, @@ -69,25 +67,44 @@ impl OpSetInternal { } } + /// Iterate over objects in the opset in causal order + pub(crate) fn iter_objs( + &self, + ) -> impl Iterator)> + '_ { + let mut objs: Vec<_> = self.trees.iter().map(|t| (t.0, t.1.objtype, t.1)).collect(); + objs.sort_by(|a, b| self.m.lamport_cmp((a.0).0, (b.0).0)); + IterObjs { + trees: objs.into_iter(), + } + } + pub(crate) fn parents(&self, obj: ObjId) -> Parents<'_> { Parents { obj, ops: self } } - pub(crate) fn parent_object(&self, obj: &ObjId) -> Option<(ObjId, Key)> { + pub(crate) fn parent_object(&self, obj: &ObjId) -> Option { let parent = self.trees.get(obj)?.parent?; - let key = self.search(&parent, OpIdSearch::new(obj.0)).key().unwrap(); - Some((parent, key)) + let query = self.search(&parent, OpIdVisSearch::new(obj.0)); + let key = query.key().unwrap(); + let visible = query.visible; + Some(Parent { + obj: parent, + key, + visible, + }) } - pub(crate) fn export_key(&self, obj: ObjId, key: Key) -> Prop { + pub(crate) fn export_key(&self, obj: ObjId, key: Key, encoding: ListEncoding) -> Option { match key { - Key::Map(m) => Prop::Map(self.m.props.get(m).into()), + Key::Map(m) => self.m.props.safe_get(m).map(|s| 
Prop::Map(s.to_string())), Key::Seq(opid) => { - let i = self - .search(&obj, query::ElemIdPos::new(opid)) - .index() - .unwrap(); - Prop::Seq(i) + if opid.is_head() { + Some(Prop::Seq(0)) + } else { + self.search(&obj, query::ElemIdPos::new(opid, encoding)) + .index() + .map(Prop::Seq) + } } } } @@ -158,36 +175,37 @@ impl OpSetInternal { } } - pub(crate) fn search<'a, 'b: 'a, Q>(&'b self, obj: &ObjId, query: Q) -> Q + pub(crate) fn search<'a, 'b: 'a, Q>(&'b self, obj: &ObjId, mut query: Q) -> Q where Q: TreeQuery<'a>, { if let Some(tree) = self.trees.get(obj) { - tree.internal.search(query, &self.m) + if query.can_shortcut_search(tree) { + query + } else { + tree.internal.search(query, &self.m) + } } else { query } } - pub(crate) fn replace(&mut self, obj: &ObjId, index: usize, f: F) + pub(crate) fn change_vis(&mut self, obj: &ObjId, index: usize, f: F) where F: Fn(&mut Op), { if let Some(tree) = self.trees.get_mut(obj) { + tree.last_insert = None; tree.internal.update(index, f) } } /// Add `op` as a successor to each op at `op_indices` in `obj` - pub(crate) fn add_succ>( - &mut self, - obj: &ObjId, - op_indices: I, - op: &Op, - ) { + pub(crate) fn add_succ(&mut self, obj: &ObjId, op_indices: &[usize], op: &Op) { if let Some(tree) = self.trees.get_mut(obj) { + tree.last_insert = None; for i in op_indices { - tree.internal.update(i, |old_op| { + tree.internal.update(*i, |old_op| { old_op.add_succ(op, |left, right| self.m.lamport_cmp(*left, *right)) }); } @@ -198,6 +216,7 @@ impl OpSetInternal { // this happens on rollback - be sure to go back to the old state let tree = self.trees.get_mut(obj).unwrap(); self.length -= 1; + tree.last_insert = None; let op = tree.internal.remove(index); if let OpType::Make(_) = &op.action { self.trees.remove(&op.id.into()); @@ -209,6 +228,12 @@ impl OpSetInternal { self.length } + pub(crate) fn hint(&mut self, obj: &ObjId, index: usize, pos: usize) { + if let Some(tree) = self.trees.get_mut(obj) { + tree.last_insert = Some((index, 
pos)) + } + } + #[tracing::instrument(skip(self, index))] pub(crate) fn insert(&mut self, index: usize, obj: &ObjId, element: Op) { if let OpType::Make(typ) = element.action { @@ -217,13 +242,14 @@ impl OpSetInternal { OpTree { internal: Default::default(), objtype: typ, + last_insert: None, parent: Some(*obj), }, ); } if let Some(tree) = self.trees.get_mut(obj) { - //let tree = self.trees.get_mut(&element.obj).unwrap(); + tree.last_insert = None; tree.internal.insert(index, element); self.length += 1; } else { @@ -231,91 +257,6 @@ impl OpSetInternal { } } - pub(crate) fn insert_op(&mut self, obj: &ObjId, op: Op) -> Op { - let q = self.search(obj, query::SeekOp::new(&op)); - - let succ = q.succ; - let pos = q.pos; - - self.add_succ(obj, succ.iter().copied(), &op); - - if !op.is_delete() { - self.insert(pos, obj, op.clone()); - } - op - } - - pub(crate) fn insert_op_with_observer( - &mut self, - obj: &ObjId, - op: Op, - observer: &mut Obs, - ) -> Op { - let q = self.search(obj, query::SeekOpWithPatch::new(&op)); - - let query::SeekOpWithPatch { - pos, - succ, - seen, - values, - had_value_before, - .. - } = q; - - let ex_obj = self.id_to_exid(obj.0); - let parents = self.parents(*obj); - - let key = match op.key { - Key::Map(index) => self.m.props[index].clone().into(), - Key::Seq(_) => seen.into(), - }; - - if op.insert { - let value = (op.value(), self.id_to_exid(op.id)); - observer.insert(parents, ex_obj, seen, value); - } else if op.is_delete() { - if let Some(winner) = &values.last() { - let value = (winner.value(), self.id_to_exid(winner.id)); - let conflict = values.len() > 1; - observer.put(parents, ex_obj, key, value, conflict); - } else { - observer.delete(parents, ex_obj, key); - } - } else if let Some(value) = op.get_increment_value() { - // only observe this increment if the counter is visible, i.e. 
the counter's - // create op is in the values - if values.iter().any(|value| op.pred.contains(&value.id)) { - // we have observed the value - observer.increment(parents, ex_obj, key, (value, self.id_to_exid(op.id))); - } - } else { - let winner = if let Some(last_value) = values.last() { - if self.m.lamport_cmp(op.id, last_value.id) == Ordering::Greater { - &op - } else { - last_value - } - } else { - &op - }; - let value = (winner.value(), self.id_to_exid(winner.id)); - if op.is_list_op() && !had_value_before { - observer.insert(parents, ex_obj, seen, value); - } else { - let conflict = !values.is_empty(); - observer.put(parents, ex_obj, key, value, conflict); - } - } - - self.add_succ(obj, succ.iter().copied(), &op); - - if !op.is_delete() { - self.insert(pos, obj, op.clone()); - } - - op - } - pub(crate) fn object_type(&self, id: &ObjId) -> Option { self.trees.get(id).map(|tree| tree.objtype) } @@ -350,7 +291,7 @@ impl Default for OpSetInternal { } impl<'a> IntoIterator for &'a OpSetInternal { - type Item = (&'a ObjId, &'a Op); + type Item = (&'a ObjId, ObjType, &'a Op); type IntoIter = Iter<'a>; @@ -359,27 +300,41 @@ impl<'a> IntoIterator for &'a OpSetInternal { } } +pub(crate) struct IterObjs<'a> { + trees: std::vec::IntoIter<(&'a ObjId, ObjType, &'a op_tree::OpTree)>, +} + +impl<'a> Iterator for IterObjs<'a> { + type Item = (&'a ObjId, ObjType, op_tree::OpTreeIter<'a>); + + fn next(&mut self) -> Option { + self.trees + .next() + .map(|(id, typ, tree)| (id, typ, tree.iter())) + } +} + #[derive(Clone)] pub(crate) struct Iter<'a> { opset: &'a OpSet, - trees: std::vec::IntoIter<(&'a ObjId, &'a op_tree::OpTree)>, - current: Option<(&'a ObjId, op_tree::OpTreeIter<'a>)>, + trees: std::vec::IntoIter<(&'a ObjId, ObjType, &'a op_tree::OpTree)>, + current: Option<(&'a ObjId, ObjType, op_tree::OpTreeIter<'a>)>, } impl<'a> Iterator for Iter<'a> { - type Item = (&'a ObjId, &'a Op); + type Item = (&'a ObjId, ObjType, &'a Op); fn next(&mut self) -> Option { - if let 
Some((id, tree)) = &mut self.current { + if let Some((id, typ, tree)) = &mut self.current { if let Some(next) = tree.next() { - return Some((id, next)); + return Some((id, *typ, next)); } } loop { - self.current = self.trees.next().map(|o| (o.0, o.1.iter())); - if let Some((obj, tree)) = &mut self.current { + self.current = self.trees.next().map(|o| (o.0, o.1, o.2.iter())); + if let Some((obj, typ, tree)) = &mut self.current { if let Some(next) = tree.next() { - return Some((obj, next)); + return Some((obj, *typ, next)); } } else { return None; @@ -425,13 +380,7 @@ impl OpSetMetadata { } pub(crate) fn lamport_cmp(&self, left: OpId, right: OpId) -> Ordering { - match (left, right) { - (OpId(0, _), OpId(0, _)) => Ordering::Equal, - (OpId(0, _), OpId(_, _)) => Ordering::Less, - (OpId(_, _), OpId(0, _)) => Ordering::Greater, - (OpId(a, x), OpId(b, y)) if a == b => self.actors[x].cmp(&self.actors[y]), - (OpId(a, _), OpId(b, _)) => a.cmp(&b), - } + left.lamport_cmp(&right, &self.actors.cache) } pub(crate) fn sorted_opids>(&self, opids: I) -> OpIds { @@ -448,3 +397,9 @@ impl OpSetMetadata { self.props.cache(key.borrow().to_string()) } } + +pub(crate) struct Parent { + pub(crate) obj: ObjId, + pub(crate) key: Key, + pub(crate) visible: bool, +} diff --git a/automerge/src/op_set/load.rs b/rust/automerge/src/op_set/load.rs similarity index 58% rename from automerge/src/op_set/load.rs rename to rust/automerge/src/op_set/load.rs index 0f810d15..e14f46b7 100644 --- a/automerge/src/op_set/load.rs +++ b/rust/automerge/src/op_set/load.rs @@ -6,8 +6,7 @@ use super::{OpSet, OpTree}; use crate::{ op_tree::OpTreeInternal, storage::load::{DocObserver, LoadedObject}, - types::{ObjId, Op}, - OpObserver, + types::ObjId, }; /// An opset builder which creates an optree for each object as it finishes loading, inserting the @@ -37,6 +36,7 @@ impl DocObserver for OpSetBuilder { internal, objtype: loaded.obj_type, parent: loaded.parent, + last_insert: None, }; 
self.completed_objects.insert(loaded.id, tree); } @@ -50,38 +50,3 @@ impl DocObserver for OpSetBuilder { } } } - -/// A DocObserver which just accumulates ops until the document has finished reconstructing and -/// then inserts all of the ops using `OpSet::insert_op_with_observer` -pub(crate) struct ObservedOpSetBuilder<'a, O: OpObserver> { - observer: &'a mut O, - ops: Vec<(ObjId, Op)>, -} - -impl<'a, O: OpObserver> ObservedOpSetBuilder<'a, O> { - pub(crate) fn new(observer: &'a mut O) -> Self { - Self { - observer, - ops: Vec::new(), - } - } -} - -impl<'a, O: OpObserver> DocObserver for ObservedOpSetBuilder<'a, O> { - type Output = OpSet; - - fn object_loaded(&mut self, object: LoadedObject) { - self.ops.reserve(object.ops.len()); - for op in object.ops { - self.ops.push((object.id, op)); - } - } - - fn finish(self, _metadata: super::OpSetMetadata) -> Self::Output { - let mut opset = OpSet::new(); - for (obj, op) in self.ops { - opset.insert_op_with_observer(&obj, op, self.observer); - } - opset - } -} diff --git a/rust/automerge/src/op_tree.rs b/rust/automerge/src/op_tree.rs new file mode 100644 index 00000000..7de00dc3 --- /dev/null +++ b/rust/automerge/src/op_tree.rs @@ -0,0 +1,373 @@ +use std::{fmt::Debug, mem, ops::RangeBounds}; + +pub(crate) use crate::op_set::OpSetMetadata; +use crate::{ + clock::Clock, + query::{self, ChangeVisibility, QueryResult, TreeQuery}, +}; +use crate::{ + types::{ObjId, Op, OpId}, + ObjType, +}; +use std::collections::HashSet; + +mod iter; +mod node; + +pub(crate) use iter::OpTreeIter; +#[allow(unused)] +pub(crate) use node::{OpTreeNode, B}; + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct OpTree { + pub(crate) internal: OpTreeInternal, + pub(crate) objtype: ObjType, + /// The id of the parent object, root has no parent. 
+ pub(crate) parent: Option, + /// record the last list index and tree position + /// inserted into the op_set - this allows us to + /// short circuit the query if the follow op is another + /// insert or delete at the same spot + pub(crate) last_insert: Option<(usize, usize)>, +} + +impl OpTree { + pub(crate) fn new() -> Self { + Self { + internal: Default::default(), + objtype: ObjType::Map, + parent: None, + last_insert: None, + } + } + + pub(crate) fn iter(&self) -> OpTreeIter<'_> { + self.internal.iter() + } + + pub(crate) fn len(&self) -> usize { + self.internal.len() + } +} + +#[derive(Clone, Debug)] +pub(crate) struct OpTreeInternal { + pub(crate) root_node: Option, + pub(crate) ops: Vec, +} + +impl OpTreeInternal { + /// Construct a new, empty, sequence. + pub(crate) fn new() -> Self { + Self { + root_node: None, + ops: vec![], + } + } + + /// Get the length of the sequence. + pub(crate) fn len(&self) -> usize { + self.root_node.as_ref().map_or(0, |n| n.len()) + } + + pub(crate) fn keys(&self) -> Option> { + if self.root_node.is_some() { + Some(query::Keys::new(self)) + } else { + None + } + } + + pub(crate) fn keys_at(&self, clock: Clock) -> Option> { + if self.root_node.is_some() { + Some(query::KeysAt::new(self, clock)) + } else { + None + } + } + + pub(crate) fn map_range<'a, R: RangeBounds>( + &'a self, + range: R, + meta: &'a OpSetMetadata, + ) -> Option> { + if self.root_node.is_some() { + Some(query::MapRange::new(range, self, meta)) + } else { + None + } + } + + pub(crate) fn map_range_at<'a, R: RangeBounds>( + &'a self, + range: R, + meta: &'a OpSetMetadata, + clock: Clock, + ) -> Option> { + if self.root_node.is_some() { + Some(query::MapRangeAt::new(range, self, meta, clock)) + } else { + None + } + } + + pub(crate) fn list_range>( + &self, + range: R, + ) -> Option> { + if self.root_node.is_some() { + Some(query::ListRange::new(range, self)) + } else { + None + } + } + + pub(crate) fn list_range_at>( + &self, + range: R, + clock: Clock, + ) -> 
Option> { + if self.root_node.is_some() { + Some(query::ListRangeAt::new(range, clock, self)) + } else { + None + } + } + + pub(crate) fn search<'a, 'b: 'a, Q>(&'b self, mut query: Q, m: &OpSetMetadata) -> Q + where + Q: TreeQuery<'a>, + { + self.root_node.as_ref().map(|root| { + match query.query_node_with_metadata(root, m, &self.ops) { + QueryResult::Descend => root.search(&mut query, m, &self.ops, None), + QueryResult::Skip(skip) => root.search(&mut query, m, &self.ops, Some(skip)), + _ => true, + } + }); + query + } + + /// Create an iterator through the sequence. + pub(crate) fn iter(&self) -> OpTreeIter<'_> { + iter::OpTreeIter::new(self) + } + + /// Insert the `element` into the sequence at `index`. + /// + /// # Panics + /// + /// Panics if `index > len`. + pub(crate) fn insert(&mut self, index: usize, op: Op) { + assert!( + index <= self.len(), + "tried to insert at {} but len is {}", + index, + self.len() + ); + + let element = self.ops.len(); + self.ops.push(op); + + let old_len = self.len(); + if let Some(root) = self.root_node.as_mut() { + #[cfg(debug_assertions)] + root.check(); + + if root.is_full() { + let original_len = root.len(); + let new_root = OpTreeNode::new(); + + // move new_root to root position + let old_root = mem::replace(root, new_root); + + root.length += old_root.len(); + root.index = old_root.index.clone(); + root.children.push(old_root); + root.split_child(0, &self.ops); + + assert_eq!(original_len, root.len()); + + // after splitting the root has one element and two children, find which child the + // index is in + let first_child_len = root.children[0].len(); + let (child, insertion_index) = if first_child_len < index { + (&mut root.children[1], index - (first_child_len + 1)) + } else { + (&mut root.children[0], index) + }; + root.length += 1; + root.index.insert(&self.ops[element]); + child.insert_into_non_full_node(insertion_index, element, &self.ops) + } else { + root.insert_into_non_full_node(index, element, &self.ops) + } + 
} else { + let mut root = OpTreeNode::new(); + root.insert_into_non_full_node(index, element, &self.ops); + self.root_node = Some(root) + } + assert_eq!(self.len(), old_len + 1, "{:#?}", self); + } + + /// Get the `element` at `index` in the sequence. + pub(crate) fn get(&self, index: usize) -> Option<&Op> { + self.root_node + .as_ref() + .and_then(|n| n.get(index)) + .map(|n| &self.ops[n]) + } + + // this replaces get_mut() because it allows the indexes to update correctly + pub(crate) fn update(&mut self, index: usize, f: F) + where + F: FnOnce(&mut Op), + { + if self.len() > index { + let n = self.root_node.as_ref().unwrap().get(index).unwrap(); + let new_element = self.ops.get_mut(n).unwrap(); + let old_vis = new_element.visible(); + f(new_element); + let vis = ChangeVisibility { + old_vis, + new_vis: new_element.visible(), + op: new_element, + }; + self.root_node.as_mut().unwrap().update(index, vis); + } + } + + /// Removes the element at `index` from the sequence. + /// + /// # Panics + /// + /// Panics if `index` is out of bounds. 
+ pub(crate) fn remove(&mut self, index: usize) -> Op { + if let Some(root) = self.root_node.as_mut() { + #[cfg(debug_assertions)] + let len = root.check(); + let old = root.remove(index, &self.ops); + + if root.elements.is_empty() { + if root.is_leaf() { + self.root_node = None; + } else { + self.root_node = Some(root.children.remove(0)); + } + } + + #[cfg(debug_assertions)] + debug_assert_eq!(len, self.root_node.as_ref().map_or(0, |r| r.check()) + 1); + self.ops[old].clone() + } else { + panic!("remove from empty tree") + } + } +} + +impl Default for OpTreeInternal { + fn default() -> Self { + Self::new() + } +} + +impl PartialEq for OpTreeInternal { + fn eq(&self, other: &Self) -> bool { + self.len() == other.len() && self.iter().zip(other.iter()).all(|(a, b)| a == b) + } +} + +impl<'a> IntoIterator for &'a OpTreeInternal { + type Item = &'a Op; + + type IntoIter = Iter<'a>; + + fn into_iter(self) -> Self::IntoIter { + Iter { + inner: self, + index: 0, + } + } +} + +pub(crate) struct Iter<'a> { + inner: &'a OpTreeInternal, + index: usize, +} + +impl<'a> Iterator for Iter<'a> { + type Item = &'a Op; + + fn next(&mut self) -> Option { + self.index += 1; + self.inner.get(self.index - 1) + } + + fn nth(&mut self, n: usize) -> Option { + self.index += n + 1; + self.inner.get(self.index - 1) + } +} + +#[derive(Debug, Clone, PartialEq)] +struct CounterData { + pos: usize, + val: i64, + succ: HashSet, + op: Op, +} + +#[cfg(test)] +mod tests { + use crate::legacy as amp; + use crate::types::{Op, OpId}; + + use super::*; + + fn op() -> Op { + let zero = OpId::new(0, 0); + Op { + id: zero, + action: amp::OpType::Put(0.into()), + key: zero.into(), + succ: Default::default(), + pred: Default::default(), + insert: false, + } + } + + #[test] + fn insert() { + let mut t: OpTree = OpTree::new(); + + t.internal.insert(0, op()); + t.internal.insert(1, op()); + t.internal.insert(0, op()); + t.internal.insert(0, op()); + t.internal.insert(0, op()); + t.internal.insert(3, op()); + 
t.internal.insert(4, op()); + } + + #[test] + fn insert_book() { + let mut t: OpTree = OpTree::new(); + + for i in 0..100 { + t.internal.insert(i % 2, op()); + } + } + + #[test] + fn insert_book_vec() { + let mut t: OpTree = OpTree::new(); + let mut v = Vec::new(); + + for i in 0..100 { + t.internal.insert(i % 3, op()); + v.insert(i % 3, op()); + + assert_eq!(v, t.internal.iter().cloned().collect::>()) + } + } +} diff --git a/automerge/src/op_tree/iter.rs b/rust/automerge/src/op_tree/iter.rs similarity index 96% rename from automerge/src/op_tree/iter.rs rename to rust/automerge/src/op_tree/iter.rs index 8d070f11..0b19f359 100644 --- a/automerge/src/op_tree/iter.rs +++ b/rust/automerge/src/op_tree/iter.rs @@ -21,6 +21,7 @@ impl<'a> OpTreeIter<'a> { }, cumulative_index: 0, root_node: root, + ops: &tree.ops, }) .unwrap_or(Inner::Empty), ) @@ -50,6 +51,7 @@ enum Inner<'a> { // How far through the whole optree we are cumulative_index: usize, root_node: &'a OpTreeNode, + ops: &'a [Op], }, } @@ -75,6 +77,7 @@ impl<'a> Iterator for Inner<'a> { Inner::Empty => None, Inner::NonEmpty { ancestors, + ops, current, cumulative_index, .. 
@@ -83,10 +86,10 @@ impl<'a> Iterator for Inner<'a> { // If we're in a leaf node and we haven't exhausted it yet we just return the elements // of the leaf node if current.index < current.node.len() { - let result = ¤t.node.elements[current.index]; + let result = current.node.elements[current.index]; current.index += 1; *cumulative_index += 1; - Some(result) + Some(&ops[result]) } else { // We've exhausted the leaf node, we must find the nearest non-exhausted parent (lol) let node_iter = loop { @@ -113,10 +116,10 @@ impl<'a> Iterator for Inner<'a> { // return the element from the parent node which is one after the index at which we // descended into the child *current = node_iter; - let result = ¤t.node.elements[current.index]; + let result = current.node.elements[current.index]; current.index += 1; *cumulative_index += 1; - Some(result) + Some(&ops[result]) } } else { // If we're in a non-leaf node then the last iteration returned an element from the @@ -147,6 +150,7 @@ impl<'a> Iterator for Inner<'a> { Self::Empty => None, Self::NonEmpty { root_node, + ops, cumulative_index, current, ancestors, @@ -177,7 +181,7 @@ impl<'a> Iterator for Inner<'a> { Ordering::Equal => { *cumulative_index += child.len() + 1; current.index = child_index + 1; - return Some(¤t.node.elements[child_index]); + return Some(&ops[current.node.elements[child_index]]); } Ordering::Greater => { current.index = child_index; @@ -197,7 +201,7 @@ impl<'a> Iterator for Inner<'a> { // we're in a leaf node and we kept track of the cumulative index as we went, let index_in_this_node = n.saturating_sub(*cumulative_index); current.index = index_in_this_node + 1; - Some(¤t.node.elements[index_in_this_node]) + Some(&ops[current.node.elements[index_in_this_node]]) } } } @@ -258,7 +262,7 @@ mod tests { fn op(counter: u64) -> Op { Op { action: OpType::Put(ScalarValue::Uint(counter)), - id: OpId(counter, 0), + id: OpId::new(counter, 0), key: Key::Map(0), succ: Default::default(), pred: Default::default(), diff 
--git a/rust/automerge/src/op_tree/node.rs b/rust/automerge/src/op_tree/node.rs new file mode 100644 index 00000000..ed1b7646 --- /dev/null +++ b/rust/automerge/src/op_tree/node.rs @@ -0,0 +1,492 @@ +use std::{ + cmp::{min, Ordering}, + fmt::Debug, + mem, +}; + +pub(crate) use crate::op_set::OpSetMetadata; +use crate::query::{ChangeVisibility, Index, QueryResult, TreeQuery}; +use crate::types::Op; +pub(crate) const B: usize = 16; + +#[derive(Clone, Debug)] +pub(crate) struct OpTreeNode { + pub(crate) children: Vec, + pub(crate) elements: Vec, + pub(crate) index: Index, + pub(crate) length: usize, +} + +impl OpTreeNode { + pub(crate) fn new() -> Self { + Self { + elements: Vec::new(), + children: Vec::new(), + index: Default::default(), + length: 0, + } + } + + fn search_element<'a, 'b: 'a, Q>( + &'b self, + query: &mut Q, + m: &OpSetMetadata, + ops: &'a [Op], + index: usize, + ) -> bool + where + Q: TreeQuery<'a>, + { + if let Some(e) = self.elements.get(index) { + if query.query_element_with_metadata(&ops[*e], m) == QueryResult::Finish { + return true; + } + } + false + } + + pub(crate) fn search<'a, 'b: 'a, Q>( + &'b self, + query: &mut Q, + m: &OpSetMetadata, + ops: &'a [Op], + mut skip: Option, + ) -> bool + where + Q: TreeQuery<'a>, + { + if self.is_leaf() { + for e in self.elements.iter().skip(skip.unwrap_or(0)) { + if query.query_element_with_metadata(&ops[*e], m) == QueryResult::Finish { + return true; + } + } + false + } else { + for (child_index, child) in self.children.iter().enumerate() { + match skip { + Some(n) if n > child.len() => { + skip = Some(n - child.len() - 1); + } + Some(n) if n == child.len() => { + skip = Some(0); // important to not be None so we never call query_node again + if self.search_element(query, m, ops, child_index) { + return true; + } + } + Some(n) => { + if child.search(query, m, ops, Some(n)) { + return true; + } + skip = Some(0); // important to not be None so we never call query_node again + if self.search_element(query, 
m, ops, child_index) { + return true; + } + } + None => { + // descend and try find it + match query.query_node_with_metadata(child, m, ops) { + QueryResult::Descend => { + if child.search(query, m, ops, None) { + return true; + } + } + QueryResult::Finish => return true, + QueryResult::Next => (), + QueryResult::Skip(_) => panic!("had skip from non-root node"), + } + if self.search_element(query, m, ops, child_index) { + return true; + } + } + } + } + false + } + } + + pub(crate) fn len(&self) -> usize { + self.length + } + + fn reindex(&mut self, ops: &[Op]) { + let mut index = Index::new(); + for c in &self.children { + index.merge(&c.index); + } + for i in &self.elements { + index.insert(&ops[*i]); + } + self.index = index + } + + pub(crate) fn is_leaf(&self) -> bool { + self.children.is_empty() + } + + pub(crate) fn is_full(&self) -> bool { + self.elements.len() >= 2 * B - 1 + } + + /// Returns the child index and the given index adjusted for the cumulative index before that + /// child. 
+ fn find_child_index(&self, index: usize) -> (usize, usize) { + let mut cumulative_len = 0; + for (child_index, child) in self.children.iter().enumerate() { + if cumulative_len + child.len() >= index { + return (child_index, index - cumulative_len); + } else { + cumulative_len += child.len() + 1; + } + } + panic!("index {} not found in node with len {}", index, self.len()) + } + + pub(crate) fn insert_into_non_full_node(&mut self, index: usize, element: usize, ops: &[Op]) { + assert!(!self.is_full()); + + self.index.insert(&ops[element]); + + if self.is_leaf() { + self.length += 1; + self.elements.insert(index, element); + } else { + let (child_index, sub_index) = self.find_child_index(index); + let child = &mut self.children[child_index]; + + if child.is_full() { + self.split_child(child_index, ops); + + // child structure has changed so we need to find the index again + let (child_index, sub_index) = self.find_child_index(index); + let child = &mut self.children[child_index]; + child.insert_into_non_full_node(sub_index, element, ops); + } else { + child.insert_into_non_full_node(sub_index, element, ops); + } + self.length += 1; + } + } + + // A utility function to split the child `full_child_index` of this node + // Note that `full_child_index` must be full when this function is called. + pub(crate) fn split_child(&mut self, full_child_index: usize, ops: &[Op]) { + let original_len_self = self.len(); + + let full_child = &mut self.children[full_child_index]; + + // Create a new node which is going to store (B-1) keys + // of the full child. 
+ let mut successor_sibling = OpTreeNode::new(); + + let original_len = full_child.len(); + assert!(full_child.is_full()); + + successor_sibling.elements = full_child.elements.split_off(B); + + if !full_child.is_leaf() { + successor_sibling.children = full_child.children.split_off(B); + } + + let middle = full_child.elements.pop().unwrap(); + + full_child.length = + full_child.elements.len() + full_child.children.iter().map(|c| c.len()).sum::(); + + successor_sibling.length = successor_sibling.elements.len() + + successor_sibling + .children + .iter() + .map(|c| c.len()) + .sum::(); + + let z_len = successor_sibling.len(); + + let full_child_len = full_child.len(); + + full_child.reindex(ops); + successor_sibling.reindex(ops); + + self.children + .insert(full_child_index + 1, successor_sibling); + + self.elements.insert(full_child_index, middle); + + assert_eq!(full_child_len + z_len + 1, original_len, "{:#?}", self); + + assert_eq!(original_len_self, self.len()); + } + + fn remove_from_leaf(&mut self, index: usize) -> usize { + self.length -= 1; + self.elements.remove(index) + } + + fn remove_element_from_non_leaf( + &mut self, + index: usize, + element_index: usize, + ops: &[Op], + ) -> usize { + self.length -= 1; + if self.children[element_index].elements.len() >= B { + let total_index = self.cumulative_index(element_index); + // recursively delete index - 1 in predecessor_node + let predecessor = self.children[element_index].remove(index - 1 - total_index, ops); + // replace element with that one + mem::replace(&mut self.elements[element_index], predecessor) + } else if self.children[element_index + 1].elements.len() >= B { + // recursively delete index + 1 in successor_node + let total_index = self.cumulative_index(element_index + 1); + let successor = self.children[element_index + 1].remove(index + 1 - total_index, ops); + // replace element with that one + mem::replace(&mut self.elements[element_index], successor) + } else { + let middle_element = 
self.elements.remove(element_index); + let successor_child = self.children.remove(element_index + 1); + self.children[element_index].merge(middle_element, successor_child, ops); + + let total_index = self.cumulative_index(element_index); + self.children[element_index].remove(index - total_index, ops) + } + } + + fn cumulative_index(&self, child_index: usize) -> usize { + self.children[0..child_index] + .iter() + .map(|c| c.len() + 1) + .sum() + } + + fn remove_from_internal_child( + &mut self, + index: usize, + mut child_index: usize, + ops: &[Op], + ) -> usize { + if self.children[child_index].elements.len() < B + && if child_index > 0 { + self.children[child_index - 1].elements.len() < B + } else { + true + } + && if child_index + 1 < self.children.len() { + self.children[child_index + 1].elements.len() < B + } else { + true + } + { + // if the child and its immediate siblings have B-1 elements merge the child + // with one sibling, moving an element from this node into the new merged node + // to be the median + + if child_index > 0 { + let middle = self.elements.remove(child_index - 1); + + // use the predessor sibling + let successor = self.children.remove(child_index); + child_index -= 1; + + self.children[child_index].merge(middle, successor, ops); + } else { + let middle = self.elements.remove(child_index); + + // use the sucessor sibling + let successor = self.children.remove(child_index + 1); + + self.children[child_index].merge(middle, successor, ops); + } + } else if self.children[child_index].elements.len() < B { + if child_index > 0 + && self + .children + .get(child_index - 1) + .map_or(false, |c| c.elements.len() >= B) + { + let last_element = self.children[child_index - 1].elements.pop().unwrap(); + assert!(!self.children[child_index - 1].elements.is_empty()); + self.children[child_index - 1].length -= 1; + self.children[child_index - 1] + .index + .remove(&ops[last_element]); + + let parent_element = + mem::replace(&mut self.elements[child_index - 
1], last_element); + + self.children[child_index] + .index + .insert(&ops[parent_element]); + self.children[child_index] + .elements + .insert(0, parent_element); + self.children[child_index].length += 1; + + if let Some(last_child) = self.children[child_index - 1].children.pop() { + self.children[child_index - 1].length -= last_child.len(); + self.children[child_index - 1].reindex(ops); + self.children[child_index].length += last_child.len(); + self.children[child_index].children.insert(0, last_child); + self.children[child_index].reindex(ops); + } + } else if self + .children + .get(child_index + 1) + .map_or(false, |c| c.elements.len() >= B) + { + let first_element = self.children[child_index + 1].elements.remove(0); + self.children[child_index + 1] + .index + .remove(&ops[first_element]); + self.children[child_index + 1].length -= 1; + + assert!(!self.children[child_index + 1].elements.is_empty()); + + let parent_element = mem::replace(&mut self.elements[child_index], first_element); + + self.children[child_index].length += 1; + self.children[child_index] + .index + .insert(&ops[parent_element]); + self.children[child_index].elements.push(parent_element); + + if !self.children[child_index + 1].is_leaf() { + let first_child = self.children[child_index + 1].children.remove(0); + self.children[child_index + 1].length -= first_child.len(); + self.children[child_index + 1].reindex(ops); + self.children[child_index].length += first_child.len(); + + self.children[child_index].children.push(first_child); + self.children[child_index].reindex(ops); + } + } + } + self.length -= 1; + let total_index = self.cumulative_index(child_index); + self.children[child_index].remove(index - total_index, ops) + } + + pub(crate) fn check(&self) -> usize { + let l = self.elements.len() + self.children.iter().map(|c| c.check()).sum::(); + assert_eq!(self.len(), l, "{:#?}", self); + + l + } + + pub(crate) fn remove(&mut self, index: usize, ops: &[Op]) -> usize { + let original_len = 
self.len(); + if self.is_leaf() { + let v = self.remove_from_leaf(index); + self.index.remove(&ops[v]); + assert_eq!(original_len, self.len() + 1); + debug_assert_eq!(self.check(), self.len()); + v + } else { + let mut total_index = 0; + for (child_index, child) in self.children.iter().enumerate() { + match (total_index + child.len()).cmp(&index) { + Ordering::Less => { + // should be later on in the loop + total_index += child.len() + 1; + continue; + } + Ordering::Equal => { + let v = self.remove_element_from_non_leaf( + index, + min(child_index, self.elements.len() - 1), + ops, + ); + self.index.remove(&ops[v]); + assert_eq!(original_len, self.len() + 1); + debug_assert_eq!(self.check(), self.len()); + return v; + } + Ordering::Greater => { + let v = self.remove_from_internal_child(index, child_index, ops); + self.index.remove(&ops[v]); + assert_eq!(original_len, self.len() + 1); + debug_assert_eq!(self.check(), self.len()); + return v; + } + } + } + panic!( + "index not found to remove {} {} {} {}", + index, + total_index, + self.len(), + self.check() + ); + } + } + + fn merge(&mut self, middle: usize, successor_sibling: OpTreeNode, ops: &[Op]) { + self.index.insert(&ops[middle]); + self.index.merge(&successor_sibling.index); + self.elements.push(middle); + self.elements.extend(successor_sibling.elements); + self.children.extend(successor_sibling.children); + self.length += successor_sibling.length + 1; + assert!(self.is_full()); + } + + /// Update the operation at the given index using the provided function. + /// + /// This handles updating the indices after the update. 
+ pub(crate) fn update<'a>( + &mut self, + index: usize, + vis: ChangeVisibility<'a>, + ) -> ChangeVisibility<'a> { + if self.is_leaf() { + self.index.change_vis(vis) + } else { + let mut cumulative_len = 0; + let len = self.len(); + for (_child_index, child) in self.children.iter_mut().enumerate() { + match (cumulative_len + child.len()).cmp(&index) { + Ordering::Less => { + cumulative_len += child.len() + 1; + } + Ordering::Equal => { + return self.index.change_vis(vis); + } + Ordering::Greater => { + let vis = child.update(index - cumulative_len, vis); + return self.index.change_vis(vis); + } + } + } + panic!("Invalid index to set: {} but len was {}", index, len) + } + } + + pub(crate) fn last(&self) -> usize { + if self.is_leaf() { + // node is never empty so this is safe + *self.elements.last().unwrap() + } else { + // if not a leaf then there is always at least one child + self.children.last().unwrap().last() + } + } + + pub(crate) fn get(&self, index: usize) -> Option { + if self.is_leaf() { + return self.elements.get(index).copied(); + } else { + let mut cumulative_len = 0; + for (child_index, child) in self.children.iter().enumerate() { + match (cumulative_len + child.len()).cmp(&index) { + Ordering::Less => { + cumulative_len += child.len() + 1; + } + Ordering::Equal => return self.elements.get(child_index).copied(), + Ordering::Greater => { + return child.get(index - cumulative_len); + } + } + } + } + None + } +} diff --git a/rust/automerge/src/parents.rs b/rust/automerge/src/parents.rs new file mode 100644 index 00000000..e1c5cc66 --- /dev/null +++ b/rust/automerge/src/parents.rs @@ -0,0 +1,121 @@ +use crate::op_set; +use crate::op_set::OpSet; +use crate::types::{ListEncoding, ObjId}; +use crate::{exid::ExId, Prop}; + +/// An iterator over the "parents" of an object +/// +/// The "parent" of an object in this context is the ([`ExId`], [`Prop`]) pair which specifies the +/// location of this object in the composite object which contains it. 
Each element in the iterator +/// is a [`Parent`], yielded in reverse order. This means that once the iterator returns `None` you +/// have reached the root of the document. +/// +/// This is returned by [`crate::ReadDoc::parents`] +#[derive(Debug)] +pub struct Parents<'a> { + pub(crate) obj: ObjId, + pub(crate) ops: &'a OpSet, +} + +impl<'a> Parents<'a> { + /// Return the path this `Parents` represents + /// + /// This is _not_ in reverse order. + pub fn path(self) -> Vec<(ExId, Prop)> { + let mut path = self + .map(|Parent { obj, prop, .. }| (obj, prop)) + .collect::>(); + path.reverse(); + path + } + + /// Like `path` but returns `None` if the target is not visible + pub fn visible_path(self) -> Option> { + let mut path = Vec::new(); + for Parent { obj, prop, visible } in self { + if !visible { + return None; + } + path.push((obj, prop)) + } + path.reverse(); + Some(path) + } +} + +impl<'a> Iterator for Parents<'a> { + type Item = Parent; + + fn next(&mut self) -> Option { + if self.obj.is_root() { + None + } else if let Some(op_set::Parent { obj, key, visible }) = self.ops.parent_object(&self.obj) + { + self.obj = obj; + Some(Parent { + obj: self.ops.id_to_exid(self.obj.0), + prop: self + .ops + .export_key(self.obj, key, ListEncoding::List) + .unwrap(), + visible, + }) + } else { + None + } + } +} + +/// A component of a path to an object +#[derive(Debug, PartialEq, Eq)] +pub struct Parent { + /// The object ID this component refers to + pub obj: ExId, + /// The property within `obj` this component refers to + pub prop: Prop, + /// Whether this component is "visible" + /// + /// An "invisible" component is one where the property is hidden, either because it has been + /// deleted or because there is a conflict on this (object, property) pair and this value does + /// not win the conflict. 
+ pub visible: bool, +} + +#[cfg(test)] +mod tests { + use super::Parent; + use crate::{transaction::Transactable, Prop, ReadDoc}; + + #[test] + fn test_invisible_parents() { + // Create a document with a list of objects, then delete one of the objects, then generate + // a path to the deleted object. + + let mut doc = crate::AutoCommit::new(); + let list = doc + .put_object(crate::ROOT, "list", crate::ObjType::List) + .unwrap(); + let obj1 = doc.insert_object(&list, 0, crate::ObjType::Map).unwrap(); + let _obj2 = doc.insert_object(&list, 1, crate::ObjType::Map).unwrap(); + doc.put(&obj1, "key", "value").unwrap(); + doc.delete(&list, 0).unwrap(); + + let mut parents = doc.parents(&obj1).unwrap().collect::>(); + parents.reverse(); + assert_eq!( + parents, + vec![ + Parent { + obj: crate::ROOT, + prop: Prop::Map("list".to_string()), + visible: true, + }, + Parent { + obj: list, + prop: Prop::Seq(0), + visible: false, + }, + ] + ); + } +} diff --git a/automerge/src/query.rs b/rust/automerge/src/query.rs similarity index 53% rename from automerge/src/query.rs rename to rust/automerge/src/query.rs index f09ed0c1..640ecf8d 100644 --- a/automerge/src/query.rs +++ b/rust/automerge/src/query.rs @@ -1,5 +1,7 @@ -use crate::op_tree::{OpSetMetadata, OpTreeNode}; -use crate::types::{Clock, Counter, Key, Op, OpId, OpType, ScalarValue}; +use crate::op_tree::{OpSetMetadata, OpTree, OpTreeNode}; +use crate::types::{ + Clock, Counter, Key, ListEncoding, Op, OpId, OpType, ScalarValue, TextEncoding, +}; use fxhash::FxBuildHasher; use std::cmp::Ordering; use std::collections::{HashMap, HashSet}; @@ -20,6 +22,7 @@ mod map_range_at; mod nth; mod nth_at; mod opid; +mod opid_vis; mod prop; mod prop_at; mod seek_op; @@ -40,6 +43,7 @@ pub(crate) use map_range_at::MapRangeAt; pub(crate) use nth::Nth; pub(crate) use nth_at::NthAt; pub(crate) use opid::OpIdSearch; +pub(crate) use opid_vis::OpIdVisSearch; pub(crate) use prop::Prop; pub(crate) use prop_at::PropAt; pub(crate) use seek_op::SeekOp; 
@@ -47,12 +51,10 @@ pub(crate) use seek_op_with_patch::SeekOpWithPatch; // use a struct for the args for clarity as they are passed up the update chain in the optree #[derive(Debug, Clone)] -pub(crate) struct ReplaceArgs { - pub(crate) old_id: OpId, - pub(crate) new_id: OpId, - pub(crate) old_visible: bool, - pub(crate) new_visible: bool, - pub(crate) new_key: Key, +pub(crate) struct ChangeVisibility<'a> { + pub(crate) old_vis: bool, + pub(crate) new_vis: bool, + pub(crate) op: &'a Op, } #[derive(Debug, Clone, PartialEq)] @@ -63,17 +65,26 @@ pub(crate) struct CounterData { op: Op, } -pub(crate) trait TreeQuery<'a> { +pub(crate) trait TreeQuery<'a>: Clone + Debug { + fn equiv(&mut self, _other: &Self) -> bool { + false + } + + fn can_shortcut_search(&mut self, _tree: &'a OpTree) -> bool { + false + } + #[inline(always)] fn query_node_with_metadata( &mut self, child: &'a OpTreeNode, _m: &OpSetMetadata, + ops: &[Op], ) -> QueryResult { - self.query_node(child) + self.query_node(child, ops) } - fn query_node(&mut self, _child: &'a OpTreeNode) -> QueryResult { + fn query_node(&mut self, _child: &'a OpTreeNode, _ops: &[Op]) -> QueryResult { QueryResult::Descend } @@ -96,65 +107,140 @@ pub(crate) enum QueryResult { Finish, } +#[derive(Clone, Debug, PartialEq)] +struct TextWidth { + utf8: usize, + utf16: usize, +} + +impl TextWidth { + fn add_op(&mut self, op: &Op) { + self.utf8 += op.width(ListEncoding::Text(TextEncoding::Utf8)); + self.utf16 += op.width(ListEncoding::Text(TextEncoding::Utf16)); + } + + fn remove_op(&mut self, op: &Op) { + // Why are we using saturating_sub here? Shouldn't this always be greater than 0? + // + // In the case of objects which are _not_ `Text` we may end up subtracting more than the + // current width. This can happen if the elements in a list are `ScalarValue::str` and + // there are conflicting elements for the same index in the list. 
Like so: + // + // ```notrust + // [ + // "element", + // ["conflict1", "conflict2_longer"], + // "element" + // ] + // ``` + // + // Where there are two conflicted elements at index 1 + // + // in `Index::insert` and `Index::change_visibility` we add the width of the inserted op in + // utf8 and utf16 to the current width, but only if there was not a previous element for + // that index. Imagine that we encounter the "conflict1" op first, then we will add the + // length of 'conflict1' to the text widths. When 'conflict2_longer' is added we don't do + // anything because we've already seen an op for this index. Imagine that later we remove + // the `conflict2_longer` op, then we will end up subtracting the length of + // 'conflict2_longer' from the text widths, hence, `saturating_sub`. This isn't a problem + // because for non text objects we don't need the text widths to be accurate anyway. + // + // Really this is a sign that we should be tracking the type of the Index (List or Text) at + // the type level, but for now we just look the other way. + self.utf8 = self + .utf8 + .saturating_sub(op.width(ListEncoding::Text(TextEncoding::Utf8))); + self.utf16 = self + .utf16 + .saturating_sub(op.width(ListEncoding::Text(TextEncoding::Utf16))); + } + + fn merge(&mut self, other: &TextWidth) { + self.utf8 += other.utf8; + self.utf16 += other.utf16; + } +} + #[derive(Clone, Debug, PartialEq)] pub(crate) struct Index { /// The map of visible keys to the number of visible operations for that key. - pub(crate) visible: HashMap, + visible: HashMap, + visible_text: TextWidth, /// Set of opids found in this node and below. - pub(crate) ops: HashSet, + ops: HashSet, } impl Index { pub(crate) fn new() -> Self { Index { visible: Default::default(), + visible_text: TextWidth { utf8: 0, utf16: 0 }, ops: Default::default(), } } /// Get the number of visible elements in this index. 
- pub(crate) fn visible_len(&self) -> usize { - self.visible.len() + pub(crate) fn visible_len(&self, encoding: ListEncoding) -> usize { + match encoding { + ListEncoding::List => self.visible.len(), + ListEncoding::Text(TextEncoding::Utf8) => self.visible_text.utf8, + ListEncoding::Text(TextEncoding::Utf16) => self.visible_text.utf16, + } } pub(crate) fn has_visible(&self, seen: &Key) -> bool { self.visible.contains_key(seen) } - pub(crate) fn replace( - &mut self, - ReplaceArgs { - old_id, - new_id, - old_visible, - new_visible, - new_key, - }: &ReplaceArgs, - ) { - if old_id != new_id { - self.ops.remove(old_id); - self.ops.insert(*new_id); - } + /// Whether `opid` is in this node or any below it + pub(crate) fn has_op(&self, opid: &OpId) -> bool { + self.ops.contains(opid) + } - match (new_visible, old_visible, new_key) { - (false, true, key) => match self.visible.get(key).copied() { + pub(crate) fn change_vis<'a>( + &mut self, + change_vis: ChangeVisibility<'a>, + ) -> ChangeVisibility<'a> { + let ChangeVisibility { + old_vis, + new_vis, + op, + } = &change_vis; + let key = op.elemid_or_key(); + match (old_vis, new_vis) { + (true, false) => match self.visible.get(&key).copied() { Some(n) if n == 1 => { - self.visible.remove(key); + self.visible.remove(&key); + self.visible_text.remove_op(op); } Some(n) => { - self.visible.insert(*key, n - 1); + self.visible.insert(key, n - 1); } None => panic!("remove overun in index"), }, - (true, false, key) => *self.visible.entry(*key).or_default() += 1, + (false, true) => { + if let Some(n) = self.visible.get(&key) { + self.visible.insert(key, n + 1); + } else { + self.visible.insert(key, 1); + self.visible_text.add_op(op); + } + } _ => {} } + change_vis } pub(crate) fn insert(&mut self, op: &Op) { self.ops.insert(op.id); if op.visible() { - *self.visible.entry(op.elemid_or_key()).or_default() += 1; + let key = op.elemid_or_key(); + if let Some(n) = self.visible.get(&key) { + self.visible.insert(key, n + 1); + } else { + 
self.visible.insert(key, 1); + self.visible_text.add_op(op); + } } } @@ -165,6 +251,7 @@ impl Index { match self.visible.get(&key).copied() { Some(n) if n == 1 => { self.visible.remove(&key); + self.visible_text.remove_op(op); } Some(n) => { self.visible.insert(key, n - 1); @@ -178,9 +265,13 @@ impl Index { for id in &other.ops { self.ops.insert(*id); } - for (elem, n) in other.visible.iter() { - *self.visible.entry(*elem).or_default() += n; + for (elem, other_len) in other.visible.iter() { + self.visible + .entry(*elem) + .and_modify(|len| *len += *other_len) + .or_insert(*other_len); } + self.visible_text.merge(&other.visible_text); } } @@ -253,7 +344,7 @@ impl VisWindow { } } -pub(crate) fn binary_search_by(node: &OpTreeNode, f: F) -> usize +pub(crate) fn binary_search_by(node: &OpTreeNode, ops: &[Op], f: F) -> usize where F: Fn(&Op) -> Ordering, { @@ -261,7 +352,7 @@ where let mut left = 0; while left < right { let seq = (left + right) / 2; - if f(node.get(seq).unwrap()) == Ordering::Less { + if f(&ops[node.get(seq).unwrap()]) == Ordering::Less { left = seq + 1; } else { right = seq; diff --git a/rust/automerge/src/query/elem_id_pos.rs b/rust/automerge/src/query/elem_id_pos.rs new file mode 100644 index 00000000..cb559216 --- /dev/null +++ b/rust/automerge/src/query/elem_id_pos.rs @@ -0,0 +1,74 @@ +use crate::{ + op_tree::OpTreeNode, + types::{ElemId, ListEncoding, Op, OpId}, +}; + +use super::{QueryResult, TreeQuery}; + +/// Lookup the index in the list that this elemid occupies, includes hidden elements. 
+#[derive(Clone, Debug)] +pub(crate) struct ElemIdPos { + elem_opid: OpId, + pos: usize, + found: bool, + encoding: ListEncoding, +} + +impl ElemIdPos { + pub(crate) fn new(elemid: ElemId, encoding: ListEncoding) -> Self { + if elemid.is_head() { + Self { + elem_opid: elemid.0, + pos: 0, + found: true, + encoding, + } + } else { + Self { + elem_opid: elemid.0, + pos: 0, + found: false, + encoding, + } + } + } + + pub(crate) fn index(&self) -> Option { + if self.found { + Some(self.pos) + } else { + None + } + } +} + +impl<'a> TreeQuery<'a> for ElemIdPos { + fn query_node(&mut self, child: &OpTreeNode, _ops: &[Op]) -> QueryResult { + if self.found { + return QueryResult::Finish; + } + // if index has our element then we can continue + if child.index.has_op(&self.elem_opid) { + // element is in this node somewhere + QueryResult::Descend + } else { + // not in this node, try the next one + self.pos += child.index.visible_len(self.encoding); + QueryResult::Next + } + } + + fn query_element(&mut self, element: &crate::types::Op) -> QueryResult { + if self.found { + return QueryResult::Finish; + } + if element.elemid() == Some(ElemId(self.elem_opid)) { + // this is it + self.found = true; + return QueryResult::Finish; + } else if element.visible() { + self.pos += element.width(self.encoding); + } + QueryResult::Next + } +} diff --git a/automerge/src/query/insert.rs b/rust/automerge/src/query/insert.rs similarity index 74% rename from automerge/src/query/insert.rs rename to rust/automerge/src/query/insert.rs index 9e495c49..0dc0e98d 100644 --- a/automerge/src/query/insert.rs +++ b/rust/automerge/src/query/insert.rs @@ -1,7 +1,7 @@ use crate::error::AutomergeError; use crate::op_tree::OpTreeNode; -use crate::query::{QueryResult, TreeQuery}; -use crate::types::{ElemId, Key, Op, HEAD}; +use crate::query::{OpTree, QueryResult, TreeQuery}; +use crate::types::{ElemId, Key, ListEncoding, Op, HEAD}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] @@ -10,6 +10,8 @@ 
pub(crate) struct InsertNth { target: usize, /// the number of visible operations seen seen: usize, + last_width: usize, + encoding: ListEncoding, //pub pos: usize, /// the number of operations (including non-visible) that we have seen n: usize, @@ -22,7 +24,7 @@ pub(crate) struct InsertNth { } impl InsertNth { - pub(crate) fn new(target: usize) -> Self { + pub(crate) fn new(target: usize, encoding: ListEncoding) -> Self { let (valid, last_valid_insert) = if target == 0 { (Some(0), Some(Key::Seq(HEAD))) } else { @@ -31,6 +33,8 @@ impl InsertNth { InsertNth { target, seen: 0, + last_width: 0, + encoding, n: 0, valid, last_seen: None, @@ -46,23 +50,30 @@ impl InsertNth { pub(crate) fn key(&self) -> Result { self.last_valid_insert .ok_or(AutomergeError::InvalidIndex(self.target)) - //if self.target == 0 { - /* - if self.last_insert.is_none() { - Ok(HEAD.into()) - } else if self.seen == self.target && self.last_insert.is_some() { - Ok(Key::Seq(self.last_insert.unwrap())) - } else { - Err(AutomergeError::InvalidIndex(self.target)) - } - */ } } impl<'a> TreeQuery<'a> for InsertNth { - fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { + fn equiv(&mut self, other: &Self) -> bool { + self.pos() == other.pos() && self.key() == other.key() + } + + fn can_shortcut_search(&mut self, tree: &'a OpTree) -> bool { + if let Some((index, pos)) = &tree.last_insert { + if let Some(op) = tree.internal.get(*pos) { + if *index + op.width(self.encoding) == self.target { + self.valid = Some(*pos + 1); + self.last_valid_insert = Some(op.elemid_or_key()); + return true; + } + } + } + false + } + + fn query_node(&mut self, child: &OpTreeNode, ops: &[Op]) -> QueryResult { // if this node has some visible elements then we may find our target within - let mut num_vis = child.index.visible_len(); + let mut num_vis = child.index.visible_len(self.encoding); if let Some(last_seen) = self.last_seen { if child.index.has_visible(&last_seen) { num_vis -= 1; @@ -83,7 +94,7 @@ impl<'a> 
TreeQuery<'a> for InsertNth { // - the insert was at a previous node and this is a long run of overwrites so last_seen should already be set correctly // - the visible op is in this node and the elemid references it so it can be set here // - the visible op is in a future node and so it will be counted as seen there - let last_elemid = child.last().elemid_or_key(); + let last_elemid = ops[child.last()].elemid_or_key(); if child.index.has_visible(&last_elemid) { self.last_seen = Some(last_elemid); } @@ -103,7 +114,8 @@ impl<'a> TreeQuery<'a> for InsertNth { if self.seen >= self.target { return QueryResult::Finish; } - self.seen += 1; + self.last_width = element.width(self.encoding); + self.seen += self.last_width; self.last_seen = Some(element.elemid_or_key()); self.last_valid_insert = self.last_seen } diff --git a/automerge/src/query/keys.rs b/rust/automerge/src/query/keys.rs similarity index 80% rename from automerge/src/query/keys.rs rename to rust/automerge/src/query/keys.rs index 30436f31..edda4fe9 100644 --- a/automerge/src/query/keys.rs +++ b/rust/automerge/src/query/keys.rs @@ -1,4 +1,4 @@ -use crate::op_tree::OpTreeNode; +use crate::op_tree::OpTreeInternal; use crate::types::Key; use std::fmt::Debug; @@ -8,17 +8,17 @@ pub(crate) struct Keys<'a> { last_key: Option, index_back: usize, last_key_back: Option, - root_child: &'a OpTreeNode, + op_tree: &'a OpTreeInternal, } impl<'a> Keys<'a> { - pub(crate) fn new(root_child: &'a OpTreeNode) -> Self { + pub(crate) fn new(op_tree: &'a OpTreeInternal) -> Self { Self { index: 0, last_key: None, - index_back: root_child.len(), + index_back: op_tree.len(), last_key_back: None, - root_child, + op_tree, } } } @@ -28,7 +28,7 @@ impl<'a> Iterator for Keys<'a> { fn next(&mut self) -> Option { for i in self.index..self.index_back { - let op = self.root_child.get(i)?; + let op = self.op_tree.get(i)?; self.index += 1; if Some(op.elemid_or_key()) != self.last_key && op.visible() { self.last_key = Some(op.elemid_or_key()); @@ 
-42,7 +42,7 @@ impl<'a> Iterator for Keys<'a> { impl<'a> DoubleEndedIterator for Keys<'a> { fn next_back(&mut self) -> Option { for i in (self.index..self.index_back).rev() { - let op = self.root_child.get(i)?; + let op = self.op_tree.get(i)?; self.index_back -= 1; if Some(op.elemid_or_key()) != self.last_key_back && op.visible() { self.last_key_back = Some(op.elemid_or_key()); diff --git a/automerge/src/query/keys_at.rs b/rust/automerge/src/query/keys_at.rs similarity index 82% rename from automerge/src/query/keys_at.rs rename to rust/automerge/src/query/keys_at.rs index 71da2927..bf5b5e0e 100644 --- a/automerge/src/query/keys_at.rs +++ b/rust/automerge/src/query/keys_at.rs @@ -1,4 +1,4 @@ -use crate::op_tree::OpTreeNode; +use crate::op_tree::OpTreeInternal; use crate::query::VisWindow; use crate::types::{Clock, Key}; use std::fmt::Debug; @@ -11,19 +11,19 @@ pub(crate) struct KeysAt<'a> { last_key: Option, index_back: usize, last_key_back: Option, - root_child: &'a OpTreeNode, + op_tree: &'a OpTreeInternal, } impl<'a> KeysAt<'a> { - pub(crate) fn new(root_child: &'a OpTreeNode, clock: Clock) -> Self { + pub(crate) fn new(op_tree: &'a OpTreeInternal, clock: Clock) -> Self { Self { clock, window: VisWindow::default(), index: 0, last_key: None, - index_back: root_child.len(), + index_back: op_tree.len(), last_key_back: None, - root_child, + op_tree, } } } @@ -33,7 +33,7 @@ impl<'a> Iterator for KeysAt<'a> { fn next(&mut self) -> Option { for i in self.index..self.index_back { - let op = self.root_child.get(i)?; + let op = self.op_tree.get(i)?; let visible = self.window.visible_at(op, i, &self.clock); self.index += 1; if Some(op.elemid_or_key()) != self.last_key && visible { @@ -48,7 +48,7 @@ impl<'a> Iterator for KeysAt<'a> { impl<'a> DoubleEndedIterator for KeysAt<'a> { fn next_back(&mut self) -> Option { for i in self.index..self.index_back { - let op = self.root_child.get(i)?; + let op = self.op_tree.get(i)?; let visible = self.window.visible_at(op, i, 
&self.clock); self.index_back -= 1; if Some(op.elemid_or_key()) != self.last_key_back && visible { diff --git a/rust/automerge/src/query/len.rs b/rust/automerge/src/query/len.rs new file mode 100644 index 00000000..9134b11f --- /dev/null +++ b/rust/automerge/src/query/len.rs @@ -0,0 +1,23 @@ +use crate::op_tree::OpTreeNode; +use crate::query::{QueryResult, TreeQuery}; +use crate::types::{ListEncoding, Op}; +use std::fmt::Debug; + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct Len { + pub(crate) len: usize, + encoding: ListEncoding, +} + +impl Len { + pub(crate) fn new(encoding: ListEncoding) -> Self { + Len { len: 0, encoding } + } +} + +impl<'a> TreeQuery<'a> for Len { + fn query_node(&mut self, child: &OpTreeNode, _ops: &[Op]) -> QueryResult { + self.len = child.index.visible_len(self.encoding); + QueryResult::Finish + } +} diff --git a/automerge/src/query/len_at.rs b/rust/automerge/src/query/len_at.rs similarity index 78% rename from automerge/src/query/len_at.rs rename to rust/automerge/src/query/len_at.rs index 46744c84..9380501e 100644 --- a/automerge/src/query/len_at.rs +++ b/rust/automerge/src/query/len_at.rs @@ -1,5 +1,5 @@ use crate::query::{QueryResult, TreeQuery, VisWindow}; -use crate::types::{Clock, ElemId, Op}; +use crate::types::{Clock, ElemId, ListEncoding, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] @@ -7,16 +7,18 @@ pub(crate) struct LenAt { pub(crate) len: usize, clock: Clock, pos: usize, + encoding: ListEncoding, last: Option, window: VisWindow, } impl LenAt { - pub(crate) fn new(clock: Clock) -> Self { + pub(crate) fn new(clock: Clock, encoding: ListEncoding) -> Self { LenAt { clock, pos: 0, len: 0, + encoding, last: None, window: Default::default(), } @@ -31,7 +33,7 @@ impl<'a> TreeQuery<'a> for LenAt { let elem = op.elemid(); let visible = self.window.visible_at(op, self.pos, &self.clock); if elem != self.last && visible { - self.len += 1; + self.len += op.width(self.encoding); self.last = elem; } self.pos += 1; diff 
--git a/automerge/src/query/list_range.rs b/rust/automerge/src/query/list_range.rs similarity index 89% rename from automerge/src/query/list_range.rs rename to rust/automerge/src/query/list_range.rs index d3206af3..d01082ab 100644 --- a/automerge/src/query/list_range.rs +++ b/rust/automerge/src/query/list_range.rs @@ -1,5 +1,5 @@ use crate::exid::ExId; -use crate::op_tree::OpTreeNode; +use crate::op_tree::OpTreeInternal; use crate::types::{ElemId, OpId}; use crate::values::ValueIter; use crate::{Automerge, Value}; @@ -14,19 +14,19 @@ pub(crate) struct ListRange<'a, R: RangeBounds> { last_elemid: Option, next_result: Option<(usize, Value<'a>, OpId)>, index_back: usize, - root_child: &'a OpTreeNode, + op_tree: &'a OpTreeInternal, } impl<'a, R: RangeBounds> ListRange<'a, R> { - pub(crate) fn new(range: R, root_child: &'a OpTreeNode) -> Self { + pub(crate) fn new(range: R, op_tree: &'a OpTreeInternal) -> Self { Self { range, index: 0, // FIXME root_child.seek_to_pos(range.start) pos: 0, // FIXME range.start last_elemid: None, next_result: None, - index_back: root_child.len(), - root_child, + index_back: op_tree.len(), + op_tree, } } } @@ -45,7 +45,7 @@ impl<'a, R: RangeBounds> Iterator for ListRange<'a, R> { // point and stop at the end point and not needless scan all the ops before and after the range fn next(&mut self) -> Option { for i in self.index..self.index_back { - let op = self.root_child.get(i)?; + let op = self.op_tree.get(i)?; self.index += 1; if op.visible() { if op.elemid() != self.last_elemid { diff --git a/automerge/src/query/list_range_at.rs b/rust/automerge/src/query/list_range_at.rs similarity index 88% rename from automerge/src/query/list_range_at.rs rename to rust/automerge/src/query/list_range_at.rs index 5c7257af..33cdf548 100644 --- a/automerge/src/query/list_range_at.rs +++ b/rust/automerge/src/query/list_range_at.rs @@ -1,6 +1,6 @@ use super::VisWindow; use crate::exid::ExId; -use crate::op_tree::OpTreeNode; +use 
crate::op_tree::OpTreeInternal; use crate::types::{Clock, ElemId, OpId}; use crate::values::ValueIter; use crate::{Automerge, Value}; @@ -15,7 +15,7 @@ pub(crate) struct ListRangeAt<'a, R: RangeBounds> { last_elemid: Option, next_result: Option<(usize, Value<'a>, OpId)>, index_back: usize, - root_child: &'a OpTreeNode, + op_tree: &'a OpTreeInternal, clock: Clock, window: VisWindow, } @@ -27,15 +27,15 @@ impl<'a, R: RangeBounds> ValueIter<'a> for ListRangeAt<'a, R> { } impl<'a, R: RangeBounds> ListRangeAt<'a, R> { - pub(crate) fn new(range: R, clock: Clock, root_child: &'a OpTreeNode) -> Self { + pub(crate) fn new(range: R, clock: Clock, op_tree: &'a OpTreeInternal) -> Self { Self { range, index: 0, // FIXME root_child.seek_to_pos(range.start) pos: 0, // FIXME range.start last_elemid: None, next_result: None, - index_back: root_child.len(), - root_child, + index_back: op_tree.len(), + op_tree, clock, window: VisWindow::default(), } @@ -47,7 +47,7 @@ impl<'a, R: RangeBounds> Iterator for ListRangeAt<'a, R> { fn next(&mut self) -> Option { for i in self.index..self.index_back { - let op = self.root_child.get(i)?; + let op = self.op_tree.get(i)?; let visible = self.window.visible_at(op, i, &self.clock); self.index += 1; if visible { diff --git a/automerge/src/query/list_vals.rs b/rust/automerge/src/query/list_vals.rs similarity index 85% rename from automerge/src/query/list_vals.rs rename to rust/automerge/src/query/list_vals.rs index 4ad2f47b..6c056621 100644 --- a/automerge/src/query/list_vals.rs +++ b/rust/automerge/src/query/list_vals.rs @@ -19,10 +19,10 @@ impl ListVals { } impl<'a> TreeQuery<'a> for ListVals { - fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { + fn query_node(&mut self, child: &OpTreeNode, ops: &[Op]) -> QueryResult { let start = 0; for pos in start..child.len() { - let op = child.get(pos).unwrap(); + let op = &ops[child.get(pos).unwrap()]; if op.insert { self.last_elem = None; } diff --git a/automerge/src/query/list_vals_at.rs 
b/rust/automerge/src/query/list_vals_at.rs similarity index 100% rename from automerge/src/query/list_vals_at.rs rename to rust/automerge/src/query/list_vals_at.rs diff --git a/automerge/src/query/map_range.rs b/rust/automerge/src/query/map_range.rs similarity index 91% rename from automerge/src/query/map_range.rs rename to rust/automerge/src/query/map_range.rs index 81334ca4..909312db 100644 --- a/automerge/src/query/map_range.rs +++ b/rust/automerge/src/query/map_range.rs @@ -1,5 +1,5 @@ use crate::exid::ExId; -use crate::op_tree::{OpSetMetadata, OpTreeNode}; +use crate::op_tree::{OpSetMetadata, OpTreeInternal}; use crate::types::{Key, OpId}; use crate::values::ValueIter; use crate::{Automerge, Value}; @@ -14,7 +14,7 @@ pub(crate) struct MapRange<'a, R: RangeBounds> { next_result: Option<(&'a str, Value<'a>, OpId)>, index_back: usize, last_key_back: Option, - root_child: &'a OpTreeNode, + op_tree: &'a OpTreeInternal, meta: &'a OpSetMetadata, } @@ -25,15 +25,15 @@ impl<'a, R: RangeBounds> ValueIter<'a> for MapRange<'a, R> { } impl<'a, R: RangeBounds> MapRange<'a, R> { - pub(crate) fn new(range: R, root_child: &'a OpTreeNode, meta: &'a OpSetMetadata) -> Self { + pub(crate) fn new(range: R, op_tree: &'a OpTreeInternal, meta: &'a OpSetMetadata) -> Self { Self { range, index: 0, last_key: None, next_result: None, - index_back: root_child.len(), + index_back: op_tree.len(), last_key_back: None, - root_child, + op_tree, meta, } } @@ -47,7 +47,7 @@ impl<'a, R: RangeBounds> Iterator for MapRange<'a, R> { // point and stop at the end point and not needless scan all the ops before and after the range fn next(&mut self) -> Option { for i in self.index..self.index_back { - let op = self.root_child.get(i)?; + let op = self.op_tree.get(i)?; self.index += 1; if op.visible() { let prop = match op.key { @@ -72,7 +72,7 @@ impl<'a, R: RangeBounds> Iterator for MapRange<'a, R> { impl<'a, R: RangeBounds> DoubleEndedIterator for MapRange<'a, R> { fn next_back(&mut self) -> Option { for 
i in (self.index..self.index_back).rev() { - let op = self.root_child.get(i)?; + let op = self.op_tree.get(i)?; self.index_back -= 1; if Some(op.key) != self.last_key_back && op.visible() { diff --git a/automerge/src/query/map_range_at.rs b/rust/automerge/src/query/map_range_at.rs similarity index 92% rename from automerge/src/query/map_range_at.rs rename to rust/automerge/src/query/map_range_at.rs index 84453955..c5c5af06 100644 --- a/automerge/src/query/map_range_at.rs +++ b/rust/automerge/src/query/map_range_at.rs @@ -1,6 +1,6 @@ use crate::clock::Clock; use crate::exid::ExId; -use crate::op_tree::{OpSetMetadata, OpTreeNode}; +use crate::op_tree::{OpSetMetadata, OpTreeInternal}; use crate::types::{Key, OpId}; use crate::values::ValueIter; use crate::{Automerge, Value}; @@ -22,7 +22,7 @@ pub(crate) struct MapRangeAt<'a, R: RangeBounds> { index_back: usize, last_key_back: Option, - root_child: &'a OpTreeNode, + op_tree: &'a OpTreeInternal, meta: &'a OpSetMetadata, } @@ -35,7 +35,7 @@ impl<'a, R: RangeBounds> ValueIter<'a> for MapRangeAt<'a, R> { impl<'a, R: RangeBounds> MapRangeAt<'a, R> { pub(crate) fn new( range: R, - root_child: &'a OpTreeNode, + op_tree: &'a OpTreeInternal, meta: &'a OpSetMetadata, clock: Clock, ) -> Self { @@ -46,9 +46,9 @@ impl<'a, R: RangeBounds> MapRangeAt<'a, R> { index: 0, last_key: None, next_result: None, - index_back: root_child.len(), + index_back: op_tree.len(), last_key_back: None, - root_child, + op_tree, meta, } } @@ -59,7 +59,7 @@ impl<'a, R: RangeBounds> Iterator for MapRangeAt<'a, R> { fn next(&mut self) -> Option { for i in self.index..self.index_back { - let op = self.root_child.get(i)?; + let op = self.op_tree.get(i)?; let visible = self.window.visible_at(op, i, &self.clock); self.index += 1; if visible { @@ -85,7 +85,7 @@ impl<'a, R: RangeBounds> Iterator for MapRangeAt<'a, R> { impl<'a, R: RangeBounds> DoubleEndedIterator for MapRangeAt<'a, R> { fn next_back(&mut self) -> Option { for i in 
(self.index..self.index_back).rev() { - let op = self.root_child.get(i)?; + let op = self.op_tree.get(i)?; let visible = self.window.visible_at(op, i, &self.clock); self.index_back -= 1; if Some(op.key) != self.last_key_back && visible { diff --git a/automerge/src/query/nth.rs b/rust/automerge/src/query/nth.rs similarity index 65% rename from automerge/src/query/nth.rs rename to rust/automerge/src/query/nth.rs index f73f2a10..ed374b9b 100644 --- a/automerge/src/query/nth.rs +++ b/rust/automerge/src/query/nth.rs @@ -1,13 +1,16 @@ use crate::error::AutomergeError; -use crate::op_tree::OpTreeNode; +use crate::op_set::OpSet; +use crate::op_tree::{OpTree, OpTreeNode}; use crate::query::{QueryResult, TreeQuery}; -use crate::types::{Key, Op}; +use crate::types::{Key, ListEncoding, Op, OpIds}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] pub(crate) struct Nth<'a> { target: usize, seen: usize, + encoding: ListEncoding, + last_width: usize, /// last_seen is the target elemid of the last `seen` operation. /// It is used to avoid double counting visible elements (which arise through conflicts) that are split across nodes. 
last_seen: Option, @@ -17,10 +20,12 @@ pub(crate) struct Nth<'a> { } impl<'a> Nth<'a> { - pub(crate) fn new(target: usize) -> Self { + pub(crate) fn new(target: usize, encoding: ListEncoding) -> Self { Nth { target, seen: 0, + last_width: 1, + encoding, last_seen: None, ops: vec![], ops_pos: vec![], @@ -28,6 +33,10 @@ impl<'a> Nth<'a> { } } + pub(crate) fn pred(&self, ops: &OpSet) -> OpIds { + ops.m.sorted_opids(self.ops.iter().map(|o| o.id)) + } + /// Get the key pub(crate) fn key(&self) -> Result { // the query collects the ops so we can use that to get the key they all use @@ -37,11 +46,35 @@ impl<'a> Nth<'a> { Err(AutomergeError::InvalidIndex(self.target)) } } + + pub(crate) fn index(&self) -> usize { + self.seen - self.last_width + } } impl<'a> TreeQuery<'a> for Nth<'a> { - fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { - let mut num_vis = child.index.visible_len(); + fn equiv(&mut self, other: &Self) -> bool { + self.index() == other.index() && self.key() == other.key() + } + + fn can_shortcut_search(&mut self, tree: &'a OpTree) -> bool { + if let Some((index, pos)) = &tree.last_insert { + if *index == self.target { + if let Some(op) = tree.internal.get(*pos) { + self.last_width = op.width(self.encoding); + self.seen = *index + self.last_width; + self.ops.push(op); + self.ops_pos.push(*pos); + self.pos = *pos + 1; + return true; + } + } + } + false + } + + fn query_node(&mut self, child: &OpTreeNode, ops: &[Op]) -> QueryResult { + let mut num_vis = child.index.visible_len(self.encoding); if let Some(last_seen) = self.last_seen { if child.index.has_visible(&last_seen) { num_vis -= 1; @@ -61,7 +94,7 @@ impl<'a> TreeQuery<'a> for Nth<'a> { // - the insert was at a previous node and this is a long run of overwrites so last_seen should already be set correctly // - the visible op is in this node and the elemid references it so it can be set here // - the visible op is in a future node and so it will be counted as seen there - let last_elemid = 
child.last().elemid_or_key(); + let last_elemid = ops[child.last()].elemid_or_key(); if child.index.has_visible(&last_elemid) { self.last_seen = Some(last_elemid); } @@ -79,11 +112,12 @@ impl<'a> TreeQuery<'a> for Nth<'a> { } let visible = element.visible(); if visible && self.last_seen.is_none() { - self.seen += 1; + self.last_width = element.width(self.encoding); + self.seen += self.last_width; // we have a new visible element self.last_seen = Some(element.elemid_or_key()) } - if self.seen == self.target + 1 && visible { + if self.seen > self.target && visible { self.ops.push(element); self.ops_pos.push(self.pos); } diff --git a/automerge/src/query/nth_at.rs b/rust/automerge/src/query/nth_at.rs similarity index 86% rename from automerge/src/query/nth_at.rs rename to rust/automerge/src/query/nth_at.rs index 10851e7c..e193ca03 100644 --- a/automerge/src/query/nth_at.rs +++ b/rust/automerge/src/query/nth_at.rs @@ -1,5 +1,5 @@ use crate::query::{QueryResult, TreeQuery, VisWindow}; -use crate::types::{Clock, ElemId, Op}; +use crate::types::{Clock, ElemId, ListEncoding, Op}; use std::fmt::Debug; #[derive(Debug, Clone, PartialEq)] @@ -7,6 +7,7 @@ pub(crate) struct NthAt { clock: Clock, target: usize, seen: usize, + encoding: ListEncoding, last_seen: Option, window: VisWindow, pub(crate) ops: Vec, @@ -15,11 +16,12 @@ pub(crate) struct NthAt { } impl NthAt { - pub(crate) fn new(target: usize, clock: Clock) -> Self { + pub(crate) fn new(target: usize, clock: Clock, encoding: ListEncoding) -> Self { NthAt { clock, target, seen: 0, + encoding, last_seen: None, ops: vec![], ops_pos: vec![], @@ -39,10 +41,10 @@ impl<'a> TreeQuery<'a> for NthAt { } let visible = self.window.visible_at(element, self.pos, &self.clock); if visible && self.last_seen.is_none() { - self.seen += 1; + self.seen += element.width(self.encoding); self.last_seen = element.elemid() } - if self.seen == self.target + 1 && visible { + if self.seen > self.target && visible { for (vpos, vop) in 
self.window.seen_op(element, self.pos) { if vop.is_counter() { // this could be out of order because of inc's - we can find the right place diff --git a/automerge/src/query/opid.rs b/rust/automerge/src/query/opid.rs similarity index 76% rename from automerge/src/query/opid.rs rename to rust/automerge/src/query/opid.rs index 6c29dcf6..3d4c8b24 100644 --- a/automerge/src/query/opid.rs +++ b/rust/automerge/src/query/opid.rs @@ -1,6 +1,6 @@ use crate::op_tree::OpTreeNode; use crate::query::{QueryResult, TreeQuery}; -use crate::types::{ElemId, Key, Op, OpId}; +use crate::types::{Key, Op, OpId}; /// Search for an OpId in a tree. /// Returns the index of the operation in the tree. @@ -30,14 +30,10 @@ impl OpIdSearch { None } } - - pub(crate) fn key(&self) -> &Option { - &self.key - } } impl<'a> TreeQuery<'a> for OpIdSearch { - fn query_node(&mut self, child: &OpTreeNode) -> QueryResult { + fn query_node(&mut self, child: &OpTreeNode, _ops: &[Op]) -> QueryResult { if child.index.ops.contains(&self.target) { QueryResult::Descend } else { @@ -49,11 +45,6 @@ impl<'a> TreeQuery<'a> for OpIdSearch { fn query_element(&mut self, element: &Op) -> QueryResult { if element.id == self.target { self.found = true; - if element.insert { - self.key = Some(Key::Seq(ElemId(element.id))); - } else { - self.key = Some(element.key); - } QueryResult::Finish } else { self.pos += 1; diff --git a/rust/automerge/src/query/opid_vis.rs b/rust/automerge/src/query/opid_vis.rs new file mode 100644 index 00000000..c0d2cc89 --- /dev/null +++ b/rust/automerge/src/query/opid_vis.rs @@ -0,0 +1,62 @@ +use crate::op_tree::OpTreeNode; +use crate::query::{QueryResult, TreeQuery}; +use crate::types::{Key, Op, OpId}; + +/// Search for an OpId in a tree. +/// Returns the index of the operation in the tree. 
+#[derive(Debug, Clone, PartialEq)] +pub(crate) struct OpIdVisSearch { + target: OpId, + found: bool, + pub(crate) visible: bool, + key: Option, +} + +impl OpIdVisSearch { + pub(crate) fn new(target: OpId) -> Self { + OpIdVisSearch { + target, + found: false, + visible: true, + key: None, + } + } + + pub(crate) fn key(&self) -> &Option { + &self.key + } +} + +impl<'a> TreeQuery<'a> for OpIdVisSearch { + fn query_node(&mut self, child: &OpTreeNode, _ops: &[Op]) -> QueryResult { + if child.index.ops.contains(&self.target) { + QueryResult::Descend + } else { + QueryResult::Next + } + } + + fn query_element(&mut self, element: &Op) -> QueryResult { + if element.id == self.target { + self.found = true; + self.key = Some(element.elemid_or_key()); + if element.visible() { + QueryResult::Next + } else { + self.visible = false; + QueryResult::Finish + } + } else if self.found { + if self.key != Some(element.elemid_or_key()) { + QueryResult::Finish + } else if element.visible() { + self.visible = false; + QueryResult::Finish + } else { + QueryResult::Next + } + } else { + QueryResult::Next + } + } +} diff --git a/automerge/src/query/prop.rs b/rust/automerge/src/query/prop.rs similarity index 57% rename from automerge/src/query/prop.rs rename to rust/automerge/src/query/prop.rs index 105b268f..d2a11361 100644 --- a/automerge/src/query/prop.rs +++ b/rust/automerge/src/query/prop.rs @@ -9,7 +9,6 @@ pub(crate) struct Prop<'a> { pub(crate) ops: Vec<&'a Op>, pub(crate) ops_pos: Vec, pub(crate) pos: usize, - start: Option, } impl<'a> Prop<'a> { @@ -19,7 +18,6 @@ impl<'a> Prop<'a> { ops: vec![], ops_pos: vec![], pos: 0, - start: None, } } } @@ -29,27 +27,11 @@ impl<'a> TreeQuery<'a> for Prop<'a> { &mut self, child: &'a OpTreeNode, m: &OpSetMetadata, + ops: &[Op], ) -> QueryResult { - if let Some(start) = self.start { - if self.pos + child.len() >= start { - // skip empty nodes - if child.index.visible_len() == 0 { - self.pos += child.len(); - QueryResult::Next - } else { - 
QueryResult::Descend - } - } else { - self.pos += child.len(); - QueryResult::Next - } - } else { - // in the root node find the first op position for the key - let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.key)); - self.start = Some(start); - self.pos = start; - QueryResult::Skip(start) - } + let start = binary_search_by(child, ops, |op| m.key_cmp(&op.key, &self.key)); + self.pos = start; + QueryResult::Skip(start) } fn query_element(&mut self, op: &'a Op) -> QueryResult { diff --git a/automerge/src/query/prop_at.rs b/rust/automerge/src/query/prop_at.rs similarity index 92% rename from automerge/src/query/prop_at.rs rename to rust/automerge/src/query/prop_at.rs index 08b1cb59..f0c2eedc 100644 --- a/automerge/src/query/prop_at.rs +++ b/rust/automerge/src/query/prop_at.rs @@ -29,12 +29,13 @@ impl<'a> TreeQuery<'a> for PropAt { &mut self, child: &'a OpTreeNode, m: &OpSetMetadata, + ops: &[Op], ) -> QueryResult { - let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.key)); + let start = binary_search_by(child, ops, |op| m.key_cmp(&op.key, &self.key)); let mut window: VisWindow = Default::default(); self.pos = start; for pos in start..child.len() { - let op = child.get(pos).unwrap(); + let op = &ops[child.get(pos).unwrap()]; if op.key != self.key { break; } diff --git a/rust/automerge/src/query/seek_op.rs b/rust/automerge/src/query/seek_op.rs new file mode 100644 index 00000000..2ed875d2 --- /dev/null +++ b/rust/automerge/src/query/seek_op.rs @@ -0,0 +1,247 @@ +use crate::op_tree::{OpSetMetadata, OpTreeNode}; +use crate::query::{binary_search_by, QueryResult, TreeQuery}; +use crate::types::{Key, Op, HEAD}; +use std::cmp::Ordering; +use std::fmt::Debug; + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct SeekOp<'a> { + /// the op we are looking for + op: &'a Op, + /// The position to insert at + pub(crate) pos: usize, + /// The indices of ops that this op overwrites + pub(crate) succ: Vec, + /// whether a position has been found + 
found: bool, +} + +impl<'a> SeekOp<'a> { + pub(crate) fn new(op: &'a Op) -> Self { + SeekOp { + op, + succ: vec![], + pos: 0, + found: false, + } + } + + fn lesser_insert(&self, op: &Op, m: &OpSetMetadata) -> bool { + op.insert && m.lamport_cmp(op.id, self.op.id) == Ordering::Less + } + + fn greater_opid(&self, op: &Op, m: &OpSetMetadata) -> bool { + m.lamport_cmp(op.id, self.op.id) == Ordering::Greater + } + + fn is_target_insert(&self, op: &Op) -> bool { + op.insert && op.elemid() == self.op.key.elemid() + } +} + +impl<'a> TreeQuery<'a> for SeekOp<'a> { + fn query_node_with_metadata( + &mut self, + child: &OpTreeNode, + m: &OpSetMetadata, + ops: &[Op], + ) -> QueryResult { + if self.found { + return QueryResult::Descend; + } + match self.op.key { + Key::Seq(HEAD) => { + while self.pos < child.len() { + let op = &ops[child.get(self.pos).unwrap()]; + if op.insert && m.lamport_cmp(op.id, self.op.id) == Ordering::Less { + break; + } + self.pos += 1; + } + QueryResult::Finish + } + Key::Seq(e) => { + if child.index.ops.contains(&e.0) { + QueryResult::Descend + } else { + self.pos += child.len(); + QueryResult::Next + } + } + Key::Map(_) => { + let start = binary_search_by(child, ops, |op| m.key_cmp(&op.key, &self.op.key)); + self.pos = start; + QueryResult::Skip(start) + } + } + } + + fn query_element_with_metadata(&mut self, e: &Op, m: &OpSetMetadata) -> QueryResult { + match self.op.key { + Key::Map(_) => { + // don't bother looking at things past our key + if e.key != self.op.key { + return QueryResult::Finish; + } + + if self.op.overwrites(e) { + self.succ.push(self.pos); + } + + if m.lamport_cmp(e.id, self.op.id) == Ordering::Greater { + return QueryResult::Finish; + } + + self.pos += 1; + QueryResult::Next + } + Key::Seq(_) => { + if !self.found { + if self.is_target_insert(e) { + self.found = true; + if self.op.overwrites(e) { + self.succ.push(self.pos); + } + } + self.pos += 1; + QueryResult::Next + } else { + // we have already found the target + if 
self.op.overwrites(e) { + self.succ.push(self.pos); + } + if self.op.insert { + if self.lesser_insert(e, m) { + QueryResult::Finish + } else { + self.pos += 1; + QueryResult::Next + } + } else if e.insert || self.greater_opid(e, m) { + QueryResult::Finish + } else { + self.pos += 1; + QueryResult::Next + } + } + } + } + } +} + +#[cfg(test)] +pub(crate) mod tests { + use crate::{ + op_set::OpSet, + op_tree::B, + query::SeekOp, + types::{Key, ObjId, Op, OpId}, + ActorId, ScalarValue, + }; + + /// Create an optree in which the only visible ops are on the boundaries of the nodes, + /// i.e. the visible elements are in the internal nodes. Like so + /// + /// ```notrust + /// + /// .----------------------. + /// | id | key | succ | + /// | B | "a" | | + /// | 2B | "b" | | + /// '----------------------' + /// / | \ + /// ;------------------------. | `------------------------------------. + /// | id | op | succ | | | id | op | succ | + /// | 0 |set "a" | 1 | | | 2B + 1 |set "c" | 2B + 2 | + /// | 1 |set "a" | 2 | | | 2B + 2 |set "c" | 2B + 3 | + /// | 2 |set "a" | 3 | | ... + /// ... | | 3B |set "c" | | + /// | B - 1 |set "a" | B | | '------------------------------------' + /// '--------'--------'------' | + /// | + /// .-----------------------------. + /// | id | key | succ | + /// | B + 1 | "b" | B + 2 | + /// | B + 2 | "b" | B + 3 | + /// .... + /// | B + (B - 1 | "b" | 2B | + /// '-----------------------------' + /// ``` + /// + /// The important point here is that the leaf nodes contain no visible ops for keys "a" and + /// "b". + /// + /// # Returns + /// + /// The opset in question and an op which should be inserted at the next position after the + /// internally visible ops. 
+ pub(crate) fn optree_with_only_internally_visible_ops() -> (OpSet, Op) { + let mut set = OpSet::new(); + let actor = set.m.actors.cache(ActorId::random()); + let a = set.m.props.cache("a".to_string()); + let b = set.m.props.cache("b".to_string()); + let c = set.m.props.cache("c".to_string()); + + let mut counter = 0; + // For each key insert `B` operations with the `pred` and `succ` setup such that the final + // operation for each key is the only visible op. + for key in [a, b, c] { + for iteration in 0..B { + // Generate a value to insert + let keystr = set.m.props.get(key); + let val = keystr.repeat(iteration + 1); + + // Only the last op is visible + let pred = if iteration == 0 { + Default::default() + } else { + set.m + .sorted_opids(vec![OpId::new(counter - 1, actor)].into_iter()) + }; + + // only the last op is visible + let succ = if iteration == B - 1 { + Default::default() + } else { + set.m + .sorted_opids(vec![OpId::new(counter, actor)].into_iter()) + }; + + let op = Op { + id: OpId::new(counter, actor), + action: crate::OpType::Put(ScalarValue::Str(val.into())), + key: Key::Map(key), + succ, + pred, + insert: false, + }; + set.insert(counter as usize, &ObjId::root(), op); + counter += 1; + } + } + + // Now try and create an op which inserts at the next index of 'a' + let new_op = Op { + id: OpId::new(counter, actor), + action: crate::OpType::Put(ScalarValue::Str("test".into())), + key: Key::Map(a), + succ: Default::default(), + pred: set + .m + .sorted_opids(std::iter::once(OpId::new(B as u64 - 1, actor))), + insert: false, + }; + (set, new_op) + } + + #[test] + fn seek_on_page_boundary() { + let (set, new_op) = optree_with_only_internally_visible_ops(); + + let q = SeekOp::new(&new_op); + let q = set.search(&ObjId::root(), q); + + // we've inserted `B - 1` elements for "a", so the index should be `B` + assert_eq!(q.pos, B); + } +} diff --git a/automerge/src/query/seek_op_with_patch.rs b/rust/automerge/src/query/seek_op_with_patch.rs similarity 
index 86% rename from automerge/src/query/seek_op_with_patch.rs rename to rust/automerge/src/query/seek_op_with_patch.rs index e8ebded8..cd30f5bb 100644 --- a/automerge/src/query/seek_op_with_patch.rs +++ b/rust/automerge/src/query/seek_op_with_patch.rs @@ -1,6 +1,6 @@ use crate::op_tree::{OpSetMetadata, OpTreeNode}; use crate::query::{binary_search_by, QueryResult, TreeQuery}; -use crate::types::{Key, Op, HEAD}; +use crate::types::{Key, ListEncoding, Op, HEAD}; use std::cmp::Ordering; use std::fmt::Debug; @@ -8,31 +8,29 @@ use std::fmt::Debug; pub(crate) struct SeekOpWithPatch<'a> { op: Op, pub(crate) pos: usize, - /// A position counter for after we find the insert position to record conflicts. - later_pos: usize, pub(crate) succ: Vec, found: bool, + encoding: ListEncoding, pub(crate) seen: usize, + pub(crate) last_width: usize, last_seen: Option, pub(crate) values: Vec<&'a Op>, pub(crate) had_value_before: bool, - /// The found start position of the key if there is one yet (for map objects). - start: Option, } impl<'a> SeekOpWithPatch<'a> { - pub(crate) fn new(op: &Op) -> Self { + pub(crate) fn new(op: &Op, encoding: ListEncoding) -> Self { SeekOpWithPatch { op: op.clone(), succ: vec![], pos: 0, - later_pos: 0, found: false, + encoding, seen: 0, + last_width: 0, last_seen: None, values: vec![], had_value_before: false, - start: None, } } @@ -60,7 +58,7 @@ impl<'a> SeekOpWithPatch<'a> { self.last_seen = None } if e.visible() && self.last_seen.is_none() { - self.seen += 1; + self.seen += e.width(self.encoding); self.last_seen = Some(e.elemid_or_key()) } } @@ -71,6 +69,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { &mut self, child: &'a OpTreeNode, m: &OpSetMetadata, + ops: &[Op], ) -> QueryResult { if self.found { return QueryResult::Descend; @@ -81,7 +80,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { // the opId of the operation being inserted. 
Key::Seq(e) if e == HEAD => { while self.pos < child.len() { - let op = child.get(self.pos).unwrap(); + let op = &ops[child.get(self.pos).unwrap()]; if op.insert && m.lamport_cmp(op.id, self.op.id) == Ordering::Less { break; } @@ -104,7 +103,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { // elements it contains. However, it could happen that a visible element is // split across two tree nodes. To avoid double-counting in this situation, we // subtract one if the last visible element also appears in this tree node. - let mut num_vis = child.index.visible_len(); + let mut num_vis = child.index.visible_len(self.encoding); if num_vis > 0 { // FIXME: I think this is wrong: we should subtract one only if this // subtree contains a *visible* (i.e. empty succs) operation for the list @@ -122,7 +121,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { // the last operation's elemId regardless of whether it's visible or not. // This will lead to incorrect counting if `last_seen` is not visible: it's // not counted towards `num_vis`, so we shouldn't be subtracting 1. - self.last_seen = Some(child.last().elemid_or_key()); + self.last_seen = Some(ops[child.last()].elemid_or_key()); } QueryResult::Next } @@ -130,28 +129,9 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { // Updating a map: operations appear in sorted order by key Key::Map(_) => { - if let Some(start) = self.start { - if self.pos + child.len() >= start { - // skip empty nodes - if child.index.visible_len() == 0 { - self.pos += child.len(); - QueryResult::Next - } else { - QueryResult::Descend - } - } else { - self.pos += child.len(); - QueryResult::Next - } - } else { - // in the root node find the first op position for the key - // Search for the place where we need to insert the new operation. 
First find the - // first op with a key >= the key we're updating - let start = binary_search_by(child, |op| m.key_cmp(&op.key, &self.op.key)); - self.start = Some(start); - self.pos = start; - QueryResult::Skip(start) - } + let start = binary_search_by(child, ops, |op| m.key_cmp(&op.key, &self.op.key)); + self.pos = start; + QueryResult::Skip(start) } } } @@ -176,6 +156,11 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { self.values.push(e); } self.succ.push(self.pos); + self.last_width = e.width(self.encoding); + + if e.visible() { + self.had_value_before = true; + } } else if e.visible() { self.values.push(e); } @@ -184,7 +169,6 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { // we reach an op with an opId greater than that of the new operation if m.lamport_cmp(e.id, self.op.id) == Ordering::Greater { self.found = true; - self.later_pos = self.pos + 1; return QueryResult::Next; } @@ -202,7 +186,6 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { if e.visible() { self.values.push(e); } - self.later_pos += 1; } QueryResult::Next } @@ -219,6 +202,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { self.values.push(e); } self.succ.push(self.pos); + self.last_width = e.width(self.encoding); } if e.visible() { self.had_value_before = true; @@ -236,6 +220,7 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { self.values.push(e); } self.succ.push(self.pos); + self.last_width = e.width(self.encoding); } // If the new op is an insertion, skip over any existing list elements whose elemId is @@ -284,3 +269,23 @@ impl<'a> TreeQuery<'a> for SeekOpWithPatch<'a> { } } } + +#[cfg(test)] +mod tests { + use super::{super::seek_op::tests::optree_with_only_internally_visible_ops, SeekOpWithPatch}; + use crate::{ + op_tree::B, + types::{ListEncoding, ObjId}, + }; + + #[test] + fn test_insert_on_internal_only_nodes() { + let (set, new_op) = optree_with_only_internally_visible_ops(); + + let q = SeekOpWithPatch::new(&new_op, ListEncoding::List); + let q = 
set.search(&ObjId::root(), q); + + // we've inserted `B - 1` elements for "a", so the index should be `B` + assert_eq!(q.pos, B); + } +} diff --git a/rust/automerge/src/read.rs b/rust/automerge/src/read.rs new file mode 100644 index 00000000..6d479718 --- /dev/null +++ b/rust/automerge/src/read.rs @@ -0,0 +1,199 @@ +use crate::{ + error::AutomergeError, exid::ExId, keys::Keys, keys_at::KeysAt, list_range::ListRange, + list_range_at::ListRangeAt, map_range::MapRange, map_range_at::MapRangeAt, parents::Parents, + values::Values, Change, ChangeHash, ObjType, Prop, Value, +}; + +use std::ops::RangeBounds; + +/// Methods for reading values from an automerge document +/// +/// Many of the methods on this trait have an alternate `*_at` version which +/// takes an additional argument of `&[ChangeHash]`. This allows you to retrieve +/// the value at a particular point in the document history identified by the +/// given change hashes. +pub trait ReadDoc { + /// Get the parents of an object in the document tree. + /// + /// See the documentation for [`Parents`] for more details. + /// + /// ### Errors + /// + /// Returns an error when the id given is not the id of an object in this document. + /// This function does not get the parents of scalar values contained within objects. + /// + /// ### Experimental + /// + /// This function may in future be changed to allow getting the parents from the id of a scalar + /// value. + fn parents>(&self, obj: O) -> Result, AutomergeError>; + + /// Get the path to an object + /// + /// "path" here means the sequence of `(object Id, key)` pairs which leads + /// to the object in question. + /// + /// ### Errors + /// + /// * If the object ID `obj` is not in the document + fn path_to_object>(&self, obj: O) -> Result, AutomergeError>; + + /// Get the keys of the object `obj`. + /// + /// For a map this returns the keys of the map. + /// For a list this returns the element ids (opids) encoded as strings. 
+ fn keys>(&self, obj: O) -> Keys<'_, '_>; + + /// Get the keys of the object `obj` as at `heads` + /// + /// See [`Self::keys`] + fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_>; + + /// Iterate over the keys and values of the map `obj` in the given range. + /// + /// If the object correspoding to `obj` is a list then this will return an empty iterator + /// + /// The returned iterator yields `(key, value, exid)` tuples, where the + /// third element is the ID of the operation which created the value. + fn map_range, R: RangeBounds>( + &self, + obj: O, + range: R, + ) -> MapRange<'_, R>; + + /// Iterate over the keys and values of the map `obj` in the given range as + /// at `heads` + /// + /// If the object correspoding to `obj` is a list then this will return an empty iterator + /// + /// The returned iterator yields `(key, value, exid)` tuples, where the + /// third element is the ID of the operation which created the value. + /// + /// See [`Self::map_range`] + fn map_range_at, R: RangeBounds>( + &self, + obj: O, + range: R, + heads: &[ChangeHash], + ) -> MapRangeAt<'_, R>; + + /// Iterate over the indexes and values of the list or text `obj` in the given range. + /// + /// The reuturned iterator yields `(index, value, exid)` tuples, where the third + /// element is the ID of the operation which created the value. + fn list_range, R: RangeBounds>( + &self, + obj: O, + range: R, + ) -> ListRange<'_, R>; + + /// Iterate over the indexes and values of the list or text `obj` in the given range as at `heads` + /// + /// The returned iterator yields `(index, value, exid)` tuples, where the third + /// element is the ID of the operation which created the value. 
+ /// + /// See [`Self::list_range`] + fn list_range_at, R: RangeBounds>( + &self, + obj: O, + range: R, + heads: &[ChangeHash], + ) -> ListRangeAt<'_, R>; + + /// Iterate over the values in a map, list, or text object + /// + /// The returned iterator yields `(value, exid)` tuples, where the second element + /// is the ID of the operation which created the value. + fn values>(&self, obj: O) -> Values<'_>; + + /// Iterate over the values in a map, list, or text object as at `heads` + /// + /// The returned iterator yields `(value, exid)` tuples, where the second element + /// is the ID of the operation which created the value. + /// + /// See [`Self::values`] + fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> Values<'_>; + + /// Get the length of the given object. + /// + /// If the given object is not in this document this method will return `0` + fn length>(&self, obj: O) -> usize; + + /// Get the length of the given object as at `heads` + /// + /// If the given object is not in this document this method will return `0` + /// + /// See [`Self::length`] + fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize; + + /// Get the type of this object, if it is an object. + fn object_type>(&self, obj: O) -> Result; + + /// Get the string represented by the given text object. + fn text>(&self, obj: O) -> Result; + + /// Get the string represented by the given text object as at `heads`, see + /// [`Self::text`] + fn text_at>( + &self, + obj: O, + heads: &[ChangeHash], + ) -> Result; + + /// Get a value out of the document. + /// + /// This returns a tuple of `(value, object ID)`. This is for two reasons: + /// + /// 1. If `value` is an object (represented by `Value::Object`) then the ID + /// is the ID of that object. This can then be used to retrieve nested + /// values from the document. + /// 2. Even if `value` is a scalar, the ID represents the operation which + /// created the value. 
This is useful if there are conflicting values for + /// this key as each value is tagged with the ID. + /// + /// In the case of a key which has conflicting values, this method will + /// return a single arbitrarily chosen value. This value will be chosen + /// deterministically on all nodes. If you want to get all the values for a + /// key use [`Self::get_all`]. + fn get, P: Into>( + &self, + obj: O, + prop: P, + ) -> Result, ExId)>, AutomergeError>; + + /// Get the value of the given key as at `heads`, see `[Self::get]` + fn get_at, P: Into>( + &self, + obj: O, + prop: P, + heads: &[ChangeHash], + ) -> Result, ExId)>, AutomergeError>; + + /// Get all conflicting values out of the document at this prop that conflict. + /// + /// If there are multiple conflicting values for a given key this method + /// will return all of them, with each value tagged by the ID of the + /// operation which created it. + fn get_all, P: Into>( + &self, + obj: O, + prop: P, + ) -> Result, ExId)>, AutomergeError>; + + /// Get all possibly conflicting values for a key as at `heads` + /// + /// See `[Self::get_all]` + fn get_all_at, P: Into>( + &self, + obj: O, + prop: P, + heads: &[ChangeHash], + ) -> Result, ExId)>, AutomergeError>; + + /// Get the hashes of the changes in this document that aren't transitive dependencies of the + /// given `heads`. + fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec; + + /// Get a change by its hash. 
+ fn get_change_by_hash(&self, hash: &ChangeHash) -> Option<&Change>; +} diff --git a/automerge/src/storage.rs b/rust/automerge/src/storage.rs similarity index 95% rename from automerge/src/storage.rs rename to rust/automerge/src/storage.rs index c8a2183d..5b3d03a7 100644 --- a/automerge/src/storage.rs +++ b/rust/automerge/src/storage.rs @@ -14,6 +14,7 @@ pub(crate) use { chunk::{CheckSum, Chunk, ChunkType, Header}, columns::{Columns, MismatchingColumn, RawColumn, RawColumns}, document::{AsChangeMeta, AsDocOp, ChangeMetadata, CompressConfig, DocOp, Document}, + load::VerificationMode, }; fn shift_range(range: Range, by: usize) -> Range { diff --git a/automerge/src/storage/change.rs b/rust/automerge/src/storage/change.rs similarity index 97% rename from automerge/src/storage/change.rs rename to rust/automerge/src/storage/change.rs index cbe014ac..61db0b00 100644 --- a/automerge/src/storage/change.rs +++ b/rust/automerge/src/storage/change.rs @@ -40,7 +40,7 @@ impl OpReadState for Unverified {} /// ReadChangeOpError>`. /// /// [1]: https://alexjg.github.io/automerge-storage-docs/#change-chunks -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug)] pub(crate) struct Change<'a, O: OpReadState> { /// The raw bytes of the entire chunk containing this change, including the header. 
bytes: Cow<'a, [u8]>, @@ -59,6 +59,12 @@ pub(crate) struct Change<'a, O: OpReadState> { _phantom: PhantomData, } +impl<'a, O: OpReadState> PartialEq for Change<'a, O> { + fn eq(&self, other: &Self) -> bool { + self.bytes == other.bytes + } +} + #[derive(thiserror::Error, Debug)] pub(crate) enum ParseError { #[error(transparent)] @@ -171,6 +177,9 @@ impl<'a> Change<'a, Unverified> { for op in self.iter_ops() { f(op?); } + if u32::try_from(u64::from(self.start_op)).is_err() { + return Err(ReadChangeOpError::CounterTooLarge); + } Ok(Change { bytes: self.bytes, header: self.header, @@ -461,7 +470,7 @@ impl ChangeBuilder, Set, Set, Set> { ); leb128::write::unsigned(&mut data, other_actors.len() as u64).unwrap(); for actor in other_actors.iter() { - length_prefixed_bytes(&actor, &mut data); + length_prefixed_bytes(actor, &mut data); } cols.raw_columns().write(&mut data); let ops_data_start = data.len(); diff --git a/automerge/src/storage/change/change_actors.rs b/rust/automerge/src/storage/change/change_actors.rs similarity index 100% rename from automerge/src/storage/change/change_actors.rs rename to rust/automerge/src/storage/change/change_actors.rs diff --git a/automerge/src/storage/change/change_op_columns.rs b/rust/automerge/src/storage/change/change_op_columns.rs similarity index 96% rename from automerge/src/storage/change/change_op_columns.rs rename to rust/automerge/src/storage/change/change_op_columns.rs index c50c67ae..86ec59c2 100644 --- a/automerge/src/storage/change/change_op_columns.rs +++ b/rust/automerge/src/storage/change/change_op_columns.rs @@ -14,6 +14,7 @@ use crate::{ }, }, convert, + error::InvalidOpType, storage::{ change::AsChangeOp, columns::{ @@ -22,6 +23,7 @@ use crate::{ RawColumns, }, types::{ElemId, ObjId, OpId, ScalarValue}, + OpType, }; const OBJ_COL_ID: ColumnId = ColumnId::new(0); @@ -177,7 +179,7 @@ impl ChangeOpsColumns { obj.append(op.obj()); key.append(op.key()); insert.append(op.insert()); - action.append_value(op.action() as 
u64); + action.append_value(op.action()); val.append(&op.val()); pred.append(op.pred()); } @@ -276,7 +278,14 @@ impl ChangeOpsColumns { #[derive(thiserror::Error, Debug)] #[error(transparent)] -pub struct ReadChangeOpError(#[from] DecodeColumnError); +pub enum ReadChangeOpError { + #[error(transparent)] + DecodeError(#[from] DecodeColumnError), + #[error(transparent)] + InvalidOpType(#[from] InvalidOpType), + #[error("counter too large")] + CounterTooLarge, +} #[derive(Clone)] pub(crate) struct ChangeOpsIter<'a> { @@ -308,6 +317,11 @@ impl<'a> ChangeOpsIter<'a> { let action = self.action.next_in_col("action")?; let val = self.val.next_in_col("value")?; let pred = self.pred.next_in_col("pred")?; + + // This check is necessary to ensure that OpType::from_action_and_value + // cannot panic later in the process. + OpType::validate_action_and_value(action, &val)?; + Ok(Some(ChangeOp { obj, key, @@ -458,10 +472,14 @@ mod tests { action in 0_u64..6, obj in opid(), insert in any::()) -> ChangeOp { + + let val = if action == 5 && !(value.is_int() || value.is_uint()) { + ScalarValue::Uint(0) + } else { value }; ChangeOp { obj: obj.into(), key, - val: value, + val, pred, action, insert, diff --git a/automerge/src/storage/change/compressed.rs b/rust/automerge/src/storage/change/compressed.rs similarity index 100% rename from automerge/src/storage/change/compressed.rs rename to rust/automerge/src/storage/change/compressed.rs diff --git a/automerge/src/storage/change/op_with_change_actors.rs b/rust/automerge/src/storage/change/op_with_change_actors.rs similarity index 100% rename from automerge/src/storage/change/op_with_change_actors.rs rename to rust/automerge/src/storage/change/op_with_change_actors.rs diff --git a/automerge/src/storage/chunk.rs b/rust/automerge/src/storage/chunk.rs similarity index 99% rename from automerge/src/storage/chunk.rs rename to rust/automerge/src/storage/chunk.rs index 821c2c55..d0048528 100644 --- a/automerge/src/storage/chunk.rs +++ 
b/rust/automerge/src/storage/chunk.rs @@ -258,7 +258,7 @@ impl Header { Header { checksum: checksum_bytes.into(), chunk_type, - data_len: data.len() as usize, + data_len: data.len(), header_size: header.len(), hash, }, @@ -286,7 +286,7 @@ impl Header { fn hash(typ: ChunkType, data: &[u8]) -> ChangeHash { let mut out = vec![u8::from(typ)]; leb128::write::unsigned(&mut out, data.len() as u64).unwrap(); - out.extend(data.as_ref()); + out.extend(data); let hash_result = Sha256::digest(out); let array: [u8; 32] = hash_result.into(); ChangeHash(array) diff --git a/automerge/src/storage/columns.rs b/rust/automerge/src/storage/columns.rs similarity index 100% rename from automerge/src/storage/columns.rs rename to rust/automerge/src/storage/columns.rs diff --git a/automerge/src/storage/columns/column.rs b/rust/automerge/src/storage/columns/column.rs similarity index 100% rename from automerge/src/storage/columns/column.rs rename to rust/automerge/src/storage/columns/column.rs diff --git a/automerge/src/storage/columns/column_builder.rs b/rust/automerge/src/storage/columns/column_builder.rs similarity index 100% rename from automerge/src/storage/columns/column_builder.rs rename to rust/automerge/src/storage/columns/column_builder.rs diff --git a/automerge/src/storage/columns/column_specification.rs b/rust/automerge/src/storage/columns/column_specification.rs similarity index 100% rename from automerge/src/storage/columns/column_specification.rs rename to rust/automerge/src/storage/columns/column_specification.rs diff --git a/automerge/src/storage/columns/raw_column.rs b/rust/automerge/src/storage/columns/raw_column.rs similarity index 91% rename from automerge/src/storage/columns/raw_column.rs rename to rust/automerge/src/storage/columns/raw_column.rs index 053c3c75..ac9a5759 100644 --- a/automerge/src/storage/columns/raw_column.rs +++ b/rust/automerge/src/storage/columns/raw_column.rs @@ -73,15 +73,19 @@ impl RawColumn { } } - fn decompress(&self, input: &[u8], out: &mut 
Vec) -> (ColumnSpec, usize) { + fn decompress( + &self, + input: &[u8], + out: &mut Vec, + ) -> Result<(ColumnSpec, usize), ParseError> { let len = if self.spec.deflate() { let mut inflater = flate2::bufread::DeflateDecoder::new(&input[self.data.clone()]); - inflater.read_to_end(out).unwrap() + inflater.read_to_end(out).map_err(ParseError::Deflate)? } else { out.extend(&input[self.data.clone()]); self.data.len() }; - (self.spec.inflated(), len) + Ok((self.spec.inflated(), len)) } } @@ -140,7 +144,7 @@ impl RawColumns { &self, input: &[u8], out: &mut Vec, - ) -> RawColumns { + ) -> Result, ParseError> { let mut result = Vec::with_capacity(self.0.len()); let mut start = 0; for col in &self.0 { @@ -148,7 +152,7 @@ impl RawColumns { out.extend(&input[decomp.data.clone()]); (decomp.spec, decomp.data.len()) } else { - col.decompress(input, out) + col.decompress(input, out)? }; result.push(RawColumn { spec, @@ -157,7 +161,7 @@ impl RawColumns { }); start += len; } - RawColumns(result) + Ok(RawColumns(result)) } } @@ -193,6 +197,8 @@ pub(crate) enum ParseError { NotInNormalOrder, #[error(transparent)] Leb128(#[from] parse::leb128::Error), + #[error(transparent)] + Deflate(#[from] std::io::Error), } impl RawColumns { @@ -213,7 +219,10 @@ impl RawColumns { let columns: Vec> = specs_and_lens .into_iter() .scan(0_usize, |offset, (spec, len)| { - let end = *offset + len as usize; + // Note: we use a saturating add here as len was passed over the network + // and so could be anything. If the addition does every saturate we would + // expect parsing to fail later (but at least it won't panic!). 
+ let end = offset.saturating_add(len as usize); let data = *offset..end; *offset = end; Some(RawColumn { diff --git a/automerge/src/storage/convert.rs b/rust/automerge/src/storage/convert.rs similarity index 100% rename from automerge/src/storage/convert.rs rename to rust/automerge/src/storage/convert.rs diff --git a/automerge/src/storage/convert/op_as_changeop.rs b/rust/automerge/src/storage/convert/op_as_changeop.rs similarity index 100% rename from automerge/src/storage/convert/op_as_changeop.rs rename to rust/automerge/src/storage/convert/op_as_changeop.rs diff --git a/automerge/src/storage/convert/op_as_docop.rs b/rust/automerge/src/storage/convert/op_as_docop.rs similarity index 100% rename from automerge/src/storage/convert/op_as_docop.rs rename to rust/automerge/src/storage/convert/op_as_docop.rs diff --git a/automerge/src/storage/document.rs b/rust/automerge/src/storage/document.rs similarity index 99% rename from automerge/src/storage/document.rs rename to rust/automerge/src/storage/document.rs index 500fbe85..ecef0bfd 100644 --- a/automerge/src/storage/document.rs +++ b/rust/automerge/src/storage/document.rs @@ -173,7 +173,8 @@ impl<'a> Document<'a> { raw_columns: ops_meta, }, extra_args: (), - }); + }) + .map_err(|e| parse::ParseError::Error(ParseError::RawColumns(e)))?; let ops_layout = Columns::parse(op_bytes.len(), ops.iter()).map_err(|e| { parse::ParseError::Error(ParseError::BadColumnLayout { diff --git a/automerge/src/storage/document/compression.rs b/rust/automerge/src/storage/document/compression.rs similarity index 83% rename from automerge/src/storage/document/compression.rs rename to rust/automerge/src/storage/document/compression.rs index f7daa127..2f0e96ce 100644 --- a/automerge/src/storage/document/compression.rs +++ b/rust/automerge/src/storage/document/compression.rs @@ -1,6 +1,9 @@ -use std::{borrow::Cow, ops::Range}; +use std::{borrow::Cow, convert::Infallible, ops::Range}; -use crate::storage::{columns::compression, shift_range, 
ChunkType, Header, RawColumns}; +use crate::storage::{ + columns::{compression, raw_column}, + shift_range, ChunkType, Header, RawColumns, +}; pub(super) struct Args<'a, T: compression::ColumnCompression, DirArgs> { /// The original data of the entire document chunk (compressed or uncompressed) @@ -23,40 +26,50 @@ pub(super) struct CompressArgs { } /// Compress a document chunk returning the compressed bytes -pub(super) fn compress<'a>(args: Args<'a, compression::Uncompressed, CompressArgs>) -> Vec { +pub(super) fn compress(args: Args<'_, compression::Uncompressed, CompressArgs>) -> Vec { let header_len = args.extra_args.original_header_len; let threshold = args.extra_args.threshold; - Compression::<'a, Compressing, _>::new( - args, - Compressing { - threshold, - header_len, - }, - ) - .changes() - .ops() - .write_data() - .finish() + // Wrap in a closure so we can use `?` in the construction but still force the compiler + // to check that the error type is `Infallible` + let result: Result<_, Infallible> = (|| { + Ok(Compression::::new( + args, + Compressing { + threshold, + header_len, + }, + ) + .changes()? + .ops()? + .write_data() + .finish()) + })(); + // We just checked the error is `Infallible` so unwrap is fine + result.unwrap() } -pub(super) fn decompress<'a>(args: Args<'a, compression::Unknown, ()>) -> Decompressed<'a> { +pub(super) fn decompress<'a>( + args: Args<'a, compression::Unknown, ()>, +) -> Result, raw_column::ParseError> { match ( args.changes.raw_columns.uncompressed(), args.ops.raw_columns.uncompressed(), ) { - (Some(changes), Some(ops)) => Decompressed { + (Some(changes), Some(ops)) => Ok(Decompressed { changes, ops, compressed: None, uncompressed: args.original, change_bytes: args.changes.data, op_bytes: args.ops.data, - }, - _ => Compression::<'a, Decompressing, _>::new(args, Decompressing) - .changes() - .ops() - .write_data() - .finish(), + }), + _ => Ok( + Compression::<'a, Decompressing, _>::new(args, Decompressing) + .changes()? 
+ .ops()? + .write_data() + .finish(), + ), } } @@ -94,6 +107,7 @@ pub(super) struct Cols { trait Direction: std::fmt::Debug { type Out: compression::ColumnCompression; type In: compression::ColumnCompression; + type Error; type Args; /// This method represents the (de)compression process for a direction. The arguments are: @@ -108,7 +122,7 @@ trait Direction: std::fmt::Debug { input: &[u8], out: &mut Vec, meta_out: &mut Vec, - ) -> Cols; + ) -> Result, Self::Error>; } #[derive(Debug)] struct Compressing { @@ -117,6 +131,7 @@ struct Compressing { } impl Direction for Compressing { + type Error = Infallible; type Out = compression::Unknown; type In = compression::Uncompressed; type Args = CompressArgs; @@ -127,16 +142,16 @@ impl Direction for Compressing { input: &[u8], out: &mut Vec, meta_out: &mut Vec, - ) -> Cols { + ) -> Result, Self::Error> { let start = out.len(); let raw_columns = cols .raw_columns .compress(&input[cols.data.clone()], out, self.threshold); raw_columns.write(meta_out); - Cols { + Ok(Cols { data: start..out.len(), raw_columns, - } + }) } } @@ -144,6 +159,7 @@ impl Direction for Compressing { struct Decompressing; impl Direction for Decompressing { + type Error = raw_column::ParseError; type Out = compression::Uncompressed; type In = compression::Unknown; type Args = (); @@ -154,14 +170,16 @@ impl Direction for Decompressing { input: &[u8], out: &mut Vec, meta_out: &mut Vec, - ) -> Cols { + ) -> Result, raw_column::ParseError> { let start = out.len(); - let raw_columns = cols.raw_columns.uncompress(&input[cols.data.clone()], out); + let raw_columns = cols + .raw_columns + .uncompress(&input[cols.data.clone()], out)?; raw_columns.write(meta_out); - Cols { + Ok(Cols { data: start..out.len(), raw_columns, - } + }) } } @@ -233,7 +251,7 @@ impl<'a, D: Direction> Compression<'a, D, Starting> { } impl<'a, D: Direction> Compression<'a, D, Starting> { - fn changes(self) -> Compression<'a, D, Changes> { + fn changes(self) -> Result>, D::Error> { let 
Starting { mut data_out, mut meta_out, @@ -243,8 +261,8 @@ impl<'a, D: Direction> Compression<'a, D, Starting> { &self.args.original, &mut data_out, &mut meta_out, - ); - Compression { + )?; + Ok(Compression { args: self.args, direction: self.direction, state: Changes { @@ -252,12 +270,12 @@ impl<'a, D: Direction> Compression<'a, D, Starting> { meta_out, data_out, }, - } + }) } } impl<'a, D: Direction> Compression<'a, D, Changes> { - fn ops(self) -> Compression<'a, D, ChangesAndOps> { + fn ops(self) -> Result>, D::Error> { let Changes { change_cols, mut meta_out, @@ -268,8 +286,8 @@ impl<'a, D: Direction> Compression<'a, D, Changes> { &self.args.original, &mut data_out, &mut meta_out, - ); - Compression { + )?; + Ok(Compression { args: self.args, direction: self.direction, state: ChangesAndOps { @@ -278,7 +296,7 @@ impl<'a, D: Direction> Compression<'a, D, Changes> { meta_out, data_out, }, - } + }) } } diff --git a/automerge/src/storage/document/doc_change_columns.rs b/rust/automerge/src/storage/document/doc_change_columns.rs similarity index 100% rename from automerge/src/storage/document/doc_change_columns.rs rename to rust/automerge/src/storage/document/doc_change_columns.rs diff --git a/automerge/src/storage/document/doc_op_columns.rs b/rust/automerge/src/storage/document/doc_op_columns.rs similarity index 99% rename from automerge/src/storage/document/doc_op_columns.rs rename to rust/automerge/src/storage/document/doc_op_columns.rs index 5f61dff8..82de17eb 100644 --- a/automerge/src/storage/document/doc_op_columns.rs +++ b/rust/automerge/src/storage/document/doc_op_columns.rs @@ -116,7 +116,7 @@ impl DocOpColumns { let key = KeyRange::encode(ops.clone().map(|o| o.key()), out); let id = OpIdRange::encode(ops.clone().map(|o| o.id()), out); let insert = BooleanRange::encode(ops.clone().map(|o| o.insert()), out); - let action = RleRange::encode(ops.clone().map(|o| Some(o.action() as u64)), out); + let action = RleRange::encode(ops.clone().map(|o| 
Some(o.action())), out); let val = ValueRange::encode(ops.clone().map(|o| o.val()), out); let succ = OpIdListRange::encode(ops.map(|o| o.succ()), out); Self { diff --git a/automerge/src/storage/load.rs b/rust/automerge/src/storage/load.rs similarity index 97% rename from automerge/src/storage/load.rs rename to rust/automerge/src/storage/load.rs index fe2e8429..80ab3d82 100644 --- a/automerge/src/storage/load.rs +++ b/rust/automerge/src/storage/load.rs @@ -8,7 +8,7 @@ use crate::{ mod change_collector; mod reconstruct_document; pub(crate) use reconstruct_document::{ - reconstruct_document, DocObserver, LoadedObject, Reconstructed, + reconstruct_document, DocObserver, LoadedObject, Reconstructed, VerificationMode, }; #[derive(Debug, thiserror::Error)] @@ -84,7 +84,7 @@ fn load_next_change<'a>( let Reconstructed { changes: new_changes, .. - } = reconstruct_document(&d, NullObserver) + } = reconstruct_document(&d, VerificationMode::DontCheck, NullObserver) .map_err(|e| Error::InflateDocument(Box::new(e)))?; changes.extend(new_changes); } diff --git a/automerge/src/storage/load/change_collector.rs b/rust/automerge/src/storage/load/change_collector.rs similarity index 91% rename from automerge/src/storage/load/change_collector.rs rename to rust/automerge/src/storage/load/change_collector.rs index 5a877a60..d05367a9 100644 --- a/automerge/src/storage/load/change_collector.rs +++ b/rust/automerge/src/storage/load/change_collector.rs @@ -26,6 +26,8 @@ pub(crate) enum Error { MissingChange, #[error("unable to read change metadata: {0}")] ReadChange(Box), + #[error("incorrect max op")] + IncorrectMaxOp, #[error("missing ops")] MissingOps, } @@ -52,7 +54,9 @@ impl<'a> ChangeCollector<'a> { let change = change.map_err(|e| Error::ReadChange(Box::new(e)))?; let actor_changes = changes_by_actor.entry(change.actor).or_default(); if let Some(prev) = actor_changes.last() { - if prev.max_op >= change.max_op { + // Note that we allow max_op to be equal to the previous max_op in case 
the + // previous change had no ops (which is permitted) + if prev.max_op > change.max_op { return Err(Error::ChangesOutOfOrder); } } @@ -178,7 +182,18 @@ impl<'a> PartialChange<'a> { .ops .iter() .map(|(obj, op)| op_as_actor_id(obj, op, metadata)); - let actor = metadata.actors.get(self.actor).clone(); + let actor = metadata + .actors + .safe_get(self.actor) + .ok_or_else(|| { + tracing::error!(actor_index = self.actor, "actor out of bounds"); + Error::MissingActor + })? + .clone(); + + if num_ops > self.max_op { + return Err(Error::IncorrectMaxOp); + } let change = match StoredChange::builder() .with_dependencies(deps) diff --git a/automerge/src/storage/load/reconstruct_document.rs b/rust/automerge/src/storage/load/reconstruct_document.rs similarity index 92% rename from automerge/src/storage/load/reconstruct_document.rs rename to rust/automerge/src/storage/load/reconstruct_document.rs index e8221e5c..44ace72a 100644 --- a/automerge/src/storage/load/reconstruct_document.rs +++ b/rust/automerge/src/storage/load/reconstruct_document.rs @@ -6,7 +6,7 @@ use crate::{ change::Change, columnar::Key as DocOpKey, op_tree::OpSetMetadata, - storage::{DocOp, Document}, + storage::{change::Verified, Change as StoredChange, DocOp, Document}, types::{ChangeHash, ElemId, Key, ObjId, ObjType, Op, OpId, OpIds, OpType}, ScalarValue, }; @@ -24,13 +24,29 @@ pub(crate) enum Error { #[error("invalid changes: {0}")] InvalidChanges(#[from] super::change_collector::Error), #[error("mismatching heads")] - MismatchingHeads, + MismatchingHeads(MismatchedHeads), #[error("missing operations")] MissingOps, #[error("succ out of order")] SuccOutOfOrder, } +pub(crate) struct MismatchedHeads { + changes: Vec>, + expected_heads: BTreeSet, + derived_heads: BTreeSet, +} + +impl std::fmt::Debug for MismatchedHeads { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("MismatchedHeads") + .field("changes", &self.changes.len()) + .field("expected_heads", 
&self.expected_heads) + .field("derived_heads", &self.derived_heads) + .finish() + } +} + /// All the operations loaded from an object in the document format pub(crate) struct LoadedObject { /// The id of the object @@ -67,9 +83,16 @@ pub(crate) struct Reconstructed { pub(crate) heads: BTreeSet, } +#[derive(Debug)] +pub enum VerificationMode { + Check, + DontCheck, +} + #[instrument(skip(doc, observer))] pub(crate) fn reconstruct_document<'a, O: DocObserver>( doc: &'a Document<'a>, + mode: VerificationMode, mut observer: O, ) -> Result, Error> { // The document format does not contain the bytes of the changes which are encoded in it @@ -185,10 +208,16 @@ pub(crate) fn reconstruct_document<'a, O: DocObserver>( let super::change_collector::CollectedChanges { history, heads } = collector.finish(&metadata)?; - let expected_heads: BTreeSet<_> = doc.heads().iter().cloned().collect(); - if expected_heads != heads { - tracing::error!(?expected_heads, ?heads, "mismatching heads"); - return Err(Error::MismatchingHeads); + if matches!(mode, VerificationMode::Check) { + let expected_heads: BTreeSet<_> = doc.heads().iter().cloned().collect(); + if expected_heads != heads { + tracing::error!(?expected_heads, ?heads, "mismatching heads"); + return Err(Error::MismatchingHeads(MismatchedHeads { + changes: history, + expected_heads, + derived_heads: heads, + })); + } } let result = observer.finish(metadata); diff --git a/automerge/src/storage/parse.rs b/rust/automerge/src/storage/parse.rs similarity index 99% rename from automerge/src/storage/parse.rs rename to rust/automerge/src/storage/parse.rs index 64419fda..6751afb4 100644 --- a/automerge/src/storage/parse.rs +++ b/rust/automerge/src/storage/parse.rs @@ -110,7 +110,7 @@ use crate::{ActorId, ChangeHash}; const HASH_SIZE: usize = 32; // 256 bits = 32 bytes #[allow(unused_imports)] -pub(crate) use self::leb128::{leb128_i32, leb128_i64, leb128_u32, leb128_u64, nonzero_leb128_u64}; +pub(crate) use self::leb128::{leb128_i64, 
leb128_u32, leb128_u64, nonzero_leb128_u64}; pub(crate) type ParseResult<'a, O, E> = Result<(Input<'a>, O), ParseError>; @@ -308,6 +308,7 @@ impl<'a> Input<'a> { } /// The bytes behind this input - including bytes which have been consumed + #[allow(clippy::misnamed_getters)] pub(crate) fn bytes(&self) -> &'a [u8] { self.original } diff --git a/rust/automerge/src/storage/parse/leb128.rs b/rust/automerge/src/storage/parse/leb128.rs new file mode 100644 index 00000000..9f5e72a2 --- /dev/null +++ b/rust/automerge/src/storage/parse/leb128.rs @@ -0,0 +1,302 @@ +use std::num::NonZeroU64; + +use super::{take1, Input, ParseError, ParseResult}; + +#[derive(PartialEq, thiserror::Error, Debug, Clone)] +pub(crate) enum Error { + #[error("leb128 was too large for the destination type")] + Leb128TooLarge, + #[error("leb128 was improperly encoded")] + Leb128Overlong, + #[error("leb128 was zero when it was expected to be nonzero")] + UnexpectedZero, +} + +pub(crate) fn leb128_u64(input: Input<'_>) -> ParseResult<'_, u64, E> +where + E: From, +{ + let mut res = 0; + let mut shift = 0; + let mut input = input; + + loop { + let (i, byte) = take1(input)?; + input = i; + res |= ((byte & 0x7F) as u64) << shift; + shift += 7; + + if (byte & 0x80) == 0 { + if shift > 64 && byte > 1 { + return Err(ParseError::Error(Error::Leb128TooLarge.into())); + } else if shift > 7 && byte == 0 { + return Err(ParseError::Error(Error::Leb128Overlong.into())); + } + return Ok((input, res)); + } else if shift > 64 { + return Err(ParseError::Error(Error::Leb128TooLarge.into())); + } + } +} + +pub(crate) fn leb128_i64(input: Input<'_>) -> ParseResult<'_, i64, E> +where + E: From, +{ + let mut res = 0; + let mut shift = 0; + + let mut input = input; + let mut prev = 0; + loop { + let (i, byte) = take1(input)?; + input = i; + res |= ((byte & 0x7F) as i64) << shift; + shift += 7; + + if (byte & 0x80) == 0 { + if shift > 64 && byte != 0 && byte != 0x7f { + // the 10th byte (if present) must contain only the 
sign-extended sign bit + return Err(ParseError::Error(Error::Leb128TooLarge.into())); + } else if shift > 7 + && ((byte == 0 && prev & 0x40 == 0) || (byte == 0x7f && prev & 0x40 > 0)) + { + // overlong if the sign bit of penultimate byte has been extended + return Err(ParseError::Error(Error::Leb128Overlong.into())); + } else if shift < 64 && byte & 0x40 > 0 { + // sign extend negative numbers + res |= -1 << shift; + } + return Ok((input, res)); + } else if shift > 64 { + return Err(ParseError::Error(Error::Leb128TooLarge.into())); + } + prev = byte; + } +} + +pub(crate) fn leb128_u32(input: Input<'_>) -> ParseResult<'_, u32, E> +where + E: From, +{ + let (i, num) = leb128_u64(input)?; + let result = u32::try_from(num).map_err(|_| ParseError::Error(Error::Leb128TooLarge.into()))?; + Ok((i, result)) +} + +/// Parse a LEB128 encoded u64 from the input, throwing an error if it is `0` +pub(crate) fn nonzero_leb128_u64(input: Input<'_>) -> ParseResult<'_, NonZeroU64, E> +where + E: From, +{ + let (input, num) = leb128_u64(input)?; + let result = + NonZeroU64::new(num).ok_or_else(|| ParseError::Error(Error::UnexpectedZero.into()))?; + Ok((input, result)) +} + +#[cfg(test)] +mod tests { + use super::super::Needed; + use super::*; + use std::num::NonZeroUsize; + + const NEED_ONE: Needed = Needed::Size(unsafe { NonZeroUsize::new_unchecked(1) }); + + #[test] + fn leb_128_u64() { + let one = &[0b00000001_u8]; + let one_two_nine = &[0b10000001, 0b00000001]; + let one_and_more = &[0b00000001, 0b00000011]; + + let scenarios: Vec<(&'static [u8], ParseResult<'_, u64, Error>)> = vec![ + (one, Ok((Input::with_position(one, 1), 1))), + ( + one_two_nine, + Ok((Input::with_position(one_two_nine, 2), 129)), + ), + (one_and_more, Ok((Input::with_position(one_and_more, 1), 1))), + ]; + for (index, (input, expected)) in scenarios.clone().into_iter().enumerate() { + let result = leb128_u64(Input::new(input)); + if result != expected { + panic!( + "Scenario {} failed for u64: expected {:?} 
got {:?}", + index + 1, + expected, + result + ); + } + } + + let error_cases: Vec<(&'static str, &'static [u8], ParseError<_>)> = vec![ + ( + "too many bytes", + &[129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129], + ParseError::Error(Error::Leb128TooLarge), + ), + ( + "too many bits", + &[129, 129, 129, 129, 129, 129, 129, 129, 129, 2], + ParseError::Error(Error::Leb128TooLarge), + ), + ( + "overlong encoding", + &[129, 0], + ParseError::Error(Error::Leb128Overlong), + ), + ("missing data", &[255], ParseError::Incomplete(NEED_ONE)), + ]; + error_cases.into_iter().for_each(|(desc, input, expected)| { + match leb128_u64::(Input::new(input)) { + Ok((_, x)) => panic!("leb128_u64 should fail with {}, got {}", desc, x), + Err(error) => { + if error != expected { + panic!("leb128_u64 should fail with {}, got {}", expected, error) + } + } + } + }); + + let success_cases: Vec<(&'static [u8], u64)> = vec![ + (&[0], 0), + (&[0x7f], 127), + (&[0x80, 0x01], 128), + (&[0xff, 0x7f], 16383), + ( + &[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x1], + u64::MAX, + ), + ]; + success_cases.into_iter().for_each(|(input, expected)| { + match leb128_u64::(Input::new(input)) { + Ok((_, x)) => { + if x != expected { + panic!("leb128_u64 should succeed with {}, got {}", expected, x) + } + } + Err(error) => panic!("leb128_u64 should succeed with {}, got {}", expected, error), + } + }); + } + + #[test] + fn leb_128_u32() { + let error_cases: Vec<(&'static str, &'static [u8], ParseError<_>)> = vec![ + ( + "too many bytes", + &[129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129], + ParseError::Error(Error::Leb128TooLarge), + ), + ( + "too many bits", + &[0xff, 0xff, 0xff, 0xff, 0x1f], + ParseError::Error(Error::Leb128TooLarge), + ), + ( + "overlong encoding", + &[129, 0], + ParseError::Error(Error::Leb128Overlong), + ), + ("missing data", &[0xaa], ParseError::Incomplete(NEED_ONE)), + ]; + error_cases.into_iter().for_each(|(desc, input, expected)| { + match 
leb128_u32::(Input::new(input)) { + Ok((_, x)) => panic!("leb128_u32 should fail with {}, got {}", desc, x), + Err(error) => { + if error != expected { + panic!("leb128_u32 should fail with {}, got {}", expected, error) + } + } + } + }); + + let success_cases: Vec<(&'static [u8], u32)> = vec![ + (&[0], 0), + (&[0x7f], 127), + (&[0x80, 0x01], 128), + (&[0xff, 0x7f], 16383), + (&[0xff, 0xff, 0xff, 0xff, 0x0f], u32::MAX), + ]; + success_cases.into_iter().for_each(|(input, expected)| { + match leb128_u32::(Input::new(input)) { + Ok((_, x)) => { + if x != expected { + panic!("leb128_u32 should succeed with {}, got {}", expected, x) + } + } + Err(error) => panic!("leb128_u64 should succeed with {}, got {}", expected, error), + } + }); + } + + #[test] + fn leb_128_i64() { + let error_cases: Vec<(&'static str, &'static [u8], ParseError<_>)> = vec![ + ( + "too many bytes", + &[129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129], + ParseError::Error(Error::Leb128TooLarge), + ), + ( + "too many positive bits", + &[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01], + ParseError::Error(Error::Leb128TooLarge), + ), + ( + "too many negative bits", + &[0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x7e], + ParseError::Error(Error::Leb128TooLarge), + ), + ( + "overlong positive encoding", + &[0xbf, 0], + ParseError::Error(Error::Leb128Overlong), + ), + ( + "overlong negative encoding", + &[0x81, 0xff, 0x7f], + ParseError::Error(Error::Leb128Overlong), + ), + ("missing data", &[0x90], ParseError::Incomplete(NEED_ONE)), + ]; + error_cases.into_iter().for_each(|(desc, input, expected)| { + match leb128_i64::(Input::new(input)) { + Ok((_, x)) => panic!("leb128_i64 should fail with {}, got {}", desc, x), + Err(error) => { + if error != expected { + panic!("leb128_i64 should fail with {}, got {}", expected, error) + } + } + } + }); + + let success_cases: Vec<(&'static [u8], i64)> = vec![ + (&[0], 0), + (&[0x7f], -1), + (&[0x3f], 63), + (&[0x40], -64), + (&[0x80, 
0x01], 128), + (&[0xff, 0x3f], 8191), + (&[0x80, 0x40], -8192), + ( + &[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0], + i64::MAX, + ), + ( + &[0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x7f], + i64::MIN, + ), + ]; + success_cases.into_iter().for_each(|(input, expected)| { + match leb128_i64::(Input::new(input)) { + Ok((_, x)) => { + if x != expected { + panic!("leb128_i64 should succeed with {}, got {}", expected, x) + } + } + Err(error) => panic!("leb128_u64 should succeed with {}, got {}", expected, error), + } + }); + } +} diff --git a/automerge/src/storage/save.rs b/rust/automerge/src/storage/save.rs similarity index 100% rename from automerge/src/storage/save.rs rename to rust/automerge/src/storage/save.rs diff --git a/automerge/src/storage/save/document.rs b/rust/automerge/src/storage/save/document.rs similarity index 100% rename from automerge/src/storage/save/document.rs rename to rust/automerge/src/storage/save/document.rs diff --git a/rust/automerge/src/sync.rs b/rust/automerge/src/sync.rs new file mode 100644 index 00000000..d6dc2580 --- /dev/null +++ b/rust/automerge/src/sync.rs @@ -0,0 +1,963 @@ +//! # Sync Protocol +//! +//! The sync protocol is based on this paper: +//! , it assumes a reliable in-order stream +//! between two peers who are synchronizing a document. +//! +//! Each peer maintains a [`State`] for each peer they are synchronizing with. +//! This state tracks things like what the heads of the other peer are and +//! whether there are in-flight messages. Anything which implements [`SyncDoc`] +//! can take part in the sync protocol. The flow goes something like this: +//! +//! * The initiating peer creates an empty [`State`] and then calls +//! [`SyncDoc::generate_sync_message`] to generate new sync message and sends +//! it to the receiving peer. +//! * The receiving peer receives a message from the initiator, creates a new +//! [`State`], and calls [`SyncDoc::receive_sync_message`] on it's view of the +//! 
document +//! * The receiving peer then calls [`SyncDoc::generate_sync_message`] to generate +//! a new sync message and send it back to the initiator +//! * From this point on each peer operates in a loop, receiving a sync message +//! from the other peer and then generating a new message to send back. +//! +//! ## Example +//! +//! ``` +//! use automerge::{transaction::Transactable, sync::{self, SyncDoc}, ReadDoc}; +//! # fn main() -> Result<(), automerge::AutomergeError> { +//! // Create a document on peer1 +//! let mut peer1 = automerge::AutoCommit::new(); +//! peer1.put(automerge::ROOT, "key", "value")?; +//! +//! // Create a state to track our sync with peer2 +//! let mut peer1_state = sync::State::new(); +//! // Generate the initial message to send to peer2, unwrap for brevity +//! let message1to2 = peer1.sync().generate_sync_message(&mut peer1_state).unwrap(); +//! +//! // We receive the message on peer2. We don't have a document at all yet +//! // so we create one +//! let mut peer2 = automerge::AutoCommit::new(); +//! // We don't have a state for peer1 (it's a new connection), so we create one +//! let mut peer2_state = sync::State::new(); +//! // Now receive the message from peer 1 +//! peer2.sync().receive_sync_message(&mut peer2_state, message1to2)?; +//! +//! // Now we loop, sending messages from one to two and two to one until +//! // neither has anything new to send +//! +//! loop { +//! let two_to_one = peer2.sync().generate_sync_message(&mut peer2_state); +//! if let Some(message) = two_to_one.as_ref() { +//! println!("two to one"); +//! peer1.sync().receive_sync_message(&mut peer1_state, message.clone())?; +//! } +//! let one_to_two = peer1.sync().generate_sync_message(&mut peer1_state); +//! if let Some(message) = one_to_two.as_ref() { +//! println!("one to two"); +//! peer2.sync().receive_sync_message(&mut peer2_state, message.clone())?; +//! } +//! if two_to_one.is_none() && one_to_two.is_none() { +//! break; +//! } +//! } +//! +//! 
assert_eq!(peer2.get(automerge::ROOT, "key")?.unwrap().0.to_str(), Some("value")); +//! +//! # Ok(()) +//! # } +//! ``` + +use itertools::Itertools; +use serde::ser::SerializeMap; +use std::collections::{HashMap, HashSet}; + +use crate::{ + storage::{parse, Change as StoredChange, ReadChangeOpError}, + Automerge, AutomergeError, Change, ChangeHash, OpObserver, ReadDoc, +}; + +mod bloom; +mod state; + +pub use bloom::{BloomFilter, DecodeError as DecodeBloomError}; +pub use state::DecodeError as DecodeStateError; +pub use state::{Have, State}; + +/// A document which can take part in the sync protocol +/// +/// See the [module level documentation](crate::sync) for more details. +pub trait SyncDoc { + /// Generate a sync message for the remote peer represented by `sync_state` + /// + /// If this returns `None` then there are no new messages to send, either because we are + /// waiting for an acknolwedgement of an in-flight message, or because the remote is up to + /// date. + fn generate_sync_message(&self, sync_state: &mut State) -> Option; + + /// Apply a received sync message to this document and `sync_state` + fn receive_sync_message( + &mut self, + sync_state: &mut State, + message: Message, + ) -> Result<(), AutomergeError>; + + /// Apply a received sync message to this document and `sync_state`, observing any changes with + /// `op_observer` + fn receive_sync_message_with( + &mut self, + sync_state: &mut State, + message: Message, + op_observer: &mut Obs, + ) -> Result<(), AutomergeError>; +} + +const MESSAGE_TYPE_SYNC: u8 = 0x42; // first byte of a sync message, for identification + +impl SyncDoc for Automerge { + fn generate_sync_message(&self, sync_state: &mut State) -> Option { + let our_heads = self.get_heads(); + + let our_need = self.get_missing_deps(sync_state.their_heads.as_ref().unwrap_or(&vec![])); + + let their_heads_set = if let Some(ref heads) = sync_state.their_heads { + heads.iter().collect::>() + } else { + HashSet::new() + }; + let our_have = 
if our_need.iter().all(|hash| their_heads_set.contains(hash)) { + vec![self.make_bloom_filter(sync_state.shared_heads.clone())] + } else { + Vec::new() + }; + + if let Some(ref their_have) = sync_state.their_have { + if let Some(first_have) = their_have.first().as_ref() { + if !first_have + .last_sync + .iter() + .all(|hash| self.get_change_by_hash(hash).is_some()) + { + let reset_msg = Message { + heads: our_heads, + need: Vec::new(), + have: vec![Have::default()], + changes: Vec::new(), + }; + return Some(reset_msg); + } + } + } + + let changes_to_send = if let (Some(their_have), Some(their_need)) = ( + sync_state.their_have.as_ref(), + sync_state.their_need.as_ref(), + ) { + self.get_changes_to_send(their_have, their_need) + .expect("Should have only used hashes that are in the document") + } else { + Vec::new() + }; + + let heads_unchanged = sync_state.last_sent_heads == our_heads; + + let heads_equal = if let Some(their_heads) = sync_state.their_heads.as_ref() { + their_heads == &our_heads + } else { + false + }; + + // deduplicate the changes to send with those we have already sent and clone it now + let changes_to_send = changes_to_send + .into_iter() + .filter_map(|change| { + if !sync_state.sent_hashes.contains(&change.hash()) { + Some(change.clone()) + } else { + None + } + }) + .collect::>(); + + if heads_unchanged { + if heads_equal && changes_to_send.is_empty() { + return None; + } + if sync_state.in_flight { + return None; + } + } + + sync_state.last_sent_heads = our_heads.clone(); + sync_state + .sent_hashes + .extend(changes_to_send.iter().map(|c| c.hash())); + + let sync_message = Message { + heads: our_heads, + have: our_have, + need: our_need, + changes: changes_to_send, + }; + + sync_state.in_flight = true; + Some(sync_message) + } + + fn receive_sync_message( + &mut self, + sync_state: &mut State, + message: Message, + ) -> Result<(), AutomergeError> { + self.do_receive_sync_message::<()>(sync_state, message, None) + } + + fn 
receive_sync_message_with( + &mut self, + sync_state: &mut State, + message: Message, + op_observer: &mut Obs, + ) -> Result<(), AutomergeError> { + self.do_receive_sync_message(sync_state, message, Some(op_observer)) + } +} + +impl Automerge { + fn make_bloom_filter(&self, last_sync: Vec) -> Have { + let new_changes = self + .get_changes(&last_sync) + .expect("Should have only used hashes that are in the document"); + let hashes = new_changes.iter().map(|change| change.hash()); + Have { + last_sync, + bloom: BloomFilter::from_hashes(hashes), + } + } + + fn get_changes_to_send( + &self, + have: &[Have], + need: &[ChangeHash], + ) -> Result, AutomergeError> { + if have.is_empty() { + Ok(need + .iter() + .filter_map(|hash| self.get_change_by_hash(hash)) + .collect()) + } else { + let mut last_sync_hashes = HashSet::new(); + let mut bloom_filters = Vec::with_capacity(have.len()); + + for h in have { + let Have { last_sync, bloom } = h; + last_sync_hashes.extend(last_sync); + bloom_filters.push(bloom); + } + let last_sync_hashes = last_sync_hashes.into_iter().copied().collect::>(); + + let changes = self.get_changes(&last_sync_hashes)?; + + let mut change_hashes = HashSet::with_capacity(changes.len()); + let mut dependents: HashMap> = HashMap::new(); + let mut hashes_to_send = HashSet::new(); + + for change in &changes { + change_hashes.insert(change.hash()); + + for dep in change.deps() { + dependents.entry(*dep).or_default().push(change.hash()); + } + + if bloom_filters + .iter() + .all(|bloom| !bloom.contains_hash(&change.hash())) + { + hashes_to_send.insert(change.hash()); + } + } + + let mut stack = hashes_to_send.iter().copied().collect::>(); + while let Some(hash) = stack.pop() { + if let Some(deps) = dependents.get(&hash) { + for dep in deps { + if hashes_to_send.insert(*dep) { + stack.push(*dep); + } + } + } + } + + let mut changes_to_send = Vec::new(); + for hash in need { + if !hashes_to_send.contains(hash) { + if let Some(change) = 
self.get_change_by_hash(hash) { + changes_to_send.push(change); + } + } + } + + for change in changes { + if hashes_to_send.contains(&change.hash()) { + changes_to_send.push(change); + } + } + Ok(changes_to_send) + } + } + + fn do_receive_sync_message( + &mut self, + sync_state: &mut State, + message: Message, + op_observer: Option<&mut Obs>, + ) -> Result<(), AutomergeError> { + let before_heads = self.get_heads(); + + let Message { + heads: message_heads, + changes: message_changes, + need: message_need, + have: message_have, + } = message; + + let changes_is_empty = message_changes.is_empty(); + if !changes_is_empty { + self.apply_changes_with(message_changes, op_observer)?; + sync_state.shared_heads = advance_heads( + &before_heads.iter().collect(), + &self.get_heads().into_iter().collect(), + &sync_state.shared_heads, + ); + } + + // trim down the sent hashes to those that we know they haven't seen + self.filter_changes(&message_heads, &mut sync_state.sent_hashes)?; + + if changes_is_empty && message_heads == before_heads { + sync_state.last_sent_heads = message_heads.clone(); + } + + if sync_state.sent_hashes.is_empty() { + sync_state.in_flight = false; + } + + let known_heads = message_heads + .iter() + .filter(|head| self.get_change_by_hash(head).is_some()) + .collect::>(); + if known_heads.len() == message_heads.len() { + sync_state.shared_heads = message_heads.clone(); + sync_state.in_flight = false; + // If the remote peer has lost all its data, reset our state to perform a full resync + if message_heads.is_empty() { + sync_state.last_sent_heads = Default::default(); + sync_state.sent_hashes = Default::default(); + } + } else { + sync_state.shared_heads = sync_state + .shared_heads + .iter() + .chain(known_heads) + .copied() + .unique() + .sorted() + .collect::>(); + } + + sync_state.their_have = Some(message_have); + sync_state.their_heads = Some(message_heads); + sync_state.their_need = Some(message_need); + + Ok(()) + } +} + +#[derive(Debug, 
thiserror::Error)] +pub enum ReadMessageError { + #[error("expected {expected_one_of:?} but found {found}")] + WrongType { expected_one_of: Vec, found: u8 }, + #[error("{0}")] + Parse(String), + #[error(transparent)] + ReadChangeOps(#[from] ReadChangeOpError), + #[error("not enough input")] + NotEnoughInput, +} + +impl From for ReadMessageError { + fn from(e: parse::leb128::Error) -> Self { + ReadMessageError::Parse(e.to_string()) + } +} + +impl From for ReadMessageError { + fn from(e: bloom::ParseError) -> Self { + ReadMessageError::Parse(e.to_string()) + } +} + +impl From for ReadMessageError { + fn from(e: crate::storage::change::ParseError) -> Self { + ReadMessageError::Parse(format!("error parsing changes: {}", e)) + } +} + +impl From for parse::ParseError { + fn from(e: ReadMessageError) -> Self { + parse::ParseError::Error(e) + } +} + +impl From> for ReadMessageError { + fn from(p: parse::ParseError) -> Self { + match p { + parse::ParseError::Error(e) => e, + parse::ParseError::Incomplete(..) => Self::NotEnoughInput, + } + } +} + +/// The sync message to be sent. +#[derive(Clone, Debug, PartialEq)] +pub struct Message { + /// The heads of the sender. + pub heads: Vec, + /// The hashes of any changes that are being explicitly requested from the recipient. + pub need: Vec, + /// A summary of the changes that the sender already has. + pub have: Vec, + /// The changes for the recipient to apply. 
+ pub changes: Vec, +} + +impl serde::Serialize for Message { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + let mut map = serializer.serialize_map(Some(4))?; + map.serialize_entry("heads", &self.heads)?; + map.serialize_entry("need", &self.need)?; + map.serialize_entry("have", &self.have)?; + map.serialize_entry( + "changes", + &self + .changes + .iter() + .map(crate::ExpandedChange::from) + .collect::>(), + )?; + map.end() + } +} + +fn parse_have(input: parse::Input<'_>) -> parse::ParseResult<'_, Have, ReadMessageError> { + let (i, last_sync) = parse::length_prefixed(parse::change_hash)(input)?; + let (i, bloom_bytes) = parse::length_prefixed_bytes(i)?; + let (_, bloom) = BloomFilter::parse(parse::Input::new(bloom_bytes)).map_err(|e| e.lift())?; + Ok((i, Have { last_sync, bloom })) +} + +impl Message { + pub fn decode(input: &[u8]) -> Result { + let input = parse::Input::new(input); + match Self::parse(input) { + Ok((_, msg)) => Ok(msg), + Err(parse::ParseError::Error(e)) => Err(e), + Err(parse::ParseError::Incomplete(_)) => Err(ReadMessageError::NotEnoughInput), + } + } + + pub(crate) fn parse(input: parse::Input<'_>) -> parse::ParseResult<'_, Self, ReadMessageError> { + let (i, message_type) = parse::take1(input)?; + if message_type != MESSAGE_TYPE_SYNC { + return Err(parse::ParseError::Error(ReadMessageError::WrongType { + expected_one_of: vec![MESSAGE_TYPE_SYNC], + found: message_type, + })); + } + + let (i, heads) = parse::length_prefixed(parse::change_hash)(i)?; + let (i, need) = parse::length_prefixed(parse::change_hash)(i)?; + let (i, have) = parse::length_prefixed(parse_have)(i)?; + + let change_parser = |i| { + let (i, bytes) = parse::length_prefixed_bytes(i)?; + let (_, change) = + StoredChange::parse(parse::Input::new(bytes)).map_err(|e| e.lift())?; + Ok((i, change)) + }; + let (i, stored_changes) = parse::length_prefixed(change_parser)(i)?; + let changes_len = stored_changes.len(); + let changes: Vec = 
stored_changes + .into_iter() + .try_fold::<_, _, Result<_, ReadMessageError>>( + Vec::with_capacity(changes_len), + |mut acc, stored| { + let change = Change::new_from_unverified(stored.into_owned(), None) + .map_err(ReadMessageError::ReadChangeOps)?; + acc.push(change); + Ok(acc) + }, + )?; + + Ok(( + i, + Message { + heads, + need, + have, + changes, + }, + )) + } + + pub fn encode(mut self) -> Vec { + let mut buf = vec![MESSAGE_TYPE_SYNC]; + + encode_hashes(&mut buf, &self.heads); + encode_hashes(&mut buf, &self.need); + encode_many(&mut buf, self.have.iter(), |buf, h| { + encode_hashes(buf, &h.last_sync); + leb128::write::unsigned(buf, h.bloom.to_bytes().len() as u64).unwrap(); + buf.extend(h.bloom.to_bytes()); + }); + + encode_many(&mut buf, self.changes.iter_mut(), |buf, change| { + leb128::write::unsigned(buf, change.raw_bytes().len() as u64).unwrap(); + buf.extend::<&[u8]>(change.raw_bytes().as_ref()) + }); + + buf + } +} + +fn encode_many<'a, I, It, F>(out: &mut Vec, data: I, f: F) +where + I: Iterator + ExactSizeIterator + 'a, + F: Fn(&mut Vec, It), +{ + leb128::write::unsigned(out, data.len() as u64).unwrap(); + for datum in data { + f(out, datum) + } +} + +fn encode_hashes(buf: &mut Vec, hashes: &[ChangeHash]) { + debug_assert!( + hashes.windows(2).all(|h| h[0] <= h[1]), + "hashes were not sorted" + ); + encode_many(buf, hashes.iter(), |buf, hash| buf.extend(hash.as_bytes())) +} + +fn advance_heads( + my_old_heads: &HashSet<&ChangeHash>, + my_new_heads: &HashSet, + our_old_shared_heads: &[ChangeHash], +) -> Vec { + let new_heads = my_new_heads + .iter() + .filter(|head| !my_old_heads.contains(head)) + .copied() + .collect::>(); + + let common_heads = our_old_shared_heads + .iter() + .filter(|head| my_new_heads.contains(head)) + .copied() + .collect::>(); + + let mut advanced_heads = HashSet::with_capacity(new_heads.len() + common_heads.len()); + for head in new_heads.into_iter().chain(common_heads) { + advanced_heads.insert(head); + } + let mut 
advanced_heads = advanced_heads.into_iter().collect::>(); + advanced_heads.sort(); + advanced_heads +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::change::gen::gen_change; + use crate::storage::parse::Input; + use crate::transaction::Transactable; + use crate::types::gen::gen_hash; + use crate::ActorId; + use proptest::prelude::*; + + prop_compose! { + fn gen_bloom()(hashes in gen_sorted_hashes(0..10)) -> BloomFilter { + BloomFilter::from_hashes(hashes.into_iter()) + } + } + + prop_compose! { + fn gen_have()(bloom in gen_bloom(), last_sync in gen_sorted_hashes(0..10)) -> Have { + Have { + bloom, + last_sync, + } + } + } + + fn gen_sorted_hashes(size: std::ops::Range) -> impl Strategy> { + proptest::collection::vec(gen_hash(), size).prop_map(|mut h| { + h.sort(); + h + }) + } + + prop_compose! { + fn gen_sync_message()( + heads in gen_sorted_hashes(0..10), + need in gen_sorted_hashes(0..10), + have in proptest::collection::vec(gen_have(), 0..10), + changes in proptest::collection::vec(gen_change(), 0..10), + ) -> Message { + Message { + heads, + need, + have, + changes, + } + } + + } + + #[test] + fn encode_decode_empty_message() { + let msg = Message { + heads: vec![], + need: vec![], + have: vec![], + changes: vec![], + }; + let encoded = msg.encode(); + Message::parse(Input::new(&encoded)).unwrap(); + } + + proptest! 
{ + #[test] + fn encode_decode_message(msg in gen_sync_message()) { + let encoded = msg.clone().encode(); + let (i, decoded) = Message::parse(Input::new(&encoded)).unwrap(); + assert!(i.is_empty()); + assert_eq!(msg, decoded); + } + } + + #[test] + fn generate_sync_message_twice_does_nothing() { + let mut doc = crate::AutoCommit::new(); + doc.put(crate::ROOT, "key", "value").unwrap(); + let mut sync_state = State::new(); + + assert!(doc.sync().generate_sync_message(&mut sync_state).is_some()); + assert!(doc.sync().generate_sync_message(&mut sync_state).is_none()); + } + + #[test] + fn should_not_reply_if_we_have_no_data() { + let mut doc1 = crate::AutoCommit::new(); + let mut doc2 = crate::AutoCommit::new(); + let mut s1 = State::new(); + let mut s2 = State::new(); + let m1 = doc1 + .sync() + .generate_sync_message(&mut s1) + .expect("message was none"); + + doc2.sync().receive_sync_message(&mut s2, m1).unwrap(); + let m2 = doc2.sync().generate_sync_message(&mut s2); + assert!(m2.is_none()); + } + + #[test] + fn should_allow_simultaneous_messages_during_synchronisation() { + // create & synchronize two nodes + let mut doc1 = crate::AutoCommit::new().with_actor(ActorId::try_from("abc123").unwrap()); + let mut doc2 = crate::AutoCommit::new().with_actor(ActorId::try_from("def456").unwrap()); + let mut s1 = State::new(); + let mut s2 = State::new(); + + for i in 0..5 { + doc1.put(&crate::ROOT, "x", i).unwrap(); + doc1.commit(); + doc2.put(&crate::ROOT, "y", i).unwrap(); + doc2.commit(); + } + + let head1 = doc1.get_heads()[0]; + let head2 = doc2.get_heads()[0]; + + //// both sides report what they have but have no shared peer state + let msg1to2 = doc1 + .sync() + .generate_sync_message(&mut s1) + .expect("initial sync from 1 to 2 was None"); + let msg2to1 = doc2 + .sync() + .generate_sync_message(&mut s2) + .expect("initial sync message from 2 to 1 was None"); + assert_eq!(msg1to2.changes.len(), 0); + assert_eq!(msg1to2.have[0].last_sync.len(), 0); + 
assert_eq!(msg2to1.changes.len(), 0); + assert_eq!(msg2to1.have[0].last_sync.len(), 0); + + //// doc1 and doc2 receive that message and update sync state + doc1.sync().receive_sync_message(&mut s1, msg2to1).unwrap(); + doc2.sync().receive_sync_message(&mut s2, msg1to2).unwrap(); + + //// now both reply with their local changes the other lacks + //// (standard warning that 1% of the time this will result in a "need" message) + let msg1to2 = doc1 + .sync() + .generate_sync_message(&mut s1) + .expect("first reply from 1 to 2 was None"); + assert_eq!(msg1to2.changes.len(), 5); + + let msg2to1 = doc2 + .sync() + .generate_sync_message(&mut s2) + .expect("first reply from 2 to 1 was None"); + assert_eq!(msg2to1.changes.len(), 5); + + //// both should now apply the changes + doc1.sync().receive_sync_message(&mut s1, msg2to1).unwrap(); + assert_eq!(doc1.get_missing_deps(&[]), Vec::new()); + + doc2.sync().receive_sync_message(&mut s2, msg1to2).unwrap(); + assert_eq!(doc2.get_missing_deps(&[]), Vec::new()); + + //// The response acknowledges the changes received and sends no further changes + let msg1to2 = doc1 + .sync() + .generate_sync_message(&mut s1) + .expect("second reply from 1 to 2 was None"); + assert_eq!(msg1to2.changes.len(), 0); + let msg2to1 = doc2 + .sync() + .generate_sync_message(&mut s2) + .expect("second reply from 2 to 1 was None"); + assert_eq!(msg2to1.changes.len(), 0); + + //// After receiving acknowledgements, their shared heads should be equal + doc1.sync().receive_sync_message(&mut s1, msg2to1).unwrap(); + doc2.sync().receive_sync_message(&mut s2, msg1to2).unwrap(); + + assert_eq!(s1.shared_heads, s2.shared_heads); + + //// We're in sync, no more messages required + assert!(doc1.sync().generate_sync_message(&mut s1).is_none()); + assert!(doc2.sync().generate_sync_message(&mut s2).is_none()); + + //// If we make one more change and start another sync then its lastSync should be updated + doc1.put(crate::ROOT, "x", 5).unwrap(); + doc1.commit(); + let 
msg1to2 = doc1 + .sync() + .generate_sync_message(&mut s1) + .expect("third reply from 1 to 2 was None"); + let mut expected_heads = vec![head1, head2]; + expected_heads.sort(); + let mut actual_heads = msg1to2.have[0].last_sync.clone(); + actual_heads.sort(); + assert_eq!(actual_heads, expected_heads); + } + + #[test] + fn should_handle_false_positive_head() { + // Scenario: ,-- n1 + // c0 <-- c1 <-- c2 <-- c3 <-- c4 <-- c5 <-- c6 <-- c7 <-- c8 <-- c9 <-+ + // `-- n2 + // where n2 is a false positive in the Bloom filter containing {n1}. + // lastSync is c9. + + let mut doc1 = crate::AutoCommit::new().with_actor(ActorId::try_from("abc123").unwrap()); + let mut doc2 = crate::AutoCommit::new().with_actor(ActorId::try_from("def456").unwrap()); + let mut s1 = State::new(); + let mut s2 = State::new(); + + for i in 0..10 { + doc1.put(crate::ROOT, "x", i).unwrap(); + doc1.commit(); + } + + sync(&mut doc1, &mut doc2, &mut s1, &mut s2); + + // search for false positive; see comment above + let mut i = 0; + let (mut doc1, mut doc2) = loop { + let mut doc1copy = doc1 + .clone() + .with_actor(ActorId::try_from("01234567").unwrap()); + let val1 = format!("{} @ n1", i); + doc1copy.put(crate::ROOT, "x", val1).unwrap(); + doc1copy.commit(); + + let mut doc2copy = doc1 + .clone() + .with_actor(ActorId::try_from("89abcdef").unwrap()); + let val2 = format!("{} @ n2", i); + doc2copy.put(crate::ROOT, "x", val2).unwrap(); + doc2copy.commit(); + + let n1_bloom = BloomFilter::from_hashes(doc1copy.get_heads().into_iter()); + if n1_bloom.contains_hash(&doc2copy.get_heads()[0]) { + break (doc1copy, doc2copy); + } + i += 1; + }; + + let mut all_heads = doc1.get_heads(); + all_heads.extend(doc2.get_heads()); + all_heads.sort(); + + // reset sync states + let (_, mut s1) = State::parse(Input::new(s1.encode().as_slice())).unwrap(); + let (_, mut s2) = State::parse(Input::new(s2.encode().as_slice())).unwrap(); + sync(&mut doc1, &mut doc2, &mut s1, &mut s2); + assert_eq!(doc1.get_heads(), 
all_heads); + assert_eq!(doc2.get_heads(), all_heads); + } + + #[test] + fn should_handle_chains_of_false_positives() { + //// Scenario: ,-- c5 + //// c0 <-- c1 <-- c2 <-- c3 <-- c4 <-+ + //// `-- n2c1 <-- n2c2 <-- n2c3 + //// where n2c1 and n2c2 are both false positives in the Bloom filter containing {c5}. + //// lastSync is c4. + let mut doc1 = crate::AutoCommit::new().with_actor(ActorId::try_from("abc123").unwrap()); + let mut doc2 = crate::AutoCommit::new().with_actor(ActorId::try_from("def456").unwrap()); + let mut s1 = State::new(); + let mut s2 = State::new(); + + for i in 0..10 { + doc1.put(crate::ROOT, "x", i).unwrap(); + doc1.commit(); + } + + sync(&mut doc1, &mut doc2, &mut s1, &mut s2); + + doc1.put(crate::ROOT, "x", 5).unwrap(); + doc1.commit(); + let bloom = BloomFilter::from_hashes(doc1.get_heads().into_iter()); + + // search for false positive; see comment above + let mut i = 0; + let mut doc2 = loop { + let mut doc = doc2 + .fork() + .with_actor(ActorId::try_from("89abcdef").unwrap()); + doc.put(crate::ROOT, "x", format!("{} at 89abdef", i)) + .unwrap(); + doc.commit(); + if bloom.contains_hash(&doc.get_heads()[0]) { + break doc; + } + i += 1; + }; + + // find another false positive building on the first + i = 0; + let mut doc2 = loop { + let mut doc = doc2 + .fork() + .with_actor(ActorId::try_from("89abcdef").unwrap()); + doc.put(crate::ROOT, "x", format!("{} again", i)).unwrap(); + doc.commit(); + if bloom.contains_hash(&doc.get_heads()[0]) { + break doc; + } + i += 1; + }; + + doc2.put(crate::ROOT, "x", "final @ 89abcdef").unwrap(); + + let mut all_heads = doc1.get_heads(); + all_heads.extend(doc2.get_heads()); + all_heads.sort(); + + let (_, mut s1) = State::parse(Input::new(s1.encode().as_slice())).unwrap(); + let (_, mut s2) = State::parse(Input::new(s2.encode().as_slice())).unwrap(); + sync(&mut doc1, &mut doc2, &mut s1, &mut s2); + assert_eq!(doc1.get_heads(), all_heads); + assert_eq!(doc2.get_heads(), all_heads); + } + + #[test] + fn 
should_handle_lots_of_branching_and_merging() { + let mut doc1 = crate::AutoCommit::new().with_actor(ActorId::try_from("01234567").unwrap()); + let mut doc2 = crate::AutoCommit::new().with_actor(ActorId::try_from("89abcdef").unwrap()); + let mut doc3 = crate::AutoCommit::new().with_actor(ActorId::try_from("fedcba98").unwrap()); + let mut s1 = State::new(); + let mut s2 = State::new(); + + doc1.put(crate::ROOT, "x", 0).unwrap(); + let change1 = doc1.get_last_local_change().unwrap().clone(); + + doc2.apply_changes([change1.clone()]).unwrap(); + doc3.apply_changes([change1]).unwrap(); + + doc3.put(crate::ROOT, "x", 1).unwrap(); + + //// - n1c1 <------ n1c2 <------ n1c3 <-- etc. <-- n1c20 <------ n1c21 + //// / \/ \/ \/ + //// / /\ /\ /\ + //// c0 <---- n2c1 <------ n2c2 <------ n2c3 <-- etc. <-- n2c20 <------ n2c21 + //// \ / + //// ---------------------------------------------- n3c1 <----- + for i in 1..20 { + doc1.put(crate::ROOT, "n1", i).unwrap(); + doc2.put(crate::ROOT, "n2", i).unwrap(); + let change1 = doc1.get_last_local_change().unwrap().clone(); + let change2 = doc2.get_last_local_change().unwrap().clone(); + doc1.apply_changes([change2.clone()]).unwrap(); + doc2.apply_changes([change1]).unwrap(); + } + + sync(&mut doc1, &mut doc2, &mut s1, &mut s2); + + //// Having n3's last change concurrent to the last sync heads forces us into the slower code path + let change3 = doc3.get_last_local_change().unwrap().clone(); + doc2.apply_changes([change3]).unwrap(); + + doc1.put(crate::ROOT, "n1", "final").unwrap(); + doc2.put(crate::ROOT, "n1", "final").unwrap(); + + sync(&mut doc1, &mut doc2, &mut s1, &mut s2); + + assert_eq!(doc1.get_heads(), doc2.get_heads()); + } + + fn sync( + a: &mut crate::AutoCommit, + b: &mut crate::AutoCommit, + a_sync_state: &mut State, + b_sync_state: &mut State, + ) { + //function sync(a: Automerge, b: Automerge, aSyncState = initSyncState(), bSyncState = initSyncState()) { + const MAX_ITER: usize = 10; + let mut iterations = 0; + + loop { 
+ let a_to_b = a.sync().generate_sync_message(a_sync_state); + let b_to_a = b.sync().generate_sync_message(b_sync_state); + if a_to_b.is_none() && b_to_a.is_none() { + break; + } + if iterations > MAX_ITER { + panic!("failed to sync in {} iterations", MAX_ITER); + } + if let Some(msg) = a_to_b { + b.sync().receive_sync_message(b_sync_state, msg).unwrap() + } + if let Some(msg) = b_to_a { + a.sync().receive_sync_message(a_sync_state, msg).unwrap() + } + iterations += 1; + } + } +} diff --git a/automerge/src/sync/bloom.rs b/rust/automerge/src/sync/bloom.rs similarity index 93% rename from automerge/src/sync/bloom.rs rename to rust/automerge/src/sync/bloom.rs index aff3dc13..8523061e 100644 --- a/automerge/src/sync/bloom.rs +++ b/rust/automerge/src/sync/bloom.rs @@ -9,7 +9,7 @@ use crate::ChangeHash; const BITS_PER_ENTRY: u32 = 10; const NUM_PROBES: u32 = 7; -#[derive(Default, Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize)] pub struct BloomFilter { num_entries: u32, num_bits_per_entry: u32, @@ -17,6 +17,17 @@ pub struct BloomFilter { bits: Vec, } +impl Default for BloomFilter { + fn default() -> Self { + BloomFilter { + num_entries: 0, + num_bits_per_entry: BITS_PER_ENTRY, + num_probes: NUM_PROBES, + bits: Vec::new(), + } + } +} + #[derive(Debug, thiserror::Error)] pub(crate) enum ParseError { #[error(transparent)] @@ -115,7 +126,7 @@ impl BloomFilter { let num_entries = hashes.len() as u32; let num_bits_per_entry = BITS_PER_ENTRY; let num_probes = NUM_PROBES; - let bits = vec![0; bits_capacity(num_entries, num_bits_per_entry) as usize]; + let bits = vec![0; bits_capacity(num_entries, num_bits_per_entry)]; let mut filter = Self { num_entries, num_bits_per_entry, diff --git a/automerge/src/sync/state.rs b/rust/automerge/src/sync/state.rs similarity index 69% rename from automerge/src/sync/state.rs rename to rust/automerge/src/sync/state.rs index 5a34aad1..354c605f 100644 --- a/automerge/src/sync/state.rs +++ 
b/rust/automerge/src/sync/state.rs @@ -23,20 +23,39 @@ impl From for DecodeError { } /// The state of synchronisation with a peer. +/// +/// This should be persisted using [`Self::encode`] when you know you will be interacting with the +/// same peer in multiple sessions. [`Self::encode`] only encodes state which should be reused +/// across connections. #[derive(Debug, Clone, Default, PartialEq, Eq, Hash)] pub struct State { + /// The hashes which we know both peers have pub shared_heads: Vec, + /// The heads we last sent pub last_sent_heads: Vec, + /// The heads we last received from them pub their_heads: Option>, + /// Any specific changes they last said they needed pub their_need: Option>, + /// The bloom filters summarising what they said they have pub their_have: Option>, + /// The hashes we have sent in this session pub sent_hashes: BTreeSet, + + /// `generate_sync_message` should return `None` if there are no new changes to send. In + /// particular, if there are changes in flight which the other end has not yet acknowledged we + /// do not wish to generate duplicate sync messages. This field tracks whether the changes we + /// expect to send to the peer based on this sync state have been sent or not. If + /// `in_flight` is `false` then `generate_sync_message` will return a new message (provided + /// there are in fact changes to send). If it is `true` then we don't. This flag is cleared + /// in `receive_sync_message`. + pub in_flight: bool, } /// A summary of the changes that the sender of the message already has. /// This is implicitly a request to the recipient to send all changes that the /// sender does not already have. -#[derive(Debug, Clone, Default, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, serde::Serialize)] pub struct Have { /// The heads at the time of the last successful sync with this recipient. 
pub last_sync: Vec, @@ -84,6 +103,7 @@ impl State { their_need: None, their_have: Some(Vec::new()), sent_hashes: BTreeSet::new(), + in_flight: false, }, )) } diff --git a/automerge/src/transaction.rs b/rust/automerge/src/transaction.rs similarity index 69% rename from automerge/src/transaction.rs rename to rust/automerge/src/transaction.rs index f97fa7e5..b513bc63 100644 --- a/automerge/src/transaction.rs +++ b/rust/automerge/src/transaction.rs @@ -1,13 +1,15 @@ mod commit; mod inner; mod manual_transaction; +pub(crate) mod observation; mod result; mod transactable; pub use self::commit::CommitOptions; pub use self::transactable::Transactable; -pub(crate) use inner::TransactionInner; +pub(crate) use inner::{TransactionArgs, TransactionInner}; pub use manual_transaction::Transaction; +pub use observation::{Observation, Observed, UnObserved}; pub use result::Failure; pub use result::Success; diff --git a/automerge/src/transaction/commit.rs b/rust/automerge/src/transaction/commit.rs similarity index 100% rename from automerge/src/transaction/commit.rs rename to rust/automerge/src/transaction/commit.rs diff --git a/rust/automerge/src/transaction/inner.rs b/rust/automerge/src/transaction/inner.rs new file mode 100644 index 00000000..0fe735d5 --- /dev/null +++ b/rust/automerge/src/transaction/inner.rs @@ -0,0 +1,731 @@ +use std::num::NonZeroU64; + +use crate::exid::ExId; +use crate::query::{self, OpIdSearch}; +use crate::storage::Change as StoredChange; +use crate::types::{Key, ListEncoding, ObjId, OpId, OpIds, TextEncoding}; +use crate::{op_tree::OpSetMetadata, types::Op, Automerge, Change, ChangeHash, OpObserver, Prop}; +use crate::{AutomergeError, ObjType, OpType, ScalarValue}; + +#[derive(Debug, Clone)] +pub(crate) struct TransactionInner { + actor: usize, + seq: u64, + start_op: NonZeroU64, + time: i64, + message: Option, + deps: Vec, + operations: Vec<(ObjId, Op)>, +} + +/// Arguments required to create a new transaction +pub(crate) struct TransactionArgs { + /// 
The index of the actor ID this transaction will create ops for in the + /// [`OpSetMetadata::actors`] + pub(crate) actor_index: usize, + /// The sequence number of the change this transaction will create + pub(crate) seq: u64, + /// The start op of the change this transaction will create + pub(crate) start_op: NonZeroU64, + /// The dependencies of the change this transaction will create + pub(crate) deps: Vec, +} + +impl TransactionInner { + pub(crate) fn new( + TransactionArgs { + actor_index: actor, + seq, + start_op, + deps, + }: TransactionArgs, + ) -> Self { + TransactionInner { + actor, + seq, + start_op, + time: 0, + message: None, + operations: vec![], + deps, + } + } + + /// Create an empty change + pub(crate) fn empty( + doc: &mut Automerge, + args: TransactionArgs, + message: Option, + time: Option, + ) -> ChangeHash { + Self::new(args).commit_impl(doc, message, time) + } + + pub(crate) fn pending_ops(&self) -> usize { + self.operations.len() + } + + /// Commit the operations performed in this transaction, returning the hashes corresponding to + /// the new heads. 
+ /// + /// Returns `None` if there were no operations to commit + #[tracing::instrument(skip(self, doc))] + pub(crate) fn commit( + self, + doc: &mut Automerge, + message: Option, + time: Option, + ) -> Option { + if self.pending_ops() == 0 { + return None; + } + Some(self.commit_impl(doc, message, time)) + } + + pub(crate) fn commit_impl( + mut self, + doc: &mut Automerge, + message: Option, + time: Option, + ) -> ChangeHash { + if message.is_some() { + self.message = message; + } + + if let Some(t) = time { + self.time = t; + } + + let num_ops = self.pending_ops(); + let change = self.export(&doc.ops().m); + let hash = change.hash(); + #[cfg(not(debug_assertions))] + tracing::trace!(commit=?hash, deps=?change.deps(), "committing transaction"); + #[cfg(debug_assertions)] + { + let ops = change.iter_ops().collect::>(); + tracing::trace!(commit=?hash, ?ops, deps=?change.deps(), "committing transaction"); + } + doc.update_history(change, num_ops); + debug_assert_eq!(doc.get_heads(), vec![hash]); + hash + } + + #[tracing::instrument(skip(self, metadata))] + pub(crate) fn export(self, metadata: &OpSetMetadata) -> Change { + use crate::storage::{change::PredOutOfOrder, convert::op_as_actor_id}; + + let actor = metadata.actors.get(self.actor).clone(); + let deps = self.deps.clone(); + let stored = match StoredChange::builder() + .with_actor(actor) + .with_seq(self.seq) + .with_start_op(self.start_op) + .with_message(self.message.clone()) + .with_dependencies(deps) + .with_timestamp(self.time) + .build( + self.operations + .iter() + .map(|(obj, op)| op_as_actor_id(obj, op, metadata)), + ) { + Ok(s) => s, + Err(PredOutOfOrder) => { + // SAFETY: types::Op::preds is `types::OpIds` which ensures ops are always sorted + panic!("preds out of order"); + } + }; + #[cfg(debug_assertions)] + { + let realized_ops = self.operations.iter().collect::>(); + tracing::trace!(?stored, ops=?realized_ops, "committing change"); + } + #[cfg(not(debug_assertions))] + tracing::trace!(?stored, 
"committing change"); + Change::new(stored) + } + + /// Undo the operations added in this transaction, returning the number of cancelled + /// operations. + pub(crate) fn rollback(self, doc: &mut Automerge) -> usize { + let num = self.pending_ops(); + // remove in reverse order so sets are removed before makes etc... + for (obj, op) in self.operations.into_iter().rev() { + for pred_id in &op.pred { + if let Some(p) = doc.ops().search(&obj, OpIdSearch::new(*pred_id)).index() { + doc.ops_mut().change_vis(&obj, p, |o| o.remove_succ(&op)); + } + } + if let Some(pos) = doc.ops().search(&obj, OpIdSearch::new(op.id)).index() { + doc.ops_mut().remove(&obj, pos); + } + } + + doc.rollback_last_actor(); + + num + } + + /// Set the value of property `P` to value `V` in object `obj`. + /// + /// # Returns + /// + /// The opid of the operation which was created, or None if this operation doesn't change the + /// document + /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + pub(crate) fn put, V: Into, Obs: OpObserver>( + &mut self, + doc: &mut Automerge, + op_observer: Option<&mut Obs>, + ex_obj: &ExId, + prop: P, + value: V, + ) -> Result<(), AutomergeError> { + let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; + let value = value.into(); + let prop = prop.into(); + match (&prop, obj_type) { + (Prop::Map(_), ObjType::Map) => Ok(()), + (Prop::Seq(_), ObjType::List) => Ok(()), + (Prop::Seq(_), ObjType::Text) => Ok(()), + _ => Err(AutomergeError::InvalidOp(obj_type)), + }?; + self.local_op(doc, op_observer, obj, prop, value.into())?; + Ok(()) + } + + /// Set the value of property `P` to value `V` in object `obj`. 
+ /// + /// # Returns + /// + /// The opid of the operation which was created, or None if this operation doesn't change the + /// document + /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + pub(crate) fn put_object, Obs: OpObserver>( + &mut self, + doc: &mut Automerge, + op_observer: Option<&mut Obs>, + ex_obj: &ExId, + prop: P, + value: ObjType, + ) -> Result { + let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; + let prop = prop.into(); + match (&prop, obj_type) { + (Prop::Map(_), ObjType::Map) => Ok(()), + (Prop::Seq(_), ObjType::List) => Ok(()), + _ => Err(AutomergeError::InvalidOp(obj_type)), + }?; + let id = self + .local_op(doc, op_observer, obj, prop, value.into())? + .unwrap(); + let id = doc.id_to_exid(id); + Ok(id) + } + + fn next_id(&mut self) -> OpId { + OpId::new(self.start_op.get() + self.pending_ops() as u64, self.actor) + } + + fn next_insert(&mut self, key: Key, value: ScalarValue) -> Op { + Op { + id: self.next_id(), + action: OpType::Put(value), + key, + succ: Default::default(), + pred: Default::default(), + insert: true, + } + } + + fn next_delete(&mut self, key: Key, pred: OpIds) -> Op { + Op { + id: self.next_id(), + action: OpType::Delete, + key, + succ: Default::default(), + pred, + insert: false, + } + } + + #[allow(clippy::too_many_arguments)] + fn insert_local_op( + &mut self, + doc: &mut Automerge, + op_observer: Option<&mut Obs>, + prop: Prop, + op: Op, + pos: usize, + obj: ObjId, + succ_pos: &[usize], + ) { + doc.ops_mut().add_succ(&obj, succ_pos, &op); + + if !op.is_delete() { + doc.ops_mut().insert(pos, &obj, op.clone()); + } + + self.finalize_op(doc, op_observer, obj, prop, op); + } + + pub(crate) fn insert, Obs: OpObserver>( + &mut self, + doc: &mut Automerge, + op_observer: Option<&mut Obs>, + ex_obj: &ExId, + index: usize, + value: V, + ) -> Result<(), AutomergeError> { + let (obj, obj_type) 
= doc.exid_to_obj(ex_obj)?; + if !matches!(obj_type, ObjType::List | ObjType::Text) { + return Err(AutomergeError::InvalidOp(obj_type)); + } + let value = value.into(); + tracing::trace!(obj=?obj, value=?value, "inserting value"); + self.do_insert(doc, op_observer, obj, index, value.into())?; + Ok(()) + } + + pub(crate) fn insert_object( + &mut self, + doc: &mut Automerge, + op_observer: Option<&mut Obs>, + ex_obj: &ExId, + index: usize, + value: ObjType, + ) -> Result { + let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; + if !matches!(obj_type, ObjType::List | ObjType::Text) { + return Err(AutomergeError::InvalidOp(obj_type)); + } + let id = self.do_insert(doc, op_observer, obj, index, value.into())?; + let id = doc.id_to_exid(id); + Ok(id) + } + + fn do_insert( + &mut self, + doc: &mut Automerge, + op_observer: Option<&mut Obs>, + obj: ObjId, + index: usize, + action: OpType, + ) -> Result { + let id = self.next_id(); + + let query = doc + .ops() + .search(&obj, query::InsertNth::new(index, ListEncoding::List)); + + let key = query.key()?; + + let op = Op { + id, + action, + key, + succ: Default::default(), + pred: Default::default(), + insert: true, + }; + + doc.ops_mut().insert(query.pos(), &obj, op.clone()); + + self.finalize_op(doc, op_observer, obj, Prop::Seq(index), op); + + Ok(id) + } + + pub(crate) fn local_op( + &mut self, + doc: &mut Automerge, + op_observer: Option<&mut Obs>, + obj: ObjId, + prop: Prop, + action: OpType, + ) -> Result, AutomergeError> { + match prop { + Prop::Map(s) => self.local_map_op(doc, op_observer, obj, s, action), + Prop::Seq(n) => self.local_list_op(doc, op_observer, obj, n, action), + } + } + + fn local_map_op( + &mut self, + doc: &mut Automerge, + op_observer: Option<&mut Obs>, + obj: ObjId, + prop: String, + action: OpType, + ) -> Result, AutomergeError> { + if prop.is_empty() { + return Err(AutomergeError::EmptyStringKey); + } + + let id = self.next_id(); + let prop_index = doc.ops_mut().m.props.cache(prop.clone()); + let 
query = doc.ops().search(&obj, query::Prop::new(prop_index)); + + // no key present to delete + if query.ops.is_empty() && action == OpType::Delete { + return Ok(None); + } + + if query.ops.len() == 1 && query.ops[0].is_noop(&action) { + return Ok(None); + } + + // increment operations are only valid against counter values. + // if there are multiple values (from conflicts) then we just need one of them to be a counter. + if matches!(action, OpType::Increment(_)) && query.ops.iter().all(|op| !op.is_counter()) { + return Err(AutomergeError::MissingCounter); + } + + let pred = doc.ops().m.sorted_opids(query.ops.iter().map(|o| o.id)); + + let op = Op { + id, + action, + key: Key::Map(prop_index), + succ: Default::default(), + pred, + insert: false, + }; + + let pos = query.pos; + let ops_pos = query.ops_pos; + self.insert_local_op(doc, op_observer, Prop::Map(prop), op, pos, obj, &ops_pos); + + Ok(Some(id)) + } + + fn local_list_op( + &mut self, + doc: &mut Automerge, + op_observer: Option<&mut Obs>, + obj: ObjId, + index: usize, + action: OpType, + ) -> Result, AutomergeError> { + let query = doc + .ops() + .search(&obj, query::Nth::new(index, ListEncoding::List)); + + let id = self.next_id(); + let pred = doc.ops().m.sorted_opids(query.ops.iter().map(|o| o.id)); + let key = query.key()?; + + if query.ops.len() == 1 && query.ops[0].is_noop(&action) { + return Ok(None); + } + + // increment operations are only valid against counter values. + // if there are multiple values (from conflicts) then we just need one of them to be a counter. 
+ if matches!(action, OpType::Increment(_)) && query.ops.iter().all(|op| !op.is_counter()) { + return Err(AutomergeError::MissingCounter); + } + + let op = Op { + id, + action, + key, + succ: Default::default(), + pred, + insert: false, + }; + + let pos = query.pos; + let ops_pos = query.ops_pos; + self.insert_local_op(doc, op_observer, Prop::Seq(index), op, pos, obj, &ops_pos); + + Ok(Some(id)) + } + + pub(crate) fn increment, Obs: OpObserver>( + &mut self, + doc: &mut Automerge, + op_observer: Option<&mut Obs>, + obj: &ExId, + prop: P, + value: i64, + ) -> Result<(), AutomergeError> { + let obj = doc.exid_to_obj(obj)?.0; + self.local_op(doc, op_observer, obj, prop.into(), OpType::Increment(value))?; + Ok(()) + } + + pub(crate) fn delete, Obs: OpObserver>( + &mut self, + doc: &mut Automerge, + op_observer: Option<&mut Obs>, + ex_obj: &ExId, + prop: P, + ) -> Result<(), AutomergeError> { + let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; + let prop = prop.into(); + if obj_type == ObjType::Text { + let index = prop.to_index().ok_or(AutomergeError::InvalidOp(obj_type))?; + self.inner_splice( + doc, + op_observer, + SpliceArgs { + obj, + index, + del: 1, + values: vec![], + splice_type: SpliceType::Text("", doc.text_encoding()), + }, + )?; + } else { + self.local_op(doc, op_observer, obj, prop, OpType::Delete)?; + } + Ok(()) + } + + /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert + /// the new elements + pub(crate) fn splice( + &mut self, + doc: &mut Automerge, + op_observer: Option<&mut Obs>, + ex_obj: &ExId, + index: usize, + del: usize, + vals: impl IntoIterator, + ) -> Result<(), AutomergeError> { + let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; + if !matches!(obj_type, ObjType::List | ObjType::Text) { + return Err(AutomergeError::InvalidOp(obj_type)); + } + let values = vals.into_iter().collect(); + self.inner_splice( + doc, + op_observer, + SpliceArgs { + obj, + index, + del, + values, + splice_type: SpliceType::List, + }, + ) + } + + /// Splice string into a text object + pub(crate) fn splice_text( + &mut self, + doc: &mut Automerge, + op_observer: Option<&mut Obs>, + ex_obj: &ExId, + index: usize, + del: usize, + text: &str, + ) -> Result<(), AutomergeError> { + let (obj, obj_type) = doc.exid_to_obj(ex_obj)?; + if obj_type != ObjType::Text { + return Err(AutomergeError::InvalidOp(obj_type)); + } + let values = text.chars().map(ScalarValue::from).collect(); + self.inner_splice( + doc, + op_observer, + SpliceArgs { + obj, + index, + del, + values, + splice_type: SpliceType::Text(text, doc.text_encoding()), + }, + ) + } + + fn inner_splice( + &mut self, + doc: &mut Automerge, + mut op_observer: Option<&mut Obs>, + SpliceArgs { + obj, + mut index, + mut del, + values, + splice_type, + }: SpliceArgs<'_>, + ) -> Result<(), AutomergeError> { + let ex_obj = doc.ops().id_to_exid(obj.0); + let encoding = splice_type.encoding(); + // delete `del` items - performing the query for each one + let mut deleted = 0; + while deleted < del { + // TODO: could do this with a single custom query + let query = doc.ops().search(&obj, query::Nth::new(index, encoding)); + + // if we delete in the middle of a multi-character + // move cursor back to the beginning and expand the del width + let adjusted_index = query.index(); + if adjusted_index < index { + del += index - adjusted_index; + index = adjusted_index; + 
} + + let step = if let Some(op) = query.ops.last() { + op.width(encoding) + } else { + break; + }; + + let op = self.next_delete(query.key()?, query.pred(doc.ops())); + + let ops_pos = query.ops_pos; + doc.ops_mut().add_succ(&obj, &ops_pos, &op); + + self.operations.push((obj, op)); + + deleted += step; + } + + if deleted > 0 { + if let Some(obs) = op_observer.as_mut() { + obs.delete_seq(doc, ex_obj.clone(), index, deleted); + } + } + + // do the insert query for the first item and then + // insert the remaining ops one after the other + if !values.is_empty() { + let query = doc + .ops() + .search(&obj, query::InsertNth::new(index, encoding)); + let mut pos = query.pos(); + let mut key = query.key()?; + let mut cursor = index; + let mut width = 0; + + for v in &values { + let op = self.next_insert(key, v.clone()); + + doc.ops_mut().insert(pos, &obj, op.clone()); + + width = op.width(encoding); + cursor += width; + pos += 1; + key = op.id.into(); + + self.operations.push((obj, op)); + } + + doc.ops_mut().hint(&obj, cursor - width, pos - 1); + + // handle the observer + if let Some(obs) = op_observer.as_mut() { + match splice_type { + SpliceType::Text(text, _) if !obs.text_as_seq() => { + obs.splice_text(doc, ex_obj, index, text) + } + SpliceType::List | SpliceType::Text(..) => { + let start = self.operations.len() - values.len(); + for (offset, v) in values.iter().enumerate() { + let op = &self.operations[start + offset].1; + let value = (v.clone().into(), doc.ops().id_to_exid(op.id)); + obs.insert(doc, ex_obj.clone(), index + offset, value) + } + } + } + } + } + + Ok(()) + } + + fn finalize_op( + &mut self, + doc: &mut Automerge, + op_observer: Option<&mut Obs>, + obj: ObjId, + prop: Prop, + op: Op, + ) { + // TODO - id_to_exid should be a noop if not used - change type to Into? 
+ if let Some(op_observer) = op_observer { + let ex_obj = doc.ops().id_to_exid(obj.0); + if op.insert { + let obj_type = doc.ops().object_type(&obj); + assert!(obj_type.unwrap().is_sequence()); + match (obj_type, prop) { + (Some(ObjType::List), Prop::Seq(index)) => { + let value = (op.value(), doc.ops().id_to_exid(op.id)); + op_observer.insert(doc, ex_obj, index, value) + } + (Some(ObjType::Text), Prop::Seq(index)) => { + // FIXME + if op_observer.text_as_seq() { + let value = (op.value(), doc.ops().id_to_exid(op.id)); + op_observer.insert(doc, ex_obj, index, value) + } else { + op_observer.splice_text(doc, ex_obj, index, op.to_str()) + } + } + _ => {} + } + } else if op.is_delete() { + op_observer.delete(doc, ex_obj, prop); + } else if let Some(value) = op.get_increment_value() { + op_observer.increment(doc, ex_obj, prop, (value, doc.ops().id_to_exid(op.id))); + } else { + let value = (op.value(), doc.ops().id_to_exid(op.id)); + op_observer.put(doc, ex_obj, prop, value, false); + } + } + self.operations.push((obj, op)); + } +} + +enum SpliceType<'a> { + List, + Text(&'a str, TextEncoding), +} + +impl<'a> SpliceType<'a> { + fn encoding(&self) -> ListEncoding { + match self { + SpliceType::List => ListEncoding::List, + SpliceType::Text(_, encoding) => ListEncoding::Text(*encoding), + } + } +} + +struct SpliceArgs<'a> { + obj: ObjId, + index: usize, + del: usize, + values: Vec, + splice_type: SpliceType<'a>, +} + +#[cfg(test)] +mod tests { + use crate::{transaction::Transactable, ReadDoc, ROOT}; + + use super::*; + + #[test] + fn map_rollback_doesnt_panic() { + let mut doc = Automerge::new(); + let mut tx = doc.transaction(); + + let a = tx.put_object(ROOT, "a", ObjType::Map).unwrap(); + tx.put(&a, "b", 1).unwrap(); + assert!(tx.get(&a, "b").unwrap().is_some()); + } +} diff --git a/automerge/src/transaction/manual_transaction.rs b/rust/automerge/src/transaction/manual_transaction.rs similarity index 64% rename from automerge/src/transaction/manual_transaction.rs 
rename to rust/automerge/src/transaction/manual_transaction.rs index 695866ad..fa5f6340 100644 --- a/automerge/src/transaction/manual_transaction.rs +++ b/rust/automerge/src/transaction/manual_transaction.rs @@ -1,11 +1,14 @@ use std::ops::RangeBounds; use crate::exid::ExId; -use crate::{Automerge, ChangeHash, KeysAt, ObjType, OpObserver, Prop, ScalarValue, Value, Values}; +use crate::op_observer::BranchableObserver; +use crate::{ + Automerge, ChangeHash, KeysAt, ObjType, OpObserver, Prop, ReadDoc, ScalarValue, Value, Values, +}; use crate::{AutomergeError, Keys}; use crate::{ListRange, ListRangeAt, MapRange, MapRangeAt}; -use super::{CommitOptions, Transactable, TransactionInner}; +use super::{observation, CommitOptions, Transactable, TransactionArgs, TransactionInner}; /// A transaction on a document. /// Transactions group operations into a single change so that no other operations can happen @@ -20,15 +23,42 @@ use super::{CommitOptions, Transactable, TransactionInner}; /// intermediate state. /// This is consistent with `?` error handling. #[derive(Debug)] -pub struct Transaction<'a, Obs: OpObserver> { +pub struct Transaction<'a, Obs: observation::Observation> { // this is an option so that we can take it during commit and rollback to prevent it being // rolled back during drop. 
- pub(crate) inner: Option, - pub(crate) doc: &'a mut Automerge, - pub op_observer: Obs, + inner: Option, + // As with `inner` this is an `Option` so we can `take` it during `commit` + observation: Option, + doc: &'a mut Automerge, } -impl<'a, Obs: OpObserver> Transaction<'a, Obs> { +impl<'a, Obs: observation::Observation> Transaction<'a, Obs> { + pub(crate) fn new(doc: &'a mut Automerge, args: TransactionArgs, obs: Obs) -> Self { + Self { + inner: Some(TransactionInner::new(args)), + doc, + observation: Some(obs), + } + } +} + +impl<'a> Transaction<'a, observation::UnObserved> { + pub(crate) fn empty( + doc: &'a mut Automerge, + args: TransactionArgs, + opts: CommitOptions, + ) -> ChangeHash { + TransactionInner::empty(doc, args, opts.message, opts.time) + } +} + +impl<'a, Obs: OpObserver + BranchableObserver> Transaction<'a, observation::Observed> { + pub fn observer(&mut self) -> &mut Obs { + self.observation.as_mut().unwrap().observer() + } +} + +impl<'a, Obs: observation::Observation> Transaction<'a, Obs> { /// Get the heads of the document before this transaction was started. pub fn get_heads(&self) -> Vec { self.doc.get_heads() @@ -36,8 +66,11 @@ impl<'a, Obs: OpObserver> Transaction<'a, Obs> { /// Commit the operations performed in this transaction, returning the hashes corresponding to /// the new heads. - pub fn commit(mut self) -> ChangeHash { - self.inner.take().unwrap().commit(self.doc, None, None) + pub fn commit(mut self) -> Obs::CommitResult { + let tx = self.inner.take().unwrap(); + let hash = tx.commit(self.doc, None, None); + let obs = self.observation.take().unwrap(); + obs.make_result(hash) } /// Commit the operations in this transaction with some options. 
@@ -56,11 +89,11 @@ impl<'a, Obs: OpObserver> Transaction<'a, Obs> { /// i64; /// tx.commit_with(CommitOptions::default().with_message("Create todos list").with_time(now)); /// ``` - pub fn commit_with(mut self, options: CommitOptions) -> ChangeHash { - self.inner - .take() - .unwrap() - .commit(self.doc, options.message, options.time) + pub fn commit_with(mut self, options: CommitOptions) -> Obs::CommitResult { + let tx = self.inner.take().unwrap(); + let hash = tx.commit(self.doc, options.message, options.time); + let obs = self.observation.take().unwrap(); + obs.make_result(hash) } /// Undo the operations added in this transaction, returning the number of cancelled @@ -68,124 +101,21 @@ impl<'a, Obs: OpObserver> Transaction<'a, Obs> { pub fn rollback(mut self) -> usize { self.inner.take().unwrap().rollback(self.doc) } + + fn do_tx(&mut self, f: F) -> O + where + F: FnOnce(&mut TransactionInner, &mut Automerge, Option<&mut Obs::Obs>) -> O, + { + let tx = self.inner.as_mut().unwrap(); + if let Some(obs) = self.observation.as_mut() { + f(tx, self.doc, obs.observer()) + } else { + f(tx, self.doc, None) + } + } } -impl<'a, Obs: OpObserver> Transactable for Transaction<'a, Obs> { - /// Get the number of pending operations in this transaction. - fn pending_ops(&self) -> usize { - self.inner.as_ref().unwrap().pending_ops() - } - - /// Set the value of property `P` to value `V` in object `obj`. 
- /// - /// # Errors - /// - /// This will return an error if - /// - The object does not exist - /// - The key is the wrong type for the object - /// - The key does not exist in the object - fn put, P: Into, V: Into>( - &mut self, - obj: O, - prop: P, - value: V, - ) -> Result<(), AutomergeError> { - self.inner - .as_mut() - .unwrap() - .put(self.doc, &mut self.op_observer, obj.as_ref(), prop, value) - } - - fn put_object, P: Into>( - &mut self, - obj: O, - prop: P, - value: ObjType, - ) -> Result { - self.inner.as_mut().unwrap().put_object( - self.doc, - &mut self.op_observer, - obj.as_ref(), - prop, - value, - ) - } - - fn insert, V: Into>( - &mut self, - obj: O, - index: usize, - value: V, - ) -> Result<(), AutomergeError> { - self.inner.as_mut().unwrap().insert( - self.doc, - &mut self.op_observer, - obj.as_ref(), - index, - value, - ) - } - - fn insert_object>( - &mut self, - obj: O, - index: usize, - value: ObjType, - ) -> Result { - self.inner.as_mut().unwrap().insert_object( - self.doc, - &mut self.op_observer, - obj.as_ref(), - index, - value, - ) - } - - fn increment, P: Into>( - &mut self, - obj: O, - prop: P, - value: i64, - ) -> Result<(), AutomergeError> { - self.inner.as_mut().unwrap().increment( - self.doc, - &mut self.op_observer, - obj.as_ref(), - prop, - value, - ) - } - - fn delete, P: Into>( - &mut self, - obj: O, - prop: P, - ) -> Result<(), AutomergeError> { - self.inner - .as_mut() - .unwrap() - .delete(self.doc, &mut self.op_observer, obj.as_ref(), prop) - } - - /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert - /// the new elements - fn splice, V: IntoIterator>( - &mut self, - obj: O, - pos: usize, - del: usize, - vals: V, - ) -> Result<(), AutomergeError> { - self.inner.as_mut().unwrap().splice( - self.doc, - &mut self.op_observer, - obj.as_ref(), - pos, - del, - vals, - ) - } - +impl<'a, Obs: observation::Observation> ReadDoc for Transaction<'a, Obs> { fn keys>(&self, obj: O) -> Keys<'_, '_> { self.doc.keys(obj) } @@ -244,7 +174,7 @@ impl<'a, Obs: OpObserver> Transactable for Transaction<'a, Obs> { self.doc.length_at(obj, heads) } - fn object_type>(&self, obj: O) -> Option { + fn object_type>(&self, obj: O) -> Result { self.doc.object_type(obj) } @@ -297,13 +227,119 @@ impl<'a, Obs: OpObserver> Transactable for Transaction<'a, Obs> { fn parents>(&self, obj: O) -> Result, AutomergeError> { self.doc.parents(obj) } + + fn path_to_object>(&self, obj: O) -> Result, AutomergeError> { + self.doc.path_to_object(obj) + } + + fn get_missing_deps(&self, heads: &[ChangeHash]) -> Vec { + self.doc.get_missing_deps(heads) + } + + fn get_change_by_hash(&self, hash: &ChangeHash) -> Option<&crate::Change> { + self.doc.get_change_by_hash(hash) + } +} + +impl<'a, Obs: observation::Observation> Transactable for Transaction<'a, Obs> { + /// Get the number of pending operations in this transaction. + fn pending_ops(&self) -> usize { + self.inner.as_ref().unwrap().pending_ops() + } + + /// Set the value of property `P` to value `V` in object `obj`. 
+ /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + fn put, P: Into, V: Into>( + &mut self, + obj: O, + prop: P, + value: V, + ) -> Result<(), AutomergeError> { + self.do_tx(|tx, doc, obs| tx.put(doc, obs, obj.as_ref(), prop, value)) + } + + fn put_object, P: Into>( + &mut self, + obj: O, + prop: P, + value: ObjType, + ) -> Result { + self.do_tx(|tx, doc, obs| tx.put_object(doc, obs, obj.as_ref(), prop, value)) + } + + fn insert, V: Into>( + &mut self, + obj: O, + index: usize, + value: V, + ) -> Result<(), AutomergeError> { + self.do_tx(|tx, doc, obs| tx.insert(doc, obs, obj.as_ref(), index, value)) + } + + fn insert_object>( + &mut self, + obj: O, + index: usize, + value: ObjType, + ) -> Result { + self.do_tx(|tx, doc, obs| tx.insert_object(doc, obs, obj.as_ref(), index, value)) + } + + fn increment, P: Into>( + &mut self, + obj: O, + prop: P, + value: i64, + ) -> Result<(), AutomergeError> { + self.do_tx(|tx, doc, obs| tx.increment(doc, obs, obj.as_ref(), prop, value)) + } + + fn delete, P: Into>( + &mut self, + obj: O, + prop: P, + ) -> Result<(), AutomergeError> { + self.do_tx(|tx, doc, obs| tx.delete(doc, obs, obj.as_ref(), prop)) + } + + /// Splice new elements into the given sequence. 
Returns a vector of the OpIds used to insert + /// the new elements + fn splice, V: IntoIterator>( + &mut self, + obj: O, + pos: usize, + del: usize, + vals: V, + ) -> Result<(), AutomergeError> { + self.do_tx(|tx, doc, obs| tx.splice(doc, obs, obj.as_ref(), pos, del, vals)) + } + + fn splice_text>( + &mut self, + obj: O, + pos: usize, + del: usize, + text: &str, + ) -> Result<(), AutomergeError> { + self.do_tx(|tx, doc, obs| tx.splice_text(doc, obs, obj.as_ref(), pos, del, text)) + } + + fn base_heads(&self) -> Vec { + self.doc.get_heads() + } } // If a transaction is not commited or rolled back manually then it can leave the document in an // intermediate state. // This defaults to rolling back the transaction to be compatible with `?` error returning before // reaching a call to `commit`. -impl<'a, Obs: OpObserver> Drop for Transaction<'a, Obs> { +impl<'a, Obs: observation::Observation> Drop for Transaction<'a, Obs> { fn drop(&mut self) { if let Some(txn) = self.inner.take() { txn.rollback(self.doc); diff --git a/rust/automerge/src/transaction/observation.rs b/rust/automerge/src/transaction/observation.rs new file mode 100644 index 00000000..53723711 --- /dev/null +++ b/rust/automerge/src/transaction/observation.rs @@ -0,0 +1,80 @@ +//! This module is essentially a type level Option. It is used in sitations where we know at +//! compile time whether an `OpObserver` is available to track changes in a transaction. 
+use crate::{op_observer::BranchableObserver, ChangeHash, OpObserver}; + +mod private { + use crate::op_observer::BranchableObserver; + + pub trait Sealed {} + impl Sealed for super::Observed {} + impl Sealed for super::UnObserved {} +} + +pub trait Observation: private::Sealed { + type Obs: OpObserver + BranchableObserver; + type CommitResult; + + fn observer(&mut self) -> Option<&mut Self::Obs>; + fn make_result(self, hash: Option) -> Self::CommitResult; + fn branch(&self) -> Self; + fn merge(&mut self, other: &Self); +} + +#[derive(Clone, Debug)] +pub struct Observed(Obs); + +impl Observed { + pub(crate) fn new(o: O) -> Self { + Self(o) + } + + pub(crate) fn observer(&mut self) -> &mut O { + &mut self.0 + } +} + +impl Observation for Observed { + type Obs = Obs; + type CommitResult = (Obs, Option); + fn observer(&mut self) -> Option<&mut Self::Obs> { + Some(&mut self.0) + } + + fn make_result(self, hash: Option) -> Self::CommitResult { + (self.0, hash) + } + + fn branch(&self) -> Self { + Self(self.0.branch()) + } + + fn merge(&mut self, other: &Self) { + self.0.merge(&other.0) + } +} + +#[derive(Clone, Default, Debug)] +pub struct UnObserved; +impl UnObserved { + pub fn new() -> Self { + Self + } +} + +impl Observation for UnObserved { + type Obs = (); + type CommitResult = Option; + fn observer(&mut self) -> Option<&mut Self::Obs> { + None + } + + fn make_result(self, hash: Option) -> Self::CommitResult { + hash + } + + fn branch(&self) -> Self { + Self + } + + fn merge(&mut self, _other: &Self) {} +} diff --git a/automerge/src/transaction/result.rs b/rust/automerge/src/transaction/result.rs similarity index 77% rename from automerge/src/transaction/result.rs rename to rust/automerge/src/transaction/result.rs index 8943b7a2..5327ff44 100644 --- a/automerge/src/transaction/result.rs +++ b/rust/automerge/src/transaction/result.rs @@ -5,8 +5,8 @@ use crate::ChangeHash; pub struct Success { /// The result of the transaction. 
pub result: O, - /// The hash of the change, also the head of the document. - pub hash: ChangeHash, + /// The hash of the change, will be `None` if the transaction did not create any operations + pub hash: Option, pub op_observer: Obs, } diff --git a/rust/automerge/src/transaction/transactable.rs b/rust/automerge/src/transaction/transactable.rs new file mode 100644 index 00000000..05c48c79 --- /dev/null +++ b/rust/automerge/src/transaction/transactable.rs @@ -0,0 +1,93 @@ +use crate::exid::ExId; +use crate::{AutomergeError, ChangeHash, ObjType, Prop, ReadDoc, ScalarValue}; + +/// A way of mutating a document within a single change. +pub trait Transactable: ReadDoc { + /// Get the number of pending operations in this transaction. + fn pending_ops(&self) -> usize; + + /// Set the value of property `P` to value `V` in object `obj`. + /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + fn put, P: Into, V: Into>( + &mut self, + obj: O, + prop: P, + value: V, + ) -> Result<(), AutomergeError>; + + /// Set the value of property `P` to the new object `V` in object `obj`. + /// + /// # Returns + /// + /// The id of the object which was created. + /// + /// # Errors + /// + /// This will return an error if + /// - The object does not exist + /// - The key is the wrong type for the object + /// - The key does not exist in the object + fn put_object, P: Into>( + &mut self, + obj: O, + prop: P, + object: ObjType, + ) -> Result; + + /// Insert a value into a list at the given index. + fn insert, V: Into>( + &mut self, + obj: O, + index: usize, + value: V, + ) -> Result<(), AutomergeError>; + + /// Insert an object into a list at the given index. + fn insert_object>( + &mut self, + obj: O, + index: usize, + object: ObjType, + ) -> Result; + + /// Increment the counter at the prop in the object by `value`. 
+ fn increment, P: Into>( + &mut self, + obj: O, + prop: P, + value: i64, + ) -> Result<(), AutomergeError>; + + /// Delete the value at prop in the object. + fn delete, P: Into>( + &mut self, + obj: O, + prop: P, + ) -> Result<(), AutomergeError>; + + fn splice, V: IntoIterator>( + &mut self, + obj: O, + pos: usize, + del: usize, + vals: V, + ) -> Result<(), AutomergeError>; + + /// Like [`Self::splice`] but for text. + fn splice_text>( + &mut self, + obj: O, + pos: usize, + del: usize, + text: &str, + ) -> Result<(), AutomergeError>; + + /// The heads this transaction will be based on + fn base_heads(&self) -> Vec; +} diff --git a/automerge/src/types.rs b/rust/automerge/src/types.rs similarity index 70% rename from automerge/src/types.rs rename to rust/automerge/src/types.rs index a1e4f2a7..468986ec 100644 --- a/automerge/src/types.rs +++ b/rust/automerge/src/types.rs @@ -3,10 +3,12 @@ use crate::legacy as amp; use serde::{Deserialize, Serialize}; use std::borrow::Cow; use std::cmp::Eq; +use std::cmp::Ordering; use std::fmt; use std::fmt::Display; use std::str::FromStr; use tinyvec::{ArrayVec, TinyVec}; +//use crate::indexed_cache::IndexedCache; mod opids; pub(crate) use opids::OpIds; @@ -141,12 +143,17 @@ impl fmt::Display for ActorId { } } +/// The type of an object #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq, Copy, Hash)] #[serde(rename_all = "camelCase", untagged)] pub enum ObjType { + /// A map Map, + /// Retained for backwards compatibility, tables are identical to maps Table, + /// A sequence of arbitrary values List, + /// A sequence of characters Text, } @@ -209,23 +216,35 @@ impl OpType { } } - pub(crate) fn from_index_and_value( - index: u64, - value: ScalarValue, - ) -> Result { - match index { - 0 => Ok(Self::Make(ObjType::Map)), - 1 => Ok(Self::Put(value)), - 2 => Ok(Self::Make(ObjType::List)), - 3 => Ok(Self::Delete), - 4 => Ok(Self::Make(ObjType::Text)), + pub(crate) fn validate_action_and_value( + action: u64, + value: 
&ScalarValue, + ) -> Result<(), error::InvalidOpType> { + match action { + 0..=4 => Ok(()), 5 => match value { - ScalarValue::Int(i) => Ok(Self::Increment(i)), - ScalarValue::Uint(i) => Ok(Self::Increment(i as i64)), + ScalarValue::Int(_) | ScalarValue::Uint(_) => Ok(()), _ => Err(error::InvalidOpType::NonNumericInc), }, - 6 => Ok(Self::Make(ObjType::Table)), - other => Err(error::InvalidOpType::UnknownAction(other)), + 6 => Ok(()), + _ => Err(error::InvalidOpType::UnknownAction(action)), + } + } + + pub(crate) fn from_action_and_value(action: u64, value: ScalarValue) -> OpType { + match action { + 0 => Self::Make(ObjType::Map), + 1 => Self::Put(value), + 2 => Self::Make(ObjType::List), + 3 => Self::Delete, + 4 => Self::Make(ObjType::Text), + 5 => match value { + ScalarValue::Int(i) => Self::Increment(i), + ScalarValue::Uint(i) => Self::Increment(i as i64), + _ => unreachable!("validate_action_and_value returned NonNumericInc"), + }, + 6 => Self::Make(ObjType::Table), + _ => unreachable!("validate_action_and_value returned UnknownAction"), } } } @@ -253,17 +272,6 @@ pub(crate) trait Exportable { fn export(&self) -> Export; } -impl OpId { - #[inline] - pub(crate) fn counter(&self) -> u64 { - self.0 - } - #[inline] - pub(crate) fn actor(&self) -> usize { - self.1 - } -} - impl Exportable for ObjId { fn export(&self) -> Export { if self.0 == ROOT { @@ -387,12 +395,27 @@ pub(crate) enum Key { Seq(ElemId), } +/// A property of an object +/// +/// This is either a string representing a property in a map, or an integer +/// which is the index into a sequence #[derive(Debug, PartialEq, PartialOrd, Eq, Ord, Clone)] pub enum Prop { + /// A property in a map Map(String), + /// An index into a sequence Seq(usize), } +impl Prop { + pub(crate) fn to_index(&self) -> Option { + match self { + Prop::Map(_) => None, + Prop::Seq(n) => Some(*n), + } + } +} + impl Display for Prop { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { @@ -412,11 +435,28 @@ impl Key { 
} #[derive(Debug, Clone, PartialOrd, Ord, Eq, PartialEq, Copy, Hash, Default)] -pub(crate) struct OpId(pub(crate) u64, pub(crate) usize); +pub(crate) struct OpId(u32, u32); impl OpId { - pub(crate) fn new(actor: usize, counter: u64) -> Self { - Self(counter, actor) + pub(crate) fn new(counter: u64, actor: usize) -> Self { + Self(counter.try_into().unwrap(), actor.try_into().unwrap()) + } + + #[inline] + pub(crate) fn counter(&self) -> u64 { + self.0.into() + } + + #[inline] + pub(crate) fn actor(&self) -> usize { + self.1.try_into().unwrap() + } + + #[inline] + pub(crate) fn lamport_cmp(&self, other: &OpId, actors: &[ActorId]) -> Ordering { + self.0 + .cmp(&other.0) + .then_with(|| actors[self.1 as usize].cmp(&actors[other.1 as usize])) } } @@ -437,6 +477,48 @@ impl ObjId { } } +/// How indexes into text sequeces are calculated +/// +/// Automerge text objects are internally sequences of utf8 characters. This +/// means that in environments (such as javascript) which use a different +/// encoding the indexes into the text sequence will be different. This enum +/// represents the different ways indexes can be calculated. 
+#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum TextEncoding { + /// The indexes are calculated using the utf8 encoding + Utf8, + /// The indexes are calculated using the utf16 encoding + Utf16, +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub(crate) enum ListEncoding { + List, + Text(TextEncoding), +} + +impl Default for ListEncoding { + fn default() -> Self { + ListEncoding::List + } +} + +impl Default for TextEncoding { + fn default() -> Self { + TextEncoding::Utf8 + } +} + +impl ListEncoding { + pub(crate) fn new(obj: ObjType, text_encoding: TextEncoding) -> Self { + if obj == ObjType::Text { + ListEncoding::Text(text_encoding) + } else { + ListEncoding::List + } + } +} + #[derive(Debug, Clone, Copy, PartialOrd, Eq, PartialEq, Ord, Hash, Default)] pub(crate) struct ElemId(pub(crate) OpId); @@ -491,6 +573,22 @@ impl Op { } } + pub(crate) fn width(&self, encoding: ListEncoding) -> usize { + match encoding { + ListEncoding::List => 1, + ListEncoding::Text(TextEncoding::Utf8) => self.to_str().chars().count(), + ListEncoding::Text(TextEncoding::Utf16) => self.to_str().encode_utf16().count(), + } + } + + pub(crate) fn to_str(&self) -> &str { + if let OpType::Put(ScalarValue::Str(s)) = &self.action { + s + } else { + "\u{fffc}" + } + } + pub(crate) fn visible(&self) -> bool { if self.is_inc() { false @@ -610,14 +708,14 @@ impl AsRef<[u8]> for ChangeHash { impl fmt::Debug for ChangeHash { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("ChangeHash") - .field(&hex::encode(&self.0)) + .field(&hex::encode(self.0)) .finish() } } impl fmt::Display for ChangeHash { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", hex::encode(&self.0)) + write!(f, "{}", hex::encode(self.0)) } } @@ -670,3 +768,77 @@ impl From for wasm_bindgen::JsValue { } } } + +#[cfg(test)] +pub(crate) mod gen { + use super::{ + ChangeHash, Counter, ElemId, Key, ObjType, Op, OpId, OpIds, OpType, ScalarValue, HASH_SIZE, + }; + use 
proptest::prelude::*; + + pub(crate) fn gen_hash() -> impl Strategy { + proptest::collection::vec(proptest::bits::u8::ANY, HASH_SIZE) + .prop_map(|b| ChangeHash::try_from(&b[..]).unwrap()) + } + + pub(crate) fn gen_scalar_value() -> impl Strategy { + prop_oneof![ + proptest::collection::vec(proptest::bits::u8::ANY, 0..200).prop_map(ScalarValue::Bytes), + "[a-z]{10,500}".prop_map(|s| ScalarValue::Str(s.into())), + any::().prop_map(ScalarValue::Int), + any::().prop_map(ScalarValue::Uint), + any::().prop_map(ScalarValue::F64), + any::().prop_map(|c| ScalarValue::Counter(Counter::from(c))), + any::().prop_map(ScalarValue::Timestamp), + any::().prop_map(ScalarValue::Boolean), + Just(ScalarValue::Null), + ] + } + + pub(crate) fn gen_objtype() -> impl Strategy { + prop_oneof![ + Just(ObjType::Map), + Just(ObjType::Table), + Just(ObjType::List), + Just(ObjType::Text), + ] + } + + pub(crate) fn gen_action() -> impl Strategy { + prop_oneof![ + Just(OpType::Delete), + any::().prop_map(OpType::Increment), + gen_scalar_value().prop_map(OpType::Put), + gen_objtype().prop_map(OpType::Make) + ] + } + + pub(crate) fn gen_key(key_indices: Vec) -> impl Strategy { + prop_oneof![ + proptest::sample::select(key_indices).prop_map(Key::Map), + Just(Key::Seq(ElemId(OpId::new(0, 0)))), + ] + } + + /// Generate an arbitrary op + /// + /// The generated op will have no preds or succs + /// + /// # Arguments + /// + /// * `id` - the OpId this op will be given + /// * `key_prop_indices` - The indices of props which will be used to generate keys of type + /// `Key::Map`. I.e. 
this is what would typically be in `OpSetMetadata::props + pub(crate) fn gen_op(id: OpId, key_prop_indices: Vec) -> impl Strategy { + (gen_key(key_prop_indices), any::(), gen_action()).prop_map( + move |(key, insert, action)| Op { + id, + key, + insert, + action, + succ: OpIds::empty(), + pred: OpIds::empty(), + }, + ) + } +} diff --git a/automerge/src/types/opids.rs b/rust/automerge/src/types/opids.rs similarity index 96% rename from automerge/src/types/opids.rs rename to rust/automerge/src/types/opids.rs index 3ebac93c..a81ccb36 100644 --- a/automerge/src/types/opids.rs +++ b/rust/automerge/src/types/opids.rs @@ -129,7 +129,8 @@ mod tests { fn gen_opid(actors: Vec) -> impl Strategy { (0..actors.len()).prop_flat_map(|actor_idx| { - (Just(actor_idx), 0..u64::MAX).prop_map(|(actor_idx, counter)| OpId(counter, actor_idx)) + (Just(actor_idx), 0..(u32::MAX as u64)) + .prop_map(|(actor_idx, counter)| OpId::new(counter, actor_idx)) }) } @@ -190,7 +191,7 @@ mod tests { (OpId(0, _), OpId(0, _)) => Ordering::Equal, (OpId(0, _), OpId(_, _)) => Ordering::Less, (OpId(_, _), OpId(0, _)) => Ordering::Greater, - (OpId(a, x), OpId(b, y)) if a == b => actors[*x].cmp(&actors[*y]), + (OpId(a, x), OpId(b, y)) if a == b => actors[*x as usize].cmp(&actors[*y as usize]), (OpId(a, _), OpId(b, _)) => a.cmp(b), } } diff --git a/automerge/src/value.rs b/rust/automerge/src/value.rs similarity index 97% rename from automerge/src/value.rs rename to rust/automerge/src/value.rs index b3142bdf..be128787 100644 --- a/automerge/src/value.rs +++ b/rust/automerge/src/value.rs @@ -5,9 +5,12 @@ use smol_str::SmolStr; use std::borrow::Cow; use std::fmt; +/// The type of values in an automerge document #[derive(Debug, Clone, PartialEq)] pub enum Value<'a> { + /// An composite object of type `ObjType` Object(ObjType), + /// A non composite value // TODO: if we don't have to store this in patches any more then it might be able to be just a // &'a ScalarValue rather than a Cow Scalar(Cow<'a, ScalarValue>), 
@@ -266,6 +269,12 @@ impl<'a> From for Value<'a> { } } +impl<'a> From for Value<'a> { + fn from(s: SmolStr) -> Self { + Value::Scalar(Cow::Owned(ScalarValue::Str(s))) + } +} + impl<'a> From for Value<'a> { fn from(c: char) -> Self { Value::Scalar(Cow::Owned(ScalarValue::Str(SmolStr::new(c.to_string())))) @@ -425,6 +434,7 @@ impl From<&Counter> for f64 { } } +/// A value which is not a composite value #[derive(Serialize, PartialEq, Debug, Clone)] #[serde(untagged)] pub enum ScalarValue { @@ -436,7 +446,11 @@ pub enum ScalarValue { Counter(Counter), Timestamp(i64), Boolean(bool), - Unknown { type_code: u8, bytes: Vec }, + /// A value from a future version of automerge + Unknown { + type_code: u8, + bytes: Vec, + }, Null, } diff --git a/automerge/src/values.rs b/rust/automerge/src/values.rs similarity index 89% rename from automerge/src/values.rs rename to rust/automerge/src/values.rs index 90f596f3..15ccb4cb 100644 --- a/automerge/src/values.rs +++ b/rust/automerge/src/values.rs @@ -2,6 +2,9 @@ use crate::exid::ExId; use crate::{Automerge, Value}; use std::fmt; +/// An iterator over the values in an object +/// +/// This is returned by the [`crate::ReadDoc::values`] and [`crate::ReadDoc::values_at`] methods pub struct Values<'a> { range: Box>, doc: &'a Automerge, @@ -52,9 +55,3 @@ impl<'a> Iterator for Values<'a> { self.range.next_value(self.doc) } } - -impl<'a> DoubleEndedIterator for Values<'a> { - fn next_back(&mut self) -> Option { - unimplemented!() - } -} diff --git a/automerge/src/visualisation.rs b/rust/automerge/src/visualisation.rs similarity index 89% rename from automerge/src/visualisation.rs rename to rust/automerge/src/visualisation.rs index 6894f46f..31e9bbdb 100644 --- a/automerge/src/visualisation.rs +++ b/rust/automerge/src/visualisation.rs @@ -1,4 +1,4 @@ -use crate::types::ObjId; +use crate::types::{ObjId, Op}; use fxhash::FxHasher; use std::{borrow::Cow, collections::HashMap, hash::BuildHasherDefault}; @@ -26,7 +26,7 @@ pub(crate) struct Node<'a> 
{ #[derive(Clone)] pub(crate) enum NodeType<'a> { ObjRoot(crate::types::ObjId), - ObjTreeNode(ObjId, &'a crate::op_tree::OpTreeNode), + ObjTreeNode(ObjId, &'a crate::op_tree::OpTreeNode, &'a [Op]), } #[derive(Clone)] @@ -52,7 +52,13 @@ impl<'a> GraphVisualisation<'a> { let mut nodes = HashMap::new(); for (obj_id, tree) in trees { if let Some(root_node) = &tree.internal.root_node { - let tree_id = Self::construct_nodes(root_node, obj_id, &mut nodes, metadata); + let tree_id = Self::construct_nodes( + root_node, + &tree.internal.ops, + obj_id, + &mut nodes, + metadata, + ); let obj_tree_id = NodeId::default(); nodes.insert( obj_tree_id, @@ -77,6 +83,7 @@ impl<'a> GraphVisualisation<'a> { fn construct_nodes( node: &'a crate::op_tree::OpTreeNode, + ops: &'a [Op], objid: &ObjId, nodes: &mut HashMap>, m: &'a crate::op_set::OpSetMetadata, @@ -84,7 +91,7 @@ impl<'a> GraphVisualisation<'a> { let node_id = NodeId::default(); let mut child_ids = Vec::new(); for child in &node.children { - let child_id = Self::construct_nodes(child, objid, nodes, m); + let child_id = Self::construct_nodes(child, ops, objid, nodes, m); child_ids.push(child_id); } nodes.insert( @@ -92,7 +99,7 @@ impl<'a> GraphVisualisation<'a> { Node { id: node_id, children: child_ids, - node_type: NodeType::ObjTreeNode(*objid, node), + node_type: NodeType::ObjTreeNode(*objid, node, ops), metadata: m, }, ); @@ -138,7 +145,7 @@ impl<'a> dot::Labeller<'a, &'a Node<'a>, Edge> for GraphVisualisation<'a> { fn node_shape(&'a self, node: &&'a Node<'a>) -> Option> { let shape = match node.node_type { - NodeType::ObjTreeNode(_, _) => dot::LabelText::label("none"), + NodeType::ObjTreeNode(_, _, _) => dot::LabelText::label("none"), NodeType::ObjRoot(_) => dot::LabelText::label("ellipse"), }; Some(shape) @@ -146,8 +153,8 @@ impl<'a> dot::Labeller<'a, &'a Node<'a>, Edge> for GraphVisualisation<'a> { fn node_label(&'a self, n: &&Node<'a>) -> dot::LabelText<'a> { match n.node_type { - NodeType::ObjTreeNode(objid, tree_node) => 
dot::LabelText::HtmlStr( - OpTable::create(tree_node, &objid, n.metadata, &self.actor_shorthands) + NodeType::ObjTreeNode(objid, tree_node, ops) => dot::LabelText::HtmlStr( + OpTable::create(tree_node, ops, &objid, n.metadata, &self.actor_shorthands) .to_html() .into(), ), @@ -165,6 +172,7 @@ struct OpTable { impl OpTable { fn create<'a>( node: &'a crate::op_tree::OpTreeNode, + ops: &'a [Op], obj: &ObjId, metadata: &crate::op_set::OpSetMetadata, actor_shorthands: &HashMap, @@ -172,7 +180,7 @@ impl OpTable { let rows = node .elements .iter() - .map(|e| OpTableRow::create(e, obj, metadata, actor_shorthands)) + .map(|e| OpTableRow::create(&ops[*e], obj, metadata, actor_shorthands)) .collect(); OpTable { rows } } diff --git a/rust/automerge/tests/fixtures/64bit_obj_id_change.automerge b/rust/automerge/tests/fixtures/64bit_obj_id_change.automerge new file mode 100644 index 00000000..700342a2 Binary files /dev/null and b/rust/automerge/tests/fixtures/64bit_obj_id_change.automerge differ diff --git a/rust/automerge/tests/fixtures/64bit_obj_id_doc.automerge b/rust/automerge/tests/fixtures/64bit_obj_id_doc.automerge new file mode 100644 index 00000000..6beb57fe Binary files /dev/null and b/rust/automerge/tests/fixtures/64bit_obj_id_doc.automerge differ diff --git a/rust/automerge/tests/fixtures/counter_value_has_incorrect_meta.automerge b/rust/automerge/tests/fixtures/counter_value_has_incorrect_meta.automerge new file mode 100644 index 00000000..2290b446 Binary files /dev/null and b/rust/automerge/tests/fixtures/counter_value_has_incorrect_meta.automerge differ diff --git a/rust/automerge/tests/fixtures/counter_value_is_ok.automerge b/rust/automerge/tests/fixtures/counter_value_is_ok.automerge new file mode 100644 index 00000000..fdc59896 Binary files /dev/null and b/rust/automerge/tests/fixtures/counter_value_is_ok.automerge differ diff --git a/rust/automerge/tests/fixtures/counter_value_is_overlong.automerge 
b/rust/automerge/tests/fixtures/counter_value_is_overlong.automerge new file mode 100644 index 00000000..831346f7 Binary files /dev/null and b/rust/automerge/tests/fixtures/counter_value_is_overlong.automerge differ diff --git a/rust/automerge/tests/fixtures/two_change_chunks.automerge b/rust/automerge/tests/fixtures/two_change_chunks.automerge new file mode 100644 index 00000000..1a84b363 Binary files /dev/null and b/rust/automerge/tests/fixtures/two_change_chunks.automerge differ diff --git a/rust/automerge/tests/fixtures/two_change_chunks_compressed.automerge b/rust/automerge/tests/fixtures/two_change_chunks_compressed.automerge new file mode 100644 index 00000000..9e3f305f Binary files /dev/null and b/rust/automerge/tests/fixtures/two_change_chunks_compressed.automerge differ diff --git a/rust/automerge/tests/fixtures/two_change_chunks_out_of_order.automerge b/rust/automerge/tests/fixtures/two_change_chunks_out_of_order.automerge new file mode 100644 index 00000000..9ba0355f Binary files /dev/null and b/rust/automerge/tests/fixtures/two_change_chunks_out_of_order.automerge differ diff --git a/rust/automerge/tests/fuzz-crashers/action-is-48.automerge b/rust/automerge/tests/fuzz-crashers/action-is-48.automerge new file mode 100644 index 00000000..16e6f719 Binary files /dev/null and b/rust/automerge/tests/fuzz-crashers/action-is-48.automerge differ diff --git a/rust/automerge/tests/fuzz-crashers/crash-da39a3ee5e6b4b0d3255bfef95601890afd80709 b/rust/automerge/tests/fuzz-crashers/crash-da39a3ee5e6b4b0d3255bfef95601890afd80709 new file mode 100644 index 00000000..bcb12cdd Binary files /dev/null and b/rust/automerge/tests/fuzz-crashers/crash-da39a3ee5e6b4b0d3255bfef95601890afd80709 differ diff --git a/rust/automerge/tests/fuzz-crashers/incorrect_max_op.automerge b/rust/automerge/tests/fuzz-crashers/incorrect_max_op.automerge new file mode 100644 index 00000000..05cc2c82 Binary files /dev/null and b/rust/automerge/tests/fuzz-crashers/incorrect_max_op.automerge differ 
diff --git a/rust/automerge/tests/fuzz-crashers/invalid_deflate_stream.automerge b/rust/automerge/tests/fuzz-crashers/invalid_deflate_stream.automerge new file mode 100644 index 00000000..21e869eb Binary files /dev/null and b/rust/automerge/tests/fuzz-crashers/invalid_deflate_stream.automerge differ diff --git a/rust/automerge/tests/fuzz-crashers/missing_actor.automerge b/rust/automerge/tests/fuzz-crashers/missing_actor.automerge new file mode 100644 index 00000000..cc8c61b1 Binary files /dev/null and b/rust/automerge/tests/fuzz-crashers/missing_actor.automerge differ diff --git a/rust/automerge/tests/fuzz-crashers/missing_deps.automerge b/rust/automerge/tests/fuzz-crashers/missing_deps.automerge new file mode 100644 index 00000000..8a57a0f4 Binary files /dev/null and b/rust/automerge/tests/fuzz-crashers/missing_deps.automerge differ diff --git a/rust/automerge/tests/fuzz-crashers/missing_deps_compressed.automerge b/rust/automerge/tests/fuzz-crashers/missing_deps_compressed.automerge new file mode 100644 index 00000000..2c7b123b Binary files /dev/null and b/rust/automerge/tests/fuzz-crashers/missing_deps_compressed.automerge differ diff --git a/rust/automerge/tests/fuzz-crashers/missing_deps_subsequent.automerge b/rust/automerge/tests/fuzz-crashers/missing_deps_subsequent.automerge new file mode 100644 index 00000000..2fe439af Binary files /dev/null and b/rust/automerge/tests/fuzz-crashers/missing_deps_subsequent.automerge differ diff --git a/rust/automerge/tests/fuzz-crashers/overflow_in_length.automerge b/rust/automerge/tests/fuzz-crashers/overflow_in_length.automerge new file mode 100644 index 00000000..45771f34 Binary files /dev/null and b/rust/automerge/tests/fuzz-crashers/overflow_in_length.automerge differ diff --git a/rust/automerge/tests/fuzz-crashers/too_many_deps.automerge b/rust/automerge/tests/fuzz-crashers/too_many_deps.automerge new file mode 100644 index 00000000..657ce993 Binary files /dev/null and 
b/rust/automerge/tests/fuzz-crashers/too_many_deps.automerge differ diff --git a/rust/automerge/tests/fuzz-crashers/too_many_ops.automerge b/rust/automerge/tests/fuzz-crashers/too_many_ops.automerge new file mode 100644 index 00000000..661258b0 Binary files /dev/null and b/rust/automerge/tests/fuzz-crashers/too_many_ops.automerge differ diff --git a/automerge/tests/test.rs b/rust/automerge/tests/test.rs similarity index 84% rename from automerge/tests/test.rs rename to rust/automerge/tests/test.rs index ae28b531..3be6725e 100644 --- a/automerge/tests/test.rs +++ b/rust/automerge/tests/test.rs @@ -1,17 +1,17 @@ use automerge::transaction::Transactable; use automerge::{ - ActorId, AutoCommit, Automerge, AutomergeError, Change, ExpandedChange, ObjType, ScalarValue, - VecOpObserver, ROOT, + ActorId, AutoCommit, Automerge, AutomergeError, Change, ExpandedChange, ObjType, ReadDoc, + ScalarValue, VecOpObserver, ROOT, }; +use std::fs; // set up logging for all the tests -use test_log::test; +//use test_log::test; -mod helpers; #[allow(unused_imports)] -use helpers::{ - mk_counter, new_doc, new_doc_with_actor, pretty_print, realize, realize_obj, sorted_actors, - RealizedObject, +use automerge_test::{ + assert_doc, assert_obj, list, map, mk_counter, new_doc, new_doc_with_actor, pretty_print, + realize, realize_obj, sorted_actors, RealizedObject, }; use pretty_assertions::assert_eq; @@ -21,7 +21,7 @@ fn no_conflict_on_repeated_assignment() { doc.put(&automerge::ROOT, "foo", 1).unwrap(); doc.put(&automerge::ROOT, "foo", 2).unwrap(); assert_doc!( - doc.document(), + &doc, map! { "foo" => { 2 }, } @@ -41,7 +41,7 @@ fn repeated_map_assignment_which_resolves_conflict_not_ignored() { doc1.put(&automerge::ROOT, "field", 123).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "field" => { 123 } } @@ -62,7 +62,7 @@ fn repeated_list_assignment_which_resolves_conflict_not_ignored() { doc1.put(&list_id, 0, 789).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! 
{ "list" => { list![ @@ -84,7 +84,7 @@ fn list_deletion() { doc.insert(&list_id, 2, 789).unwrap(); doc.delete(&list_id, 1).unwrap(); assert_doc!( - doc.document(), + &doc, map! { "list" => { list![ { 123 }, @@ -106,7 +106,7 @@ fn merge_concurrent_map_prop_updates() { "bar".into() ); assert_doc!( - doc1.document(), + &doc1, map! { "foo" => { "bar" }, "hello" => { "world" }, @@ -114,7 +114,7 @@ fn merge_concurrent_map_prop_updates() { ); doc2.merge(&mut doc1).unwrap(); assert_doc!( - doc2.document(), + &doc2, map! { "foo" => { "bar" }, "hello" => { "world" }, @@ -134,7 +134,7 @@ fn add_concurrent_increments_of_same_property() { doc2.increment(&automerge::ROOT, "counter", 2).unwrap(); doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "counter" => { mk_counter(3) @@ -161,7 +161,7 @@ fn add_increments_only_to_preceeded_values() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "counter" => { mk_counter(1), @@ -181,7 +181,7 @@ fn concurrent_updates_of_same_field() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "field" => { "one", @@ -206,7 +206,7 @@ fn concurrent_updates_of_same_list_element() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "birds" => { list![{ @@ -232,7 +232,7 @@ fn assignment_conflicts_of_different_types() { doc1.merge(&mut doc3).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "field" => { "string", @@ -255,7 +255,7 @@ fn changes_within_conflicting_map_field() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "field" => { "string", @@ -292,7 +292,7 @@ fn changes_within_conflicting_list_element() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "list" => { list![ @@ -330,7 +330,7 @@ fn concurrently_assigned_nested_maps_should_not_merge() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! 
{ "config" => { map!{ @@ -364,7 +364,7 @@ fn concurrent_insertions_at_different_list_positions() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "list" => { list![ @@ -396,7 +396,7 @@ fn concurrent_insertions_at_same_list_position() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "birds" => { list![ @@ -427,7 +427,7 @@ fn concurrent_assignment_and_deletion_of_a_map_entry() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "bestBird" => { "magpie", @@ -451,7 +451,7 @@ fn concurrent_assignment_and_deletion_of_list_entry() { doc2.delete(&list_id, 1).unwrap(); assert_doc!( - doc2.document(), + &doc2, map! { "birds" => {list![ {"blackbird"}, @@ -461,7 +461,7 @@ fn concurrent_assignment_and_deletion_of_list_entry() { ); assert_doc!( - doc1.document(), + &doc1, map! { "birds" => {list![ { "blackbird" }, @@ -474,7 +474,7 @@ fn concurrent_assignment_and_deletion_of_list_entry() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "birds" => {list![ { "blackbird" }, @@ -507,7 +507,7 @@ fn insertion_after_a_deleted_list_element() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "birds" => {list![ { "blackbird" }, @@ -518,7 +518,7 @@ fn insertion_after_a_deleted_list_element() { doc2.merge(&mut doc1).unwrap(); assert_doc!( - doc2.document(), + &doc2, map! { "birds" => {list![ { "blackbird" }, @@ -549,7 +549,7 @@ fn concurrent_deletion_of_same_list_element() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "birds" => {list![ { "albatross" }, @@ -560,7 +560,7 @@ fn concurrent_deletion_of_same_list_element() { doc2.merge(&mut doc1).unwrap(); assert_doc!( - doc2.document(), + &doc2, map! 
{ "birds" => {list![ { "albatross" }, @@ -593,7 +593,7 @@ fn concurrent_updates_at_different_levels() { doc1.merge(&mut doc2).unwrap(); assert_obj!( - doc1.document(), + &doc1, &automerge::ROOT, "animals", map! { @@ -635,7 +635,7 @@ fn concurrent_updates_of_concurrently_deleted_objects() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "birds" => { map!{}, @@ -686,7 +686,7 @@ fn does_not_interleave_sequence_insertions_at_same_position() { doc1.merge(&mut doc2).unwrap(); assert_doc!( - doc1.document(), + &doc1, map! { "wisdom" => {list![ {"to"}, @@ -719,7 +719,7 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_greater_actor_id( doc2.insert(&list, 0, "one").unwrap(); assert_doc!( - doc2.document(), + &doc2, map! { "list" => { list![ { "one" }, @@ -744,7 +744,7 @@ fn mutliple_insertions_at_same_list_position_with_insertion_by_lesser_actor_id() doc2.insert(&list, 0, "one").unwrap(); assert_doc!( - doc2.document(), + &doc2, map! { "list" => { list![ { "one" }, @@ -771,7 +771,7 @@ fn insertion_consistent_with_causality() { doc2.insert(&list, 0, "one").unwrap(); assert_doc!( - doc2.document(), + &doc2, map! { "list" => { list![ {"one"}, @@ -1124,33 +1124,31 @@ fn test_merging_test_conflicts_then_saving_and_loading() { let mut doc1 = new_doc_with_actor(actor1); let text = doc1.put_object(ROOT, "text", ObjType::Text).unwrap(); - doc1.splice(&text, 0, 0, "hello".chars().map(|c| c.to_string().into())) - .unwrap(); + doc1.splice_text(&text, 0, 0, "hello").unwrap(); let mut doc2 = AutoCommit::load(&doc1.save()).unwrap(); doc2.set_actor(actor2); - assert_doc! {doc2.document(), map!{ + assert_doc! 
{&doc2, map!{ "text" => { list![{"h"}, {"e"}, {"l"}, {"l"}, {"o"}]}, }}; - doc2.splice(&text, 4, 1, Vec::new()).unwrap(); - doc2.splice(&text, 4, 0, vec!["!".into()]).unwrap(); - doc2.splice(&text, 5, 0, vec![" ".into()]).unwrap(); - doc2.splice(&text, 6, 0, "world".chars().map(|c| c.into())) - .unwrap(); + doc2.splice_text(&text, 4, 1, "").unwrap(); + doc2.splice_text(&text, 4, 0, "!").unwrap(); + doc2.splice_text(&text, 5, 0, " ").unwrap(); + doc2.splice_text(&text, 6, 0, "world").unwrap(); assert_doc!( - doc2.document(), + &doc2, map! { "text" => { list![{"h"}, {"e"}, {"l"}, {"l"}, {"!"}, {" "}, {"w"} , {"o"}, {"r"}, {"l"}, {"d"}]} } ); - let mut doc3 = AutoCommit::load(&doc2.save()).unwrap(); + let doc3 = AutoCommit::load(&doc2.save()).unwrap(); assert_doc!( - doc3.document(), + &doc3, map! { "text" => { list![{"h"}, {"e"}, {"l"}, {"l"}, {"!"}, {" "}, {"w"} , {"o"}, {"r"}, {"l"}, {"d"}]} } @@ -1343,3 +1341,159 @@ fn load_doc_with_deleted_objects() { let saved = doc.save(); Automerge::load(&saved).unwrap(); } + +#[test] +fn insert_after_many_deletes() { + let mut doc = AutoCommit::new(); + let obj = doc.put_object(&ROOT, "object", ObjType::Map).unwrap(); + for i in 0..100 { + doc.put(&obj, i.to_string(), i).unwrap(); + doc.delete(&obj, i.to_string()).unwrap(); + } +} + +#[test] +fn simple_bad_saveload() { + let mut doc = Automerge::new(); + doc.transact::<_, _, AutomergeError>(|d| { + d.put(ROOT, "count", 0)?; + Ok(()) + }) + .unwrap(); + + doc.transact::<_, _, AutomergeError>(|_d| Ok(())).unwrap(); + + doc.transact::<_, _, AutomergeError>(|d| { + d.put(ROOT, "count", 0)?; + Ok(()) + }) + .unwrap(); + + let bytes = doc.save(); + Automerge::load(&bytes).unwrap(); +} + +#[test] +fn ops_on_wrong_objets() -> Result<(), AutomergeError> { + let mut doc = AutoCommit::new(); + let list = doc.put_object(&automerge::ROOT, "list", ObjType::List)?; + doc.insert(&list, 0, "a")?; + doc.insert(&list, 1, "b")?; + let e1 = doc.put(&list, "a", "AAA"); + assert_eq!(e1, 
Err(AutomergeError::InvalidOp(ObjType::List))); + let e2 = doc.splice_text(&list, 0, 0, "hello world"); + assert_eq!(e2, Err(AutomergeError::InvalidOp(ObjType::List))); + let map = doc.put_object(&automerge::ROOT, "map", ObjType::Map)?; + doc.put(&map, "a", "AAA")?; + doc.put(&map, "b", "BBB")?; + let e3 = doc.insert(&map, 0, "b"); + assert_eq!(e3, Err(AutomergeError::InvalidOp(ObjType::Map))); + let e4 = doc.splice_text(&map, 0, 0, "hello world"); + assert_eq!(e4, Err(AutomergeError::InvalidOp(ObjType::Map))); + let text = doc.put_object(&automerge::ROOT, "text", ObjType::Text)?; + doc.splice_text(&text, 0, 0, "hello world")?; + let e5 = doc.put(&text, "a", "AAA"); + assert_eq!(e5, Err(AutomergeError::InvalidOp(ObjType::Text))); + //let e6 = doc.insert(&text, 0, "b"); + //assert_eq!(e6, Err(AutomergeError::InvalidOp(ObjType::Text))); + Ok(()) +} + +#[test] +fn fuzz_crashers() { + let paths = fs::read_dir("./tests/fuzz-crashers").unwrap(); + + for path in paths { + // uncomment this line to figure out which fixture is crashing: + // println!("{:?}", path.as_ref().unwrap().path().display()); + let bytes = fs::read(path.as_ref().unwrap().path()); + let res = Automerge::load(&bytes.unwrap()); + assert!(res.is_err()); + } +} + +fn fixture(name: &str) -> Vec { + fs::read("./tests/fixtures/".to_owned() + name).unwrap() +} + +#[test] +fn overlong_leb() { + // the value metadata says "2", but the LEB is only 1-byte long and there's an extra 0 + assert!(Automerge::load(&fixture("counter_value_has_incorrect_meta.automerge")).is_err()); + // the LEB is overlong (using 2 bytes where one would have sufficed) + assert!(Automerge::load(&fixture("counter_value_is_overlong.automerge")).is_err()); + // the LEB is correct + assert!(Automerge::load(&fixture("counter_value_is_ok.automerge")).is_ok()); +} + +#[test] +fn load() { + fn check_fixture(name: &str) { + let doc = Automerge::load(&fixture(name)).unwrap(); + let map_id = doc.get(ROOT, "a").unwrap().unwrap().1; + 
assert_eq!(doc.get(map_id, "a").unwrap().unwrap().0, "b".into()); + } + + check_fixture("two_change_chunks.automerge"); + check_fixture("two_change_chunks_compressed.automerge"); + check_fixture("two_change_chunks_out_of_order.automerge"); +} + +#[test] +fn negative_64() { + let mut doc = Automerge::new(); + assert!(doc.transact(|d| { d.put(ROOT, "a", -64_i64) }).is_ok()) +} + +#[test] +fn obj_id_64bits() { + // this change has an opId of 2**42, which when cast to a 32-bit int gives 0. + // The file should either fail to load (a limit of ~4 billion ops per doc seems reasonable), or be handled correctly. + if let Ok(doc) = Automerge::load(&fixture("64bit_obj_id_change.automerge")) { + let map_id = doc.get(ROOT, "a").unwrap().unwrap().1; + assert!(map_id != ROOT) + } + + // this fixture is the same as the above, but as a document chunk. + if let Ok(doc) = Automerge::load(&fixture("64bit_obj_id_doc.automerge")) { + let map_id = doc.get(ROOT, "a").unwrap().unwrap().1; + assert!(map_id != ROOT) + } +} + +#[test] +fn bad_change_on_optree_node_boundary() { + let mut doc = Automerge::new(); + doc.transact::<_, _, AutomergeError>(|d| { + d.put(ROOT, "a", "z")?; + d.put(ROOT, "b", 0)?; + d.put(ROOT, "c", 0)?; + Ok(()) + }) + .unwrap(); + let iterations = 15_u64; + for i in 0_u64..iterations { + doc.transact::<_, _, AutomergeError>(|d| { + let s = "a".repeat(i as usize); + d.put(ROOT, "a", s)?; + d.put(ROOT, "b", i + 1)?; + d.put(ROOT, "c", i + 1)?; + Ok(()) + }) + .unwrap(); + } + let mut doc2 = Automerge::load(doc.save().as_slice()).unwrap(); + doc.transact::<_, _, AutomergeError>(|d| { + let i = iterations + 2; + let s = "a".repeat(i as usize); + d.put(ROOT, "a", s)?; + d.put(ROOT, "b", i)?; + d.put(ROOT, "c", i)?; + Ok(()) + }) + .unwrap(); + let change = doc.get_changes(&doc2.get_heads()).unwrap(); + doc2.apply_changes(change.into_iter().cloned().collect::>()) + .unwrap(); + Automerge::load(doc2.save().as_slice()).unwrap(); +} diff --git a/deny.toml b/rust/deny.toml 
similarity index 92% rename from deny.toml rename to rust/deny.toml index f6985357..473cdae8 100644 --- a/deny.toml +++ b/rust/deny.toml @@ -46,7 +46,6 @@ notice = "warn" # output a note when they are encountered. ignore = [ #"RUSTSEC-0000-0000", - "RUSTSEC-2021-0127", # serde_cbor is unmaintained, but we only use it in criterion for benchmarks ] # Threshold for security vulnerabilities, any vulnerability with a CVSS score # lower than the range specified will be ignored. Note that ignored advisories @@ -100,10 +99,6 @@ confidence-threshold = 0.8 # Allow 1 or more licenses on a per-crate basis, so that particular licenses # aren't accepted for every possible crate as with the normal allow list exceptions = [ - # this is a LGPL like license in the CLI - # since this is an application not a library people would link to it should be fine - { allow = ["EPL-2.0"], name = "colored_json" }, - # The Unicode-DFS--2016 license is necessary for unicode-ident because they # use data from the unicode tables to generate the tables which are # included in the application. We do not distribute those data files so @@ -115,6 +110,9 @@ exceptions = [ # should be revied more fully before release { allow = ["MPL-2.0"], name = "cbindgen" }, { allow = ["BSD-3-Clause"], name = "instant" }, + + # we only use prettytable in tests + { allow = ["BSD-3-Clause"], name = "prettytable" }, ] # Some crates don't have (easily) machine readable licensing information, @@ -177,21 +175,20 @@ deny = [ ] # Certain crates/versions that will be skipped when doing duplicate detection. 
skip = [ - # These are transitive depdendencies of criterion, which is only included for benchmarking anyway - { name = "itoa", version = "0.4.8" }, - { name = "textwrap", version = "0.11.0" }, - { name = "clap", version = "2.34.0" }, - - # These are transitive depdendencies of cbindgen - { name = "strsim", version = "0.8.0" }, - { name = "heck", version = "0.3.3" }, + # duct, which we only depend on for integration tests in automerge-cli, + # pulls in a version of os_pipe which in turn pulls in a version of + # windows-sys which is different to the version in pulled in by is-terminal. + # This is fine to ignore for now because it doesn't end up in downstream + # dependencies. + { name = "windows-sys", version = "0.42.0" } ] # Similarly to `skip` allows you to skip certain crates during duplicate # detection. Unlike skip, it also includes the entire tree of transitive # dependencies starting at the specified crate, up to a certain depth, which is # by default infinite skip-tree = [ - #{ name = "ansi_term", version = "=0.11.0", depth = 20 }, + # // We only ever use criterion in benchmarks + { name = "criterion", version = "0.4.0", depth=10}, ] # This section is considered when running `cargo deny check sources`. 
diff --git a/edit-trace/.gitignore b/rust/edit-trace/.gitignore similarity index 90% rename from edit-trace/.gitignore rename to rust/edit-trace/.gitignore index bf54725a..55778aca 100644 --- a/edit-trace/.gitignore +++ b/rust/edit-trace/.gitignore @@ -3,3 +3,4 @@ Cargo.lock node_modules yarn.lock flamegraph.svg +/prof diff --git a/edit-trace/Cargo.toml b/rust/edit-trace/Cargo.toml similarity index 93% rename from edit-trace/Cargo.toml rename to rust/edit-trace/Cargo.toml index 0107502b..eaebde46 100644 --- a/edit-trace/Cargo.toml +++ b/rust/edit-trace/Cargo.toml @@ -6,7 +6,7 @@ license = "MIT" [dependencies] automerge = { path = "../automerge" } -criterion = "0.3.5" +criterion = "0.4.0" json = "0.12.4" rand = "^0.8" diff --git a/edit-trace/Makefile b/rust/edit-trace/Makefile similarity index 100% rename from edit-trace/Makefile rename to rust/edit-trace/Makefile diff --git a/edit-trace/README.md b/rust/edit-trace/README.md similarity index 100% rename from edit-trace/README.md rename to rust/edit-trace/README.md diff --git a/edit-trace/automerge-1.0.js b/rust/edit-trace/automerge-1.0.js similarity index 100% rename from edit-trace/automerge-1.0.js rename to rust/edit-trace/automerge-1.0.js diff --git a/edit-trace/automerge-js.js b/rust/edit-trace/automerge-js.js similarity index 54% rename from edit-trace/automerge-js.js rename to rust/edit-trace/automerge-js.js index eae08634..2956d5d5 100644 --- a/edit-trace/automerge-js.js +++ b/rust/edit-trace/automerge-js.js @@ -1,12 +1,9 @@ // Apply the paper editing trace to an Automerge.Text object, one char at a time const { edits, finalText } = require('./editing-trace') -const Automerge = require('../automerge-js') -const wasm_api = require('../automerge-wasm') +const Automerge = require('../../javascript') -Automerge.use(wasm_api) - -const start = new Date() -let state = Automerge.from({text: new Automerge.Text()}) +let start = new Date() +let state = Automerge.from({text: ""}) state = Automerge.change(state, doc => { 
for (let i = 0; i < edits.length; i++) { @@ -14,14 +11,19 @@ state = Automerge.change(state, doc => { console.log(`Processed ${i} edits in ${new Date() - start} ms`) } let edit = edits[i] - if (edit[1] > 0) doc.text.deleteAt(edit[0], edit[1]) - if (edit.length > 2) doc.text.insertAt(edit[0], ...edit.slice(2)) + Automerge.splice(doc, 'text', ... edit) } }) - -let _ = Automerge.save(state) console.log(`Done in ${new Date() - start} ms`) -if (state.text.join('') !== finalText) { +start = new Date() +let bytes = Automerge.save(state) +console.log(`Save in ${new Date() - start} ms`) + +start = new Date() +let _load = Automerge.load(bytes) +console.log(`Load in ${new Date() - start} ms`) + +if (state.text !== finalText) { throw new RangeError('ERROR: final text did not match expectation') } diff --git a/edit-trace/automerge-wasm.js b/rust/edit-trace/automerge-wasm.js similarity index 63% rename from edit-trace/automerge-wasm.js rename to rust/edit-trace/automerge-wasm.js index e0f1454d..8f6f51af 100644 --- a/edit-trace/automerge-wasm.js +++ b/rust/edit-trace/automerge-wasm.js @@ -4,6 +4,8 @@ const Automerge = require('../automerge-wasm') const start = new Date() let doc = Automerge.create(); +doc.enablePatches(true) +let mat = doc.materialize("/") let text = doc.putObject("_root", "text", "", "text") for (let i = 0; i < edits.length; i++) { @@ -14,14 +16,25 @@ for (let i = 0; i < edits.length; i++) { doc.splice(text, ...edit) } -let _ = doc.save() - console.log(`Done in ${new Date() - start} ms`) let t_time = new Date() +let saved = doc.save() +console.log(`doc.save in ${new Date() - t_time} ms`) + +t_time = new Date() +Automerge.load(saved) +console.log(`doc.load in ${new Date() - t_time} ms`) + +t_time = new Date() let t = doc.text(text); console.log(`doc.text in ${new Date() - t_time} ms`) +t_time = new Date() +t = doc.text(text); +mat = doc.applyPatches(mat) +console.log(`doc.applyPatches() in ${new Date() - t_time} ms`) + if (doc.text(text) !== finalText) { throw 
new RangeError('ERROR: final text did not match expectation') } diff --git a/edit-trace/baseline.js b/rust/edit-trace/baseline.js similarity index 100% rename from edit-trace/baseline.js rename to rust/edit-trace/baseline.js diff --git a/edit-trace/benches/main.rs b/rust/edit-trace/benches/main.rs similarity index 100% rename from edit-trace/benches/main.rs rename to rust/edit-trace/benches/main.rs diff --git a/edit-trace/editing-trace.js b/rust/edit-trace/editing-trace.js similarity index 100% rename from edit-trace/editing-trace.js rename to rust/edit-trace/editing-trace.js diff --git a/edit-trace/edits.json b/rust/edit-trace/edits.json similarity index 100% rename from edit-trace/edits.json rename to rust/edit-trace/edits.json diff --git a/edit-trace/package.json b/rust/edit-trace/package.json similarity index 69% rename from edit-trace/package.json rename to rust/edit-trace/package.json index a9d1e0e0..acd37ac0 100644 --- a/edit-trace/package.json +++ b/rust/edit-trace/package.json @@ -4,9 +4,9 @@ "main": "wasm-text.js", "license": "MIT", "scripts": { - "wasm": "0x -D prof wasm-text.js" + "wasm": "0x -D prof automerge-wasm.js" }, "devDependencies": { - "0x": "^4.11.0" + "0x": "^5.4.1" } } diff --git a/edit-trace/src/main.rs b/rust/edit-trace/src/main.rs similarity index 80% rename from edit-trace/src/main.rs rename to rust/edit-trace/src/main.rs index f6924c7d..9724a109 100644 --- a/edit-trace/src/main.rs +++ b/rust/edit-trace/src/main.rs @@ -1,4 +1,5 @@ use automerge::ObjType; +use automerge::ReadDoc; use automerge::{transaction::Transactable, Automerge, AutomergeError, ROOT}; use std::time::Instant; @@ -28,16 +29,18 @@ fn main() -> Result<(), AutomergeError> { tx.splice_text(&text, pos, del, &vals)?; } tx.commit(); + println!("Done in {} ms", now.elapsed().as_millis()); let save = Instant::now(); - let _bytes = doc.save(); + let bytes = doc.save(); println!("Saved in {} ms", save.elapsed().as_millis()); - /* - let load = Instant::now(); - let _ = 
Automerge::load(&bytes).unwrap(); - println!("Loaded in {} ms", load.elapsed().as_millis()); - */ + let load = Instant::now(); + let _ = Automerge::load(&bytes).unwrap(); + println!("Loaded in {} ms", load.elapsed().as_millis()); + + let get_txt = Instant::now(); + doc.text(&text)?; + println!("Text in {} ms", get_txt.elapsed().as_millis()); - println!("Done in {} ms", now.elapsed().as_millis()); Ok(()) } diff --git a/scripts/ci/advisory b/scripts/ci/advisory index 07e8c72e..6da4a578 100755 --- a/scripts/ci/advisory +++ b/scripts/ci/advisory @@ -1,6 +1,7 @@ #!/usr/bin/env bash set -eoux pipefail +cd rust cargo deny --version cargo deny check advisories cargo deny check licenses diff --git a/scripts/ci/build-test b/scripts/ci/build-test index dbd89f5d..de592f7e 100755 --- a/scripts/ci/build-test +++ b/scripts/ci/build-test @@ -1,6 +1,7 @@ #!/usr/bin/env bash set -eoux pipefail +cd rust cargo build --workspace --all-features RUST_LOG=error cargo test --workspace --all-features diff --git a/scripts/ci/cmake-build b/scripts/ci/cmake-build index 41357caa..25a69756 100755 --- a/scripts/ci/cmake-build +++ b/scripts/ci/cmake-build @@ -1,18 +1,19 @@ #!/usr/bin/env bash set -eoux pipefail -THIS_SCRIPT=$(dirname "$0"); +# see https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself +THIS_SCRIPT="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" # \note CMake's default build types are "Debug", "MinSizeRel", "Release" and # "RelWithDebInfo" but custom ones can also be defined so we pass it verbatim. BUILD_TYPE=$1; LIB_TYPE=$2; -if [ "${LIB_TYPE,,}" == "shared" ]; then +if [ "$(echo "${LIB_TYPE}" | tr '[:upper:]' '[:lower:]')" == "shared" ]; then SHARED_TOGGLE="ON" else SHARED_TOGGLE="OFF" fi -C_PROJECT=$THIS_SCRIPT/../../automerge-c; +C_PROJECT=$THIS_SCRIPT/../../rust/automerge-c; mkdir -p $C_PROJECT/build; cd $C_PROJECT/build; cmake --log-level=ERROR -B . -S .. 
-DCMAKE_BUILD_TYPE=$BUILD_TYPE -DBUILD_SHARED_LIBS=$SHARED_TOGGLE; -cmake --build . --target test_automerge; +cmake --build . --target automerge_test; diff --git a/scripts/ci/cmake-docs b/scripts/ci/cmake-docs deleted file mode 100755 index 7f29a311..00000000 --- a/scripts/ci/cmake-docs +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env bash - -set -eoux pipefail - -mkdir -p automerge-c/build -cd automerge-c/build -cmake -B . -S .. -DBUILD_TESTING=OFF -cmake --build . --target automerge_docs - -echo "Try opening automerge-c/build/src/html/index.html" diff --git a/scripts/ci/deno_tests b/scripts/ci/deno_tests new file mode 100755 index 00000000..9f297557 --- /dev/null +++ b/scripts/ci/deno_tests @@ -0,0 +1,21 @@ +#!/usr/bin/env bash +set -eou pipefail +# see https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself +THIS_SCRIPT="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" +WASM_PROJECT=$THIS_SCRIPT/../../rust/automerge-wasm; +JS_PROJECT=$THIS_SCRIPT/../../javascript; +E2E_PROJECT=$THIS_SCRIPT/../../javascript/e2e; + +echo "building wasm and js" +yarn --cwd $E2E_PROJECT install; +yarn --cwd $E2E_PROJECT e2e buildjs; +cp $WASM_PROJECT/index.d.ts $WASM_PROJECT/deno/; +sed -i '1i /// ' $WASM_PROJECT/deno/automerge_wasm.js; + +echo "Running Wasm Deno tests"; +deno test $WASM_PROJECT/deno-tests/deno.ts --allow-read; + +echo "Running JS Deno tests"; +ROOT_MODULE=$WASM_PROJECT/deno yarn --cwd $JS_PROJECT deno:build; +yarn --cwd $JS_PROJECT deno:test; + diff --git a/scripts/ci/fmt b/scripts/ci/fmt index d3d7e28c..27235f92 100755 --- a/scripts/ci/fmt +++ b/scripts/ci/fmt @@ -1,4 +1,5 @@ #!/usr/bin/env bash set -eoux pipefail +cd rust cargo fmt -- --check diff --git a/scripts/ci/fmt_js b/scripts/ci/fmt_js new file mode 100755 index 00000000..8f387b6a --- /dev/null +++ b/scripts/ci/fmt_js @@ -0,0 +1,7 @@ +#!/usr/bin/env bash +set -eoux pipefail + +# see 
https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself +THIS_SCRIPT="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" +yarn --cwd $THIS_SCRIPT/../../javascript prettier -c . + diff --git a/scripts/ci/js_tests b/scripts/ci/js_tests index b203dea4..68205a33 100755 --- a/scripts/ci/js_tests +++ b/scripts/ci/js_tests @@ -1,20 +1,15 @@ -set -e - -THIS_SCRIPT=$(dirname "$0"); -WASM_PROJECT=$THIS_SCRIPT/../../automerge-wasm; -JS_PROJECT=$THIS_SCRIPT/../../automerge-js; - -yarn --cwd $WASM_PROJECT install; -# This will take care of running wasm-pack -yarn --cwd $WASM_PROJECT build; -# If the dependencies are already installed we delete automerge-wasm. This makes -# this script usable for iterative development. -if [ -d $JS_PROJECT/node_modules/automerge-wasm ]; then - rm -rf $JS_PROJECT/node_modules/automerge-wasm -fi -# --check-files forces yarn to check if the local dep has changed -yarn --cwd $JS_PROJECT install --check-files; -yarn --cwd $JS_PROJECT test; - +#!/usr/bin/env bash +set -eoux pipefail +# see https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself +THIS_SCRIPT="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" +WASM_PROJECT=$THIS_SCRIPT/../../rust/automerge-wasm; +JS_PROJECT=$THIS_SCRIPT/../../javascript; +E2E_PROJECT=$THIS_SCRIPT/../../javascript/e2e; +yarn --cwd $E2E_PROJECT install; +# This will build the automerge-wasm project, publish it to a local NPM +# repository, then run `yarn build` in the `javascript` directory with +# the local registry +yarn --cwd $E2E_PROJECT e2e buildjs; +yarn --cwd $JS_PROJECT test diff --git a/scripts/ci/lint b/scripts/ci/lint index 163b245d..87a16765 100755 --- a/scripts/ci/lint +++ b/scripts/ci/lint @@ -1,6 +1,10 @@ #!/usr/bin/env bash set -eoux pipefail +# see https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself +THIS_SCRIPT="$( cd -- "$(dirname "$0")" 
>/dev/null 2>&1 ; pwd -P )" + +cd $THIS_SCRIPT/../../rust # Force clippy to consider all local sources # https://github.com/rust-lang/rust-clippy/issues/4612 find . -name "*.rs" -not -path "./target/*" -exec touch "{}" + diff --git a/scripts/ci/run b/scripts/ci/run index 423b995c..aebfe4c4 100755 --- a/scripts/ci/run +++ b/scripts/ci/run @@ -2,11 +2,12 @@ set -eou pipefail ./scripts/ci/fmt +./scripts/ci/fmt_js ./scripts/ci/lint ./scripts/ci/build-test ./scripts/ci/rust-docs ./scripts/ci/advisory ./scripts/ci/wasm_tests +./scripts/ci/deno_tests ./scripts/ci/js_tests ./scripts/ci/cmake-build Release static -./scripts/ci/cmake-docs diff --git a/scripts/ci/rust-docs b/scripts/ci/rust-docs index 647880ce..4be0ed9a 100755 --- a/scripts/ci/rust-docs +++ b/scripts/ci/rust-docs @@ -1,5 +1,8 @@ #!/usr/bin/env bash set -eoux pipefail +# see https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself +THIS_SCRIPT="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" +cd $THIS_SCRIPT/../../rust RUSTDOCFLAGS="-D rustdoc::broken-intra-doc-links -D warnings" \ cargo doc --no-deps --workspace --document-private-items diff --git a/scripts/ci/wasm_tests b/scripts/ci/wasm_tests index 778e1e1f..fac344d8 100755 --- a/scripts/ci/wasm_tests +++ b/scripts/ci/wasm_tests @@ -1,5 +1,6 @@ -THIS_SCRIPT=$(dirname "$0"); -WASM_PROJECT=$THIS_SCRIPT/../../automerge-wasm; +# see https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself +THIS_SCRIPT="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" +WASM_PROJECT=$THIS_SCRIPT/../../rust/automerge-wasm; yarn --cwd $WASM_PROJECT install; yarn --cwd $WASM_PROJECT build;