diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 08133091..8519ac5e 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -2,10 +2,10 @@ name: CI
on:
push:
branches:
- - main
+ - main
pull_request:
branches:
- - main
+ - main
jobs:
fmt:
runs-on: ubuntu-latest
@@ -14,7 +14,8 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- toolchain: stable
+ toolchain: 1.67.0
+ default: true
components: rustfmt
- uses: Swatinem/rust-cache@v1
- run: ./scripts/ci/fmt
@@ -27,7 +28,8 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- toolchain: stable
+ toolchain: 1.67.0
+ default: true
components: clippy
- uses: Swatinem/rust-cache@v1
- run: ./scripts/ci/lint
@@ -40,9 +42,14 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- toolchain: stable
+ toolchain: 1.67.0
+ default: true
- uses: Swatinem/rust-cache@v1
- - run: ./scripts/ci/docs
+ - name: Build rust docs
+ run: ./scripts/ci/rust-docs
+ shell: bash
+ - name: Install doxygen
+ run: sudo apt-get install -y doxygen
shell: bash
cargo-deny:
@@ -57,23 +64,50 @@ jobs:
- uses: actions/checkout@v2
- uses: EmbarkStudios/cargo-deny-action@v1
with:
+ arguments: '--manifest-path ./rust/Cargo.toml'
command: check ${{ matrix.checks }}
wasm_tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- - name: Install wasm-pack
- run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
+ - name: Install wasm-bindgen-cli
+ run: cargo install wasm-bindgen-cli wasm-opt
+ - name: Install wasm32 target
+ run: rustup target add wasm32-unknown-unknown
- name: run tests
run: ./scripts/ci/wasm_tests
+ deno_tests:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - uses: denoland/setup-deno@v1
+ with:
+ deno-version: v1.x
+ - name: Install wasm-bindgen-cli
+ run: cargo install wasm-bindgen-cli wasm-opt
+ - name: Install wasm32 target
+ run: rustup target add wasm32-unknown-unknown
+ - name: run tests
+ run: ./scripts/ci/deno_tests
+
+ js_fmt:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - name: install
+ run: yarn global add prettier
+ - name: format
+ run: prettier -c javascript/.prettierrc javascript
js_tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- - name: Install wasm-pack
- run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
+ - name: Install wasm-bindgen-cli
+ run: cargo install wasm-bindgen-cli wasm-opt
+ - name: Install wasm32 target
+ run: rustup target add wasm32-unknown-unknown
- name: run tests
run: ./scripts/ci/js_tests
@@ -84,7 +118,8 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- toolchain: stable
+ toolchain: nightly-2023-01-26
+ default: true
- uses: Swatinem/rust-cache@v1
- name: Install CMocka
run: sudo apt-get install -y libcmocka-dev
@@ -92,6 +127,8 @@ jobs:
uses: jwlawson/actions-setup-cmake@v1.12
with:
cmake-version: latest
+ - name: Install rust-src
+ run: rustup component add rust-src
- name: Build and test C bindings
run: ./scripts/ci/cmake-build Release Static
shell: bash
@@ -101,15 +138,14 @@ jobs:
strategy:
matrix:
toolchain:
- - stable
- - nightly
- continue-on-error: ${{ matrix.toolchain == 'nightly' }}
+ - 1.67.0
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: ${{ matrix.toolchain }}
+ default: true
- uses: Swatinem/rust-cache@v1
- run: ./scripts/ci/build-test
shell: bash
@@ -121,7 +157,8 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- toolchain: stable
+ toolchain: 1.67.0
+ default: true
- uses: Swatinem/rust-cache@v1
- run: ./scripts/ci/build-test
shell: bash
@@ -133,8 +170,8 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- toolchain: stable
+ toolchain: 1.67.0
+ default: true
- uses: Swatinem/rust-cache@v1
- run: ./scripts/ci/build-test
shell: bash
-
diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml
index 1e928e6e..b501d526 100644
--- a/.github/workflows/docs.yaml
+++ b/.github/workflows/docs.yaml
@@ -23,22 +23,30 @@ jobs:
uses: Swatinem/rust-cache@v1
- name: Clean docs dir
+ run: rm -rf docs
+ shell: bash
+
+ - name: Clean Rust docs dir
uses: actions-rs/cargo@v1
with:
command: clean
- args: --doc
+ args: --manifest-path ./rust/Cargo.toml --doc
- - name: Build docs
+ - name: Build Rust docs
uses: actions-rs/cargo@v1
with:
command: doc
- args: --workspace --all-features --no-deps
+ args: --manifest-path ./rust/Cargo.toml --workspace --all-features --no-deps
+
+ - name: Move Rust docs
+ run: mkdir -p docs && mv rust/target/doc/* docs/.
+ shell: bash
- name: Configure root page
- run: echo ' ' > target/doc/index.html
+ run: echo ' ' > docs/index.html
- name: Deploy docs
uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
- publish_dir: ./target/doc
+ publish_dir: ./docs
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
new file mode 100644
index 00000000..762671ff
--- /dev/null
+++ b/.github/workflows/release.yaml
@@ -0,0 +1,214 @@
+name: Release
+on:
+ push:
+ branches:
+ - main
+
+jobs:
+ check_if_wasm_version_upgraded:
+ name: Check if WASM version has been upgraded
+ runs-on: ubuntu-latest
+ outputs:
+ wasm_version: ${{ steps.version-updated.outputs.current-package-version }}
+ wasm_has_updated: ${{ steps.version-updated.outputs.has-updated }}
+ steps:
+ - uses: JiPaix/package-json-updated-action@v1.0.5
+ id: version-updated
+ with:
+ path: rust/automerge-wasm/package.json
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ publish-wasm:
+ name: Publish WASM package
+ runs-on: ubuntu-latest
+ needs:
+ - check_if_wasm_version_upgraded
+ # We create release only if the version in the package.json has been upgraded
+ if: needs.check_if_wasm_version_upgraded.outputs.wasm_has_updated == 'true'
+ steps:
+ - uses: actions/setup-node@v3
+ with:
+ node-version: '16.x'
+ registry-url: 'https://registry.npmjs.org'
+ - uses: denoland/setup-deno@v1
+ - uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+ ref: ${{ github.ref }}
+ - name: Get rid of local github workflows
+ run: rm -r .github/workflows
+ - name: Remove tmp_branch if it exists
+ run: git push origin :tmp_branch || true
+ - run: git checkout -b tmp_branch
+ - name: Install wasm-bindgen-cli
+ run: cargo install wasm-bindgen-cli wasm-opt
+ - name: Install wasm32 target
+ run: rustup target add wasm32-unknown-unknown
+ - name: run wasm js tests
+ id: wasm_js_tests
+ run: ./scripts/ci/wasm_tests
+ - name: run wasm deno tests
+ id: wasm_deno_tests
+ run: ./scripts/ci/deno_tests
+ - name: build release
+ id: build_release
+ run: |
+ npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm run release
+ - name: Collate deno release files
+ if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
+ run: |
+ mkdir $GITHUB_WORKSPACE/deno_wasm_dist
+ cp $GITHUB_WORKSPACE/rust/automerge-wasm/deno/* $GITHUB_WORKSPACE/deno_wasm_dist
+ cp $GITHUB_WORKSPACE/rust/automerge-wasm/index.d.ts $GITHUB_WORKSPACE/deno_wasm_dist
+ cp $GITHUB_WORKSPACE/rust/automerge-wasm/README.md $GITHUB_WORKSPACE/deno_wasm_dist
+ cp $GITHUB_WORKSPACE/rust/automerge-wasm/LICENSE $GITHUB_WORKSPACE/deno_wasm_dist
+ sed -i '1i /// ' $GITHUB_WORKSPACE/deno_wasm_dist/automerge_wasm.js
+ - name: Create npm release
+ if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
+ run: |
+ if [ "$(npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm show . version)" = "$VERSION" ]; then
+ echo "This version is already published"
+ exit 0
+ fi
+ EXTRA_ARGS="--access public"
+ if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then
+ echo "Is pre-release version"
+ EXTRA_ARGS="$EXTRA_ARGS --tag next"
+ fi
+ if [ "$NODE_AUTH_TOKEN" = "" ]; then
+ echo "Can't publish on NPM, You need a NPM_TOKEN secret."
+ false
+ fi
+ npm publish $GITHUB_WORKSPACE/rust/automerge-wasm $EXTRA_ARGS
+ env:
+ NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
+ VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
+ - name: Commit wasm deno release files
+ run: |
+ git config --global user.name "actions"
+ git config --global user.email actions@github.com
+ git add $GITHUB_WORKSPACE/deno_wasm_dist
+ git commit -am "Add deno release files"
+ git push origin tmp_branch
+ - name: Tag wasm release
+ if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
+ uses: softprops/action-gh-release@v1
+ with:
+ name: Automerge Wasm v${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
+ tag_name: js/automerge-wasm-${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
+ target_commitish: tmp_branch
+ generate_release_notes: false
+ draft: false
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ - name: Remove tmp_branch
+ run: git push origin :tmp_branch
+ check_if_js_version_upgraded:
+ name: Check if JS version has been upgraded
+ runs-on: ubuntu-latest
+ outputs:
+ js_version: ${{ steps.version-updated.outputs.current-package-version }}
+ js_has_updated: ${{ steps.version-updated.outputs.has-updated }}
+ steps:
+ - uses: JiPaix/package-json-updated-action@v1.0.5
+ id: version-updated
+ with:
+ path: javascript/package.json
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ publish-js:
+ name: Publish JS package
+ runs-on: ubuntu-latest
+ needs:
+ - check_if_js_version_upgraded
+ - check_if_wasm_version_upgraded
+ - publish-wasm
+ # We create release only if the version in the package.json has been upgraded and after the WASM release
+ if: |
+ (always() && ! cancelled()) &&
+ (needs.publish-wasm.result == 'success' || needs.publish-wasm.result == 'skipped') &&
+ needs.check_if_js_version_upgraded.outputs.js_has_updated == 'true'
+ steps:
+ - uses: actions/setup-node@v3
+ with:
+ node-version: '16.x'
+ registry-url: 'https://registry.npmjs.org'
+ - uses: denoland/setup-deno@v1
+ - uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+ ref: ${{ github.ref }}
+ - name: Get rid of local github workflows
+ run: rm -r .github/workflows
+ - name: Remove js_tmp_branch if it exists
+ run: git push origin :js_tmp_branch || true
+ - run: git checkout -b js_tmp_branch
+ - name: check js formatting
+ run: |
+ yarn global add prettier
+ prettier -c javascript/.prettierrc javascript
+ - name: run js tests
+ id: js_tests
+ run: |
+ cargo install wasm-bindgen-cli wasm-opt
+ rustup target add wasm32-unknown-unknown
+ ./scripts/ci/js_tests
+ - name: build js release
+ id: build_release
+ run: |
+ npm --prefix $GITHUB_WORKSPACE/javascript run build
+ - name: build js deno release
+ id: build_deno_release
+ run: |
+ VERSION=$WASM_VERSION npm --prefix $GITHUB_WORKSPACE/javascript run deno:build
+ env:
+ WASM_VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
+ - name: run deno tests
+ id: deno_tests
+ run: |
+ npm --prefix $GITHUB_WORKSPACE/javascript run deno:test
+ - name: Collate deno release files
+ if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
+ run: |
+ mkdir $GITHUB_WORKSPACE/deno_js_dist
+ cp $GITHUB_WORKSPACE/javascript/deno_dist/* $GITHUB_WORKSPACE/deno_js_dist
+ - name: Create npm release
+ if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
+ run: |
+ if [ "$(npm --prefix $GITHUB_WORKSPACE/javascript show . version)" = "$VERSION" ]; then
+ echo "This version is already published"
+ exit 0
+ fi
+ EXTRA_ARGS="--access public"
+ if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then
+ echo "Is pre-release version"
+ EXTRA_ARGS="$EXTRA_ARGS --tag next"
+ fi
+ if [ "$NODE_AUTH_TOKEN" = "" ]; then
+ echo "Can't publish on NPM, You need a NPM_TOKEN secret."
+ false
+ fi
+ npm publish $GITHUB_WORKSPACE/javascript $EXTRA_ARGS
+ env:
+ NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
+ VERSION: ${{ needs.check_if_js_version_upgraded.outputs.js_version }}
+ - name: Commit js deno release files
+ run: |
+ git config --global user.name "actions"
+ git config --global user.email actions@github.com
+ git add $GITHUB_WORKSPACE/deno_js_dist
+ git commit -am "Add deno js release files"
+ git push origin js_tmp_branch
+ - name: Tag JS release
+ if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
+ uses: softprops/action-gh-release@v1
+ with:
+ name: Automerge v${{ needs.check_if_js_version_upgraded.outputs.js_version }}
+ tag_name: js/automerge-${{ needs.check_if_js_version_upgraded.outputs.js_version }}
+ target_commitish: js_tmp_branch
+ generate_release_notes: false
+ draft: false
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ - name: Remove js_tmp_branch
+ run: git push origin :js_tmp_branch
diff --git a/.gitignore b/.gitignore
index eca9df3f..f77865d0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,6 @@
-/target
/.direnv
perf.*
/Cargo.lock
build/
+.vim/*
+/target
diff --git a/Makefile b/Makefile
deleted file mode 100644
index 9f8db2d1..00000000
--- a/Makefile
+++ /dev/null
@@ -1,13 +0,0 @@
-rust:
- cd automerge && cargo test
-
-wasm:
- cd automerge-wasm && yarn
- cd automerge-wasm && yarn build
- cd automerge-wasm && yarn test
- cd automerge-wasm && yarn link
-
-js: wasm
- cd automerge-js && yarn
- cd automerge-js && yarn link "automerge-wasm"
- cd automerge-js && yarn test
diff --git a/README.md b/README.md
index 4c58e8d1..ad174da4 100644
--- a/README.md
+++ b/README.md
@@ -1,110 +1,147 @@
-# Automerge RS
+# Automerge
[](https://automerge.org/)
[](https://automerge.org/automerge-rs/automerge/)
[](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml)
+[](https://github.com/automerge/automerge-rs/actions/workflows/docs.yaml)
-This is a rust implementation of the [Automerge](https://github.com/automerge/automerge) file format and network protocol.
+Automerge is a library which provides fast implementations of several different
+CRDTs, a compact compression format for these CRDTs, and a sync protocol for
+efficiently transmitting those changes over the network. The objective of the
+project is to support [local-first](https://www.inkandswitch.com/local-first/) applications in the same way that relational
+databases support server applications - by providing mechanisms for persistence
+which allow application developers to avoid thinking about hard distributed
+computing problems. Automerge aims to be PostgreSQL for your local-first app.
-If you are looking for the origional `automerge-rs` project that can be used as a wasm backend to the javascript implementation, it can be found [here](https://github.com/automerge/automerge-rs/tree/automerge-1.0).
+If you're looking for documentation on the JavaScript implementation take a look
+at https://automerge.org/docs/hello/. There are other implementations in both
+Rust and C, but they are at an earlier stage and don't have documentation yet. You can find
+them in `rust/automerge` and `rust/automerge-c` if you are comfortable
+reading the code and tests to figure out how to use them.
+
+If you're familiar with CRDTs and interested in the design of Automerge in
+particular take a look at https://automerge.org/docs/how-it-works/backend/
+
+Finally, if you want to talk to us about this project please [join the
+Slack](https://join.slack.com/t/automerge/shared_invite/zt-e4p3760n-kKh7r3KRH1YwwNfiZM8ktw)
## Status
-This project has 4 components:
+This project is formed of a core Rust implementation which is exposed via FFI in
+javascript+WASM, C, and soon other languages. Alex
+([@alexjg](https://github.com/alexjg/)) is working full time on maintaining
+automerge, other members of Ink and Switch are also contributing time and there
+are several other maintainers. The focus is currently on shipping the new JS
+package. We expect to be iterating the API and adding new features over the next
+six months so there will likely be several major version bumps in all packages
+in that time.
-1. _automerge_ - a rust implementation of the library. This project is the most mature and being used in a handful of small applications.
-2. _automerge-wasm_ - a js/wasm interface to the underlying rust library. This api is generally mature and in use in a handful of projects as well.
-3. _automerge-js_ - this is a javascript library using the wasm interface to export the same public api of the primary automerge project. Currently this project passes all of automerge's tests but has not been used in any real project or packaged as an NPM. Alpha testers welcome.
-4. _automerge-c_ - this is a c library intended to be an ffi integration point for all other languages. It is currently a work in progress and not yet ready for any testing.
+In general we try and respect semver.
-## How?
+### JavaScript
-The current iteration of automerge-rs is complicated to work with because it
-adopts the frontend/backend split architecture of the JS implementation. This
-architecture was necessary due to basic operations on the automerge opset being
-too slow to perform on the UI thread. Recently @orionz has been able to improve
-the performance to the point where the split is no longer necessary. This means
-we can adopt a much simpler mutable API.
+A stable release of the javascript package is currently available as
+`@automerge/automerge@2.0.0`, while pre-release versions of `2.0.1` are
+available as `2.0.1-alpha.n`. `2.0.1*` packages are also available for Deno at
+https://deno.land/x/automerge
-The architecture is now built around the `OpTree`. This is a data structure
-which supports efficiently inserting new operations and realising values of
-existing operations. Most interactions with the `OpTree` are in the form of
-implementations of `TreeQuery` - a trait which can be used to traverse the
-optree and producing state of some kind. User facing operations are exposed on
-an `Automerge` object, under the covers these operations typically instantiate
-some `TreeQuery` and run it over the `OpTree`.
+### Rust
-## Development
+The rust codebase is currently oriented around producing a performant backend
+for the Javascript wrapper and as such the API for Rust code is low level and
+not well documented. We will be returning to this over the next few months but
+for now you will need to be comfortable reading the tests and asking questions
+to figure out how to use it. If you are looking to build rust applications which
+use automerge you may want to look into
+[autosurgeon](https://github.com/alexjg/autosurgeon)
-Please feel free to open issues and pull requests.
+## Repository Organisation
-### Running CI
+- `./rust` - the Rust implementation and also the Rust components of
+ platform specific wrappers (e.g. `automerge-wasm` for the WASM API or
+ `automerge-c` for the C FFI bindings)
+- `./javascript` - The javascript library which uses `automerge-wasm`
+ internally but presents a more idiomatic javascript interface
+- `./scripts` - scripts which are useful for maintenance of the repository.
+ This includes the scripts which are run in CI.
+- `./img` - static assets for use in `.md` files
-The steps CI will run are all defined in `./scripts/ci`. Obviously CI will run
-everything when you submit a PR, but if you want to run everything locally
-before you push you can run `./scripts/ci/run` to run everything.
+## Building
-### Running the JS tests
+To build this codebase you will need:
-You will need to have [node](https://nodejs.org/en/), [yarn](https://yarnpkg.com/getting-started/install), [rust](https://rustup.rs/) and [wasm-pack](https://rustwasm.github.io/wasm-pack/installer/) installed.
+- `rust`
+- `node`
+- `yarn`
+- `cmake`
+- `cmocka`
-To build and test the rust library:
+You will also need to install the following with `cargo install`
-```shell
- $ cd automerge
- $ cargo test
+- `wasm-bindgen-cli`
+- `wasm-opt`
+- `cargo-deny`
+
+And ensure you have added the `wasm32-unknown-unknown` target for rust cross-compilation.
+
+The various subprojects (the rust code, the wrapper projects) have their own
+build instructions, but to run the tests that will be run in CI you can run
+`./scripts/ci/run`.
+
+### For macOS
+
+These instructions worked to build locally on macOS 13.1 (arm64) as of
+Nov 29th 2022.
+
+```bash
+# clone the repo
+git clone https://github.com/automerge/automerge-rs
+cd automerge-rs
+
+# install rustup
+curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
+
+# install homebrew
+/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
+
+# install cmake, node, cmocka
+brew install cmake node cmocka
+
+# install yarn
+npm install --global yarn
+
+# install javascript dependencies
+yarn --cwd ./javascript
+
+# install rust dependencies
+cargo install wasm-bindgen-cli wasm-opt cargo-deny
+
+# get nightly rust to produce optimized automerge-c builds
+rustup toolchain install nightly
+rustup component add rust-src --toolchain nightly
+
+# add wasm target in addition to current architecture
+rustup target add wasm32-unknown-unknown
+
+# Run ci script
+./scripts/ci/run
```
-To build and test the wasm library:
+If your build fails to find `cmocka.h` you may need to teach it about homebrew's
+installation location:
-```shell
- ## setup
- $ cd automerge-wasm
- $ yarn
-
- ## building or testing
- $ yarn build
- $ yarn test
-
- ## without this the js library wont automatically use changes
- $ yarn link
-
- ## cutting a release or doing benchmarking
- $ yarn release
+```
+export CPATH=/opt/homebrew/include
+export LIBRARY_PATH=/opt/homebrew/lib
+./scripts/ci/run
```
-To test the js library. This is where most of the tests reside.
+## Contributing
-```shell
- ## setup
- $ cd automerge-js
- $ yarn
- $ yarn link "automerge-wasm"
-
- ## testing
- $ yarn test
-```
-
-And finally, to build and test the C bindings with CMake:
-
-```shell
-## setup
-$ cd automerge-c
-$ mkdir -p build
-$ cd build
-$ cmake -S .. -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=OFF
-## building and testing
-$ cmake --build .
-```
-To add debugging symbols, replace `Release` with `Debug`.
-To build a shared library instead of a static one, replace `OFF` with `ON`.
-
-The C bindings can be built and tested on any platform for which CMake is
-available but the steps for doing so vary across platforms and are too numerous
-to list here.
-
-## Benchmarking
-
-The `edit-trace` folder has the main code for running the edit trace benchmarking.
+Please try and split your changes up into relatively independent commits which
+change one subsystem at a time and add good commit messages which describe what
+the change is and why you're making it (err on the side of longer commit
+messages). `git blame` should give future maintainers a good idea of why
+something is the way it is.
diff --git a/TODO.md b/TODO.md
deleted file mode 100644
index 646c0c20..00000000
--- a/TODO.md
+++ /dev/null
@@ -1,32 +0,0 @@
-### next steps:
- 1. C API
- 2. port rust command line tool
- 3. fast load
-
-### ergonomics:
- 1. value() -> () or something that into's a value
-
-### automerge:
- 1. single pass (fast) load
- 2. micro-patches / bare bones observation API / fully hydrated documents
-
-### future:
- 1. handle columns with unknown data in and out
- 2. branches with different indexes
-
-### Peritext
- 1. add mark / remove mark -- type, start/end elemid (inclusive,exclusive)
- 2. track any formatting ops that start or end on a character
- 3. ops right before the character, ops right after that character
- 4. query a single character - character, plus marks that start or end on that character
- what is its current formatting,
- what are the ops that include that in their span,
- None = same as last time, Set( bold, italic ),
- keep these on index
- 5. op probably belongs with the start character - possible packed at the beginning or end of the list
-
-### maybe:
- 1. tables
-
-### no:
- 1. cursors
diff --git a/automerge-c/.gitignore b/automerge-c/.gitignore
deleted file mode 100644
index cb544af0..00000000
--- a/automerge-c/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-automerge
-automerge.h
-automerge.o
diff --git a/automerge-c/CMakeLists.txt b/automerge-c/CMakeLists.txt
deleted file mode 100644
index 188780f9..00000000
--- a/automerge-c/CMakeLists.txt
+++ /dev/null
@@ -1,135 +0,0 @@
-cmake_minimum_required(VERSION 3.18 FATAL_ERROR)
-
-set(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake")
-
-# Parse the library name, project name and project version out of Cargo's TOML file.
-set(CARGO_LIB_SECTION OFF)
-
-set(LIBRARY_NAME "")
-
-set(CARGO_PKG_SECTION OFF)
-
-set(CARGO_PKG_NAME "")
-
-set(CARGO_PKG_VERSION "")
-
-file(READ Cargo.toml TOML_STRING)
-
-string(REPLACE ";" "\\\\;" TOML_STRING "${TOML_STRING}")
-
-string(REPLACE "\n" ";" TOML_LINES "${TOML_STRING}")
-
-foreach(TOML_LINE IN ITEMS ${TOML_LINES})
- string(REGEX MATCH "^\\[(lib|package)\\]$" _ ${TOML_LINE})
-
- if(CMAKE_MATCH_1 STREQUAL "lib")
- set(CARGO_LIB_SECTION ON)
-
- set(CARGO_PKG_SECTION OFF)
- elseif(CMAKE_MATCH_1 STREQUAL "package")
- set(CARGO_LIB_SECTION OFF)
-
- set(CARGO_PKG_SECTION ON)
- endif()
-
- string(REGEX MATCH "^name += +\"([^\"]+)\"$" _ ${TOML_LINE})
-
- if(CMAKE_MATCH_1 AND (CARGO_LIB_SECTION AND NOT CARGO_PKG_SECTION))
- set(LIBRARY_NAME "${CMAKE_MATCH_1}")
- elseif(CMAKE_MATCH_1 AND (NOT CARGO_LIB_SECTION AND CARGO_PKG_SECTION))
- set(CARGO_PKG_NAME "${CMAKE_MATCH_1}")
- endif()
-
- string(REGEX MATCH "^version += +\"([^\"]+)\"$" _ ${TOML_LINE})
-
- if(CMAKE_MATCH_1 AND CARGO_PKG_SECTION)
- set(CARGO_PKG_VERSION "${CMAKE_MATCH_1}")
- endif()
-
- if(LIBRARY_NAME AND (CARGO_PKG_NAME AND CARGO_PKG_VERSION))
- break()
- endif()
-endforeach()
-
-project(${CARGO_PKG_NAME} VERSION ${CARGO_PKG_VERSION} LANGUAGES C DESCRIPTION "C bindings for the Automerge Rust backend.")
-
-include(CTest)
-
-option(BUILD_SHARED_LIBS "Enable the choice of a shared or static library.")
-
-include(CMakePackageConfigHelpers)
-
-include(GNUInstallDirs)
-
-string(MAKE_C_IDENTIFIER ${PROJECT_NAME} SYMBOL_PREFIX)
-
-string(TOUPPER ${SYMBOL_PREFIX} SYMBOL_PREFIX)
-
-set(CARGO_TARGET_DIR "${CMAKE_CURRENT_BINARY_DIR}/Cargo/target")
-
-add_subdirectory(src)
-
-# Generate and install the configuration header.
-math(EXPR INTEGER_PROJECT_VERSION_MAJOR "${PROJECT_VERSION_MAJOR} * 100000")
-
-math(EXPR INTEGER_PROJECT_VERSION_MINOR "${PROJECT_VERSION_MINOR} * 100")
-
-math(EXPR INTEGER_PROJECT_VERSION_PATCH "${PROJECT_VERSION_PATCH}")
-
-math(EXPR INTEGER_PROJECT_VERSION "${INTEGER_PROJECT_VERSION_MAJOR} + ${INTEGER_PROJECT_VERSION_MINOR} + ${INTEGER_PROJECT_VERSION_PATCH}")
-
-configure_file(
- ${CMAKE_MODULE_PATH}/config.h.in
- config.h
- @ONLY
- NEWLINE_STYLE LF
-)
-
-install(
- FILES ${CMAKE_BINARY_DIR}/config.h
- DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}
-)
-
-if(BUILD_TESTING)
- add_subdirectory(test)
-
- enable_testing()
-endif()
-
-# Generate and install .cmake files
-set(PROJECT_CONFIG_NAME "${PROJECT_NAME}-config")
-
-set(PROJECT_CONFIG_VERSION_NAME "${PROJECT_CONFIG_NAME}-version")
-
-write_basic_package_version_file(
- ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_CONFIG_VERSION_NAME}.cmake
- VERSION ${PROJECT_VERSION}
- COMPATIBILITY ExactVersion
-)
-
-# The namespace label starts with the title-cased library name.
-string(SUBSTRING ${LIBRARY_NAME} 0 1 NS_FIRST)
-
-string(SUBSTRING ${LIBRARY_NAME} 1 -1 NS_REST)
-
-string(TOUPPER ${NS_FIRST} NS_FIRST)
-
-string(TOLOWER ${NS_REST} NS_REST)
-
-string(CONCAT NAMESPACE ${NS_FIRST} ${NS_REST} "::")
-
-# \note CMake doesn't automate the exporting of an imported library's targets
-# so the package configuration script must do it.
-configure_package_config_file(
- ${CMAKE_MODULE_PATH}/${PROJECT_CONFIG_NAME}.cmake.in
- ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_CONFIG_NAME}.cmake
- INSTALL_DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}
-)
-
-install(
- FILES
- ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_CONFIG_NAME}.cmake
- ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_CONFIG_VERSION_NAME}.cmake
- DESTINATION
- ${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}
-)
diff --git a/automerge-c/Makefile b/automerge-c/Makefile
deleted file mode 100644
index a5ab353b..00000000
--- a/automerge-c/Makefile
+++ /dev/null
@@ -1,30 +0,0 @@
-
-CC=gcc
-CFLAGS=-I.
-DEPS=automerge.h
-LIBS=-lpthread -ldl -lm
-LDIR=../target/release
-LIB=../target/release/libautomerge.a
-DEBUG_LIB=../target/debug/libautomerge.a
-
-all: $(DEBUG_LIB) automerge
-
-debug: LDIR=../target/debug
-debug: automerge $(DEBUG_LIB)
-
-automerge: automerge.o $(LDIR)/libautomerge.a
- $(CC) -o $@ automerge.o $(LDIR)/libautomerge.a $(LIBS) -L$(LDIR)
-
-$(DEBUG_LIB): src/*.rs
- cargo build
-
-$(LIB): src/*.rs
- cargo build --release
-
-%.o: %.c $(DEPS)
- $(CC) -c -o $@ $< $(CFLAGS)
-
-.PHONY: clean
-
-clean:
- rm -f *.o automerge $(LIB) $(DEBUG_LIB)
diff --git a/automerge-c/README.md b/automerge-c/README.md
deleted file mode 100644
index d500f330..00000000
--- a/automerge-c/README.md
+++ /dev/null
@@ -1,95 +0,0 @@
-
-## Methods we need to support
-
-### Basic management
-
- 1. `AMcreate()`
- 1. `AMclone(doc)`
- 1. `AMfree(doc)`
- 1. `AMconfig(doc, key, val)` // set actor
- 1. `actor = get_actor(doc)`
-
-### Transactions
-
- 1. `AMpendingOps(doc)`
- 1. `AMcommit(doc, message, time)`
- 1. `AMrollback(doc)`
-
-### Write
-
- 1. `AMset{Map|List}(doc, obj, prop, value)`
- 1. `AMinsert(doc, obj, index, value)`
- 1. `AMpush(doc, obj, value)`
- 1. `AMdel{Map|List}(doc, obj, prop)`
- 1. `AMinc{Map|List}(doc, obj, prop, value)`
- 1. `AMspliceText(doc, obj, start, num_del, text)`
-
-### Read
-
- 1. `AMkeys(doc, obj, heads)`
- 1. `AMlength(doc, obj, heads)`
- 1. `AMvalues(doc, obj, heads)`
- 1. `AMtext(doc, obj, heads)`
-
-### Sync
-
- 1. `AMgenerateSyncMessage(doc, state)`
- 1. `AMreceiveSyncMessage(doc, state, message)`
- 1. `AMinitSyncState()`
-
-### Save / Load
-
- 1. `AMload(data)`
- 1. `AMloadIncremental(doc, data)`
- 1. `AMsave(doc)`
- 1. `AMsaveIncremental(doc)`
-
-### Low Level Access
-
- 1. `AMapplyChanges(doc, changes)`
- 1. `AMgetChanges(doc, deps)`
- 1. `AMgetChangesAdded(doc1, doc2)`
- 1. `AMgetHeads(doc)`
- 1. `AMgetLastLocalChange(doc)`
- 1. `AMgetMissingDeps(doc, heads)`
-
-### Encode/Decode
-
- 1. `AMencodeChange(change)`
- 1. `AMdecodeChange(change)`
- 1. `AMencodeSyncMessage(change)`
- 1. `AMdecodeSyncMessage(change)`
- 1. `AMencodeSyncState(change)`
- 1. `AMdecodeSyncState(change)`
-
-## Open Question - Memory management
-
-Most of these calls return one or more items of arbitrary length. Doing memory management in C is tricky. This is my proposed solution...
-
-###
-
- ```
- // returns 1 or zero opids
- n = automerge_set(doc, "_root", "hello", datatype, value);
- if (n) {
- automerge_pop(doc, &obj, len);
- }
-
- // returns n values
- n = automerge_values(doc, "_root", "hello");
- for (i = 0; i
-#include
-#include
-#include
-#include "automerge.h"
-
-#define MAX_BUFF_SIZE 4096
-
-int main() {
- int n = 0;
- int data_type = 0;
- char buff[MAX_BUFF_SIZE];
- char obj[MAX_BUFF_SIZE];
- AMresult* res = NULL;
-
- printf("begin\n");
-
- AMdoc* doc = AMcreate();
-
- printf("AMconfig()...");
- AMconfig(doc, "actor", "aabbcc");
- printf("pass!\n");
-
- printf("AMmapSetStr()...\n");
- res = AMmapSetStr(doc, NULL, "string", "hello world");
- if (AMresultStatus(res) != AM_STATUS_COMMAND_OK)
- {
- printf("AMmapSet() failed: %s\n", AMerrorMessage(res));
- return 1;
- }
- AMclear(res);
- printf("pass!\n");
-
- AMdestroy(doc);
- printf("end\n");
-}
diff --git a/automerge-c/build.rs b/automerge-c/build.rs
deleted file mode 100644
index e953527f..00000000
--- a/automerge-c/build.rs
+++ /dev/null
@@ -1,25 +0,0 @@
-extern crate cbindgen;
-
-use std::{env, path::PathBuf};
-
-fn main() {
- let crate_dir = PathBuf::from(
- env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR env var is not defined"),
- );
-
- let config = cbindgen::Config::from_file("cbindgen.toml")
- .expect("Unable to find cbindgen.toml configuration file");
-
- // let mut config: cbindgen::Config = Default::default();
- // config.language = cbindgen::Language::C;
-
- if let Ok(writer) = cbindgen::generate_with_config(&crate_dir, config) {
- writer.write_to_file(crate_dir.join("automerge.h"));
-
- // Also write the generated header into the target directory when
- // specified (necessary for an out-of-source build a la CMake).
- if let Ok(target_dir) = env::var("CARGO_TARGET_DIR") {
- writer.write_to_file(PathBuf::from(target_dir).join("automerge.h"));
- }
- }
-}
diff --git a/automerge-c/cbindgen.toml b/automerge-c/cbindgen.toml
deleted file mode 100644
index aad1850d..00000000
--- a/automerge-c/cbindgen.toml
+++ /dev/null
@@ -1,39 +0,0 @@
-after_includes = """\n
-/**
- * \\defgroup enumerations Public Enumerations
- Symbolic names for integer constants.
- */
-
-/**
- * \\memberof AMdoc
- * \\def AM_ROOT
- * \\brief The root object of an `AMdoc` struct.
- */
-#define AM_ROOT NULL
-"""
-autogen_warning = "/* Warning, this file is autogenerated by cbindgen. Don't modify this manually. */"
-documentation = true
-documentation_style = "doxy"
-header = """
-/** \\file
- * All constants, functions and types in the Automerge library's C API.
- */
- """
-include_guard = "automerge_h"
-includes = []
-language = "C"
-line_length = 140
-no_includes = true
-style = "both"
-sys_includes = ["stdbool.h", "stddef.h", "stdint.h"]
-usize_is_size_t = true
-
-[enum]
-derive_const_casts = true
-enum_class = true
-must_use = "MUST_USE_ENUM"
-prefix_with_name = true
-rename_variants = "ScreamingSnakeCase"
-
-[export]
-item_types = ["enums", "structs", "opaque", "constants", "functions"]
diff --git a/automerge-c/cmake/config.h.in b/automerge-c/cmake/config.h.in
deleted file mode 100644
index 08643fc5..00000000
--- a/automerge-c/cmake/config.h.in
+++ /dev/null
@@ -1,14 +0,0 @@
-#ifndef @SYMBOL_PREFIX@_CONFIG_INCLUDED
-#define @SYMBOL_PREFIX@_CONFIG_INCLUDED
-
-/* This header is auto-generated by CMake. */
-
-#define @SYMBOL_PREFIX@_VERSION @INTEGER_PROJECT_VERSION@
-
-#define @SYMBOL_PREFIX@_MAJOR_VERSION (@SYMBOL_PREFIX@_VERSION / 100000)
-
-#define @SYMBOL_PREFIX@_MINOR_VERSION ((@SYMBOL_PREFIX@_VERSION / 100) % 1000)
-
-#define @SYMBOL_PREFIX@_PATCH_VERSION (@SYMBOL_PREFIX@_VERSION % 100)
-
-#endif /* @SYMBOL_PREFIX@_CONFIG_INCLUDED */
diff --git a/automerge-c/src/CMakeLists.txt b/automerge-c/src/CMakeLists.txt
deleted file mode 100644
index 11cf5d96..00000000
--- a/automerge-c/src/CMakeLists.txt
+++ /dev/null
@@ -1,220 +0,0 @@
-cmake_minimum_required(VERSION 3.18 FATAL_ERROR)
-
-find_program (
- CARGO_CMD
- "cargo"
- PATHS "$ENV{CARGO_HOME}/bin"
- DOC "The Cargo command"
-)
-
-if(NOT CARGO_CMD)
- message(FATAL_ERROR "Cargo (Rust package manager) not found! Install it and/or set the CARGO_HOME environment variable.")
-endif()
-
-string(TOLOWER "${CMAKE_BUILD_TYPE}" BUILD_TYPE_LOWER)
-
-if(BUILD_TYPE_LOWER STREQUAL debug)
- set(CARGO_BUILD_TYPE "debug")
-
- set(CARGO_FLAG "")
-else()
- set(CARGO_BUILD_TYPE "release")
-
- set(CARGO_FLAG "--release")
-endif()
-
-set(CARGO_CURRENT_BINARY_DIR "${CARGO_TARGET_DIR}/${CARGO_BUILD_TYPE}")
-
-set(
- CARGO_OUTPUT
- ${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h
- ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}
- ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX}
-)
-
-if(WIN32)
- # \note The basename of an import library output by Cargo is the filename
- # of its corresponding shared library.
- list(APPEND CARGO_OUTPUT ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX})
-endif()
-
-add_custom_command(
- OUTPUT ${CARGO_OUTPUT}
- COMMAND
- # \note cbindgen won't regenerate its output header file after it's
- # been removed but it will after its configuration file has been
- # updated.
- ${CMAKE_COMMAND} -DCONDITION=NOT_EXISTS -P ${CMAKE_SOURCE_DIR}/cmake/file_touch.cmake -- ${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h ${CMAKE_SOURCE_DIR}/cbindgen.toml
- COMMAND
- ${CMAKE_COMMAND} -E env CARGO_TARGET_DIR=${CARGO_TARGET_DIR} ${CARGO_CMD} build ${CARGO_FLAG}
- MAIN_DEPENDENCY
- lib.rs
- DEPENDS
- doc.rs
- result.rs
- utils.rs
- ${CMAKE_SOURCE_DIR}/build.rs
- ${CMAKE_SOURCE_DIR}/Cargo.toml
- ${CMAKE_SOURCE_DIR}/cbindgen.toml
- WORKING_DIRECTORY
- ${CMAKE_SOURCE_DIR}
- COMMENT
- "Producing the library artifacts with Cargo..."
- VERBATIM
-)
-
-add_custom_target(
- ${LIBRARY_NAME}_artifacts
- DEPENDS ${CARGO_OUTPUT}
-)
-
-# \note cbindgen's naming behavior isn't fully configurable.
-add_custom_command(
- TARGET ${LIBRARY_NAME}_artifacts
- POST_BUILD
- COMMAND
- # Compensate for cbindgen's variant struct naming.
- ${CMAKE_COMMAND} -DMATCH_REGEX=AM\([^_]+_[^_]+\)_Body -DREPLACE_EXPR=AM\\1 -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h
- COMMAND
- # Compensate for cbindgen's union tag enum type naming.
- ${CMAKE_COMMAND} -DMATCH_REGEX=AM\([^_]+\)_Tag -DREPLACE_EXPR=AM\\1Variant -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h
- COMMAND
- # Compensate for cbindgen's translation of consecutive uppercase letters to "ScreamingSnakeCase".
- ${CMAKE_COMMAND} -DMATCH_REGEX=A_M\([^_]+\)_ -DREPLACE_EXPR=AM_\\1_ -P ${CMAKE_SOURCE_DIR}/cmake/file_regex_replace.cmake -- ${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h
- WORKING_DIRECTORY
- ${CMAKE_SOURCE_DIR}
- COMMENT
- "Compensating for hard-coded cbindgen naming behaviors..."
- VERBATIM
-)
-
-if(BUILD_SHARED_LIBS)
- if(WIN32)
- set(LIBRARY_DESTINATION "${CMAKE_INSTALL_BINDIR}")
- else()
- set(LIBRARY_DESTINATION "${CMAKE_INSTALL_LIBDIR}")
- endif()
-
- set(LIBRARY_DEFINE_SYMBOL "${SYMBOL_PREFIX}_EXPORTS")
-
- # \note The basename of an import library output by Cargo is the filename
- # of its corresponding shared library.
- set(LIBRARY_IMPLIB "${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}")
-
- set(LIBRARY_LOCATION "${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}")
-
- set(LIBRARY_NO_SONAME "${WIN32}")
-
- set(LIBRARY_SONAME "${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_${CMAKE_BUILD_TYPE}_POSTFIX}${CMAKE_SHARED_LIBRARY_SUFFIX}")
-
- set(LIBRARY_TYPE "SHARED")
-else()
- set(LIBRARY_DEFINE_SYMBOL "")
-
- set(LIBRARY_DESTINATION "${CMAKE_INSTALL_LIBDIR}")
-
- set(LIBRARY_IMPLIB "")
-
- set(LIBRARY_LOCATION "${CARGO_CURRENT_BINARY_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX}")
-
- set(LIBRARY_NO_SONAME "TRUE")
-
- set(LIBRARY_SONAME "")
-
- set(LIBRARY_TYPE "STATIC")
-endif()
-
-add_library(${LIBRARY_NAME} ${LIBRARY_TYPE} IMPORTED GLOBAL)
-
-set_target_properties(
- ${LIBRARY_NAME}
- PROPERTIES
- # \note Cargo writes a debug build into a nested directory instead of
- # decorating its name.
- DEBUG_POSTFIX ""
- DEFINE_SYMBOL "${LIBRARY_DEFINE_SYMBOL}"
- IMPORTED_IMPLIB "${LIBRARY_IMPLIB}"
- IMPORTED_LOCATION "${LIBRARY_LOCATION}"
- IMPORTED_NO_SONAME "${LIBRARY_NO_SONAME}"
- IMPORTED_SONAME "${LIBRARY_SONAME}"
- LINKER_LANGUAGE C
- PUBLIC_HEADER "${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h"
- SOVERSION "${PROJECT_VERSION_MAJOR}"
- VERSION "${PROJECT_VERSION}"
- # \note Cargo exports all of the symbols automatically.
- WINDOWS_EXPORT_ALL_SYMBOLS "TRUE"
-)
-
-target_compile_definitions(${LIBRARY_NAME} INTERFACE $)
-
-target_include_directories(
- ${LIBRARY_NAME}
- INTERFACE
- "$"
-)
-
-set(CMAKE_THREAD_PREFER_PTHREAD TRUE)
-
-set(THREADS_PREFER_PTHREAD_FLAG TRUE)
-
-find_package(Threads REQUIRED)
-
-set(LIBRARY_DEPENDENCIES Threads::Threads ${CMAKE_DL_LIBS})
-
-if(WIN32)
- list(APPEND LIBRARY_DEPENDENCIES Bcrypt userenv ws2_32)
-else()
- list(APPEND LIBRARY_DEPENDENCIES m)
-endif()
-
-target_link_libraries(${LIBRARY_NAME} INTERFACE ${LIBRARY_DEPENDENCIES})
-
-install(
- FILES $
- TYPE LIB
- # \note The basename of an import library output by Cargo is the filename
- # of its corresponding shared library.
- RENAME "${CMAKE_STATIC_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_${CMAKE_BUILD_TYPE}_POSTFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}"
- OPTIONAL
-)
-
-set(LIBRARY_FILE_NAME "${CMAKE_${LIBRARY_TYPE}_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_${CMAKE_BUILD_TYPE}_POSTFIX}${CMAKE_${LIBRARY_TYPE}_LIBRARY_SUFFIX}")
-
-install(
- FILES $
- RENAME "${LIBRARY_FILE_NAME}"
- DESTINATION ${LIBRARY_DESTINATION}
-)
-
-install(
- FILES $
- DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}
-)
-
-find_package(Doxygen OPTIONAL_COMPONENTS dot)
-
-if(DOXYGEN_FOUND)
- set(DOXYGEN_GENERATE_LATEX YES)
-
- set(DOXYGEN_PDF_HYPERLINKS YES)
-
- set(DOXYGEN_PROJECT_LOGO "${CMAKE_SOURCE_DIR}/img/brandmark.png")
-
- set(DOXYGEN_SORT_BRIEF_DOCS YES)
-
- set(DOXYGEN_USE_MDFILE_AS_MAINPAGE "${CMAKE_SOURCE_DIR}/README.md")
-
- doxygen_add_docs(
- ${LIBRARY_NAME}_docs
- "${CARGO_TARGET_DIR}/${LIBRARY_NAME}.h"
- "${CMAKE_SOURCE_DIR}/README.md"
- USE_STAMP_FILE
- WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
- COMMENT "Producing documentation with Doxygen..."
- )
-
- # \note A Doxygen input file isn't a file-level dependency so the Doxygen
- # command must instead depend upon a target that outputs the file or
- # it will just output an error message when it can't be found.
- add_dependencies(${LIBRARY_NAME}_docs ${LIBRARY_NAME}_artifacts)
-endif()
diff --git a/automerge-c/src/doc.rs b/automerge-c/src/doc.rs
deleted file mode 100644
index 4de2524a..00000000
--- a/automerge-c/src/doc.rs
+++ /dev/null
@@ -1,85 +0,0 @@
-use automerge as am;
-use std::collections::BTreeSet;
-use std::ops::{Deref, DerefMut};
-
-use crate::result::AMobjId;
-use automerge::transaction::Transactable;
-
-/// \struct AMdoc
-/// \brief A JSON-like CRDT.
-#[derive(Clone)]
-pub struct AMdoc {
- body: am::AutoCommit,
- obj_ids: BTreeSet,
-}
-
-impl AMdoc {
- pub fn new(body: am::AutoCommit) -> Self {
- Self {
- body,
- obj_ids: BTreeSet::new(),
- }
- }
-
- pub fn insert_object(
- &mut self,
- obj: &am::ObjId,
- index: usize,
- value: am::ObjType,
- ) -> Result<&AMobjId, am::AutomergeError> {
- match self.body.insert_object(obj, index, value) {
- Ok(ex_id) => {
- let obj_id = AMobjId::new(ex_id);
- self.obj_ids.insert(obj_id.clone());
- match self.obj_ids.get(&obj_id) {
- Some(obj_id) => Ok(obj_id),
- None => Err(am::AutomergeError::Fail),
- }
- }
- Err(e) => Err(e),
- }
- }
-
- pub fn put_object, P: Into>(
- &mut self,
- obj: O,
- prop: P,
- value: am::ObjType,
- ) -> Result<&AMobjId, am::AutomergeError> {
- match self.body.put_object(obj, prop, value) {
- Ok(ex_id) => {
- let obj_id = AMobjId::new(ex_id);
- self.obj_ids.insert(obj_id.clone());
- match self.obj_ids.get(&obj_id) {
- Some(obj_id) => Ok(obj_id),
- None => Err(am::AutomergeError::Fail),
- }
- }
- Err(e) => Err(e),
- }
- }
-
- pub fn drop_obj_id(&mut self, obj_id: &AMobjId) -> bool {
- self.obj_ids.remove(obj_id)
- }
-}
-
-impl Deref for AMdoc {
- type Target = am::AutoCommit;
-
- fn deref(&self) -> &Self::Target {
- &self.body
- }
-}
-
-impl DerefMut for AMdoc {
- fn deref_mut(&mut self) -> &mut Self::Target {
- &mut self.body
- }
-}
-
-impl From for *mut AMdoc {
- fn from(b: AMdoc) -> Self {
- Box::into_raw(Box::new(b))
- }
-}
diff --git a/automerge-c/src/lib.rs b/automerge-c/src/lib.rs
deleted file mode 100644
index a880d588..00000000
--- a/automerge-c/src/lib.rs
+++ /dev/null
@@ -1,1081 +0,0 @@
-use automerge as am;
-use smol_str::SmolStr;
-use std::{borrow::Cow, ffi::CStr, ffi::CString, os::raw::c_char};
-
-mod doc;
-mod result;
-mod utils;
-
-use automerge::transaction::Transactable;
-use doc::AMdoc;
-use result::{AMobjId, AMresult, AMvalue};
-
-/// \ingroup enumerations
-/// \enum AMobjType
-/// \brief The type of an object value.
-#[repr(u8)]
-pub enum AMobjType {
- /// A list.
- List = 1,
- /// A key-value map.
- Map,
- /// A list of Unicode graphemes.
- Text,
-}
-
-impl From for am::ObjType {
- fn from(o: AMobjType) -> Self {
- match o {
- AMobjType::Map => am::ObjType::Map,
- AMobjType::List => am::ObjType::List,
- AMobjType::Text => am::ObjType::Text,
- }
- }
-}
-
-/// \ingroup enumerations
-/// \enum AMstatus
-/// \brief The status of an API call.
-#[derive(Debug)]
-#[repr(u8)]
-pub enum AMstatus {
- /// Success.
- /// \note This tag is unalphabetized so that `0` indicates success.
- Ok,
- /// Failure due to an error.
- Error,
- /// Failure due to an invalid result.
- InvalidResult,
-}
-
-unsafe fn to_str(c: *const c_char) -> String {
- CStr::from_ptr(c).to_string_lossy().to_string()
-}
-
-macro_rules! to_doc {
- ($handle:expr) => {{
- let handle = $handle.as_mut();
- match handle {
- Some(b) => b,
- None => return AMresult::err("Invalid AMdoc pointer").into(),
- }
- }};
-}
-
-macro_rules! to_obj_id {
- ($handle:expr) => {{
- match $handle.as_ref() {
- Some(obj_id) => obj_id,
- None => &am::ROOT,
- }
- }};
-}
-
-fn to_result<'a, R: Into>>(r: R) -> *mut AMresult<'a> {
- (r.into()).into()
-}
-
-/// \memberof AMdoc
-/// \brief Allocates a new `AMdoc` struct and initializes it with defaults.
-///
-/// \return A pointer to an `AMdoc` struct.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeDoc()`.
-#[no_mangle]
-pub extern "C" fn AMallocDoc() -> *mut AMdoc {
- AMdoc::new(am::AutoCommit::new()).into()
-}
-
-/// \memberof AMdoc
-/// \brief Deallocates the storage for an `AMdoc` struct previously
-/// allocated by `AMallocDoc()` or `AMdup()`.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \pre \p doc must be a valid address.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-#[no_mangle]
-pub unsafe extern "C" fn AMfreeDoc(doc: *mut AMdoc) {
- if !doc.is_null() {
- let doc: AMdoc = *Box::from_raw(doc);
- drop(doc)
- }
-}
-
-/// \memberof AMdoc
-/// \brief Allocates storage for an `AMdoc` struct and initializes it by
-/// duplicating the `AMdoc` struct pointed to by \p doc.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \return A pointer to an `AMdoc` struct.
-/// \pre \p doc must be a valid address.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeDoc()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-#[no_mangle]
-pub unsafe extern "C" fn AMdup(doc: *mut AMdoc) -> *mut AMdoc {
- let doc = *Box::from_raw(doc);
- let copy = doc.clone();
- std::mem::forget(doc);
- copy.into()
-}
-
-/// \memberof AMdoc
-/// \brief Gets an `AMdoc` struct's actor ID value as an array of bytes.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \return A pointer to an `AMresult` struct containing an `AMbyteSpan`.
-/// \pre \p doc must be a valid address.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-#[no_mangle]
-pub unsafe extern "C" fn AMgetActor<'a>(doc: *mut AMdoc) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- to_result(Ok(doc.get_actor().clone()))
-}
-
-/// \memberof AMdoc
-/// \brief Gets an `AMdoc` struct's actor ID value as a hexadecimal string.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \return A pointer to an `AMresult` struct containing a `char const*`.
-/// \pre \p doc must be a valid address.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-#[no_mangle]
-pub unsafe extern "C" fn AMgetActorHex<'a>(doc: *mut AMdoc) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- let hex_str = doc.get_actor().to_hex_string();
- let value = am::Value::Scalar(Cow::Owned(am::ScalarValue::Str(SmolStr::new(hex_str))));
- to_result(Ok(value))
-}
-
-/// \memberof AMdoc
-/// \brief Puts an array of bytes as the actor ID value of an `AMdoc` struct. .
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] value A pointer to an array of bytes.
-/// \param[in] count The number of bytes to copy from \p value.
-/// \return A pointer to an `AMresult` struct containing nothing.
-/// \pre \p doc must be a valid address.
-/// \pre \p value must be a valid address.
-/// \pre `0 <=` \p count `<=` length of \p value.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// value must be a byte array of length `count`
-#[no_mangle]
-pub unsafe extern "C" fn AMsetActor<'a>(
- doc: *mut AMdoc,
- value: *const u8,
- count: usize,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- let slice = std::slice::from_raw_parts(value, count);
- doc.set_actor(am::ActorId::from(slice));
- to_result(Ok(()))
-}
-
-/// \memberof AMdoc
-/// \brief Puts a hexadecimal string as the actor ID value of an `AMdoc` struct.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] hex_str A string of hexadecimal characters.
-/// \return A pointer to an `AMresult` struct containing nothing.
-/// \pre \p doc must be a valid address.
-/// \pre \p hex_str must be a valid address.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// hex_str must be a null-terminated array of `c_char`
-#[no_mangle]
-pub unsafe extern "C" fn AMsetActorHex<'a>(
- doc: *mut AMdoc,
- hex_str: *const c_char,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- let slice = std::slice::from_raw_parts(hex_str as *const u8, libc::strlen(hex_str));
- to_result(match hex::decode(slice) {
- Ok(vec) => {
- doc.set_actor(vec.into());
- Ok(())
- }
- Err(error) => Err(am::AutomergeError::HexDecode(error)),
- })
-}
-
-/// \memberof AMresult
-/// \brief Gets the status code of an `AMresult` struct.
-///
-/// \param[in] result A pointer to an `AMresult` struct.
-/// \return An `AMstatus` enum tag.
-/// \pre \p result must be a valid address.
-/// \internal
-///
-/// # Safety
-/// result must be a pointer to a valid AMresult
-#[no_mangle]
-pub unsafe extern "C" fn AMresultStatus(result: *mut AMresult) -> AMstatus {
- match result.as_mut() {
- Some(AMresult::Error(_)) => AMstatus::Error,
- None => AMstatus::InvalidResult,
- _ => AMstatus::Ok,
- }
-}
-
-/// \memberof AMresult
-/// \brief Gets the size of an `AMresult` struct.
-///
-/// \param[in] result A pointer to an `AMresult` struct.
-/// \return The count of values in \p result.
-/// \pre \p result must be a valid address.
-/// \internal
-///
-/// # Safety
-/// result must be a pointer to a valid AMresult
-#[no_mangle]
-pub unsafe extern "C" fn AMresultSize(result: *mut AMresult) -> usize {
- if let Some(result) = result.as_mut() {
- match result {
- AMresult::ActorId(_) | AMresult::ObjId(_) => 1,
- AMresult::Changes(changes) => changes.len(),
- AMresult::Error(_) | AMresult::Nothing => 0,
- AMresult::Scalars(vec, _) => vec.len(),
- }
- } else {
- 0
- }
-}
-
-/// \memberof AMresult
-/// \brief Gets a value from an `AMresult` struct.
-///
-/// \param[in] result A pointer to an `AMresult` struct.
-/// \param[in] index The index of a value.
-/// \return An `AMvalue` struct.
-/// \pre \p result must be a valid address.
-/// \pre `0 <=` \p index `<=` AMresultSize() for \p result.
-/// \internal
-///
-/// # Safety
-/// result must be a pointer to a valid AMresult
-#[no_mangle]
-pub unsafe extern "C" fn AMresultValue(result: *mut AMresult, index: usize) -> AMvalue {
- let mut value = AMvalue::Nothing;
- if let Some(result) = result.as_mut() {
- match result {
- AMresult::ActorId(actor_id) => {
- if index == 0 {
- value = AMvalue::ActorId(actor_id.into());
- }
- }
- AMresult::Changes(_) => {}
- AMresult::Error(_) => {}
- AMresult::ObjId(obj_id) => {
- if index == 0 {
- value = AMvalue::ObjId(obj_id);
- }
- }
- AMresult::Nothing => (),
- AMresult::Scalars(vec, hosted_str) => {
- if let Some(element) = vec.get(index) {
- match element {
- am::Value::Scalar(scalar) => match scalar.as_ref() {
- am::ScalarValue::Boolean(flag) => {
- value = AMvalue::Boolean(*flag as i8);
- }
- am::ScalarValue::Bytes(bytes) => {
- value = AMvalue::Bytes(bytes.into());
- }
- am::ScalarValue::Counter(counter) => {
- value = AMvalue::Counter(counter.into());
- }
- am::ScalarValue::F64(float) => {
- value = AMvalue::F64(*float);
- }
- am::ScalarValue::Int(int) => {
- value = AMvalue::Int(*int);
- }
- am::ScalarValue::Null => {
- value = AMvalue::Null;
- }
- am::ScalarValue::Str(smol_str) => {
- *hosted_str = CString::new(smol_str.to_string()).ok();
- if let Some(c_str) = hosted_str {
- value = AMvalue::Str(c_str.as_ptr());
- }
- }
- am::ScalarValue::Timestamp(timestamp) => {
- value = AMvalue::Timestamp(*timestamp);
- }
- am::ScalarValue::Uint(uint) => {
- value = AMvalue::Uint(*uint);
- }
- },
- // \todo Confirm that an object value should be ignored
- // when there's no object ID variant.
- am::Value::Object(_) => (),
- }
- }
- }
- }
- };
- value
-}
-
-/// \memberof AMdoc
-/// \brief Puts a signed integer as the value of a key in a map object.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`.
-/// \param[in] key A UTF-8 string key for the map object identified by \p obj.
-/// \param[in] value A 64-bit signed integer.
-/// \return A pointer to an `AMresult` struct containing nothing.
-/// \pre \p doc must be a valid address.
-/// \pre \p key must be a valid address.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-/// key must be a c string of the map key to be used
-#[no_mangle]
-pub unsafe extern "C" fn AMmapPutInt<'a>(
- doc: *mut AMdoc,
- obj_id: *mut AMobjId,
- key: *const c_char,
- value: i64,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- to_result(doc.put(to_obj_id!(obj_id), to_str(key), value))
-}
-
-/// \memberof AMdoc
-/// \brief Puts an unsigned integer as the value of a key in a map object.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`.
-/// \param[in] key A UTF-8 string key for the map object identified by \p obj.
-/// \param[in] value A 64-bit unsigned integer.
-/// \return A pointer to an `AMresult` struct containing nothing.
-/// \pre \p doc must be a valid address.
-/// \pre \p key must be a valid address.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-/// key must be a c string of the map key to be used
-#[no_mangle]
-pub unsafe extern "C" fn AMmapPutUint<'a>(
- doc: *mut AMdoc,
- obj_id: *mut AMobjId,
- key: *const c_char,
- value: u64,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- to_result(doc.put(to_obj_id!(obj_id), to_str(key), value))
-}
-
-/// \memberof AMdoc
-/// \brief Puts a UTF-8 string as the value of a key in a map object.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`.
-/// \param[in] key A UTF-8 string key for the map object identified by \p obj.
-/// \param[in] value A UTF-8 string.
-/// \return A pointer to an `AMresult` struct containing nothing.
-/// \pre \p doc must be a valid address.
-/// \pre \p key must be a valid address.
-/// \pre \p value must be a valid address.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-/// key must be a c string of the map key to be used
-/// value must be a null-terminated array of `c_char`
-#[no_mangle]
-pub unsafe extern "C" fn AMmapPutStr<'a>(
- doc: *mut AMdoc,
- obj_id: *mut AMobjId,
- key: *const c_char,
- value: *const c_char,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- to_result(doc.put(to_obj_id!(obj_id), to_str(key), to_str(value)))
-}
-
-/// \memberof AMdoc
-/// \brief Puts an array of bytes as the value of a key in a map object.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`.
-/// \param[in] key A UTF-8 string key for the map object identified by \p obj.
-/// \param[in] value A pointer to an array of bytes.
-/// \param[in] count The number of bytes to copy from \p value.
-/// \return A pointer to an `AMresult` struct containing nothing.
-/// \pre \p doc must be a valid address.
-/// \pre \p key must be a valid address.
-/// \pre \p value must be a valid address.
-/// \pre `0 <=` \p count `<=` length of \p value.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-/// key must be a c string of the map key to be used
-/// value must be a byte array of length `count`
-#[no_mangle]
-pub unsafe extern "C" fn AMmapPutBytes<'a>(
- doc: *mut AMdoc,
- obj_id: *mut AMobjId,
- key: *const c_char,
- value: *const u8,
- count: usize,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- let slice = std::slice::from_raw_parts(value, count);
- let mut vec = Vec::new();
- vec.extend_from_slice(slice);
- to_result(doc.put(to_obj_id!(obj_id), to_str(key), vec))
-}
-
-/// \memberof AMdoc
-/// \brief Puts a float as the value of a key in a map object.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`.
-/// \param[in] key A UTF-8 string key for the map object identified by \p obj.
-/// \param[in] value A 64-bit float.
-/// \return A pointer to an `AMresult` struct containing nothing.
-/// \pre \p doc must be a valid address.
-/// \pre \p key must be a valid address.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-/// key must be a c string of the map key to be used
-#[no_mangle]
-pub unsafe extern "C" fn AMmapPutF64<'a>(
- doc: *mut AMdoc,
- obj_id: *mut AMobjId,
- key: *const c_char,
- value: f64,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- to_result(doc.put(to_obj_id!(obj_id), to_str(key), value))
-}
-
-/// \memberof AMdoc
-/// \brief Puts a CRDT counter as the value of a key in a map object.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`.
-/// \param[in] key A UTF-8 string key for the map object identified by \p obj.
-/// \param[in] value A 64-bit signed integer.
-/// \return A pointer to an `AMresult` struct containing nothing.
-/// \pre \p doc must be a valid address.
-/// \pre \p key must be a valid address.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-/// key must be a c string of the map key to be used
-#[no_mangle]
-pub unsafe extern "C" fn AMmapPutCounter<'a>(
- doc: *mut AMdoc,
- obj_id: *mut AMobjId,
- key: *const c_char,
- value: i64,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- to_result(doc.put(
- to_obj_id!(obj_id),
- to_str(key),
- am::ScalarValue::Counter(value.into()),
- ))
-}
-
-/// \memberof AMdoc
-/// \brief Puts a Lamport timestamp as the value of a key in a map object.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`.
-/// \param[in] key A UTF-8 string key for the map object identified by \p obj.
-/// \param[in] value A 64-bit signed integer.
-/// \return A pointer to an `AMresult` struct containing nothing.
-/// \pre \p doc must be a valid address.
-/// \pre \p key must be a valid address.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-/// key must be a c string of the map key to be used
-#[no_mangle]
-pub unsafe extern "C" fn AMmapPutTimestamp<'a>(
- doc: *mut AMdoc,
- obj_id: *mut AMobjId,
- key: *const c_char,
- value: i64,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- to_result(doc.put(
- to_obj_id!(obj_id),
- to_str(key),
- am::ScalarValue::Timestamp(value),
- ))
-}
-
-/// \memberof AMdoc
-/// \brief Puts null as the value of a key in a map object.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`.
-/// \param[in] key A UTF-8 string key for the map object identified by \p obj.
-/// \return A pointer to an `AMresult` struct containing nothing.
-/// \pre \p doc must be a valid address.
-/// \pre \p key must be a valid address.
-/// \warning To avoid a memory leak, the returned p ointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-/// key must be a c string of the map key to be used
-#[no_mangle]
-pub unsafe extern "C" fn AMmapPutNull<'a>(
- doc: *mut AMdoc,
- obj_id: *mut AMobjId,
- key: *const c_char,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- to_result(doc.put(to_obj_id!(obj_id), to_str(key), ()))
-}
-
-/// \memberof AMdoc
-/// \brief Puts an empty object as the value of a key in a map object.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`.
-/// \param[in] key A UTF-8 string key for the map object identified by \p obj.
-/// \param[in] obj_type An `AMobjIdType` enum tag.
-/// \return A pointer to an `AMresult` struct containing a pointer to an `AMobjId` struct.
-/// \pre \p doc must be a valid address.
-/// \pre \p key must be a valid address.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-/// key must be a c string of the map key to be used
-#[no_mangle]
-pub unsafe extern "C" fn AMmapPutObject<'a>(
- doc: *mut AMdoc,
- obj_id: *mut AMobjId,
- key: *const c_char,
- obj_type: AMobjType,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- to_result(doc.put_object(to_obj_id!(obj_id), to_str(key), obj_type.into()))
-}
-
-/// \memberof AMdoc
-/// \brief Gets the value at an index in a list object.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`.
-/// \param[in] index An index within the list object identified by \p obj.
-/// \return A pointer to an `AMresult` struct.
-/// \pre \p doc must be a valid address.
-/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-#[no_mangle]
-pub unsafe extern "C" fn AMlistGet<'a>(
- doc: *mut AMdoc,
- obj_id: *mut AMobjId,
- index: usize,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- to_result(doc.get(to_obj_id!(obj_id), index))
-}
-
-/// \memberof AMdoc
-/// \brief Gets the value for a key in a map object.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`.
-/// \param[in] key A UTF-8 string key for the map object identified by \p obj.
-/// \return A pointer to an `AMresult` struct.
-/// \pre \p doc must be a valid address.
-/// \pre \p key must be a valid address.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-/// key must be a c string of the map key to be used
-#[no_mangle]
-pub unsafe extern "C" fn AMmapGet<'a>(
- doc: *mut AMdoc,
- obj_id: *mut AMobjId,
- key: *const c_char,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- to_result(doc.get(to_obj_id!(obj_id), to_str(key)))
-}
-
-/// \memberof AMdoc
-/// \brief Puts an array of bytes as the value at an index in a list object.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`.
-/// \param[in] index An index in the list object identified by \p obj.
-/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index.
-/// \param[in] value A pointer to an array of bytes.
-/// \param[in] count The number of bytes to copy from \p value.
-/// \return A pointer to an `AMresult` struct containing nothing.
-/// \pre \p doc must be a valid address.
-/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj.
-/// \pre \p value must be a valid address.
-/// \pre `0 <=` \p count `<=` length of \p value.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-/// value must be a byte array of length `count`
-#[no_mangle]
-pub unsafe extern "C" fn AMlistPutBytes<'a>(
- doc: *mut AMdoc,
- obj_id: *mut AMobjId,
- index: usize,
- insert: bool,
- value: *const u8,
- count: usize,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- let obj_id = to_obj_id!(obj_id);
- let slice = std::slice::from_raw_parts(value, count);
- let mut vec = Vec::new();
- vec.extend_from_slice(slice);
- to_result(if insert {
- doc.insert(obj_id, index, vec)
- } else {
- doc.put(obj_id, index, vec)
- })
-}
-
-/// \memberof AMdoc
-/// \brief Puts a CRDT counter as the value at an index in a list object.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`.
-/// \param[in] index An index in the list object identified by \p obj.
-/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index.
-/// \param[in] value A 64-bit signed integer.
-/// \return A pointer to an `AMresult` struct containing nothing.
-/// \pre \p doc must be a valid address.
-/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-#[no_mangle]
-pub unsafe extern "C" fn AMlistPutCounter<'a>(
- doc: *mut AMdoc,
- obj_id: *mut AMobjId,
- index: usize,
- insert: bool,
- value: i64,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- let obj_id = to_obj_id!(obj_id);
- let value = am::ScalarValue::Counter(value.into());
- to_result(if insert {
- doc.insert(obj_id, index, value)
- } else {
- doc.put(obj_id, index, value)
- })
-}
-
-/// \memberof AMdoc
-/// \brief Puts a float as the value at an index in a list object.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`.
-/// \param[in] index An index in the list object identified by \p obj.
-/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index.
-/// \param[in] value A 64-bit float.
-/// \return A pointer to an `AMresult` struct containing nothing.
-/// \pre \p doc must be a valid address.
-/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-#[no_mangle]
-pub unsafe extern "C" fn AMlistPutF64<'a>(
- doc: *mut AMdoc,
- obj_id: *mut AMobjId,
- index: usize,
- insert: bool,
- value: f64,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- let obj_id = to_obj_id!(obj_id);
- to_result(if insert {
- doc.insert(obj_id, index, value)
- } else {
- doc.put(obj_id, index, value)
- })
-}
-
-/// \memberof AMdoc
-/// \brief Puts a signed integer as the value at an index in a list object.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`.
-/// \param[in] index An index in the list object identified by \p obj.
-/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index.
-/// \param[in] value A 64-bit signed integer.
-/// \return A pointer to an `AMresult` struct containing nothing.
-/// \pre \p doc must be a valid address.
-/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-#[no_mangle]
-pub unsafe extern "C" fn AMlistPutInt<'a>(
- doc: *mut AMdoc,
- obj_id: *mut AMobjId,
- index: usize,
- insert: bool,
- value: i64,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- let obj_id = to_obj_id!(obj_id);
- to_result(if insert {
- doc.insert(obj_id, index, value)
- } else {
- doc.put(obj_id, index, value)
- })
-}
-
-/// \memberof AMdoc
-/// \brief Puts null as the value at an index in a list object.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`.
-/// \param[in] index An index in the list object identified by \p obj.
-/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index.
-/// \return A pointer to an `AMresult` struct containing nothing.
-/// \pre \p doc must be a valid address.
-/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-#[no_mangle]
-pub unsafe extern "C" fn AMlistPutNull<'a>(
- doc: *mut AMdoc,
- obj_id: *mut AMobjId,
- index: usize,
- insert: bool,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- let obj_id = to_obj_id!(obj_id);
- let value = ();
- to_result(if insert {
- doc.insert(obj_id, index, value)
- } else {
- doc.put(obj_id, index, value)
- })
-}
-
-/// \memberof AMdoc
-/// \brief Puts an empty object as the value at an index in a list object.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`.
-/// \param[in] index An index in the list object identified by \p obj.
-/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index.
-/// \param[in] obj_type An `AMobjIdType` enum tag.
-/// \return A pointer to an `AMresult` struct containing a pointer to an `AMobjId` struct.
-/// \pre \p doc must be a valid address.
-/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-#[no_mangle]
-pub unsafe extern "C" fn AMlistPutObject<'a>(
- doc: *mut AMdoc,
- obj_id: *mut AMobjId,
- index: usize,
- insert: bool,
- obj_type: AMobjType,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- let obj_id = to_obj_id!(obj_id);
- let value = obj_type.into();
- to_result(if insert {
- doc.insert_object(obj_id, index, value)
- } else {
- doc.put_object(&obj_id, index, value)
- })
-}
-
-/// \memberof AMdoc
-/// \brief Puts a UTF-8 string as the value at an index in a list object.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`.
-/// \param[in] index An index in the list object identified by \p obj.
-/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index.
-/// \param[in] value A UTF-8 string.
-/// \return A pointer to an `AMresult` struct containing nothing.
-/// \pre \p doc must be a valid address.
-/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj.
-/// \pre \p value must be a valid address.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-/// value must be a null-terminated array of `c_char`
-#[no_mangle]
-pub unsafe extern "C" fn AMlistPutStr<'a>(
- doc: *mut AMdoc,
- obj_id: *mut AMobjId,
- index: usize,
- insert: bool,
- value: *const c_char,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- let obj_id = to_obj_id!(obj_id);
- let value = to_str(value);
- to_result(if insert {
- doc.insert(obj_id, index, value)
- } else {
- doc.put(obj_id, index, value)
- })
-}
-
-/// \memberof AMdoc
-/// \brief Puts a Lamport timestamp as the value at an index in a list object.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`.
-/// \param[in] index An index in the list object identified by \p obj.
-/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index.
-/// \param[in] value A 64-bit signed integer.
-/// \return A pointer to an `AMresult` struct containing nothing.
-/// \pre \p doc must be a valid address.
-/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-#[no_mangle]
-pub unsafe extern "C" fn AMlistPutTimestamp<'a>(
- doc: *mut AMdoc,
- obj_id: *mut AMobjId,
- index: usize,
- insert: bool,
- value: i64,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- let obj_id = to_obj_id!(obj_id);
- let value = am::ScalarValue::Timestamp(value);
- to_result(if insert {
- doc.insert(obj_id, index, value)
- } else {
- doc.put(obj_id, index, value)
- })
-}
-
-/// \memberof AMdoc
-/// \brief Puts an unsigned integer as the value at an index in a list object.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`.
-/// \param[in] index An index in the list object identified by \p obj.
-/// \param[in] insert A flag to insert \p value before \p index instead of writing \p value over \p index.
-/// \param[in] value A 64-bit unsigned integer.
-/// \return A pointer to an `AMresult` struct containing nothing.
-/// \pre \p doc must be a valid address.
-/// \pre `0 <=` \p index `<=` length of the list object identified by \p obj.
-/// \warning To avoid a memory leak, the returned pointer must be deallocated
-/// with `AMfreeResult()`.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-#[no_mangle]
-pub unsafe extern "C" fn AMlistPutUint<'a>(
- doc: *mut AMdoc,
- obj_id: *mut AMobjId,
- index: usize,
- insert: bool,
- value: u64,
-) -> *mut AMresult<'a> {
- let doc = to_doc!(doc);
- let obj_id = to_obj_id!(obj_id);
- to_result(if insert {
- doc.insert(obj_id, index, value)
- } else {
- doc.put(obj_id, index, value)
- })
-}
-
-/// \memberof AMresult
-/// \brief Deallocates the storage for an `AMresult` struct.
-///
-/// \param[in] result A pointer to an `AMresult` struct.
-/// \pre \p result must be a valid address.
-/// \internal
-///
-/// # Safety
-/// result must be a pointer to a valid AMresult
-#[no_mangle]
-pub unsafe extern "C" fn AMfreeResult(result: *mut AMresult) {
- if !result.is_null() {
- let result: AMresult = *Box::from_raw(result);
- drop(result)
- }
-}
-
-/// \memberof AMresult
-/// \brief Gets an `AMresult` struct's error message string.
-///
-/// \param[in] result A pointer to an `AMresult` struct.
-/// \return A UTF-8 string value or `NULL`.
-/// \pre \p result must be a valid address.
-/// \internal
-///
-/// # Safety
-/// result must be a pointer to a valid AMresult
-#[no_mangle]
-pub unsafe extern "C" fn AMerrorMessage(result: *mut AMresult) -> *const c_char {
- match result.as_mut() {
- Some(AMresult::Error(s)) => s.as_ptr(),
- _ => std::ptr::null::(),
- }
-}
-
-/// \memberof AMdoc
-/// \brief Gets the size of an `AMobjId` struct.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct or `NULL`.
-/// \return The count of values in \p obj.
-/// \pre \p doc must be a valid address.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-#[no_mangle]
-pub unsafe extern "C" fn AMobjSize(doc: *const AMdoc, obj_id: *const AMobjId) -> usize {
- if let Some(doc) = doc.as_ref() {
- doc.length(to_obj_id!(obj_id))
- } else {
- 0
- }
-}
-
-/// \memberof AMdoc
-/// \brief Deallocates the storage for an `AMobjId` struct.
-///
-/// \param[in] doc A pointer to an `AMdoc` struct.
-/// \param[in] obj_id A pointer to an `AMobjId` struct.
-/// \pre \p doc must be a valid address.
-/// \pre \p obj_id must be a valid address.
-/// \note An `AMobjId` struct is automatically deallocated along with its owning
-/// `AMdoc` struct, this function just enables an `AMobjId` struct to be
-/// deallocated sooner than that.
-/// \internal
-///
-/// # Safety
-/// doc must be a pointer to a valid AMdoc
-/// obj_id must be a pointer to a valid AMobjId or NULL
-#[no_mangle]
-pub unsafe extern "C" fn AMfreeObjId(doc: *mut AMdoc, obj_id: *const AMobjId) {
- if let Some(doc) = doc.as_mut() {
- if let Some(obj_id) = obj_id.as_ref() {
- doc.drop_obj_id(obj_id);
- };
- };
-}
diff --git a/automerge-c/src/result.rs b/automerge-c/src/result.rs
deleted file mode 100644
index 07395fb1..00000000
--- a/automerge-c/src/result.rs
+++ /dev/null
@@ -1,212 +0,0 @@
-use automerge as am;
-use std::ffi::CString;
-use std::ops::Deref;
-
-/// \struct AMobjId
-/// \brief An object's unique identifier.
-#[derive(Clone, Eq, Ord, PartialEq, PartialOrd)]
-pub struct AMobjId(am::ObjId);
-
-impl AMobjId {
- pub fn new(obj_id: am::ObjId) -> Self {
- Self(obj_id)
- }
-}
-
-impl AsRef for AMobjId {
- fn as_ref(&self) -> &am::ObjId {
- &self.0
- }
-}
-
-impl Deref for AMobjId {
- type Target = am::ObjId;
-
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-
-/// \memberof AMvalue
-/// \struct AMbyteSpan
-/// \brief A contiguous sequence of bytes.
-///
-#[repr(C)]
-pub struct AMbyteSpan {
- /// A pointer to the byte at position zero.
- /// \warning \p src is only valid until the `AMfreeResult()` function is called
- /// on the `AMresult` struct hosting the array of bytes to which
- /// it points.
- src: *const u8,
- /// The number of bytes in the sequence.
- count: usize,
-}
-
-impl From<&Vec> for AMbyteSpan {
- fn from(v: &Vec) -> Self {
- AMbyteSpan {
- src: (*v).as_ptr(),
- count: (*v).len(),
- }
- }
-}
-
-impl From<&mut am::ActorId> for AMbyteSpan {
- fn from(actor: &mut am::ActorId) -> Self {
- let slice = actor.to_bytes();
- AMbyteSpan {
- src: slice.as_ptr(),
- count: slice.len(),
- }
- }
-}
-
-/// \struct AMvalue
-/// \brief A discriminated union of value type variants for an `AMresult` struct.
-///
-/// \enum AMvalueVariant
-/// \brief A value type discriminant.
-///
-/// \var AMvalue::tag
-/// The variant discriminator of an `AMvalue` struct.
-///
-/// \var AMvalue::actor_id
-/// An actor ID as an `AMbyteSpan` struct.
-///
-/// \var AMvalue::boolean
-/// A boolean.
-///
-/// \var AMvalue::bytes
-/// An array of bytes as an `AMbyteSpan` struct.
-///
-/// \var AMvalue::counter
-/// A CRDT counter.
-///
-/// \var AMvalue::f64
-/// A 64-bit float.
-///
-/// \var AMvalue::change_hash
-/// A change hash as an `AMbyteSpan` struct.
-///
-/// \var AMvalue::int_
-/// A 64-bit signed integer.
-///
-/// \var AMvalue::obj_id
-/// An object identifier.
-///
-/// \var AMvalue::str
-/// A UTF-8 string.
-///
-/// \var AMvalue::timestamp
-/// A Lamport timestamp.
-///
-/// \var AMvalue::uint
-/// A 64-bit unsigned integer.
-#[repr(C)]
-pub enum AMvalue<'a> {
- /// An actor ID variant.
- ActorId(AMbyteSpan),
- /// A boolean variant.
- Boolean(libc::c_char),
- /// An array of bytes variant.
- Bytes(AMbyteSpan),
- /*
- /// A changes variant.
- Changes(_),
- */
- /// A CRDT counter variant.
- Counter(i64),
- /// A 64-bit float variant.
- F64(f64),
- /// A change hash variant.
- ChangeHash(AMbyteSpan),
- /// A 64-bit signed integer variant.
- Int(i64),
- /*
- /// A keys variant.
- Keys(_),
- */
- /// A nothing variant.
- Nothing,
- /// A null variant.
- Null,
- /// An object identifier variant.
- ObjId(&'a AMobjId),
- /// A UTF-8 string variant.
- Str(*const libc::c_char),
- /// A Lamport timestamp variant.
- Timestamp(i64),
- /*
- /// A transaction variant.
- Transaction(_),
- */
- /// A 64-bit unsigned integer variant.
- Uint(u64),
-}
-
-/// \struct AMresult
-/// \brief A discriminated union of result variants.
-///
-pub enum AMresult<'a> {
- ActorId(am::ActorId),
- Changes(Vec),
- Error(CString),
- ObjId(&'a AMobjId),
- Nothing,
- Scalars(Vec>, Option),
-}
-
-impl<'a> AMresult<'a> {
- pub(crate) fn err(s: &str) -> Self {
- AMresult::Error(CString::new(s).unwrap())
- }
-}
-
-impl<'a> From> for AMresult<'a> {
- fn from(maybe: Result) -> Self {
- match maybe {
- Ok(actor_id) => AMresult::ActorId(actor_id),
- Err(e) => AMresult::Error(CString::new(e.to_string()).unwrap()),
- }
- }
-}
-
-impl<'a> From> for AMresult<'a> {
- fn from(maybe: Result<&'a AMobjId, am::AutomergeError>) -> Self {
- match maybe {
- Ok(obj_id) => AMresult::ObjId(obj_id),
- Err(e) => AMresult::Error(CString::new(e.to_string()).unwrap()),
- }
- }
-}
-
-impl<'a> From> for AMresult<'a> {
- fn from(maybe: Result<(), am::AutomergeError>) -> Self {
- match maybe {
- Ok(()) => AMresult::Nothing,
- Err(e) => AMresult::Error(CString::new(e.to_string()).unwrap()),
- }
- }
-}
-
-impl<'a> From, am::ObjId)>, am::AutomergeError>>
- for AMresult<'a>
-{
- fn from(maybe: Result, am::ObjId)>, am::AutomergeError>) -> Self {
- match maybe {
- // \todo Ensure that it's alright to ignore the `am::ObjId` value.
- Ok(Some((value, _))) => AMresult::Scalars(vec![value], None),
- Ok(None) => AMresult::Nothing,
- Err(e) => AMresult::Error(CString::new(e.to_string()).unwrap()),
- }
- }
-}
-
-impl<'a> From, am::AutomergeError>> for AMresult<'a> {
- fn from(maybe: Result, am::AutomergeError>) -> Self {
- match maybe {
- Ok(value) => AMresult::Scalars(vec![value], None),
- Err(e) => AMresult::Error(CString::new(e.to_string()).unwrap()),
- }
- }
-}
diff --git a/automerge-c/src/utils.rs b/automerge-c/src/utils.rs
deleted file mode 100644
index 70d2471a..00000000
--- a/automerge-c/src/utils.rs
+++ /dev/null
@@ -1,7 +0,0 @@
-use crate::AMresult;
-
-impl<'a> From> for *mut AMresult<'a> {
- fn from(b: AMresult<'a>) -> Self {
- Box::into_raw(Box::new(b))
- }
-}
diff --git a/automerge-c/test/CMakeLists.txt b/automerge-c/test/CMakeLists.txt
deleted file mode 100644
index 3da6051e..00000000
--- a/automerge-c/test/CMakeLists.txt
+++ /dev/null
@@ -1,51 +0,0 @@
-cmake_minimum_required(VERSION 3.18 FATAL_ERROR)
-
-find_package(cmocka REQUIRED)
-
-add_executable(
- test_${LIBRARY_NAME}
- group_state.c
- amdoc_property_tests.c
- amlistput_tests.c
- ammapput_tests.c
- macro_utils.c
- main.c
-)
-
-set_target_properties(test_${LIBRARY_NAME} PROPERTIES LINKER_LANGUAGE C)
-
-# \note An imported library's INTERFACE_INCLUDE_DIRECTORIES property can't
-# contain a non-existent path so its build-time include directory
-# must be specified for all of its dependent targets instead.
-target_include_directories(
- test_${LIBRARY_NAME}
- PRIVATE "$"
-)
-
-target_link_libraries(test_${LIBRARY_NAME} PRIVATE cmocka ${LIBRARY_NAME})
-
-add_dependencies(test_${LIBRARY_NAME} ${LIBRARY_NAME}_artifacts)
-
-if(BUILD_SHARED_LIBS AND WIN32)
- add_custom_command(
- TARGET test_${LIBRARY_NAME}
- POST_BUILD
- COMMAND ${CMAKE_COMMAND} -E copy_if_different
- ${CARGO_CURRENT_BINARY_DIR}/${CMAKE_SHARED_LIBRARY_PREFIX}${LIBRARY_NAME}${CMAKE_${CMAKE_BUILD_TYPE}_POSTFIX}${CMAKE_SHARED_LIBRARY_SUFFIX}
- ${CMAKE_CURRENT_BINARY_DIR}
- COMMENT "Copying the DLL built by Cargo into the test directory..."
- VERBATIM
- )
-endif()
-
-add_test(NAME test_${LIBRARY_NAME} COMMAND test_${LIBRARY_NAME})
-
-add_custom_command(
- TARGET test_${LIBRARY_NAME}
- POST_BUILD
- COMMAND
- ${CMAKE_CTEST_COMMAND} --config $ --output-on-failure
- COMMENT
- "Running the test(s)..."
- VERBATIM
-)
diff --git a/automerge-c/test/amdoc_property_tests.c b/automerge-c/test/amdoc_property_tests.c
deleted file mode 100644
index 4b2b3d2a..00000000
--- a/automerge-c/test/amdoc_property_tests.c
+++ /dev/null
@@ -1,110 +0,0 @@
-#include
-#include
-#include
-#include
-#include
-#include
-
-/* third-party */
-#include
-
-/* local */
-#include "group_state.h"
-
-typedef struct {
- GroupState* group_state;
- char const* actor_id_str;
- uint8_t* actor_id_bytes;
- size_t actor_id_size;
-} TestState;
-
-static void hex_to_bytes(char const* hex_str, uint8_t* bytes, size_t const count) {
- unsigned int byte;
- char const* next = hex_str;
- for (size_t index = 0; *next && index != count; next += 2, ++index) {
- if (sscanf(next, "%02x", &byte) == 1) {
- bytes[index] = (uint8_t)byte;
- }
- }
-}
-
-static int setup(void** state) {
- TestState* test_state = calloc(1, sizeof(TestState));
- group_setup((void**)&test_state->group_state);
- test_state->actor_id_str = "000102030405060708090a0b0c0d0e0f";
- test_state->actor_id_size = strlen(test_state->actor_id_str) / 2;
- test_state->actor_id_bytes = malloc(test_state->actor_id_size);
- hex_to_bytes(test_state->actor_id_str, test_state->actor_id_bytes, test_state->actor_id_size);
- *state = test_state;
- return 0;
-}
-
-static int teardown(void** state) {
- TestState* test_state = *state;
- group_teardown((void**)&test_state->group_state);
- free(test_state->actor_id_bytes);
- free(test_state);
- return 0;
-}
-
-static void test_AMputActor(void **state) {
- TestState* test_state = *state;
- GroupState* group_state = test_state->group_state;
- AMresult* res = AMsetActor(
- group_state->doc,
- test_state->actor_id_bytes,
- test_state->actor_id_size
- );
- if (AMresultStatus(res) != AM_STATUS_OK) {
- fail_msg("%s", AMerrorMessage(res));
- }
- assert_int_equal(AMresultSize(res), 0);
- AMvalue value = AMresultValue(res, 0);
- assert_int_equal(value.tag, AM_VALUE_NOTHING);
- AMfreeResult(res);
- res = AMgetActor(group_state->doc);
- if (AMresultStatus(res) != AM_STATUS_OK) {
- fail_msg("%s", AMerrorMessage(res));
- }
- assert_int_equal(AMresultSize(res), 1);
- value = AMresultValue(res, 0);
- assert_int_equal(value.tag, AM_VALUE_ACTOR_ID);
- assert_int_equal(value.actor_id.count, test_state->actor_id_size);
- assert_memory_equal(value.actor_id.src, test_state->actor_id_bytes, value.actor_id.count);
- AMfreeResult(res);
-}
-
-static void test_AMputActorHex(void **state) {
- TestState* test_state = *state;
- GroupState* group_state = test_state->group_state;
- AMresult* res = AMsetActorHex(
- group_state->doc,
- test_state->actor_id_str
- );
- if (AMresultStatus(res) != AM_STATUS_OK) {
- fail_msg("%s", AMerrorMessage(res));
- }
- assert_int_equal(AMresultSize(res), 0);
- AMvalue value = AMresultValue(res, 0);
- assert_int_equal(value.tag, AM_VALUE_NOTHING);
- AMfreeResult(res);
- res = AMgetActorHex(group_state->doc);
- if (AMresultStatus(res) != AM_STATUS_OK) {
- fail_msg("%s", AMerrorMessage(res));
- }
- assert_int_equal(AMresultSize(res), 1);
- value = AMresultValue(res, 0);
- assert_int_equal(value.tag, AM_VALUE_STR);
- assert_int_equal(strlen(value.str), test_state->actor_id_size * 2);
- assert_string_equal(value.str, test_state->actor_id_str);
- AMfreeResult(res);
-}
-
-int run_AMdoc_property_tests(void) {
- const struct CMUnitTest tests[] = {
- cmocka_unit_test_setup_teardown(test_AMputActor, setup, teardown),
- cmocka_unit_test_setup_teardown(test_AMputActorHex, setup, teardown),
- };
-
- return cmocka_run_group_tests(tests, NULL, NULL);
-}
diff --git a/automerge-c/test/amlistput_tests.c b/automerge-c/test/amlistput_tests.c
deleted file mode 100644
index 27b4fae9..00000000
--- a/automerge-c/test/amlistput_tests.c
+++ /dev/null
@@ -1,235 +0,0 @@
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-
-/* third-party */
-#include
-
-/* local */
-#include "group_state.h"
-#include "macro_utils.h"
-
-#define test_AMlistPut(suffix, mode) test_AMlistPut ## suffix ## _ ## mode
-
-#define static_void_test_AMlistPut(suffix, mode, member, scalar_value) \
-static void test_AMlistPut ## suffix ## _ ## mode(void **state) { \
- GroupState* group_state = *state; \
- AMresult* res = AMlistPut ## suffix( \
- group_state->doc, AM_ROOT, 0, !strcmp(#mode, "insert"), scalar_value \
- ); \
- if (AMresultStatus(res) != AM_STATUS_OK) { \
- fail_msg("%s", AMerrorMessage(res)); \
- } \
- assert_int_equal(AMresultSize(res), 0); \
- AMvalue value = AMresultValue(res, 0); \
- assert_int_equal(value.tag, AM_VALUE_NOTHING); \
- AMfreeResult(res); \
- res = AMlistGet(group_state->doc, AM_ROOT, 0); \
- if (AMresultStatus(res) != AM_STATUS_OK) { \
- fail_msg("%s", AMerrorMessage(res)); \
- } \
- assert_int_equal(AMresultSize(res), 1); \
- value = AMresultValue(res, 0); \
- assert_int_equal(value.tag, AMvalue_discriminant(#suffix)); \
- assert_true(value.member == scalar_value); \
- AMfreeResult(res); \
-}
-
-#define test_AMlistPutBytes(mode) test_AMlistPutBytes ## _ ## mode
-
-#define static_void_test_AMlistPutBytes(mode, bytes_value) \
-static void test_AMlistPutBytes_ ## mode(void **state) { \
- static size_t const BYTES_SIZE = sizeof(bytes_value) / sizeof(uint8_t); \
- \
- GroupState* group_state = *state; \
- AMresult* res = AMlistPutBytes( \
- group_state->doc, \
- AM_ROOT, \
- 0, \
- !strcmp(#mode, "insert"), \
- bytes_value, \
- BYTES_SIZE \
- ); \
- if (AMresultStatus(res) != AM_STATUS_OK) { \
- fail_msg("%s", AMerrorMessage(res)); \
- } \
- assert_int_equal(AMresultSize(res), 0); \
- AMvalue value = AMresultValue(res, 0); \
- assert_int_equal(value.tag, AM_VALUE_NOTHING); \
- AMfreeResult(res); \
- res = AMlistGet(group_state->doc, AM_ROOT, 0); \
- if (AMresultStatus(res) != AM_STATUS_OK) { \
- fail_msg("%s", AMerrorMessage(res)); \
- } \
- assert_int_equal(AMresultSize(res), 1); \
- value = AMresultValue(res, 0); \
- assert_int_equal(value.tag, AM_VALUE_BYTES); \
- assert_int_equal(value.bytes.count, BYTES_SIZE); \
- assert_memory_equal(value.bytes.src, bytes_value, BYTES_SIZE); \
- AMfreeResult(res); \
-}
-
-#define test_AMlistPutNull(mode) test_AMlistPutNull_ ## mode
-
-#define static_void_test_AMlistPutNull(mode) \
-static void test_AMlistPutNull_ ## mode(void **state) { \
- GroupState* group_state = *state; \
- AMresult* res = AMlistPutNull( \
- group_state->doc, AM_ROOT, 0, !strcmp(#mode, "insert")); \
- if (AMresultStatus(res) != AM_STATUS_OK) { \
- fail_msg("%s", AMerrorMessage(res)); \
- } \
- assert_int_equal(AMresultSize(res), 0); \
- AMvalue value = AMresultValue(res, 0); \
- assert_int_equal(value.tag, AM_VALUE_NOTHING); \
- AMfreeResult(res); \
- res = AMlistGet(group_state->doc, AM_ROOT, 0); \
- if (AMresultStatus(res) != AM_STATUS_OK) { \
- fail_msg("%s", AMerrorMessage(res)); \
- } \
- assert_int_equal(AMresultSize(res), 1); \
- value = AMresultValue(res, 0); \
- assert_int_equal(value.tag, AM_VALUE_NULL); \
- AMfreeResult(res); \
-}
-
-#define test_AMlistPutObject(label, mode) test_AMlistPutObject_ ## label ## _ ## mode
-
-#define static_void_test_AMlistPutObject(label, mode) \
-static void test_AMlistPutObject_ ## label ## _ ## mode(void **state) { \
- GroupState* group_state = *state; \
- AMresult* res = AMlistPutObject( \
- group_state->doc, \
- AM_ROOT, \
- 0, \
- !strcmp(#mode, "insert"), \
- AMobjType_tag(#label) \
- ); \
- if (AMresultStatus(res) != AM_STATUS_OK) { \
- fail_msg("%s", AMerrorMessage(res)); \
- } \
- assert_int_equal(AMresultSize(res), 1); \
- AMvalue value = AMresultValue(res, 0); \
- assert_int_equal(value.tag, AM_VALUE_OBJ_ID); \
- /** \
- * \note The `AMresult` struct can be deallocated immediately when its \
- * value is a pointer to an opaque struct because its lifetime \
- * is tied to the `AMdoc` struct instead. \
- */ \
- AMfreeResult(res); \
- assert_non_null(value.obj_id); \
- assert_int_equal(AMobjSize(group_state->doc, value.obj_id), 0); \
- AMfreeObjId(group_state->doc, value.obj_id); \
-}
-
-#define test_AMlistPutStr(mode) test_AMlistPutStr ## _ ## mode
-
-#define static_void_test_AMlistPutStr(mode, str_value) \
-static void test_AMlistPutStr_ ## mode(void **state) { \
- static size_t const STR_LEN = strlen(str_value); \
- \
- GroupState* group_state = *state; \
- AMresult* res = AMlistPutStr( \
- group_state->doc, \
- AM_ROOT, \
- 0, \
- !strcmp(#mode, "insert"), \
- str_value \
- ); \
- if (AMresultStatus(res) != AM_STATUS_OK) { \
- fail_msg("%s", AMerrorMessage(res)); \
- } \
- assert_int_equal(AMresultSize(res), 0); \
- AMvalue value = AMresultValue(res, 0); \
- assert_int_equal(value.tag, AM_VALUE_NOTHING); \
- AMfreeResult(res); \
- res = AMlistGet(group_state->doc, AM_ROOT, 0); \
- if (AMresultStatus(res) != AM_STATUS_OK) { \
- fail_msg("%s", AMerrorMessage(res)); \
- } \
- assert_int_equal(AMresultSize(res), 1); \
- value = AMresultValue(res, 0); \
- assert_int_equal(value.tag, AM_VALUE_STR); \
- assert_int_equal(strlen(value.str), STR_LEN); \
- assert_memory_equal(value.str, str_value, STR_LEN + 1); \
- AMfreeResult(res); \
-}
-
-static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX};
-
-static_void_test_AMlistPutBytes(insert, BYTES_VALUE)
-
-static_void_test_AMlistPutBytes(update, BYTES_VALUE)
-
-static_void_test_AMlistPut(Counter, insert, counter, INT64_MAX)
-
-static_void_test_AMlistPut(Counter, update, counter, INT64_MAX)
-
-static_void_test_AMlistPut(F64, insert, f64, DBL_MAX)
-
-static_void_test_AMlistPut(F64, update, f64, DBL_MAX)
-
-static_void_test_AMlistPut(Int, insert, int_, INT64_MAX)
-
-static_void_test_AMlistPut(Int, update, int_, INT64_MAX)
-
-static_void_test_AMlistPutNull(insert)
-
-static_void_test_AMlistPutNull(update)
-
-static_void_test_AMlistPutObject(List, insert)
-
-static_void_test_AMlistPutObject(List, update)
-
-static_void_test_AMlistPutObject(Map, insert)
-
-static_void_test_AMlistPutObject(Map, update)
-
-static_void_test_AMlistPutObject(Text, insert)
-
-static_void_test_AMlistPutObject(Text, update)
-
-static_void_test_AMlistPutStr(insert, "Hello, world!")
-
-static_void_test_AMlistPutStr(update, "Hello, world!")
-
-static_void_test_AMlistPut(Timestamp, insert, timestamp, INT64_MAX)
-
-static_void_test_AMlistPut(Timestamp, update, timestamp, INT64_MAX)
-
-static_void_test_AMlistPut(Uint, insert, uint, UINT64_MAX)
-
-static_void_test_AMlistPut(Uint, update, uint, UINT64_MAX)
-
-int run_AMlistPut_tests(void) {
- const struct CMUnitTest tests[] = {
- cmocka_unit_test(test_AMlistPutBytes(insert)),
- cmocka_unit_test(test_AMlistPutBytes(update)),
- cmocka_unit_test(test_AMlistPut(Counter, insert)),
- cmocka_unit_test(test_AMlistPut(Counter, update)),
- cmocka_unit_test(test_AMlistPut(F64, insert)),
- cmocka_unit_test(test_AMlistPut(F64, update)),
- cmocka_unit_test(test_AMlistPut(Int, insert)),
- cmocka_unit_test(test_AMlistPut(Int, update)),
- cmocka_unit_test(test_AMlistPutNull(insert)),
- cmocka_unit_test(test_AMlistPutNull(update)),
- cmocka_unit_test(test_AMlistPutObject(List, insert)),
- cmocka_unit_test(test_AMlistPutObject(List, update)),
- cmocka_unit_test(test_AMlistPutObject(Map, insert)),
- cmocka_unit_test(test_AMlistPutObject(Map, update)),
- cmocka_unit_test(test_AMlistPutObject(Text, insert)),
- cmocka_unit_test(test_AMlistPutObject(Text, update)),
- cmocka_unit_test(test_AMlistPutStr(insert)),
- cmocka_unit_test(test_AMlistPutStr(update)),
- cmocka_unit_test(test_AMlistPut(Timestamp, insert)),
- cmocka_unit_test(test_AMlistPut(Timestamp, update)),
- cmocka_unit_test(test_AMlistPut(Uint, insert)),
- cmocka_unit_test(test_AMlistPut(Uint, update)),
- };
-
- return cmocka_run_group_tests(tests, group_setup, group_teardown);
-}
diff --git a/automerge-c/test/ammapput_tests.c b/automerge-c/test/ammapput_tests.c
deleted file mode 100644
index 79b79f62..00000000
--- a/automerge-c/test/ammapput_tests.c
+++ /dev/null
@@ -1,190 +0,0 @@
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-
-/* third-party */
-#include
-
-/* local */
-#include "group_state.h"
-#include "macro_utils.h"
-
-#define test_AMmapPut(suffix) test_AMmapPut ## suffix
-
-#define static_void_test_AMmapPut(suffix, member, scalar_value) \
-static void test_AMmapPut ## suffix(void **state) { \
- GroupState* group_state = *state; \
- AMresult* res = AMmapPut ## suffix( \
- group_state->doc, \
- AM_ROOT, \
- #suffix, \
- scalar_value \
- ); \
- if (AMresultStatus(res) != AM_STATUS_OK) { \
- fail_msg("%s", AMerrorMessage(res)); \
- } \
- assert_int_equal(AMresultSize(res), 0); \
- AMvalue value = AMresultValue(res, 0); \
- assert_int_equal(value.tag, AM_VALUE_NOTHING); \
- AMfreeResult(res); \
- res = AMmapGet(group_state->doc, AM_ROOT, #suffix); \
- if (AMresultStatus(res) != AM_STATUS_OK) { \
- fail_msg("%s", AMerrorMessage(res)); \
- } \
- assert_int_equal(AMresultSize(res), 1); \
- value = AMresultValue(res, 0); \
- assert_int_equal(value.tag, AMvalue_discriminant(#suffix)); \
- assert_true(value.member == scalar_value); \
- AMfreeResult(res); \
-}
-
-#define test_AMmapPutObject(label) test_AMmapPutObject_ ## label
-
-#define static_void_test_AMmapPutObject(label) \
-static void test_AMmapPutObject_ ## label(void **state) { \
- GroupState* group_state = *state; \
- AMresult* res = AMmapPutObject( \
- group_state->doc, \
- AM_ROOT, \
- #label, \
- AMobjType_tag(#label) \
- ); \
- if (AMresultStatus(res) != AM_STATUS_OK) { \
- fail_msg("%s", AMerrorMessage(res)); \
- } \
- assert_int_equal(AMresultSize(res), 1); \
- AMvalue value = AMresultValue(res, 0); \
- assert_int_equal(value.tag, AM_VALUE_OBJ_ID); \
- /** \
- * \note The `AMresult` struct can be deallocated immediately when its \
- * value is a pointer to an opaque struct because its lifetime \
- * is tied to the `AMdoc` struct instead. \
- */ \
- AMfreeResult(res); \
- assert_non_null(value.obj_id); \
- assert_int_equal(AMobjSize(group_state->doc, value.obj_id), 0); \
- AMfreeObjId(group_state->doc, value.obj_id); \
-}
-
-static void test_AMmapPutBytes(void **state) {
- static char const* const KEY = "Bytes";
- static uint8_t const BYTES_VALUE[] = {INT8_MIN, INT8_MAX / 2, INT8_MAX};
- static size_t const BYTES_SIZE = sizeof(BYTES_VALUE) / sizeof(uint8_t);
-
- GroupState* group_state = *state;
- AMresult* res = AMmapPutBytes(
- group_state->doc,
- AM_ROOT,
- KEY,
- BYTES_VALUE,
- BYTES_SIZE
- );
- if (AMresultStatus(res) != AM_STATUS_OK) {
- fail_msg("%s", AMerrorMessage(res));
- }
- assert_int_equal(AMresultSize(res), 0);
- AMvalue value = AMresultValue(res, 0);
- assert_int_equal(value.tag, AM_VALUE_NOTHING);
- AMfreeResult(res);
- res = AMmapGet(group_state->doc, AM_ROOT, KEY);
- if (AMresultStatus(res) != AM_STATUS_OK) {
- fail_msg("%s", AMerrorMessage(res));
- }
- assert_int_equal(AMresultSize(res), 1);
- value = AMresultValue(res, 0);
- assert_int_equal(value.tag, AM_VALUE_BYTES);
- assert_int_equal(value.bytes.count, BYTES_SIZE);
- assert_memory_equal(value.bytes.src, BYTES_VALUE, BYTES_SIZE);
- AMfreeResult(res);
-}
-
-static_void_test_AMmapPut(Counter, counter, INT64_MAX)
-
-static_void_test_AMmapPut(F64, f64, DBL_MAX)
-
-static_void_test_AMmapPut(Int, int_, INT64_MAX)
-
-static void test_AMmapPutNull(void **state) {
- static char const* const KEY = "Null";
-
- GroupState* group_state = *state;
- AMresult* res = AMmapPutNull(group_state->doc, AM_ROOT, KEY);
- if (AMresultStatus(res) != AM_STATUS_OK) {
- fail_msg("%s", AMerrorMessage(res));
- }
- assert_int_equal(AMresultSize(res), 0);
- AMvalue value = AMresultValue(res, 0);
- assert_int_equal(value.tag, AM_VALUE_NOTHING);
- AMfreeResult(res);
- res = AMmapGet(group_state->doc, AM_ROOT, KEY);
- if (AMresultStatus(res) != AM_STATUS_OK) {
- fail_msg("%s", AMerrorMessage(res));
- }
- assert_int_equal(AMresultSize(res), 1);
- value = AMresultValue(res, 0);
- assert_int_equal(value.tag, AM_VALUE_NULL);
- AMfreeResult(res);
-}
-
-static_void_test_AMmapPutObject(List)
-
-static_void_test_AMmapPutObject(Map)
-
-static_void_test_AMmapPutObject(Text)
-
-static void test_AMmapPutStr(void **state) {
- static char const* const KEY = "Str";
- static char const* const STR_VALUE = "Hello, world!";
- size_t const STR_LEN = strlen(STR_VALUE);
-
- GroupState* group_state = *state;
- AMresult* res = AMmapPutStr(
- group_state->doc,
- AM_ROOT,
- KEY,
- STR_VALUE
- );
- if (AMresultStatus(res) != AM_STATUS_OK) {
- fail_msg("%s", AMerrorMessage(res));
- }
- assert_int_equal(AMresultSize(res), 0);
- AMvalue value = AMresultValue(res, 0);
- assert_int_equal(value.tag, AM_VALUE_NOTHING);
- AMfreeResult(res);
- res = AMmapGet(group_state->doc, AM_ROOT, KEY);
- if (AMresultStatus(res) != AM_STATUS_OK) {
- fail_msg("%s", AMerrorMessage(res));
- }
- assert_int_equal(AMresultSize(res), 1);
- value = AMresultValue(res, 0);
- assert_int_equal(value.tag, AM_VALUE_STR);
- assert_int_equal(strlen(value.str), STR_LEN);
- assert_memory_equal(value.str, STR_VALUE, STR_LEN + 1);
- AMfreeResult(res);
-}
-
-static_void_test_AMmapPut(Timestamp, timestamp, INT64_MAX)
-
-static_void_test_AMmapPut(Uint, uint, UINT64_MAX)
-
-int run_AMmapPut_tests(void) {
- const struct CMUnitTest tests[] = {
- cmocka_unit_test(test_AMmapPutBytes),
- cmocka_unit_test(test_AMmapPut(Counter)),
- cmocka_unit_test(test_AMmapPut(F64)),
- cmocka_unit_test(test_AMmapPut(Int)),
- cmocka_unit_test(test_AMmapPutNull),
- cmocka_unit_test(test_AMmapPutObject(List)),
- cmocka_unit_test(test_AMmapPutObject(Map)),
- cmocka_unit_test(test_AMmapPutObject(Text)),
- cmocka_unit_test(test_AMmapPutStr),
- cmocka_unit_test(test_AMmapPut(Timestamp)),
- cmocka_unit_test(test_AMmapPut(Uint)),
- };
-
- return cmocka_run_group_tests(tests, group_setup, group_teardown);
-}
diff --git a/automerge-c/test/group_state.c b/automerge-c/test/group_state.c
deleted file mode 100644
index a0a2a049..00000000
--- a/automerge-c/test/group_state.c
+++ /dev/null
@@ -1,18 +0,0 @@
-#include
-
-/* local */
-#include "group_state.h"
-
-int group_setup(void** state) {
- GroupState* group_state = calloc(1, sizeof(GroupState));
- group_state->doc = AMallocDoc();
- *state = group_state;
- return 0;
-}
-
-int group_teardown(void** state) {
- GroupState* group_state = *state;
- AMfreeDoc(group_state->doc);
- free(group_state);
- return 0;
-}
diff --git a/automerge-c/test/group_state.h b/automerge-c/test/group_state.h
deleted file mode 100644
index 749209c2..00000000
--- a/automerge-c/test/group_state.h
+++ /dev/null
@@ -1,15 +0,0 @@
-#ifndef GROUP_STATE_INCLUDED
-#define GROUP_STATE_INCLUDED
-
-/* local */
-#include "automerge.h"
-
-typedef struct {
- AMdoc* doc;
-} GroupState;
-
-int group_setup(void** state);
-
-int group_teardown(void** state);
-
-#endif
diff --git a/automerge-c/test/macro_utils.c b/automerge-c/test/macro_utils.c
deleted file mode 100644
index d4343bc0..00000000
--- a/automerge-c/test/macro_utils.c
+++ /dev/null
@@ -1,23 +0,0 @@
-#include
-
-/* local */
-#include "macro_utils.h"
-
-AMvalueVariant AMvalue_discriminant(char const* suffix) {
- if (!strcmp(suffix, "Bytes")) return AM_VALUE_BYTES;
- else if (!strcmp(suffix, "Counter")) return AM_VALUE_COUNTER;
- else if (!strcmp(suffix, "F64")) return AM_VALUE_F64;
- else if (!strcmp(suffix, "Int")) return AM_VALUE_INT;
- else if (!strcmp(suffix, "Null")) return AM_VALUE_NULL;
- else if (!strcmp(suffix, "Str")) return AM_VALUE_STR;
- else if (!strcmp(suffix, "Timestamp")) return AM_VALUE_TIMESTAMP;
- else if (!strcmp(suffix, "Uint")) return AM_VALUE_UINT;
- else return AM_VALUE_NOTHING;
-}
-
-AMobjType AMobjType_tag(char const* obj_type_label) {
- if (!strcmp(obj_type_label, "List")) return AM_OBJ_TYPE_LIST;
- else if (!strcmp(obj_type_label, "Map")) return AM_OBJ_TYPE_MAP;
- else if (!strcmp(obj_type_label, "Text")) return AM_OBJ_TYPE_TEXT;
- else return 0;
-}
diff --git a/automerge-c/test/macro_utils.h b/automerge-c/test/macro_utils.h
deleted file mode 100644
index 5a74c562..00000000
--- a/automerge-c/test/macro_utils.h
+++ /dev/null
@@ -1,23 +0,0 @@
-#ifndef MACRO_UTILS_INCLUDED
-#define MACRO_UTILS_INCLUDED
-
-/* local */
-#include "automerge.h"
-
-/**
- * \brief Gets the `AMvalue` discriminant corresponding to a function name suffix.
- *
- * \param[in] suffix A string.
- * \return An `AMvalue` variant discriminant enum tag.
- */
-AMvalueVariant AMvalue_discriminant(char const* suffix);
-
-/**
- * \brief Gets the `AMobjType` tag corresponding to a object type label.
- *
- * \param[in] obj_type_label A string.
- * \return An `AMobjType` enum tag.
- */
-AMobjType AMobjType_tag(char const* obj_type_label);
-
-#endif
diff --git a/automerge-c/test/main.c b/automerge-c/test/main.c
deleted file mode 100644
index 11a2e888..00000000
--- a/automerge-c/test/main.c
+++ /dev/null
@@ -1,21 +0,0 @@
-#include
-#include
-#include
-#include
-
-/* third-party */
-#include
-
-extern int run_AMdoc_property_tests(void);
-
-extern int run_AMlistPut_tests(void);
-
-extern int run_AMmapPut_tests(void);
-
-int main(void) {
- return (
- run_AMdoc_property_tests() +
- run_AMlistPut_tests() +
- run_AMmapPut_tests()
- );
-}
diff --git a/automerge-cli/Cargo.lock b/automerge-cli/Cargo.lock
deleted file mode 100644
index a330ee89..00000000
--- a/automerge-cli/Cargo.lock
+++ /dev/null
@@ -1,857 +0,0 @@
-# This file is automatically @generated by Cargo.
-# It is not intended for manual editing.
-version = 3
-
-[[package]]
-name = "adler"
-version = "1.0.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
-
-[[package]]
-name = "ansi_term"
-version = "0.12.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
-dependencies = [
- "winapi",
-]
-
-[[package]]
-name = "anyhow"
-version = "1.0.55"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "159bb86af3a200e19a068f4224eae4c8bb2d0fa054c7e5d1cacd5cef95e684cd"
-
-[[package]]
-name = "atty"
-version = "0.2.14"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
-dependencies = [
- "hermit-abi",
- "libc",
- "winapi",
-]
-
-[[package]]
-name = "autocfg"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
-
-[[package]]
-name = "automerge"
-version = "0.1.0"
-dependencies = [
- "flate2",
- "fxhash",
- "hex",
- "itertools",
- "js-sys",
- "leb128",
- "nonzero_ext",
- "rand",
- "serde",
- "sha2",
- "smol_str",
- "thiserror",
- "tinyvec",
- "tracing",
- "unicode-segmentation",
- "uuid",
- "wasm-bindgen",
- "web-sys",
-]
-
-[[package]]
-name = "automerge-cli"
-version = "0.1.0"
-dependencies = [
- "anyhow",
- "atty",
- "automerge",
- "clap",
- "colored_json",
- "combine",
- "duct",
- "maplit",
- "serde_json",
- "thiserror",
- "tracing-subscriber",
-]
-
-[[package]]
-name = "bitflags"
-version = "1.3.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
-
-[[package]]
-name = "block-buffer"
-version = "0.10.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324"
-dependencies = [
- "generic-array",
-]
-
-[[package]]
-name = "bumpalo"
-version = "3.9.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899"
-
-[[package]]
-name = "byteorder"
-version = "1.4.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
-
-[[package]]
-name = "bytes"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8"
-
-[[package]]
-name = "cfg-if"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
-
-[[package]]
-name = "clap"
-version = "3.1.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ced1892c55c910c1219e98d6fc8d71f6bddba7905866ce740066d8bfea859312"
-dependencies = [
- "atty",
- "bitflags",
- "clap_derive",
- "indexmap",
- "lazy_static",
- "os_str_bytes",
- "strsim",
- "termcolor",
- "textwrap",
-]
-
-[[package]]
-name = "clap_derive"
-version = "3.1.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da95d038ede1a964ce99f49cbe27a7fb538d1da595e4b4f70b8c8f338d17bf16"
-dependencies = [
- "heck",
- "proc-macro-error",
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "colored_json"
-version = "2.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fd32eb54d016e203b7c2600e3a7802c75843a92e38ccc4869aefeca21771a64"
-dependencies = [
- "ansi_term",
- "atty",
- "libc",
- "serde",
- "serde_json",
-]
-
-[[package]]
-name = "combine"
-version = "4.6.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "50b727aacc797f9fc28e355d21f34709ac4fc9adecfe470ad07b8f4464f53062"
-dependencies = [
- "bytes",
- "memchr",
-]
-
-[[package]]
-name = "cpufeatures"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469"
-dependencies = [
- "libc",
-]
-
-[[package]]
-name = "crc32fast"
-version = "1.3.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
-dependencies = [
- "cfg-if",
-]
-
-[[package]]
-name = "crypto-common"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "57952ca27b5e3606ff4dd79b0020231aaf9d6aa76dc05fd30137538c50bd3ce8"
-dependencies = [
- "generic-array",
- "typenum",
-]
-
-[[package]]
-name = "digest"
-version = "0.10.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506"
-dependencies = [
- "block-buffer",
- "crypto-common",
-]
-
-[[package]]
-name = "duct"
-version = "0.13.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0fc6a0a59ed0888e0041cf708e66357b7ae1a82f1c67247e1f93b5e0818f7d8d"
-dependencies = [
- "libc",
- "once_cell",
- "os_pipe",
- "shared_child",
-]
-
-[[package]]
-name = "either"
-version = "1.6.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
-
-[[package]]
-name = "flate2"
-version = "1.0.22"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f"
-dependencies = [
- "cfg-if",
- "crc32fast",
- "libc",
- "miniz_oxide",
-]
-
-[[package]]
-name = "fxhash"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
-dependencies = [
- "byteorder",
-]
-
-[[package]]
-name = "generic-array"
-version = "0.14.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fd48d33ec7f05fbfa152300fdad764757cbded343c1aa1cff2fbaf4134851803"
-dependencies = [
- "typenum",
- "version_check",
-]
-
-[[package]]
-name = "getrandom"
-version = "0.2.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d39cd93900197114fa1fcb7ae84ca742095eed9442088988ae74fa744e930e77"
-dependencies = [
- "cfg-if",
- "js-sys",
- "libc",
- "wasi",
- "wasm-bindgen",
-]
-
-[[package]]
-name = "hashbrown"
-version = "0.11.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
-
-[[package]]
-name = "heck"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9"
-
-[[package]]
-name = "hermit-abi"
-version = "0.1.19"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
-dependencies = [
- "libc",
-]
-
-[[package]]
-name = "hex"
-version = "0.4.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
-
-[[package]]
-name = "indexmap"
-version = "1.8.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "282a6247722caba404c065016bbfa522806e51714c34f5dfc3e4a3a46fcb4223"
-dependencies = [
- "autocfg",
- "hashbrown",
-]
-
-[[package]]
-name = "itertools"
-version = "0.10.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3"
-dependencies = [
- "either",
-]
-
-[[package]]
-name = "itoa"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35"
-
-[[package]]
-name = "js-sys"
-version = "0.3.56"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a38fc24e30fd564ce974c02bf1d337caddff65be6cc4735a1f7eab22a7440f04"
-dependencies = [
- "wasm-bindgen",
-]
-
-[[package]]
-name = "lazy_static"
-version = "1.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
-
-[[package]]
-name = "leb128"
-version = "0.2.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67"
-
-[[package]]
-name = "libc"
-version = "0.2.119"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1bf2e165bb3457c8e098ea76f3e3bc9db55f87aa90d52d0e6be741470916aaa4"
-
-[[package]]
-name = "log"
-version = "0.4.14"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710"
-dependencies = [
- "cfg-if",
-]
-
-[[package]]
-name = "maplit"
-version = "1.0.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d"
-
-[[package]]
-name = "memchr"
-version = "2.4.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
-
-[[package]]
-name = "miniz_oxide"
-version = "0.4.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b"
-dependencies = [
- "adler",
- "autocfg",
-]
-
-[[package]]
-name = "nonzero_ext"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "44a1290799eababa63ea60af0cbc3f03363e328e58f32fb0294798ed3e85f444"
-
-[[package]]
-name = "once_cell"
-version = "1.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5"
-
-[[package]]
-name = "os_pipe"
-version = "0.9.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fb233f06c2307e1f5ce2ecad9f8121cffbbee2c95428f44ea85222e460d0d213"
-dependencies = [
- "libc",
- "winapi",
-]
-
-[[package]]
-name = "os_str_bytes"
-version = "6.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e22443d1643a904602595ba1cd8f7d896afe56d26712531c5ff73a15b2fbf64"
-dependencies = [
- "memchr",
-]
-
-[[package]]
-name = "pin-project-lite"
-version = "0.2.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c"
-
-[[package]]
-name = "ppv-lite86"
-version = "0.2.16"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872"
-
-[[package]]
-name = "proc-macro-error"
-version = "1.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
-dependencies = [
- "proc-macro-error-attr",
- "proc-macro2",
- "quote",
- "syn",
- "version_check",
-]
-
-[[package]]
-name = "proc-macro-error-attr"
-version = "1.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
-dependencies = [
- "proc-macro2",
- "quote",
- "version_check",
-]
-
-[[package]]
-name = "proc-macro2"
-version = "1.0.36"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029"
-dependencies = [
- "unicode-xid",
-]
-
-[[package]]
-name = "quote"
-version = "1.0.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145"
-dependencies = [
- "proc-macro2",
-]
-
-[[package]]
-name = "rand"
-version = "0.8.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
-dependencies = [
- "libc",
- "rand_chacha",
- "rand_core",
-]
-
-[[package]]
-name = "rand_chacha"
-version = "0.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
-dependencies = [
- "ppv-lite86",
- "rand_core",
-]
-
-[[package]]
-name = "rand_core"
-version = "0.6.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
-dependencies = [
- "getrandom",
-]
-
-[[package]]
-name = "ryu"
-version = "1.0.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f"
-
-[[package]]
-name = "serde"
-version = "1.0.136"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789"
-dependencies = [
- "serde_derive",
-]
-
-[[package]]
-name = "serde_derive"
-version = "1.0.136"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "serde_json"
-version = "1.0.79"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95"
-dependencies = [
- "itoa",
- "ryu",
- "serde",
-]
-
-[[package]]
-name = "sha2"
-version = "0.10.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "55deaec60f81eefe3cce0dc50bda92d6d8e88f2a27df7c5033b42afeb1ed2676"
-dependencies = [
- "cfg-if",
- "cpufeatures",
- "digest",
-]
-
-[[package]]
-name = "sharded-slab"
-version = "0.1.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31"
-dependencies = [
- "lazy_static",
-]
-
-[[package]]
-name = "shared_child"
-version = "0.3.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6be9f7d5565b1483af3e72975e2dee33879b3b86bd48c0929fccf6585d79e65a"
-dependencies = [
- "libc",
- "winapi",
-]
-
-[[package]]
-name = "smallvec"
-version = "1.8.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83"
-
-[[package]]
-name = "smol_str"
-version = "0.1.21"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "61d15c83e300cce35b7c8cd39ff567c1ef42dde6d4a1a38dbdbf9a59902261bd"
-dependencies = [
- "serde",
-]
-
-[[package]]
-name = "strsim"
-version = "0.10.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
-
-[[package]]
-name = "syn"
-version = "1.0.86"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b"
-dependencies = [
- "proc-macro2",
- "quote",
- "unicode-xid",
-]
-
-[[package]]
-name = "termcolor"
-version = "1.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755"
-dependencies = [
- "winapi-util",
-]
-
-[[package]]
-name = "textwrap"
-version = "0.15.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb"
-
-[[package]]
-name = "thiserror"
-version = "1.0.30"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417"
-dependencies = [
- "thiserror-impl",
-]
-
-[[package]]
-name = "thiserror-impl"
-version = "1.0.30"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "thread_local"
-version = "1.1.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180"
-dependencies = [
- "once_cell",
-]
-
-[[package]]
-name = "tinyvec"
-version = "1.5.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2c1c1d5a42b6245520c249549ec267180beaffcc0615401ac8e31853d4b6d8d2"
-dependencies = [
- "tinyvec_macros",
-]
-
-[[package]]
-name = "tinyvec_macros"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
-
-[[package]]
-name = "tracing"
-version = "0.1.31"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f6c650a8ef0cd2dd93736f033d21cbd1224c5a967aa0c258d00fcf7dafef9b9f"
-dependencies = [
- "cfg-if",
- "log",
- "pin-project-lite",
- "tracing-attributes",
- "tracing-core",
-]
-
-[[package]]
-name = "tracing-attributes"
-version = "0.1.19"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8276d9a4a3a558d7b7ad5303ad50b53d58264641b82914b7ada36bd762e7a716"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "tracing-core"
-version = "0.1.22"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "03cfcb51380632a72d3111cb8d3447a8d908e577d31beeac006f836383d29a23"
-dependencies = [
- "lazy_static",
- "valuable",
-]
-
-[[package]]
-name = "tracing-log"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a6923477a48e41c1951f1999ef8bb5a3023eb723ceadafe78ffb65dc366761e3"
-dependencies = [
- "lazy_static",
- "log",
- "tracing-core",
-]
-
-[[package]]
-name = "tracing-subscriber"
-version = "0.3.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9e0ab7bdc962035a87fba73f3acca9b8a8d0034c2e6f60b84aeaaddddc155dce"
-dependencies = [
- "ansi_term",
- "sharded-slab",
- "smallvec",
- "thread_local",
- "tracing-core",
- "tracing-log",
-]
-
-[[package]]
-name = "typenum"
-version = "1.15.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
-
-[[package]]
-name = "unicode-segmentation"
-version = "1.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99"
-
-[[package]]
-name = "unicode-xid"
-version = "0.2.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"
-
-[[package]]
-name = "uuid"
-version = "0.8.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7"
-dependencies = [
- "getrandom",
- "serde",
-]
-
-[[package]]
-name = "valuable"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
-
-[[package]]
-name = "version_check"
-version = "0.9.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
-
-[[package]]
-name = "wasi"
-version = "0.10.2+wasi-snapshot-preview1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
-
-[[package]]
-name = "wasm-bindgen"
-version = "0.2.79"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "25f1af7423d8588a3d840681122e72e6a24ddbcb3f0ec385cac0d12d24256c06"
-dependencies = [
- "cfg-if",
- "wasm-bindgen-macro",
-]
-
-[[package]]
-name = "wasm-bindgen-backend"
-version = "0.2.79"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b21c0df030f5a177f3cba22e9bc4322695ec43e7257d865302900290bcdedca"
-dependencies = [
- "bumpalo",
- "lazy_static",
- "log",
- "proc-macro2",
- "quote",
- "syn",
- "wasm-bindgen-shared",
-]
-
-[[package]]
-name = "wasm-bindgen-macro"
-version = "0.2.79"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2f4203d69e40a52ee523b2529a773d5ffc1dc0071801c87b3d270b471b80ed01"
-dependencies = [
- "quote",
- "wasm-bindgen-macro-support",
-]
-
-[[package]]
-name = "wasm-bindgen-macro-support"
-version = "0.2.79"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bfa8a30d46208db204854cadbb5d4baf5fcf8071ba5bf48190c3e59937962ebc"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
- "wasm-bindgen-backend",
- "wasm-bindgen-shared",
-]
-
-[[package]]
-name = "wasm-bindgen-shared"
-version = "0.2.79"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3d958d035c4438e28c70e4321a2911302f10135ce78a9c7834c0cab4123d06a2"
-
-[[package]]
-name = "web-sys"
-version = "0.3.56"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c060b319f29dd25724f09a2ba1418f142f539b2be99fbf4d2d5a8f7330afb8eb"
-dependencies = [
- "js-sys",
- "wasm-bindgen",
-]
-
-[[package]]
-name = "winapi"
-version = "0.3.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
-dependencies = [
- "winapi-i686-pc-windows-gnu",
- "winapi-x86_64-pc-windows-gnu",
-]
-
-[[package]]
-name = "winapi-i686-pc-windows-gnu"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
-
-[[package]]
-name = "winapi-util"
-version = "0.1.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
-dependencies = [
- "winapi",
-]
-
-[[package]]
-name = "winapi-x86_64-pc-windows-gnu"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
diff --git a/automerge-js/.gitignore b/automerge-js/.gitignore
deleted file mode 100644
index 5add9449..00000000
--- a/automerge-js/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-/node_modules
-/yarn.lock
diff --git a/automerge-js/package.json b/automerge-js/package.json
deleted file mode 100644
index 17018429..00000000
--- a/automerge-js/package.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
- "name": "automerge-js",
- "version": "0.1.0",
- "main": "src/index.js",
- "license": "MIT",
- "scripts": {
- "test": "mocha --bail --full-trace"
- },
- "devDependencies": {
- "mocha": "^9.1.1"
- },
- "dependencies": {
- "automerge-wasm": "file:../automerge-wasm",
- "fast-sha256": "^1.3.0",
- "pako": "^2.0.4",
- "uuid": "^8.3"
- }
-}
diff --git a/automerge-js/src/constants.js b/automerge-js/src/constants.js
deleted file mode 100644
index ea92228c..00000000
--- a/automerge-js/src/constants.js
+++ /dev/null
@@ -1,18 +0,0 @@
-// Properties of the document root object
-//const OPTIONS = Symbol('_options') // object containing options passed to init()
-//const CACHE = Symbol('_cache') // map from objectId to immutable object
-const STATE = Symbol('_state') // object containing metadata about current state (e.g. sequence numbers)
-const HEADS = Symbol('_heads') // object containing metadata about current state (e.g. sequence numbers)
-const OBJECT_ID = Symbol('_objectId') // object containing metadata about current state (e.g. sequence numbers)
-const READ_ONLY = Symbol('_readOnly') // object containing metadata about current state (e.g. sequence numbers)
-const FROZEN = Symbol('_frozen') // object containing metadata about current state (e.g. sequence numbers)
-
-// Properties of all Automerge objects
-//const OBJECT_ID = Symbol('_objectId') // the object ID of the current object (string)
-//const CONFLICTS = Symbol('_conflicts') // map or list (depending on object type) of conflicts
-//const CHANGE = Symbol('_change') // the context object on proxy objects used in change callback
-//const ELEM_IDS = Symbol('_elemIds') // list containing the element ID of each list element
-
-module.exports = {
- STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN
-}
diff --git a/automerge-js/src/index.js b/automerge-js/src/index.js
deleted file mode 100644
index 04cee89b..00000000
--- a/automerge-js/src/index.js
+++ /dev/null
@@ -1,372 +0,0 @@
-const AutomergeWASM = require("automerge-wasm")
-const uuid = require('./uuid')
-
-let { rootProxy, listProxy, textProxy, mapProxy } = require("./proxies")
-let { Counter } = require("./counter")
-let { Text } = require("./text")
-let { Int, Uint, Float64 } = require("./numbers")
-let { STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN } = require("./constants")
-
-function init(actor) {
- if (typeof actor != 'string') {
- actor = null
- }
- const state = AutomergeWASM.create(actor)
- return rootProxy(state, true);
-}
-
-function clone(doc) {
- const state = doc[STATE].clone()
- return rootProxy(state, true);
-}
-
-function free(doc) {
- return doc[STATE].free()
-}
-
-function from(data, actor) {
- let doc1 = init(actor)
- let doc2 = change(doc1, (d) => Object.assign(d, data))
- return doc2
-}
-
-function change(doc, options, callback) {
- if (callback === undefined) {
- // FIXME implement options
- callback = options
- options = {}
- }
- if (typeof options === "string") {
- options = { message: options }
- }
- if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") {
- throw new RangeError("must be the document root");
- }
- if (doc[FROZEN] === true) {
- throw new RangeError("Attempting to use an outdated Automerge document")
- }
- if (!!doc[HEADS] === true) {
- throw new RangeError("Attempting to change an out of date document");
- }
- if (doc[READ_ONLY] === false) {
- throw new RangeError("Calls to Automerge.change cannot be nested")
- }
- const state = doc[STATE]
- const heads = state.getHeads()
- try {
- doc[HEADS] = heads
- doc[FROZEN] = true
- let root = rootProxy(state);
- callback(root)
- if (state.pendingOps() === 0) {
- doc[FROZEN] = false
- doc[HEADS] = undefined
- return doc
- } else {
- state.commit(options.message, options.time)
- return rootProxy(state, true);
- }
- } catch (e) {
- //console.log("ERROR: ",e)
- doc[FROZEN] = false
- doc[HEADS] = undefined
- state.rollback()
- throw e
- }
-}
-
-function emptyChange(doc, options) {
- if (options === undefined) {
- options = {}
- }
- if (typeof options === "string") {
- options = { message: options }
- }
-
- if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") {
- throw new RangeError("must be the document root");
- }
- if (doc[FROZEN] === true) {
- throw new RangeError("Attempting to use an outdated Automerge document")
- }
- if (doc[READ_ONLY] === false) {
- throw new RangeError("Calls to Automerge.change cannot be nested")
- }
-
- const state = doc[STATE]
- state.commit(options.message, options.time)
- return rootProxy(state, true);
-}
-
-function load(data, actor) {
- const state = AutomergeWASM.load(data, actor)
- return rootProxy(state, true);
-}
-
-function save(doc) {
- const state = doc[STATE]
- return state.save()
-}
-
-function merge(local, remote) {
- if (local[HEADS] === true) {
- throw new RangeError("Attempting to change an out of date document");
- }
- const localState = local[STATE]
- const heads = localState.getHeads()
- const remoteState = remote[STATE]
- const changes = localState.getChangesAdded(remoteState)
- localState.applyChanges(changes)
- local[HEADS] = heads
- return rootProxy(localState, true)
-}
-
-function getActorId(doc) {
- const state = doc[STATE]
- return state.getActorId()
-}
-
-function conflictAt(context, objectId, prop) {
- let values = context.getAll(objectId, prop)
- if (values.length <= 1) {
- return
- }
- let result = {}
- for (const conflict of values) {
- const datatype = conflict[0]
- const value = conflict[1]
- switch (datatype) {
- case "map":
- result[value] = mapProxy(context, value, [ prop ], true)
- break;
- case "list":
- result[value] = listProxy(context, value, [ prop ], true)
- break;
- case "text":
- result[value] = textProxy(context, value, [ prop ], true)
- break;
- //case "table":
- //case "cursor":
- case "str":
- case "uint":
- case "int":
- case "f64":
- case "boolean":
- case "bytes":
- case "null":
- result[conflict[2]] = value
- break;
- case "counter":
- result[conflict[2]] = new Counter(value)
- break;
- case "timestamp":
- result[conflict[2]] = new Date(value)
- break;
- default:
- throw RangeError(`datatype ${datatype} unimplemented`)
- }
- }
- return result
-}
-
-function getConflicts(doc, prop) {
- const state = doc[STATE]
- const objectId = doc[OBJECT_ID]
- return conflictAt(state, objectId, prop)
-}
-
-function getLastLocalChange(doc) {
- const state = doc[STATE]
- try {
- return state.getLastLocalChange()
- } catch (e) {
- return
- }
-}
-
-function getObjectId(doc) {
- return doc[OBJECT_ID]
-}
-
-function getChanges(oldState, newState) {
- const o = oldState[STATE]
- const n = newState[STATE]
- const heads = oldState[HEADS]
- return n.getChanges(heads || o.getHeads())
-}
-
-function getAllChanges(doc) {
- const state = doc[STATE]
- return state.getChanges([])
-}
-
-function applyChanges(doc, changes) {
- if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") {
- throw new RangeError("must be the document root");
- }
- if (doc[FROZEN] === true) {
- throw new RangeError("Attempting to use an outdated Automerge document")
- }
- if (doc[READ_ONLY] === false) {
- throw new RangeError("Calls to Automerge.change cannot be nested")
- }
- const state = doc[STATE]
- const heads = state.getHeads()
- state.applyChanges(changes)
- doc[HEADS] = heads
- return [rootProxy(state, true)];
-}
-
-function getHistory(doc) {
- const actor = getActorId(doc)
- const history = getAllChanges(doc)
- return history.map((change, index) => ({
- get change () {
- return decodeChange(change)
- },
- get snapshot () {
- const [state] = applyChanges(init(), history.slice(0, index + 1))
- return state
- }
- })
- )
-}
-
-function equals() {
- if (!isObject(val1) || !isObject(val2)) return val1 === val2
- const keys1 = Object.keys(val1).sort(), keys2 = Object.keys(val2).sort()
- if (keys1.length !== keys2.length) return false
- for (let i = 0; i < keys1.length; i++) {
- if (keys1[i] !== keys2[i]) return false
- if (!equals(val1[keys1[i]], val2[keys2[i]])) return false
- }
- return true
-}
-
-function encodeSyncMessage(msg) {
- return AutomergeWASM.encodeSyncMessage(msg)
-}
-
-function decodeSyncMessage(msg) {
- return AutomergeWASM.decodeSyncMessage(msg)
-}
-
-function encodeSyncState(state) {
- return AutomergeWASM.encodeSyncState(AutomergeWASM.importSyncState(state))
-}
-
-function decodeSyncState(state) {
- return AutomergeWASM.exportSyncState(AutomergeWASM.decodeSyncState(state))
-}
-
-function generateSyncMessage(doc, inState) {
- const state = doc[STATE]
- const syncState = AutomergeWASM.importSyncState(inState)
- const message = state.generateSyncMessage(syncState)
- const outState = AutomergeWASM.exportSyncState(syncState)
- return [ outState, message ]
-}
-
-function receiveSyncMessage(doc, inState, message) {
- const syncState = AutomergeWASM.importSyncState(inState)
- if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") {
- throw new RangeError("must be the document root");
- }
- if (doc[FROZEN] === true) {
- throw new RangeError("Attempting to use an outdated Automerge document")
- }
- if (!!doc[HEADS] === true) {
- throw new RangeError("Attempting to change an out of date document");
- }
- if (doc[READ_ONLY] === false) {
- throw new RangeError("Calls to Automerge.change cannot be nested")
- }
- const state = doc[STATE]
- const heads = state.getHeads()
- state.receiveSyncMessage(syncState, message)
- const outState = AutomergeWASM.exportSyncState(syncState)
- doc[HEADS] = heads
- return [rootProxy(state, true), outState, null];
-}
-
-function initSyncState() {
- return AutomergeWASM.exportSyncState(AutomergeWASM.initSyncState(change))
-}
-
-function encodeChange(change) {
- return AutomergeWASM.encodeChange(change)
-}
-
-function decodeChange(data) {
- return AutomergeWASM.decodeChange(data)
-}
-
-function encodeSyncMessage(change) {
- return AutomergeWASM.encodeSyncMessage(change)
-}
-
-function decodeSyncMessage(data) {
- return AutomergeWASM.decodeSyncMessage(data)
-}
-
-function getMissingDeps(doc, heads) {
- const state = doc[STATE]
- return state.getMissingDeps(heads)
-}
-
-function getHeads(doc) {
- const state = doc[STATE]
- return doc[HEADS] || state.getHeads()
-}
-
-function dump(doc) {
- const state = doc[STATE]
- state.dump()
-}
-
-function toJS(doc) {
- if (typeof doc === "object") {
- if (doc instanceof Uint8Array) {
- return doc
- }
- if (doc === null) {
- return doc
- }
- if (doc instanceof Array) {
- return doc.map((a) => toJS(a))
- }
- if (doc instanceof Text) {
- return doc.map((a) => toJS(a))
- }
- let tmp = {}
- for (index in doc) {
- tmp[index] = toJS(doc[index])
- }
- return tmp
- } else {
- return doc
- }
-}
-
-module.exports = {
- init, from, change, emptyChange, clone, free,
- load, save, merge, getChanges, getAllChanges, applyChanges,
- getLastLocalChange, getObjectId, getActorId, getConflicts,
- encodeChange, decodeChange, equals, getHistory, getHeads, uuid,
- generateSyncMessage, receiveSyncMessage, initSyncState,
- decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState,
- getMissingDeps,
- dump, Text, Counter, Int, Uint, Float64, toJS,
-}
-
-// depricated
-// Frontend, setDefaultBackend, Backend
-
-// more...
-/*
-for (let name of ['getObjectId', 'getObjectById',
- 'setActorId',
- 'Text', 'Table', 'Counter', 'Observable' ]) {
- module.exports[name] = Frontend[name]
-}
-*/
diff --git a/automerge-js/src/numbers.js b/automerge-js/src/numbers.js
deleted file mode 100644
index 1ee22dee..00000000
--- a/automerge-js/src/numbers.js
+++ /dev/null
@@ -1,33 +0,0 @@
-// Convience classes to allow users to stricly specify the number type they want
-
-class Int {
- constructor(value) {
- if (!(Number.isInteger(value) && value <= Number.MAX_SAFE_INTEGER && value >= Number.MIN_SAFE_INTEGER)) {
- throw new RangeError(`Value ${value} cannot be a uint`)
- }
- this.value = value
- Object.freeze(this)
- }
-}
-
-class Uint {
- constructor(value) {
- if (!(Number.isInteger(value) && value <= Number.MAX_SAFE_INTEGER && value >= 0)) {
- throw new RangeError(`Value ${value} cannot be a uint`)
- }
- this.value = value
- Object.freeze(this)
- }
-}
-
-class Float64 {
- constructor(value) {
- if (typeof value !== 'number') {
- throw new RangeError(`Value ${value} cannot be a float64`)
- }
- this.value = value || 0.0
- Object.freeze(this)
- }
-}
-
-module.exports = { Int, Uint, Float64 }
diff --git a/automerge-js/src/proxies.js b/automerge-js/src/proxies.js
deleted file mode 100644
index 3bf2fbd2..00000000
--- a/automerge-js/src/proxies.js
+++ /dev/null
@@ -1,617 +0,0 @@
-
-const AutomergeWASM = require("automerge-wasm")
-const { Int, Uint, Float64 } = require("./numbers");
-const { Counter, getWriteableCounter } = require("./counter");
-const { Text } = require("./text");
-const { STATE, HEADS, FROZEN, OBJECT_ID, READ_ONLY } = require("./constants")
-
-function parseListIndex(key) {
- if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10)
- if (typeof key !== 'number') {
- // throw new TypeError('A list index must be a number, but you passed ' + JSON.stringify(key))
- return key
- }
- if (key < 0 || isNaN(key) || key === Infinity || key === -Infinity) {
- throw new RangeError('A list index must be positive, but you passed ' + key)
- }
- return key
-}
-
-function valueAt(target, prop) {
- const { context, objectId, path, readonly, heads} = target
- let value = context.get(objectId, prop, heads)
- if (value === undefined) {
- return
- }
- const datatype = value[0]
- const val = value[1]
- switch (datatype) {
- case undefined: return;
- case "map": return mapProxy(context, val, [ ... path, prop ], readonly, heads);
- case "list": return listProxy(context, val, [ ... path, prop ], readonly, heads);
- case "text": return textProxy(context, val, [ ... path, prop ], readonly, heads);
- //case "table":
- //case "cursor":
- case "str": return val;
- case "uint": return val;
- case "int": return val;
- case "f64": return val;
- case "boolean": return val;
- case "null": return null;
- case "bytes": return val;
- case "timestamp": return val;
- case "counter": {
- if (readonly) {
- return new Counter(val);
- } else {
- return getWriteableCounter(val, context, path, objectId, prop)
- }
- }
- default:
- throw RangeError(`datatype ${datatype} unimplemented`)
- }
-}
-
-function import_value(value) {
- switch (typeof value) {
- case 'object':
- if (value == null) {
- return [ null, "null"]
- } else if (value instanceof Uint) {
- return [ value.value, "uint" ]
- } else if (value instanceof Int) {
- return [ value.value, "int" ]
- } else if (value instanceof Float64) {
- return [ value.value, "f64" ]
- } else if (value instanceof Counter) {
- return [ value.value, "counter" ]
- } else if (value instanceof Date) {
- return [ value.getTime(), "timestamp" ]
- } else if (value instanceof Uint8Array) {
- return [ value, "bytes" ]
- } else if (value instanceof Array) {
- return [ value, "list" ]
- } else if (value instanceof Text) {
- return [ value, "text" ]
- } else if (value[OBJECT_ID]) {
- throw new RangeError('Cannot create a reference to an existing document object')
- } else {
- return [ value, "map" ]
- }
- break;
- case 'boolean':
- return [ value, "boolean" ]
- case 'number':
- if (Number.isInteger(value)) {
- return [ value, "int" ]
- } else {
- return [ value, "f64" ]
- }
- break;
- case 'string':
- return [ value ]
- break;
- default:
- throw new RangeError(`Unsupported type of value: ${typeof value}`)
- }
-}
-
-const MapHandler = {
- get (target, key) {
- const { context, objectId, path, readonly, frozen, heads, cache } = target
- if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] }
- if (key === OBJECT_ID) return objectId
- if (key === READ_ONLY) return readonly
- if (key === FROZEN) return frozen
- if (key === HEADS) return heads
- if (key === STATE) return context;
- if (!cache[key]) {
- cache[key] = valueAt(target, key)
- }
- return cache[key]
- },
-
- set (target, key, val) {
- let { context, objectId, path, readonly, frozen} = target
- target.cache = {} // reset cache on set
- if (val && val[OBJECT_ID]) {
- throw new RangeError('Cannot create a reference to an existing document object')
- }
- if (key === FROZEN) {
- target.frozen = val
- return
- }
- if (key === HEADS) {
- target.heads = val
- return
- }
- let [ value, datatype ] = import_value(val)
- if (frozen) {
- throw new RangeError("Attempting to use an outdated Automerge document")
- }
- if (readonly) {
- throw new RangeError(`Object property "${key}" cannot be modified`)
- }
- switch (datatype) {
- case "list":
- const list = context.putObject(objectId, key, [])
- const proxyList = listProxy(context, list, [ ... path, key ], readonly );
- for (let i = 0; i < value.length; i++) {
- proxyList[i] = value[i]
- }
- break;
- case "text":
- const text = context.putObject(objectId, key, "", "text")
- const proxyText = textProxy(context, text, [ ... path, key ], readonly );
- for (let i = 0; i < value.length; i++) {
- proxyText[i] = value.get(i)
- }
- break;
- case "map":
- const map = context.putObject(objectId, key, {})
- const proxyMap = mapProxy(context, map, [ ... path, key ], readonly );
- for (const key in value) {
- proxyMap[key] = value[key]
- }
- break;
- default:
- context.put(objectId, key, value, datatype)
- }
- return true
- },
-
- deleteProperty (target, key) {
- const { context, objectId, path, readonly, frozen } = target
- target.cache = {} // reset cache on delete
- if (readonly) {
- throw new RangeError(`Object property "${key}" cannot be modified`)
- }
- context.delete(objectId, key)
- return true
- },
-
- has (target, key) {
- const value = this.get(target, key)
- return value !== undefined
- },
-
- getOwnPropertyDescriptor (target, key) {
- const { context, objectId } = target
- const value = this.get(target, key)
- if (typeof value !== 'undefined') {
- return {
- configurable: true, enumerable: true, value
- }
- }
- },
-
- ownKeys (target) {
- const { context, objectId, heads} = target
- return context.keys(objectId, heads)
- },
-}
-
-
-const ListHandler = {
- get (target, index) {
- const {context, objectId, path, readonly, frozen, heads } = target
- index = parseListIndex(index)
- if (index === Symbol.hasInstance) { return (instance) => { return [].has(instance) } }
- if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] }
- if (index === OBJECT_ID) return objectId
- if (index === READ_ONLY) return readonly
- if (index === FROZEN) return frozen
- if (index === HEADS) return heads
- if (index === STATE) return context;
- if (index === 'length') return context.length(objectId, heads);
- if (index === Symbol.iterator) {
- let i = 0;
- return function *() {
- // FIXME - ugly
- let value = valueAt(target, i)
- while (value !== undefined) {
- yield value
- i += 1
- value = valueAt(target, i)
- }
- }
- }
- if (typeof index === 'number') {
- return valueAt(target, index)
- } else {
- return listMethods(target)[index]
- }
- },
-
- set (target, index, val) {
- let {context, objectId, path, readonly, frozen } = target
- index = parseListIndex(index)
- if (val && val[OBJECT_ID]) {
- throw new RangeError('Cannot create a reference to an existing document object')
- }
- if (index === FROZEN) {
- target.frozen = val
- return
- }
- if (index === HEADS) {
- target.heads = val
- return
- }
- if (typeof index == "string") {
- throw new RangeError('list index must be a number')
- }
- const [ value, datatype] = import_value(val)
- if (frozen) {
- throw new RangeError("Attempting to use an outdated Automerge document")
- }
- if (readonly) {
- throw new RangeError(`Object property "${index}" cannot be modified`)
- }
- switch (datatype) {
- case "list":
- let list
- if (index >= context.length(objectId)) {
- list = context.insertObject(objectId, index, [])
- } else {
- list = context.putObject(objectId, index, [])
- }
- const proxyList = listProxy(context, list, [ ... path, index ], readonly);
- proxyList.splice(0,0,...value)
- break;
- case "text":
- let text
- if (index >= context.length(objectId)) {
- text = context.insertObject(objectId, index, "", "text")
- } else {
- text = context.putObject(objectId, index, "", "text")
- }
- const proxyText = textProxy(context, text, [ ... path, index ], readonly);
- proxyText.splice(0,0,...value)
- break;
- case "map":
- let map
- if (index >= context.length(objectId)) {
- map = context.insertObject(objectId, index, {})
- } else {
- map = context.putObject(objectId, index, {})
- }
- const proxyMap = mapProxy(context, map, [ ... path, index ], readonly);
- for (const key in value) {
- proxyMap[key] = value[key]
- }
- break;
- default:
- if (index >= context.length(objectId)) {
- context.insert(objectId, index, value, datatype)
- } else {
- context.put(objectId, index, value, datatype)
- }
- }
- return true
- },
-
- deleteProperty (target, index) {
- const {context, objectId} = target
- index = parseListIndex(index)
- if (context.get(objectId, index)[0] == "counter") {
- throw new TypeError('Unsupported operation: deleting a counter from a list')
- }
- context.delete(objectId, index)
- return true
- },
-
- has (target, index) {
- const {context, objectId, heads} = target
- index = parseListIndex(index)
- if (typeof index === 'number') {
- return index < context.length(objectId, heads)
- }
- return index === 'length'
- },
-
- getOwnPropertyDescriptor (target, index) {
- const {context, objectId, path, readonly, frozen, heads} = target
-
- if (index === 'length') return {writable: true, value: context.length(objectId, heads) }
- if (index === OBJECT_ID) return {configurable: false, enumerable: false, value: objectId}
-
- index = parseListIndex(index)
-
- let value = valueAt(target, index)
- return { configurable: true, enumerable: true, value }
- },
-
- getPrototypeOf(target) { return Object.getPrototypeOf([]) },
- ownKeys (target) {
- const {context, objectId, heads } = target
- let keys = []
- // uncommenting this causes assert.deepEqual() to fail when comparing to a pojo array
- // but not uncommenting it causes for (i in list) {} to not enumerate values properly
- //for (let i = 0; i < target.context.length(objectId, heads); i++) { keys.push(i.toString()) }
- keys.push("length");
- return keys
- }
-}
-
-const TextHandler = Object.assign({}, ListHandler, {
- get (target, index) {
- // FIXME this is a one line change from ListHandler.get()
- const {context, objectId, path, readonly, frozen, heads } = target
- index = parseListIndex(index)
- if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] }
- if (index === Symbol.hasInstance) { return (instance) => { return [].has(instance) } }
- if (index === OBJECT_ID) return objectId
- if (index === READ_ONLY) return readonly
- if (index === FROZEN) return frozen
- if (index === HEADS) return heads
- if (index === STATE) return context;
- if (index === 'length') return context.length(objectId, heads);
- if (index === Symbol.iterator) {
- let i = 0;
- return function *() {
- let value = valueAt(target, i)
- while (value !== undefined) {
- yield value
- i += 1
- value = valueAt(target, i)
- }
- }
- }
- if (typeof index === 'number') {
- return valueAt(target, index)
- } else {
- return textMethods(target)[index] || listMethods(target)[index]
- }
- },
- getPrototypeOf(target) {
- return Object.getPrototypeOf(new Text())
- },
-})
-
-function mapProxy(context, objectId, path, readonly, heads) {
- return new Proxy({context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}}, MapHandler)
-}
-
-function listProxy(context, objectId, path, readonly, heads) {
- let target = []
- Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}})
- return new Proxy(target, ListHandler)
-}
-
-function textProxy(context, objectId, path, readonly, heads) {
- let target = []
- Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads, cache: {}})
- return new Proxy(target, TextHandler)
-}
-
-function rootProxy(context, readonly) {
- return mapProxy(context, "_root", [], readonly)
-}
-
-function listMethods(target) {
- const {context, objectId, path, readonly, frozen, heads} = target
- const methods = {
- deleteAt(index, numDelete) {
- if (typeof numDelete === 'number') {
- context.splice(objectId, index, numDelete)
- } else {
- context.delete(objectId, index)
- }
- return this
- },
-
- fill(val, start, end) {
- // FIXME
- let list = context.getObject(objectId)
- let [value, datatype] = valueAt(target, index)
- for (let index = parseListIndex(start || 0); index < parseListIndex(end || list.length); index++) {
- context.put(objectId, index, value, datatype)
- }
- return this
- },
-
- indexOf(o, start = 0) {
- // FIXME
- const id = o[OBJECT_ID]
- if (id) {
- const list = context.getObject(objectId)
- for (let index = start; index < list.length; index++) {
- if (list[index][OBJECT_ID] === id) {
- return index
- }
- }
- return -1
- } else {
- return context.indexOf(objectId, o, start)
- }
- },
-
- insertAt(index, ...values) {
- this.splice(index, 0, ...values)
- return this
- },
-
- pop() {
- let length = context.length(objectId)
- if (length == 0) {
- return undefined
- }
- let last = valueAt(target, length - 1)
- context.delete(objectId, length - 1)
- return last
- },
-
- push(...values) {
- let len = context.length(objectId)
- this.splice(len, 0, ...values)
- return context.length(objectId)
- },
-
- shift() {
- if (context.length(objectId) == 0) return
- const first = valueAt(target, 0)
- context.delete(objectId, 0)
- return first
- },
-
- splice(index, del, ...vals) {
- index = parseListIndex(index)
- del = parseListIndex(del)
- for (let val of vals) {
- if (val && val[OBJECT_ID]) {
- throw new RangeError('Cannot create a reference to an existing document object')
- }
- }
- if (frozen) {
- throw new RangeError("Attempting to use an outdated Automerge document")
- }
- if (readonly) {
- throw new RangeError("Sequence object cannot be modified outside of a change block")
- }
- let result = []
- for (let i = 0; i < del; i++) {
- let value = valueAt(target, index)
- result.push(value)
- context.delete(objectId, index)
- }
- const values = vals.map((val) => import_value(val))
- for (let [value,datatype] of values) {
- switch (datatype) {
- case "list":
- const list = context.insertObject(objectId, index, [])
- const proxyList = listProxy(context, list, [ ... path, index ], readonly);
- proxyList.splice(0,0,...value)
- break;
- case "text":
- const text = context.insertObject(objectId, index, "", "text")
- const proxyText = textProxy(context, text, [ ... path, index ], readonly);
- proxyText.splice(0,0,...value)
- break;
- case "map":
- const map = context.insertObject(objectId, index, {})
- const proxyMap = mapProxy(context, map, [ ... path, index ], readonly);
- for (const key in value) {
- proxyMap[key] = value[key]
- }
- break;
- default:
- context.insert(objectId, index, value, datatype)
- }
- index += 1
- }
- return result
- },
-
- unshift(...values) {
- this.splice(0, 0, ...values)
- return context.length(objectId)
- },
-
- entries() {
- let i = 0;
- const iterator = {
- next: () => {
- let value = valueAt(target, i)
- if (value === undefined) {
- return { value: undefined, done: true }
- } else {
- return { value: [ i, value ], done: false }
- }
- }
- }
- return iterator
- },
-
- keys() {
- let i = 0;
- let len = context.length(objectId, heads)
- const iterator = {
- next: () => {
- let value = undefined
- if (i < len) { value = i; i++ }
- return { value, done: true }
- }
- }
- return iterator
- },
-
- values() {
- let i = 0;
- const iterator = {
- next: () => {
- let value = valueAt(target, i)
- if (value === undefined) {
- return { value: undefined, done: true }
- } else {
- return { value, done: false }
- }
- }
- }
- return iterator
- }
- }
-
- // Read-only methods that can delegate to the JavaScript built-in implementations
- // FIXME - super slow
- for (let method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes',
- 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight',
- 'slice', 'some', 'toLocaleString', 'toString']) {
- methods[method] = (...args) => {
- const list = []
- while (true) {
- let value = valueAt(target, list.length)
- if (value == undefined) {
- break
- }
- list.push(value)
- }
-
- return list[method](...args)
- }
- }
-
- return methods
-}
-
-function textMethods(target) {
- const {context, objectId, path, readonly, frozen, heads } = target
- const methods = {
- set (index, value) {
- return this[index] = value
- },
- get (index) {
- return this[index]
- },
- toString () {
- return context.text(objectId, heads).replace(//g,'')
- },
- toSpans () {
- let spans = []
- let chars = ''
- let length = this.length
- for (let i = 0; i < length; i++) {
- const value = this[i]
- if (typeof value === 'string') {
- chars += value
- } else {
- if (chars.length > 0) {
- spans.push(chars)
- chars = ''
- }
- spans.push(value)
- }
- }
- if (chars.length > 0) {
- spans.push(chars)
- }
- return spans
- },
- toJSON () {
- return this.toString()
- }
- }
- return methods
-}
-
-
-module.exports = { rootProxy, textProxy, listProxy, mapProxy, MapHandler, ListHandler, TextHandler }
diff --git a/automerge-js/src/text.js b/automerge-js/src/text.js
deleted file mode 100644
index a7f442fe..00000000
--- a/automerge-js/src/text.js
+++ /dev/null
@@ -1,132 +0,0 @@
-const { OBJECT_ID } = require('./constants')
-const { isObject } = require('../src/common')
-
-class Text {
- constructor (text) {
- const instance = Object.create(Text.prototype)
- if (typeof text === 'string') {
- instance.elems = [...text]
- } else if (Array.isArray(text)) {
- instance.elems = text
- } else if (text === undefined) {
- instance.elems = []
- } else {
- throw new TypeError(`Unsupported initial value for Text: ${text}`)
- }
- return instance
- }
-
- get length () {
- return this.elems.length
- }
-
- get (index) {
- return this.elems[index]
- }
-
- getElemId (index) {
- return undefined
- }
-
- /**
- * Iterates over the text elements character by character, including any
- * inline objects.
- */
- [Symbol.iterator] () {
- let elems = this.elems, index = -1
- return {
- next () {
- index += 1
- if (index < elems.length) {
- return {done: false, value: elems[index]}
- } else {
- return {done: true}
- }
- }
- }
- }
-
- /**
- * Returns the content of the Text object as a simple string, ignoring any
- * non-character elements.
- */
- toString() {
- // Concatting to a string is faster than creating an array and then
- // .join()ing for small (<100KB) arrays.
- // https://jsperf.com/join-vs-loop-w-type-test
- let str = ''
- for (const elem of this.elems) {
- if (typeof elem === 'string') str += elem
- }
- return str
- }
-
- /**
- * Returns the content of the Text object as a sequence of strings,
- * interleaved with non-character elements.
- *
- * For example, the value ['a', 'b', {x: 3}, 'c', 'd'] has spans:
- * => ['ab', {x: 3}, 'cd']
- */
- toSpans() {
- let spans = []
- let chars = ''
- for (const elem of this.elems) {
- if (typeof elem === 'string') {
- chars += elem
- } else {
- if (chars.length > 0) {
- spans.push(chars)
- chars = ''
- }
- spans.push(elem)
- }
- }
- if (chars.length > 0) {
- spans.push(chars)
- }
- return spans
- }
-
- /**
- * Returns the content of the Text object as a simple string, so that the
- * JSON serialization of an Automerge document represents text nicely.
- */
- toJSON() {
- return this.toString()
- }
-
- /**
- * Updates the list item at position `index` to a new value `value`.
- */
- set (index, value) {
- this.elems[index] = value
- }
-
- /**
- * Inserts new list items `values` starting at position `index`.
- */
- insertAt(index, ...values) {
- this.elems.splice(index, 0, ... values)
- }
-
- /**
- * Deletes `numDelete` list items starting at position `index`.
- * if `numDelete` is not given, one item is deleted.
- */
- deleteAt(index, numDelete = 1) {
- this.elems.splice(index, numDelete)
- }
-}
-
-// Read-only methods that can delegate to the JavaScript built-in array
-for (let method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes',
- 'indexOf', 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight',
- 'slice', 'some', 'toLocaleString']) {
- Text.prototype[method] = function (...args) {
- const array = [...this]
- return array[method](...args)
- }
-}
-
-module.exports = { Text }
diff --git a/automerge-js/src/uuid.js b/automerge-js/src/uuid.js
deleted file mode 100644
index 42a8cc6e..00000000
--- a/automerge-js/src/uuid.js
+++ /dev/null
@@ -1,16 +0,0 @@
-const { v4: uuid } = require('uuid')
-
-function defaultFactory() {
- return uuid().replace(/-/g, '')
-}
-
-let factory = defaultFactory
-
-function makeUuid() {
- return factory()
-}
-
-makeUuid.setFactory = newFactory => { factory = newFactory }
-makeUuid.reset = () => { factory = defaultFactory }
-
-module.exports = makeUuid
diff --git a/automerge-js/test/basic_test.js b/automerge-js/test/basic_test.js
deleted file mode 100644
index 68d2fecf..00000000
--- a/automerge-js/test/basic_test.js
+++ /dev/null
@@ -1,164 +0,0 @@
-
-const assert = require('assert')
-const util = require('util')
-const Automerge = require('..')
-
-describe('Automerge', () => {
- describe('basics', () => {
- it('should init clone and free', () => {
- let doc1 = Automerge.init()
- let doc2 = Automerge.clone(doc1);
- })
-
- it('handle basic set and read on root object', () => {
- let doc1 = Automerge.init()
- let doc2 = Automerge.change(doc1, (d) => {
- d.hello = "world"
- d.big = "little"
- d.zip = "zop"
- d.app = "dap"
- assert.deepEqual(d, { hello: "world", big: "little", zip: "zop", app: "dap" })
- })
- assert.deepEqual(doc2, { hello: "world", big: "little", zip: "zop", app: "dap" })
- })
-
- it('handle basic sets over many changes', () => {
- let doc1 = Automerge.init()
- let timestamp = new Date();
- let counter = new Automerge.Counter(100);
- let bytes = new Uint8Array([10,11,12]);
- let doc2 = Automerge.change(doc1, (d) => {
- d.hello = "world"
- })
- let doc3 = Automerge.change(doc2, (d) => {
- d.counter1 = counter
- })
- let doc4 = Automerge.change(doc3, (d) => {
- d.timestamp1 = timestamp
- })
- let doc5 = Automerge.change(doc4, (d) => {
- d.app = null
- })
- let doc6 = Automerge.change(doc5, (d) => {
- d.bytes1 = bytes
- })
- let doc7 = Automerge.change(doc6, (d) => {
- d.uint = new Automerge.Uint(1)
- d.int = new Automerge.Int(-1)
- d.float64 = new Automerge.Float64(5.5)
- d.number1 = 100
- d.number2 = -45.67
- d.true = true
- d.false = false
- })
-
- assert.deepEqual(doc7, { hello: "world", true: true, false: false, int: -1, uint: 1, float64: 5.5, number1: 100, number2: -45.67, counter1: counter, timestamp1: timestamp, bytes1: bytes, app: null })
-
- let changes = Automerge.getAllChanges(doc7)
- let t1 = Automerge.init()
- ;let [t2] = Automerge.applyChanges(t1, changes)
- assert.deepEqual(doc7,t2)
- })
-
- it('handle overwrites to values', () => {
- let doc1 = Automerge.init()
- let doc2 = Automerge.change(doc1, (d) => {
- d.hello = "world1"
- })
- let doc3 = Automerge.change(doc2, (d) => {
- d.hello = "world2"
- })
- let doc4 = Automerge.change(doc3, (d) => {
- d.hello = "world3"
- })
- let doc5 = Automerge.change(doc4, (d) => {
- d.hello = "world4"
- })
- assert.deepEqual(doc5, { hello: "world4" } )
- })
-
- it('handle set with object value', () => {
- let doc1 = Automerge.init()
- let doc2 = Automerge.change(doc1, (d) => {
- d.subobj = { hello: "world", subsubobj: { zip: "zop" } }
- })
- assert.deepEqual(doc2, { subobj: { hello: "world", subsubobj: { zip: "zop" } } })
- })
-
- it('handle simple list creation', () => {
- let doc1 = Automerge.init()
- let doc2 = Automerge.change(doc1, (d) => d.list = [])
- assert.deepEqual(doc2, { list: []})
- })
-
- it('handle simple lists', () => {
- let doc1 = Automerge.init()
- let doc2 = Automerge.change(doc1, (d) => {
- d.list = [ 1, 2, 3 ]
- })
- assert.deepEqual(doc2.list.length, 3)
- assert.deepEqual(doc2.list[0], 1)
- assert.deepEqual(doc2.list[1], 2)
- assert.deepEqual(doc2.list[2], 3)
- assert.deepEqual(doc2, { list: [1,2,3] })
- // assert.deepStrictEqual(Automerge.toJS(doc2), { list: [1,2,3] })
-
- let doc3 = Automerge.change(doc2, (d) => {
- d.list[1] = "a"
- })
-
- assert.deepEqual(doc3.list.length, 3)
- assert.deepEqual(doc3.list[0], 1)
- assert.deepEqual(doc3.list[1], "a")
- assert.deepEqual(doc3.list[2], 3)
- assert.deepEqual(doc3, { list: [1,"a",3] })
- })
- it('handle simple lists', () => {
- let doc1 = Automerge.init()
- let doc2 = Automerge.change(doc1, (d) => {
- d.list = [ 1, 2, 3 ]
- })
- let changes = Automerge.getChanges(doc1, doc2)
- let docB1 = Automerge.init()
- ;let [docB2] = Automerge.applyChanges(docB1, changes)
- assert.deepEqual(docB2, doc2);
- })
- it('handle text', () => {
- let doc1 = Automerge.init()
- let tmp = new Automerge.Text("hello")
- let doc2 = Automerge.change(doc1, (d) => {
- d.list = new Automerge.Text("hello")
- d.list.insertAt(2,"Z")
- })
- let changes = Automerge.getChanges(doc1, doc2)
- let docB1 = Automerge.init()
- ;let [docB2] = Automerge.applyChanges(docB1, changes)
- assert.deepEqual(docB2, doc2);
- })
-
- it('have many list methods', () => {
- let doc1 = Automerge.from({ list: [1,2,3] })
- assert.deepEqual(doc1, { list: [1,2,3] });
- let doc2 = Automerge.change(doc1, (d) => {
- d.list.splice(1,1,9,10)
- })
- assert.deepEqual(doc2, { list: [1,9,10,3] });
- let doc3 = Automerge.change(doc2, (d) => {
- d.list.push(11,12)
- })
- assert.deepEqual(doc3, { list: [1,9,10,3,11,12] });
- let doc4 = Automerge.change(doc3, (d) => {
- d.list.unshift(2,2)
- })
- assert.deepEqual(doc4, { list: [2,2,1,9,10,3,11,12] });
- let doc5 = Automerge.change(doc4, (d) => {
- d.list.shift()
- })
- assert.deepEqual(doc5, { list: [2,1,9,10,3,11,12] });
- let doc6 = Automerge.change(doc5, (d) => {
- d.list.insertAt(3,100,101)
- })
- assert.deepEqual(doc6, { list: [2,1,9,100,101,10,3,11,12] });
- })
- })
-})
diff --git a/automerge-js/test/columnar_test.js b/automerge-js/test/columnar_test.js
deleted file mode 100644
index 8cbe1482..00000000
--- a/automerge-js/test/columnar_test.js
+++ /dev/null
@@ -1,97 +0,0 @@
-const assert = require('assert')
-const { checkEncoded } = require('./helpers')
-const Automerge = require('..')
-const { encodeChange, decodeChange } = Automerge
-
-describe('change encoding', () => {
- it('should encode text edits', () => {
- /*
- const change1 = {actor: 'aaaa', seq: 1, startOp: 1, time: 9, message: '', deps: [], ops: [
- {action: 'makeText', obj: '_root', key: 'text', insert: false, pred: []},
- {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'h', pred: []},
- {action: 'del', obj: '1@aaaa', elemId: '2@aaaa', insert: false, pred: ['2@aaaa']},
- {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'H', pred: []},
- {action: 'set', obj: '1@aaaa', elemId: '4@aaaa', insert: true, value: 'i', pred: []}
- ]}
- */
- const change1 = {actor: 'aaaa', seq: 1, startOp: 1, time: 9, message: null, deps: [], ops: [
- {action: 'makeText', obj: '_root', key: 'text', pred: []},
- {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'h', pred: []},
- {action: 'del', obj: '1@aaaa', elemId: '2@aaaa', pred: ['2@aaaa']},
- {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'H', pred: []},
- {action: 'set', obj: '1@aaaa', elemId: '4@aaaa', insert: true, value: 'i', pred: []}
- ]}
- checkEncoded(encodeChange(change1), [
- 0x85, 0x6f, 0x4a, 0x83, // magic bytes
- 0xe2, 0xbd, 0xfb, 0xf5, // checksum
- 1, 94, 0, 2, 0xaa, 0xaa, // chunkType: change, length, deps, actor 'aaaa'
- 1, 1, 9, 0, 0, // seq, startOp, time, message, actor list
- 12, 0x01, 4, 0x02, 4, // column count, objActor, objCtr
- 0x11, 8, 0x13, 7, 0x15, 8, // keyActor, keyCtr, keyStr
- 0x34, 4, 0x42, 6, // insert, action
- 0x56, 6, 0x57, 3, // valLen, valRaw
- 0x70, 6, 0x71, 2, 0x73, 2, // predNum, predActor, predCtr
- 0, 1, 4, 0, // objActor column: null, 0, 0, 0, 0
- 0, 1, 4, 1, // objCtr column: null, 1, 1, 1, 1
- 0, 2, 0x7f, 0, 0, 1, 0x7f, 0, // keyActor column: null, null, 0, null, 0
- 0, 1, 0x7c, 0, 2, 0x7e, 4, // keyCtr column: null, 0, 2, 0, 4
- 0x7f, 4, 0x74, 0x65, 0x78, 0x74, 0, 4, // keyStr column: 'text', null, null, null, null
- 1, 1, 1, 2, // insert column: false, true, false, true, true
- 0x7d, 4, 1, 3, 2, 1, // action column: makeText, set, del, set, set
- 0x7d, 0, 0x16, 0, 2, 0x16, // valLen column: 0, 0x16, 0, 0x16, 0x16
- 0x68, 0x48, 0x69, // valRaw column: 'h', 'H', 'i'
- 2, 0, 0x7f, 1, 2, 0, // predNum column: 0, 0, 1, 0, 0
- 0x7f, 0, // predActor column: 0
- 0x7f, 2 // predCtr column: 2
- ])
- const decoded = decodeChange(encodeChange(change1))
- assert.deepStrictEqual(decoded, Object.assign({hash: decoded.hash}, change1))
- })
-
- // FIXME - skipping this b/c it was never implemented in the rust impl and isnt trivial
-/*
- it.skip('should require strict ordering of preds', () => {
- const change = new Uint8Array([
- 133, 111, 74, 131, 31, 229, 112, 44, 1, 105, 1, 58, 30, 190, 100, 253, 180, 180, 66, 49, 126,
- 81, 142, 10, 3, 35, 140, 189, 231, 34, 145, 57, 66, 23, 224, 149, 64, 97, 88, 140, 168, 194,
- 229, 4, 244, 209, 58, 138, 67, 140, 1, 152, 236, 250, 2, 0, 1, 4, 55, 234, 66, 242, 8, 21, 11,
- 52, 1, 66, 2, 86, 3, 87, 10, 112, 2, 113, 3, 115, 4, 127, 9, 99, 111, 109, 109, 111, 110, 86,
- 97, 114, 1, 127, 1, 127, 166, 1, 52, 48, 57, 49, 52, 57, 52, 53, 56, 50, 127, 2, 126, 0, 1,
- 126, 139, 1, 0
- ])
- assert.throws(() => { decodeChange(change) }, /operation IDs are not in ascending order/)
- })
-*/
-
- describe('with trailing bytes', () => {
- let change = new Uint8Array([
- 0x85, 0x6f, 0x4a, 0x83, // magic bytes
- 0xb2, 0x98, 0x9e, 0xa9, // checksum
- 1, 61, 0, 2, 0x12, 0x34, // chunkType: change, length, deps, actor '1234'
- 1, 1, 252, 250, 220, 255, 5, // seq, startOp, time
- 14, 73, 110, 105, 116, 105, 97, 108, 105, 122, 97, 116, 105, 111, 110, // message: 'Initialization'
- 0, 6, // actor list, column count
- 0x15, 3, 0x34, 1, 0x42, 2, // keyStr, insert, action
- 0x56, 2, 0x57, 1, 0x70, 2, // valLen, valRaw, predNum
- 0x7f, 1, 0x78, // keyStr: 'x'
- 1, // insert: false
- 0x7f, 1, // action: set
- 0x7f, 19, // valLen: 1 byte of type uint
- 1, // valRaw: 1
- 0x7f, 0, // predNum: 0
- 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 // 10 trailing bytes
- ])
-
- it('should allow decoding and re-encoding', () => {
- // NOTE: This calls the JavaScript encoding and decoding functions, even when the WebAssembly
- // backend is loaded. Should the wasm backend export its own functions for testing?
- checkEncoded(change, encodeChange(decodeChange(change)))
- })
-
- it('should be preserved in document encoding', () => {
- const [doc] = Automerge.applyChanges(Automerge.init(), [change])
- const [reconstructed] = Automerge.getAllChanges(Automerge.load(Automerge.save(doc)))
- checkEncoded(change, reconstructed)
- })
- })
-})
diff --git a/automerge-js/test/legacy_tests.js b/automerge-js/test/legacy_tests.js
deleted file mode 100644
index 76348d06..00000000
--- a/automerge-js/test/legacy_tests.js
+++ /dev/null
@@ -1,1419 +0,0 @@
-const assert = require('assert')
-//const Automerge = process.env.TEST_DIST === '1' ? require('../dist/automerge') : require('../src/automerge')
-const Automerge = require('../src')
-const { assertEqualsOneOf } = require('./helpers')
-const { decodeChange } = require('../src/columnar')
-//const { decodeChange } = Automerge
-
-const UUID_PATTERN = /^[0-9a-f]{32}$/
-const OPID_PATTERN = /^[0-9]+@[0-9a-f]{32}$/
-
-// CORE FEATURES
-//
-// TODO - Cursors
-// TODO - Tables
-// TODO - on-pass load() & reconstruct change from opset
-// TODO - micro-patches (needed for fully hydrated object in js)
-// TODO - valueAt(heads) / GC
-//
-// AUTOMERGE UNSUPPORTED
-//
-// TODO - patchCallback
-
-
-describe('Automerge', () => {
- describe('initialization ', () => {
- it('should initially be an empty map', () => {
- const doc = Automerge.init()
- assert.deepStrictEqual(doc, {})
- })
-
- it('should allow instantiating from an existing object', () => {
- const initialState = { birds: { wrens: 3, magpies: 4 } }
- const doc = Automerge.from(initialState)
- assert.deepStrictEqual(doc, initialState)
- })
-
- it('should allow merging of an object initialized with `from`', () => {
- let doc1 = Automerge.from({ cards: [] })
- let doc2 = Automerge.merge(Automerge.init(), doc1)
- assert.deepStrictEqual(doc2, { cards: [] })
- })
-
- it('should allow passing an actorId when instantiating from an existing object', () => {
- const actorId = '1234'
- let doc = Automerge.from({ foo: 1 }, actorId)
- assert.strictEqual(Automerge.getActorId(doc), '1234')
- })
-
- it('accepts an empty object as initial state', () => {
- const doc = Automerge.from({})
- assert.deepStrictEqual(doc, {})
- })
-
- it('accepts an array as initial state, but converts it to an object', () => {
- const doc = Automerge.from(['a', 'b', 'c'])
- assert.deepStrictEqual(doc, { '0': 'a', '1': 'b', '2': 'c' })
- })
-
- it('accepts strings as initial values, but treats them as an array of characters', () => {
- const doc = Automerge.from('abc')
- assert.deepStrictEqual(doc, { '0': 'a', '1': 'b', '2': 'c' })
- })
-
- it('ignores numbers provided as initial values', () => {
- const doc = Automerge.from(123)
- assert.deepStrictEqual(doc, {})
- })
-
- it('ignores booleans provided as initial values', () => {
- const doc1 = Automerge.from(false)
- assert.deepStrictEqual(doc1, {})
- const doc2 = Automerge.from(true)
- assert.deepStrictEqual(doc2, {})
- })
- })
-
- describe('sequential use', () => {
- let s1, s2
- beforeEach(() => {
- s1 = Automerge.init()
- })
-
- it('should not mutate objects', () => {
- s2 = Automerge.change(s1, doc => doc.foo = 'bar')
- assert.strictEqual(s1.foo, undefined)
- assert.strictEqual(s2.foo, 'bar')
- })
-
- it('changes should be retrievable', () => {
- const change1 = Automerge.getLastLocalChange(s1)
- s2 = Automerge.change(s1, doc => doc.foo = 'bar')
- const change2 = Automerge.getLastLocalChange(s2)
- assert.strictEqual(change1, undefined)
- const change = decodeChange(change2)
- assert.deepStrictEqual(change, {
- actor: change.actor, deps: [], seq: 1, startOp: 1,
- hash: change.hash, message: '', time: change.time,
- ops: [{obj: '_root', key: 'foo', action: 'set', insert: false, value: 'bar', pred: []}]
- })
- })
-
- it('should not register any conflicts on repeated assignment', () => {
- assert.strictEqual(Automerge.getConflicts(s1, 'foo'), undefined)
- s1 = Automerge.change(s1, 'change', doc => doc.foo = 'one')
- assert.strictEqual(Automerge.getConflicts(s1, 'foo'), undefined)
- s1 = Automerge.change(s1, 'change', doc => doc.foo = 'two')
- assert.strictEqual(Automerge.getConflicts(s1, 'foo'), undefined)
- })
-
- describe('changes', () => {
- it('should group several changes', () => {
- s2 = Automerge.change(s1, 'change message', doc => {
- doc.first = 'one'
- assert.strictEqual(doc.first, 'one')
- doc.second = 'two'
- assert.deepStrictEqual(doc, {
- first: 'one', second: 'two'
- })
- })
- assert.deepStrictEqual(s1, {})
- assert.deepStrictEqual(s2, {first: 'one', second: 'two'})
- })
-
- it('should freeze objects if desired', () => {
- s1 = Automerge.init({freeze: true})
- s2 = Automerge.change(s1, doc => doc.foo = 'bar')
- try {
- s2.foo = 'lemon'
- } catch (e) { }
- assert.strictEqual(s2.foo, 'bar')
-
- let deleted = false
- try {
- deleted = delete s2.foo
- } catch (e) { }
- assert.strictEqual(s2.foo, 'bar')
- assert.strictEqual(deleted, false)
-
- Automerge.change(s2, () => {
- try {
- s2.foo = 'lemon'
- } catch (e) { }
- assert.strictEqual(s2.foo, 'bar')
- })
-
- assert.throws(() => { Object.assign(s2, {x: 4}) })
- assert.strictEqual(s2.x, undefined)
- })
-
- it('should allow repeated reading and writing of values', () => {
- s2 = Automerge.change(s1, 'change message', doc => {
- doc.value = 'a'
- assert.strictEqual(doc.value, 'a')
- doc.value = 'b'
- doc.value = 'c'
- assert.strictEqual(doc.value, 'c')
- })
- assert.deepStrictEqual(s1, {})
- assert.deepStrictEqual(s2, {value: 'c'})
- })
-
- it('should not record conflicts when writing the same field several times within one change', () => {
- s1 = Automerge.change(s1, 'change message', doc => {
- doc.value = 'a'
- doc.value = 'b'
- doc.value = 'c'
- })
- assert.strictEqual(s1.value, 'c')
- assert.strictEqual(Automerge.getConflicts(s1, 'value'), undefined)
- })
-
- it('should return the unchanged state object if nothing changed', () => {
- s2 = Automerge.change(s1, () => {})
- assert.strictEqual(s2, s1)
- })
-
- it('should ignore field updates that write the existing value', () => {
- s1 = Automerge.change(s1, doc => doc.field = 123)
- s2 = Automerge.change(s1, doc => doc.field = 123)
- assert.strictEqual(s2, s1)
- })
-
- it('should not ignore field updates that resolve a conflict', () => {
- s2 = Automerge.merge(Automerge.init(), s1)
- s1 = Automerge.change(s1, doc => doc.field = 123)
- s2 = Automerge.change(s2, doc => doc.field = 321)
- s1 = Automerge.merge(s1, s2)
- assert.strictEqual(Object.keys(Automerge.getConflicts(s1, 'field')).length, 2)
- const resolved = Automerge.change(s1, doc => doc.field = s1.field)
- assert.notStrictEqual(resolved, s1)
- assert.deepStrictEqual(resolved, {field: s1.field})
- assert.strictEqual(Automerge.getConflicts(resolved, 'field'), undefined)
- })
-
- it('should ignore list element updates that write the existing value', () => {
- s1 = Automerge.change(s1, doc => doc.list = [123])
- s2 = Automerge.change(s1, doc => doc.list[0] = 123)
- assert.strictEqual(s2, s1)
- })
-
- it('should not ignore list element updates that resolve a conflict', () => {
- s1 = Automerge.change(s1, doc => doc.list = [1])
- s2 = Automerge.merge(Automerge.init(), s1)
- s1 = Automerge.change(s1, doc => doc.list[0] = 123)
- s2 = Automerge.change(s2, doc => doc.list[0] = 321)
- s1 = Automerge.merge(s1, s2)
- assert.deepStrictEqual(Automerge.getConflicts(s1.list, 0), {
- [`3@${Automerge.getActorId(s1)}`]: 123,
- [`3@${Automerge.getActorId(s2)}`]: 321
- })
- const resolved = Automerge.change(s1, doc => doc.list[0] = s1.list[0])
- assert.deepStrictEqual(resolved, s1)
- assert.notStrictEqual(resolved, s1)
- assert.strictEqual(Automerge.getConflicts(resolved.list, 0), undefined)
- })
-
- it('should sanity-check arguments', () => {
- s1 = Automerge.change(s1, doc => doc.nested = {})
- assert.throws(() => { Automerge.change({}, doc => doc.foo = 'bar') }, /must be the document root/)
- assert.throws(() => { Automerge.change(s1.nested, doc => doc.foo = 'bar') }, /must be the document root/)
- })
-
- it('should not allow nested change blocks', () => {
- assert.throws(() => {
- Automerge.change(s1, doc1 => {
- Automerge.change(doc1, doc2 => {
- doc2.foo = 'bar'
- })
- })
- }, /Calls to Automerge.change cannot be nested/)
- assert.throws(() => {
- s1 = Automerge.change(s1, doc1 => {
- s2 = Automerge.change(s1, doc2 => doc2.two = 2)
- doc1.one = 1
- })
- }, /Attempting to use an outdated Automerge document/)
- })
-
- it('should not allow the same base document to be used for multiple changes', () => {
- assert.throws(() => {
- Automerge.change(s1, doc => doc.one = 1)
- Automerge.change(s1, doc => doc.two = 2)
- }, /Attempting to use an outdated Automerge document/)
- })
-
- it('should allow a document to be cloned', () => {
- s1 = Automerge.change(s1, doc => doc.zero = 0)
- s2 = Automerge.clone(s1)
- s1 = Automerge.change(s1, doc => doc.one = 1)
- s2 = Automerge.change(s2, doc => doc.two = 2)
- assert.deepStrictEqual(s1, {zero: 0, one: 1})
- assert.deepStrictEqual(s2, {zero: 0, two: 2})
- Automerge.free(s1)
- Automerge.free(s2)
- })
-
- it('should work with Object.assign merges', () => {
- s1 = Automerge.change(s1, doc1 => {
- doc1.stuff = {foo: 'bar', baz: 'blur'}
- })
- s1 = Automerge.change(s1, doc1 => {
- doc1.stuff = Object.assign({}, doc1.stuff, {baz: 'updated!'})
- })
- assert.deepStrictEqual(s1, {stuff: {foo: 'bar', baz: 'updated!'}})
- })
-
- it('should support Date objects in maps', () => {
- const now = new Date()
- s1 = Automerge.change(s1, doc => doc.now = now)
- let changes = Automerge.getAllChanges(s1)
- ;[s2] = Automerge.applyChanges(Automerge.init(), changes)
- assert.strictEqual(s2.now instanceof Date, true)
- assert.strictEqual(s2.now.getTime(), now.getTime())
- })
-
- it('should support Date objects in lists', () => {
- const now = new Date()
- s1 = Automerge.change(s1, doc => doc.list = [now])
- let changes = Automerge.getAllChanges(s1)
- ;[s2] = Automerge.applyChanges(Automerge.init(), changes)
- assert.strictEqual(s2.list[0] instanceof Date, true)
- assert.strictEqual(s2.list[0].getTime(), now.getTime())
- })
-
- /*
- it.skip('should call patchCallback if supplied', () => {
- const callbacks = [], actor = Automerge.getActorId(s1)
- const s2 = Automerge.change(s1, {
- patchCallback: (patch, before, after, local) => callbacks.push({patch, before, after, local})
- }, doc => {
- doc.birds = ['Goldfinch']
- })
- assert.strictEqual(callbacks.length, 1)
- assert.deepStrictEqual(callbacks[0].patch, {
- actor, seq: 1, maxOp: 2, deps: [], clock: {[actor]: 1}, pendingChanges: 0,
- diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: {
- objectId: `1@${actor}`, type: 'list', edits: [
- {action: 'insert', index: 0, elemId: `2@${actor}`, opId: `2@${actor}`, value: {'type': 'value', value: 'Goldfinch'}}
- ]
- }}}}
- })
- assert.strictEqual(callbacks[0].before, s1)
- assert.strictEqual(callbacks[0].after, s2)
- assert.strictEqual(callbacks[0].local, true)
- })
- */
-
- /*
- it.skip('should call a patchCallback set up on document initialisation', () => {
- const callbacks = []
- s1 = Automerge.init({
- patchCallback: (patch, before, after, local) => callbacks.push({patch, before, after, local})
- })
- const s2 = Automerge.change(s1, doc => doc.bird = 'Goldfinch')
- const actor = Automerge.getActorId(s1)
- assert.strictEqual(callbacks.length, 1)
- assert.deepStrictEqual(callbacks[0].patch, {
- actor, seq: 1, maxOp: 1, deps: [], clock: {[actor]: 1}, pendingChanges: 0,
- diffs: {objectId: '_root', type: 'map', props: {bird: {[`1@${actor}`]: {type: 'value', value: 'Goldfinch'}}}}
- })
- assert.strictEqual(callbacks[0].before, s1)
- assert.strictEqual(callbacks[0].after, s2)
- assert.strictEqual(callbacks[0].local, true)
- })
- */
- })
-
- describe('emptyChange()', () => {
- it('should append an empty change to the history', () => {
- s1 = Automerge.change(s1, 'first change', doc => doc.field = 123)
- s2 = Automerge.emptyChange(s1, 'empty change')
- assert.notStrictEqual(s2, s1)
- assert.deepStrictEqual(s2, s1)
- assert.deepStrictEqual(Automerge.getHistory(s2).map(state => state.change.message), ['first change', 'empty change'])
- })
-
- it('should reference dependencies', () => {
- s1 = Automerge.change(s1, doc => doc.field = 123)
- s2 = Automerge.merge(Automerge.init(), s1)
- s2 = Automerge.change(s2, doc => doc.other = 'hello')
- s1 = Automerge.emptyChange(Automerge.merge(s1, s2))
- const history = Automerge.getHistory(s1)
- const emptyChange = history[2].change
- assert.deepStrictEqual(emptyChange.deps, [history[0].change.hash, history[1].change.hash].sort())
- assert.deepStrictEqual(emptyChange.ops, [])
- })
- })
-
- describe('root object', () => {
- it('should handle single-property assignment', () => {
- s1 = Automerge.change(s1, 'set bar', doc => doc.foo = 'bar')
- s1 = Automerge.change(s1, 'set zap', doc => doc.zip = 'zap')
- assert.strictEqual(s1.foo, 'bar')
- assert.strictEqual(s1.zip, 'zap')
- assert.deepStrictEqual(s1, {foo: 'bar', zip: 'zap'})
- })
-
- it('should allow floating-point values', () => {
- s1 = Automerge.change(s1, doc => doc.number = 1589032171.1)
- assert.strictEqual(s1.number, 1589032171.1)
- })
-
- it('should handle multi-property assignment', () => {
- s1 = Automerge.change(s1, 'multi-assign', doc => {
- Object.assign(doc, {foo: 'bar', answer: 42})
- })
- assert.strictEqual(s1.foo, 'bar')
- assert.strictEqual(s1.answer, 42)
- assert.deepStrictEqual(s1, {foo: 'bar', answer: 42})
- })
-
- it('should handle root property deletion', () => {
- s1 = Automerge.change(s1, 'set foo', doc => { doc.foo = 'bar'; doc.something = null })
- s1 = Automerge.change(s1, 'del foo', doc => { delete doc.foo })
- assert.strictEqual(s1.foo, undefined)
- assert.strictEqual(s1.something, null)
- assert.deepStrictEqual(s1, {something: null})
- })
-
- it('should follow JS delete behavior', () => {
- s1 = Automerge.change(s1, 'set foo', doc => { doc.foo = 'bar' })
- let deleted
- s1 = Automerge.change(s1, 'del foo', doc => {
- deleted = delete doc.foo
- })
- assert.strictEqual(deleted, true)
- let deleted2
- assert.doesNotThrow(() => {
- s1 = Automerge.change(s1, 'del baz', doc => {
- deleted2 = delete doc.baz
- })
- })
- assert.strictEqual(deleted2, true)
- })
-
- it('should allow the type of a property to be changed', () => {
- s1 = Automerge.change(s1, 'set number', doc => doc.prop = 123)
- assert.strictEqual(s1.prop, 123)
- s1 = Automerge.change(s1, 'set string', doc => doc.prop = '123')
- assert.strictEqual(s1.prop, '123')
- s1 = Automerge.change(s1, 'set null', doc => doc.prop = null)
- assert.strictEqual(s1.prop, null)
- s1 = Automerge.change(s1, 'set bool', doc => doc.prop = true)
- assert.strictEqual(s1.prop, true)
- })
-
- it('should require property names to be valid', () => {
- assert.throws(() => {
- Automerge.change(s1, 'foo', doc => doc[''] = 'x')
- }, /must not be an empty string/)
- })
-
- it('should not allow assignment of unsupported datatypes', () => {
- Automerge.change(s1, doc => {
- assert.throws(() => { doc.foo = undefined }, /Unsupported type of value: undefined/)
- assert.throws(() => { doc.foo = {prop: undefined} }, /Unsupported type of value: undefined/)
- assert.throws(() => { doc.foo = () => {} }, /Unsupported type of value: function/)
- assert.throws(() => { doc.foo = Symbol('foo') }, /Unsupported type of value: symbol/)
- })
- })
- })
-
- describe('nested maps', () => {
- it('should assign an objectId to nested maps', () => {
- s1 = Automerge.change(s1, doc => { doc.nested = {} })
- let id = Automerge.getObjectId(s1.nested)
- assert.strictEqual(OPID_PATTERN.test(Automerge.getObjectId(s1.nested)), true)
- assert.notEqual(Automerge.getObjectId(s1.nested), '_root')
- })
-
- it('should handle assignment of a nested property', () => {
- s1 = Automerge.change(s1, 'first change', doc => {
- doc.nested = {}
- doc.nested.foo = 'bar'
- })
- s1 = Automerge.change(s1, 'second change', doc => {
- doc.nested.one = 1
- })
- assert.deepStrictEqual(s1, {nested: {foo: 'bar', one: 1}})
- assert.deepStrictEqual(s1.nested, {foo: 'bar', one: 1})
- assert.strictEqual(s1.nested.foo, 'bar')
- assert.strictEqual(s1.nested.one, 1)
- })
-
- it('should handle assignment of an object literal', () => {
- s1 = Automerge.change(s1, doc => {
- doc.textStyle = {bold: false, fontSize: 12}
- })
- assert.deepStrictEqual(s1, {textStyle: {bold: false, fontSize: 12}})
- assert.deepStrictEqual(s1.textStyle, {bold: false, fontSize: 12})
- assert.strictEqual(s1.textStyle.bold, false)
- assert.strictEqual(s1.textStyle.fontSize, 12)
- })
-
- it('should handle assignment of multiple nested properties', () => {
- s1 = Automerge.change(s1, doc => {
- doc.textStyle = {bold: false, fontSize: 12}
- Object.assign(doc.textStyle, {typeface: 'Optima', fontSize: 14})
- })
- assert.strictEqual(s1.textStyle.typeface, 'Optima')
- assert.strictEqual(s1.textStyle.bold, false)
- assert.strictEqual(s1.textStyle.fontSize, 14)
- assert.deepStrictEqual(s1.textStyle, {typeface: 'Optima', bold: false, fontSize: 14})
- })
-
- it('should handle arbitrary-depth nesting', () => {
- s1 = Automerge.change(s1, doc => {
- doc.a = {b: {c: {d: {e: {f: {g: 'h'}}}}}}
- })
- s1 = Automerge.change(s1, doc => {
- doc.a.b.c.d.e.f.i = 'j'
- })
- assert.deepStrictEqual(s1, {a: { b: { c: { d: { e: { f: { g: 'h', i: 'j'}}}}}}})
- assert.strictEqual(s1.a.b.c.d.e.f.g, 'h')
- assert.strictEqual(s1.a.b.c.d.e.f.i, 'j')
- })
-
- it('should allow an old object to be replaced with a new one', () => {
- s1 = Automerge.change(s1, 'change 1', doc => {
- doc.myPet = {species: 'dog', legs: 4, breed: 'dachshund'}
- })
- s2 = Automerge.change(s1, 'change 2', doc => {
- doc.myPet = {species: 'koi', variety: '紅白', colors: {red: true, white: true, black: false}}
- })
- assert.deepStrictEqual(s1.myPet, {
- species: 'dog', legs: 4, breed: 'dachshund'
- })
- assert.strictEqual(s1.myPet.breed, 'dachshund')
- assert.deepStrictEqual(s2.myPet, {
- species: 'koi', variety: '紅白',
- colors: {red: true, white: true, black: false}
- })
- assert.strictEqual(s2.myPet.breed, undefined)
- assert.strictEqual(s2.myPet.variety, '紅白')
- })
-
- it('should allow fields to be changed between primitive and nested map', () => {
- s1 = Automerge.change(s1, doc => doc.color = '#ff7f00')
- assert.strictEqual(s1.color, '#ff7f00')
- s1 = Automerge.change(s1, doc => doc.color = {red: 255, green: 127, blue: 0})
- assert.deepStrictEqual(s1.color, {red: 255, green: 127, blue: 0})
- s1 = Automerge.change(s1, doc => doc.color = '#ff7f00')
- assert.strictEqual(s1.color, '#ff7f00')
- })
-
- it('should not allow several references to the same map object', () => {
- s1 = Automerge.change(s1, doc => doc.object = {})
- assert.throws(() => {
- Automerge.change(s1, doc => { doc.x = doc.object })
- }, /Cannot create a reference to an existing document object/)
- assert.throws(() => {
- Automerge.change(s1, doc => { doc.x = s1.object })
- }, /Cannot create a reference to an existing document object/)
- assert.throws(() => {
- Automerge.change(s1, doc => { doc.x = {}; doc.y = doc.x })
- }, /Cannot create a reference to an existing document object/)
- })
-
- it('should not allow object-copying idioms', () => {
- s1 = Automerge.change(s1, doc => {
- doc.items = [{id: 'id1', name: 'one'}, {id: 'id2', name: 'two'}]
- })
- // People who have previously worked with immutable state in JavaScript may be tempted
- // to use idioms like this, which don't work well with Automerge -- see e.g.
- // https://github.com/automerge/automerge/issues/260
- assert.throws(() => {
- Automerge.change(s1, doc => {
- doc.items = [...doc.items, {id: 'id3', name: 'three'}]
- })
- }, /Cannot create a reference to an existing document object/)
- })
-
- it('should handle deletion of properties within a map', () => {
- s1 = Automerge.change(s1, 'set style', doc => {
- doc.textStyle = {typeface: 'Optima', bold: false, fontSize: 12}
- })
- s1 = Automerge.change(s1, 'non-bold', doc => delete doc.textStyle.bold)
- assert.strictEqual(s1.textStyle.bold, undefined)
- assert.deepStrictEqual(s1.textStyle, {typeface: 'Optima', fontSize: 12})
- })
-
- it('should handle deletion of references to a map', () => {
- s1 = Automerge.change(s1, 'make rich text doc', doc => {
- Object.assign(doc, {title: 'Hello', textStyle: {typeface: 'Optima', fontSize: 12}})
- })
- s1 = Automerge.change(s1, doc => delete doc.textStyle)
- assert.strictEqual(s1.textStyle, undefined)
- assert.deepStrictEqual(s1, {title: 'Hello'})
- })
-
- it('should validate field names', () => {
- s1 = Automerge.change(s1, doc => doc.nested = {})
- assert.throws(() => { Automerge.change(s1, doc => doc.nested[''] = 'x') }, /must not be an empty string/)
- assert.throws(() => { Automerge.change(s1, doc => doc.nested = {'': 'x'}) }, /must not be an empty string/)
- })
- })
-
- describe('lists', () => {
- it('should allow elements to be inserted', () => {
- s1 = Automerge.change(s1, doc => doc.noodles = [])
- s1 = Automerge.change(s1, doc => doc.noodles.insertAt(0, 'udon', 'soba'))
- s1 = Automerge.change(s1, doc => doc.noodles.insertAt(1, 'ramen'))
- assert.deepStrictEqual(s1, {noodles: ['udon', 'ramen', 'soba']})
- assert.deepStrictEqual(s1.noodles, ['udon', 'ramen', 'soba'])
- assert.strictEqual(s1.noodles[0], 'udon')
- assert.strictEqual(s1.noodles[1], 'ramen')
- assert.strictEqual(s1.noodles[2], 'soba')
- assert.strictEqual(s1.noodles.length, 3)
- })
-
- it('should handle assignment of a list literal', () => {
- s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba'])
- assert.deepStrictEqual(s1, {noodles: ['udon', 'ramen', 'soba']})
- assert.deepStrictEqual(s1.noodles, ['udon', 'ramen', 'soba'])
- assert.strictEqual(s1.noodles[0], 'udon')
- assert.strictEqual(s1.noodles[1], 'ramen')
- assert.strictEqual(s1.noodles[2], 'soba')
- assert.strictEqual(s1.noodles[3], undefined)
- assert.strictEqual(s1.noodles.length, 3)
- })
-
- it('should only allow numeric indexes', () => {
- s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba'])
- s1 = Automerge.change(s1, doc => doc.noodles[1] = 'Ramen!')
- assert.strictEqual(s1.noodles[1], 'Ramen!')
- s1 = Automerge.change(s1, doc => doc.noodles['1'] = 'RAMEN!!!')
- assert.strictEqual(s1.noodles[1], 'RAMEN!!!')
- assert.throws(() => { Automerge.change(s1, doc => doc.noodles.favourite = 'udon') }, /list index must be a number/)
- assert.throws(() => { Automerge.change(s1, doc => doc.noodles[''] = 'udon') }, /list index must be a number/)
- assert.throws(() => { Automerge.change(s1, doc => doc.noodles['1e6'] = 'udon') }, /list index must be a number/)
- })
-
- it('should handle deletion of list elements', () => {
- s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba'])
- s1 = Automerge.change(s1, doc => delete doc.noodles[1])
- assert.deepStrictEqual(s1.noodles, ['udon', 'soba'])
- s1 = Automerge.change(s1, doc => doc.noodles.deleteAt(1))
- assert.deepStrictEqual(s1.noodles, ['udon'])
- assert.strictEqual(s1.noodles[0], 'udon')
- assert.strictEqual(s1.noodles[1], undefined)
- assert.strictEqual(s1.noodles[2], undefined)
- assert.strictEqual(s1.noodles.length, 1)
- })
-
- it('should handle assignment of individual list indexes', () => {
- s1 = Automerge.change(s1, doc => doc.japaneseFood = ['udon', 'ramen', 'soba'])
- s1 = Automerge.change(s1, doc => doc.japaneseFood[1] = 'sushi')
- assert.deepStrictEqual(s1.japaneseFood, ['udon', 'sushi', 'soba'])
- assert.strictEqual(s1.japaneseFood[0], 'udon')
- assert.strictEqual(s1.japaneseFood[1], 'sushi')
- assert.strictEqual(s1.japaneseFood[2], 'soba')
- assert.strictEqual(s1.japaneseFood[3], undefined)
- assert.strictEqual(s1.japaneseFood.length, 3)
- })
-
- it('concurrent edits insert in reverse actorid order if counters equal', () => {
- s1 = Automerge.init('aaaa')
- s2 = Automerge.init('bbbb')
- s1 = Automerge.change(s1, doc => doc.list = [])
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.list.splice(0, 0, "2@aaaa"))
- s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "2@bbbb"))
- s2 = Automerge.merge(s2, s1)
- assert.deepStrictEqual(Automerge.toJS(s2).list, ["2@bbbb", "2@aaaa"])
- })
-
- it('concurrent edits insert in reverse counter order if different', () => {
- s1 = Automerge.init('aaaa')
- s2 = Automerge.init('bbbb')
- s1 = Automerge.change(s1, doc => doc.list = [])
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.list.splice(0, 0, "2@aaaa"))
- s2 = Automerge.change(s2, doc => doc.foo = "2@bbbb")
- s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, "3@bbbb"))
- s2 = Automerge.merge(s2, s1)
- assert.deepStrictEqual(s2.list, ["3@bbbb", "2@aaaa"])
- })
-
- it('should treat out-by-one assignment as insertion', () => {
- s1 = Automerge.change(s1, doc => doc.japaneseFood = ['udon'])
- s1 = Automerge.change(s1, doc => doc.japaneseFood[1] = 'sushi')
- assert.deepStrictEqual(s1.japaneseFood, ['udon', 'sushi'])
- assert.strictEqual(s1.japaneseFood[0], 'udon')
- assert.strictEqual(s1.japaneseFood[1], 'sushi')
- assert.strictEqual(s1.japaneseFood[2], undefined)
- assert.strictEqual(s1.japaneseFood.length, 2)
- })
-
- it('should not allow out-of-range assignment', () => {
- s1 = Automerge.change(s1, doc => doc.japaneseFood = ['udon'])
- assert.throws(() => { Automerge.change(s1, doc => doc.japaneseFood[4] = 'ramen') }, /is out of bounds/)
- })
-
- it('should allow bulk assignment of multiple list indexes', () => {
- s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba'])
- s1 = Automerge.change(s1, doc => Object.assign(doc.noodles, {0: 'うどん', 2: 'そば'}))
- assert.deepStrictEqual(s1.noodles, ['うどん', 'ramen', 'そば'])
- assert.strictEqual(s1.noodles[0], 'うどん')
- assert.strictEqual(s1.noodles[1], 'ramen')
- assert.strictEqual(s1.noodles[2], 'そば')
- assert.strictEqual(s1.noodles.length, 3)
- })
-
- it('should handle nested objects', () => {
- s1 = Automerge.change(s1, doc => doc.noodles = [{type: 'ramen', dishes: ['tonkotsu', 'shoyu']}])
- s1 = Automerge.change(s1, doc => doc.noodles.push({type: 'udon', dishes: ['tempura udon']}))
- s1 = Automerge.change(s1, doc => doc.noodles[0].dishes.push('miso'))
- assert.deepStrictEqual(s1, {noodles: [
- {type: 'ramen', dishes: ['tonkotsu', 'shoyu', 'miso']},
- {type: 'udon', dishes: ['tempura udon']}
- ]})
- assert.deepStrictEqual(s1.noodles[0], {
- type: 'ramen', dishes: ['tonkotsu', 'shoyu', 'miso']
- })
- assert.deepStrictEqual(s1.noodles[1], {
- type: 'udon', dishes: ['tempura udon']
- })
- })
-
- it('should handle nested lists', () => {
- s1 = Automerge.change(s1, doc => doc.noodleMatrix = [['ramen', 'tonkotsu', 'shoyu']])
- s1 = Automerge.change(s1, doc => doc.noodleMatrix.push(['udon', 'tempura udon']))
- s1 = Automerge.change(s1, doc => doc.noodleMatrix[0].push('miso'))
- assert.deepStrictEqual(s1.noodleMatrix, [['ramen', 'tonkotsu', 'shoyu', 'miso'], ['udon', 'tempura udon']])
- assert.deepStrictEqual(s1.noodleMatrix[0], ['ramen', 'tonkotsu', 'shoyu', 'miso'])
- assert.deepStrictEqual(s1.noodleMatrix[1], ['udon', 'tempura udon'])
- })
-
- it('should handle deep nesting', () => {
- s1 = Automerge.change(s1, doc => doc.nesting = {
- maps: { m1: { m2: { foo: "bar", baz: {} }, m2a: { } } },
- lists: [ [ 1, 2, 3 ], [ [ 3, 4, 5, [6]], 7 ] ],
- mapsinlists: [ { foo: "bar" }, [ { bar: "baz" } ] ],
- listsinmaps: { foo: [1, 2, 3], bar: [ [ { baz: "123" } ] ] }
- })
- s1 = Automerge.change(s1, doc => {
- doc.nesting.maps.m1a = "123"
- doc.nesting.maps.m1.m2.baz.xxx = "123"
- delete doc.nesting.maps.m1.m2a
- doc.nesting.lists.shift()
- doc.nesting.lists[0][0].pop()
- doc.nesting.lists[0][0].push(100)
- doc.nesting.mapsinlists[0].foo = "baz"
- doc.nesting.mapsinlists[1][0].foo = "bar"
- delete doc.nesting.mapsinlists[1]
- doc.nesting.listsinmaps.foo.push(4)
- doc.nesting.listsinmaps.bar[0][0].baz = "456"
- delete doc.nesting.listsinmaps.bar
- })
- assert.deepStrictEqual(s1, { nesting: {
- maps: { m1: { m2: { foo: "bar", baz: { xxx: "123" } } }, m1a: "123" },
- lists: [ [ [ 3, 4, 5, 100 ], 7 ] ],
- mapsinlists: [ { foo: "baz" } ],
- listsinmaps: { foo: [1, 2, 3, 4] }
- }})
- })
-
- it('should handle replacement of the entire list', () => {
- s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'soba', 'ramen'])
- s1 = Automerge.change(s1, doc => doc.japaneseNoodles = doc.noodles.slice())
- s1 = Automerge.change(s1, doc => doc.noodles = ['wonton', 'pho'])
- assert.deepStrictEqual(s1, {
- noodles: ['wonton', 'pho'],
- japaneseNoodles: ['udon', 'soba', 'ramen']
- })
- assert.deepStrictEqual(s1.noodles, ['wonton', 'pho'])
- assert.strictEqual(s1.noodles[0], 'wonton')
- assert.strictEqual(s1.noodles[1], 'pho')
- assert.strictEqual(s1.noodles[2], undefined)
- assert.strictEqual(s1.noodles.length, 2)
- })
-
- it('should allow assignment to change the type of a list element', () => {
- s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'soba', 'ramen'])
- assert.deepStrictEqual(s1.noodles, ['udon', 'soba', 'ramen'])
- s1 = Automerge.change(s1, doc => doc.noodles[1] = {type: 'soba', options: ['hot', 'cold']})
- assert.deepStrictEqual(s1.noodles, ['udon', {type: 'soba', options: ['hot', 'cold']}, 'ramen'])
- s1 = Automerge.change(s1, doc => doc.noodles[1] = ['hot soba', 'cold soba'])
- assert.deepStrictEqual(s1.noodles, ['udon', ['hot soba', 'cold soba'], 'ramen'])
- s1 = Automerge.change(s1, doc => doc.noodles[1] = 'soba is the best')
- assert.deepStrictEqual(s1.noodles, ['udon', 'soba is the best', 'ramen'])
- })
-
- it('should allow list creation and assignment in the same change callback', () => {
- s1 = Automerge.change(Automerge.init(), doc => {
- doc.letters = ['a', 'b', 'c']
- doc.letters[1] = 'd'
- })
- assert.strictEqual(s1.letters[1], 'd')
- })
-
- it('should allow adding and removing list elements in the same change callback', () => {
- s1 = Automerge.change(Automerge.init(), doc => doc.noodles = [])
- s1 = Automerge.change(s1, doc => {
- doc.noodles.push('udon')
- doc.noodles.deleteAt(0)
- })
- assert.deepStrictEqual(s1, {noodles: []})
- // do the add-remove cycle twice, test for #151 (https://github.com/automerge/automerge/issues/151)
- s1 = Automerge.change(s1, doc => {
- doc.noodles.push('soba')
- doc.noodles.deleteAt(0)
- })
- assert.deepStrictEqual(s1, {noodles: []})
- })
-
- it('should handle arbitrary-depth nesting', () => {
- s1 = Automerge.change(s1, doc => doc.maze = [[[[[[[['noodles', ['here']]]]]]]]])
- s1 = Automerge.change(s1, doc => doc.maze[0][0][0][0][0][0][0][1].unshift('found'))
- assert.deepStrictEqual(s1.maze, [[[[[[[['noodles', ['found', 'here']]]]]]]]])
- assert.deepStrictEqual(s1.maze[0][0][0][0][0][0][0][1][1], 'here')
- s2 = Automerge.load(Automerge.save(s1))
- assert.deepStrictEqual(s1,s2)
- })
-
- it('should not allow several references to the same list object', () => {
- s1 = Automerge.change(s1, doc => doc.list = [])
- assert.throws(() => {
- Automerge.change(s1, doc => { doc.x = doc.list })
- }, /Cannot create a reference to an existing document object/)
- assert.throws(() => {
- Automerge.change(s1, doc => { doc.x = s1.list })
- }, /Cannot create a reference to an existing document object/)
- assert.throws(() => {
- Automerge.change(s1, doc => { doc.x = []; doc.y = doc.x })
- }, /Cannot create a reference to an existing document object/)
- })
- })
-
- describe('counters', () => {
- // counter
- it('should allow deleting counters from maps', () => {
- const s1 = Automerge.change(Automerge.init(), doc => doc.birds = {wrens: new Automerge.Counter(1)})
- const s2 = Automerge.change(s1, doc => doc.birds.wrens.increment(2))
- const s3 = Automerge.change(s2, doc => delete doc.birds.wrens)
- assert.deepStrictEqual(s2, {birds: {wrens: new Automerge.Counter(3)}})
- assert.deepStrictEqual(s3, {birds: {}})
- })
-
- // counter
- /*
- it('should not allow deleting counters from lists', () => {
- const s1 = Automerge.change(Automerge.init(), doc => doc.recordings = [new Automerge.Counter(1)])
- const s2 = Automerge.change(s1, doc => doc.recordings[0].increment(2))
- assert.deepStrictEqual(s2, {recordings: [new Automerge.Counter(3)]})
- assert.throws(() => { Automerge.change(s2, doc => doc.recordings.deleteAt(0)) }, /Unsupported operation/)
- })
- */
- })
- })
-
- describe('concurrent use', () => {
- let s1, s2, s3
- beforeEach(() => {
- s1 = Automerge.init()
- s2 = Automerge.init()
- s3 = Automerge.init()
- })
-
- it('should merge concurrent updates of different properties', () => {
- s1 = Automerge.change(s1, doc => doc.foo = 'bar')
- s2 = Automerge.change(s2, doc => doc.hello = 'world')
- s3 = Automerge.merge(s1, s2)
- assert.strictEqual(s3.foo, 'bar')
- assert.strictEqual(s3.hello, 'world')
- assert.deepStrictEqual(s3, {foo: 'bar', hello: 'world'})
- assert.strictEqual(Automerge.getConflicts(s3, 'foo'), undefined)
- assert.strictEqual(Automerge.getConflicts(s3, 'hello'), undefined)
- s4 = Automerge.load(Automerge.save(s3))
- assert.deepEqual(s3,s4)
- })
-
- it('should add concurrent increments of the same property', () => {
- s1 = Automerge.change(s1, doc => doc.counter = new Automerge.Counter())
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.counter.increment())
- s2 = Automerge.change(s2, doc => doc.counter.increment(2))
- s3 = Automerge.merge(s1, s2)
- assert.strictEqual(s1.counter.value, 1)
- assert.strictEqual(s2.counter.value, 2)
- assert.strictEqual(s3.counter.value, 3)
- assert.strictEqual(Automerge.getConflicts(s3, 'counter'), undefined)
- s4 = Automerge.load(Automerge.save(s3))
- assert.deepEqual(s3,s4)
- })
-
- it('should add increments only to the values they precede', () => {
- s1 = Automerge.change(s1, doc => doc.counter = new Automerge.Counter(0))
- s1 = Automerge.change(s1, doc => doc.counter.increment())
- s2 = Automerge.change(s2, doc => doc.counter = new Automerge.Counter(100))
- s2 = Automerge.change(s2, doc => doc.counter.increment(3))
- s3 = Automerge.merge(s1, s2)
- if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) {
- assert.deepStrictEqual(s3, {counter: new Automerge.Counter(1)})
- } else {
- assert.deepStrictEqual(s3, {counter: new Automerge.Counter(103)})
- }
- assert.deepStrictEqual(Automerge.getConflicts(s3, 'counter'), {
- [`1@${Automerge.getActorId(s1)}`]: new Automerge.Counter(1),
- [`1@${Automerge.getActorId(s2)}`]: new Automerge.Counter(103)
- })
- s4 = Automerge.load(Automerge.save(s3))
- assert.deepEqual(s3,s4)
- })
-
- it('should detect concurrent updates of the same field', () => {
- s1 = Automerge.change(s1, doc => doc.field = 'one')
- s2 = Automerge.change(s2, doc => doc.field = 'two')
- s3 = Automerge.merge(s1, s2)
- if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) {
- assert.deepStrictEqual(s3, {field: 'one'})
- } else {
- assert.deepStrictEqual(s3, {field: 'two'})
- }
- assert.deepStrictEqual(Automerge.getConflicts(s3, 'field'), {
- [`1@${Automerge.getActorId(s1)}`]: 'one',
- [`1@${Automerge.getActorId(s2)}`]: 'two'
- })
- })
-
- it('should detect concurrent updates of the same list element', () => {
- s1 = Automerge.change(s1, doc => doc.birds = ['finch'])
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.birds[0] = 'greenfinch')
- s2 = Automerge.change(s2, doc => doc.birds[0] = 'goldfinch')
- s3 = Automerge.merge(s1, s2)
- if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) {
- assert.deepStrictEqual(s3.birds, ['greenfinch'])
- } else {
- assert.deepStrictEqual(s3.birds, ['goldfinch'])
- }
- assert.deepStrictEqual(Automerge.getConflicts(s3.birds, 0), {
- [`3@${Automerge.getActorId(s1)}`]: 'greenfinch',
- [`3@${Automerge.getActorId(s2)}`]: 'goldfinch'
- })
- })
-
- it('should handle assignment conflicts of different types', () => {
- s1 = Automerge.change(s1, doc => doc.field = 'string')
- s2 = Automerge.change(s2, doc => doc.field = ['list'])
- s3 = Automerge.change(s3, doc => doc.field = {thing: 'map'})
- s1 = Automerge.merge(Automerge.merge(s1, s2), s3)
- assertEqualsOneOf(s1.field, 'string', ['list'], {thing: 'map'})
- assert.deepStrictEqual(Automerge.getConflicts(s1, 'field'), {
- [`1@${Automerge.getActorId(s1)}`]: 'string',
- [`1@${Automerge.getActorId(s2)}`]: ['list'],
- [`1@${Automerge.getActorId(s3)}`]: {thing: 'map'}
- })
- })
-
- it('should handle changes within a conflicting map field', () => {
- s1 = Automerge.change(s1, doc => doc.field = 'string')
- s2 = Automerge.change(s2, doc => doc.field = {})
- s2 = Automerge.change(s2, doc => doc.field.innerKey = 42)
- s3 = Automerge.merge(s1, s2)
- assertEqualsOneOf(s3.field, 'string', {innerKey: 42})
- assert.deepStrictEqual(Automerge.getConflicts(s3, 'field'), {
- [`1@${Automerge.getActorId(s1)}`]: 'string',
- [`1@${Automerge.getActorId(s2)}`]: {innerKey: 42}
- })
- })
-
- it('should handle changes within a conflicting list element', () => {
- s1 = Automerge.change(s1, doc => doc.list = ['hello'])
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.list[0] = {map1: true})
- s1 = Automerge.change(s1, doc => doc.list[0].key = 1)
- s2 = Automerge.change(s2, doc => doc.list[0] = {map2: true})
- s2 = Automerge.change(s2, doc => doc.list[0].key = 2)
- s3 = Automerge.merge(s1, s2)
- if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) {
- assert.deepStrictEqual(s3.list, [{map1: true, key: 1}])
- } else {
- assert.deepStrictEqual(s3.list, [{map2: true, key: 2}])
- }
- assert.deepStrictEqual(Automerge.getConflicts(s3.list, 0), {
- [`3@${Automerge.getActorId(s1)}`]: {map1: true, key: 1},
- [`3@${Automerge.getActorId(s2)}`]: {map2: true, key: 2}
- })
- })
-
- it('should not merge concurrently assigned nested maps', () => {
- s1 = Automerge.change(s1, doc => doc.config = {background: 'blue'})
- s2 = Automerge.change(s2, doc => doc.config = {logo_url: 'logo.png'})
- s3 = Automerge.merge(s1, s2)
- assertEqualsOneOf(s3.config, {background: 'blue'}, {logo_url: 'logo.png'})
- assert.deepStrictEqual(Automerge.getConflicts(s3, 'config'), {
- [`1@${Automerge.getActorId(s1)}`]: {background: 'blue'},
- [`1@${Automerge.getActorId(s2)}`]: {logo_url: 'logo.png'}
- })
- })
-
- it('should clear conflicts after assigning a new value', () => {
- s1 = Automerge.change(s1, doc => doc.field = 'one')
- s2 = Automerge.change(s2, doc => doc.field = 'two')
- s3 = Automerge.merge(s1, s2)
- s3 = Automerge.change(s3, doc => doc.field = 'three')
- assert.deepStrictEqual(s3, {field: 'three'})
- assert.strictEqual(Automerge.getConflicts(s3, 'field'), undefined)
- s2 = Automerge.merge(s2, s3)
- assert.deepStrictEqual(s2, {field: 'three'})
- assert.strictEqual(Automerge.getConflicts(s2, 'field'), undefined)
- })
-
- it('should handle concurrent insertions at different list positions', () => {
- s1 = Automerge.change(s1, doc => doc.list = ['one', 'three'])
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.list.splice(1, 0, 'two'))
- s2 = Automerge.change(s2, doc => doc.list.push('four'))
- s3 = Automerge.merge(s1, s2)
- assert.deepStrictEqual(s3, {list: ['one', 'two', 'three', 'four']})
- assert.strictEqual(Automerge.getConflicts(s3, 'list'), undefined)
- })
-
- it('should handle concurrent insertions at the same list position', () => {
- s1 = Automerge.change(s1, doc => doc.birds = ['parakeet'])
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.birds.push('starling'))
- s2 = Automerge.change(s2, doc => doc.birds.push('chaffinch'))
- s3 = Automerge.merge(s1, s2)
- assertEqualsOneOf(s3.birds, ['parakeet', 'starling', 'chaffinch'], ['parakeet', 'chaffinch', 'starling'])
- s2 = Automerge.merge(s2, s3)
- assert.deepStrictEqual(s2, s3)
- })
-
- it('should handle concurrent assignment and deletion of a map entry', () => {
- // Add-wins semantics
- s1 = Automerge.change(s1, doc => doc.bestBird = 'robin')
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => delete doc.bestBird)
- s2 = Automerge.change(s2, doc => doc.bestBird = 'magpie')
- s3 = Automerge.merge(s1, s2)
- assert.deepStrictEqual(s1, {})
- assert.deepStrictEqual(s2, {bestBird: 'magpie'})
- assert.deepStrictEqual(s3, {bestBird: 'magpie'})
- assert.strictEqual(Automerge.getConflicts(s3, 'bestBird'), undefined)
- })
-
- it('should handle concurrent assignment and deletion of a list element', () => {
- // Concurrent assignment ressurects a deleted list element. Perhaps a little
- // surprising, but consistent with add-wins semantics of maps (see test above)
- s1 = Automerge.change(s1, doc => doc.birds = ['blackbird', 'thrush', 'goldfinch'])
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.birds[1] = 'starling')
- s2 = Automerge.change(s2, doc => doc.birds.splice(1, 1))
- s3 = Automerge.merge(s1, s2)
- assert.deepStrictEqual(s1.birds, ['blackbird', 'starling', 'goldfinch'])
- assert.deepStrictEqual(s2.birds, ['blackbird', 'goldfinch'])
- assert.deepStrictEqual(s3.birds, ['blackbird', 'starling', 'goldfinch'])
- s4 = Automerge.load(Automerge.save(s3))
- assert.deepStrictEqual(s3, s4);
- })
-
- it('should handle insertion after a deleted list element', () => {
- s1 = Automerge.change(s1, doc => doc.birds = ['blackbird', 'thrush', 'goldfinch'])
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.birds.splice(1, 2))
- s2 = Automerge.change(s2, doc => doc.birds.splice(2, 0, 'starling'))
- s3 = Automerge.merge(s1, s2)
- assert.deepStrictEqual(s3, {birds: ['blackbird', 'starling']})
- assert.deepStrictEqual(Automerge.merge(s2, s3), {birds: ['blackbird', 'starling']})
- })
-
- it('should handle concurrent deletion of the same element', () => {
- s1 = Automerge.change(s1, doc => doc.birds = ['albatross', 'buzzard', 'cormorant'])
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.birds.deleteAt(1)) // buzzard
- s2 = Automerge.change(s2, doc => doc.birds.deleteAt(1)) // buzzard
- s3 = Automerge.merge(s1, s2)
- assert.deepStrictEqual(s3.birds, ['albatross', 'cormorant'])
- })
-
- it('should handle concurrent deletion of different elements', () => {
- s1 = Automerge.change(s1, doc => doc.birds = ['albatross', 'buzzard', 'cormorant'])
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.birds.deleteAt(0)) // albatross
- s2 = Automerge.change(s2, doc => doc.birds.deleteAt(1)) // buzzard
- s3 = Automerge.merge(s1, s2)
- assert.deepStrictEqual(s3.birds, ['cormorant'])
- })
-
- it('should handle concurrent updates at different levels of the tree', () => {
- // A delete higher up in the tree overrides an update in a subtree
- s1 = Automerge.change(s1, doc => doc.animals = {birds: {pink: 'flamingo', black: 'starling'}, mammals: ['badger']})
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.animals.birds.brown = 'sparrow')
- s2 = Automerge.change(s2, doc => delete doc.animals.birds)
- s3 = Automerge.merge(s1, s2)
- assert.deepStrictEqual(s1.animals, {
- birds: {
- pink: 'flamingo', brown: 'sparrow', black: 'starling'
- },
- mammals: ['badger']
- })
- assert.deepStrictEqual(s2.animals, {mammals: ['badger']})
- assert.deepStrictEqual(s3.animals, {mammals: ['badger']})
- })
-
- it('should handle updates of concurrently deleted objects', () => {
- s1 = Automerge.change(s1, doc => doc.birds = {blackbird: {feathers: 'black'}})
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => delete doc.birds.blackbird)
- s2 = Automerge.change(s2, doc => doc.birds.blackbird.beak = 'orange')
- s3 = Automerge.merge(s1, s2)
- assert.deepStrictEqual(s1, {birds: {}})
- })
-
- it('should not interleave sequence insertions at the same position', () => {
- s1 = Automerge.change(s1, doc => doc.wisdom = [])
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.wisdom.push('to', 'be', 'is', 'to', 'do'))
- s2 = Automerge.change(s2, doc => doc.wisdom.push('to', 'do', 'is', 'to', 'be'))
- s3 = Automerge.merge(s1, s2)
- assertEqualsOneOf(s3.wisdom,
- ['to', 'be', 'is', 'to', 'do', 'to', 'do', 'is', 'to', 'be'],
- ['to', 'do', 'is', 'to', 'be', 'to', 'be', 'is', 'to', 'do'])
- // In case you're wondering: http://quoteinvestigator.com/2013/09/16/do-be-do/
- })
-
- describe('multiple insertions at the same list position', () => {
- it('should handle insertion by greater actor ID', () => {
- s1 = Automerge.init('aaaa')
- s2 = Automerge.init('bbbb')
- s1 = Automerge.change(s1, doc => doc.list = ['two'])
- s2 = Automerge.merge(s2, s1)
- s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, 'one'))
- assert.deepStrictEqual(s2.list, ['one', 'two'])
- })
-
- it('should handle insertion by lesser actor ID', () => {
- s1 = Automerge.init('bbbb')
- s2 = Automerge.init('aaaa')
- s1 = Automerge.change(s1, doc => doc.list = ['two'])
- s2 = Automerge.merge(s2, s1)
- s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, 'one'))
- assert.deepStrictEqual(s2.list, ['one', 'two'])
- })
-
- it('should handle insertion regardless of actor ID', () => {
- s1 = Automerge.change(s1, doc => doc.list = ['two'])
- s2 = Automerge.merge(s2, s1)
- s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, 'one'))
- assert.deepStrictEqual(s2.list, ['one', 'two'])
- })
-
- it('should make insertion order consistent with causality', () => {
- s1 = Automerge.change(s1, doc => doc.list = ['four'])
- s2 = Automerge.merge(s2, s1)
- s2 = Automerge.change(s2, doc => doc.list.unshift('three'))
- s1 = Automerge.merge(s1, s2)
- s1 = Automerge.change(s1, doc => doc.list.unshift('two'))
- s2 = Automerge.merge(s2, s1)
- s2 = Automerge.change(s2, doc => doc.list.unshift('one'))
- assert.deepStrictEqual(s2.list, ['one', 'two', 'three', 'four'])
- })
- })
- })
-
- describe('saving and loading', () => {
- it('should save and restore an empty document', () => {
- let s = Automerge.load(Automerge.save(Automerge.init()))
- assert.deepStrictEqual(s, {})
- })
-
- it('should generate a new random actor ID', () => {
- let s1 = Automerge.init()
- let s2 = Automerge.load(Automerge.save(s1))
- assert.strictEqual(UUID_PATTERN.test(Automerge.getActorId(s1).toString()), true)
- assert.strictEqual(UUID_PATTERN.test(Automerge.getActorId(s2).toString()), true)
- assert.notEqual(Automerge.getActorId(s1), Automerge.getActorId(s2))
- })
-
- it('should allow a custom actor ID to be set', () => {
- let s = Automerge.load(Automerge.save(Automerge.init()), '333333')
- assert.strictEqual(Automerge.getActorId(s), '333333')
- })
-
- it('should reconstitute complex datatypes', () => {
- let s1 = Automerge.change(Automerge.init(), doc => doc.todos = [{title: 'water plants', done: false}])
- let s2 = Automerge.load(Automerge.save(s1))
- assert.deepStrictEqual(s2, {todos: [{title: 'water plants', done: false}]})
- })
-
- it('should save and load maps with @ symbols in the keys', () => {
- let s1 = Automerge.change(Automerge.init(), doc => doc["123@4567"] = "hello")
- let s2 = Automerge.load(Automerge.save(s1))
- assert.deepStrictEqual(s2, { "123@4567": "hello" })
- })
-
- it('should reconstitute conflicts', () => {
- let s1 = Automerge.change(Automerge.init('111111'), doc => doc.x = 3)
- let s2 = Automerge.change(Automerge.init('222222'), doc => doc.x = 5)
- s1 = Automerge.merge(s1, s2)
- let s3 = Automerge.load(Automerge.save(s1))
- assert.strictEqual(s1.x, 5)
- assert.strictEqual(s3.x, 5)
- assert.deepStrictEqual(Automerge.getConflicts(s1, 'x'), {'1@111111': 3, '1@222222': 5})
- assert.deepStrictEqual(Automerge.getConflicts(s3, 'x'), {'1@111111': 3, '1@222222': 5})
- })
-
- it('should reconstitute element ID counters', () => {
- const s1 = Automerge.init('01234567')
- const s2 = Automerge.change(s1, doc => doc.list = ['a'])
- const listId = Automerge.getObjectId(s2.list)
- const changes12 = Automerge.getAllChanges(s2).map(decodeChange)
- assert.deepStrictEqual(changes12, [{
- hash: changes12[0].hash, actor: '01234567', seq: 1, startOp: 1,
- time: changes12[0].time, message: '', deps: [], ops: [
- {obj: '_root', action: 'makeList', key: 'list', insert: false, pred: []},
- {obj: listId, action: 'set', elemId: '_head', insert: true, value: 'a', pred: []}
- ]
- }])
- const s3 = Automerge.change(s2, doc => doc.list.deleteAt(0))
- const s4 = Automerge.load(Automerge.save(s3), '01234567')
- const s5 = Automerge.change(s4, doc => doc.list.push('b'))
- const changes45 = Automerge.getAllChanges(s5).map(decodeChange)
- assert.deepStrictEqual(s5, {list: ['b']})
- assert.deepStrictEqual(changes45[2], {
- hash: changes45[2].hash, actor: '01234567', seq: 3, startOp: 4,
- time: changes45[2].time, message: '', deps: [changes45[1].hash], ops: [
- {obj: listId, action: 'set', elemId: '_head', insert: true, value: 'b', pred: []}
- ]
- })
- })
-
- it('should allow a reloaded list to be mutated', () => {
- let doc = Automerge.change(Automerge.init(), doc => doc.foo = [])
- doc = Automerge.load(Automerge.save(doc))
- doc = Automerge.change(doc, 'add', doc => doc.foo.push(1))
- doc = Automerge.load(Automerge.save(doc))
- assert.deepStrictEqual(doc.foo, [1])
- })
-
- it('should reload a document containing deflated columns', () => {
- // In this test, the keyCtr column is long enough for deflate compression to kick in, but the
- // keyStr column is short. Thus, the deflate bit gets set for keyCtr but not for keyStr.
- // When checking whether the columns appear in ascending order, we must ignore the deflate bit.
- let doc = Automerge.change(Automerge.init(), doc => {
- doc.list = []
- for (let i = 0; i < 200; i++) doc.list.insertAt(Math.floor(Math.random() * i), 'a')
- })
- Automerge.load(Automerge.save(doc))
- let expected = []
- for (let i = 0; i < 200; i++) expected.push('a')
- assert.deepStrictEqual(doc, {list: expected})
- })
-
- /*
- it.skip('should call patchCallback if supplied', () => {
- const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch'])
- const s2 = Automerge.change(s1, doc => doc.birds.push('Chaffinch'))
- const callbacks = [], actor = Automerge.getActorId(s1)
- const reloaded = Automerge.load(Automerge.save(s2), {
- patchCallback(patch, before, after, local) {
- callbacks.push({patch, before, after, local})
- }
- })
- assert.strictEqual(callbacks.length, 1)
- assert.deepStrictEqual(callbacks[0].patch, {
- maxOp: 3, deps: [decodeChange(Automerge.getAllChanges(s2)[1]).hash], clock: {[actor]: 2}, pendingChanges: 0,
- diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: {
- objectId: `1@${actor}`, type: 'list', edits: [
- {action: 'multi-insert', index: 0, elemId: `2@${actor}`, values: ['Goldfinch', 'Chaffinch']}
- ]
- }}}}
- })
- assert.deepStrictEqual(callbacks[0].before, {})
- assert.strictEqual(callbacks[0].after, reloaded)
- assert.strictEqual(callbacks[0].local, false)
- })
- */
- })
-
- describe('history API', () => {
- it('should return an empty history for an empty document', () => {
- assert.deepStrictEqual(Automerge.getHistory(Automerge.init()), [])
- })
-
- it('should make past document states accessible', () => {
- let s = Automerge.init()
- s = Automerge.change(s, doc => doc.config = {background: 'blue'})
- s = Automerge.change(s, doc => doc.birds = ['mallard'])
- s = Automerge.change(s, doc => doc.birds.unshift('oystercatcher'))
- assert.deepStrictEqual(Automerge.getHistory(s).map(state => state.snapshot), [
- {config: {background: 'blue'}},
- {config: {background: 'blue'}, birds: ['mallard']},
- {config: {background: 'blue'}, birds: ['oystercatcher', 'mallard']}
- ])
- })
-
- it('should make change messages accessible', () => {
- let s = Automerge.init()
- s = Automerge.change(s, 'Empty Bookshelf', doc => doc.books = [])
- s = Automerge.change(s, 'Add Orwell', doc => doc.books.push('Nineteen Eighty-Four'))
- s = Automerge.change(s, 'Add Huxley', doc => doc.books.push('Brave New World'))
- assert.deepStrictEqual(s.books, ['Nineteen Eighty-Four', 'Brave New World'])
- assert.deepStrictEqual(Automerge.getHistory(s).map(state => state.change.message),
- ['Empty Bookshelf', 'Add Orwell', 'Add Huxley'])
- })
- })
-
- describe('changes API', () => {
- it('should return an empty list on an empty document', () => {
- let changes = Automerge.getAllChanges(Automerge.init())
- assert.deepStrictEqual(changes, [])
- })
-
- it('should return an empty list when nothing changed', () => {
- let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch'])
- assert.deepStrictEqual(Automerge.getChanges(s1, s1), [])
- })
-
- it('should do nothing when applying an empty list of changes', () => {
- let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch'])
- assert.deepStrictEqual(Automerge.applyChanges(s1, [])[0], s1)
- })
-
- it('should return all changes when compared to an empty document', () => {
- let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch'])
- let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch'))
- let changes = Automerge.getChanges(Automerge.init(), s2)
- assert.strictEqual(changes.length, 2)
- })
-
- it('should allow a document copy to be reconstructed from scratch', () => {
- let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch'])
- let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch'))
- let changes = Automerge.getAllChanges(s2)
- let [s3] = Automerge.applyChanges(Automerge.init(), changes)
- assert.deepStrictEqual(s3.birds, ['Chaffinch', 'Bullfinch'])
- })
-
- it('should return changes since the last given version', () => {
- let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch'])
- let changes1 = Automerge.getAllChanges(s1)
- let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch'))
- let changes2 = Automerge.getChanges(s1, s2)
- assert.strictEqual(changes1.length, 1) // Add Chaffinch
- assert.strictEqual(changes2.length, 1) // Add Bullfinch
- })
-
- it('should incrementally apply changes since the last given version', () => {
- let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch'])
- let changes1 = Automerge.getAllChanges(s1)
- let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch'))
- let changes2 = Automerge.getChanges(s1, s2)
- let [s3] = Automerge.applyChanges(Automerge.init(), changes1)
- let [s4] = Automerge.applyChanges(s3, changes2)
- assert.deepStrictEqual(s3.birds, ['Chaffinch'])
- assert.deepStrictEqual(s4.birds, ['Chaffinch', 'Bullfinch'])
- })
-
- it('should handle updates to a list element', () => {
- let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch', 'Bullfinch'])
- let s2 = Automerge.change(s1, doc => doc.birds[0] = 'Goldfinch')
- let [s3] = Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2))
- assert.deepStrictEqual(s3.birds, ['Goldfinch', 'Bullfinch'])
- assert.strictEqual(Automerge.getConflicts(s3.birds, 0), undefined)
- })
-
- // TEXT
- it('should handle updates to a text object', () => {
- let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text('ab'))
- let s2 = Automerge.change(s1, doc => doc.text.set(0, 'A'))
- let [s3] = Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2))
- assert.deepStrictEqual([...s3.text], ['A', 'b'])
- })
-
- /*
- it.skip('should report missing dependencies', () => {
- let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch'])
- let s2 = Automerge.merge(Automerge.init(), s1)
- s2 = Automerge.change(s2, doc => doc.birds.push('Bullfinch'))
- let changes = Automerge.getAllChanges(s2)
- let [s3, patch] = Automerge.applyChanges(Automerge.init(), [changes[1]])
- assert.deepStrictEqual(s3, {})
- assert.deepStrictEqual(Automerge.Backend.getMissingDeps(Automerge.Frontend.getBackendState(s3)),
- decodeChange(changes[1]).deps)
- assert.strictEqual(patch.pendingChanges, 1)
- ;[s3, patch] = Automerge.applyChanges(s3, [changes[0]])
- assert.deepStrictEqual(s3.birds, ['Chaffinch', 'Bullfinch'])
- assert.deepStrictEqual(Automerge.Backend.getMissingDeps(Automerge.Frontend.getBackendState(s3)), [])
- assert.strictEqual(patch.pendingChanges, 0)
- })
- */
-
- it('should report missing dependencies with out-of-order applyChanges', () => {
- let s0 = Automerge.init()
- let s1 = Automerge.change(s0, doc => doc.test = ['a'])
- let changes01 = Automerge.getAllChanges(s1)
- let s2 = Automerge.change(s1, doc => doc.test = ['b'])
- let changes12 = Automerge.getChanges(s1, s2)
- let s3 = Automerge.change(s2, doc => doc.test = ['c'])
- let changes23 = Automerge.getChanges(s2, s3)
- let s4 = Automerge.init()
- let [s5] = Automerge.applyChanges(s4, changes23)
- let [s6] = Automerge.applyChanges(s5, changes12)
-// assert.deepStrictEqual(Automerge.Backend.getMissingDeps(Automerge.Frontend.getBackendState(s6)), [decodeChange(changes01[0]).hash])
- assert.deepStrictEqual(Automerge.getMissingDeps(s6), [decodeChange(changes01[0]).hash])
- })
-
- /*
- it.skip('should call patchCallback if supplied when applying changes', () => {
- const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch'])
- const callbacks = [], actor = Automerge.getActorId(s1)
- const before = Automerge.init()
- const [after, patch] = Automerge.applyChanges(before, Automerge.getAllChanges(s1), {
- patchCallback(patch, before, after, local) {
- callbacks.push({patch, before, after, local})
- }
- })
- assert.strictEqual(callbacks.length, 1)
- assert.deepStrictEqual(callbacks[0].patch, {
- maxOp: 2, deps: [decodeChange(Automerge.getAllChanges(s1)[0]).hash], clock: {[actor]: 1}, pendingChanges: 0,
- diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: {
- objectId: `1@${actor}`, type: 'list', edits: [
- {action: 'insert', index: 0, elemId: `2@${actor}`, opId: `2@${actor}`, value: {type: 'value', value: 'Goldfinch'}}
- ]
- }}}}
- })
- assert.strictEqual(callbacks[0].patch, patch)
- assert.strictEqual(callbacks[0].before, before)
- assert.strictEqual(callbacks[0].after, after)
- assert.strictEqual(callbacks[0].local, false)
- })
- */
-
- /*
- it.skip('should merge multiple applied changes into one patch', () => {
- const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch'])
- const s2 = Automerge.change(s1, doc => doc.birds.push('Chaffinch'))
- const patches = [], actor = Automerge.getActorId(s2)
- Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2),
- {patchCallback: p => patches.push(p)})
- assert.deepStrictEqual(patches, [{
- maxOp: 3, deps: [decodeChange(Automerge.getAllChanges(s2)[1]).hash], clock: {[actor]: 2}, pendingChanges: 0,
- diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: {
- objectId: `1@${actor}`, type: 'list', edits: [
- {action: 'multi-insert', index: 0, elemId: `2@${actor}`, values: ['Goldfinch', 'Chaffinch']}
- ]
- }}}}
- }])
- })
- */
-
- /*
- it.skip('should call a patchCallback registered on doc initialisation', () => {
- const s1 = Automerge.change(Automerge.init(), doc => doc.bird = 'Goldfinch')
- const patches = [], actor = Automerge.getActorId(s1)
- const before = Automerge.init({patchCallback: p => patches.push(p)})
- Automerge.applyChanges(before, Automerge.getAllChanges(s1))
- assert.deepStrictEqual(patches, [{
- maxOp: 1, deps: [decodeChange(Automerge.getAllChanges(s1)[0]).hash], clock: {[actor]: 1}, pendingChanges: 0,
- diffs: {objectId: '_root', type: 'map', props: {bird: {[`1@${actor}`]: {type: 'value', value: 'Goldfinch'}}}}
- }])
- })
- */
- })
-})
diff --git a/automerge-js/test/text_test.js b/automerge-js/test/text_test.js
deleted file mode 100644
index 57e8884e..00000000
--- a/automerge-js/test/text_test.js
+++ /dev/null
@@ -1,697 +0,0 @@
-const assert = require('assert')
-const Automerge = require('..')
-const { assertEqualsOneOf } = require('./helpers')
-
-function attributeStateToAttributes(accumulatedAttributes) {
- const attributes = {}
- Object.entries(accumulatedAttributes).forEach(([key, values]) => {
- if (values.length && values[0] !== null) {
- attributes[key] = values[0]
- }
- })
- return attributes
-}
-
-function isEquivalent(a, b) {
- const aProps = Object.getOwnPropertyNames(a)
- const bProps = Object.getOwnPropertyNames(b)
-
- if (aProps.length != bProps.length) {
- return false
- }
-
- for (let i = 0; i < aProps.length; i++) {
- const propName = aProps[i]
- if (a[propName] !== b[propName]) {
- return false
- }
- }
-
- return true
-}
-
-function isControlMarker(pseudoCharacter) {
- return typeof pseudoCharacter === 'object' && pseudoCharacter.attributes
-}
-
-function opFrom(text, attributes) {
- let op = { insert: text }
- if (Object.keys(attributes).length > 0) {
- op.attributes = attributes
- }
- return op
-}
-
-function accumulateAttributes(span, accumulatedAttributes) {
- Object.entries(span).forEach(([key, value]) => {
- if (!accumulatedAttributes[key]) {
- accumulatedAttributes[key] = []
- }
- if (value === null) {
- if (accumulatedAttributes[key].length === 0 || accumulatedAttributes[key] === null) {
- accumulatedAttributes[key].unshift(null)
- } else {
- accumulatedAttributes[key].shift()
- }
- } else {
- if (accumulatedAttributes[key][0] === null) {
- accumulatedAttributes[key].shift()
- } else {
- accumulatedAttributes[key].unshift(value)
- }
- }
- })
- return accumulatedAttributes
-}
-
-function automergeTextToDeltaDoc(text) {
- let ops = []
- let controlState = {}
- let currentString = ""
- let attributes = {}
- text.toSpans().forEach((span) => {
- if (isControlMarker(span)) {
- controlState = accumulateAttributes(span.attributes, controlState)
- } else {
- let next = attributeStateToAttributes(controlState)
-
- // if the next span has the same calculated attributes as the current span
- // don't bother outputting it as a separate span, just let it ride
- if (typeof span === 'string' && isEquivalent(next, attributes)) {
- currentString = currentString + span
- return
- }
-
- if (currentString) {
- ops.push(opFrom(currentString, attributes))
- }
-
- // If we've got a string, we might be able to concatenate it to another
- // same-attributed-string, so remember it and go to the next iteration.
- if (typeof span === 'string') {
- currentString = span
- attributes = next
- } else {
- // otherwise we have an embed "character" and should output it immediately.
- // embeds are always one-"character" in length.
- ops.push(opFrom(span, next))
- currentString = ''
- attributes = {}
- }
- }
- })
-
- // at the end, flush any accumulated string out
- if (currentString) {
- ops.push(opFrom(currentString, attributes))
- }
-
- return ops
-}
-
-function inverseAttributes(attributes) {
- let invertedAttributes = {}
- Object.keys(attributes).forEach((key) => {
- invertedAttributes[key] = null
- })
- return invertedAttributes
-}
-
-function applyDeleteOp(text, offset, op) {
- let length = op.delete
- while (length > 0) {
- if (isControlMarker(text.get(offset))) {
- offset += 1
- } else {
- // we need to not delete control characters, but we do delete embed characters
- text.deleteAt(offset, 1)
- length -= 1
- }
- }
- return [text, offset]
-}
-
-function applyRetainOp(text, offset, op) {
- let length = op.retain
-
- if (op.attributes) {
- text.insertAt(offset, { attributes: op.attributes })
- offset += 1
- }
-
- while (length > 0) {
- const char = text.get(offset)
- offset += 1
- if (!isControlMarker(char)) {
- length -= 1
- }
- }
-
- if (op.attributes) {
- text.insertAt(offset, { attributes: inverseAttributes(op.attributes) })
- offset += 1
- }
-
- return [text, offset]
-}
-
-
-function applyInsertOp(text, offset, op) {
- let originalOffset = offset
-
- if (typeof op.insert === 'string') {
- text.insertAt(offset, ...op.insert.split(''))
- offset += op.insert.length
- } else {
- // we have an embed or something similar
- text.insertAt(offset, op.insert)
- offset += 1
- }
-
- if (op.attributes) {
- text.insertAt(originalOffset, { attributes: op.attributes })
- offset += 1
- }
- if (op.attributes) {
- text.insertAt(offset, { attributes: inverseAttributes(op.attributes) })
- offset += 1
- }
- return [text, offset]
-}
-
-// XXX: uhhhhh, why can't I pass in text?
-function applyDeltaDocToAutomergeText(delta, doc) {
- let offset = 0
-
- delta.forEach(op => {
- if (op.retain) {
- [, offset] = applyRetainOp(doc.text, offset, op)
- } else if (op.delete) {
- [, offset] = applyDeleteOp(doc.text, offset, op)
- } else if (op.insert) {
- [, offset] = applyInsertOp(doc.text, offset, op)
- }
- })
-}
-
-describe('Automerge.Text', () => {
- let s1, s2
- beforeEach(() => {
- s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text())
- s2 = Automerge.merge(Automerge.init(), s1)
- })
-
- it('should support insertion', () => {
- s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a'))
- assert.strictEqual(s1.text.length, 1)
- assert.strictEqual(s1.text.get(0), 'a')
- assert.strictEqual(s1.text.toString(), 'a')
- //assert.strictEqual(s1.text.getElemId(0), `2@${Automerge.getActorId(s1)}`)
- })
-
- it('should support deletion', () => {
- s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', 'b', 'c'))
- s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 1))
- assert.strictEqual(s1.text.length, 2)
- assert.strictEqual(s1.text.get(0), 'a')
- assert.strictEqual(s1.text.get(1), 'c')
- assert.strictEqual(s1.text.toString(), 'ac')
- })
-
- it("should support implicit and explicit deletion", () => {
- s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a", "b", "c"))
- s1 = Automerge.change(s1, doc => doc.text.deleteAt(1))
- s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 0))
- assert.strictEqual(s1.text.length, 2)
- assert.strictEqual(s1.text.get(0), "a")
- assert.strictEqual(s1.text.get(1), "c")
- assert.strictEqual(s1.text.toString(), "ac")
- })
-
- it('should handle concurrent insertion', () => {
- s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', 'b', 'c'))
- s2 = Automerge.change(s2, doc => doc.text.insertAt(0, 'x', 'y', 'z'))
- s1 = Automerge.merge(s1, s2)
- assert.strictEqual(s1.text.length, 6)
- assertEqualsOneOf(s1.text.toString(), 'abcxyz', 'xyzabc')
- assertEqualsOneOf(s1.text.join(''), 'abcxyz', 'xyzabc')
- })
-
- it('should handle text and other ops in the same change', () => {
- s1 = Automerge.change(s1, doc => {
- doc.foo = 'bar'
- doc.text.insertAt(0, 'a')
- })
- assert.strictEqual(s1.foo, 'bar')
- assert.strictEqual(s1.text.toString(), 'a')
- assert.strictEqual(s1.text.join(''), 'a')
- })
-
- it('should serialize to JSON as a simple string', () => {
- s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', '"', 'b'))
- assert.strictEqual(JSON.stringify(s1), '{"text":"a\\"b"}')
- })
-
- it('should allow modification before an object is assigned to a document', () => {
- s1 = Automerge.change(Automerge.init(), doc => {
- const text = new Automerge.Text()
- text.insertAt(0, 'a', 'b', 'c', 'd')
- text.deleteAt(2)
- doc.text = text
- assert.strictEqual(doc.text.toString(), 'abd')
- assert.strictEqual(doc.text.join(''), 'abd')
- })
- assert.strictEqual(s1.text.toString(), 'abd')
- assert.strictEqual(s1.text.join(''), 'abd')
- })
-
- it('should allow modification after an object is assigned to a document', () => {
- s1 = Automerge.change(Automerge.init(), doc => {
- const text = new Automerge.Text()
- doc.text = text
- doc.text.insertAt(0, 'a', 'b', 'c', 'd')
- doc.text.deleteAt(2)
- assert.strictEqual(doc.text.toString(), 'abd')
- assert.strictEqual(doc.text.join(''), 'abd')
- })
- assert.strictEqual(s1.text.join(''), 'abd')
- })
-
- it('should not allow modification outside of a change callback', () => {
- assert.throws(() => s1.text.insertAt(0, 'a'), /object cannot be modified outside of a change block/)
- })
-
- describe('with initial value', () => {
- it('should accept a string as initial value', () => {
- let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text('init'))
- assert.strictEqual(s1.text.length, 4)
- assert.strictEqual(s1.text.get(0), 'i')
- assert.strictEqual(s1.text.get(1), 'n')
- assert.strictEqual(s1.text.get(2), 'i')
- assert.strictEqual(s1.text.get(3), 't')
- assert.strictEqual(s1.text.toString(), 'init')
- })
-
- it('should accept an array as initial value', () => {
- let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text(['i', 'n', 'i', 't']))
- assert.strictEqual(s1.text.length, 4)
- assert.strictEqual(s1.text.get(0), 'i')
- assert.strictEqual(s1.text.get(1), 'n')
- assert.strictEqual(s1.text.get(2), 'i')
- assert.strictEqual(s1.text.get(3), 't')
- assert.strictEqual(s1.text.toString(), 'init')
- })
-
- it('should initialize text in Automerge.from()', () => {
- let s1 = Automerge.from({text: new Automerge.Text('init')})
- assert.strictEqual(s1.text.length, 4)
- assert.strictEqual(s1.text.get(0), 'i')
- assert.strictEqual(s1.text.get(1), 'n')
- assert.strictEqual(s1.text.get(2), 'i')
- assert.strictEqual(s1.text.get(3), 't')
- assert.strictEqual(s1.text.toString(), 'init')
- })
-
- it('should encode the initial value as a change', () => {
- const s1 = Automerge.from({text: new Automerge.Text('init')})
- const changes = Automerge.getAllChanges(s1)
- assert.strictEqual(changes.length, 1)
- const [s2] = Automerge.applyChanges(Automerge.init(), changes)
- assert.strictEqual(s2.text instanceof Automerge.Text, true)
- assert.strictEqual(s2.text.toString(), 'init')
- assert.strictEqual(s2.text.join(''), 'init')
- })
-
- it('should allow immediate access to the value', () => {
- Automerge.change(Automerge.init(), doc => {
- const text = new Automerge.Text('init')
- assert.strictEqual(text.length, 4)
- assert.strictEqual(text.get(0), 'i')
- assert.strictEqual(text.toString(), 'init')
- doc.text = text
- assert.strictEqual(doc.text.length, 4)
- assert.strictEqual(doc.text.get(0), 'i')
- assert.strictEqual(doc.text.toString(), 'init')
- })
- })
-
- it('should allow pre-assignment modification of the initial value', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- const text = new Automerge.Text('init')
- text.deleteAt(3)
- assert.strictEqual(text.join(''), 'ini')
- doc.text = text
- assert.strictEqual(doc.text.join(''), 'ini')
- assert.strictEqual(doc.text.toString(), 'ini')
- })
- assert.strictEqual(s1.text.toString(), 'ini')
- assert.strictEqual(s1.text.join(''), 'ini')
- })
-
- it('should allow post-assignment modification of the initial value', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- const text = new Automerge.Text('init')
- doc.text = text
- doc.text.deleteAt(0)
- doc.text.insertAt(0, 'I')
- assert.strictEqual(doc.text.join(''), 'Init')
- assert.strictEqual(doc.text.toString(), 'Init')
- })
- assert.strictEqual(s1.text.join(''), 'Init')
- assert.strictEqual(s1.text.toString(), 'Init')
- })
- })
-
- describe('non-textual control characters', () => {
- let s1
- beforeEach(() => {
- s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text()
- doc.text.insertAt(0, 'a')
- doc.text.insertAt(1, { attribute: 'bold' })
- })
- })
-
- it('should allow fetching non-textual characters', () => {
- assert.deepEqual(s1.text.get(1), { attribute: 'bold' })
- //assert.strictEqual(s1.text.getElemId(1), `3@${Automerge.getActorId(s1)}`)
- })
-
- it('should include control characters in string length', () => {
- assert.strictEqual(s1.text.length, 2)
- assert.strictEqual(s1.text.get(0), 'a')
- })
-
- it('should exclude control characters from toString()', () => {
- assert.strictEqual(s1.text.toString(), 'a')
- })
-
- it('should allow control characters to be updated', () => {
- const s2 = Automerge.change(s1, doc => doc.text.get(1).attribute = 'italic')
- const s3 = Automerge.load(Automerge.save(s2))
- assert.strictEqual(s1.text.get(1).attribute, 'bold')
- assert.strictEqual(s2.text.get(1).attribute, 'italic')
- assert.strictEqual(s3.text.get(1).attribute, 'italic')
- })
-
- describe('spans interface to Text', () => {
- it('should return a simple string as a single span', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('hello world')
- })
- assert.deepEqual(s1.text.toSpans(), ['hello world'])
- })
- it('should return an empty string as an empty array', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text()
- })
- assert.deepEqual(s1.text.toSpans(), [])
- })
- it('should split a span at a control character', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('hello world')
- doc.text.insertAt(5, { attributes: { bold: true } })
- })
- assert.deepEqual(s1.text.toSpans(),
- ['hello', { attributes: { bold: true } }, ' world'])
- })
- it('should allow consecutive control characters', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('hello world')
- doc.text.insertAt(5, { attributes: { bold: true } })
- doc.text.insertAt(6, { attributes: { italic: true } })
- })
- assert.deepEqual(s1.text.toSpans(),
- ['hello',
- { attributes: { bold: true } },
- { attributes: { italic: true } },
- ' world'
- ])
- })
- it('should allow non-consecutive control characters', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('hello world')
- doc.text.insertAt(5, { attributes: { bold: true } })
- doc.text.insertAt(12, { attributes: { italic: true } })
- })
- assert.deepEqual(s1.text.toSpans(),
- ['hello',
- { attributes: { bold: true } },
- ' world',
- { attributes: { italic: true } }
- ])
- })
-
- it('should be convertable into a Quill delta', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('Gandalf the Grey')
- doc.text.insertAt(0, { attributes: { bold: true } })
- doc.text.insertAt(7 + 1, { attributes: { bold: null } })
- doc.text.insertAt(12 + 2, { attributes: { color: '#cccccc' } })
- })
-
- let deltaDoc = automergeTextToDeltaDoc(s1.text)
-
- // From https://quilljs.com/docs/delta/
- let expectedDoc = [
- { insert: 'Gandalf', attributes: { bold: true } },
- { insert: ' the ' },
- { insert: 'Grey', attributes: { color: '#cccccc' } }
- ]
-
- assert.deepEqual(deltaDoc, expectedDoc)
- })
-
- it('should support embeds', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('')
- doc.text.insertAt(0, { attributes: { link: 'https://quilljs.com' } })
- doc.text.insertAt(1, {
- image: 'https://quilljs.com/assets/images/icon.png'
- })
- doc.text.insertAt(2, { attributes: { link: null } })
- })
-
- let deltaDoc = automergeTextToDeltaDoc(s1.text)
-
- // From https://quilljs.com/docs/delta/
- let expectedDoc = [{
- // An image link
- insert: {
- image: 'https://quilljs.com/assets/images/icon.png'
- },
- attributes: {
- link: 'https://quilljs.com'
- }
- }]
-
- assert.deepEqual(deltaDoc, expectedDoc)
- })
-
- it('should handle concurrent overlapping spans', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('Gandalf the Grey')
- })
-
- let s2 = Automerge.merge(Automerge.init(), s1)
-
- let s3 = Automerge.change(s1, doc => {
- doc.text.insertAt(8, { attributes: { bold: true } })
- doc.text.insertAt(16 + 1, { attributes: { bold: null } })
- })
-
- let s4 = Automerge.change(s2, doc => {
- doc.text.insertAt(0, { attributes: { bold: true } })
- doc.text.insertAt(11 + 1, { attributes: { bold: null } })
- })
-
- let merged = Automerge.merge(s3, s4)
-
- let deltaDoc = automergeTextToDeltaDoc(merged.text)
-
- // From https://quilljs.com/docs/delta/
- let expectedDoc = [
- { insert: 'Gandalf the Grey', attributes: { bold: true } },
- ]
-
- assert.deepEqual(deltaDoc, expectedDoc)
- })
-
- it('should handle debolding spans', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('Gandalf the Grey')
- })
-
- let s2 = Automerge.merge(Automerge.init(), s1)
-
- let s3 = Automerge.change(s1, doc => {
- doc.text.insertAt(0, { attributes: { bold: true } })
- doc.text.insertAt(16 + 1, { attributes: { bold: null } })
- })
-
- let s4 = Automerge.change(s2, doc => {
- doc.text.insertAt(8, { attributes: { bold: null } })
- doc.text.insertAt(11 + 1, { attributes: { bold: true } })
- })
-
-
- let merged = Automerge.merge(s3, s4)
-
- let deltaDoc = automergeTextToDeltaDoc(merged.text)
-
- // From https://quilljs.com/docs/delta/
- let expectedDoc = [
- { insert: 'Gandalf ', attributes: { bold: true } },
- { insert: 'the' },
- { insert: ' Grey', attributes: { bold: true } },
- ]
-
- assert.deepEqual(deltaDoc, expectedDoc)
- })
-
- // xxx: how would this work for colors?
- it('should handle destyling across destyled spans', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('Gandalf the Grey')
- })
-
- let s2 = Automerge.merge(Automerge.init(), s1)
-
- let s3 = Automerge.change(s1, doc => {
- doc.text.insertAt(0, { attributes: { bold: true } })
- doc.text.insertAt(16 + 1, { attributes: { bold: null } })
- })
-
- let s4 = Automerge.change(s2, doc => {
- doc.text.insertAt(8, { attributes: { bold: null } })
- doc.text.insertAt(11 + 1, { attributes: { bold: true } })
- })
-
- let merged = Automerge.merge(s3, s4)
-
- let final = Automerge.change(merged, doc => {
- doc.text.insertAt(3 + 1, { attributes: { bold: null } })
- doc.text.insertAt(doc.text.length, { attributes: { bold: true } })
- })
-
- let deltaDoc = automergeTextToDeltaDoc(final.text)
-
- // From https://quilljs.com/docs/delta/
- let expectedDoc = [
- { insert: 'Gan', attributes: { bold: true } },
- { insert: 'dalf the Grey' },
- ]
-
- assert.deepEqual(deltaDoc, expectedDoc)
- })
-
- it('should apply an insert', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('Hello world')
- })
-
- const delta = [
- { retain: 6 },
- { insert: 'reader' },
- { delete: 5 }
- ]
-
- let s2 = Automerge.change(s1, doc => {
- applyDeltaDocToAutomergeText(delta, doc)
- })
-
- assert.strictEqual(s2.text.join(''), 'Hello reader')
- })
-
- it('should apply an insert with control characters', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('Hello world')
- })
-
- const delta = [
- { retain: 6 },
- { insert: 'reader', attributes: { bold: true } },
- { delete: 5 },
- { insert: '!' }
- ]
-
- let s2 = Automerge.change(s1, doc => {
- applyDeltaDocToAutomergeText(delta, doc)
- })
-
- assert.strictEqual(s2.text.toString(), 'Hello reader!')
- assert.deepEqual(s2.text.toSpans(), [
- "Hello ",
- { attributes: { bold: true } },
- "reader",
- { attributes: { bold: null } },
- "!"
- ])
- })
-
- it('should account for control characters in retain/delete lengths', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('Hello world')
- doc.text.insertAt(4, { attributes: { color: '#ccc' } })
- doc.text.insertAt(10, { attributes: { color: '#f00' } })
- })
-
- const delta = [
- { retain: 6 },
- { insert: 'reader', attributes: { bold: true } },
- { delete: 5 },
- { insert: '!' }
- ]
-
- let s2 = Automerge.change(s1, doc => {
- applyDeltaDocToAutomergeText(delta, doc)
- })
-
- assert.strictEqual(s2.text.toString(), 'Hello reader!')
- assert.deepEqual(s2.text.toSpans(), [
- "Hell",
- { attributes: { color: '#ccc'} },
- "o ",
- { attributes: { bold: true } },
- "reader",
- { attributes: { bold: null } },
- { attributes: { color: '#f00'} },
- "!"
- ])
- })
-
- it('should support embeds', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('')
- })
-
- let deltaDoc = [{
- // An image link
- insert: {
- image: 'https://quilljs.com/assets/images/icon.png'
- },
- attributes: {
- link: 'https://quilljs.com'
- }
- }]
-
- let s2 = Automerge.change(s1, doc => {
- applyDeltaDocToAutomergeText(deltaDoc, doc)
- })
-
- assert.deepEqual(s2.text.toSpans(), [
- { attributes: { link: 'https://quilljs.com' } },
- { image: 'https://quilljs.com/assets/images/icon.png'},
- { attributes: { link: null } },
- ])
- })
- })
- })
-
- it('should support unicode when creating text', () => {
- s1 = Automerge.from({
- text: new Automerge.Text('🐦')
- })
- assert.strictEqual(s1.text.get(0), '🐦')
- })
-})
diff --git a/automerge-js/test/uuid_test.js b/automerge-js/test/uuid_test.js
deleted file mode 100644
index a0f83df1..00000000
--- a/automerge-js/test/uuid_test.js
+++ /dev/null
@@ -1,32 +0,0 @@
-const assert = require('assert')
-const Automerge = require('..')
-
-const uuid = Automerge.uuid
-
-describe('uuid', () => {
- afterEach(() => {
- uuid.reset()
- })
-
- describe('default implementation', () => {
- it('generates unique values', () => {
- assert.notEqual(uuid(), uuid())
- })
- })
-
- describe('custom implementation', () => {
- let counter
-
- function customUuid() {
- return `custom-uuid-${counter++}`
- }
-
- before(() => uuid.setFactory(customUuid))
- beforeEach(() => counter = 0)
-
- it('invokes the custom factory', () => {
- assert.equal(uuid(), 'custom-uuid-0')
- assert.equal(uuid(), 'custom-uuid-1')
- })
- })
-})
diff --git a/automerge-wasm/index.d.ts b/automerge-wasm/index.d.ts
deleted file mode 100644
index f6b58bfe..00000000
--- a/automerge-wasm/index.d.ts
+++ /dev/null
@@ -1,164 +0,0 @@
-
-export type Actor = string;
-export type ObjID = string;
-export type Change = Uint8Array;
-export type SyncMessage = Uint8Array;
-export type Prop = string | number;
-export type Hash = string;
-export type Heads = Hash[];
-export type Value = string | number | boolean | null | Date | Uint8Array
-export type ObjType = string | Array | Object
-export type FullValue =
- ["str", string] |
- ["int", number] |
- ["uint", number] |
- ["f64", number] |
- ["boolean", boolean] |
- ["timestamp", Date] |
- ["counter", number] |
- ["bytes", Uint8Array] |
- ["null", Uint8Array] |
- ["map", ObjID] |
- ["list", ObjID] |
- ["text", ObjID] |
- ["table", ObjID]
-
-export enum ObjTypeName {
- list = "list",
- map = "map",
- table = "table",
- text = "text",
-}
-
-export type Datatype =
- "boolean" |
- "str" |
- "int" |
- "uint" |
- "f64" |
- "null" |
- "timestamp" |
- "counter" |
- "bytes" |
- "map" |
- "text" |
- "list";
-
-export type DecodedSyncMessage = {
- heads: Heads,
- need: Heads,
- have: any[]
- changes: Change[]
-}
-
-export type DecodedChange = {
- actor: Actor,
- seq: number
- startOp: number,
- time: number,
- message: string | null,
- deps: Heads,
- hash: Hash,
- ops: Op[]
-}
-
-export type Op = {
- action: string,
- obj: ObjID,
- key: string,
- value?: string | number | boolean,
- datatype?: string,
- pred: string[],
-}
-
-export type Patch = {
- obj: ObjID
- action: 'assign' | 'insert' | 'delete'
- key: Prop
- value: Value
- datatype: Datatype
- conflict: boolean
-}
-
-export function create(actor?: Actor): Automerge;
-export function load(data: Uint8Array, actor?: Actor): Automerge;
-export function encodeChange(change: DecodedChange): Change;
-export function decodeChange(change: Change): DecodedChange;
-export function initSyncState(): SyncState;
-export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage;
-export function decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage;
-export function encodeSyncState(state: SyncState): Uint8Array;
-export function decodeSyncState(data: Uint8Array): SyncState;
-
-export class Automerge {
- // change state
- put(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): undefined;
- putObject(obj: ObjID, prop: Prop, value: ObjType): ObjID;
- insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): undefined;
- insertObject(obj: ObjID, index: number, value: ObjType): ObjID;
- push(obj: ObjID, value: Value, datatype?: Datatype): undefined;
- pushObject(obj: ObjID, value: ObjType): ObjID;
- splice(obj: ObjID, start: number, delete_count: number, text?: string | Array): ObjID[] | undefined;
- increment(obj: ObjID, prop: Prop, value: number): void;
- delete(obj: ObjID, prop: Prop): void;
-
- // returns a single value - if there is a conflict return the winner
- get(obj: ObjID, prop: any, heads?: Heads): FullValue | null;
- // return all values in case of a conflict
- getAll(obj: ObjID, arg: any, heads?: Heads): FullValue[];
- keys(obj: ObjID, heads?: Heads): string[];
- text(obj: ObjID, heads?: Heads): string;
- length(obj: ObjID, heads?: Heads): number;
- materialize(obj?: ObjID, heads?: Heads): any;
-
- // transactions
- commit(message?: string, time?: number): Hash;
- merge(other: Automerge): Heads;
- getActorId(): Actor;
- pendingOps(): number;
- rollback(): number;
-
- // patches
- enablePatches(enable: boolean): void;
- popPatches(): Patch[];
-
- // save and load to local store
- save(): Uint8Array;
- saveIncremental(): Uint8Array;
- loadIncremental(data: Uint8Array): number;
-
- // sync over network
- receiveSyncMessage(state: SyncState, message: SyncMessage): void;
- generateSyncMessage(state: SyncState): SyncMessage | null;
-
- // low level change functions
- applyChanges(changes: Change[]): void;
- getChanges(have_deps: Heads): Change[];
- getChangeByHash(hash: Hash): Change | null;
- getChangesAdded(other: Automerge): Change[];
- getHeads(): Heads;
- getLastLocalChange(): Change;
- getMissingDeps(heads?: Heads): Heads;
-
- // memory management
- free(): void;
- clone(actor?: string): Automerge;
- fork(actor?: string): Automerge;
- forkAt(heads: Heads, actor?: string): Automerge;
-
- // dump internal state to console.log
- dump(): void;
-
- // dump internal state to a JS object
- toJS(): any;
-}
-
-export class SyncState {
- free(): void;
- clone(): SyncState;
- lastSentHeads: any;
- sentHashes: any;
- readonly sharedHeads: any;
-}
-
-export default function init (): Promise;
diff --git a/automerge-wasm/nodejs-index.js b/automerge-wasm/nodejs-index.js
deleted file mode 100644
index a8b9b1cd..00000000
--- a/automerge-wasm/nodejs-index.js
+++ /dev/null
@@ -1,6 +0,0 @@
-let wasm = require("./bindgen")
-module.exports = wasm
-module.exports.load = module.exports.loadDoc
-delete module.exports.loadDoc
-Object.defineProperty(module.exports, "__esModule", { value: true });
-module.exports.default = () => (new Promise((resolve,reject) => { resolve() }))
diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json
deleted file mode 100644
index a7243e3e..00000000
--- a/automerge-wasm/package.json
+++ /dev/null
@@ -1,49 +0,0 @@
-{
- "collaborators": [
- "Orion Henry ",
- "Alex Good ",
- "Martin Kleppmann"
- ],
- "name": "automerge-wasm",
- "description": "wasm-bindgen bindings to the automerge rust implementation",
- "homepage": "https://github.com/automerge/automerge-rs/tree/main/automerge-wasm",
- "repository": "github:automerge/automerge-rs",
- "version": "0.1.2",
- "license": "MIT",
- "files": [
- "README.md",
- "LICENSE",
- "package.json",
- "index.d.ts",
- "nodejs/index.js",
- "nodejs/bindgen.js",
- "nodejs/bindgen_bg.wasm",
- "web/index.js",
- "web/bindgen.js",
- "web/bindgen_bg.wasm"
- ],
- "types": "index.d.ts",
- "module": "./web/index.js",
- "main": "./nodejs/index.js",
- "scripts": {
- "build": "cross-env PROFILE=dev TARGET=nodejs yarn target",
- "release": "cross-env PROFILE=release yarn buildall",
- "buildall": "cross-env TARGET=nodejs yarn target && cross-env TARGET=web yarn target",
- "target": "rimraf ./$TARGET && wasm-pack build --target $TARGET --$PROFILE --out-name bindgen -d $TARGET && cp $TARGET-index.js $TARGET/index.js",
- "test": "ts-mocha -p tsconfig.json --type-check --bail --full-trace test/*.ts"
- },
- "dependencies": {},
- "devDependencies": {
- "@types/expect": "^24.3.0",
- "@types/jest": "^27.4.0",
- "@types/mocha": "^9.1.0",
- "@types/node": "^17.0.13",
- "cross-env": "^7.0.3",
- "fast-sha256": "^1.3.0",
- "mocha": "^9.1.3",
- "pako": "^2.0.4",
- "rimraf": "^3.0.2",
- "ts-mocha": "^9.0.2",
- "typescript": "^4.5.5"
- }
-}
diff --git a/automerge-wasm/src/interop.rs b/automerge-wasm/src/interop.rs
deleted file mode 100644
index be0927f7..00000000
--- a/automerge-wasm/src/interop.rs
+++ /dev/null
@@ -1,433 +0,0 @@
-use automerge as am;
-use automerge::transaction::Transactable;
-use automerge::{Change, ChangeHash, Prop};
-use js_sys::{Array, Object, Reflect, Uint8Array};
-use std::collections::HashSet;
-use std::fmt::Display;
-use wasm_bindgen::prelude::*;
-use wasm_bindgen::JsCast;
-
-use crate::{ObjId, ScalarValue, Value};
-
-pub(crate) struct JS(pub(crate) JsValue);
-pub(crate) struct AR(pub(crate) Array);
-
-impl From for JsValue {
- fn from(ar: AR) -> Self {
- ar.0.into()
- }
-}
-
-impl From for JsValue {
- fn from(js: JS) -> Self {
- js.0
- }
-}
-
-impl From for JS {
- fn from(state: am::sync::State) -> Self {
- let shared_heads: JS = state.shared_heads.into();
- let last_sent_heads: JS = state.last_sent_heads.into();
- let their_heads: JS = state.their_heads.into();
- let their_need: JS = state.their_need.into();
- let sent_hashes: JS = state.sent_hashes.into();
- let their_have = if let Some(have) = &state.their_have {
- JsValue::from(AR::from(have.as_slice()).0)
- } else {
- JsValue::null()
- };
- let result: JsValue = Object::new().into();
- // we can unwrap here b/c we made the object and know its not frozen
- Reflect::set(&result, &"sharedHeads".into(), &shared_heads.0).unwrap();
- Reflect::set(&result, &"lastSentHeads".into(), &last_sent_heads.0).unwrap();
- Reflect::set(&result, &"theirHeads".into(), &their_heads.0).unwrap();
- Reflect::set(&result, &"theirNeed".into(), &their_need.0).unwrap();
- Reflect::set(&result, &"theirHave".into(), &their_have).unwrap();
- Reflect::set(&result, &"sentHashes".into(), &sent_hashes.0).unwrap();
- JS(result)
- }
-}
-
-impl From> for JS {
- fn from(heads: Vec) -> Self {
- let heads: Array = heads
- .iter()
- .map(|h| JsValue::from_str(&h.to_string()))
- .collect();
- JS(heads.into())
- }
-}
-
-impl From> for JS {
- fn from(heads: HashSet) -> Self {
- let result: JsValue = Object::new().into();
- for key in &heads {
- Reflect::set(&result, &key.to_string().into(), &true.into()).unwrap();
- }
- JS(result)
- }
-}
-
-impl From>> for JS {
- fn from(heads: Option>) -> Self {
- if let Some(v) = heads {
- let v: Array = v
- .iter()
- .map(|h| JsValue::from_str(&h.to_string()))
- .collect();
- JS(v.into())
- } else {
- JS(JsValue::null())
- }
- }
-}
-
-impl TryFrom for HashSet {
- type Error = JsValue;
-
- fn try_from(value: JS) -> Result {
- let mut result = HashSet::new();
- for key in Reflect::own_keys(&value.0)?.iter() {
- if let Some(true) = Reflect::get(&value.0, &key)?.as_bool() {
- result.insert(key.into_serde().map_err(to_js_err)?);
- }
- }
- Ok(result)
- }
-}
-
-impl TryFrom for Vec {
- type Error = JsValue;
-
- fn try_from(value: JS) -> Result {
- let value = value.0.dyn_into::()?;
- let value: Result, _> = value.iter().map(|j| j.into_serde()).collect();
- let value = value.map_err(to_js_err)?;
- Ok(value)
- }
-}
-
-impl From for Option> {
- fn from(value: JS) -> Self {
- let value = value.0.dyn_into::().ok()?;
- let value: Result, _> = value.iter().map(|j| j.into_serde()).collect();
- let value = value.ok()?;
- Some(value)
- }
-}
-
-impl TryFrom for Vec {
- type Error = JsValue;
-
- fn try_from(value: JS) -> Result {
- let value = value.0.dyn_into::()?;
- let changes: Result, _> = value.iter().map(|j| j.dyn_into()).collect();
- let changes = changes?;
- let changes: Result, _> = changes
- .iter()
- .map(|a| Change::try_from(a.to_vec()))
- .collect();
- let changes = changes.map_err(to_js_err)?;
- Ok(changes)
- }
-}
-
-impl TryFrom for am::sync::State {
- type Error = JsValue;
-
- fn try_from(value: JS) -> Result {
- let value = value.0;
- let shared_heads = js_get(&value, "sharedHeads")?.try_into()?;
- let last_sent_heads = js_get(&value, "lastSentHeads")?.try_into()?;
- let their_heads = js_get(&value, "theirHeads")?.into();
- let their_need = js_get(&value, "theirNeed")?.into();
- let their_have = js_get(&value, "theirHave")?.try_into()?;
- let sent_hashes = js_get(&value, "sentHashes")?.try_into()?;
- Ok(am::sync::State {
- shared_heads,
- last_sent_heads,
- their_heads,
- their_need,
- their_have,
- sent_hashes,
- })
- }
-}
-
-impl TryFrom for Option> {
- type Error = JsValue;
-
- fn try_from(value: JS) -> Result {
- if value.0.is_null() {
- Ok(None)
- } else {
- Ok(Some(value.try_into()?))
- }
- }
-}
-
-impl TryFrom for Vec {
- type Error = JsValue;
-
- fn try_from(value: JS) -> Result {
- let value = value.0.dyn_into::()?;
- let have: Result, JsValue> = value
- .iter()
- .map(|s| {
- let last_sync = js_get(&s, "lastSync")?.try_into()?;
- let bloom = js_get(&s, "bloom")?.try_into()?;
- Ok(am::sync::Have { last_sync, bloom })
- })
- .collect();
- let have = have?;
- Ok(have)
- }
-}
-
-impl TryFrom for am::sync::BloomFilter {
- type Error = JsValue;
-
- fn try_from(value: JS) -> Result {
- let value: Uint8Array = value.0.dyn_into()?;
- let value = value.to_vec();
- let value = value.as_slice().try_into().map_err(to_js_err)?;
- Ok(value)
- }
-}
-
-impl From<&[ChangeHash]> for AR {
- fn from(value: &[ChangeHash]) -> Self {
- AR(value
- .iter()
- .map(|h| JsValue::from_str(&hex::encode(&h.0)))
- .collect())
- }
-}
-
-impl From<&[Change]> for AR {
- fn from(value: &[Change]) -> Self {
- let changes: Array = value
- .iter()
- .map(|c| Uint8Array::from(c.raw_bytes()))
- .collect();
- AR(changes)
- }
-}
-
-impl From<&[am::sync::Have]> for AR {
- fn from(value: &[am::sync::Have]) -> Self {
- AR(value
- .iter()
- .map(|have| {
- let last_sync: Array = have
- .last_sync
- .iter()
- .map(|h| JsValue::from_str(&hex::encode(&h.0)))
- .collect();
- // FIXME - the clone and the unwrap here shouldnt be needed - look at into_bytes()
- let bloom = Uint8Array::from(have.bloom.to_bytes().as_slice());
- let obj: JsValue = Object::new().into();
- // we can unwrap here b/c we created the object and know its not frozen
- Reflect::set(&obj, &"lastSync".into(), &last_sync.into()).unwrap();
- Reflect::set(&obj, &"bloom".into(), &bloom.into()).unwrap();
- obj
- })
- .collect())
- }
-}
-
-pub(crate) fn to_js_err(err: T) -> JsValue {
- js_sys::Error::new(&std::format!("{}", err)).into()
-}
-
-pub(crate) fn js_get>(obj: J, prop: &str) -> Result {
- Ok(JS(Reflect::get(&obj.into(), &prop.into())?))
-}
-
-pub(crate) fn js_set>(obj: &JsValue, prop: &str, val: V) -> Result {
- Reflect::set(obj, &prop.into(), &val.into())
-}
-
-pub(crate) fn to_prop(p: JsValue) -> Result {
- if let Some(s) = p.as_string() {
- Ok(Prop::Map(s))
- } else if let Some(n) = p.as_f64() {
- Ok(Prop::Seq(n as usize))
- } else {
- Err(to_js_err("prop must me a string or number"))
- }
-}
-
-pub(crate) fn to_objtype(
- value: &JsValue,
- datatype: &Option,
-) -> Option<(am::ObjType, Vec<(Prop, JsValue)>)> {
- match datatype.as_deref() {
- Some("map") => {
- let map = value.clone().dyn_into::().ok()?;
- // FIXME unwrap
- let map = js_sys::Object::keys(&map)
- .iter()
- .zip(js_sys::Object::values(&map).iter())
- .map(|(key, val)| (key.as_string().unwrap().into(), val))
- .collect();
- Some((am::ObjType::Map, map))
- }
- Some("list") => {
- let list = value.clone().dyn_into::().ok()?;
- let list = list
- .iter()
- .enumerate()
- .map(|(i, e)| (i.into(), e))
- .collect();
- Some((am::ObjType::List, list))
- }
- Some("text") => {
- let text = value.as_string()?;
- let text = text
- .chars()
- .enumerate()
- .map(|(i, ch)| (i.into(), ch.to_string().into()))
- .collect();
- Some((am::ObjType::Text, text))
- }
- Some(_) => None,
- None => {
- if let Ok(list) = value.clone().dyn_into::() {
- let list = list
- .iter()
- .enumerate()
- .map(|(i, e)| (i.into(), e))
- .collect();
- Some((am::ObjType::List, list))
- } else if let Ok(map) = value.clone().dyn_into::() {
- // FIXME unwrap
- let map = js_sys::Object::keys(&map)
- .iter()
- .zip(js_sys::Object::values(&map).iter())
- .map(|(key, val)| (key.as_string().unwrap().into(), val))
- .collect();
- Some((am::ObjType::Map, map))
- } else if let Some(text) = value.as_string() {
- let text = text
- .chars()
- .enumerate()
- .map(|(i, ch)| (i.into(), ch.to_string().into()))
- .collect();
- Some((am::ObjType::Text, text))
- } else {
- None
- }
- }
- }
-}
-
-pub(crate) fn get_heads(heads: Option) -> Option> {
- let heads = heads?;
- let heads: Result, _> = heads.iter().map(|j| j.into_serde()).collect();
- heads.ok()
-}
-
-pub(crate) fn map_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue {
- let keys = doc.keys(obj);
- let map = Object::new();
- for k in keys {
- let val = doc.get(obj, &k);
- match val {
- Ok(Some((Value::Object(o), exid)))
- if o == am::ObjType::Map || o == am::ObjType::Table =>
- {
- Reflect::set(&map, &k.into(), &map_to_js(doc, &exid)).unwrap();
- }
- Ok(Some((Value::Object(o), exid))) if o == am::ObjType::List => {
- Reflect::set(&map, &k.into(), &list_to_js(doc, &exid)).unwrap();
- }
- Ok(Some((Value::Object(o), exid))) if o == am::ObjType::Text => {
- Reflect::set(&map, &k.into(), &doc.text(&exid).unwrap().into()).unwrap();
- }
- Ok(Some((Value::Scalar(v), _))) => {
- Reflect::set(&map, &k.into(), &ScalarValue(v).into()).unwrap();
- }
- _ => (),
- };
- }
- map.into()
-}
-
-pub(crate) fn map_to_js_at(doc: &am::AutoCommit, obj: &ObjId, heads: &[ChangeHash]) -> JsValue {
- let keys = doc.keys(obj);
- let map = Object::new();
- for k in keys {
- let val = doc.get_at(obj, &k, heads);
- match val {
- Ok(Some((Value::Object(o), exid)))
- if o == am::ObjType::Map || o == am::ObjType::Table =>
- {
- Reflect::set(&map, &k.into(), &map_to_js_at(doc, &exid, heads)).unwrap();
- }
- Ok(Some((Value::Object(o), exid))) if o == am::ObjType::List => {
- Reflect::set(&map, &k.into(), &list_to_js_at(doc, &exid, heads)).unwrap();
- }
- Ok(Some((Value::Object(o), exid))) if o == am::ObjType::Text => {
- Reflect::set(&map, &k.into(), &doc.text_at(&exid, heads).unwrap().into()).unwrap();
- }
- Ok(Some((Value::Scalar(v), _))) => {
- Reflect::set(&map, &k.into(), &ScalarValue(v).into()).unwrap();
- }
- _ => (),
- };
- }
- map.into()
-}
-
-pub(crate) fn list_to_js(doc: &am::AutoCommit, obj: &ObjId) -> JsValue {
- let len = doc.length(obj);
- let array = Array::new();
- for i in 0..len {
- let val = doc.get(obj, i as usize);
- match val {
- Ok(Some((Value::Object(o), exid)))
- if o == am::ObjType::Map || o == am::ObjType::Table =>
- {
- array.push(&map_to_js(doc, &exid));
- }
- Ok(Some((Value::Object(o), exid))) if o == am::ObjType::List => {
- array.push(&list_to_js(doc, &exid));
- }
- Ok(Some((Value::Object(o), exid))) if o == am::ObjType::Text => {
- array.push(&doc.text(&exid).unwrap().into());
- }
- Ok(Some((Value::Scalar(v), _))) => {
- array.push(&ScalarValue(v).into());
- }
- _ => (),
- };
- }
- array.into()
-}
-
-pub(crate) fn list_to_js_at(doc: &am::AutoCommit, obj: &ObjId, heads: &[ChangeHash]) -> JsValue {
- let len = doc.length(obj);
- let array = Array::new();
- for i in 0..len {
- let val = doc.get_at(obj, i as usize, heads);
- match val {
- Ok(Some((Value::Object(o), exid)))
- if o == am::ObjType::Map || o == am::ObjType::Table =>
- {
- array.push(&map_to_js_at(doc, &exid, heads));
- }
- Ok(Some((Value::Object(o), exid))) if o == am::ObjType::List => {
- array.push(&list_to_js_at(doc, &exid, heads));
- }
- Ok(Some((Value::Object(o), exid))) if o == am::ObjType::Text => {
- array.push(&doc.text_at(exid, heads).unwrap().into());
- }
- Ok(Some((Value::Scalar(v), _))) => {
- array.push(&ScalarValue(v).into());
- }
- _ => (),
- };
- }
- array.into()
-}
diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs
deleted file mode 100644
index 4429c0c8..00000000
--- a/automerge-wasm/src/lib.rs
+++ /dev/null
@@ -1,919 +0,0 @@
-#![doc(
- html_logo_url = "https://raw.githubusercontent.com/automerge/automerge-rs/main/img/brandmark.svg",
- html_favicon_url = "https:///raw.githubusercontent.com/automerge/automerge-rs/main/img/favicon.ico"
-)]
-#![warn(
- missing_debug_implementations,
- // missing_docs, // TODO: add documentation!
- rust_2021_compatibility,
- rust_2018_idioms,
- unreachable_pub,
- bad_style,
- const_err,
- dead_code,
- improper_ctypes,
- non_shorthand_field_patterns,
- no_mangle_generic_items,
- overflowing_literals,
- path_statements,
- patterns_in_fns_without_body,
- private_in_public,
- unconditional_recursion,
- unused,
- unused_allocation,
- unused_comparisons,
- unused_parens,
- while_true
-)]
-#![allow(clippy::unused_unit)]
-use am::transaction::CommitOptions;
-use am::transaction::Transactable;
-use am::ApplyOptions;
-use automerge as am;
-use automerge::Patch;
-use automerge::VecOpObserver;
-use automerge::{Change, ObjId, Prop, Value, ROOT};
-use js_sys::{Array, Object, Uint8Array};
-use std::convert::TryInto;
-use wasm_bindgen::prelude::*;
-use wasm_bindgen::JsCast;
-
-mod interop;
-mod sync;
-mod value;
-
-use interop::{
- get_heads, js_get, js_set, list_to_js, list_to_js_at, map_to_js, map_to_js_at, to_js_err,
- to_objtype, to_prop, AR, JS,
-};
-use sync::SyncState;
-use value::{datatype, ScalarValue};
-
-#[allow(unused_macros)]
-macro_rules! log {
- ( $( $t:tt )* ) => {
- web_sys::console::log_1(&format!( $( $t )* ).into());
- };
-}
-
-#[cfg(feature = "wee_alloc")]
-#[global_allocator]
-static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
-
-#[wasm_bindgen]
-#[derive(Debug)]
-pub struct Automerge {
- doc: automerge::AutoCommit,
- observer: Option,
-}
-
-#[wasm_bindgen]
-impl Automerge {
- pub fn new(actor: Option) -> Result {
- let mut automerge = automerge::AutoCommit::new();
- if let Some(a) = actor {
- let a = automerge::ActorId::from(hex::decode(a).map_err(to_js_err)?.to_vec());
- automerge.set_actor(a);
- }
- Ok(Automerge {
- doc: automerge,
- observer: None,
- })
- }
-
- fn ensure_transaction_closed(&mut self) {
- if self.doc.pending_ops() > 0 {
- let mut opts = CommitOptions::default();
- if let Some(observer) = self.observer.as_mut() {
- opts.set_op_observer(observer);
- }
- self.doc.commit_with(opts);
- }
- }
-
- #[allow(clippy::should_implement_trait)]
- pub fn clone(&mut self, actor: Option) -> Result {
- self.ensure_transaction_closed();
- let mut automerge = Automerge {
- doc: self.doc.clone(),
- observer: None,
- };
- if let Some(s) = actor {
- let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec());
- automerge.doc.set_actor(actor);
- }
- Ok(automerge)
- }
-
- pub fn fork(&mut self, actor: Option) -> Result {
- self.ensure_transaction_closed();
- let mut automerge = Automerge {
- doc: self.doc.fork(),
- observer: None,
- };
- if let Some(s) = actor {
- let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec());
- automerge.doc.set_actor(actor);
- }
- Ok(automerge)
- }
-
- #[wasm_bindgen(js_name = forkAt)]
- pub fn fork_at(&mut self, heads: JsValue, actor: Option) -> Result {
- let deps: Vec<_> = JS(heads).try_into()?;
- let mut automerge = Automerge {
- doc: self.doc.fork_at(&deps)?,
- observer: None,
- };
- if let Some(s) = actor {
- let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec());
- automerge.doc.set_actor(actor);
- }
- Ok(automerge)
- }
-
- pub fn free(self) {}
-
- #[wasm_bindgen(js_name = pendingOps)]
- pub fn pending_ops(&self) -> JsValue {
- (self.doc.pending_ops() as u32).into()
- }
-
- pub fn commit(&mut self, message: Option, time: Option) -> JsValue {
- let mut commit_opts = CommitOptions::default();
- if let Some(message) = message {
- commit_opts.set_message(message);
- }
- if let Some(time) = time {
- commit_opts.set_time(time as i64);
- }
- if let Some(observer) = self.observer.as_mut() {
- commit_opts.set_op_observer(observer);
- }
- let hash = self.doc.commit_with(commit_opts);
- JsValue::from_str(&hex::encode(&hash.0))
- }
-
- pub fn merge(&mut self, other: &mut Automerge) -> Result {
- self.ensure_transaction_closed();
- let options = if let Some(observer) = self.observer.as_mut() {
- ApplyOptions::default().with_op_observer(observer)
- } else {
- ApplyOptions::default()
- };
- let heads = self.doc.merge_with(&mut other.doc, options)?;
- let heads: Array = heads
- .iter()
- .map(|h| JsValue::from_str(&hex::encode(&h.0)))
- .collect();
- Ok(heads)
- }
-
- pub fn rollback(&mut self) -> f64 {
- self.doc.rollback() as f64
- }
-
- pub fn keys(&self, obj: JsValue, heads: Option) -> Result {
- let obj = self.import(obj)?;
- let result = if let Some(heads) = get_heads(heads) {
- self.doc
- .keys_at(&obj, &heads)
- .map(|s| JsValue::from_str(&s))
- .collect()
- } else {
- self.doc.keys(&obj).map(|s| JsValue::from_str(&s)).collect()
- };
- Ok(result)
- }
-
- pub fn text(&self, obj: JsValue, heads: Option) -> Result {
- let obj = self.import(obj)?;
- if let Some(heads) = get_heads(heads) {
- Ok(self.doc.text_at(&obj, &heads)?)
- } else {
- Ok(self.doc.text(&obj)?)
- }
- }
-
- pub fn splice(
- &mut self,
- obj: JsValue,
- start: f64,
- delete_count: f64,
- text: JsValue,
- ) -> Result<(), JsValue> {
- let obj = self.import(obj)?;
- let start = start as usize;
- let delete_count = delete_count as usize;
- let mut vals = vec![];
- if let Some(t) = text.as_string() {
- self.doc.splice_text(&obj, start, delete_count, &t)?;
- } else {
- if let Ok(array) = text.dyn_into::() {
- for i in array.iter() {
- let value = self
- .import_scalar(&i, &None)
- .ok_or_else(|| to_js_err("expected scalar"))?;
- vals.push(value);
- }
- }
- self.doc
- .splice(&obj, start, delete_count, vals.into_iter())?;
- }
- Ok(())
- }
-
- pub fn push(&mut self, obj: JsValue, value: JsValue, datatype: JsValue) -> Result<(), JsValue> {
- let obj = self.import(obj)?;
- let value = self
- .import_scalar(&value, &datatype.as_string())
- .ok_or_else(|| to_js_err("invalid scalar value"))?;
- let index = self.doc.length(&obj);
- self.doc.insert(&obj, index, value)?;
- Ok(())
- }
-
- #[wasm_bindgen(js_name = pushObject)]
- pub fn push_object(&mut self, obj: JsValue, value: JsValue) -> Result, JsValue> {
- let obj = self.import(obj)?;
- let (value, subvals) =
- to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?;
- let index = self.doc.length(&obj);
- let opid = self.doc.insert_object(&obj, index, value)?;
- self.subset(&opid, subvals)?;
- Ok(opid.to_string().into())
- }
-
- pub fn insert(
- &mut self,
- obj: JsValue,
- index: f64,
- value: JsValue,
- datatype: JsValue,
- ) -> Result<(), JsValue> {
- let obj = self.import(obj)?;
- let index = index as f64;
- let value = self
- .import_scalar(&value, &datatype.as_string())
- .ok_or_else(|| to_js_err("expected scalar value"))?;
- self.doc.insert(&obj, index as usize, value)?;
- Ok(())
- }
-
- #[wasm_bindgen(js_name = insertObject)]
- pub fn insert_object(
- &mut self,
- obj: JsValue,
- index: f64,
- value: JsValue,
- ) -> Result , JsValue> {
- let obj = self.import(obj)?;
- let index = index as f64;
- let (value, subvals) =
- to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?;
- let opid = self.doc.insert_object(&obj, index as usize, value)?;
- self.subset(&opid, subvals)?;
- Ok(opid.to_string().into())
- }
-
- pub fn put(
- &mut self,
- obj: JsValue,
- prop: JsValue,
- value: JsValue,
- datatype: JsValue,
- ) -> Result<(), JsValue> {
- let obj = self.import(obj)?;
- let prop = self.import_prop(prop)?;
- let value = self
- .import_scalar(&value, &datatype.as_string())
- .ok_or_else(|| to_js_err("expected scalar value"))?;
- self.doc.put(&obj, prop, value)?;
- Ok(())
- }
-
- #[wasm_bindgen(js_name = putObject)]
- pub fn put_object(
- &mut self,
- obj: JsValue,
- prop: JsValue,
- value: JsValue,
- ) -> Result {
- let obj = self.import(obj)?;
- let prop = self.import_prop(prop)?;
- let (value, subvals) =
- to_objtype(&value, &None).ok_or_else(|| to_js_err("expected object"))?;
- let opid = self.doc.put_object(&obj, prop, value)?;
- self.subset(&opid, subvals)?;
- Ok(opid.to_string().into())
- }
-
- fn subset(&mut self, obj: &am::ObjId, vals: Vec<(am::Prop, JsValue)>) -> Result<(), JsValue> {
- for (p, v) in vals {
- let (value, subvals) = self.import_value(&v, None)?;
- //let opid = self.0.set(id, p, value)?;
- let opid = match (p, value) {
- (Prop::Map(s), Value::Object(objtype)) => {
- Some(self.doc.put_object(obj, s, objtype)?)
- }
- (Prop::Map(s), Value::Scalar(scalar)) => {
- self.doc.put(obj, s, scalar.into_owned())?;
- None
- }
- (Prop::Seq(i), Value::Object(objtype)) => {
- Some(self.doc.insert_object(obj, i, objtype)?)
- }
- (Prop::Seq(i), Value::Scalar(scalar)) => {
- self.doc.insert(obj, i, scalar.into_owned())?;
- None
- }
- };
- if let Some(opid) = opid {
- self.subset(&opid, subvals)?;
- }
- }
- Ok(())
- }
-
- pub fn increment(
- &mut self,
- obj: JsValue,
- prop: JsValue,
- value: JsValue,
- ) -> Result<(), JsValue> {
- let obj = self.import(obj)?;
- let prop = self.import_prop(prop)?;
- let value: f64 = value
- .as_f64()
- .ok_or_else(|| to_js_err("increment needs a numeric value"))?;
- self.doc.increment(&obj, prop, value as i64)?;
- Ok(())
- }
-
- #[wasm_bindgen(js_name = get)]
- pub fn get(
- &self,
- obj: JsValue,
- prop: JsValue,
- heads: Option,
- ) -> Result, JsValue> {
- let obj = self.import(obj)?;
- let result = Array::new();
- let prop = to_prop(prop);
- let heads = get_heads(heads);
- if let Ok(prop) = prop {
- let value = if let Some(h) = heads {
- self.doc.get_at(&obj, prop, &h)?
- } else {
- self.doc.get(&obj, prop)?
- };
- match value {
- Some((Value::Object(obj_type), obj_id)) => {
- result.push(&obj_type.to_string().into());
- result.push(&obj_id.to_string().into());
- Ok(Some(result))
- }
- Some((Value::Scalar(value), _)) => {
- result.push(&datatype(&value).into());
- result.push(&ScalarValue(value).into());
- Ok(Some(result))
- }
- None => Ok(None),
- }
- } else {
- Ok(None)
- }
- }
-
- #[wasm_bindgen(js_name = getAll)]
- pub fn get_all(
- &self,
- obj: JsValue,
- arg: JsValue,
- heads: Option,
- ) -> Result {
- let obj = self.import(obj)?;
- let result = Array::new();
- let prop = to_prop(arg);
- if let Ok(prop) = prop {
- let values = if let Some(heads) = get_heads(heads) {
- self.doc.get_all_at(&obj, prop, &heads)
- } else {
- self.doc.get_all(&obj, prop)
- }
- .map_err(to_js_err)?;
- for value in values {
- match value {
- (Value::Object(obj_type), obj_id) => {
- let sub = Array::new();
- sub.push(&obj_type.to_string().into());
- sub.push(&obj_id.to_string().into());
- result.push(&sub.into());
- }
- (Value::Scalar(value), id) => {
- let sub = Array::new();
- sub.push(&datatype(&value).into());
- sub.push(&ScalarValue(value).into());
- sub.push(&id.to_string().into());
- result.push(&sub.into());
- }
- }
- }
- }
- Ok(result)
- }
-
- #[wasm_bindgen(js_name = enablePatches)]
- pub fn enable_patches(&mut self, enable: JsValue) -> Result<(), JsValue> {
- let enable = enable
- .as_bool()
- .ok_or_else(|| to_js_err("expected boolean"))?;
- if enable {
- if self.observer.is_none() {
- self.observer = Some(VecOpObserver::default());
- }
- } else {
- self.observer = None;
- }
- Ok(())
- }
-
- #[wasm_bindgen(js_name = popPatches)]
- pub fn pop_patches(&mut self) -> Result {
- // transactions send out observer updates as they occur, not waiting for them to be
- // committed.
- // If we pop the patches then we won't be able to revert them.
- self.ensure_transaction_closed();
-
- let patches = self
- .observer
- .as_mut()
- .map_or_else(Vec::new, |o| o.take_patches());
- let result = Array::new();
- for p in patches {
- let patch = Object::new();
- match p {
- Patch::Put {
- obj,
- key,
- value,
- conflict,
- } => {
- js_set(&patch, "action", "put")?;
- js_set(&patch, "obj", obj.to_string())?;
- js_set(&patch, "key", key)?;
- match value {
- (Value::Object(obj_type), obj_id) => {
- js_set(&patch, "datatype", obj_type.to_string())?;
- js_set(&patch, "value", obj_id.to_string())?;
- }
- (Value::Scalar(value), _) => {
- js_set(&patch, "datatype", datatype(&value))?;
- js_set(&patch, "value", ScalarValue(value))?;
- }
- };
- js_set(&patch, "conflict", conflict)?;
- }
-
- Patch::Insert { obj, index, value } => {
- js_set(&patch, "action", "insert")?;
- js_set(&patch, "obj", obj.to_string())?;
- js_set(&patch, "key", index as f64)?;
- match value {
- (Value::Object(obj_type), obj_id) => {
- js_set(&patch, "datatype", obj_type.to_string())?;
- js_set(&patch, "value", obj_id.to_string())?;
- }
- (Value::Scalar(value), _) => {
- js_set(&patch, "datatype", datatype(&value))?;
- js_set(&patch, "value", ScalarValue(value))?;
- }
- };
- }
-
- Patch::Increment { obj, key, value } => {
- js_set(&patch, "action", "increment")?;
- js_set(&patch, "obj", obj.to_string())?;
- js_set(&patch, "key", key)?;
- js_set(&patch, "value", value.0)?;
- }
-
- Patch::Delete { obj, key } => {
- js_set(&patch, "action", "delete")?;
- js_set(&patch, "obj", obj.to_string())?;
- js_set(&patch, "key", key)?;
- }
- }
- result.push(&patch);
- }
- Ok(result)
- }
-
- pub fn length(&self, obj: JsValue, heads: Option) -> Result {
- let obj = self.import(obj)?;
- if let Some(heads) = get_heads(heads) {
- Ok(self.doc.length_at(&obj, &heads) as f64)
- } else {
- Ok(self.doc.length(&obj) as f64)
- }
- }
-
- pub fn delete(&mut self, obj: JsValue, prop: JsValue) -> Result<(), JsValue> {
- let obj = self.import(obj)?;
- let prop = to_prop(prop)?;
- self.doc.delete(&obj, prop).map_err(to_js_err)?;
- Ok(())
- }
-
- pub fn save(&mut self) -> Uint8Array {
- self.ensure_transaction_closed();
- Uint8Array::from(self.doc.save().as_slice())
- }
-
- #[wasm_bindgen(js_name = saveIncremental)]
- pub fn save_incremental(&mut self) -> Uint8Array {
- self.ensure_transaction_closed();
- let bytes = self.doc.save_incremental();
- Uint8Array::from(bytes.as_slice())
- }
-
- #[wasm_bindgen(js_name = loadIncremental)]
- pub fn load_incremental(&mut self, data: Uint8Array) -> Result {
- self.ensure_transaction_closed();
- let data = data.to_vec();
- let options = if let Some(observer) = self.observer.as_mut() {
- ApplyOptions::default().with_op_observer(observer)
- } else {
- ApplyOptions::default()
- };
- let len = self
- .doc
- .load_incremental_with(&data, options)
- .map_err(to_js_err)?;
- Ok(len as f64)
- }
-
- #[wasm_bindgen(js_name = applyChanges)]
- pub fn apply_changes(&mut self, changes: JsValue) -> Result<(), JsValue> {
- self.ensure_transaction_closed();
- let changes: Vec<_> = JS(changes).try_into()?;
- let options = if let Some(observer) = self.observer.as_mut() {
- ApplyOptions::default().with_op_observer(observer)
- } else {
- ApplyOptions::default()
- };
- self.doc
- .apply_changes_with(changes, options)
- .map_err(to_js_err)?;
- Ok(())
- }
-
- #[wasm_bindgen(js_name = getChanges)]
- pub fn get_changes(&mut self, have_deps: JsValue) -> Result {
- self.ensure_transaction_closed();
- let deps: Vec<_> = JS(have_deps).try_into()?;
- let changes = self.doc.get_changes(&deps);
- let changes: Array = changes
- .iter()
- .map(|c| Uint8Array::from(c.raw_bytes()))
- .collect();
- Ok(changes)
- }
-
- #[wasm_bindgen(js_name = getChangeByHash)]
- pub fn get_change_by_hash(&mut self, hash: JsValue) -> Result {
- self.ensure_transaction_closed();
- let hash = hash.into_serde().map_err(to_js_err)?;
- let change = self.doc.get_change_by_hash(&hash);
- if let Some(c) = change {
- Ok(Uint8Array::from(c.raw_bytes()).into())
- } else {
- Ok(JsValue::null())
- }
- }
-
- #[wasm_bindgen(js_name = getChangesAdded)]
- pub fn get_changes_added(&mut self, other: &mut Automerge) -> Result {
- self.ensure_transaction_closed();
- let changes = self.doc.get_changes_added(&mut other.doc);
- let changes: Array = changes
- .iter()
- .map(|c| Uint8Array::from(c.raw_bytes()))
- .collect();
- Ok(changes)
- }
-
- #[wasm_bindgen(js_name = getHeads)]
- pub fn get_heads(&mut self) -> Array {
- self.ensure_transaction_closed();
- let heads = self.doc.get_heads();
- let heads: Array = heads
- .iter()
- .map(|h| JsValue::from_str(&hex::encode(&h.0)))
- .collect();
- heads
- }
-
- #[wasm_bindgen(js_name = getActorId)]
- pub fn get_actor_id(&self) -> String {
- let actor = self.doc.get_actor();
- actor.to_string()
- }
-
- #[wasm_bindgen(js_name = getLastLocalChange)]
- pub fn get_last_local_change(&mut self) -> Result {
- self.ensure_transaction_closed();
- if let Some(change) = self.doc.get_last_local_change() {
- Ok(Uint8Array::from(change.raw_bytes()))
- } else {
- Err(to_js_err("no local changes"))
- }
- }
-
- pub fn dump(&mut self) {
- self.ensure_transaction_closed();
- self.doc.dump()
- }
-
- #[wasm_bindgen(js_name = getMissingDeps)]
- pub fn get_missing_deps(&mut self, heads: Option) -> Result {
- self.ensure_transaction_closed();
- let heads = get_heads(heads).unwrap_or_default();
- let deps = self.doc.get_missing_deps(&heads);
- let deps: Array = deps
- .iter()
- .map(|h| JsValue::from_str(&hex::encode(&h.0)))
- .collect();
- Ok(deps)
- }
-
- #[wasm_bindgen(js_name = receiveSyncMessage)]
- pub fn receive_sync_message(
- &mut self,
- state: &mut SyncState,
- message: Uint8Array,
- ) -> Result<(), JsValue> {
- self.ensure_transaction_closed();
- let message = message.to_vec();
- let message = am::sync::Message::decode(message.as_slice()).map_err(to_js_err)?;
- let options = if let Some(observer) = self.observer.as_mut() {
- ApplyOptions::default().with_op_observer(observer)
- } else {
- ApplyOptions::default()
- };
- self.doc
- .receive_sync_message_with(&mut state.0, message, options)
- .map_err(to_js_err)?;
- Ok(())
- }
-
- #[wasm_bindgen(js_name = generateSyncMessage)]
- pub fn generate_sync_message(&mut self, state: &mut SyncState) -> Result {
- self.ensure_transaction_closed();
- if let Some(message) = self.doc.generate_sync_message(&mut state.0) {
- Ok(Uint8Array::from(message.encode().as_slice()).into())
- } else {
- Ok(JsValue::null())
- }
- }
-
- #[wasm_bindgen(js_name = toJS)]
- pub fn to_js(&self) -> JsValue {
- map_to_js(&self.doc, &ROOT)
- }
-
- pub fn materialize(&self, obj: JsValue, heads: Option) -> Result {
- let obj = self.import(obj).unwrap_or(ROOT);
- let heads = get_heads(heads);
- if let Some(heads) = heads {
- match self.doc.object_type(&obj) {
- Some(am::ObjType::Map) => Ok(map_to_js_at(&self.doc, &obj, heads.as_slice())),
- Some(am::ObjType::List) => Ok(list_to_js_at(&self.doc, &obj, heads.as_slice())),
- Some(am::ObjType::Text) => Ok(self.doc.text_at(&obj, heads.as_slice())?.into()),
- Some(am::ObjType::Table) => Ok(map_to_js_at(&self.doc, &obj, heads.as_slice())),
- None => Err(to_js_err(format!("invalid obj {}", obj))),
- }
- } else {
- match self.doc.object_type(&obj) {
- Some(am::ObjType::Map) => Ok(map_to_js(&self.doc, &obj)),
- Some(am::ObjType::List) => Ok(list_to_js(&self.doc, &obj)),
- Some(am::ObjType::Text) => Ok(self.doc.text(&obj)?.into()),
- Some(am::ObjType::Table) => Ok(map_to_js(&self.doc, &obj)),
- None => Err(to_js_err(format!("invalid obj {}", obj))),
- }
- }
- }
-
- fn import(&self, id: JsValue) -> Result {
- if let Some(s) = id.as_string() {
- if let Some(post) = s.strip_prefix('/') {
- let mut obj = ROOT;
- let mut is_map = true;
- let parts = post.split('/');
- for prop in parts {
- if prop.is_empty() {
- break;
- }
- let val = if is_map {
- self.doc.get(obj, prop)?
- } else {
- self.doc.get(obj, am::Prop::Seq(prop.parse().unwrap()))?
- };
- match val {
- Some((am::Value::Object(am::ObjType::Map), id)) => {
- is_map = true;
- obj = id;
- }
- Some((am::Value::Object(am::ObjType::Table), id)) => {
- is_map = true;
- obj = id;
- }
- Some((am::Value::Object(_), id)) => {
- is_map = false;
- obj = id;
- }
- None => return Err(to_js_err(format!("invalid path '{}'", s))),
- _ => return Err(to_js_err(format!("path '{}' is not an object", s))),
- };
- }
- Ok(obj)
- } else {
- Ok(self.doc.import(&s)?)
- }
- } else {
- Err(to_js_err("invalid objid"))
- }
- }
-
- fn import_prop(&self, prop: JsValue) -> Result {
- if let Some(s) = prop.as_string() {
- Ok(s.into())
- } else if let Some(n) = prop.as_f64() {
- Ok((n as usize).into())
- } else {
- Err(to_js_err(format!("invalid prop {:?}", prop)))
- }
- }
-
- fn import_scalar(&self, value: &JsValue, datatype: &Option) -> Option {
- match datatype.as_deref() {
- Some("boolean") => value.as_bool().map(am::ScalarValue::Boolean),
- Some("int") => value.as_f64().map(|v| am::ScalarValue::Int(v as i64)),
- Some("uint") => value.as_f64().map(|v| am::ScalarValue::Uint(v as u64)),
- Some("str") => value.as_string().map(|v| am::ScalarValue::Str(v.into())),
- Some("f64") => value.as_f64().map(am::ScalarValue::F64),
- Some("bytes") => Some(am::ScalarValue::Bytes(
- value.clone().dyn_into::().unwrap().to_vec(),
- )),
- Some("counter") => value.as_f64().map(|v| am::ScalarValue::counter(v as i64)),
- Some("timestamp") => {
- if let Some(v) = value.as_f64() {
- Some(am::ScalarValue::Timestamp(v as i64))
- } else if let Ok(d) = value.clone().dyn_into::() {
- Some(am::ScalarValue::Timestamp(d.get_time() as i64))
- } else {
- None
- }
- }
- Some("null") => Some(am::ScalarValue::Null),
- Some(_) => None,
- None => {
- if value.is_null() {
- Some(am::ScalarValue::Null)
- } else if let Some(b) = value.as_bool() {
- Some(am::ScalarValue::Boolean(b))
- } else if let Some(s) = value.as_string() {
- Some(am::ScalarValue::Str(s.into()))
- } else if let Some(n) = value.as_f64() {
- if (n.round() - n).abs() < f64::EPSILON {
- Some(am::ScalarValue::Int(n as i64))
- } else {
- Some(am::ScalarValue::F64(n))
- }
- } else if let Ok(d) = value.clone().dyn_into::() {
- Some(am::ScalarValue::Timestamp(d.get_time() as i64))
- } else if let Ok(o) = &value.clone().dyn_into::() {
- Some(am::ScalarValue::Bytes(o.to_vec()))
- } else {
- None
- }
- }
- }
- }
-
- fn import_value(
- &self,
- value: &JsValue,
- datatype: Option,
- ) -> Result<(Value<'static>, Vec<(Prop, JsValue)>), JsValue> {
- match self.import_scalar(value, &datatype) {
- Some(val) => Ok((val.into(), vec![])),
- None => {
- if let Some((o, subvals)) = to_objtype(value, &datatype) {
- Ok((o.into(), subvals))
- } else {
- web_sys::console::log_2(&"Invalid value".into(), value);
- Err(to_js_err("invalid value"))
- }
- }
- }
- }
-}
-
-#[wasm_bindgen(js_name = create)]
-pub fn init(actor: Option) -> Result {
- console_error_panic_hook::set_once();
- Automerge::new(actor)
-}
-
-#[wasm_bindgen(js_name = loadDoc)]
-pub fn load(data: Uint8Array, actor: Option) -> Result {
- let data = data.to_vec();
- let observer = None;
- let options = ApplyOptions::<()>::default();
- let mut automerge = am::AutoCommit::load_with(&data, options).map_err(to_js_err)?;
- if let Some(s) = actor {
- let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec());
- automerge.set_actor(actor);
- }
- Ok(Automerge {
- doc: automerge,
- observer,
- })
-}
-
-#[wasm_bindgen(js_name = encodeChange)]
-pub fn encode_change(change: JsValue) -> Result {
- let change: am::ExpandedChange = change.into_serde().map_err(to_js_err)?;
- let change: Change = change.into();
- Ok(Uint8Array::from(change.raw_bytes()))
-}
-
-#[wasm_bindgen(js_name = decodeChange)]
-pub fn decode_change(change: Uint8Array) -> Result {
- let change = Change::from_bytes(change.to_vec()).map_err(to_js_err)?;
- let change: am::ExpandedChange = change.decode();
- JsValue::from_serde(&change).map_err(to_js_err)
-}
-
-#[wasm_bindgen(js_name = initSyncState)]
-pub fn init_sync_state() -> SyncState {
- SyncState(am::sync::State::new())
-}
-
-// this is needed to be compatible with the automerge-js api
-#[wasm_bindgen(js_name = importSyncState)]
-pub fn import_sync_state(state: JsValue) -> Result {
- Ok(SyncState(JS(state).try_into()?))
-}
-
-// this is needed to be compatible with the automerge-js api
-#[wasm_bindgen(js_name = exportSyncState)]
-pub fn export_sync_state(state: SyncState) -> JsValue {
- JS::from(state.0).into()
-}
-
-#[wasm_bindgen(js_name = encodeSyncMessage)]
-pub fn encode_sync_message(message: JsValue) -> Result {
- let heads = js_get(&message, "heads")?.try_into()?;
- let need = js_get(&message, "need")?.try_into()?;
- let changes = js_get(&message, "changes")?.try_into()?;
- let have = js_get(&message, "have")?.try_into()?;
- Ok(Uint8Array::from(
- am::sync::Message {
- heads,
- need,
- have,
- changes,
- }
- .encode()
- .as_slice(),
- ))
-}
-
-#[wasm_bindgen(js_name = decodeSyncMessage)]
-pub fn decode_sync_message(msg: Uint8Array) -> Result {
- let data = msg.to_vec();
- let msg = am::sync::Message::decode(&data).map_err(to_js_err)?;
- let heads = AR::from(msg.heads.as_slice());
- let need = AR::from(msg.need.as_slice());
- let changes = AR::from(msg.changes.as_slice());
- let have = AR::from(msg.have.as_slice());
- let obj = Object::new().into();
- js_set(&obj, "heads", heads)?;
- js_set(&obj, "need", need)?;
- js_set(&obj, "have", have)?;
- js_set(&obj, "changes", changes)?;
- Ok(obj)
-}
-
-#[wasm_bindgen(js_name = encodeSyncState)]
-pub fn encode_sync_state(state: SyncState) -> Result {
- let state = state.0;
- Ok(Uint8Array::from(state.encode().as_slice()))
-}
-
-#[wasm_bindgen(js_name = decodeSyncState)]
-pub fn decode_sync_state(data: Uint8Array) -> Result {
- SyncState::decode(data)
-}
diff --git a/automerge-wasm/src/value.rs b/automerge-wasm/src/value.rs
deleted file mode 100644
index 5b20cc20..00000000
--- a/automerge-wasm/src/value.rs
+++ /dev/null
@@ -1,38 +0,0 @@
-use std::borrow::Cow;
-
-use automerge as am;
-use js_sys::Uint8Array;
-use wasm_bindgen::prelude::*;
-
-#[derive(Debug)]
-pub struct ScalarValue<'a>(pub(crate) Cow<'a, am::ScalarValue>);
-
-impl<'a> From> for JsValue {
- fn from(val: ScalarValue<'a>) -> Self {
- match &*val.0 {
- am::ScalarValue::Bytes(v) => Uint8Array::from(v.as_slice()).into(),
- am::ScalarValue::Str(v) => v.to_string().into(),
- am::ScalarValue::Int(v) => (*v as f64).into(),
- am::ScalarValue::Uint(v) => (*v as f64).into(),
- am::ScalarValue::F64(v) => (*v).into(),
- am::ScalarValue::Counter(v) => (f64::from(v)).into(),
- am::ScalarValue::Timestamp(v) => js_sys::Date::new(&(*v as f64).into()).into(),
- am::ScalarValue::Boolean(v) => (*v).into(),
- am::ScalarValue::Null => JsValue::null(),
- }
- }
-}
-
-pub(crate) fn datatype(s: &am::ScalarValue) -> String {
- match s {
- am::ScalarValue::Bytes(_) => "bytes".into(),
- am::ScalarValue::Str(_) => "str".into(),
- am::ScalarValue::Int(_) => "int".into(),
- am::ScalarValue::Uint(_) => "uint".into(),
- am::ScalarValue::F64(_) => "f64".into(),
- am::ScalarValue::Counter(_) => "counter".into(),
- am::ScalarValue::Timestamp(_) => "timestamp".into(),
- am::ScalarValue::Boolean(_) => "boolean".into(),
- am::ScalarValue::Null => "null".into(),
- }
-}
diff --git a/automerge-wasm/test/helpers/columnar.js b/automerge-wasm/test/helpers/columnar.js
deleted file mode 100644
index 8d266f5b..00000000
--- a/automerge-wasm/test/helpers/columnar.js
+++ /dev/null
@@ -1,1415 +0,0 @@
-const pako = require('pako')
-const { copyObject, parseOpId, equalBytes } = require('./common')
-const {
- utf8ToString, hexStringToBytes, bytesToHexString,
- Encoder, Decoder, RLEEncoder, RLEDecoder, DeltaEncoder, DeltaDecoder, BooleanEncoder, BooleanDecoder
-} = require('./encoding')
-
-// Maybe we should be using the platform's built-in hash implementation?
-// Node has the crypto module: https://nodejs.org/api/crypto.html and browsers have
-// https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/digest
-// However, the WebCrypto API is asynchronous (returns promises), which would
-// force all our APIs to become asynchronous as well, which would be annoying.
-//
-// I think on balance, it's safe enough to use a random library off npm:
-// - We only need one hash function (not a full suite of crypto algorithms);
-// - SHA256 is quite simple and has fairly few opportunities for subtle bugs
-// (compared to asymmetric cryptography anyway);
-// - It does not need a secure source of random bits and does not need to be
-// constant-time;
-// - I have reviewed the source code and it seems pretty reasonable.
-const { Hash } = require('fast-sha256')
-
-// These bytes don't mean anything, they were generated randomly
-const MAGIC_BYTES = new Uint8Array([0x85, 0x6f, 0x4a, 0x83])
-
-const CHUNK_TYPE_DOCUMENT = 0
-const CHUNK_TYPE_CHANGE = 1
-const CHUNK_TYPE_DEFLATE = 2 // like CHUNK_TYPE_CHANGE but with DEFLATE compression
-
-// Minimum number of bytes in a value before we enable DEFLATE compression (there is no point
-// compressing very short values since compression may actually make them bigger)
-const DEFLATE_MIN_SIZE = 256
-
-// The least-significant 3 bits of a columnId indicate its datatype
-const COLUMN_TYPE = {
- GROUP_CARD: 0, ACTOR_ID: 1, INT_RLE: 2, INT_DELTA: 3, BOOLEAN: 4,
- STRING_RLE: 5, VALUE_LEN: 6, VALUE_RAW: 7
-}
-
-// The 4th-least-significant bit of a columnId is set if the column is DEFLATE-compressed
-const COLUMN_TYPE_DEFLATE = 8
-
-// In the values in a column of type VALUE_LEN, the bottom four bits indicate the type of the value,
-// one of the following types in VALUE_TYPE. The higher bits indicate the length of the value in the
-// associated VALUE_RAW column (in bytes).
-const VALUE_TYPE = {
- NULL: 0, FALSE: 1, TRUE: 2, LEB128_UINT: 3, LEB128_INT: 4, IEEE754: 5,
- UTF8: 6, BYTES: 7, COUNTER: 8, TIMESTAMP: 9, MIN_UNKNOWN: 10, MAX_UNKNOWN: 15
-}
-
-// make* actions must be at even-numbered indexes in this list
-const ACTIONS = ['makeMap', 'set', 'makeList', 'del', 'makeText', 'inc', 'makeTable', 'link']
-
-const OBJECT_TYPE = {makeMap: 'map', makeList: 'list', makeText: 'text', makeTable: 'table'}
-
-const COMMON_COLUMNS = [
- {columnName: 'objActor', columnId: 0 << 4 | COLUMN_TYPE.ACTOR_ID},
- {columnName: 'objCtr', columnId: 0 << 4 | COLUMN_TYPE.INT_RLE},
- {columnName: 'keyActor', columnId: 1 << 4 | COLUMN_TYPE.ACTOR_ID},
- {columnName: 'keyCtr', columnId: 1 << 4 | COLUMN_TYPE.INT_DELTA},
- {columnName: 'keyStr', columnId: 1 << 4 | COLUMN_TYPE.STRING_RLE},
- {columnName: 'idActor', columnId: 2 << 4 | COLUMN_TYPE.ACTOR_ID},
- {columnName: 'idCtr', columnId: 2 << 4 | COLUMN_TYPE.INT_DELTA},
- {columnName: 'insert', columnId: 3 << 4 | COLUMN_TYPE.BOOLEAN},
- {columnName: 'action', columnId: 4 << 4 | COLUMN_TYPE.INT_RLE},
- {columnName: 'valLen', columnId: 5 << 4 | COLUMN_TYPE.VALUE_LEN},
- {columnName: 'valRaw', columnId: 5 << 4 | COLUMN_TYPE.VALUE_RAW},
- {columnName: 'chldActor', columnId: 6 << 4 | COLUMN_TYPE.ACTOR_ID},
- {columnName: 'chldCtr', columnId: 6 << 4 | COLUMN_TYPE.INT_DELTA}
-]
-
-const CHANGE_COLUMNS = COMMON_COLUMNS.concat([
- {columnName: 'predNum', columnId: 7 << 4 | COLUMN_TYPE.GROUP_CARD},
- {columnName: 'predActor', columnId: 7 << 4 | COLUMN_TYPE.ACTOR_ID},
- {columnName: 'predCtr', columnId: 7 << 4 | COLUMN_TYPE.INT_DELTA}
-])
-
-const DOC_OPS_COLUMNS = COMMON_COLUMNS.concat([
- {columnName: 'succNum', columnId: 8 << 4 | COLUMN_TYPE.GROUP_CARD},
- {columnName: 'succActor', columnId: 8 << 4 | COLUMN_TYPE.ACTOR_ID},
- {columnName: 'succCtr', columnId: 8 << 4 | COLUMN_TYPE.INT_DELTA}
-])
-
-const DOCUMENT_COLUMNS = [
- {columnName: 'actor', columnId: 0 << 4 | COLUMN_TYPE.ACTOR_ID},
- {columnName: 'seq', columnId: 0 << 4 | COLUMN_TYPE.INT_DELTA},
- {columnName: 'maxOp', columnId: 1 << 4 | COLUMN_TYPE.INT_DELTA},
- {columnName: 'time', columnId: 2 << 4 | COLUMN_TYPE.INT_DELTA},
- {columnName: 'message', columnId: 3 << 4 | COLUMN_TYPE.STRING_RLE},
- {columnName: 'depsNum', columnId: 4 << 4 | COLUMN_TYPE.GROUP_CARD},
- {columnName: 'depsIndex', columnId: 4 << 4 | COLUMN_TYPE.INT_DELTA},
- {columnName: 'extraLen', columnId: 5 << 4 | COLUMN_TYPE.VALUE_LEN},
- {columnName: 'extraRaw', columnId: 5 << 4 | COLUMN_TYPE.VALUE_RAW}
-]
-
-/**
- * Maps an opId of the form {counter: 12345, actorId: 'someActorId'} to the form
- * {counter: 12345, actorNum: 123, actorId: 'someActorId'}, where the actorNum
- * is the index into the `actorIds` array.
- */
-function actorIdToActorNum(opId, actorIds) {
- if (!opId || !opId.actorId) return opId
- const counter = opId.counter
- const actorNum = actorIds.indexOf(opId.actorId)
- if (actorNum < 0) throw new RangeError('missing actorId') // should not happen
- return {counter, actorNum, actorId: opId.actorId}
-}
-
-/**
- * Comparison function to pass to Array.sort(), which compares two opIds in the
- * form produced by `actorIdToActorNum` so that they are sorted in increasing
- * Lamport timestamp order (sorted first by counter, then by actorId).
- */
-function compareParsedOpIds(id1, id2) {
- if (id1.counter < id2.counter) return -1
- if (id1.counter > id2.counter) return +1
- if (id1.actorId < id2.actorId) return -1
- if (id1.actorId > id2.actorId) return +1
- return 0
-}
-
-/**
- * Takes `changes`, an array of changes (represented as JS objects). Returns an
- * object `{changes, actorIds}`, where `changes` is a copy of the argument in
- * which all string opIds have been replaced with `{counter, actorNum}` objects,
- * and where `actorIds` is a lexicographically sorted array of actor IDs occurring
- * in any of the operations. `actorNum` is an index into that array of actorIds.
- * If `single` is true, the actorId of the author of the change is moved to the
- * beginning of the array of actorIds, so that `actorNum` is zero when referencing
- * the author of the change itself. This special-casing is omitted if `single` is
- * false.
- */
-function parseAllOpIds(changes, single) {
- const actors = {}, newChanges = []
- for (let change of changes) {
- change = copyObject(change)
- actors[change.actor] = true
- change.ops = expandMultiOps(change.ops, change.startOp, change.actor)
- change.ops = change.ops.map(op => {
- op = copyObject(op)
- if (op.obj !== '_root') op.obj = parseOpId(op.obj)
- if (op.elemId && op.elemId !== '_head') op.elemId = parseOpId(op.elemId)
- if (op.child) op.child = parseOpId(op.child)
- if (op.pred) op.pred = op.pred.map(parseOpId)
- if (op.obj.actorId) actors[op.obj.actorId] = true
- if (op.elemId && op.elemId.actorId) actors[op.elemId.actorId] = true
- if (op.child && op.child.actorId) actors[op.child.actorId] = true
- for (let pred of op.pred) actors[pred.actorId] = true
- return op
- })
- newChanges.push(change)
- }
-
- let actorIds = Object.keys(actors).sort()
- if (single) {
- actorIds = [changes[0].actor].concat(actorIds.filter(actor => actor !== changes[0].actor))
- }
- for (let change of newChanges) {
- change.actorNum = actorIds.indexOf(change.actor)
- for (let i = 0; i < change.ops.length; i++) {
- let op = change.ops[i]
- op.id = {counter: change.startOp + i, actorNum: change.actorNum, actorId: change.actor}
- op.obj = actorIdToActorNum(op.obj, actorIds)
- op.elemId = actorIdToActorNum(op.elemId, actorIds)
- op.child = actorIdToActorNum(op.child, actorIds)
- op.pred = op.pred.map(pred => actorIdToActorNum(pred, actorIds))
- }
- }
- return {changes: newChanges, actorIds}
-}
-
-/**
- * Encodes the `obj` property of operation `op` into the two columns
- * `objActor` and `objCtr`.
- */
-function encodeObjectId(op, columns) {
- if (op.obj === '_root') {
- columns.objActor.appendValue(null)
- columns.objCtr.appendValue(null)
- } else if (op.obj.actorNum >= 0 && op.obj.counter > 0) {
- columns.objActor.appendValue(op.obj.actorNum)
- columns.objCtr.appendValue(op.obj.counter)
- } else {
- throw new RangeError(`Unexpected objectId reference: ${JSON.stringify(op.obj)}`)
- }
-}
-
-/**
- * Encodes the `key` and `elemId` properties of operation `op` into the three
- * columns `keyActor`, `keyCtr`, and `keyStr`.
- */
-function encodeOperationKey(op, columns) {
- if (op.key) {
- columns.keyActor.appendValue(null)
- columns.keyCtr.appendValue(null)
- columns.keyStr.appendValue(op.key)
- } else if (op.elemId === '_head' && op.insert) {
- columns.keyActor.appendValue(null)
- columns.keyCtr.appendValue(0)
- columns.keyStr.appendValue(null)
- } else if (op.elemId && op.elemId.actorNum >= 0 && op.elemId.counter > 0) {
- columns.keyActor.appendValue(op.elemId.actorNum)
- columns.keyCtr.appendValue(op.elemId.counter)
- columns.keyStr.appendValue(null)
- } else {
- throw new RangeError(`Unexpected operation key: ${JSON.stringify(op)}`)
- }
-}
-
-/**
- * Encodes the `action` property of operation `op` into the `action` column.
- */
-function encodeOperationAction(op, columns) {
- const actionCode = ACTIONS.indexOf(op.action)
- if (actionCode >= 0) {
- columns.action.appendValue(actionCode)
- } else if (typeof op.action === 'number') {
- columns.action.appendValue(op.action)
- } else {
- throw new RangeError(`Unexpected operation action: ${op.action}`)
- }
-}
-
-/**
- * Encodes the integer `value` into the two columns `valLen` and `valRaw`,
- * with the datatype tag set to `typeTag`. If `typeTag` is zero, it is set
- * automatically to signed or unsigned depending on the sign of the value.
- * Values with non-zero type tags are always encoded as signed integers.
- */
-function encodeInteger(value, typeTag, columns) {
- let numBytes
- if (value < 0 || typeTag > 0) {
- numBytes = columns.valRaw.appendInt53(value)
- if (!typeTag) typeTag = VALUE_TYPE.LEB128_INT
- } else {
- numBytes = columns.valRaw.appendUint53(value)
- typeTag = VALUE_TYPE.LEB128_UINT
- }
- columns.valLen.appendValue(numBytes << 4 | typeTag)
-}
-
-/**
- * Encodes the `value` property of operation `op` into the two columns
- * `valLen` and `valRaw`.
- */
-function encodeValue(op, columns) {
- if ((op.action !== 'set' && op.action !== 'inc') || op.value === null) {
- columns.valLen.appendValue(VALUE_TYPE.NULL)
- } else if (op.value === false) {
- columns.valLen.appendValue(VALUE_TYPE.FALSE)
- } else if (op.value === true) {
- columns.valLen.appendValue(VALUE_TYPE.TRUE)
- } else if (typeof op.value === 'string') {
- const numBytes = columns.valRaw.appendRawString(op.value)
- columns.valLen.appendValue(numBytes << 4 | VALUE_TYPE.UTF8)
- } else if (ArrayBuffer.isView(op.value)) {
- const numBytes = columns.valRaw.appendRawBytes(new Uint8Array(op.value.buffer))
- columns.valLen.appendValue(numBytes << 4 | VALUE_TYPE.BYTES)
- } else if (op.datatype === 'counter' && typeof op.value === 'number') {
- encodeInteger(op.value, VALUE_TYPE.COUNTER, columns)
- } else if (op.datatype === 'timestamp' && typeof op.value === 'number') {
- encodeInteger(op.value, VALUE_TYPE.TIMESTAMP, columns)
- } else if (typeof op.datatype === 'number' && op.datatype >= VALUE_TYPE.MIN_UNKNOWN &&
- op.datatype <= VALUE_TYPE.MAX_UNKNOWN && op.value instanceof Uint8Array) {
- const numBytes = columns.valRaw.appendRawBytes(op.value)
- columns.valLen.appendValue(numBytes << 4 | op.datatype)
- } else if (op.datatype) {
- throw new RangeError(`Unknown datatype ${op.datatype} for value ${op.value}`)
- } else if (typeof op.value === 'number') {
- if (Number.isInteger(op.value) && op.value <= Number.MAX_SAFE_INTEGER && op.value >= Number.MIN_SAFE_INTEGER) {
- encodeInteger(op.value, 0, columns)
- } else {
- // Encode number in 32-bit float if this can be done without loss of precision
- const buf32 = new ArrayBuffer(4), view32 = new DataView(buf32)
- view32.setFloat32(0, op.value, true) // true means little-endian
- if (view32.getFloat32(0, true) === op.value) {
- columns.valRaw.appendRawBytes(new Uint8Array(buf32))
- columns.valLen.appendValue(4 << 4 | VALUE_TYPE.IEEE754)
- } else {
- const buf64 = new ArrayBuffer(8), view64 = new DataView(buf64)
- view64.setFloat64(0, op.value, true) // true means little-endian
- columns.valRaw.appendRawBytes(new Uint8Array(buf64))
- columns.valLen.appendValue(8 << 4 | VALUE_TYPE.IEEE754)
- }
- }
- } else {
- throw new RangeError(`Unsupported value in operation: ${op.value}`)
- }
-}
-
-/**
- * Given `sizeTag` (an unsigned integer read from a VALUE_LEN column) and `bytes` (a Uint8Array
- * read from a VALUE_RAW column, with length `sizeTag >> 4`), this function returns an object of the
- * form `{value: value, datatype: datatypeTag}` where `value` is a JavaScript primitive datatype
- * corresponding to the value, and `datatypeTag` is a datatype annotation such as 'counter'.
- */
-function decodeValue(sizeTag, bytes) {
- if (sizeTag === VALUE_TYPE.NULL) {
- return {value: null}
- } else if (sizeTag === VALUE_TYPE.FALSE) {
- return {value: false}
- } else if (sizeTag === VALUE_TYPE.TRUE) {
- return {value: true}
- } else if (sizeTag % 16 === VALUE_TYPE.UTF8) {
- return {value: utf8ToString(bytes)}
- } else {
- if (sizeTag % 16 === VALUE_TYPE.LEB128_UINT) {
- return {value: new Decoder(bytes).readUint53()}
- } else if (sizeTag % 16 === VALUE_TYPE.LEB128_INT) {
- return {value: new Decoder(bytes).readInt53()}
- } else if (sizeTag % 16 === VALUE_TYPE.IEEE754) {
- const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength)
- if (bytes.byteLength === 4) {
- return {value: view.getFloat32(0, true)} // true means little-endian
- } else if (bytes.byteLength === 8) {
- return {value: view.getFloat64(0, true)}
- } else {
- throw new RangeError(`Invalid length for floating point number: ${bytes.byteLength}`)
- }
- } else if (sizeTag % 16 === VALUE_TYPE.COUNTER) {
- return {value: new Decoder(bytes).readInt53(), datatype: 'counter'}
- } else if (sizeTag % 16 === VALUE_TYPE.TIMESTAMP) {
- return {value: new Decoder(bytes).readInt53(), datatype: 'timestamp'}
- } else {
- return {value: bytes, datatype: sizeTag % 16}
- }
- }
-}
-
-/**
- * Reads one value from the column `columns[colIndex]` and interprets it based
- * on the column type. `actorIds` is a list of actors that appear in the change;
- * `actorIds[0]` is the actorId of the change's author. Mutates the `result`
- * object with the value, and returns the number of columns processed (this is 2
- * in the case of a pair of VALUE_LEN and VALUE_RAW columns, which are processed
- * in one go).
- */
-function decodeValueColumns(columns, colIndex, actorIds, result) {
- const { columnId, columnName, decoder } = columns[colIndex]
- if (columnId % 8 === COLUMN_TYPE.VALUE_LEN && colIndex + 1 < columns.length &&
- columns[colIndex + 1].columnId === columnId + 1) {
- const sizeTag = decoder.readValue()
- const rawValue = columns[colIndex + 1].decoder.readRawBytes(sizeTag >> 4)
- const { value, datatype } = decodeValue(sizeTag, rawValue)
- result[columnName] = value
- if (datatype) result[columnName + '_datatype'] = datatype
- return 2
- } else if (columnId % 8 === COLUMN_TYPE.ACTOR_ID) {
- const actorNum = decoder.readValue()
- if (actorNum === null) {
- result[columnName] = null
- } else {
- if (!actorIds[actorNum]) throw new RangeError(`No actor index ${actorNum}`)
- result[columnName] = actorIds[actorNum]
- }
- } else {
- result[columnName] = decoder.readValue()
- }
- return 1
-}
-
-/**
- * Encodes an array of operations in a set of columns. The operations need to
- * be parsed with `parseAllOpIds()` beforehand. If `forDocument` is true, we use
- * the column structure of a whole document, otherwise we use the column
- * structure for an individual change. Returns an array of `{id, name, encoder}`
- * objects.
- */
-function encodeOps(ops, forDocument) {
- const columns = {
- objActor : new RLEEncoder('uint'),
- objCtr : new RLEEncoder('uint'),
- keyActor : new RLEEncoder('uint'),
- keyCtr : new DeltaEncoder(),
- keyStr : new RLEEncoder('utf8'),
- insert : new BooleanEncoder(),
- action : new RLEEncoder('uint'),
- valLen : new RLEEncoder('uint'),
- valRaw : new Encoder(),
- chldActor : new RLEEncoder('uint'),
- chldCtr : new DeltaEncoder()
- }
-
- if (forDocument) {
- columns.idActor = new RLEEncoder('uint')
- columns.idCtr = new DeltaEncoder()
- columns.succNum = new RLEEncoder('uint')
- columns.succActor = new RLEEncoder('uint')
- columns.succCtr = new DeltaEncoder()
- } else {
- columns.predNum = new RLEEncoder('uint')
- columns.predCtr = new DeltaEncoder()
- columns.predActor = new RLEEncoder('uint')
- }
-
- for (let op of ops) {
- encodeObjectId(op, columns)
- encodeOperationKey(op, columns)
- columns.insert.appendValue(!!op.insert)
- encodeOperationAction(op, columns)
- encodeValue(op, columns)
-
- if (op.child && op.child.counter) {
- columns.chldActor.appendValue(op.child.actorNum)
- columns.chldCtr.appendValue(op.child.counter)
- } else {
- columns.chldActor.appendValue(null)
- columns.chldCtr.appendValue(null)
- }
-
- if (forDocument) {
- columns.idActor.appendValue(op.id.actorNum)
- columns.idCtr.appendValue(op.id.counter)
- columns.succNum.appendValue(op.succ.length)
- op.succ.sort(compareParsedOpIds)
- for (let i = 0; i < op.succ.length; i++) {
- columns.succActor.appendValue(op.succ[i].actorNum)
- columns.succCtr.appendValue(op.succ[i].counter)
- }
- } else {
- columns.predNum.appendValue(op.pred.length)
- op.pred.sort(compareParsedOpIds)
- for (let i = 0; i < op.pred.length; i++) {
- columns.predActor.appendValue(op.pred[i].actorNum)
- columns.predCtr.appendValue(op.pred[i].counter)
- }
- }
- }
-
- let columnList = []
- for (let {columnName, columnId} of forDocument ? DOC_OPS_COLUMNS : CHANGE_COLUMNS) {
- if (columns[columnName]) columnList.push({id: columnId, name: columnName, encoder: columns[columnName]})
- }
- return columnList.sort((a, b) => a.id - b.id)
-}
-
-function expandMultiOps(ops, startOp, actor) {
- let opNum = startOp
- let expandedOps = []
- for (const op of ops) {
- if (op.action === 'set' && op.values && op.insert) {
- if (op.pred.length !== 0) throw new RangeError('multi-insert pred must be empty')
- let lastElemId = op.elemId
- for (const value of op.values) {
- expandedOps.push({action: 'set', obj: op.obj, elemId: lastElemId, value, pred: [], insert: true})
- lastElemId = `${opNum}@${actor}`
- opNum += 1
- }
- } else if (op.action === 'del' && op.multiOp > 1) {
- if (op.pred.length !== 1) throw new RangeError('multiOp deletion must have exactly one pred')
- const startElemId = parseOpId(op.elemId), startPred = parseOpId(op.pred[0])
- for (let i = 0; i < op.multiOp; i++) {
- const elemId = `${startElemId.counter + i}@${startElemId.actorId}`
- const pred = [`${startPred.counter + i}@${startPred.actorId}`]
- expandedOps.push({action: 'del', obj: op.obj, elemId, pred})
- opNum += 1
- }
- } else {
- expandedOps.push(op)
- opNum += 1
- }
- }
- return expandedOps
-}
-
-/**
- * Takes a change as decoded by `decodeColumns`, and changes it into the form
- * expected by the rest of the backend. If `forDocument` is true, we use the op
- * structure of a whole document, otherwise we use the op structure for an
- * individual change.
- */
-function decodeOps(ops, forDocument) {
- const newOps = []
- for (let op of ops) {
- const obj = (op.objCtr === null) ? '_root' : `${op.objCtr}@${op.objActor}`
- const elemId = op.keyStr ? undefined : (op.keyCtr === 0 ? '_head' : `${op.keyCtr}@${op.keyActor}`)
- const action = ACTIONS[op.action] || op.action
- const newOp = elemId ? {obj, elemId, action} : {obj, key: op.keyStr, action}
- newOp.insert = !!op.insert
- if (ACTIONS[op.action] === 'set' || ACTIONS[op.action] === 'inc') {
- newOp.value = op.valLen
- if (op.valLen_datatype) newOp.datatype = op.valLen_datatype
- }
- if (!!op.chldCtr !== !!op.chldActor) {
- throw new RangeError(`Mismatched child columns: ${op.chldCtr} and ${op.chldActor}`)
- }
- if (op.chldCtr !== null) newOp.child = `${op.chldCtr}@${op.chldActor}`
- if (forDocument) {
- newOp.id = `${op.idCtr}@${op.idActor}`
- newOp.succ = op.succNum.map(succ => `${succ.succCtr}@${succ.succActor}`)
- checkSortedOpIds(op.succNum.map(succ => ({counter: succ.succCtr, actorId: succ.succActor})))
- } else {
- newOp.pred = op.predNum.map(pred => `${pred.predCtr}@${pred.predActor}`)
- checkSortedOpIds(op.predNum.map(pred => ({counter: pred.predCtr, actorId: pred.predActor})))
- }
- newOps.push(newOp)
- }
- return newOps
-}
-
-/**
- * Throws an exception if the opIds in the given array are not in sorted order.
- */
-function checkSortedOpIds(opIds) {
- let last = null
- for (let opId of opIds) {
- if (last && compareParsedOpIds(last, opId) !== -1) {
- throw new RangeError('operation IDs are not in ascending order')
- }
- last = opId
- }
-}
-
-function encoderByColumnId(columnId) {
- if ((columnId & 7) === COLUMN_TYPE.INT_DELTA) {
- return new DeltaEncoder()
- } else if ((columnId & 7) === COLUMN_TYPE.BOOLEAN) {
- return new BooleanEncoder()
- } else if ((columnId & 7) === COLUMN_TYPE.STRING_RLE) {
- return new RLEEncoder('utf8')
- } else if ((columnId & 7) === COLUMN_TYPE.VALUE_RAW) {
- return new Encoder()
- } else {
- return new RLEEncoder('uint')
- }
-}
-
-function decoderByColumnId(columnId, buffer) {
- if ((columnId & 7) === COLUMN_TYPE.INT_DELTA) {
- return new DeltaDecoder(buffer)
- } else if ((columnId & 7) === COLUMN_TYPE.BOOLEAN) {
- return new BooleanDecoder(buffer)
- } else if ((columnId & 7) === COLUMN_TYPE.STRING_RLE) {
- return new RLEDecoder('utf8', buffer)
- } else if ((columnId & 7) === COLUMN_TYPE.VALUE_RAW) {
- return new Decoder(buffer)
- } else {
- return new RLEDecoder('uint', buffer)
- }
-}
-
-function makeDecoders(columns, columnSpec) {
- const emptyBuf = new Uint8Array(0)
- let decoders = [], columnIndex = 0, specIndex = 0
-
- while (columnIndex < columns.length || specIndex < columnSpec.length) {
- if (columnIndex === columns.length ||
- (specIndex < columnSpec.length && columnSpec[specIndex].columnId < columns[columnIndex].columnId)) {
- const {columnId, columnName} = columnSpec[specIndex]
- decoders.push({columnId, columnName, decoder: decoderByColumnId(columnId, emptyBuf)})
- specIndex++
- } else if (specIndex === columnSpec.length || columns[columnIndex].columnId < columnSpec[specIndex].columnId) {
- const {columnId, buffer} = columns[columnIndex]
- decoders.push({columnId, decoder: decoderByColumnId(columnId, buffer)})
- columnIndex++
- } else { // columns[columnIndex].columnId === columnSpec[specIndex].columnId
- const {columnId, buffer} = columns[columnIndex], {columnName} = columnSpec[specIndex]
- decoders.push({columnId, columnName, decoder: decoderByColumnId(columnId, buffer)})
- columnIndex++
- specIndex++
- }
- }
- return decoders
-}
-
-function decodeColumns(columns, actorIds, columnSpec) {
- columns = makeDecoders(columns, columnSpec)
- let parsedRows = []
- while (columns.some(col => !col.decoder.done)) {
- let row = {}, col = 0
- while (col < columns.length) {
- const columnId = columns[col].columnId
- let groupId = columnId >> 4, groupCols = 1
- while (col + groupCols < columns.length && columns[col + groupCols].columnId >> 4 === groupId) {
- groupCols++
- }
-
- if (columnId % 8 === COLUMN_TYPE.GROUP_CARD) {
- const values = [], count = columns[col].decoder.readValue()
- for (let i = 0; i < count; i++) {
- let value = {}
- for (let colOffset = 1; colOffset < groupCols; colOffset++) {
- decodeValueColumns(columns, col + colOffset, actorIds, value)
- }
- values.push(value)
- }
- row[columns[col].columnName] = values
- col += groupCols
- } else {
- col += decodeValueColumns(columns, col, actorIds, row)
- }
- }
- parsedRows.push(row)
- }
- return parsedRows
-}
-
-function decodeColumnInfo(decoder) {
- // A number that is all 1 bits except for the bit that indicates whether a column is
- // deflate-compressed. We ignore this bit when checking whether columns are sorted by ID.
- const COLUMN_ID_MASK = (-1 ^ COLUMN_TYPE_DEFLATE) >>> 0
-
- let lastColumnId = -1, columns = [], numColumns = decoder.readUint53()
- for (let i = 0; i < numColumns; i++) {
- const columnId = decoder.readUint53(), bufferLen = decoder.readUint53()
- if ((columnId & COLUMN_ID_MASK) <= (lastColumnId & COLUMN_ID_MASK)) {
- throw new RangeError('Columns must be in ascending order')
- }
- lastColumnId = columnId
- columns.push({columnId, bufferLen})
- }
- return columns
-}
-
-function encodeColumnInfo(encoder, columns) {
- const nonEmptyColumns = columns.filter(column => column.encoder.buffer.byteLength > 0)
- encoder.appendUint53(nonEmptyColumns.length)
- for (let column of nonEmptyColumns) {
- encoder.appendUint53(column.id)
- encoder.appendUint53(column.encoder.buffer.byteLength)
- }
-}
-
-function decodeChangeHeader(decoder) {
- const numDeps = decoder.readUint53(), deps = []
- for (let i = 0; i < numDeps; i++) {
- deps.push(bytesToHexString(decoder.readRawBytes(32)))
- }
- let change = {
- actor: decoder.readHexString(),
- seq: decoder.readUint53(),
- startOp: decoder.readUint53(),
- time: decoder.readInt53(),
- message: decoder.readPrefixedString(),
- deps
- }
- const actorIds = [change.actor], numActorIds = decoder.readUint53()
- for (let i = 0; i < numActorIds; i++) actorIds.push(decoder.readHexString())
- change.actorIds = actorIds
- return change
-}
-
-/**
- * Assembles a chunk of encoded data containing a checksum, headers, and a
- * series of encoded columns. Calls `encodeHeaderCallback` with an encoder that
- * should be used to add the headers. The columns should be given as `columns`.
- */
-function encodeContainer(chunkType, encodeContentsCallback) {
- const CHECKSUM_SIZE = 4 // checksum is first 4 bytes of SHA-256 hash of the rest of the data
- const HEADER_SPACE = MAGIC_BYTES.byteLength + CHECKSUM_SIZE + 1 + 5 // 1 byte type + 5 bytes length
- const body = new Encoder()
- // Make space for the header at the beginning of the body buffer. We will
- // copy the header in here later. This is cheaper than copying the body since
- // the body is likely to be much larger than the header.
- body.appendRawBytes(new Uint8Array(HEADER_SPACE))
- encodeContentsCallback(body)
-
- const bodyBuf = body.buffer
- const header = new Encoder()
- header.appendByte(chunkType)
- header.appendUint53(bodyBuf.byteLength - HEADER_SPACE)
-
- // Compute the hash over chunkType, length, and body
- const headerBuf = header.buffer
- const sha256 = new Hash()
- sha256.update(headerBuf)
- sha256.update(bodyBuf.subarray(HEADER_SPACE))
- const hash = sha256.digest(), checksum = hash.subarray(0, CHECKSUM_SIZE)
-
- // Copy header into the body buffer so that they are contiguous
- bodyBuf.set(MAGIC_BYTES, HEADER_SPACE - headerBuf.byteLength - CHECKSUM_SIZE - MAGIC_BYTES.byteLength)
- bodyBuf.set(checksum, HEADER_SPACE - headerBuf.byteLength - CHECKSUM_SIZE)
- bodyBuf.set(headerBuf, HEADER_SPACE - headerBuf.byteLength)
- return {hash, bytes: bodyBuf.subarray(HEADER_SPACE - headerBuf.byteLength - CHECKSUM_SIZE - MAGIC_BYTES.byteLength)}
-}
-
-function decodeContainerHeader(decoder, computeHash) {
- if (!equalBytes(decoder.readRawBytes(MAGIC_BYTES.byteLength), MAGIC_BYTES)) {
- throw new RangeError('Data does not begin with magic bytes 85 6f 4a 83')
- }
- const expectedHash = decoder.readRawBytes(4)
- const hashStartOffset = decoder.offset
- const chunkType = decoder.readByte()
- const chunkLength = decoder.readUint53()
- const header = {chunkType, chunkLength, chunkData: decoder.readRawBytes(chunkLength)}
-
- if (computeHash) {
- const sha256 = new Hash()
- sha256.update(decoder.buf.subarray(hashStartOffset, decoder.offset))
- const binaryHash = sha256.digest()
- if (!equalBytes(binaryHash.subarray(0, 4), expectedHash)) {
- throw new RangeError('checksum does not match data')
- }
- header.hash = bytesToHexString(binaryHash)
- }
- return header
-}
-
-/**
- * Returns the checksum of a change (bytes 4 to 7) as a 32-bit unsigned integer.
- */
-function getChangeChecksum(change) {
- if (change[0] !== MAGIC_BYTES[0] || change[1] !== MAGIC_BYTES[1] ||
- change[2] !== MAGIC_BYTES[2] || change[3] !== MAGIC_BYTES[3]) {
- throw new RangeError('Data does not begin with magic bytes 85 6f 4a 83')
- }
- return ((change[4] << 24) | (change[5] << 16) | (change[6] << 8) | change[7]) >>> 0
-}
-
-function encodeChange(changeObj) {
- const { changes, actorIds } = parseAllOpIds([changeObj], true)
- const change = changes[0]
-
- const { hash, bytes } = encodeContainer(CHUNK_TYPE_CHANGE, encoder => {
- if (!Array.isArray(change.deps)) throw new TypeError('deps is not an array')
- encoder.appendUint53(change.deps.length)
- for (let hash of change.deps.slice().sort()) {
- encoder.appendRawBytes(hexStringToBytes(hash))
- }
- encoder.appendHexString(change.actor)
- encoder.appendUint53(change.seq)
- encoder.appendUint53(change.startOp)
- encoder.appendInt53(change.time)
- encoder.appendPrefixedString(change.message || '')
- encoder.appendUint53(actorIds.length - 1)
- for (let actor of actorIds.slice(1)) encoder.appendHexString(actor)
-
- const columns = encodeOps(change.ops, false)
- encodeColumnInfo(encoder, columns)
- for (let column of columns) encoder.appendRawBytes(column.encoder.buffer)
- if (change.extraBytes) encoder.appendRawBytes(change.extraBytes)
- })
-
- const hexHash = bytesToHexString(hash)
- if (changeObj.hash && changeObj.hash !== hexHash) {
- throw new RangeError(`Change hash does not match encoding: ${changeObj.hash} != ${hexHash}`)
- }
- return (bytes.byteLength >= DEFLATE_MIN_SIZE) ? deflateChange(bytes) : bytes
-}
-
-function decodeChangeColumns(buffer) {
- if (buffer[8] === CHUNK_TYPE_DEFLATE) buffer = inflateChange(buffer)
- const decoder = new Decoder(buffer)
- const header = decodeContainerHeader(decoder, true)
- const chunkDecoder = new Decoder(header.chunkData)
- if (!decoder.done) throw new RangeError('Encoded change has trailing data')
- if (header.chunkType !== CHUNK_TYPE_CHANGE) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`)
-
- const change = decodeChangeHeader(chunkDecoder)
- const columns = decodeColumnInfo(chunkDecoder)
- for (let i = 0; i < columns.length; i++) {
- if ((columns[i].columnId & COLUMN_TYPE_DEFLATE) !== 0) {
- throw new RangeError('change must not contain deflated columns')
- }
- columns[i].buffer = chunkDecoder.readRawBytes(columns[i].bufferLen)
- }
- if (!chunkDecoder.done) {
- const restLen = chunkDecoder.buf.byteLength - chunkDecoder.offset
- change.extraBytes = chunkDecoder.readRawBytes(restLen)
- }
-
- change.columns = columns
- change.hash = header.hash
- return change
-}
-
-/**
- * Decodes one change in binary format into its JS object representation.
- */
-function decodeChange(buffer) {
- const change = decodeChangeColumns(buffer)
- change.ops = decodeOps(decodeColumns(change.columns, change.actorIds, CHANGE_COLUMNS), false)
- delete change.actorIds
- delete change.columns
- return change
-}
-
-/**
- * Decodes the header fields of a change in binary format, but does not decode
- * the operations. Saves work when we only need to inspect the headers. Only
- * computes the hash of the change if `computeHash` is true.
- */
-function decodeChangeMeta(buffer, computeHash) {
- if (buffer[8] === CHUNK_TYPE_DEFLATE) buffer = inflateChange(buffer)
- const header = decodeContainerHeader(new Decoder(buffer), computeHash)
- if (header.chunkType !== CHUNK_TYPE_CHANGE) {
- throw new RangeError('Buffer chunk type is not a change')
- }
- const meta = decodeChangeHeader(new Decoder(header.chunkData))
- meta.change = buffer
- if (computeHash) meta.hash = header.hash
- return meta
-}
-
-/**
- * Compresses a binary change using DEFLATE.
- */
-function deflateChange(buffer) {
- const header = decodeContainerHeader(new Decoder(buffer), false)
- if (header.chunkType !== CHUNK_TYPE_CHANGE) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`)
- const compressed = pako.deflateRaw(header.chunkData)
- const encoder = new Encoder()
- encoder.appendRawBytes(buffer.subarray(0, 8)) // copy MAGIC_BYTES and checksum
- encoder.appendByte(CHUNK_TYPE_DEFLATE)
- encoder.appendUint53(compressed.byteLength)
- encoder.appendRawBytes(compressed)
- return encoder.buffer
-}
-
-/**
- * Decompresses a binary change that has been compressed with DEFLATE.
- */
-function inflateChange(buffer) {
- const header = decodeContainerHeader(new Decoder(buffer), false)
- if (header.chunkType !== CHUNK_TYPE_DEFLATE) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`)
- const decompressed = pako.inflateRaw(header.chunkData)
- const encoder = new Encoder()
- encoder.appendRawBytes(buffer.subarray(0, 8)) // copy MAGIC_BYTES and checksum
- encoder.appendByte(CHUNK_TYPE_CHANGE)
- encoder.appendUint53(decompressed.byteLength)
- encoder.appendRawBytes(decompressed)
- return encoder.buffer
-}
-
-/**
- * Takes an Uint8Array that may contain multiple concatenated changes, and
- * returns an array of subarrays, each subarray containing one change.
- */
-function splitContainers(buffer) {
- let decoder = new Decoder(buffer), chunks = [], startOffset = 0
- while (!decoder.done) {
- decodeContainerHeader(decoder, false)
- chunks.push(buffer.subarray(startOffset, decoder.offset))
- startOffset = decoder.offset
- }
- return chunks
-}
-
-/**
- * Decodes a list of changes from the binary format into JS objects.
- * `binaryChanges` is an array of `Uint8Array` objects.
- */
-function decodeChanges(binaryChanges) {
- let decoded = []
- for (let binaryChange of binaryChanges) {
- for (let chunk of splitContainers(binaryChange)) {
- if (chunk[8] === CHUNK_TYPE_DOCUMENT) {
- decoded = decoded.concat(decodeDocument(chunk))
- } else if (chunk[8] === CHUNK_TYPE_CHANGE || chunk[8] === CHUNK_TYPE_DEFLATE) {
- decoded.push(decodeChange(chunk))
- } else {
- // ignoring chunk of unknown type
- }
- }
- }
- return decoded
-}
-
-function sortOpIds(a, b) {
- if (a === b) return 0
- if (a === '_root') return -1
- if (b === '_root') return +1
- const a_ = parseOpId(a), b_ = parseOpId(b)
- if (a_.counter < b_.counter) return -1
- if (a_.counter > b_.counter) return +1
- if (a_.actorId < b_.actorId) return -1
- if (a_.actorId > b_.actorId) return +1
- return 0
-}
-
-function groupDocumentOps(changes) {
- let byObjectId = {}, byReference = {}, objectType = {}
- for (let change of changes) {
- for (let i = 0; i < change.ops.length; i++) {
- const op = change.ops[i], opId = `${op.id.counter}@${op.id.actorId}`
- const objectId = (op.obj === '_root') ? '_root' : `${op.obj.counter}@${op.obj.actorId}`
- if (op.action.startsWith('make')) {
- objectType[opId] = op.action
- if (op.action === 'makeList' || op.action === 'makeText') {
- byReference[opId] = {'_head': []}
- }
- }
-
- let key
- if (objectId === '_root' || objectType[objectId] === 'makeMap' || objectType[objectId] === 'makeTable') {
- key = op.key
- } else if (objectType[objectId] === 'makeList' || objectType[objectId] === 'makeText') {
- if (op.insert) {
- key = opId
- const ref = (op.elemId === '_head') ? '_head' : `${op.elemId.counter}@${op.elemId.actorId}`
- byReference[objectId][ref].push(opId)
- byReference[objectId][opId] = []
- } else {
- key = `${op.elemId.counter}@${op.elemId.actorId}`
- }
- } else {
- throw new RangeError(`Unknown object type for object ${objectId}`)
- }
-
- if (!byObjectId[objectId]) byObjectId[objectId] = {}
- if (!byObjectId[objectId][key]) byObjectId[objectId][key] = {}
- byObjectId[objectId][key][opId] = op
- op.succ = []
-
- for (let pred of op.pred) {
- const predId = `${pred.counter}@${pred.actorId}`
- if (!byObjectId[objectId][key][predId]) {
- throw new RangeError(`No predecessor operation ${predId}`)
- }
- byObjectId[objectId][key][predId].succ.push(op.id)
- }
- }
- }
-
- let ops = []
- for (let objectId of Object.keys(byObjectId).sort(sortOpIds)) {
- let keys = []
- if (objectType[objectId] === 'makeList' || objectType[objectId] === 'makeText') {
- let stack = ['_head']
- while (stack.length > 0) {
- const key = stack.pop()
- if (key !== '_head') keys.push(key)
- for (let opId of byReference[objectId][key].sort(sortOpIds)) stack.push(opId)
- }
- } else {
- // FIXME JavaScript sorts based on UTF-16 encoding. We should change this to use the UTF-8
- // encoding instead (the sort order will be different beyond the basic multilingual plane)
- keys = Object.keys(byObjectId[objectId]).sort()
- }
-
- for (let key of keys) {
- for (let opId of Object.keys(byObjectId[objectId][key]).sort(sortOpIds)) {
- const op = byObjectId[objectId][key][opId]
- if (op.action !== 'del') ops.push(op)
- }
- }
- }
- return ops
-}
-
-/**
- * Takes a set of operations `ops` loaded from an encoded document, and
- * reconstructs the changes that they originally came from.
- * Does not return anything, only mutates `changes`.
- */
-function groupChangeOps(changes, ops) {
- let changesByActor = {} // map from actorId to array of changes by that actor
- for (let change of changes) {
- change.ops = []
- if (!changesByActor[change.actor]) changesByActor[change.actor] = []
- if (change.seq !== changesByActor[change.actor].length + 1) {
- throw new RangeError(`Expected seq = ${changesByActor[change.actor].length + 1}, got ${change.seq}`)
- }
- if (change.seq > 1 && changesByActor[change.actor][change.seq - 2].maxOp > change.maxOp) {
- throw new RangeError('maxOp must increase monotonically per actor')
- }
- changesByActor[change.actor].push(change)
- }
-
- let opsById = {}
- for (let op of ops) {
- if (op.action === 'del') throw new RangeError('document should not contain del operations')
- op.pred = opsById[op.id] ? opsById[op.id].pred : []
- opsById[op.id] = op
- for (let succ of op.succ) {
- if (!opsById[succ]) {
- if (op.elemId) {
- const elemId = op.insert ? op.id : op.elemId
- opsById[succ] = {id: succ, action: 'del', obj: op.obj, elemId, pred: []}
- } else {
- opsById[succ] = {id: succ, action: 'del', obj: op.obj, key: op.key, pred: []}
- }
- }
- opsById[succ].pred.push(op.id)
- }
- delete op.succ
- }
- for (let op of Object.values(opsById)) {
- if (op.action === 'del') ops.push(op)
- }
-
- for (let op of ops) {
- const { counter, actorId } = parseOpId(op.id)
- const actorChanges = changesByActor[actorId]
- // Binary search to find the change that should contain this operation
- let left = 0, right = actorChanges.length
- while (left < right) {
- const index = Math.floor((left + right) / 2)
- if (actorChanges[index].maxOp < counter) {
- left = index + 1
- } else {
- right = index
- }
- }
- if (left >= actorChanges.length) {
- throw new RangeError(`Operation ID ${op.id} outside of allowed range`)
- }
- actorChanges[left].ops.push(op)
- }
-
- for (let change of changes) {
- change.ops.sort((op1, op2) => sortOpIds(op1.id, op2.id))
- change.startOp = change.maxOp - change.ops.length + 1
- delete change.maxOp
- for (let i = 0; i < change.ops.length; i++) {
- const op = change.ops[i], expectedId = `${change.startOp + i}@${change.actor}`
- if (op.id !== expectedId) {
- throw new RangeError(`Expected opId ${expectedId}, got ${op.id}`)
- }
- delete op.id
- }
- }
-}
-
-function encodeDocumentChanges(changes) {
- const columns = { // see DOCUMENT_COLUMNS
- actor : new RLEEncoder('uint'),
- seq : new DeltaEncoder(),
- maxOp : new DeltaEncoder(),
- time : new DeltaEncoder(),
- message : new RLEEncoder('utf8'),
- depsNum : new RLEEncoder('uint'),
- depsIndex : new DeltaEncoder(),
- extraLen : new RLEEncoder('uint'),
- extraRaw : new Encoder()
- }
- let indexByHash = {} // map from change hash to its index in the changes array
- let heads = {} // change hashes that are not a dependency of any other change
-
- for (let i = 0; i < changes.length; i++) {
- const change = changes[i]
- indexByHash[change.hash] = i
- heads[change.hash] = true
-
- columns.actor.appendValue(change.actorNum)
- columns.seq.appendValue(change.seq)
- columns.maxOp.appendValue(change.startOp + change.ops.length - 1)
- columns.time.appendValue(change.time)
- columns.message.appendValue(change.message)
- columns.depsNum.appendValue(change.deps.length)
-
- for (let dep of change.deps) {
- if (typeof indexByHash[dep] !== 'number') {
- throw new RangeError(`Unknown dependency hash: ${dep}`)
- }
- columns.depsIndex.appendValue(indexByHash[dep])
- if (heads[dep]) delete heads[dep]
- }
-
- if (change.extraBytes) {
- columns.extraLen.appendValue(change.extraBytes.byteLength << 4 | VALUE_TYPE.BYTES)
- columns.extraRaw.appendRawBytes(change.extraBytes)
- } else {
- columns.extraLen.appendValue(VALUE_TYPE.BYTES) // zero-length byte array
- }
- }
-
- let changesColumns = []
- for (let {columnName, columnId} of DOCUMENT_COLUMNS) {
- changesColumns.push({id: columnId, name: columnName, encoder: columns[columnName]})
- }
- changesColumns.sort((a, b) => a.id - b.id)
- return { changesColumns, heads: Object.keys(heads).sort() }
-}
-
-function decodeDocumentChanges(changes, expectedHeads) {
- let heads = {} // change hashes that are not a dependency of any other change
- for (let i = 0; i < changes.length; i++) {
- let change = changes[i]
- change.deps = []
- for (let index of change.depsNum.map(d => d.depsIndex)) {
- if (!changes[index] || !changes[index].hash) {
- throw new RangeError(`No hash for index ${index} while processing index ${i}`)
- }
- const hash = changes[index].hash
- change.deps.push(hash)
- if (heads[hash]) delete heads[hash]
- }
- change.deps.sort()
- delete change.depsNum
-
- if (change.extraLen_datatype !== VALUE_TYPE.BYTES) {
- throw new RangeError(`Bad datatype for extra bytes: ${VALUE_TYPE.BYTES}`)
- }
- change.extraBytes = change.extraLen
- delete change.extraLen_datatype
-
- // Encoding and decoding again to compute the hash of the change
- changes[i] = decodeChange(encodeChange(change))
- heads[changes[i].hash] = true
- }
-
- const actualHeads = Object.keys(heads).sort()
- let headsEqual = (actualHeads.length === expectedHeads.length), i = 0
- while (headsEqual && i < actualHeads.length) {
- headsEqual = (actualHeads[i] === expectedHeads[i])
- i++
- }
- if (!headsEqual) {
- throw new RangeError(`Mismatched heads hashes: expected ${expectedHeads.join(', ')}, got ${actualHeads.join(', ')}`)
- }
-}
-
-/**
- * Transforms a list of changes into a binary representation of the document state.
- */
-function encodeDocument(binaryChanges) {
- const { changes, actorIds } = parseAllOpIds(decodeChanges(binaryChanges), false)
- const { changesColumns, heads } = encodeDocumentChanges(changes)
- const opsColumns = encodeOps(groupDocumentOps(changes), true)
- for (let column of changesColumns) deflateColumn(column)
- for (let column of opsColumns) deflateColumn(column)
-
- return encodeContainer(CHUNK_TYPE_DOCUMENT, encoder => {
- encoder.appendUint53(actorIds.length)
- for (let actor of actorIds) {
- encoder.appendHexString(actor)
- }
- encoder.appendUint53(heads.length)
- for (let head of heads.sort()) {
- encoder.appendRawBytes(hexStringToBytes(head))
- }
- encodeColumnInfo(encoder, changesColumns)
- encodeColumnInfo(encoder, opsColumns)
- for (let column of changesColumns) encoder.appendRawBytes(column.encoder.buffer)
- for (let column of opsColumns) encoder.appendRawBytes(column.encoder.buffer)
- }).bytes
-}
-
-function decodeDocumentHeader(buffer) {
- const documentDecoder = new Decoder(buffer)
- const header = decodeContainerHeader(documentDecoder, true)
- const decoder = new Decoder(header.chunkData)
- if (!documentDecoder.done) throw new RangeError('Encoded document has trailing data')
- if (header.chunkType !== CHUNK_TYPE_DOCUMENT) throw new RangeError(`Unexpected chunk type: ${header.chunkType}`)
-
- const actorIds = [], numActors = decoder.readUint53()
- for (let i = 0; i < numActors; i++) {
- actorIds.push(decoder.readHexString())
- }
- const heads = [], numHeads = decoder.readUint53()
- for (let i = 0; i < numHeads; i++) {
- heads.push(bytesToHexString(decoder.readRawBytes(32)))
- }
-
- const changesColumns = decodeColumnInfo(decoder)
- const opsColumns = decodeColumnInfo(decoder)
- for (let i = 0; i < changesColumns.length; i++) {
- changesColumns[i].buffer = decoder.readRawBytes(changesColumns[i].bufferLen)
- inflateColumn(changesColumns[i])
- }
- for (let i = 0; i < opsColumns.length; i++) {
- opsColumns[i].buffer = decoder.readRawBytes(opsColumns[i].bufferLen)
- inflateColumn(opsColumns[i])
- }
-
- const extraBytes = decoder.readRawBytes(decoder.buf.byteLength - decoder.offset)
- return { changesColumns, opsColumns, actorIds, heads, extraBytes }
-}
-
-function decodeDocument(buffer) {
- const { changesColumns, opsColumns, actorIds, heads } = decodeDocumentHeader(buffer)
- const changes = decodeColumns(changesColumns, actorIds, DOCUMENT_COLUMNS)
- const ops = decodeOps(decodeColumns(opsColumns, actorIds, DOC_OPS_COLUMNS), true)
- groupChangeOps(changes, ops)
- decodeDocumentChanges(changes, heads)
- return changes
-}
-
-/**
- * DEFLATE-compresses the given column if it is large enough to make the compression worthwhile.
- */
-function deflateColumn(column) {
- if (column.encoder.buffer.byteLength >= DEFLATE_MIN_SIZE) {
- column.encoder = {buffer: pako.deflateRaw(column.encoder.buffer)}
- column.id |= COLUMN_TYPE_DEFLATE
- }
-}
-
-/**
- * Decompresses the given column if it is DEFLATE-compressed.
- */
-function inflateColumn(column) {
- if ((column.columnId & COLUMN_TYPE_DEFLATE) !== 0) {
- column.buffer = pako.inflateRaw(column.buffer)
- column.columnId ^= COLUMN_TYPE_DEFLATE
- }
-}
-
-/**
- * Takes all the operations for the same property (i.e. the same key in a map, or the same list
- * element) and mutates the object patch to reflect the current value(s) of that property. There
- * might be multiple values in the case of a conflict. `objects` is a map from objectId to the
- * patch for that object. `property` contains `objId`, `key`, a list of `ops`, and `index` (the
- * current list index if the object is a list). Returns true if one or more values are present,
- * or false if the property has been deleted.
- */
-function addPatchProperty(objects, property) {
- let values = {}, counter = null
- for (let op of property.ops) {
- // Apply counters and their increments regardless of the number of successor operations
- if (op.actionName === 'set' && op.value.datatype === 'counter') {
- if (!counter) counter = {opId: op.opId, value: 0, succ: {}}
- counter.value += op.value.value
- for (let succId of op.succ) counter.succ[succId] = true
- } else if (op.actionName === 'inc') {
- if (!counter) throw new RangeError(`inc operation ${op.opId} without a counter`)
- counter.value += op.value.value
- delete counter.succ[op.opId]
- for (let succId of op.succ) counter.succ[succId] = true
-
- } else if (op.succ.length === 0) { // Ignore any ops that have been overwritten
- if (op.actionName.startsWith('make')) {
- values[op.opId] = objects[op.opId]
- } else if (op.actionName === 'set') {
- values[op.opId] = {value: op.value.value, type: 'value'}
- if (op.value.datatype) {
- values[op.opId].datatype = op.value.datatype
- }
- } else if (op.actionName === 'link') {
- // NB. This assumes that the ID of the child object is greater than the ID of the current
- // object. This is true as long as link operations are only used to redo undone make*
- // operations, but it will cease to be true once subtree moves are allowed.
- if (!op.childId) throw new RangeError(`link operation ${op.opId} without a childId`)
- values[op.opId] = objects[op.childId]
- } else {
- throw new RangeError(`Unexpected action type: ${op.actionName}`)
- }
- }
- }
-
- // If the counter had any successor operation that was not an increment, that means the counter
- // must have been deleted, so we omit it from the patch.
- if (counter && Object.keys(counter.succ).length === 0) {
- values[counter.opId] = {type: 'value', value: counter.value, datatype: 'counter'}
- }
-
- if (Object.keys(values).length > 0) {
- let obj = objects[property.objId]
- if (obj.type === 'map' || obj.type === 'table') {
- obj.props[property.key] = values
- } else if (obj.type === 'list' || obj.type === 'text') {
- makeListEdits(obj, values, property.key, property.index)
- }
- return true
- } else {
- return false
- }
-}
-
-/**
- * When constructing a patch to instantiate a loaded document, this function adds the edits to
- * insert one list element. Usually there is one value, but in the case of a conflict there may be
- * several values. `elemId` is the ID of the list element, and `index` is the list index at which
- * the value(s) should be placed.
- */
-function makeListEdits(list, values, elemId, index) {
- let firstValue = true
- const opIds = Object.keys(values).sort((id1, id2) => compareParsedOpIds(parseOpId(id1), parseOpId(id2)))
- for (const opId of opIds) {
- if (firstValue) {
- list.edits.push({action: 'insert', value: values[opId], elemId, opId, index})
- } else {
- list.edits.push({action: 'update', value: values[opId], opId, index})
- }
- firstValue = false
- }
-}
-
-/**
- * Recursively walks the patch tree, calling appendEdit on every list edit in order to consense
- * consecutive sequences of insertions into multi-inserts.
- */
-function condenseEdits(diff) {
- if (diff.type === 'list' || diff.type === 'text') {
- diff.edits.forEach(e => condenseEdits(e.value))
- let newEdits = diff.edits
- diff.edits = []
- for (const edit of newEdits) appendEdit(diff.edits, edit)
- } else if (diff.type === 'map' || diff.type === 'table') {
- for (const prop of Object.keys(diff.props)) {
- for (const opId of Object.keys(diff.props[prop])) {
- condenseEdits(diff.props[prop][opId])
- }
- }
- }
-}
-
-/**
- * Appends a list edit operation (insert, update, remove) to an array of existing operations. If the
- * last existing operation can be extended (as a multi-op), we do that.
- */
-function appendEdit(existingEdits, nextEdit) {
- if (existingEdits.length === 0) {
- existingEdits.push(nextEdit)
- return
- }
-
- let lastEdit = existingEdits[existingEdits.length - 1]
- if (lastEdit.action === 'insert' && nextEdit.action === 'insert' &&
- lastEdit.index === nextEdit.index - 1 &&
- lastEdit.value.type === 'value' && nextEdit.value.type === 'value' &&
- lastEdit.elemId === lastEdit.opId && nextEdit.elemId === nextEdit.opId &&
- opIdDelta(lastEdit.elemId, nextEdit.elemId, 1)) {
- lastEdit.action = 'multi-insert'
- lastEdit.values = [lastEdit.value.value, nextEdit.value.value]
- delete lastEdit.value
- delete lastEdit.opId
-
- } else if (lastEdit.action === 'multi-insert' && nextEdit.action === 'insert' &&
- lastEdit.index + lastEdit.values.length === nextEdit.index &&
- nextEdit.value.type === 'value' && nextEdit.elemId === nextEdit.opId &&
- opIdDelta(lastEdit.elemId, nextEdit.elemId, lastEdit.values.length)) {
- lastEdit.values.push(nextEdit.value.value)
-
- } else if (lastEdit.action === 'remove' && nextEdit.action === 'remove' &&
- lastEdit.index === nextEdit.index) {
- lastEdit.count += nextEdit.count
-
- } else {
- existingEdits.push(nextEdit)
- }
-}
-
-/**
- * Returns true if the two given operation IDs have the same actor ID, and the counter of `id2` is
- * exactly `delta` greater than the counter of `id1`.
- */
-function opIdDelta(id1, id2, delta = 1) {
- const parsed1 = parseOpId(id1), parsed2 = parseOpId(id2)
- return parsed1.actorId === parsed2.actorId && parsed1.counter + delta === parsed2.counter
-}
-
-/**
- * Parses the document (in compressed binary format) given as `documentBuffer`
- * and returns a patch that can be sent to the frontend to instantiate the
- * current state of that document.
- */
-function constructPatch(documentBuffer) {
- const { opsColumns, actorIds } = decodeDocumentHeader(documentBuffer)
- const col = makeDecoders(opsColumns, DOC_OPS_COLUMNS).reduce(
- (acc, col) => Object.assign(acc, {[col.columnName]: col.decoder}), {})
-
- let objects = {_root: {objectId: '_root', type: 'map', props: {}}}
- let property = null
-
- while (!col.idActor.done) {
- const opId = `${col.idCtr.readValue()}@${actorIds[col.idActor.readValue()]}`
- const action = col.action.readValue(), actionName = ACTIONS[action]
- if (action % 2 === 0) { // even-numbered actions are object creation
- const type = OBJECT_TYPE[actionName] || 'unknown'
- if (type === 'list' || type === 'text') {
- objects[opId] = {objectId: opId, type, edits: []}
- } else {
- objects[opId] = {objectId: opId, type, props: {}}
- }
- }
-
- const objActor = col.objActor.readValue(), objCtr = col.objCtr.readValue()
- const objId = objActor === null ? '_root' : `${objCtr}@${actorIds[objActor]}`
- let obj = objects[objId]
- if (!obj) throw new RangeError(`Operation for nonexistent object: ${objId}`)
-
- const keyActor = col.keyActor.readValue(), keyCtr = col.keyCtr.readValue()
- const keyStr = col.keyStr.readValue(), insert = !!col.insert.readValue()
- const chldActor = col.chldActor.readValue(), chldCtr = col.chldCtr.readValue()
- const childId = chldActor === null ? null : `${chldCtr}@${actorIds[chldActor]}`
- const sizeTag = col.valLen.readValue()
- const rawValue = col.valRaw.readRawBytes(sizeTag >> 4)
- const value = decodeValue(sizeTag, rawValue)
- const succNum = col.succNum.readValue()
- let succ = []
- for (let i = 0; i < succNum; i++) {
- succ.push(`${col.succCtr.readValue()}@${actorIds[col.succActor.readValue()]}`)
- }
-
- if (!actionName || obj.type === 'unknown') continue
-
- let key
- if (obj.type === 'list' || obj.type === 'text') {
- if (keyCtr === null || (keyCtr === 0 && !insert)) {
- throw new RangeError(`Operation ${opId} on ${obj.type} object has no key`)
- }
- key = insert ? opId : `${keyCtr}@${actorIds[keyActor]}`
- } else {
- if (keyStr === null) {
- throw new RangeError(`Operation ${opId} on ${obj.type} object has no key`)
- }
- key = keyStr
- }
-
- if (!property || property.objId !== objId || property.key !== key) {
- let index = 0
- if (property) {
- index = property.index
- if (addPatchProperty(objects, property)) index += 1
- if (property.objId !== objId) index = 0
- }
- property = {objId, key, index, ops: []}
- }
- property.ops.push({opId, actionName, value, childId, succ})
- }
-
- if (property) addPatchProperty(objects, property)
- condenseEdits(objects._root)
- return objects._root
-}
-
-module.exports = {
- COLUMN_TYPE, VALUE_TYPE, ACTIONS, OBJECT_TYPE, DOC_OPS_COLUMNS, CHANGE_COLUMNS,
- encoderByColumnId, decoderByColumnId, makeDecoders, decodeValue,
- splitContainers, encodeChange, decodeChangeColumns, decodeChange, decodeChangeMeta, decodeChanges,
- decodeDocumentHeader, encodeDocument, decodeDocument,
- getChangeChecksum, appendEdit, constructPatch
-}
diff --git a/automerge-wasm/web-index.js b/automerge-wasm/web-index.js
deleted file mode 100644
index ab9e8a1d..00000000
--- a/automerge-wasm/web-index.js
+++ /dev/null
@@ -1,13 +0,0 @@
-export {
- loadDoc as load,
- create,
- encodeChange,
- decodeChange,
- initSyncState,
- encodeSyncMessage,
- decodeSyncMessage,
- encodeSyncState,
- decodeSyncState,
-} from "./bindgen.js"
-import init from "./bindgen.js"
-export default init;
diff --git a/automerge/benches/map.rs b/automerge/benches/map.rs
deleted file mode 100644
index 19141d29..00000000
--- a/automerge/benches/map.rs
+++ /dev/null
@@ -1,48 +0,0 @@
-use automerge::{transaction::Transactable, Automerge, ROOT};
-use criterion::{criterion_group, criterion_main, Criterion};
-
-fn query_single(doc: &Automerge, rounds: u32) {
- for _ in 0..rounds {
- // repeatedly get the last key
- doc.get(ROOT, (rounds - 1).to_string()).unwrap();
- }
-}
-
-fn query_range(doc: &Automerge, rounds: u32) {
- for i in 0..rounds {
- doc.get(ROOT, i.to_string()).unwrap();
- }
-}
-
-fn put_doc(doc: &mut Automerge, rounds: u32) {
- for i in 0..rounds {
- let mut tx = doc.transaction();
- tx.put(ROOT, i.to_string(), "value").unwrap();
- tx.commit();
- }
-}
-
-fn bench(c: &mut Criterion) {
- let mut group = c.benchmark_group("map");
-
- let rounds = 10_000;
- let mut doc = Automerge::new();
- put_doc(&mut doc, rounds);
-
- group.bench_function("query single", |b| b.iter(|| query_single(&doc, rounds)));
-
- group.bench_function("query range", |b| b.iter(|| query_range(&doc, rounds)));
-
- group.bench_function("put", |b| {
- b.iter_batched(
- Automerge::new,
- |mut doc| put_doc(&mut doc, rounds),
- criterion::BatchSize::LargeInput,
- )
- });
-
- group.finish();
-}
-
-criterion_group!(benches, bench);
-criterion_main!(benches);
diff --git a/automerge/src/autocommit.rs b/automerge/src/autocommit.rs
deleted file mode 100644
index 27b4e9af..00000000
--- a/automerge/src/autocommit.rs
+++ /dev/null
@@ -1,482 +0,0 @@
-use std::ops::RangeBounds;
-
-use crate::exid::ExId;
-use crate::op_observer::OpObserver;
-use crate::transaction::{CommitOptions, Transactable};
-use crate::{
- sync, ApplyOptions, Keys, KeysAt, ObjType, Parents, Range, RangeAt, ScalarValue, Values,
- ValuesAt,
-};
-use crate::{
- transaction::TransactionInner, ActorId, Automerge, AutomergeError, Change, ChangeHash, Prop,
- Value,
-};
-
-/// An automerge document that automatically manages transactions.
-#[derive(Debug, Clone)]
-pub struct AutoCommit {
- doc: Automerge,
- transaction: Option,
-}
-
-impl Default for AutoCommit {
- fn default() -> Self {
- Self::new()
- }
-}
-
-impl AutoCommit {
- pub fn new() -> Self {
- Self {
- doc: Automerge::new(),
- transaction: None,
- }
- }
-
- /// Get the inner document.
- #[doc(hidden)]
- pub fn document(&mut self) -> &Automerge {
- self.ensure_transaction_closed();
- &self.doc
- }
-
- pub fn with_actor(mut self, actor: ActorId) -> Self {
- self.ensure_transaction_closed();
- self.doc.set_actor(actor);
- self
- }
-
- pub fn set_actor(&mut self, actor: ActorId) -> &mut Self {
- self.ensure_transaction_closed();
- self.doc.set_actor(actor);
- self
- }
-
- pub fn get_actor(&self) -> &ActorId {
- self.doc.get_actor()
- }
-
- fn ensure_transaction_open(&mut self) {
- if self.transaction.is_none() {
- self.transaction = Some(self.doc.transaction_inner());
- }
- }
-
- pub fn fork(&mut self) -> Self {
- self.ensure_transaction_closed();
- Self {
- doc: self.doc.fork(),
- transaction: self.transaction.clone(),
- }
- }
-
- pub fn fork_at(&mut self, heads: &[ChangeHash]) -> Result {
- self.ensure_transaction_closed();
- Ok(Self {
- doc: self.doc.fork_at(heads)?,
- transaction: self.transaction.clone(),
- })
- }
-
- fn ensure_transaction_closed(&mut self) {
- if let Some(tx) = self.transaction.take() {
- tx.commit::<()>(&mut self.doc, None, None, None);
- }
- }
-
- pub fn load(data: &[u8]) -> Result {
- let doc = Automerge::load(data)?;
- Ok(Self {
- doc,
- transaction: None,
- })
- }
-
- pub fn load_with(
- data: &[u8],
- options: ApplyOptions<'_, Obs>,
- ) -> Result {
- let doc = Automerge::load_with(data, options)?;
- Ok(Self {
- doc,
- transaction: None,
- })
- }
-
- pub fn load_incremental(&mut self, data: &[u8]) -> Result {
- self.ensure_transaction_closed();
- self.doc.load_incremental(data)
- }
-
- pub fn load_incremental_with<'a, Obs: OpObserver>(
- &mut self,
- data: &[u8],
- options: ApplyOptions<'a, Obs>,
- ) -> Result {
- self.ensure_transaction_closed();
- self.doc.load_incremental_with(data, options)
- }
-
- pub fn apply_changes(&mut self, changes: Vec) -> Result<(), AutomergeError> {
- self.ensure_transaction_closed();
- self.doc.apply_changes(changes)
- }
-
- pub fn apply_changes_with(
- &mut self,
- changes: Vec,
- options: ApplyOptions<'_, Obs>,
- ) -> Result<(), AutomergeError> {
- self.ensure_transaction_closed();
- self.doc.apply_changes_with(changes, options)
- }
-
- /// Takes all the changes in `other` which are not in `self` and applies them
- pub fn merge(&mut self, other: &mut Self) -> Result, AutomergeError> {
- self.ensure_transaction_closed();
- other.ensure_transaction_closed();
- self.doc.merge(&mut other.doc)
- }
-
- /// Takes all the changes in `other` which are not in `self` and applies them
- pub fn merge_with<'a, Obs: OpObserver>(
- &mut self,
- other: &mut Self,
- options: ApplyOptions<'a, Obs>,
- ) -> Result, AutomergeError> {
- self.ensure_transaction_closed();
- other.ensure_transaction_closed();
- self.doc.merge_with(&mut other.doc, options)
- }
-
- pub fn save(&mut self) -> Vec {
- self.ensure_transaction_closed();
- self.doc.save()
- }
-
- // should this return an empty vec instead of None?
- pub fn save_incremental(&mut self) -> Vec {
- self.ensure_transaction_closed();
- self.doc.save_incremental()
- }
-
- pub fn get_missing_deps(&mut self, heads: &[ChangeHash]) -> Vec {
- self.ensure_transaction_closed();
- self.doc.get_missing_deps(heads)
- }
-
- pub fn get_last_local_change(&mut self) -> Option<&Change> {
- self.ensure_transaction_closed();
- self.doc.get_last_local_change()
- }
-
- pub fn get_changes(&mut self, have_deps: &[ChangeHash]) -> Vec<&Change> {
- self.ensure_transaction_closed();
- self.doc.get_changes(have_deps)
- }
-
- pub fn get_change_by_hash(&mut self, hash: &ChangeHash) -> Option<&Change> {
- self.ensure_transaction_closed();
- self.doc.get_change_by_hash(hash)
- }
-
- pub fn get_changes_added<'a>(&mut self, other: &'a mut Self) -> Vec<&'a Change> {
- self.ensure_transaction_closed();
- other.ensure_transaction_closed();
- self.doc.get_changes_added(&other.doc)
- }
-
- pub fn import(&self, s: &str) -> Result {
- self.doc.import(s)
- }
-
- pub fn dump(&mut self) {
- self.ensure_transaction_closed();
- self.doc.dump()
- }
-
- pub fn generate_sync_message(&mut self, sync_state: &mut sync::State) -> Option {
- self.ensure_transaction_closed();
- self.doc.generate_sync_message(sync_state)
- }
-
- pub fn receive_sync_message(
- &mut self,
- sync_state: &mut sync::State,
- message: sync::Message,
- ) -> Result<(), AutomergeError> {
- self.ensure_transaction_closed();
- self.doc.receive_sync_message(sync_state, message)
- }
-
- pub fn receive_sync_message_with<'a, Obs: OpObserver>(
- &mut self,
- sync_state: &mut sync::State,
- message: sync::Message,
- options: ApplyOptions<'a, Obs>,
- ) -> Result<(), AutomergeError> {
- self.ensure_transaction_closed();
- self.doc
- .receive_sync_message_with(sync_state, message, options)
- }
-
- #[cfg(feature = "optree-visualisation")]
- pub fn visualise_optree(&self) -> String {
- self.doc.visualise_optree()
- }
-
- /// Get the current heads of the document.
- ///
- /// This closes the transaction first, if one is in progress.
- pub fn get_heads(&mut self) -> Vec {
- self.ensure_transaction_closed();
- self.doc.get_heads()
- }
-
- pub fn commit(&mut self) -> ChangeHash {
- self.commit_with::<()>(CommitOptions::default())
- }
-
- /// Commit the current operations with some options.
- ///
- /// ```
- /// # use automerge::transaction::CommitOptions;
- /// # use automerge::transaction::Transactable;
- /// # use automerge::ROOT;
- /// # use automerge::AutoCommit;
- /// # use automerge::ObjType;
- /// # use std::time::SystemTime;
- /// let mut doc = AutoCommit::new();
- /// doc.put_object(&ROOT, "todos", ObjType::List).unwrap();
- /// let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs() as
- /// i64;
- /// doc.commit_with::<()>(CommitOptions::default().with_message("Create todos list").with_time(now));
- /// ```
- pub fn commit_with(&mut self, options: CommitOptions<'_, Obs>) -> ChangeHash {
- // ensure that even no changes triggers a change
- self.ensure_transaction_open();
- let tx = self.transaction.take().unwrap();
- tx.commit(
- &mut self.doc,
- options.message,
- options.time,
- options.op_observer,
- )
- }
-
- pub fn rollback(&mut self) -> usize {
- self.transaction
- .take()
- .map(|tx| tx.rollback(&mut self.doc))
- .unwrap_or(0)
- }
-}
-
-impl Transactable for AutoCommit {
- fn pending_ops(&self) -> usize {
- self.transaction
- .as_ref()
- .map(|t| t.pending_ops())
- .unwrap_or(0)
- }
-
- // KeysAt::()
- // LenAt::()
- // PropAt::()
- // NthAt::()
-
- fn keys>(&self, obj: O) -> Keys<'_, '_> {
- self.doc.keys(obj)
- }
-
- fn keys_at>(&self, obj: O, heads: &[ChangeHash]) -> KeysAt<'_, '_> {
- self.doc.keys_at(obj, heads)
- }
-
- fn range, R: RangeBounds>(&self, obj: O, range: R) -> Range<'_, R> {
- self.doc.range(obj, range)
- }
-
- fn range_at, R: RangeBounds>(
- &self,
- obj: O,
- range: R,
- heads: &[ChangeHash],
- ) -> RangeAt<'_, R> {
- self.doc.range_at(obj, range, heads)
- }
-
- fn values>(&self, obj: O) -> Values<'_> {
- self.doc.values(obj)
- }
-
- fn values_at>(&self, obj: O, heads: &[ChangeHash]) -> ValuesAt<'_> {
- self.doc.values_at(obj, heads)
- }
-
- fn length>(&self, obj: O) -> usize {
- self.doc.length(obj)
- }
-
- fn length_at>(&self, obj: O, heads: &[ChangeHash]) -> usize {
- self.doc.length_at(obj, heads)
- }
-
- fn object_type>(&self, obj: O) -> Option {
- self.doc.object_type(obj)
- }
-
- // set(obj, prop, value) - value can be scalar or objtype
- // del(obj, prop)
- // inc(obj, prop, value)
- // insert(obj, index, value)
-
- /// Set the value of property `P` to value `V` in object `obj`.
- ///
- /// # Returns
- ///
- /// The opid of the operation which was created, or None if this operation doesn't change the
- /// document or create a new object.
- ///
- /// # Errors
- ///
- /// This will return an error if
- /// - The object does not exist
- /// - The key is the wrong type for the object
- /// - The key does not exist in the object
- fn put, P: Into, V: Into>(
- &mut self,
- obj: O,
- prop: P,
- value: V,
- ) -> Result<(), AutomergeError> {
- self.ensure_transaction_open();
- let tx = self.transaction.as_mut().unwrap();
- tx.put(&mut self.doc, obj.as_ref(), prop, value)
- }
-
- fn put_object, P: Into>(
- &mut self,
- obj: O,
- prop: P,
- value: ObjType,
- ) -> Result {
- self.ensure_transaction_open();
- let tx = self.transaction.as_mut().unwrap();
- tx.put_object(&mut self.doc, obj.as_ref(), prop, value)
- }
-
- fn insert, V: Into>(
- &mut self,
- obj: O,
- index: usize,
- value: V,
- ) -> Result<(), AutomergeError> {
- self.ensure_transaction_open();
- let tx = self.transaction.as_mut().unwrap();
- tx.insert(&mut self.doc, obj.as_ref(), index, value)
- }
-
- fn insert_object>(
- &mut self,
- obj: O,
- index: usize,
- value: ObjType,
- ) -> Result {
- self.ensure_transaction_open();
- let tx = self.transaction.as_mut().unwrap();
- tx.insert_object(&mut self.doc, obj.as_ref(), index, value)
- }
-
- fn increment, P: Into>(
- &mut self,
- obj: O,
- prop: P,
- value: i64,
- ) -> Result<(), AutomergeError> {
- self.ensure_transaction_open();
- let tx = self.transaction.as_mut().unwrap();
- tx.increment(&mut self.doc, obj.as_ref(), prop, value)
- }
-
- fn delete, P: Into>(
- &mut self,
- obj: O,
- prop: P,
- ) -> Result<(), AutomergeError> {
- self.ensure_transaction_open();
- let tx = self.transaction.as_mut().unwrap();
- tx.delete(&mut self.doc, obj.as_ref(), prop)
- }
-
- /// Splice new elements into the given sequence. Returns a vector of the OpIds used to insert
- /// the new elements
- fn splice, V: IntoIterator- >(
- &mut self,
- obj: O,
- pos: usize,
- del: usize,
- vals: V,
- ) -> Result<(), AutomergeError> {
- self.ensure_transaction_open();
- let tx = self.transaction.as_mut().unwrap();
- tx.splice(&mut self.doc, obj.as_ref(), pos, del, vals)
- }
-
- fn text
>(&self, obj: O) -> Result {
- self.doc.text(obj)
- }
-
- fn text_at>(
- &self,
- obj: O,
- heads: &[ChangeHash],
- ) -> Result {
- self.doc.text_at(obj, heads)
- }
-
- // TODO - I need to return these OpId's here **only** to get
- // the legacy conflicts format of { [opid]: value }
- // Something better?
- fn get, P: Into>(
- &self,
- obj: O,
- prop: P,
- ) -> Result, ExId)>, AutomergeError> {
- self.doc.get(obj, prop)
- }
-
- fn get_at, P: Into>(
- &self,
- obj: O,
- prop: P,
- heads: &[ChangeHash],
- ) -> Result, ExId)>, AutomergeError> {
- self.doc.get_at(obj, prop, heads)
- }
-
- fn get_all, P: Into>(
- &self,
- obj: O,
- prop: P,
- ) -> Result, ExId)>, AutomergeError> {
- self.doc.get_all(obj, prop)
- }
-
- fn get_all_at, P: Into>(
- &self,
- obj: O,
- prop: P,
- heads: &[ChangeHash],
- ) -> Result, ExId)>, AutomergeError> {
- self.doc.get_all_at(obj, prop, heads)
- }
-
- fn parent_object>(&self, obj: O) -> Option<(ExId, Prop)> {
- self.doc.parent_object(obj)
- }
-
- fn parents(&self, obj: ExId) -> Parents<'_> {
- self.doc.parents(obj)
- }
-}
diff --git a/automerge/src/automerge.rs b/automerge/src/automerge.rs
deleted file mode 100644
index da9004d6..00000000
--- a/automerge/src/automerge.rs
+++ /dev/null
@@ -1,2100 +0,0 @@
-use std::collections::{HashMap, HashSet, VecDeque};
-use std::fmt::Debug;
-use std::num::NonZeroU64;
-use std::ops::RangeBounds;
-
-use crate::change::encode_document;
-use crate::exid::ExId;
-use crate::keys::Keys;
-use crate::op_observer::OpObserver;
-use crate::op_set::OpSet;
-use crate::parents::Parents;
-use crate::range::Range;
-use crate::transaction::{self, CommitOptions, Failure, Success, Transaction, TransactionInner};
-use crate::types::{
- ActorId, ChangeHash, Clock, ElemId, Export, Exportable, Key, ObjId, Op, OpId, OpType,
- ScalarValue, Value,
-};
-use crate::{legacy, query, types, ApplyOptions, ObjType, RangeAt, ValuesAt};
-use crate::{AutomergeError, Change, Prop};
-use crate::{KeysAt, Values};
-use serde::Serialize;
-
-#[derive(Debug, Clone, PartialEq)]
-pub(crate) enum Actor {
- Unused(ActorId),
- Cached(usize),
-}
-
-/// An automerge document.
-#[derive(Debug, Clone)]
-pub struct Automerge {
- /// The list of unapplied changes that are not causally ready.
- pub(crate) queue: Vec,
- /// The history of changes that form this document, topologically sorted too.
- pub(crate) history: Vec,
- /// Mapping from change hash to index into the history list.
- pub(crate) history_index: HashMap,
- /// Mapping from actor index to list of seqs seen for them.
- pub(crate) states: HashMap>,
- /// Current dependencies of this document (heads hashes).
- pub(crate) deps: HashSet,
- /// Heads at the last save.
- pub(crate) saved: Vec,
- /// The set of operations that form this document.
- pub(crate) ops: OpSet,
- /// The current actor.
- pub(crate) actor: Actor,
- /// The maximum operation counter this document has seen.
- pub(crate) max_op: u64,
-}
-
-impl Automerge {
- /// Create a new document with a random actor id.
- pub fn new() -> Self {
- Automerge {
- queue: vec![],
- history: vec![],
- history_index: HashMap::new(),
- states: HashMap::new(),
- ops: Default::default(),
- deps: Default::default(),
- saved: Default::default(),
- actor: Actor::Unused(ActorId::random()),
- max_op: 0,
- }
- }
-
- /// Set the actor id for this document.
- pub fn with_actor(mut self, actor: ActorId) -> Self {
- self.actor = Actor::Unused(actor);
- self
- }
-
- /// Set the actor id for this document.
- pub fn set_actor(&mut self, actor: ActorId) -> &mut Self {
- self.actor = Actor::Unused(actor);
- self
- }
-
- /// Get the current actor id of this document.
- pub fn get_actor(&self) -> &ActorId {
- match &self.actor {
- Actor::Unused(actor) => actor,
- Actor::Cached(index) => self.ops.m.actors.get(*index),
- }
- }
-
- pub(crate) fn get_actor_index(&mut self) -> usize {
- match &mut self.actor {
- Actor::Unused(actor) => {
- let index = self
- .ops
- .m
- .actors
- .cache(std::mem::replace(actor, ActorId::from(&[][..])));
- self.actor = Actor::Cached(index);
- index
- }
- Actor::Cached(index) => *index,
- }
- }
-
- /// Start a transaction.
- pub fn transaction(&mut self) -> Transaction<'_> {
- Transaction {
- inner: Some(self.transaction_inner()),
- doc: self,
- }
- }
-
- pub(crate) fn transaction_inner(&mut self) -> TransactionInner {
- let actor = self.get_actor_index();
- let seq = self.states.get(&actor).map_or(0, |v| v.len()) as u64 + 1;
- let mut deps = self.get_heads();
- if seq > 1 {
- let last_hash = self.get_hash(actor, seq - 1).unwrap();
- if !deps.contains(&last_hash) {
- deps.push(last_hash);
- }
- }
-
- TransactionInner {
- actor,
- seq,
- // SAFETY: this unwrap is safe as we always add 1
- start_op: NonZeroU64::new(self.max_op + 1).unwrap(),
- time: 0,
- message: None,
- extra_bytes: Default::default(),
- hash: None,
- operations: vec![],
- deps,
- }
- }
-
- /// Run a transaction on this document in a closure, automatically handling commit or rollback
- /// afterwards.
- pub fn transact(&mut self, f: F) -> transaction::Result
- where
- F: FnOnce(&mut Transaction<'_>) -> Result,
- {
- let mut tx = self.transaction();
- let result = f(&mut tx);
- match result {
- Ok(result) => Ok(Success {
- result,
- hash: tx.commit(),
- }),
- Err(error) => Err(Failure {
- error,
- cancelled: tx.rollback(),
- }),
- }
- }
-
- /// Like [`Self::transact`] but with a function for generating the commit options.
- pub fn transact_with<'a, F, O, E, C, Obs>(&mut self, c: C, f: F) -> transaction::Result
- where
- F: FnOnce(&mut Transaction<'_>) -> Result,
- C: FnOnce(&O) -> CommitOptions<'a, Obs>,
- Obs: 'a + OpObserver,
- {
- let mut tx = self.transaction();
- let result = f(&mut tx);
- match result {
- Ok(result) => {
- let commit_options = c(&result);
- let hash = tx.commit_with(commit_options);
- Ok(Success { result, hash })
- }
- Err(error) => Err(Failure {
- error,
- cancelled: tx.rollback(),
- }),
- }
- }
-
- /// Fork this document at the current point for use by a different actor.
- pub fn fork(&self) -> Self {
- let mut f = self.clone();
- f.set_actor(ActorId::random());
- f
- }
-
- /// Fork this document at the give heads
- pub fn fork_at(&self, heads: &[ChangeHash]) -> Result {
- let mut seen = heads.iter().cloned().collect::>();
- let mut heads = heads.to_vec();
- let mut changes = vec![];
- while let Some(hash) = heads.pop() {
- if let Some(idx) = self.history_index.get(&hash) {
- let change = &self.history[*idx];
- for dep in &change.deps {
- if !seen.contains(dep) {
- heads.push(*dep);
- }
- }
- changes.push(change);
- seen.insert(hash);
- } else {
- return Err(AutomergeError::InvalidHash(hash));
- }
- }
- let mut f = Self::new();
- f.set_actor(ActorId::random());
- f.apply_changes(changes.into_iter().rev().cloned())?;
- Ok(f)
- }
-
- // KeysAt::()
- // LenAt::()
- // PropAt::()
- // NthAt::()
-
- /// Get the object id of the object that contains this object and the prop that this object is
- /// at in that object.
- pub fn parent_object