diff --git a/.github/workflows/advisory-cron.yaml b/.github/workflows/advisory-cron.yaml
index 90923191..31bac5a3 100644
--- a/.github/workflows/advisory-cron.yaml
+++ b/.github/workflows/advisory-cron.yaml
@@ -1,4 +1,4 @@
-name: ci
+name: Advisories
on:
schedule:
- cron: '0 18 * * *'
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index b6e8dc31..8519ac5e 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -1,11 +1,11 @@
-name: ci
-on:
+name: CI
+on:
push:
branches:
- - experiment
+ - main
pull_request:
branches:
- - experiment
+ - main
jobs:
fmt:
runs-on: ubuntu-latest
@@ -14,7 +14,8 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- toolchain: stable
+ toolchain: 1.67.0
+ default: true
components: rustfmt
- uses: Swatinem/rust-cache@v1
- run: ./scripts/ci/fmt
@@ -27,7 +28,8 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- toolchain: stable
+ toolchain: 1.67.0
+ default: true
components: clippy
- uses: Swatinem/rust-cache@v1
- run: ./scripts/ci/lint
@@ -40,9 +42,14 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- toolchain: stable
+ toolchain: 1.67.0
+ default: true
- uses: Swatinem/rust-cache@v1
- - run: ./scripts/ci/docs
+ - name: Build rust docs
+ run: ./scripts/ci/rust-docs
+ shell: bash
+ - name: Install doxygen
+ run: sudo apt-get install -y doxygen
shell: bash
cargo-deny:
@@ -57,31 +64,88 @@ jobs:
- uses: actions/checkout@v2
- uses: EmbarkStudios/cargo-deny-action@v1
with:
+ arguments: '--manifest-path ./rust/Cargo.toml'
command: check ${{ matrix.checks }}
+ wasm_tests:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - name: Install wasm-bindgen-cli
+ run: cargo install wasm-bindgen-cli wasm-opt
+ - name: Install wasm32 target
+ run: rustup target add wasm32-unknown-unknown
+ - name: run tests
+ run: ./scripts/ci/wasm_tests
+ deno_tests:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - uses: denoland/setup-deno@v1
+ with:
+ deno-version: v1.x
+ - name: Install wasm-bindgen-cli
+ run: cargo install wasm-bindgen-cli wasm-opt
+ - name: Install wasm32 target
+ run: rustup target add wasm32-unknown-unknown
+ - name: run tests
+ run: ./scripts/ci/deno_tests
+
+ js_fmt:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - name: install
+ run: yarn global add prettier
+ - name: format
+ run: prettier -c javascript/.prettierrc javascript
+
js_tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- - name: Install wasm-pack
- run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
+ - name: Install wasm-bindgen-cli
+ run: cargo install wasm-bindgen-cli wasm-opt
+ - name: Install wasm32 target
+ run: rustup target add wasm32-unknown-unknown
- name: run tests
run: ./scripts/ci/js_tests
+ cmake_build:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - uses: actions-rs/toolchain@v1
+ with:
+ profile: minimal
+ toolchain: nightly-2023-01-26
+ default: true
+ - uses: Swatinem/rust-cache@v1
+ - name: Install CMocka
+ run: sudo apt-get install -y libcmocka-dev
+ - name: Install/update CMake
+ uses: jwlawson/actions-setup-cmake@v1.12
+ with:
+ cmake-version: latest
+ - name: Install rust-src
+ run: rustup component add rust-src
+ - name: Build and test C bindings
+ run: ./scripts/ci/cmake-build Release Static
+ shell: bash
+
linux:
runs-on: ubuntu-latest
strategy:
matrix:
toolchain:
- - stable
- - nightly
- continue-on-error: ${{ matrix.toolchain == 'nightly' }}
+ - 1.67.0
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: ${{ matrix.toolchain }}
+ default: true
- uses: Swatinem/rust-cache@v1
- run: ./scripts/ci/build-test
shell: bash
@@ -93,7 +157,8 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- toolchain: stable
+ toolchain: 1.67.0
+ default: true
- uses: Swatinem/rust-cache@v1
- run: ./scripts/ci/build-test
shell: bash
@@ -105,8 +170,8 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- toolchain: stable
+ toolchain: 1.67.0
+ default: true
- uses: Swatinem/rust-cache@v1
- run: ./scripts/ci/build-test
shell: bash
-
diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml
new file mode 100644
index 00000000..b501d526
--- /dev/null
+++ b/.github/workflows/docs.yaml
@@ -0,0 +1,52 @@
+on:
+ push:
+ branches:
+ - main
+
+name: Documentation
+
+jobs:
+ deploy-docs:
+ concurrency: deploy-docs
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+
+ - name: Toolchain
+ uses: actions-rs/toolchain@v1
+ with:
+ profile: minimal
+ toolchain: stable
+ override: true
+
+ - name: Cache
+ uses: Swatinem/rust-cache@v1
+
+ - name: Clean docs dir
+ run: rm -rf docs
+ shell: bash
+
+ - name: Clean Rust docs dir
+ uses: actions-rs/cargo@v1
+ with:
+ command: clean
+ args: --manifest-path ./rust/Cargo.toml --doc
+
+ - name: Build Rust docs
+ uses: actions-rs/cargo@v1
+ with:
+ command: doc
+ args: --manifest-path ./rust/Cargo.toml --workspace --all-features --no-deps
+
+ - name: Move Rust docs
+ run: mkdir -p docs && mv rust/target/doc/* docs/.
+ shell: bash
+
+ - name: Configure root page
+ run: echo '' > docs/index.html
+
+ - name: Deploy docs
+ uses: peaceiris/actions-gh-pages@v3
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ publish_dir: ./docs
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
new file mode 100644
index 00000000..762671ff
--- /dev/null
+++ b/.github/workflows/release.yaml
@@ -0,0 +1,214 @@
+name: Release
+on:
+ push:
+ branches:
+ - main
+
+jobs:
+ check_if_wasm_version_upgraded:
+ name: Check if WASM version has been upgraded
+ runs-on: ubuntu-latest
+ outputs:
+ wasm_version: ${{ steps.version-updated.outputs.current-package-version }}
+ wasm_has_updated: ${{ steps.version-updated.outputs.has-updated }}
+ steps:
+ - uses: JiPaix/package-json-updated-action@v1.0.5
+ id: version-updated
+ with:
+ path: rust/automerge-wasm/package.json
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ publish-wasm:
+ name: Publish WASM package
+ runs-on: ubuntu-latest
+ needs:
+ - check_if_wasm_version_upgraded
+ # We create release only if the version in the package.json has been upgraded
+ if: needs.check_if_wasm_version_upgraded.outputs.wasm_has_updated == 'true'
+ steps:
+ - uses: actions/setup-node@v3
+ with:
+ node-version: '16.x'
+ registry-url: 'https://registry.npmjs.org'
+ - uses: denoland/setup-deno@v1
+ - uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+ ref: ${{ github.ref }}
+ - name: Get rid of local github workflows
+ run: rm -r .github/workflows
+ - name: Remove tmp_branch if it exists
+ run: git push origin :tmp_branch || true
+ - run: git checkout -b tmp_branch
+ - name: Install wasm-bindgen-cli
+ run: cargo install wasm-bindgen-cli wasm-opt
+ - name: Install wasm32 target
+ run: rustup target add wasm32-unknown-unknown
+ - name: run wasm js tests
+ id: wasm_js_tests
+ run: ./scripts/ci/wasm_tests
+ - name: run wasm deno tests
+ id: wasm_deno_tests
+ run: ./scripts/ci/deno_tests
+ - name: build release
+ id: build_release
+ run: |
+ npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm run release
+ - name: Collate deno release files
+ if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
+ run: |
+ mkdir $GITHUB_WORKSPACE/deno_wasm_dist
+ cp $GITHUB_WORKSPACE/rust/automerge-wasm/deno/* $GITHUB_WORKSPACE/deno_wasm_dist
+ cp $GITHUB_WORKSPACE/rust/automerge-wasm/index.d.ts $GITHUB_WORKSPACE/deno_wasm_dist
+ cp $GITHUB_WORKSPACE/rust/automerge-wasm/README.md $GITHUB_WORKSPACE/deno_wasm_dist
+ cp $GITHUB_WORKSPACE/rust/automerge-wasm/LICENSE $GITHUB_WORKSPACE/deno_wasm_dist
+ sed -i '1i /// ' $GITHUB_WORKSPACE/deno_wasm_dist/automerge_wasm.js
+ - name: Create npm release
+ if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
+ run: |
+ if [ "$(npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm show . version)" = "$VERSION" ]; then
+ echo "This version is already published"
+ exit 0
+ fi
+ EXTRA_ARGS="--access public"
+ if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then
+ echo "Is pre-release version"
+ EXTRA_ARGS="$EXTRA_ARGS --tag next"
+ fi
+ if [ "$NODE_AUTH_TOKEN" = "" ]; then
+ echo "Can't publish on NPM, You need a NPM_TOKEN secret."
+ false
+ fi
+ npm publish $GITHUB_WORKSPACE/rust/automerge-wasm $EXTRA_ARGS
+ env:
+ NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
+ VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
+ - name: Commit wasm deno release files
+ run: |
+ git config --global user.name "actions"
+ git config --global user.email actions@github.com
+ git add $GITHUB_WORKSPACE/deno_wasm_dist
+ git commit -am "Add deno release files"
+ git push origin tmp_branch
+ - name: Tag wasm release
+ if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
+ uses: softprops/action-gh-release@v1
+ with:
+ name: Automerge Wasm v${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
+ tag_name: js/automerge-wasm-${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
+ target_commitish: tmp_branch
+ generate_release_notes: false
+ draft: false
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ - name: Remove tmp_branch
+ run: git push origin :tmp_branch
+ check_if_js_version_upgraded:
+ name: Check if JS version has been upgraded
+ runs-on: ubuntu-latest
+ outputs:
+ js_version: ${{ steps.version-updated.outputs.current-package-version }}
+ js_has_updated: ${{ steps.version-updated.outputs.has-updated }}
+ steps:
+ - uses: JiPaix/package-json-updated-action@v1.0.5
+ id: version-updated
+ with:
+ path: javascript/package.json
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ publish-js:
+ name: Publish JS package
+ runs-on: ubuntu-latest
+ needs:
+ - check_if_js_version_upgraded
+ - check_if_wasm_version_upgraded
+ - publish-wasm
+ # We create release only if the version in the package.json has been upgraded and after the WASM release
+ if: |
+ (always() && ! cancelled()) &&
+ (needs.publish-wasm.result == 'success' || needs.publish-wasm.result == 'skipped') &&
+ needs.check_if_js_version_upgraded.outputs.js_has_updated == 'true'
+ steps:
+ - uses: actions/setup-node@v3
+ with:
+ node-version: '16.x'
+ registry-url: 'https://registry.npmjs.org'
+ - uses: denoland/setup-deno@v1
+ - uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+ ref: ${{ github.ref }}
+ - name: Get rid of local github workflows
+ run: rm -r .github/workflows
+ - name: Remove js_tmp_branch if it exists
+ run: git push origin :js_tmp_branch || true
+ - run: git checkout -b js_tmp_branch
+ - name: check js formatting
+ run: |
+ yarn global add prettier
+ prettier -c javascript/.prettierrc javascript
+ - name: run js tests
+ id: js_tests
+ run: |
+ cargo install wasm-bindgen-cli wasm-opt
+ rustup target add wasm32-unknown-unknown
+ ./scripts/ci/js_tests
+ - name: build js release
+ id: build_release
+ run: |
+ npm --prefix $GITHUB_WORKSPACE/javascript run build
+ - name: build js deno release
+ id: build_deno_release
+ run: |
+ VERSION=$WASM_VERSION npm --prefix $GITHUB_WORKSPACE/javascript run deno:build
+ env:
+ WASM_VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
+ - name: run deno tests
+ id: deno_tests
+ run: |
+ npm --prefix $GITHUB_WORKSPACE/javascript run deno:test
+ - name: Collate deno release files
+ if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
+ run: |
+ mkdir $GITHUB_WORKSPACE/deno_js_dist
+ cp $GITHUB_WORKSPACE/javascript/deno_dist/* $GITHUB_WORKSPACE/deno_js_dist
+ - name: Create npm release
+ if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
+ run: |
+ if [ "$(npm --prefix $GITHUB_WORKSPACE/javascript show . version)" = "$VERSION" ]; then
+ echo "This version is already published"
+ exit 0
+ fi
+ EXTRA_ARGS="--access public"
+ if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then
+ echo "Is pre-release version"
+ EXTRA_ARGS="$EXTRA_ARGS --tag next"
+ fi
+ if [ "$NODE_AUTH_TOKEN" = "" ]; then
+ echo "Can't publish on NPM, You need a NPM_TOKEN secret."
+ false
+ fi
+ npm publish $GITHUB_WORKSPACE/javascript $EXTRA_ARGS
+ env:
+ NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
+ VERSION: ${{ needs.check_if_js_version_upgraded.outputs.js_version }}
+ - name: Commit js deno release files
+ run: |
+ git config --global user.name "actions"
+ git config --global user.email actions@github.com
+ git add $GITHUB_WORKSPACE/deno_js_dist
+ git commit -am "Add deno js release files"
+ git push origin js_tmp_branch
+ - name: Tag JS release
+ if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
+ uses: softprops/action-gh-release@v1
+ with:
+ name: Automerge v${{ needs.check_if_js_version_upgraded.outputs.js_version }}
+ tag_name: js/automerge-${{ needs.check_if_js_version_upgraded.outputs.js_version }}
+ target_commitish: js_tmp_branch
+ generate_release_notes: false
+ draft: false
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ - name: Remove js_tmp_branch
+ run: git push origin :js_tmp_branch
diff --git a/.gitignore b/.gitignore
index 95d3d639..f77865d0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,6 @@
-/target
/.direnv
perf.*
/Cargo.lock
+build/
+.vim/*
+/target
diff --git a/Makefile b/Makefile
deleted file mode 100644
index 9f8db2d1..00000000
--- a/Makefile
+++ /dev/null
@@ -1,13 +0,0 @@
-rust:
- cd automerge && cargo test
-
-wasm:
- cd automerge-wasm && yarn
- cd automerge-wasm && yarn build
- cd automerge-wasm && yarn test
- cd automerge-wasm && yarn link
-
-js: wasm
- cd automerge-js && yarn
- cd automerge-js && yarn link "automerge-wasm"
- cd automerge-js && yarn test
diff --git a/README.md b/README.md
index e7a277a8..ad174da4 100644
--- a/README.md
+++ b/README.md
@@ -1,81 +1,147 @@
-# Automerge - NEXT
+# Automerge
-This is pretty much a ground up rewrite of automerge-rs. The objective of this
-rewrite is to radically simplify the API. The end goal being to produce a library
-which is easy to work with both in Rust and from FFI.
+
-## How?
+[](https://automerge.org/)
+[](https://automerge.org/automerge-rs/automerge/)
+[](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml)
+[](https://github.com/automerge/automerge-rs/actions/workflows/docs.yaml)
-The current iteration of automerge-rs is complicated to work with because it
-adopts the frontend/backend split architecture of the JS implementation. This
-architecture was necessary due to basic operations on the automerge opset being
-too slow to perform on the UI thread. Recently @orionz has been able to improve
-the performance to the point where the split is no longer necessary. This means
-we can adopt a much simpler mutable API.
+Automerge is a library which provides fast implementations of several different
+CRDTs, a compact compression format for these CRDTs, and a sync protocol for
+efficiently transmitting those changes over the network. The objective of the
+project is to support [local-first](https://www.inkandswitch.com/local-first/) applications in the same way that relational
+databases support server applications - by providing mechanisms for persistence
+which allow application developers to avoid thinking about hard distributed
+computing problems. Automerge aims to be PostgreSQL for your local-first app.
-The architecture is now built around the `OpTree`. This is a data structure
-which supports efficiently inserting new operations and realising values of
-existing operations. Most interactions with the `OpTree` are in the form of
-implementations of `TreeQuery` - a trait which can be used to traverse the
-optree and producing state of some kind. User facing operations are exposed on
-an `Automerge` object, under the covers these operations typically instantiate
-some `TreeQuery` and run it over the `OpTree`.
+If you're looking for documentation on the JavaScript implementation, take a look
+at https://automerge.org/docs/hello/. There are other implementations in both
+Rust and C, but they are at an earlier stage and don't have documentation yet. You
+can find them in `rust/automerge` and `rust/automerge-c` if you are comfortable
+reading the code and tests to figure out how to use them.
+
+If you're familiar with CRDTs and interested in the design of Automerge in
+particular take a look at https://automerge.org/docs/how-it-works/backend/
+
+Finally, if you want to talk to us about this project please [join the
+Slack](https://join.slack.com/t/automerge/shared_invite/zt-e4p3760n-kKh7r3KRH1YwwNfiZM8ktw)
## Status
-We have working code which passes all of the tests in the JS test suite. We're
-now working on writing a bunch more tests and cleaning up the API.
+This project is formed of a core Rust implementation which is exposed via FFI in
+javascript+WASM, C, and soon other languages. Alex
+([@alexjg](https://github.com/alexjg/)) is working full time on maintaining
+automerge; other members of Ink and Switch are also contributing time, and there
+are several other maintainers. The focus is currently on shipping the new JS
+package. We expect to be iterating the API and adding new features over the next
+six months, so there will likely be several major version bumps in all packages
+in that time.
-## Development
+In general we try to respect semver.
-### Running CI
+### JavaScript
-The steps CI will run are all defined in `./scripts/ci`. Obviously CI will run
-everything when you submit a PR, but if you want to run everything locally
-before you push you can run `./scripts/ci/run` to run everything.
+A stable release of the javascript package is currently available as
+`@automerge/automerge@2.0.0`, while pre-release versions of `2.0.1` are
+available as `2.0.1-alpha.n`. `2.0.1*` packages are also available for Deno at
+https://deno.land/x/automerge
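+
+For example, the stable release and a pre-release can be installed with npm
+like so (a sketch; yarn users can substitute the equivalent commands):
+
+```bash
+# stable release
+npm install @automerge/automerge
+
+# latest pre-release, published under the `next` dist-tag
+npm install @automerge/automerge@next
+```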
-### Running the JS tests
+### Rust
-You will need to have [node](https://nodejs.org/en/), [yarn](https://yarnpkg.com/getting-started/install), [rust](https://rustup.rs/) and [wasm-pack](https://rustwasm.github.io/wasm-pack/installer/) installed.
+The rust codebase is currently oriented around producing a performant backend
+for the Javascript wrapper, and as such the API for Rust code is low level and
+not well documented. We will be returning to this over the next few months, but
+for now you will need to be comfortable reading the tests and asking questions
+to figure out how to use it. If you are looking to build rust applications which
+use automerge you may want to look into
+[autosurgeon](https://github.com/alexjg/autosurgeon).
-To build and test the rust library:
+## Repository Organisation
-```shell
- $ cd automerge
- $ cargo test
+- `./rust` - the Rust implementation and also the Rust components of
+ platform specific wrappers (e.g. `automerge-wasm` for the WASM API or
+ `automerge-c` for the C FFI bindings)
+- `./javascript` - The javascript library which uses `automerge-wasm`
+ internally but presents a more idiomatic javascript interface
+- `./scripts` - scripts which are useful for maintenance of the repository.
+ This includes the scripts which are run in CI.
+- `./img` - static assets for use in `.md` files
+
+## Building
+
+To build this codebase you will need:
+
+- `rust`
+- `node`
+- `yarn`
+- `cmake`
+- `cmocka`
+
+You will also need to install the following with `cargo install`
+
+- `wasm-bindgen-cli`
+- `wasm-opt`
+- `cargo-deny`
+
+And ensure you have added the `wasm32-unknown-unknown` target for rust cross-compilation.
+
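+For example, these can typically be installed with:
+
+```bash
+# cargo tools used by the build and CI scripts
+cargo install wasm-bindgen-cli wasm-opt cargo-deny
+
+# wasm target for cross-compilation
+rustup target add wasm32-unknown-unknown
+```
+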
+The various subprojects (the rust code, the wrapper projects) have their own
+build instructions, but to run the tests that will be run in CI you can run
+`./scripts/ci/run`.
+
+### For macOS
+
+These instructions worked to build locally on macOS 13.1 (arm64) as of
+Nov 29th 2022.
+
+```bash
+# clone the repo
+git clone https://github.com/automerge/automerge-rs
+cd automerge-rs
+
+# install rustup
+curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
+
+# install homebrew
+/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
+
+# install cmake, node, cmocka
+brew install cmake node cmocka
+
+# install yarn
+npm install --global yarn
+
+# install javascript dependencies
+yarn --cwd ./javascript
+
+# install rust dependencies
+cargo install wasm-bindgen-cli wasm-opt cargo-deny
+
+# get nightly rust to produce optimized automerge-c builds
+rustup toolchain install nightly
+rustup component add rust-src --toolchain nightly
+
+# add wasm target in addition to current architecture
+rustup target add wasm32-unknown-unknown
+
+# Run ci script
+./scripts/ci/run
```
-To build and test the wasm library:
+If your build fails to find `cmocka.h` you may need to teach it about Homebrew's
+installation location:
-```shell
- ## setup
- $ cd automerge-wasm
- $ yarn
-
- ## building or testing
- $ yarn build
- $ yarn test
-
- ## without this the js library wont automatically use changes
- $ yarn link
-
- ## cutting a release or doing benchmarking
- $ yarn release
- $ yarn opt ## or set `wasm-opt = false` in Cargo.toml on supported platforms (not arm64 osx)
+```bash
+export CPATH=/opt/homebrew/include
+export LIBRARY_PATH=/opt/homebrew/lib
+./scripts/ci/run
```
-And finally to test the js library. This is where most of the tests reside.
+## Contributing
-```shell
- ## setup
- $ cd automerge-js
- $ yarn
- $ yarn link "automerge-wasm"
-
- ## testing
- $ yarn test
-```
-
-## Benchmarking
-
-The `edit-trace` folder has the main code for running the edit trace benchmarking.
+Please try to split your changes up into relatively independent commits which
+change one subsystem at a time, and add good commit messages which describe what
+the change is and why you're making it (err on the side of longer commit
+messages). `git blame` should give future maintainers a good idea of why
+something is the way it is.
diff --git a/TODO.md b/TODO.md
deleted file mode 100644
index 5e6889dc..00000000
--- a/TODO.md
+++ /dev/null
@@ -1,20 +0,0 @@
-
-### next steps:
- 1. C API
-
-### ergronomics:
- 1. value() -> () or something that into's a value
-
-### automerge:
- 1. single pass (fast) load
- 2. micro-patches / bare bones observation API / fully hydrated documents
-
-### sync
- 1. get all sync tests passing
-
-### maybe:
- 1. tables
-
-### no:
- 1. cursors
-
diff --git a/automerge-js/.gitignore b/automerge-js/.gitignore
deleted file mode 100644
index 5add9449..00000000
--- a/automerge-js/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-/node_modules
-/yarn.lock
diff --git a/automerge-js/package.json b/automerge-js/package.json
deleted file mode 100644
index 8742d99a..00000000
--- a/automerge-js/package.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
- "name": "automerge-js",
- "version": "0.1.0",
- "main": "src/index.js",
- "license": "MIT",
- "scripts": {
- "test": "mocha --bail --full-trace"
- },
- "devDependencies": {
- "mocha": "^9.1.1"
- },
- "dependencies": {
- "automerge-wasm": "file:../automerge-wasm/dev",
- "fast-sha256": "^1.3.0",
- "pako": "^2.0.4",
- "uuid": "^8.3"
- }
-}
diff --git a/automerge-js/src/constants.js b/automerge-js/src/constants.js
deleted file mode 100644
index ea92228c..00000000
--- a/automerge-js/src/constants.js
+++ /dev/null
@@ -1,18 +0,0 @@
-// Properties of the document root object
-//const OPTIONS = Symbol('_options') // object containing options passed to init()
-//const CACHE = Symbol('_cache') // map from objectId to immutable object
-const STATE = Symbol('_state') // object containing metadata about current state (e.g. sequence numbers)
-const HEADS = Symbol('_heads') // object containing metadata about current state (e.g. sequence numbers)
-const OBJECT_ID = Symbol('_objectId') // object containing metadata about current state (e.g. sequence numbers)
-const READ_ONLY = Symbol('_readOnly') // object containing metadata about current state (e.g. sequence numbers)
-const FROZEN = Symbol('_frozen') // object containing metadata about current state (e.g. sequence numbers)
-
-// Properties of all Automerge objects
-//const OBJECT_ID = Symbol('_objectId') // the object ID of the current object (string)
-//const CONFLICTS = Symbol('_conflicts') // map or list (depending on object type) of conflicts
-//const CHANGE = Symbol('_change') // the context object on proxy objects used in change callback
-//const ELEM_IDS = Symbol('_elemIds') // list containing the element ID of each list element
-
-module.exports = {
- STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN
-}
diff --git a/automerge-js/src/index.js b/automerge-js/src/index.js
deleted file mode 100644
index 1d15789b..00000000
--- a/automerge-js/src/index.js
+++ /dev/null
@@ -1,372 +0,0 @@
-const AutomergeWASM = require("automerge-wasm")
-const uuid = require('./uuid')
-
-let { rootProxy, listProxy, textProxy, mapProxy } = require("./proxies")
-let { Counter } = require("./counter")
-let { Text } = require("./text")
-let { Int, Uint, Float64 } = require("./numbers")
-let { STATE, HEADS, OBJECT_ID, READ_ONLY, FROZEN } = require("./constants")
-
-function init(actor) {
- const state = AutomergeWASM.init(actor)
- return rootProxy(state, true);
-}
-
-function clone(doc) {
- const state = doc[STATE].clone()
- return rootProxy(state, true);
-}
-
-function free(doc) {
- return doc[STATE].free()
-}
-
-function from(data, actor) {
- let doc1 = init(actor)
- let doc2 = change(doc1, (d) => Object.assign(d, data))
- return doc2
-}
-
-function change(doc, options, callback) {
- if (callback === undefined) {
- // FIXME implement options
- callback = options
- options = {}
- }
- if (typeof options === "string") {
- options = { message: options }
- }
- if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") {
- throw new RangeError("must be the document root");
- }
- if (doc[FROZEN] === true) {
- throw new RangeError("Attempting to use an outdated Automerge document")
- }
- if (!!doc[HEADS] === true) {
- console.log("HEADS", doc[HEADS])
- throw new RangeError("Attempting to change an out of date document");
- }
- if (doc[READ_ONLY] === false) {
- throw new RangeError("Calls to Automerge.change cannot be nested")
- }
- const state = doc[STATE]
- const heads = state.getHeads()
- try {
- doc[HEADS] = heads
- doc[FROZEN] = true
- let root = rootProxy(state);
- callback(root)
- if (state.pending_ops() === 0) {
- doc[FROZEN] = false
- doc[HEADS] = undefined
- return doc
- } else {
- state.commit(options.message, options.time)
- return rootProxy(state, true);
- }
- } catch (e) {
- //console.log("ERROR: ",e)
- doc[FROZEN] = false
- doc[HEADS] = undefined
- state.rollback()
- throw e
- }
-}
-
-function emptyChange(doc, options) {
- if (options === undefined) {
- options = {}
- }
- if (typeof options === "string") {
- options = { message: options }
- }
-
- if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") {
- throw new RangeError("must be the document root");
- }
- if (doc[FROZEN] === true) {
- throw new RangeError("Attempting to use an outdated Automerge document")
- }
- if (doc[READ_ONLY] === false) {
- throw new RangeError("Calls to Automerge.change cannot be nested")
- }
-
- const state = doc[STATE]
- state.commit(options.message, options.time)
- return rootProxy(state, true);
-}
-
-function load(data, actor) {
- const state = AutomergeWASM.load(data, actor)
- return rootProxy(state, true);
-}
-
-function save(doc) {
- const state = doc[STATE]
- return state.save()
-}
-
-function merge(local, remote) {
- if (local[HEADS] === true) {
- throw new RangeError("Attempting to change an out of date document");
- }
- const localState = local[STATE]
- const heads = localState.getHeads()
- const remoteState = remote[STATE]
- const changes = localState.getChangesAdded(remoteState)
- localState.applyChanges(changes)
- local[HEADS] = heads
- return rootProxy(localState, true)
-}
-
-function getActorId(doc) {
- const state = doc[STATE]
- return state.getActorId()
-}
-
-function conflictAt(context, objectId, prop) {
- let values = context.values(objectId, prop)
- if (values.length <= 1) {
- return
- }
- let result = {}
- for (const conflict of values) {
- const datatype = conflict[0]
- const value = conflict[1]
- switch (datatype) {
- case "map":
- result[value] = mapProxy(context, value, [ prop ], true, true)
- break;
- case "list":
- result[value] = listProxy(context, value, [ prop ], true, true)
- break;
- case "text":
- result[value] = textProxy(context, value, [ prop ], true, true)
- break;
- //case "table":
- //case "cursor":
- case "str":
- case "uint":
- case "int":
- case "f64":
- case "boolean":
- case "bytes":
- case "null":
- result[conflict[2]] = value
- break;
- case "counter":
- result[conflict[2]] = new Counter(value)
- break;
- case "timestamp":
- result[conflict[2]] = new Date(value)
- break;
- default:
- throw RangeError(`datatype ${datatype} unimplemented`)
- }
- }
- return result
-}
-
-function getConflicts(doc, prop) {
- const state = doc[STATE]
- const objectId = doc[OBJECT_ID]
- return conflictAt(state, objectId, prop)
-}
-
-function getLastLocalChange(doc) {
- const state = doc[STATE]
- return state.getLastLocalChange()
-}
-
-function getObjectId(doc) {
- return doc[OBJECT_ID]
-}
-
-function getChanges(oldState, newState) {
- const o = oldState[STATE]
- const n = newState[STATE]
- const heads = oldState[HEADS]
- return n.getChanges(heads || o.getHeads())
-}
-
-function getAllChanges(doc) {
- const state = doc[STATE]
- return state.getChanges([])
-}
-
-function applyChanges(doc, changes) {
- if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") {
- throw new RangeError("must be the document root");
- }
- if (doc[FROZEN] === true) {
- throw new RangeError("Attempting to use an outdated Automerge document")
- }
- if (doc[READ_ONLY] === false) {
- throw new RangeError("Calls to Automerge.change cannot be nested")
- }
- const state = doc[STATE]
- const heads = state.getHeads()
- state.applyChanges(changes)
- doc[HEADS] = heads
- return [rootProxy(state, true)];
-}
-
-function getHistory(doc) {
- const actor = getActorId(doc)
- const history = getAllChanges(doc)
- return history.map((change, index) => ({
- get change () {
- return decodeChange(change)
- },
- get snapshot () {
- const [state] = applyChanges(init(), history.slice(0, index + 1))
- return state
- }
- })
- )
-}
-
-function equals() {
- if (!isObject(val1) || !isObject(val2)) return val1 === val2
- const keys1 = Object.keys(val1).sort(), keys2 = Object.keys(val2).sort()
- if (keys1.length !== keys2.length) return false
- for (let i = 0; i < keys1.length; i++) {
- if (keys1[i] !== keys2[i]) return false
- if (!equals(val1[keys1[i]], val2[keys2[i]])) return false
- }
- return true
-}
-
-function encodeSyncMessage(msg) {
- return AutomergeWASM.encodeSyncMessage(msg)
-}
-
-function decodeSyncMessage(msg) {
- return AutomergeWASM.decodeSyncMessage(msg)
-}
-
-function encodeSyncState(state) {
- return AutomergeWASM.encodeSyncState(state)
-}
-
-function decodeSyncState() {
- return AutomergeWASM.decodeSyncState(state)
-}
-
-function generateSyncMessage(doc, syncState) {
- const state = doc[STATE]
- return [ syncState, state.generateSyncMessage(syncState) ]
-}
-
-function receiveSyncMessage(doc, syncState, message) {
- if (doc === undefined || doc[STATE] === undefined || doc[OBJECT_ID] !== "_root") {
- throw new RangeError("must be the document root");
- }
- if (doc[FROZEN] === true) {
- throw new RangeError("Attempting to use an outdated Automerge document")
- }
- if (!!doc[HEADS] === true) {
- throw new RangeError("Attempting to change an out of date document");
- }
- if (doc[READ_ONLY] === false) {
- throw new RangeError("Calls to Automerge.change cannot be nested")
- }
- const state = doc[STATE]
- const heads = state.getHeads()
- state.receiveSyncMessage(syncState, message)
- doc[HEADS] = heads
- return [rootProxy(state, true), syncState, null];
-}
-
-function initSyncState() {
- return AutomergeWASM.initSyncState(change)
-}
-
-function encodeChange(change) {
- return AutomergeWASM.encodeChange(change)
-}
-
-function decodeChange(data) {
- return AutomergeWASM.decodeChange(data)
-}
-
-function encodeSyncMessage(change) {
- return AutomergeWASM.encodeSyncMessage(change)
-}
-
-function decodeSyncMessage(data) {
- return AutomergeWASM.decodeSyncMessage(data)
-}
-
-function encodeSyncState(change) {
- return AutomergeWASM.encodeSyncState(change)
-}
-
-function decodeSyncState(data) {
- return AutomergeWASM.decodeSyncState(data)
-}
-
-function getMissingDeps(doc, heads) {
- const state = doc[STATE]
- if (!heads) {
- heads = []
- }
- return state.getMissingDeps(heads)
-}
-
-function getHeads(doc) {
- const state = doc[STATE]
- return doc[HEADS] || state.getHeads()
-}
-
-function dump(doc) {
- const state = doc[STATE]
- state.dump()
-}
-
-function toJS(doc) {
- if (typeof doc === "object") {
- if (doc instanceof Uint8Array) {
- return doc
- }
- if (doc === null) {
- return doc
- }
- if (doc instanceof Array) {
- return doc.map((a) => toJS(a))
- }
- if (doc instanceof Text) {
- return doc.map((a) => toJS(a))
- }
- let tmp = {}
- for (index in doc) {
- tmp[index] = toJS(doc[index])
- }
- return tmp
- } else {
- return doc
- }
-}
-
-module.exports = {
- init, from, change, emptyChange, clone, free,
- load, save, merge, getChanges, getAllChanges, applyChanges,
- getLastLocalChange, getObjectId, getActorId, getConflicts,
- encodeChange, decodeChange, equals, getHistory, getHeads, uuid,
- generateSyncMessage, receiveSyncMessage, initSyncState,
- decodeSyncMessage, encodeSyncMessage, decodeSyncState, encodeSyncState,
- getMissingDeps,
- dump, Text, Counter, Int, Uint, Float64, toJS,
-}
-
-// depricated
-// Frontend, setDefaultBackend, Backend
-
-// more...
-/*
-for (let name of ['getObjectId', 'getObjectById',
- 'setActorId',
- 'Text', 'Table', 'Counter', 'Observable' ]) {
- module.exports[name] = Frontend[name]
-}
-*/
diff --git a/automerge-js/src/numbers.js b/automerge-js/src/numbers.js
deleted file mode 100644
index 1ee22dee..00000000
--- a/automerge-js/src/numbers.js
+++ /dev/null
@@ -1,33 +0,0 @@
-// Convience classes to allow users to stricly specify the number type they want
-
-class Int {
- constructor(value) {
- if (!(Number.isInteger(value) && value <= Number.MAX_SAFE_INTEGER && value >= Number.MIN_SAFE_INTEGER)) {
- throw new RangeError(`Value ${value} cannot be a uint`)
- }
- this.value = value
- Object.freeze(this)
- }
-}
-
-class Uint {
- constructor(value) {
- if (!(Number.isInteger(value) && value <= Number.MAX_SAFE_INTEGER && value >= 0)) {
- throw new RangeError(`Value ${value} cannot be a uint`)
- }
- this.value = value
- Object.freeze(this)
- }
-}
-
-class Float64 {
- constructor(value) {
- if (typeof value !== 'number') {
- throw new RangeError(`Value ${value} cannot be a float64`)
- }
- this.value = value || 0.0
- Object.freeze(this)
- }
-}
-
-module.exports = { Int, Uint, Float64 }
diff --git a/automerge-js/src/proxies.js b/automerge-js/src/proxies.js
deleted file mode 100644
index e946b37f..00000000
--- a/automerge-js/src/proxies.js
+++ /dev/null
@@ -1,623 +0,0 @@
-
-const AutomergeWASM = require("automerge-wasm")
-const { Int, Uint, Float64 } = require("./numbers");
-const { Counter, getWriteableCounter } = require("./counter");
-const { Text } = require("./text");
-const { STATE, HEADS, FROZEN, OBJECT_ID, READ_ONLY } = require("./constants")
-const { MAP, LIST, TABLE, TEXT } = require("automerge-wasm")
-
-function parseListIndex(key) {
- if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10)
- if (typeof key !== 'number') {
- // throw new TypeError('A list index must be a number, but you passed ' + JSON.stringify(key))
- return key
- }
- if (key < 0 || isNaN(key) || key === Infinity || key === -Infinity) {
- throw new RangeError('A list index must be positive, but you passed ' + key)
- }
- return key
-}
-
-function valueAt(target, prop) {
- const { context, objectId, path, readonly, heads} = target
- let value = context.value(objectId, prop, heads)
- if (value === undefined) {
- return
- }
- const datatype = value[0]
- const val = value[1]
- switch (datatype) {
- case undefined: return;
- case "map": return mapProxy(context, val, [ ... path, prop ], readonly, heads);
- case "list": return listProxy(context, val, [ ... path, prop ], readonly, heads);
- case "text": return textProxy(context, val, [ ... path, prop ], readonly, heads);
- //case "table":
- //case "cursor":
- case "str": return val;
- case "uint": return val;
- case "int": return val;
- case "f64": return val;
- case "boolean": return val;
- case "null": return null;
- case "bytes": return val;
- case "counter": {
- if (readonly) {
- return new Counter(val);
- } else {
- return getWriteableCounter(val, context, path, objectId, prop)
- }
- }
- case "timestamp": return new Date(val);
- default:
- throw RangeError(`datatype ${datatype} unimplemented`)
- }
-}
-
-function import_value(value) {
- switch (typeof value) {
- case 'object':
- if (value == null) {
- return [ null, "null"]
- } else if (value instanceof Uint) {
- return [ value.value, "uint" ]
- } else if (value instanceof Int) {
- return [ value.value, "int" ]
- } else if (value instanceof Float64) {
- return [ value.value, "f64" ]
- } else if (value instanceof Counter) {
- return [ value.value, "counter" ]
- } else if (value instanceof Date) {
- return [ value.getTime(), "timestamp" ]
- } else if (value instanceof Uint8Array) {
- return [ value, "bytes" ]
- } else if (value instanceof Array) {
- return [ value, "list" ]
- } else if (value instanceof Text) {
- return [ value, "text" ]
- } else if (value[OBJECT_ID]) {
- throw new RangeError('Cannot create a reference to an existing document object')
- } else {
- return [ value, "map" ]
- }
- break;
- case 'boolean':
- return [ value, "boolean" ]
- case 'number':
- if (Number.isInteger(value)) {
- return [ value, "int" ]
- } else {
- return [ value, "f64" ]
- }
- break;
- case 'string':
- return [ value ]
- break;
- default:
- throw new RangeError(`Unsupported type of value: ${typeof value}`)
- }
-}
-
-const MapHandler = {
- get (target, key) {
- const { context, objectId, path, readonly, frozen, heads } = target
- if (key === Symbol.toStringTag) { return target[Symbol.toStringTag] }
- if (key === OBJECT_ID) return objectId
- if (key === READ_ONLY) return readonly
- if (key === FROZEN) return frozen
- if (key === HEADS) return heads
- if (key === STATE) return context;
- return valueAt(target, key)
- },
-
- set (target, key, val) {
- let { context, objectId, path, readonly, frozen} = target
- if (val && val[OBJECT_ID]) {
- throw new RangeError('Cannot create a reference to an existing document object')
- }
- if (key === FROZEN) {
- target.frozen = val
- return
- }
- if (key === HEADS) {
- target.heads = val
- return
- }
- let [ value, datatype ] = import_value(val)
- if (frozen) {
- throw new RangeError("Attempting to use an outdated Automerge document")
- }
- if (readonly) {
- throw new RangeError(`Object property "${key}" cannot be modified`)
- }
- switch (datatype) {
- case "list":
- const list = context.set(objectId, key, LIST)
- const proxyList = listProxy(context, list, [ ... path, key ], readonly );
- for (let i = 0; i < value.length; i++) {
- proxyList[i] = value[i]
- }
- break;
- case "text":
- const text = context.set(objectId, key, TEXT)
- const proxyText = textProxy(context, text, [ ... path, key ], readonly );
- for (let i = 0; i < value.length; i++) {
- proxyText[i] = value.get(i)
- }
- break;
- case "map":
- const map = context.set(objectId, key, MAP)
- const proxyMap = mapProxy(context, map, [ ... path, key ], readonly );
- for (const key in value) {
- proxyMap[key] = value[key]
- }
- break;
- default:
- context.set(objectId, key, value, datatype)
- }
- return true
- },
-
- deleteProperty (target, key) {
- const { context, objectId, path, readonly, frozen } = target
- if (readonly) {
- throw new RangeError(`Object property "${key}" cannot be modified`)
- }
- context.del(objectId, key)
- return true
- },
-
- has (target, key) {
- const value = this.get(target, key)
- return value !== undefined
- },
-
- getOwnPropertyDescriptor (target, key) {
- const { context, objectId } = target
- const value = this.get(target, key)
- if (typeof value !== 'undefined') {
- return {
- configurable: true, enumerable: true, value
- }
- }
- },
-
- ownKeys (target) {
- const { context, objectId, heads} = target
- return context.keys(objectId, heads)
- },
-}
-
-
-const ListHandler = {
- get (target, index) {
- const {context, objectId, path, readonly, frozen, heads } = target
- index = parseListIndex(index)
- if (index === Symbol.hasInstance) { return (instance) => { return [].has(instance) } }
- if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] }
- if (index === OBJECT_ID) return objectId
- if (index === READ_ONLY) return readonly
- if (index === FROZEN) return frozen
- if (index === HEADS) return heads
- if (index === STATE) return context;
- if (index === 'length') return context.length(objectId, heads);
- if (index === Symbol.iterator) {
- let i = 0;
- return function *() {
- // FIXME - ugly
- let value = valueAt(target, i)
- while (value !== undefined) {
- yield value
- i += 1
- value = valueAt(target, i)
- }
- }
- }
- if (typeof index === 'number') {
- return valueAt(target, index)
- } else {
- return listMethods(target)[index]
- }
- },
-
- set (target, index, val) {
- let {context, objectId, path, readonly, frozen } = target
- index = parseListIndex(index)
- if (val && val[OBJECT_ID]) {
- throw new RangeError('Cannot create a reference to an existing document object')
- }
- if (index === FROZEN) {
- target.frozen = val
- return
- }
- if (index === HEADS) {
- target.heads = val
- return
- }
- if (typeof index == "string") {
- throw new RangeError('list index must be a number')
- }
- const [ value, datatype] = import_value(val)
- if (frozen) {
- throw new RangeError("Attempting to use an outdated Automerge document")
- }
- if (readonly) {
- throw new RangeError(`Object property "${index}" cannot be modified`)
- }
- switch (datatype) {
- case "list":
- let list
- if (index >= context.length(objectId)) {
- list = context.insert(objectId, index, LIST)
- } else {
- list = context.set(objectId, index, LIST)
- }
- const proxyList = listProxy(context, list, [ ... path, index ], readonly);
- proxyList.splice(0,0,...value)
- break;
- case "text":
- let text
- if (index >= context.length(objectId)) {
- text = context.insert(objectId, index, TEXT)
- } else {
- text = context.set(objectId, index, TEXT)
- }
- const proxyText = textProxy(context, text, [ ... path, index ], readonly);
- proxyText.splice(0,0,...value)
- break;
- case "map":
- let map
- if (index >= context.length(objectId)) {
- map = context.insert(objectId, index, MAP)
- } else {
- map = context.set(objectId, index, MAP)
- }
- const proxyMap = mapProxy(context, map, [ ... path, index ], readonly);
- for (const key in value) {
- proxyMap[key] = value[key]
- }
- break;
- default:
- if (index >= context.length(objectId)) {
- context.insert(objectId, index, value, datatype)
- } else {
- context.set(objectId, index, value, datatype)
- }
- }
- return true
- },
-
- deleteProperty (target, index) {
- const {context, objectId} = target
- index = parseListIndex(index)
- if (context.value(objectId, index)[0] == "counter") {
- throw new TypeError('Unsupported operation: deleting a counter from a list')
- }
- context.del(objectId, index)
- return true
- },
-
- has (target, index) {
- const {context, objectId, heads} = target
- index = parseListIndex(index)
- if (typeof index === 'number') {
- return index < context.length(objectId, heads)
- }
- return index === 'length'
- },
-
- getOwnPropertyDescriptor (target, index) {
- const {context, objectId, path, readonly, frozen, heads} = target
-
- if (index === 'length') return {writable: true, value: context.length(objectId, heads) }
- if (index === OBJECT_ID) return {configurable: false, enumerable: false, value: objectId}
-
- index = parseListIndex(index)
-
- let value = valueAt(target, index)
- return { configurable: true, enumerable: true, value }
- },
-
- getPrototypeOf(target) { return Object.getPrototypeOf([]) },
- ownKeys (target) {
- const {context, objectId, heads } = target
- let keys = []
- // uncommenting this causes assert.deepEqual() to fail when comparing to a pojo array
- // but not uncommenting it causes for (i in list) {} to not enumerate values properly
- //for (let i = 0; i < target.context.length(objectId, heads); i++) { keys.push(i.toString()) }
- keys.push("length");
- return keys
- }
-}
-
-const TextHandler = Object.assign({}, ListHandler, {
- get (target, index) {
- // FIXME this is a one line change from ListHandler.get()
- const {context, objectId, path, readonly, frozen, heads } = target
- index = parseListIndex(index)
- if (index === Symbol.toStringTag) { return target[Symbol.toStringTag] }
- if (index === Symbol.hasInstance) { return (instance) => { return [].has(instance) } }
- if (index === OBJECT_ID) return objectId
- if (index === READ_ONLY) return readonly
- if (index === FROZEN) return frozen
- if (index === HEADS) return heads
- if (index === STATE) return context;
- if (index === 'length') return context.length(objectId, heads);
- if (index === Symbol.iterator) {
- let i = 0;
- return function *() {
- let value = valueAt(target, i)
- while (value !== undefined) {
- yield value
- i += 1
- value = valueAt(target, i)
- }
- }
- }
- if (typeof index === 'number') {
- return valueAt(target, index)
- } else {
- return textMethods(target)[index] || listMethods(target)[index]
- }
- },
- getPrototypeOf(target) {
- return Object.getPrototypeOf(new Text())
- },
-})
-
-function mapProxy(context, objectId, path, readonly, heads) {
- return new Proxy({context, objectId, path, readonly: !!readonly, frozen: false, heads}, MapHandler)
-}
-
-function listProxy(context, objectId, path, readonly, heads) {
- let target = []
- Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads})
- return new Proxy(target, ListHandler)
-}
-
-function textProxy(context, objectId, path, readonly, heads) {
- let target = []
- Object.assign(target, {context, objectId, path, readonly: !!readonly, frozen: false, heads})
- return new Proxy(target, TextHandler)
-}
-
-function rootProxy(context, readonly) {
- return mapProxy(context, "_root", [], readonly, false)
-}
-
-function listMethods(target) {
- const {context, objectId, path, readonly, frozen, heads} = target
- const methods = {
- deleteAt(index, numDelete) {
- // FIXME - what about many deletes?
- if (context.value(objectId, index)[0] == "counter") {
- throw new TypeError('Unsupported operation: deleting a counter from a list')
- }
- if (typeof numDelete === 'number') {
- context.splice(objectId, index, numDelete)
- } else {
- context.del(objectId, index)
- }
- return this
- },
-
- fill(val, start, end) {
- // FIXME
- let list = context.getObject(objectId)
- let [value, datatype] = valueAt(target, index)
- for (let index = parseListIndex(start || 0); index < parseListIndex(end || list.length); index++) {
- context.set(objectId, index, value, datatype)
- }
- return this
- },
-
- indexOf(o, start = 0) {
- // FIXME
- const id = o[OBJECT_ID]
- if (id) {
- const list = context.getObject(objectId)
- for (let index = start; index < list.length; index++) {
- if (list[index][OBJECT_ID] === id) {
- return index
- }
- }
- return -1
- } else {
- return context.indexOf(objectId, o, start)
- }
- },
-
- insertAt(index, ...values) {
- this.splice(index, 0, ...values)
- return this
- },
-
- pop() {
- let length = context.length(objectId)
- if (length == 0) {
- return undefined
- }
- let last = valueAt(target, length - 1)
- context.del(objectId, length - 1)
- return last
- },
-
- push(...values) {
- let len = context.length(objectId)
- this.splice(len, 0, ...values)
- return context.length(objectId)
- },
-
- shift() {
- if (context.length(objectId) == 0) return
- const first = valueAt(target, 0)
- context.del(objectId, 0)
- return first
- },
-
- splice(index, del, ...vals) {
- index = parseListIndex(index)
- del = parseListIndex(del)
- for (let val of vals) {
- if (val && val[OBJECT_ID]) {
- throw new RangeError('Cannot create a reference to an existing document object')
- }
- }
- if (frozen) {
- throw new RangeError("Attempting to use an outdated Automerge document")
- }
- if (readonly) {
- throw new RangeError("Sequence object cannot be modified outside of a change block")
- }
- let result = []
- for (let i = 0; i < del; i++) {
- let value = valueAt(target, index)
- result.push(value)
- context.del(objectId, index)
- }
- const values = vals.map((val) => import_value(val))
- for (let [value,datatype] of values) {
- switch (datatype) {
- case "list":
- const list = context.insert(objectId, index, LIST)
- const proxyList = listProxy(context, list, [ ... path, index ], readonly);
- proxyList.splice(0,0,...value)
- break;
- case "text":
- const text = context.insert(objectId, index, TEXT)
- const proxyText = textProxy(context, text, [ ... path, index ], readonly);
- proxyText.splice(0,0,...value)
- break;
- case "map":
- const map = context.insert(objectId, index, MAP)
- const proxyMap = mapProxy(context, map, [ ... path, index ], readonly);
- for (const key in value) {
- proxyMap[key] = value[key]
- }
- break;
- default:
- context.insert(objectId, index, value, datatype)
- }
- index += 1
- }
- return result
- },
-
- unshift(...values) {
- this.splice(0, 0, ...values)
- return context.length(objectId)
- },
-
- entries() {
- let i = 0;
- const iterator = {
- next: () => {
- let value = valueAt(target, i)
- if (value === undefined) {
- return { value: undefined, done: true }
- } else {
- return { value: [ i, value ], done: false }
- }
- }
- }
- return iterator
- },
-
- keys() {
- let i = 0;
- let len = context.length(objectId, heads)
- const iterator = {
- next: () => {
- let value = undefined
- if (i < len) { value = i; i++ }
- return { value, done: true }
- }
- }
- return iterator
- },
-
- values() {
- let i = 0;
- const iterator = {
- next: () => {
- let value = valueAt(target, i)
- if (value === undefined) {
- return { value: undefined, done: true }
- } else {
- return { value, done: false }
- }
- }
- }
- return iterator
- }
- }
-
- // Read-only methods that can delegate to the JavaScript built-in implementations
- // FIXME - super slow
- for (let method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes',
- 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight',
- 'slice', 'some', 'toLocaleString', 'toString']) {
- methods[method] = (...args) => {
- const list = []
- while (true) {
- let value = valueAt(target, list.length)
- if (value == undefined) {
- break
- }
- list.push(value)
- }
-
- return list[method](...args)
- }
- }
-
- return methods
-}
-
-function textMethods(target) {
- const {context, objectId, path, readonly, frozen} = target
- const methods = {
- set (index, value) {
- return this[index] = value
- },
- get (index) {
- return this[index]
- },
- toString () {
- let str = ''
- let length = this.length
- for (let i = 0; i < length; i++) {
- const value = this.get(i)
- if (typeof value === 'string') str += value
- }
- return str
- },
- toSpans () {
- let spans = []
- let chars = ''
- let length = this.length
- for (let i = 0; i < length; i++) {
- const value = this[i]
- if (typeof value === 'string') {
- chars += value
- } else {
- if (chars.length > 0) {
- spans.push(chars)
- chars = ''
- }
- spans.push(value)
- }
- }
- if (chars.length > 0) {
- spans.push(chars)
- }
- return spans
- },
- toJSON () {
- return this.toString()
- }
- }
- return methods
-}
-
-
-module.exports = { rootProxy, textProxy, listProxy, mapProxy, MapHandler, ListHandler, TextHandler }
diff --git a/automerge-js/src/text.js b/automerge-js/src/text.js
deleted file mode 100644
index a7f442fe..00000000
--- a/automerge-js/src/text.js
+++ /dev/null
@@ -1,132 +0,0 @@
-const { OBJECT_ID } = require('./constants')
-const { isObject } = require('../src/common')
-
-class Text {
- constructor (text) {
- const instance = Object.create(Text.prototype)
- if (typeof text === 'string') {
- instance.elems = [...text]
- } else if (Array.isArray(text)) {
- instance.elems = text
- } else if (text === undefined) {
- instance.elems = []
- } else {
- throw new TypeError(`Unsupported initial value for Text: ${text}`)
- }
- return instance
- }
-
- get length () {
- return this.elems.length
- }
-
- get (index) {
- return this.elems[index]
- }
-
- getElemId (index) {
- return undefined
- }
-
- /**
- * Iterates over the text elements character by character, including any
- * inline objects.
- */
- [Symbol.iterator] () {
- let elems = this.elems, index = -1
- return {
- next () {
- index += 1
- if (index < elems.length) {
- return {done: false, value: elems[index]}
- } else {
- return {done: true}
- }
- }
- }
- }
-
- /**
- * Returns the content of the Text object as a simple string, ignoring any
- * non-character elements.
- */
- toString() {
- // Concatting to a string is faster than creating an array and then
- // .join()ing for small (<100KB) arrays.
- // https://jsperf.com/join-vs-loop-w-type-test
- let str = ''
- for (const elem of this.elems) {
- if (typeof elem === 'string') str += elem
- }
- return str
- }
-
- /**
- * Returns the content of the Text object as a sequence of strings,
- * interleaved with non-character elements.
- *
- * For example, the value ['a', 'b', {x: 3}, 'c', 'd'] has spans:
- * => ['ab', {x: 3}, 'cd']
- */
- toSpans() {
- let spans = []
- let chars = ''
- for (const elem of this.elems) {
- if (typeof elem === 'string') {
- chars += elem
- } else {
- if (chars.length > 0) {
- spans.push(chars)
- chars = ''
- }
- spans.push(elem)
- }
- }
- if (chars.length > 0) {
- spans.push(chars)
- }
- return spans
- }
-
- /**
- * Returns the content of the Text object as a simple string, so that the
- * JSON serialization of an Automerge document represents text nicely.
- */
- toJSON() {
- return this.toString()
- }
-
- /**
- * Updates the list item at position `index` to a new value `value`.
- */
- set (index, value) {
- this.elems[index] = value
- }
-
- /**
- * Inserts new list items `values` starting at position `index`.
- */
- insertAt(index, ...values) {
- this.elems.splice(index, 0, ... values)
- }
-
- /**
- * Deletes `numDelete` list items starting at position `index`.
- * if `numDelete` is not given, one item is deleted.
- */
- deleteAt(index, numDelete = 1) {
- this.elems.splice(index, numDelete)
- }
-}
-
-// Read-only methods that can delegate to the JavaScript built-in array
-for (let method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes',
- 'indexOf', 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight',
- 'slice', 'some', 'toLocaleString']) {
- Text.prototype[method] = function (...args) {
- const array = [...this]
- return array[method](...args)
- }
-}
-
-module.exports = { Text }
diff --git a/automerge-js/src/uuid.js b/automerge-js/src/uuid.js
deleted file mode 100644
index 42a8cc6e..00000000
--- a/automerge-js/src/uuid.js
+++ /dev/null
@@ -1,16 +0,0 @@
-const { v4: uuid } = require('uuid')
-
-function defaultFactory() {
- return uuid().replace(/-/g, '')
-}
-
-let factory = defaultFactory
-
-function makeUuid() {
- return factory()
-}
-
-makeUuid.setFactory = newFactory => { factory = newFactory }
-makeUuid.reset = () => { factory = defaultFactory }
-
-module.exports = makeUuid
diff --git a/automerge-js/test/basic_test.js b/automerge-js/test/basic_test.js
deleted file mode 100644
index 68d2fecf..00000000
--- a/automerge-js/test/basic_test.js
+++ /dev/null
@@ -1,164 +0,0 @@
-
-const assert = require('assert')
-const util = require('util')
-const Automerge = require('..')
-
-describe('Automerge', () => {
- describe('basics', () => {
- it('should init clone and free', () => {
- let doc1 = Automerge.init()
- let doc2 = Automerge.clone(doc1);
- })
-
- it('handle basic set and read on root object', () => {
- let doc1 = Automerge.init()
- let doc2 = Automerge.change(doc1, (d) => {
- d.hello = "world"
- d.big = "little"
- d.zip = "zop"
- d.app = "dap"
- assert.deepEqual(d, { hello: "world", big: "little", zip: "zop", app: "dap" })
- })
- assert.deepEqual(doc2, { hello: "world", big: "little", zip: "zop", app: "dap" })
- })
-
- it('handle basic sets over many changes', () => {
- let doc1 = Automerge.init()
- let timestamp = new Date();
- let counter = new Automerge.Counter(100);
- let bytes = new Uint8Array([10,11,12]);
- let doc2 = Automerge.change(doc1, (d) => {
- d.hello = "world"
- })
- let doc3 = Automerge.change(doc2, (d) => {
- d.counter1 = counter
- })
- let doc4 = Automerge.change(doc3, (d) => {
- d.timestamp1 = timestamp
- })
- let doc5 = Automerge.change(doc4, (d) => {
- d.app = null
- })
- let doc6 = Automerge.change(doc5, (d) => {
- d.bytes1 = bytes
- })
- let doc7 = Automerge.change(doc6, (d) => {
- d.uint = new Automerge.Uint(1)
- d.int = new Automerge.Int(-1)
- d.float64 = new Automerge.Float64(5.5)
- d.number1 = 100
- d.number2 = -45.67
- d.true = true
- d.false = false
- })
-
- assert.deepEqual(doc7, { hello: "world", true: true, false: false, int: -1, uint: 1, float64: 5.5, number1: 100, number2: -45.67, counter1: counter, timestamp1: timestamp, bytes1: bytes, app: null })
-
- let changes = Automerge.getAllChanges(doc7)
- let t1 = Automerge.init()
- ;let [t2] = Automerge.applyChanges(t1, changes)
- assert.deepEqual(doc7,t2)
- })
-
- it('handle overwrites to values', () => {
- let doc1 = Automerge.init()
- let doc2 = Automerge.change(doc1, (d) => {
- d.hello = "world1"
- })
- let doc3 = Automerge.change(doc2, (d) => {
- d.hello = "world2"
- })
- let doc4 = Automerge.change(doc3, (d) => {
- d.hello = "world3"
- })
- let doc5 = Automerge.change(doc4, (d) => {
- d.hello = "world4"
- })
- assert.deepEqual(doc5, { hello: "world4" } )
- })
-
- it('handle set with object value', () => {
- let doc1 = Automerge.init()
- let doc2 = Automerge.change(doc1, (d) => {
- d.subobj = { hello: "world", subsubobj: { zip: "zop" } }
- })
- assert.deepEqual(doc2, { subobj: { hello: "world", subsubobj: { zip: "zop" } } })
- })
-
- it('handle simple list creation', () => {
- let doc1 = Automerge.init()
- let doc2 = Automerge.change(doc1, (d) => d.list = [])
- assert.deepEqual(doc2, { list: []})
- })
-
- it('handle simple lists', () => {
- let doc1 = Automerge.init()
- let doc2 = Automerge.change(doc1, (d) => {
- d.list = [ 1, 2, 3 ]
- })
- assert.deepEqual(doc2.list.length, 3)
- assert.deepEqual(doc2.list[0], 1)
- assert.deepEqual(doc2.list[1], 2)
- assert.deepEqual(doc2.list[2], 3)
- assert.deepEqual(doc2, { list: [1,2,3] })
- // assert.deepStrictEqual(Automerge.toJS(doc2), { list: [1,2,3] })
-
- let doc3 = Automerge.change(doc2, (d) => {
- d.list[1] = "a"
- })
-
- assert.deepEqual(doc3.list.length, 3)
- assert.deepEqual(doc3.list[0], 1)
- assert.deepEqual(doc3.list[1], "a")
- assert.deepEqual(doc3.list[2], 3)
- assert.deepEqual(doc3, { list: [1,"a",3] })
- })
- it('handle simple lists', () => {
- let doc1 = Automerge.init()
- let doc2 = Automerge.change(doc1, (d) => {
- d.list = [ 1, 2, 3 ]
- })
- let changes = Automerge.getChanges(doc1, doc2)
- let docB1 = Automerge.init()
- ;let [docB2] = Automerge.applyChanges(docB1, changes)
- assert.deepEqual(docB2, doc2);
- })
- it('handle text', () => {
- let doc1 = Automerge.init()
- let tmp = new Automerge.Text("hello")
- let doc2 = Automerge.change(doc1, (d) => {
- d.list = new Automerge.Text("hello")
- d.list.insertAt(2,"Z")
- })
- let changes = Automerge.getChanges(doc1, doc2)
- let docB1 = Automerge.init()
- ;let [docB2] = Automerge.applyChanges(docB1, changes)
- assert.deepEqual(docB2, doc2);
- })
-
- it('have many list methods', () => {
- let doc1 = Automerge.from({ list: [1,2,3] })
- assert.deepEqual(doc1, { list: [1,2,3] });
- let doc2 = Automerge.change(doc1, (d) => {
- d.list.splice(1,1,9,10)
- })
- assert.deepEqual(doc2, { list: [1,9,10,3] });
- let doc3 = Automerge.change(doc2, (d) => {
- d.list.push(11,12)
- })
- assert.deepEqual(doc3, { list: [1,9,10,3,11,12] });
- let doc4 = Automerge.change(doc3, (d) => {
- d.list.unshift(2,2)
- })
- assert.deepEqual(doc4, { list: [2,2,1,9,10,3,11,12] });
- let doc5 = Automerge.change(doc4, (d) => {
- d.list.shift()
- })
- assert.deepEqual(doc5, { list: [2,1,9,10,3,11,12] });
- let doc6 = Automerge.change(doc5, (d) => {
- d.list.insertAt(3,100,101)
- })
- assert.deepEqual(doc6, { list: [2,1,9,100,101,10,3,11,12] });
- })
- })
-})
diff --git a/automerge-js/test/columnar_test.js b/automerge-js/test/columnar_test.js
deleted file mode 100644
index 8cbe1482..00000000
--- a/automerge-js/test/columnar_test.js
+++ /dev/null
@@ -1,97 +0,0 @@
-const assert = require('assert')
-const { checkEncoded } = require('./helpers')
-const Automerge = require('..')
-const { encodeChange, decodeChange } = Automerge
-
-describe('change encoding', () => {
- it('should encode text edits', () => {
- /*
- const change1 = {actor: 'aaaa', seq: 1, startOp: 1, time: 9, message: '', deps: [], ops: [
- {action: 'makeText', obj: '_root', key: 'text', insert: false, pred: []},
- {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'h', pred: []},
- {action: 'del', obj: '1@aaaa', elemId: '2@aaaa', insert: false, pred: ['2@aaaa']},
- {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'H', pred: []},
- {action: 'set', obj: '1@aaaa', elemId: '4@aaaa', insert: true, value: 'i', pred: []}
- ]}
- */
- const change1 = {actor: 'aaaa', seq: 1, startOp: 1, time: 9, message: null, deps: [], ops: [
- {action: 'makeText', obj: '_root', key: 'text', pred: []},
- {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'h', pred: []},
- {action: 'del', obj: '1@aaaa', elemId: '2@aaaa', pred: ['2@aaaa']},
- {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'H', pred: []},
- {action: 'set', obj: '1@aaaa', elemId: '4@aaaa', insert: true, value: 'i', pred: []}
- ]}
- checkEncoded(encodeChange(change1), [
- 0x85, 0x6f, 0x4a, 0x83, // magic bytes
- 0xe2, 0xbd, 0xfb, 0xf5, // checksum
- 1, 94, 0, 2, 0xaa, 0xaa, // chunkType: change, length, deps, actor 'aaaa'
- 1, 1, 9, 0, 0, // seq, startOp, time, message, actor list
- 12, 0x01, 4, 0x02, 4, // column count, objActor, objCtr
- 0x11, 8, 0x13, 7, 0x15, 8, // keyActor, keyCtr, keyStr
- 0x34, 4, 0x42, 6, // insert, action
- 0x56, 6, 0x57, 3, // valLen, valRaw
- 0x70, 6, 0x71, 2, 0x73, 2, // predNum, predActor, predCtr
- 0, 1, 4, 0, // objActor column: null, 0, 0, 0, 0
- 0, 1, 4, 1, // objCtr column: null, 1, 1, 1, 1
- 0, 2, 0x7f, 0, 0, 1, 0x7f, 0, // keyActor column: null, null, 0, null, 0
- 0, 1, 0x7c, 0, 2, 0x7e, 4, // keyCtr column: null, 0, 2, 0, 4
- 0x7f, 4, 0x74, 0x65, 0x78, 0x74, 0, 4, // keyStr column: 'text', null, null, null, null
- 1, 1, 1, 2, // insert column: false, true, false, true, true
- 0x7d, 4, 1, 3, 2, 1, // action column: makeText, set, del, set, set
- 0x7d, 0, 0x16, 0, 2, 0x16, // valLen column: 0, 0x16, 0, 0x16, 0x16
- 0x68, 0x48, 0x69, // valRaw column: 'h', 'H', 'i'
- 2, 0, 0x7f, 1, 2, 0, // predNum column: 0, 0, 1, 0, 0
- 0x7f, 0, // predActor column: 0
- 0x7f, 2 // predCtr column: 2
- ])
- const decoded = decodeChange(encodeChange(change1))
- assert.deepStrictEqual(decoded, Object.assign({hash: decoded.hash}, change1))
- })
-
- // FIXME - skipping this because it was never implemented in the Rust implementation and isn't trivial
-/*
- it.skip('should require strict ordering of preds', () => {
- const change = new Uint8Array([
- 133, 111, 74, 131, 31, 229, 112, 44, 1, 105, 1, 58, 30, 190, 100, 253, 180, 180, 66, 49, 126,
- 81, 142, 10, 3, 35, 140, 189, 231, 34, 145, 57, 66, 23, 224, 149, 64, 97, 88, 140, 168, 194,
- 229, 4, 244, 209, 58, 138, 67, 140, 1, 152, 236, 250, 2, 0, 1, 4, 55, 234, 66, 242, 8, 21, 11,
- 52, 1, 66, 2, 86, 3, 87, 10, 112, 2, 113, 3, 115, 4, 127, 9, 99, 111, 109, 109, 111, 110, 86,
- 97, 114, 1, 127, 1, 127, 166, 1, 52, 48, 57, 49, 52, 57, 52, 53, 56, 50, 127, 2, 126, 0, 1,
- 126, 139, 1, 0
- ])
- assert.throws(() => { decodeChange(change) }, /operation IDs are not in ascending order/)
- })
-*/
-
- describe('with trailing bytes', () => {
- let change = new Uint8Array([
- 0x85, 0x6f, 0x4a, 0x83, // magic bytes
- 0xb2, 0x98, 0x9e, 0xa9, // checksum
- 1, 61, 0, 2, 0x12, 0x34, // chunkType: change, length, deps, actor '1234'
- 1, 1, 252, 250, 220, 255, 5, // seq, startOp, time
- 14, 73, 110, 105, 116, 105, 97, 108, 105, 122, 97, 116, 105, 111, 110, // message: 'Initialization'
- 0, 6, // actor list, column count
- 0x15, 3, 0x34, 1, 0x42, 2, // keyStr, insert, action
- 0x56, 2, 0x57, 1, 0x70, 2, // valLen, valRaw, predNum
- 0x7f, 1, 0x78, // keyStr: 'x'
- 1, // insert: false
- 0x7f, 1, // action: set
- 0x7f, 19, // valLen: 1 byte of type uint
- 1, // valRaw: 1
- 0x7f, 0, // predNum: 0
- 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 // 10 trailing bytes
- ])
-
- it('should allow decoding and re-encoding', () => {
- // NOTE: This calls the JavaScript encoding and decoding functions, even when the WebAssembly
- // backend is loaded. Should the wasm backend export its own functions for testing?
- checkEncoded(change, encodeChange(decodeChange(change)))
- })
-
- it('should be preserved in document encoding', () => {
- const [doc] = Automerge.applyChanges(Automerge.init(), [change])
- const [reconstructed] = Automerge.getAllChanges(Automerge.load(Automerge.save(doc)))
- checkEncoded(change, reconstructed)
- })
- })
-})
diff --git a/automerge-js/test/legacy_tests.js b/automerge-js/test/legacy_tests.js
deleted file mode 100644
index 49cbb079..00000000
--- a/automerge-js/test/legacy_tests.js
+++ /dev/null
@@ -1,1394 +0,0 @@
-const assert = require('assert')
-//const Automerge = process.env.TEST_DIST === '1' ? require('../dist/automerge') : require('../src/automerge')
-const Automerge = require('../src')
-const { assertEqualsOneOf } = require('./helpers')
-const { decodeChange } = require('../src/columnar')
-//const { decodeChange } = Automerge
-
-const UUID_PATTERN = /^[0-9a-f]{32}$/
-const OPID_PATTERN = /^[0-9]+@[0-9a-f]{32}$/
-
-// CORE FEATURES
-//
-// TODO - Cursors
-// TODO - Tables
-// TODO - on-pass load() & reconstruct change from opset
-// TODO - micro-patches (needed for fully hydrated object in js)
-// TODO - valueAt(heads) / GC
-//
-// AUTOMERGE UNSUPPORTED
-//
-// TODO - patchCallback
-
-
-describe('Automerge', () => {
- describe('initialization ', () => {
- it('should initially be an empty map', () => {
- const doc = Automerge.init()
- assert.deepStrictEqual(doc, {})
- })
-
- it('should allow instantiating from an existing object', () => {
- const initialState = { birds: { wrens: 3, magpies: 4 } }
- const doc = Automerge.from(initialState)
- assert.deepStrictEqual(doc, initialState)
- })
-
- it('should allow merging of an object initialized with `from`', () => {
- let doc1 = Automerge.from({ cards: [] })
- let doc2 = Automerge.merge(Automerge.init(), doc1)
- assert.deepStrictEqual(doc2, { cards: [] })
- })
-
- it('should allow passing an actorId when instantiating from an existing object', () => {
- const actorId = '1234'
- let doc = Automerge.from({ foo: 1 }, actorId)
- assert.strictEqual(Automerge.getActorId(doc), '1234')
- })
-
- it('accepts an empty object as initial state', () => {
- const doc = Automerge.from({})
- assert.deepStrictEqual(doc, {})
- })
-
- it('accepts an array as initial state, but converts it to an object', () => {
- const doc = Automerge.from(['a', 'b', 'c'])
- assert.deepStrictEqual(doc, { '0': 'a', '1': 'b', '2': 'c' })
- })
-
- it('accepts strings as initial values, but treats them as an array of characters', () => {
- const doc = Automerge.from('abc')
- assert.deepStrictEqual(doc, { '0': 'a', '1': 'b', '2': 'c' })
- })
-
- it('ignores numbers provided as initial values', () => {
- const doc = Automerge.from(123)
- assert.deepStrictEqual(doc, {})
- })
-
- it('ignores booleans provided as initial values', () => {
- const doc1 = Automerge.from(false)
- assert.deepStrictEqual(doc1, {})
- const doc2 = Automerge.from(true)
- assert.deepStrictEqual(doc2, {})
- })
- })
-
- describe('sequential use', () => {
- let s1, s2
- beforeEach(() => {
- s1 = Automerge.init()
- })
-
- it('should not mutate objects', () => {
- s2 = Automerge.change(s1, doc => doc.foo = 'bar')
- assert.strictEqual(s1.foo, undefined)
- assert.strictEqual(s2.foo, 'bar')
- })
-
- it('changes should be retrievable', () => {
- const change1 = Automerge.getLastLocalChange(s1)
- s2 = Automerge.change(s1, doc => doc.foo = 'bar')
- const change2 = Automerge.getLastLocalChange(s2)
- assert.strictEqual(change1, null)
- const change = decodeChange(change2)
- assert.deepStrictEqual(change, {
- actor: change.actor, deps: [], seq: 1, startOp: 1,
- hash: change.hash, message: '', time: change.time,
- ops: [{obj: '_root', key: 'foo', action: 'set', insert: false, value: 'bar', pred: []}]
- })
- })
-
- it('should not register any conflicts on repeated assignment', () => {
- assert.strictEqual(Automerge.getConflicts(s1, 'foo'), undefined)
- s1 = Automerge.change(s1, 'change', doc => doc.foo = 'one')
- assert.strictEqual(Automerge.getConflicts(s1, 'foo'), undefined)
- s1 = Automerge.change(s1, 'change', doc => doc.foo = 'two')
- assert.strictEqual(Automerge.getConflicts(s1, 'foo'), undefined)
- })
-
- describe('changes', () => {
- it('should group several changes', () => {
- s2 = Automerge.change(s1, 'change message', doc => {
- doc.first = 'one'
- assert.strictEqual(doc.first, 'one')
- doc.second = 'two'
- assert.deepStrictEqual(doc, {
- first: 'one', second: 'two'
- })
- })
- assert.deepStrictEqual(s1, {})
- assert.deepStrictEqual(s2, {first: 'one', second: 'two'})
- })
-
- it('should freeze objects if desired', () => {
- s1 = Automerge.init({freeze: true})
- s2 = Automerge.change(s1, doc => doc.foo = 'bar')
- try {
- s2.foo = 'lemon'
- } catch (e) { }
- assert.strictEqual(s2.foo, 'bar')
-
- let deleted = false
- try {
- deleted = delete s2.foo
- } catch (e) { }
- assert.strictEqual(s2.foo, 'bar')
- assert.strictEqual(deleted, false)
-
- Automerge.change(s2, () => {
- try {
- s2.foo = 'lemon'
- } catch (e) { }
- assert.strictEqual(s2.foo, 'bar')
- })
-
- assert.throws(() => { Object.assign(s2, {x: 4}) })
- assert.strictEqual(s2.x, undefined)
- })
-
- it('should allow repeated reading and writing of values', () => {
- s2 = Automerge.change(s1, 'change message', doc => {
- doc.value = 'a'
- assert.strictEqual(doc.value, 'a')
- doc.value = 'b'
- doc.value = 'c'
- assert.strictEqual(doc.value, 'c')
- })
- assert.deepStrictEqual(s1, {})
- assert.deepStrictEqual(s2, {value: 'c'})
- })
-
- it('should not record conflicts when writing the same field several times within one change', () => {
- s1 = Automerge.change(s1, 'change message', doc => {
- doc.value = 'a'
- doc.value = 'b'
- doc.value = 'c'
- })
- assert.strictEqual(s1.value, 'c')
- assert.strictEqual(Automerge.getConflicts(s1, 'value'), undefined)
- })
-
- it('should return the unchanged state object if nothing changed', () => {
- s2 = Automerge.change(s1, () => {})
- assert.strictEqual(s2, s1)
- })
-
- it('should ignore field updates that write the existing value', () => {
- s1 = Automerge.change(s1, doc => doc.field = 123)
- s2 = Automerge.change(s1, doc => doc.field = 123)
- assert.strictEqual(s2, s1)
- })
-
- it('should not ignore field updates that resolve a conflict', () => {
- s2 = Automerge.merge(Automerge.init(), s1)
- s1 = Automerge.change(s1, doc => doc.field = 123)
- s2 = Automerge.change(s2, doc => doc.field = 321)
- s1 = Automerge.merge(s1, s2)
- assert.strictEqual(Object.keys(Automerge.getConflicts(s1, 'field')).length, 2)
- const resolved = Automerge.change(s1, doc => doc.field = s1.field)
- assert.notStrictEqual(resolved, s1)
- assert.deepStrictEqual(resolved, {field: s1.field})
- assert.strictEqual(Automerge.getConflicts(resolved, 'field'), undefined)
- })
-
- it('should ignore list element updates that write the existing value', () => {
- s1 = Automerge.change(s1, doc => doc.list = [123])
- s2 = Automerge.change(s1, doc => doc.list[0] = 123)
- assert.strictEqual(s2, s1)
- })
-
- it('should not ignore list element updates that resolve a conflict', () => {
- s1 = Automerge.change(s1, doc => doc.list = [1])
- s2 = Automerge.merge(Automerge.init(), s1)
- s1 = Automerge.change(s1, doc => doc.list[0] = 123)
- s2 = Automerge.change(s2, doc => doc.list[0] = 321)
- s1 = Automerge.merge(s1, s2)
- assert.deepStrictEqual(Automerge.getConflicts(s1.list, 0), {
- [`3@${Automerge.getActorId(s1)}`]: 123,
- [`3@${Automerge.getActorId(s2)}`]: 321
- })
- const resolved = Automerge.change(s1, doc => doc.list[0] = s1.list[0])
- assert.deepStrictEqual(resolved, s1)
- assert.notStrictEqual(resolved, s1)
- assert.strictEqual(Automerge.getConflicts(resolved.list, 0), undefined)
- })
-
- it('should sanity-check arguments', () => {
- s1 = Automerge.change(s1, doc => doc.nested = {})
- assert.throws(() => { Automerge.change({}, doc => doc.foo = 'bar') }, /must be the document root/)
- assert.throws(() => { Automerge.change(s1.nested, doc => doc.foo = 'bar') }, /must be the document root/)
- })
-
- it('should not allow nested change blocks', () => {
- assert.throws(() => {
- Automerge.change(s1, doc1 => {
- Automerge.change(doc1, doc2 => {
- doc2.foo = 'bar'
- })
- })
- }, /Calls to Automerge.change cannot be nested/)
- assert.throws(() => {
- s1 = Automerge.change(s1, doc1 => {
- s2 = Automerge.change(s1, doc2 => doc2.two = 2)
- doc1.one = 1
- })
- }, /Attempting to use an outdated Automerge document/)
- })
-
- it('should not allow the same base document to be used for multiple changes', () => {
- assert.throws(() => {
- Automerge.change(s1, doc => doc.one = 1)
- Automerge.change(s1, doc => doc.two = 2)
- }, /Attempting to use an outdated Automerge document/)
- })
-
- it('should allow a document to be cloned', () => {
- s1 = Automerge.change(s1, doc => doc.zero = 0)
- s2 = Automerge.clone(s1)
- s1 = Automerge.change(s1, doc => doc.one = 1)
- s2 = Automerge.change(s2, doc => doc.two = 2)
- assert.deepStrictEqual(s1, {zero: 0, one: 1})
- assert.deepStrictEqual(s2, {zero: 0, two: 2})
- Automerge.free(s1)
- Automerge.free(s2)
- })
-
- it('should work with Object.assign merges', () => {
- s1 = Automerge.change(s1, doc1 => {
- doc1.stuff = {foo: 'bar', baz: 'blur'}
- })
- s1 = Automerge.change(s1, doc1 => {
- doc1.stuff = Object.assign({}, doc1.stuff, {baz: 'updated!'})
- })
- assert.deepStrictEqual(s1, {stuff: {foo: 'bar', baz: 'updated!'}})
- })
-
- it('should support Date objects in maps', () => {
- const now = new Date()
- s1 = Automerge.change(s1, doc => doc.now = now)
- let changes = Automerge.getAllChanges(s1)
- ;[s2] = Automerge.applyChanges(Automerge.init(), changes)
- assert.strictEqual(s2.now instanceof Date, true)
- assert.strictEqual(s2.now.getTime(), now.getTime())
- })
-
- it('should support Date objects in lists', () => {
- const now = new Date()
- s1 = Automerge.change(s1, doc => doc.list = [now])
- let changes = Automerge.getAllChanges(s1)
- ;[s2] = Automerge.applyChanges(Automerge.init(), changes)
- assert.strictEqual(s2.list[0] instanceof Date, true)
- assert.strictEqual(s2.list[0].getTime(), now.getTime())
- })
-
- /*
- it.skip('should call patchCallback if supplied', () => {
- const callbacks = [], actor = Automerge.getActorId(s1)
- const s2 = Automerge.change(s1, {
- patchCallback: (patch, before, after, local) => callbacks.push({patch, before, after, local})
- }, doc => {
- doc.birds = ['Goldfinch']
- })
- assert.strictEqual(callbacks.length, 1)
- assert.deepStrictEqual(callbacks[0].patch, {
- actor, seq: 1, maxOp: 2, deps: [], clock: {[actor]: 1}, pendingChanges: 0,
- diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: {
- objectId: `1@${actor}`, type: 'list', edits: [
- {action: 'insert', index: 0, elemId: `2@${actor}`, opId: `2@${actor}`, value: {'type': 'value', value: 'Goldfinch'}}
- ]
- }}}}
- })
- assert.strictEqual(callbacks[0].before, s1)
- assert.strictEqual(callbacks[0].after, s2)
- assert.strictEqual(callbacks[0].local, true)
- })
- */
-
- /*
- it.skip('should call a patchCallback set up on document initialisation', () => {
- const callbacks = []
- s1 = Automerge.init({
- patchCallback: (patch, before, after, local) => callbacks.push({patch, before, after, local})
- })
- const s2 = Automerge.change(s1, doc => doc.bird = 'Goldfinch')
- const actor = Automerge.getActorId(s1)
- assert.strictEqual(callbacks.length, 1)
- assert.deepStrictEqual(callbacks[0].patch, {
- actor, seq: 1, maxOp: 1, deps: [], clock: {[actor]: 1}, pendingChanges: 0,
- diffs: {objectId: '_root', type: 'map', props: {bird: {[`1@${actor}`]: {type: 'value', value: 'Goldfinch'}}}}
- })
- assert.strictEqual(callbacks[0].before, s1)
- assert.strictEqual(callbacks[0].after, s2)
- assert.strictEqual(callbacks[0].local, true)
- })
- */
- })
-
- describe('emptyChange()', () => {
- it('should append an empty change to the history', () => {
- s1 = Automerge.change(s1, 'first change', doc => doc.field = 123)
- s2 = Automerge.emptyChange(s1, 'empty change')
- assert.notStrictEqual(s2, s1)
- assert.deepStrictEqual(s2, s1)
- assert.deepStrictEqual(Automerge.getHistory(s2).map(state => state.change.message), ['first change', 'empty change'])
- })
-
- it('should reference dependencies', () => {
- s1 = Automerge.change(s1, doc => doc.field = 123)
- s2 = Automerge.merge(Automerge.init(), s1)
- s2 = Automerge.change(s2, doc => doc.other = 'hello')
- s1 = Automerge.emptyChange(Automerge.merge(s1, s2))
- const history = Automerge.getHistory(s1)
- const emptyChange = history[2].change
- assert.deepStrictEqual(emptyChange.deps, [history[0].change.hash, history[1].change.hash].sort())
- assert.deepStrictEqual(emptyChange.ops, [])
- })
- })
-
- describe('root object', () => {
- it('should handle single-property assignment', () => {
- s1 = Automerge.change(s1, 'set bar', doc => doc.foo = 'bar')
- s1 = Automerge.change(s1, 'set zap', doc => doc.zip = 'zap')
- assert.strictEqual(s1.foo, 'bar')
- assert.strictEqual(s1.zip, 'zap')
- assert.deepStrictEqual(s1, {foo: 'bar', zip: 'zap'})
- })
-
- it('should allow floating-point values', () => {
- s1 = Automerge.change(s1, doc => doc.number = 1589032171.1)
- assert.strictEqual(s1.number, 1589032171.1)
- })
-
- it('should handle multi-property assignment', () => {
- s1 = Automerge.change(s1, 'multi-assign', doc => {
- Object.assign(doc, {foo: 'bar', answer: 42})
- })
- assert.strictEqual(s1.foo, 'bar')
- assert.strictEqual(s1.answer, 42)
- assert.deepStrictEqual(s1, {foo: 'bar', answer: 42})
- })
-
- it('should handle root property deletion', () => {
- s1 = Automerge.change(s1, 'set foo', doc => { doc.foo = 'bar'; doc.something = null })
- s1 = Automerge.change(s1, 'del foo', doc => { delete doc.foo })
- assert.strictEqual(s1.foo, undefined)
- assert.strictEqual(s1.something, null)
- assert.deepStrictEqual(s1, {something: null})
- })
-
- it('should follow JS delete behavior', () => {
- s1 = Automerge.change(s1, 'set foo', doc => { doc.foo = 'bar' })
- let deleted
- s1 = Automerge.change(s1, 'del foo', doc => {
- deleted = delete doc.foo
- })
- assert.strictEqual(deleted, true)
- let deleted2
- assert.doesNotThrow(() => {
- s1 = Automerge.change(s1, 'del baz', doc => {
- deleted2 = delete doc.baz
- })
- })
- assert.strictEqual(deleted2, true)
- })
-
- it('should allow the type of a property to be changed', () => {
- s1 = Automerge.change(s1, 'set number', doc => doc.prop = 123)
- assert.strictEqual(s1.prop, 123)
- s1 = Automerge.change(s1, 'set string', doc => doc.prop = '123')
- assert.strictEqual(s1.prop, '123')
- s1 = Automerge.change(s1, 'set null', doc => doc.prop = null)
- assert.strictEqual(s1.prop, null)
- s1 = Automerge.change(s1, 'set bool', doc => doc.prop = true)
- assert.strictEqual(s1.prop, true)
- })
-
- it('should require property names to be valid', () => {
- assert.throws(() => {
- Automerge.change(s1, 'foo', doc => doc[''] = 'x')
- }, /must not be an empty string/)
- })
-
- it('should not allow assignment of unsupported datatypes', () => {
- Automerge.change(s1, doc => {
- assert.throws(() => { doc.foo = undefined }, /Unsupported type of value: undefined/)
- assert.throws(() => { doc.foo = {prop: undefined} }, /Unsupported type of value: undefined/)
- assert.throws(() => { doc.foo = () => {} }, /Unsupported type of value: function/)
- assert.throws(() => { doc.foo = Symbol('foo') }, /Unsupported type of value: symbol/)
- })
- })
- })
-
- describe('nested maps', () => {
- it('should assign an objectId to nested maps', () => {
- s1 = Automerge.change(s1, doc => { doc.nested = {} })
- let id = Automerge.getObjectId(s1.nested)
- assert.strictEqual(OPID_PATTERN.test(Automerge.getObjectId(s1.nested)), true)
- assert.notEqual(Automerge.getObjectId(s1.nested), '_root')
- })
-
- it('should handle assignment of a nested property', () => {
- s1 = Automerge.change(s1, 'first change', doc => {
- doc.nested = {}
- doc.nested.foo = 'bar'
- })
- s1 = Automerge.change(s1, 'second change', doc => {
- doc.nested.one = 1
- })
- assert.deepStrictEqual(s1, {nested: {foo: 'bar', one: 1}})
- assert.deepStrictEqual(s1.nested, {foo: 'bar', one: 1})
- assert.strictEqual(s1.nested.foo, 'bar')
- assert.strictEqual(s1.nested.one, 1)
- })
-
- it('should handle assignment of an object literal', () => {
- s1 = Automerge.change(s1, doc => {
- doc.textStyle = {bold: false, fontSize: 12}
- })
- assert.deepStrictEqual(s1, {textStyle: {bold: false, fontSize: 12}})
- assert.deepStrictEqual(s1.textStyle, {bold: false, fontSize: 12})
- assert.strictEqual(s1.textStyle.bold, false)
- assert.strictEqual(s1.textStyle.fontSize, 12)
- })
-
- it('should handle assignment of multiple nested properties', () => {
- s1 = Automerge.change(s1, doc => {
- doc.textStyle = {bold: false, fontSize: 12}
- Object.assign(doc.textStyle, {typeface: 'Optima', fontSize: 14})
- })
- assert.strictEqual(s1.textStyle.typeface, 'Optima')
- assert.strictEqual(s1.textStyle.bold, false)
- assert.strictEqual(s1.textStyle.fontSize, 14)
- assert.deepStrictEqual(s1.textStyle, {typeface: 'Optima', bold: false, fontSize: 14})
- })
-
- it('should handle arbitrary-depth nesting', () => {
- s1 = Automerge.change(s1, doc => {
- doc.a = {b: {c: {d: {e: {f: {g: 'h'}}}}}}
- })
- s1 = Automerge.change(s1, doc => {
- doc.a.b.c.d.e.f.i = 'j'
- })
- assert.deepStrictEqual(s1, {a: { b: { c: { d: { e: { f: { g: 'h', i: 'j'}}}}}}})
- assert.strictEqual(s1.a.b.c.d.e.f.g, 'h')
- assert.strictEqual(s1.a.b.c.d.e.f.i, 'j')
- })
-
- it('should allow an old object to be replaced with a new one', () => {
- s1 = Automerge.change(s1, 'change 1', doc => {
- doc.myPet = {species: 'dog', legs: 4, breed: 'dachshund'}
- })
- s2 = Automerge.change(s1, 'change 2', doc => {
- doc.myPet = {species: 'koi', variety: '紅白', colors: {red: true, white: true, black: false}}
- })
- assert.deepStrictEqual(s1.myPet, {
- species: 'dog', legs: 4, breed: 'dachshund'
- })
- assert.strictEqual(s1.myPet.breed, 'dachshund')
- assert.deepStrictEqual(s2.myPet, {
- species: 'koi', variety: '紅白',
- colors: {red: true, white: true, black: false}
- })
- assert.strictEqual(s2.myPet.breed, undefined)
- assert.strictEqual(s2.myPet.variety, '紅白')
- })
-
- it('should allow fields to be changed between primitive and nested map', () => {
- s1 = Automerge.change(s1, doc => doc.color = '#ff7f00')
- assert.strictEqual(s1.color, '#ff7f00')
- s1 = Automerge.change(s1, doc => doc.color = {red: 255, green: 127, blue: 0})
- assert.deepStrictEqual(s1.color, {red: 255, green: 127, blue: 0})
- s1 = Automerge.change(s1, doc => doc.color = '#ff7f00')
- assert.strictEqual(s1.color, '#ff7f00')
- })
-
- it('should not allow several references to the same map object', () => {
- s1 = Automerge.change(s1, doc => doc.object = {})
- assert.throws(() => {
- Automerge.change(s1, doc => { doc.x = doc.object })
- }, /Cannot create a reference to an existing document object/)
- assert.throws(() => {
- Automerge.change(s1, doc => { doc.x = s1.object })
- }, /Cannot create a reference to an existing document object/)
- assert.throws(() => {
- Automerge.change(s1, doc => { doc.x = {}; doc.y = doc.x })
- }, /Cannot create a reference to an existing document object/)
- })
-
- it('should not allow object-copying idioms', () => {
- s1 = Automerge.change(s1, doc => {
- doc.items = [{id: 'id1', name: 'one'}, {id: 'id2', name: 'two'}]
- })
- // People who have previously worked with immutable state in JavaScript may be tempted
- // to use idioms like this, which don't work well with Automerge -- see e.g.
- // https://github.com/automerge/automerge/issues/260
- assert.throws(() => {
- Automerge.change(s1, doc => {
- doc.items = [...doc.items, {id: 'id3', name: 'three'}]
- })
- }, /Cannot create a reference to an existing document object/)
- })
-
- it('should handle deletion of properties within a map', () => {
- s1 = Automerge.change(s1, 'set style', doc => {
- doc.textStyle = {typeface: 'Optima', bold: false, fontSize: 12}
- })
- s1 = Automerge.change(s1, 'non-bold', doc => delete doc.textStyle.bold)
- assert.strictEqual(s1.textStyle.bold, undefined)
- assert.deepStrictEqual(s1.textStyle, {typeface: 'Optima', fontSize: 12})
- })
-
- it('should handle deletion of references to a map', () => {
- s1 = Automerge.change(s1, 'make rich text doc', doc => {
- Object.assign(doc, {title: 'Hello', textStyle: {typeface: 'Optima', fontSize: 12}})
- })
- s1 = Automerge.change(s1, doc => delete doc.textStyle)
- assert.strictEqual(s1.textStyle, undefined)
- assert.deepStrictEqual(s1, {title: 'Hello'})
- })
-
- it('should validate field names', () => {
- s1 = Automerge.change(s1, doc => doc.nested = {})
- assert.throws(() => { Automerge.change(s1, doc => doc.nested[''] = 'x') }, /must not be an empty string/)
- assert.throws(() => { Automerge.change(s1, doc => doc.nested = {'': 'x'}) }, /must not be an empty string/)
- })
- })
-
- describe('lists', () => {
- it('should allow elements to be inserted', () => {
- s1 = Automerge.change(s1, doc => doc.noodles = [])
- s1 = Automerge.change(s1, doc => doc.noodles.insertAt(0, 'udon', 'soba'))
- s1 = Automerge.change(s1, doc => doc.noodles.insertAt(1, 'ramen'))
- assert.deepStrictEqual(s1, {noodles: ['udon', 'ramen', 'soba']})
- assert.deepStrictEqual(s1.noodles, ['udon', 'ramen', 'soba'])
- assert.strictEqual(s1.noodles[0], 'udon')
- assert.strictEqual(s1.noodles[1], 'ramen')
- assert.strictEqual(s1.noodles[2], 'soba')
- assert.strictEqual(s1.noodles.length, 3)
- })
-
- it('should handle assignment of a list literal', () => {
- s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba'])
- assert.deepStrictEqual(s1, {noodles: ['udon', 'ramen', 'soba']})
- assert.deepStrictEqual(s1.noodles, ['udon', 'ramen', 'soba'])
- assert.strictEqual(s1.noodles[0], 'udon')
- assert.strictEqual(s1.noodles[1], 'ramen')
- assert.strictEqual(s1.noodles[2], 'soba')
- assert.strictEqual(s1.noodles[3], undefined)
- assert.strictEqual(s1.noodles.length, 3)
- })
-
- it('should only allow numeric indexes', () => {
- s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba'])
- s1 = Automerge.change(s1, doc => doc.noodles[1] = 'Ramen!')
- assert.strictEqual(s1.noodles[1], 'Ramen!')
- s1 = Automerge.change(s1, doc => doc.noodles['1'] = 'RAMEN!!!')
- assert.strictEqual(s1.noodles[1], 'RAMEN!!!')
- assert.throws(() => { Automerge.change(s1, doc => doc.noodles.favourite = 'udon') }, /list index must be a number/)
- assert.throws(() => { Automerge.change(s1, doc => doc.noodles[''] = 'udon') }, /list index must be a number/)
- assert.throws(() => { Automerge.change(s1, doc => doc.noodles['1e6'] = 'udon') }, /list index must be a number/)
- })
-
- it('should handle deletion of list elements', () => {
- s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba'])
- s1 = Automerge.change(s1, doc => delete doc.noodles[1])
- assert.deepStrictEqual(s1.noodles, ['udon', 'soba'])
- s1 = Automerge.change(s1, doc => doc.noodles.deleteAt(1))
- assert.deepStrictEqual(s1.noodles, ['udon'])
- assert.strictEqual(s1.noodles[0], 'udon')
- assert.strictEqual(s1.noodles[1], undefined)
- assert.strictEqual(s1.noodles[2], undefined)
- assert.strictEqual(s1.noodles.length, 1)
- })
-
- it('should handle assignment of individual list indexes', () => {
- s1 = Automerge.change(s1, doc => doc.japaneseFood = ['udon', 'ramen', 'soba'])
- s1 = Automerge.change(s1, doc => doc.japaneseFood[1] = 'sushi')
- assert.deepStrictEqual(s1.japaneseFood, ['udon', 'sushi', 'soba'])
- assert.strictEqual(s1.japaneseFood[0], 'udon')
- assert.strictEqual(s1.japaneseFood[1], 'sushi')
- assert.strictEqual(s1.japaneseFood[2], 'soba')
- assert.strictEqual(s1.japaneseFood[3], undefined)
- assert.strictEqual(s1.japaneseFood.length, 3)
- })
-
- it('should treat out-by-one assignment as insertion', () => {
- s1 = Automerge.change(s1, doc => doc.japaneseFood = ['udon'])
- s1 = Automerge.change(s1, doc => doc.japaneseFood[1] = 'sushi')
- assert.deepStrictEqual(s1.japaneseFood, ['udon', 'sushi'])
- assert.strictEqual(s1.japaneseFood[0], 'udon')
- assert.strictEqual(s1.japaneseFood[1], 'sushi')
- assert.strictEqual(s1.japaneseFood[2], undefined)
- assert.strictEqual(s1.japaneseFood.length, 2)
- })
-
- it('should not allow out-of-range assignment', () => {
- s1 = Automerge.change(s1, doc => doc.japaneseFood = ['udon'])
- assert.throws(() => { Automerge.change(s1, doc => doc.japaneseFood[4] = 'ramen') }, /is out of bounds/)
- })
-
- it('should allow bulk assignment of multiple list indexes', () => {
- s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'ramen', 'soba'])
- s1 = Automerge.change(s1, doc => Object.assign(doc.noodles, {0: 'うどん', 2: 'そば'}))
- assert.deepStrictEqual(s1.noodles, ['うどん', 'ramen', 'そば'])
- assert.strictEqual(s1.noodles[0], 'うどん')
- assert.strictEqual(s1.noodles[1], 'ramen')
- assert.strictEqual(s1.noodles[2], 'そば')
- assert.strictEqual(s1.noodles.length, 3)
- })
-
- it('should handle nested objects', () => {
- s1 = Automerge.change(s1, doc => doc.noodles = [{type: 'ramen', dishes: ['tonkotsu', 'shoyu']}])
- s1 = Automerge.change(s1, doc => doc.noodles.push({type: 'udon', dishes: ['tempura udon']}))
- s1 = Automerge.change(s1, doc => doc.noodles[0].dishes.push('miso'))
- assert.deepStrictEqual(s1, {noodles: [
- {type: 'ramen', dishes: ['tonkotsu', 'shoyu', 'miso']},
- {type: 'udon', dishes: ['tempura udon']}
- ]})
- assert.deepStrictEqual(s1.noodles[0], {
- type: 'ramen', dishes: ['tonkotsu', 'shoyu', 'miso']
- })
- assert.deepStrictEqual(s1.noodles[1], {
- type: 'udon', dishes: ['tempura udon']
- })
- })
-
- it('should handle nested lists', () => {
- s1 = Automerge.change(s1, doc => doc.noodleMatrix = [['ramen', 'tonkotsu', 'shoyu']])
- s1 = Automerge.change(s1, doc => doc.noodleMatrix.push(['udon', 'tempura udon']))
- s1 = Automerge.change(s1, doc => doc.noodleMatrix[0].push('miso'))
- assert.deepStrictEqual(s1.noodleMatrix, [['ramen', 'tonkotsu', 'shoyu', 'miso'], ['udon', 'tempura udon']])
- assert.deepStrictEqual(s1.noodleMatrix[0], ['ramen', 'tonkotsu', 'shoyu', 'miso'])
- assert.deepStrictEqual(s1.noodleMatrix[1], ['udon', 'tempura udon'])
- })
-
- it('should handle deep nesting', () => {
- s1 = Automerge.change(s1, doc => doc.nesting = {
- maps: { m1: { m2: { foo: "bar", baz: {} }, m2a: { } } },
- lists: [ [ 1, 2, 3 ], [ [ 3, 4, 5, [6]], 7 ] ],
- mapsinlists: [ { foo: "bar" }, [ { bar: "baz" } ] ],
- listsinmaps: { foo: [1, 2, 3], bar: [ [ { baz: "123" } ] ] }
- })
- s1 = Automerge.change(s1, doc => {
- doc.nesting.maps.m1a = "123"
- doc.nesting.maps.m1.m2.baz.xxx = "123"
- delete doc.nesting.maps.m1.m2a
- doc.nesting.lists.shift()
- doc.nesting.lists[0][0].pop()
- doc.nesting.lists[0][0].push(100)
- doc.nesting.mapsinlists[0].foo = "baz"
- doc.nesting.mapsinlists[1][0].foo = "bar"
- delete doc.nesting.mapsinlists[1]
- doc.nesting.listsinmaps.foo.push(4)
- doc.nesting.listsinmaps.bar[0][0].baz = "456"
- delete doc.nesting.listsinmaps.bar
- })
- assert.deepStrictEqual(s1, { nesting: {
- maps: { m1: { m2: { foo: "bar", baz: { xxx: "123" } } }, m1a: "123" },
- lists: [ [ [ 3, 4, 5, 100 ], 7 ] ],
- mapsinlists: [ { foo: "baz" } ],
- listsinmaps: { foo: [1, 2, 3, 4] }
- }})
- })
-
- it('should handle replacement of the entire list', () => {
- s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'soba', 'ramen'])
- s1 = Automerge.change(s1, doc => doc.japaneseNoodles = doc.noodles.slice())
- s1 = Automerge.change(s1, doc => doc.noodles = ['wonton', 'pho'])
- assert.deepStrictEqual(s1, {
- noodles: ['wonton', 'pho'],
- japaneseNoodles: ['udon', 'soba', 'ramen']
- })
- assert.deepStrictEqual(s1.noodles, ['wonton', 'pho'])
- assert.strictEqual(s1.noodles[0], 'wonton')
- assert.strictEqual(s1.noodles[1], 'pho')
- assert.strictEqual(s1.noodles[2], undefined)
- assert.strictEqual(s1.noodles.length, 2)
- })
-
- it('should allow assignment to change the type of a list element', () => {
- s1 = Automerge.change(s1, doc => doc.noodles = ['udon', 'soba', 'ramen'])
- assert.deepStrictEqual(s1.noodles, ['udon', 'soba', 'ramen'])
- s1 = Automerge.change(s1, doc => doc.noodles[1] = {type: 'soba', options: ['hot', 'cold']})
- assert.deepStrictEqual(s1.noodles, ['udon', {type: 'soba', options: ['hot', 'cold']}, 'ramen'])
- s1 = Automerge.change(s1, doc => doc.noodles[1] = ['hot soba', 'cold soba'])
- assert.deepStrictEqual(s1.noodles, ['udon', ['hot soba', 'cold soba'], 'ramen'])
- s1 = Automerge.change(s1, doc => doc.noodles[1] = 'soba is the best')
- assert.deepStrictEqual(s1.noodles, ['udon', 'soba is the best', 'ramen'])
- })
-
- it('should allow list creation and assignment in the same change callback', () => {
- s1 = Automerge.change(Automerge.init(), doc => {
- doc.letters = ['a', 'b', 'c']
- doc.letters[1] = 'd'
- })
- assert.strictEqual(s1.letters[1], 'd')
- })
-
- it('should allow adding and removing list elements in the same change callback', () => {
- s1 = Automerge.change(Automerge.init(), doc => doc.noodles = [])
- s1 = Automerge.change(s1, doc => {
- doc.noodles.push('udon')
- doc.noodles.deleteAt(0)
- })
- assert.deepStrictEqual(s1, {noodles: []})
- // do the add-remove cycle twice, test for #151 (https://github.com/automerge/automerge/issues/151)
- s1 = Automerge.change(s1, doc => {
- doc.noodles.push('soba')
- doc.noodles.deleteAt(0)
- })
- assert.deepStrictEqual(s1, {noodles: []})
- })
-
- it('should handle arbitrary-depth nesting', () => {
- s1 = Automerge.change(s1, doc => doc.maze = [[[[[[[['noodles', ['here']]]]]]]]])
- s1 = Automerge.change(s1, doc => doc.maze[0][0][0][0][0][0][0][1].unshift('found'))
- assert.deepStrictEqual(s1.maze, [[[[[[[['noodles', ['found', 'here']]]]]]]]])
- assert.deepStrictEqual(s1.maze[0][0][0][0][0][0][0][1][1], 'here')
- s2 = Automerge.load(Automerge.save(s1))
- assert.deepStrictEqual(s1,s2)
- })
-
- it('should not allow several references to the same list object', () => {
- s1 = Automerge.change(s1, doc => doc.list = [])
- assert.throws(() => {
- Automerge.change(s1, doc => { doc.x = doc.list })
- }, /Cannot create a reference to an existing document object/)
- assert.throws(() => {
- Automerge.change(s1, doc => { doc.x = s1.list })
- }, /Cannot create a reference to an existing document object/)
- assert.throws(() => {
- Automerge.change(s1, doc => { doc.x = []; doc.y = doc.x })
- }, /Cannot create a reference to an existing document object/)
- })
- })
-
- describe('counters', () => {
- // counter
- it('should allow deleting counters from maps', () => {
- const s1 = Automerge.change(Automerge.init(), doc => doc.birds = {wrens: new Automerge.Counter(1)})
- const s2 = Automerge.change(s1, doc => doc.birds.wrens.increment(2))
- const s3 = Automerge.change(s2, doc => delete doc.birds.wrens)
- assert.deepStrictEqual(s2, {birds: {wrens: new Automerge.Counter(3)}})
- assert.deepStrictEqual(s3, {birds: {}})
- })
-
- // counter
- it.skip('should not allow deleting counters from lists', () => {
- const s1 = Automerge.change(Automerge.init(), doc => doc.recordings = [new Automerge.Counter(1)])
- const s2 = Automerge.change(s1, doc => doc.recordings[0].increment(2))
- assert.deepStrictEqual(s2, {recordings: [new Automerge.Counter(3)]})
- assert.throws(() => { Automerge.change(s2, doc => doc.recordings.deleteAt(0)) }, /Unsupported operation/)
- })
- })
- })
-
- describe('concurrent use', () => {
- let s1, s2, s3
- beforeEach(() => {
- s1 = Automerge.init()
- s2 = Automerge.init()
- s3 = Automerge.init()
- })
-
- it('should merge concurrent updates of different properties', () => {
- s1 = Automerge.change(s1, doc => doc.foo = 'bar')
- s2 = Automerge.change(s2, doc => doc.hello = 'world')
- s3 = Automerge.merge(s1, s2)
- assert.strictEqual(s3.foo, 'bar')
- assert.strictEqual(s3.hello, 'world')
- assert.deepStrictEqual(s3, {foo: 'bar', hello: 'world'})
- assert.strictEqual(Automerge.getConflicts(s3, 'foo'), undefined)
- assert.strictEqual(Automerge.getConflicts(s3, 'hello'), undefined)
- s4 = Automerge.load(Automerge.save(s3))
- assert.deepEqual(s3,s4)
- })
-
- it('should add concurrent increments of the same property', () => {
- s1 = Automerge.change(s1, doc => doc.counter = new Automerge.Counter())
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.counter.increment())
- s2 = Automerge.change(s2, doc => doc.counter.increment(2))
- s3 = Automerge.merge(s1, s2)
- assert.strictEqual(s1.counter.value, 1)
- assert.strictEqual(s2.counter.value, 2)
- assert.strictEqual(s3.counter.value, 3)
- assert.strictEqual(Automerge.getConflicts(s3, 'counter'), undefined)
- s4 = Automerge.load(Automerge.save(s3))
- assert.deepEqual(s3,s4)
- })
-
- it('should add increments only to the values they precede', () => {
- s1 = Automerge.change(s1, doc => doc.counter = new Automerge.Counter(0))
- s1 = Automerge.change(s1, doc => doc.counter.increment())
- s2 = Automerge.change(s2, doc => doc.counter = new Automerge.Counter(100))
- s2 = Automerge.change(s2, doc => doc.counter.increment(3))
- s3 = Automerge.merge(s1, s2)
- if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) {
- assert.deepStrictEqual(s3, {counter: new Automerge.Counter(1)})
- } else {
- assert.deepStrictEqual(s3, {counter: new Automerge.Counter(103)})
- }
- assert.deepStrictEqual(Automerge.getConflicts(s3, 'counter'), {
- [`1@${Automerge.getActorId(s1)}`]: new Automerge.Counter(1),
- [`1@${Automerge.getActorId(s2)}`]: new Automerge.Counter(103)
- })
- s4 = Automerge.load(Automerge.save(s3))
- assert.deepEqual(s3,s4)
- })
-
- it('should detect concurrent updates of the same field', () => {
- s1 = Automerge.change(s1, doc => doc.field = 'one')
- s2 = Automerge.change(s2, doc => doc.field = 'two')
- s3 = Automerge.merge(s1, s2)
- if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) {
- assert.deepStrictEqual(s3, {field: 'one'})
- } else {
- assert.deepStrictEqual(s3, {field: 'two'})
- }
- assert.deepStrictEqual(Automerge.getConflicts(s3, 'field'), {
- [`1@${Automerge.getActorId(s1)}`]: 'one',
- [`1@${Automerge.getActorId(s2)}`]: 'two'
- })
- })
-
- it('should detect concurrent updates of the same list element', () => {
- s1 = Automerge.change(s1, doc => doc.birds = ['finch'])
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.birds[0] = 'greenfinch')
- s2 = Automerge.change(s2, doc => doc.birds[0] = 'goldfinch')
- s3 = Automerge.merge(s1, s2)
- if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) {
- assert.deepStrictEqual(s3.birds, ['greenfinch'])
- } else {
- assert.deepStrictEqual(s3.birds, ['goldfinch'])
- }
- assert.deepStrictEqual(Automerge.getConflicts(s3.birds, 0), {
- [`3@${Automerge.getActorId(s1)}`]: 'greenfinch',
- [`3@${Automerge.getActorId(s2)}`]: 'goldfinch'
- })
- })
-
- it('should handle assignment conflicts of different types', () => {
- s1 = Automerge.change(s1, doc => doc.field = 'string')
- s2 = Automerge.change(s2, doc => doc.field = ['list'])
- s3 = Automerge.change(s3, doc => doc.field = {thing: 'map'})
- s1 = Automerge.merge(Automerge.merge(s1, s2), s3)
- assertEqualsOneOf(s1.field, 'string', ['list'], {thing: 'map'})
- assert.deepStrictEqual(Automerge.getConflicts(s1, 'field'), {
- [`1@${Automerge.getActorId(s1)}`]: 'string',
- [`1@${Automerge.getActorId(s2)}`]: ['list'],
- [`1@${Automerge.getActorId(s3)}`]: {thing: 'map'}
- })
- })
-
- it('should handle changes within a conflicting map field', () => {
- s1 = Automerge.change(s1, doc => doc.field = 'string')
- s2 = Automerge.change(s2, doc => doc.field = {})
- s2 = Automerge.change(s2, doc => doc.field.innerKey = 42)
- s3 = Automerge.merge(s1, s2)
- assertEqualsOneOf(s3.field, 'string', {innerKey: 42})
- assert.deepStrictEqual(Automerge.getConflicts(s3, 'field'), {
- [`1@${Automerge.getActorId(s1)}`]: 'string',
- [`1@${Automerge.getActorId(s2)}`]: {innerKey: 42}
- })
- })
-
- it('should handle changes within a conflicting list element', () => {
- s1 = Automerge.change(s1, doc => doc.list = ['hello'])
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.list[0] = {map1: true})
- s1 = Automerge.change(s1, doc => doc.list[0].key = 1)
- s2 = Automerge.change(s2, doc => doc.list[0] = {map2: true})
- s2 = Automerge.change(s2, doc => doc.list[0].key = 2)
- s3 = Automerge.merge(s1, s2)
- if (Automerge.getActorId(s1) > Automerge.getActorId(s2)) {
- assert.deepStrictEqual(s3.list, [{map1: true, key: 1}])
- } else {
- assert.deepStrictEqual(s3.list, [{map2: true, key: 2}])
- }
- assert.deepStrictEqual(Automerge.getConflicts(s3.list, 0), {
- [`3@${Automerge.getActorId(s1)}`]: {map1: true, key: 1},
- [`3@${Automerge.getActorId(s2)}`]: {map2: true, key: 2}
- })
- })
-
- it('should not merge concurrently assigned nested maps', () => {
- s1 = Automerge.change(s1, doc => doc.config = {background: 'blue'})
- s2 = Automerge.change(s2, doc => doc.config = {logo_url: 'logo.png'})
- s3 = Automerge.merge(s1, s2)
- assertEqualsOneOf(s3.config, {background: 'blue'}, {logo_url: 'logo.png'})
- assert.deepStrictEqual(Automerge.getConflicts(s3, 'config'), {
- [`1@${Automerge.getActorId(s1)}`]: {background: 'blue'},
- [`1@${Automerge.getActorId(s2)}`]: {logo_url: 'logo.png'}
- })
- })
-
- it('should clear conflicts after assigning a new value', () => {
- s1 = Automerge.change(s1, doc => doc.field = 'one')
- s2 = Automerge.change(s2, doc => doc.field = 'two')
- s3 = Automerge.merge(s1, s2)
- s3 = Automerge.change(s3, doc => doc.field = 'three')
- assert.deepStrictEqual(s3, {field: 'three'})
- assert.strictEqual(Automerge.getConflicts(s3, 'field'), undefined)
- s2 = Automerge.merge(s2, s3)
- assert.deepStrictEqual(s2, {field: 'three'})
- assert.strictEqual(Automerge.getConflicts(s2, 'field'), undefined)
- })
-
- it('should handle concurrent insertions at different list positions', () => {
- s1 = Automerge.change(s1, doc => doc.list = ['one', 'three'])
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.list.splice(1, 0, 'two'))
- s2 = Automerge.change(s2, doc => doc.list.push('four'))
- s3 = Automerge.merge(s1, s2)
- assert.deepStrictEqual(s3, {list: ['one', 'two', 'three', 'four']})
- assert.strictEqual(Automerge.getConflicts(s3, 'list'), undefined)
- })
-
- it('should handle concurrent insertions at the same list position', () => {
- s1 = Automerge.change(s1, doc => doc.birds = ['parakeet'])
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.birds.push('starling'))
- s2 = Automerge.change(s2, doc => doc.birds.push('chaffinch'))
- s3 = Automerge.merge(s1, s2)
- assertEqualsOneOf(s3.birds, ['parakeet', 'starling', 'chaffinch'], ['parakeet', 'chaffinch', 'starling'])
- s2 = Automerge.merge(s2, s3)
- assert.deepStrictEqual(s2, s3)
- })
-
- it('should handle concurrent assignment and deletion of a map entry', () => {
- // Add-wins semantics
- s1 = Automerge.change(s1, doc => doc.bestBird = 'robin')
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => delete doc.bestBird)
- s2 = Automerge.change(s2, doc => doc.bestBird = 'magpie')
- s3 = Automerge.merge(s1, s2)
- assert.deepStrictEqual(s1, {})
- assert.deepStrictEqual(s2, {bestBird: 'magpie'})
- assert.deepStrictEqual(s3, {bestBird: 'magpie'})
- assert.strictEqual(Automerge.getConflicts(s3, 'bestBird'), undefined)
- })
-
- it('should handle concurrent assignment and deletion of a list element', () => {
- // Concurrent assignment resurrects a deleted list element. Perhaps a little
- // surprising, but consistent with the add-wins semantics of maps (see test above)
- s1 = Automerge.change(s1, doc => doc.birds = ['blackbird', 'thrush', 'goldfinch'])
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.birds[1] = 'starling')
- s2 = Automerge.change(s2, doc => doc.birds.splice(1, 1))
- s3 = Automerge.merge(s1, s2)
- assert.deepStrictEqual(s1.birds, ['blackbird', 'starling', 'goldfinch'])
- assert.deepStrictEqual(s2.birds, ['blackbird', 'goldfinch'])
- assert.deepStrictEqual(s3.birds, ['blackbird', 'starling', 'goldfinch'])
- s4 = Automerge.load(Automerge.save(s3))
- assert.deepStrictEqual(s3, s4);
- })
-
- it('should handle insertion after a deleted list element', () => {
- s1 = Automerge.change(s1, doc => doc.birds = ['blackbird', 'thrush', 'goldfinch'])
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.birds.splice(1, 2))
- s2 = Automerge.change(s2, doc => doc.birds.splice(2, 0, 'starling'))
- s3 = Automerge.merge(s1, s2)
- assert.deepStrictEqual(s3, {birds: ['blackbird', 'starling']})
- assert.deepStrictEqual(Automerge.merge(s2, s3), {birds: ['blackbird', 'starling']})
- })
-
- it('should handle concurrent deletion of the same element', () => {
- s1 = Automerge.change(s1, doc => doc.birds = ['albatross', 'buzzard', 'cormorant'])
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.birds.deleteAt(1)) // buzzard
- s2 = Automerge.change(s2, doc => doc.birds.deleteAt(1)) // buzzard
- s3 = Automerge.merge(s1, s2)
- assert.deepStrictEqual(s3.birds, ['albatross', 'cormorant'])
- })
-
- it('should handle concurrent deletion of different elements', () => {
- s1 = Automerge.change(s1, doc => doc.birds = ['albatross', 'buzzard', 'cormorant'])
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.birds.deleteAt(0)) // albatross
- s2 = Automerge.change(s2, doc => doc.birds.deleteAt(1)) // buzzard
- s3 = Automerge.merge(s1, s2)
- assert.deepStrictEqual(s3.birds, ['cormorant'])
- })
-
- it('should handle concurrent updates at different levels of the tree', () => {
- // A delete higher up in the tree overrides an update in a subtree
- s1 = Automerge.change(s1, doc => doc.animals = {birds: {pink: 'flamingo', black: 'starling'}, mammals: ['badger']})
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.animals.birds.brown = 'sparrow')
- s2 = Automerge.change(s2, doc => delete doc.animals.birds)
- s3 = Automerge.merge(s1, s2)
- assert.deepStrictEqual(s1.animals, {
- birds: {
- pink: 'flamingo', brown: 'sparrow', black: 'starling'
- },
- mammals: ['badger']
- })
- assert.deepStrictEqual(s2.animals, {mammals: ['badger']})
- assert.deepStrictEqual(s3.animals, {mammals: ['badger']})
- })
-
- it('should handle updates of concurrently deleted objects', () => {
- s1 = Automerge.change(s1, doc => doc.birds = {blackbird: {feathers: 'black'}})
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => delete doc.birds.blackbird)
- s2 = Automerge.change(s2, doc => doc.birds.blackbird.beak = 'orange')
- s3 = Automerge.merge(s1, s2)
- assert.deepStrictEqual(s1, {birds: {}})
- })
-
- it('should not interleave sequence insertions at the same position', () => {
- s1 = Automerge.change(s1, doc => doc.wisdom = [])
- s2 = Automerge.merge(s2, s1)
- s1 = Automerge.change(s1, doc => doc.wisdom.push('to', 'be', 'is', 'to', 'do'))
- s2 = Automerge.change(s2, doc => doc.wisdom.push('to', 'do', 'is', 'to', 'be'))
- s3 = Automerge.merge(s1, s2)
- assertEqualsOneOf(s3.wisdom,
- ['to', 'be', 'is', 'to', 'do', 'to', 'do', 'is', 'to', 'be'],
- ['to', 'do', 'is', 'to', 'be', 'to', 'be', 'is', 'to', 'do'])
- // In case you're wondering: http://quoteinvestigator.com/2013/09/16/do-be-do/
- })
-
- describe('multiple insertions at the same list position', () => {
- it('should handle insertion by greater actor ID', () => {
- s1 = Automerge.init('aaaa')
- s2 = Automerge.init('bbbb')
- s1 = Automerge.change(s1, doc => doc.list = ['two'])
- s2 = Automerge.merge(s2, s1)
- s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, 'one'))
- assert.deepStrictEqual(s2.list, ['one', 'two'])
- })
-
- it('should handle insertion by lesser actor ID', () => {
- s1 = Automerge.init('bbbb')
- s2 = Automerge.init('aaaa')
- s1 = Automerge.change(s1, doc => doc.list = ['two'])
- s2 = Automerge.merge(s2, s1)
- s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, 'one'))
- assert.deepStrictEqual(s2.list, ['one', 'two'])
- })
-
- it('should handle insertion regardless of actor ID', () => {
- s1 = Automerge.change(s1, doc => doc.list = ['two'])
- s2 = Automerge.merge(s2, s1)
- s2 = Automerge.change(s2, doc => doc.list.splice(0, 0, 'one'))
- assert.deepStrictEqual(s2.list, ['one', 'two'])
- })
-
- it('should make insertion order consistent with causality', () => {
- s1 = Automerge.change(s1, doc => doc.list = ['four'])
- s2 = Automerge.merge(s2, s1)
- s2 = Automerge.change(s2, doc => doc.list.unshift('three'))
- s1 = Automerge.merge(s1, s2)
- s1 = Automerge.change(s1, doc => doc.list.unshift('two'))
- s2 = Automerge.merge(s2, s1)
- s2 = Automerge.change(s2, doc => doc.list.unshift('one'))
- assert.deepStrictEqual(s2.list, ['one', 'two', 'three', 'four'])
- })
- })
- })
-
- describe('saving and loading', () => {
- it('should save and restore an empty document', () => {
- let s = Automerge.load(Automerge.save(Automerge.init()))
- assert.deepStrictEqual(s, {})
- })
-
- it('should generate a new random actor ID', () => {
- let s1 = Automerge.init()
- let s2 = Automerge.load(Automerge.save(s1))
- assert.strictEqual(UUID_PATTERN.test(Automerge.getActorId(s1).toString()), true)
- assert.strictEqual(UUID_PATTERN.test(Automerge.getActorId(s2).toString()), true)
- assert.notEqual(Automerge.getActorId(s1), Automerge.getActorId(s2))
- })
-
- it('should allow a custom actor ID to be set', () => {
- let s = Automerge.load(Automerge.save(Automerge.init()), '333333')
- assert.strictEqual(Automerge.getActorId(s), '333333')
- })
-
- it('should reconstitute complex datatypes', () => {
- let s1 = Automerge.change(Automerge.init(), doc => doc.todos = [{title: 'water plants', done: false}])
- let s2 = Automerge.load(Automerge.save(s1))
- assert.deepStrictEqual(s2, {todos: [{title: 'water plants', done: false}]})
- })
-
- it('should save and load maps with @ symbols in the keys', () => {
- let s1 = Automerge.change(Automerge.init(), doc => doc["123@4567"] = "hello")
- let s2 = Automerge.load(Automerge.save(s1))
- assert.deepStrictEqual(s2, { "123@4567": "hello" })
- })
-
- it('should reconstitute conflicts', () => {
- let s1 = Automerge.change(Automerge.init('111111'), doc => doc.x = 3)
- let s2 = Automerge.change(Automerge.init('222222'), doc => doc.x = 5)
- s1 = Automerge.merge(s1, s2)
- let s3 = Automerge.load(Automerge.save(s1))
- assert.strictEqual(s1.x, 5)
- assert.strictEqual(s3.x, 5)
- assert.deepStrictEqual(Automerge.getConflicts(s1, 'x'), {'1@111111': 3, '1@222222': 5})
- assert.deepStrictEqual(Automerge.getConflicts(s3, 'x'), {'1@111111': 3, '1@222222': 5})
- })
-
- it('should reconstitute element ID counters', () => {
- const s1 = Automerge.init('01234567')
- const s2 = Automerge.change(s1, doc => doc.list = ['a'])
- const listId = Automerge.getObjectId(s2.list)
- const changes12 = Automerge.getAllChanges(s2).map(decodeChange)
- assert.deepStrictEqual(changes12, [{
- hash: changes12[0].hash, actor: '01234567', seq: 1, startOp: 1,
- time: changes12[0].time, message: '', deps: [], ops: [
- {obj: '_root', action: 'makeList', key: 'list', insert: false, pred: []},
- {obj: listId, action: 'set', elemId: '_head', insert: true, value: 'a', pred: []}
- ]
- }])
- const s3 = Automerge.change(s2, doc => doc.list.deleteAt(0))
- const s4 = Automerge.load(Automerge.save(s3), '01234567')
- const s5 = Automerge.change(s4, doc => doc.list.push('b'))
- const changes45 = Automerge.getAllChanges(s5).map(decodeChange)
- assert.deepStrictEqual(s5, {list: ['b']})
- assert.deepStrictEqual(changes45[2], {
- hash: changes45[2].hash, actor: '01234567', seq: 3, startOp: 4,
- time: changes45[2].time, message: '', deps: [changes45[1].hash], ops: [
- {obj: listId, action: 'set', elemId: '_head', insert: true, value: 'b', pred: []}
- ]
- })
- })
-
- it('should allow a reloaded list to be mutated', () => {
- let doc = Automerge.change(Automerge.init(), doc => doc.foo = [])
- doc = Automerge.load(Automerge.save(doc))
- doc = Automerge.change(doc, 'add', doc => doc.foo.push(1))
- doc = Automerge.load(Automerge.save(doc))
- assert.deepStrictEqual(doc.foo, [1])
- })
-
- it('should reload a document containing deflated columns', () => {
- // In this test, the keyCtr column is long enough for deflate compression to kick in, but the
- // keyStr column is short. Thus, the deflate bit gets set for keyCtr but not for keyStr.
- // When checking whether the columns appear in ascending order, we must ignore the deflate bit.
- let doc = Automerge.change(Automerge.init(), doc => {
- doc.list = []
- for (let i = 0; i < 200; i++) doc.list.insertAt(Math.floor(Math.random() * i), 'a')
- })
- Automerge.load(Automerge.save(doc))
- let expected = []
- for (let i = 0; i < 200; i++) expected.push('a')
- assert.deepStrictEqual(doc, {list: expected})
- })
-
- /*
- it.skip('should call patchCallback if supplied', () => {
- const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch'])
- const s2 = Automerge.change(s1, doc => doc.birds.push('Chaffinch'))
- const callbacks = [], actor = Automerge.getActorId(s1)
- const reloaded = Automerge.load(Automerge.save(s2), {
- patchCallback(patch, before, after, local) {
- callbacks.push({patch, before, after, local})
- }
- })
- assert.strictEqual(callbacks.length, 1)
- assert.deepStrictEqual(callbacks[0].patch, {
- maxOp: 3, deps: [decodeChange(Automerge.getAllChanges(s2)[1]).hash], clock: {[actor]: 2}, pendingChanges: 0,
- diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: {
- objectId: `1@${actor}`, type: 'list', edits: [
- {action: 'multi-insert', index: 0, elemId: `2@${actor}`, values: ['Goldfinch', 'Chaffinch']}
- ]
- }}}}
- })
- assert.deepStrictEqual(callbacks[0].before, {})
- assert.strictEqual(callbacks[0].after, reloaded)
- assert.strictEqual(callbacks[0].local, false)
- })
- */
- })
-
- describe('history API', () => {
- it('should return an empty history for an empty document', () => {
- assert.deepStrictEqual(Automerge.getHistory(Automerge.init()), [])
- })
-
- it('should make past document states accessible', () => {
- let s = Automerge.init()
- s = Automerge.change(s, doc => doc.config = {background: 'blue'})
- s = Automerge.change(s, doc => doc.birds = ['mallard'])
- s = Automerge.change(s, doc => doc.birds.unshift('oystercatcher'))
- assert.deepStrictEqual(Automerge.getHistory(s).map(state => state.snapshot), [
- {config: {background: 'blue'}},
- {config: {background: 'blue'}, birds: ['mallard']},
- {config: {background: 'blue'}, birds: ['oystercatcher', 'mallard']}
- ])
- })
-
- it('should make change messages accessible', () => {
- let s = Automerge.init()
- s = Automerge.change(s, 'Empty Bookshelf', doc => doc.books = [])
- s = Automerge.change(s, 'Add Orwell', doc => doc.books.push('Nineteen Eighty-Four'))
- s = Automerge.change(s, 'Add Huxley', doc => doc.books.push('Brave New World'))
- assert.deepStrictEqual(s.books, ['Nineteen Eighty-Four', 'Brave New World'])
- assert.deepStrictEqual(Automerge.getHistory(s).map(state => state.change.message),
- ['Empty Bookshelf', 'Add Orwell', 'Add Huxley'])
- })
- })
-
- describe('changes API', () => {
- it('should return an empty list on an empty document', () => {
- let changes = Automerge.getAllChanges(Automerge.init())
- assert.deepStrictEqual(changes, [])
- })
-
- it('should return an empty list when nothing changed', () => {
- let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch'])
- assert.deepStrictEqual(Automerge.getChanges(s1, s1), [])
- })
-
- it('should do nothing when applying an empty list of changes', () => {
- let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch'])
- assert.deepStrictEqual(Automerge.applyChanges(s1, [])[0], s1)
- })
-
- it('should return all changes when compared to an empty document', () => {
- let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch'])
- let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch'))
- let changes = Automerge.getChanges(Automerge.init(), s2)
- assert.strictEqual(changes.length, 2)
- })
-
- it('should allow a document copy to be reconstructed from scratch', () => {
- let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch'])
- let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch'))
- let changes = Automerge.getAllChanges(s2)
- let [s3] = Automerge.applyChanges(Automerge.init(), changes)
- assert.deepStrictEqual(s3.birds, ['Chaffinch', 'Bullfinch'])
- })
-
- it('should return changes since the last given version', () => {
- let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch'])
- let changes1 = Automerge.getAllChanges(s1)
- let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch'))
- let changes2 = Automerge.getChanges(s1, s2)
- assert.strictEqual(changes1.length, 1) // Add Chaffinch
- assert.strictEqual(changes2.length, 1) // Add Bullfinch
- })
-
- it('should incrementally apply changes since the last given version', () => {
- let s1 = Automerge.change(Automerge.init(), 'Add Chaffinch', doc => doc.birds = ['Chaffinch'])
- let changes1 = Automerge.getAllChanges(s1)
- let s2 = Automerge.change(s1, 'Add Bullfinch', doc => doc.birds.push('Bullfinch'))
- let changes2 = Automerge.getChanges(s1, s2)
- let [s3] = Automerge.applyChanges(Automerge.init(), changes1)
- let [s4] = Automerge.applyChanges(s3, changes2)
- assert.deepStrictEqual(s3.birds, ['Chaffinch'])
- assert.deepStrictEqual(s4.birds, ['Chaffinch', 'Bullfinch'])
- })
-
- it('should handle updates to a list element', () => {
- let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch', 'Bullfinch'])
- let s2 = Automerge.change(s1, doc => doc.birds[0] = 'Goldfinch')
- let [s3] = Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2))
- assert.deepStrictEqual(s3.birds, ['Goldfinch', 'Bullfinch'])
- assert.strictEqual(Automerge.getConflicts(s3.birds, 0), undefined)
- })
-
- // TEXT
- it('should handle updates to a text object', () => {
- let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text('ab'))
- let s2 = Automerge.change(s1, doc => doc.text.set(0, 'A'))
- let [s3] = Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2))
- assert.deepStrictEqual([...s3.text], ['A', 'b'])
- })
-
- /*
- it.skip('should report missing dependencies', () => {
- let s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Chaffinch'])
- let s2 = Automerge.merge(Automerge.init(), s1)
- s2 = Automerge.change(s2, doc => doc.birds.push('Bullfinch'))
- let changes = Automerge.getAllChanges(s2)
- let [s3, patch] = Automerge.applyChanges(Automerge.init(), [changes[1]])
- assert.deepStrictEqual(s3, {})
- assert.deepStrictEqual(Automerge.Backend.getMissingDeps(Automerge.Frontend.getBackendState(s3)),
- decodeChange(changes[1]).deps)
- assert.strictEqual(patch.pendingChanges, 1)
- ;[s3, patch] = Automerge.applyChanges(s3, [changes[0]])
- assert.deepStrictEqual(s3.birds, ['Chaffinch', 'Bullfinch'])
- assert.deepStrictEqual(Automerge.Backend.getMissingDeps(Automerge.Frontend.getBackendState(s3)), [])
- assert.strictEqual(patch.pendingChanges, 0)
- })
- */
-
- it('should report missing dependencies with out-of-order applyChanges', () => {
- let s0 = Automerge.init()
- let s1 = Automerge.change(s0, doc => doc.test = ['a'])
- let changes01 = Automerge.getAllChanges(s1)
- let s2 = Automerge.change(s1, doc => doc.test = ['b'])
- let changes12 = Automerge.getChanges(s1, s2)
- let s3 = Automerge.change(s2, doc => doc.test = ['c'])
- let changes23 = Automerge.getChanges(s2, s3)
- let s4 = Automerge.init()
- let [s5] = Automerge.applyChanges(s4, changes23)
- let [s6] = Automerge.applyChanges(s5, changes12)
-// assert.deepStrictEqual(Automerge.Backend.getMissingDeps(Automerge.Frontend.getBackendState(s6)), [decodeChange(changes01[0]).hash])
- assert.deepStrictEqual(Automerge.getMissingDeps(s6), [decodeChange(changes01[0]).hash])
- })
-
- /*
- it.skip('should call patchCallback if supplied when applying changes', () => {
- const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch'])
- const callbacks = [], actor = Automerge.getActorId(s1)
- const before = Automerge.init()
- const [after, patch] = Automerge.applyChanges(before, Automerge.getAllChanges(s1), {
- patchCallback(patch, before, after, local) {
- callbacks.push({patch, before, after, local})
- }
- })
- assert.strictEqual(callbacks.length, 1)
- assert.deepStrictEqual(callbacks[0].patch, {
- maxOp: 2, deps: [decodeChange(Automerge.getAllChanges(s1)[0]).hash], clock: {[actor]: 1}, pendingChanges: 0,
- diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: {
- objectId: `1@${actor}`, type: 'list', edits: [
- {action: 'insert', index: 0, elemId: `2@${actor}`, opId: `2@${actor}`, value: {type: 'value', value: 'Goldfinch'}}
- ]
- }}}}
- })
- assert.strictEqual(callbacks[0].patch, patch)
- assert.strictEqual(callbacks[0].before, before)
- assert.strictEqual(callbacks[0].after, after)
- assert.strictEqual(callbacks[0].local, false)
- })
- */
-
- /*
- it.skip('should merge multiple applied changes into one patch', () => {
- const s1 = Automerge.change(Automerge.init(), doc => doc.birds = ['Goldfinch'])
- const s2 = Automerge.change(s1, doc => doc.birds.push('Chaffinch'))
- const patches = [], actor = Automerge.getActorId(s2)
- Automerge.applyChanges(Automerge.init(), Automerge.getAllChanges(s2),
- {patchCallback: p => patches.push(p)})
- assert.deepStrictEqual(patches, [{
- maxOp: 3, deps: [decodeChange(Automerge.getAllChanges(s2)[1]).hash], clock: {[actor]: 2}, pendingChanges: 0,
- diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: {
- objectId: `1@${actor}`, type: 'list', edits: [
- {action: 'multi-insert', index: 0, elemId: `2@${actor}`, values: ['Goldfinch', 'Chaffinch']}
- ]
- }}}}
- }])
- })
- */
-
- /*
- it.skip('should call a patchCallback registered on doc initialisation', () => {
- const s1 = Automerge.change(Automerge.init(), doc => doc.bird = 'Goldfinch')
- const patches = [], actor = Automerge.getActorId(s1)
- const before = Automerge.init({patchCallback: p => patches.push(p)})
- Automerge.applyChanges(before, Automerge.getAllChanges(s1))
- assert.deepStrictEqual(patches, [{
- maxOp: 1, deps: [decodeChange(Automerge.getAllChanges(s1)[0]).hash], clock: {[actor]: 1}, pendingChanges: 0,
- diffs: {objectId: '_root', type: 'map', props: {bird: {[`1@${actor}`]: {type: 'value', value: 'Goldfinch'}}}}
- }])
- })
- */
- })
-})
diff --git a/automerge-js/test/text_test.js b/automerge-js/test/text_test.js
deleted file mode 100644
index 57e8884e..00000000
--- a/automerge-js/test/text_test.js
+++ /dev/null
@@ -1,697 +0,0 @@
-const assert = require('assert')
-const Automerge = require('..')
-const { assertEqualsOneOf } = require('./helpers')
-
-function attributeStateToAttributes(accumulatedAttributes) {
- const attributes = {}
- Object.entries(accumulatedAttributes).forEach(([key, values]) => {
- if (values.length && values[0] !== null) {
- attributes[key] = values[0]
- }
- })
- return attributes
-}
-
-function isEquivalent(a, b) {
- const aProps = Object.getOwnPropertyNames(a)
- const bProps = Object.getOwnPropertyNames(b)
-
- if (aProps.length != bProps.length) {
- return false
- }
-
- for (let i = 0; i < aProps.length; i++) {
- const propName = aProps[i]
- if (a[propName] !== b[propName]) {
- return false
- }
- }
-
- return true
-}
-
-function isControlMarker(pseudoCharacter) {
- return typeof pseudoCharacter === 'object' && pseudoCharacter.attributes
-}
-
-function opFrom(text, attributes) {
- let op = { insert: text }
- if (Object.keys(attributes).length > 0) {
- op.attributes = attributes
- }
- return op
-}
-
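-// Merges one control marker's attribute changes into the running control state:
-// a null value either pops the most recent value recorded for that key or, if none
-// is recorded, queues a pending null; a non-null value either cancels a pending null
-// or is pushed onto the front. attributeStateToAttributes() then reads the most
-// recent surviving value for each key.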
-function accumulateAttributes(span, accumulatedAttributes) {
- Object.entries(span).forEach(([key, value]) => {
- if (!accumulatedAttributes[key]) {
- accumulatedAttributes[key] = []
- }
- if (value === null) {
- if (accumulatedAttributes[key].length === 0 || accumulatedAttributes[key] === null) {
- accumulatedAttributes[key].unshift(null)
- } else {
- accumulatedAttributes[key].shift()
- }
- } else {
- if (accumulatedAttributes[key][0] === null) {
- accumulatedAttributes[key].shift()
- } else {
- accumulatedAttributes[key].unshift(value)
- }
- }
- })
- return accumulatedAttributes
-}
-
-function automergeTextToDeltaDoc(text) {
- let ops = []
- let controlState = {}
- let currentString = ""
- let attributes = {}
- text.toSpans().forEach((span) => {
- if (isControlMarker(span)) {
- controlState = accumulateAttributes(span.attributes, controlState)
- } else {
- let next = attributeStateToAttributes(controlState)
-
- // if the next span has the same calculated attributes as the current span
- // don't bother outputting it as a separate span, just let it ride
- if (typeof span === 'string' && isEquivalent(next, attributes)) {
- currentString = currentString + span
- return
- }
-
- if (currentString) {
- ops.push(opFrom(currentString, attributes))
- }
-
- // If we've got a string, we might be able to concatenate it to another
- // same-attributed-string, so remember it and go to the next iteration.
- if (typeof span === 'string') {
- currentString = span
- attributes = next
- } else {
- // otherwise we have an embed "character" and should output it immediately.
- // embeds are always one-"character" in length.
- ops.push(opFrom(span, next))
- currentString = ''
- attributes = {}
- }
- }
- })
-
- // at the end, flush any accumulated string out
- if (currentString) {
- ops.push(opFrom(currentString, attributes))
- }
-
- return ops
-}
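-
-// Illustrative example (not part of the original suite): for a Text whose toSpans() is
-//   ['Hi ', { attributes: { bold: true } }, 'world', { attributes: { bold: null } }, '!']
-// the conversion above yields the Quill-style ops
-//   [{ insert: 'Hi ' }, { insert: 'world', attributes: { bold: true } }, { insert: '!' }]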
-
-function inverseAttributes(attributes) {
- let invertedAttributes = {}
- Object.keys(attributes).forEach((key) => {
- invertedAttributes[key] = null
- })
- return invertedAttributes
-}
-
-function applyDeleteOp(text, offset, op) {
- let length = op.delete
- while (length > 0) {
- if (isControlMarker(text.get(offset))) {
- offset += 1
- } else {
- // we need to not delete control characters, but we do delete embed characters
- text.deleteAt(offset, 1)
- length -= 1
- }
- }
- return [text, offset]
-}
-
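-// Walks past `op.retain` visible characters, not counting control markers. When the
-// retain carries attributes, a control marker holding them is inserted before the
-// retained run and its inverse (null values) after it, so the formatting is scoped.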
-function applyRetainOp(text, offset, op) {
- let length = op.retain
-
- if (op.attributes) {
- text.insertAt(offset, { attributes: op.attributes })
- offset += 1
- }
-
- while (length > 0) {
- const char = text.get(offset)
- offset += 1
- if (!isControlMarker(char)) {
- length -= 1
- }
- }
-
- if (op.attributes) {
- text.insertAt(offset, { attributes: inverseAttributes(op.attributes) })
- offset += 1
- }
-
- return [text, offset]
-}
-
-
-function applyInsertOp(text, offset, op) {
- let originalOffset = offset
-
- if (typeof op.insert === 'string') {
- text.insertAt(offset, ...op.insert.split(''))
- offset += op.insert.length
- } else {
- // we have an embed or something similar
- text.insertAt(offset, op.insert)
- offset += 1
- }
-
- if (op.attributes) {
- text.insertAt(originalOffset, { attributes: op.attributes })
- offset += 1
- }
- if (op.attributes) {
- text.insertAt(offset, { attributes: inverseAttributes(op.attributes) })
- offset += 1
- }
- return [text, offset]
-}
-
-// XXX: uhhhhh, why can't I pass in text?
-function applyDeltaDocToAutomergeText(delta, doc) {
- let offset = 0
-
- delta.forEach(op => {
- if (op.retain) {
- [, offset] = applyRetainOp(doc.text, offset, op)
- } else if (op.delete) {
- [, offset] = applyDeleteOp(doc.text, offset, op)
- } else if (op.insert) {
- [, offset] = applyInsertOp(doc.text, offset, op)
- }
- })
-}
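-
-// Usage sketch: the delta must be applied inside a change callback, since the helpers
-// mutate doc.text in place, e.g.
-//   Automerge.change(s1, doc => applyDeltaDocToAutomergeText([{ retain: 6 }, { insert: 'reader' }, { delete: 5 }], doc))
-// as exercised by the 'should apply an insert' tests below.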
-
-describe('Automerge.Text', () => {
- let s1, s2
- beforeEach(() => {
- s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text())
- s2 = Automerge.merge(Automerge.init(), s1)
- })
-
- it('should support insertion', () => {
- s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a'))
- assert.strictEqual(s1.text.length, 1)
- assert.strictEqual(s1.text.get(0), 'a')
- assert.strictEqual(s1.text.toString(), 'a')
- //assert.strictEqual(s1.text.getElemId(0), `2@${Automerge.getActorId(s1)}`)
- })
-
- it('should support deletion', () => {
- s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', 'b', 'c'))
- s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 1))
- assert.strictEqual(s1.text.length, 2)
- assert.strictEqual(s1.text.get(0), 'a')
- assert.strictEqual(s1.text.get(1), 'c')
- assert.strictEqual(s1.text.toString(), 'ac')
- })
-
- it("should support implicit and explicit deletion", () => {
- s1 = Automerge.change(s1, doc => doc.text.insertAt(0, "a", "b", "c"))
- s1 = Automerge.change(s1, doc => doc.text.deleteAt(1))
- s1 = Automerge.change(s1, doc => doc.text.deleteAt(1, 0))
- assert.strictEqual(s1.text.length, 2)
- assert.strictEqual(s1.text.get(0), "a")
- assert.strictEqual(s1.text.get(1), "c")
- assert.strictEqual(s1.text.toString(), "ac")
- })
-
- it('should handle concurrent insertion', () => {
- s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', 'b', 'c'))
- s2 = Automerge.change(s2, doc => doc.text.insertAt(0, 'x', 'y', 'z'))
- s1 = Automerge.merge(s1, s2)
- assert.strictEqual(s1.text.length, 6)
- assertEqualsOneOf(s1.text.toString(), 'abcxyz', 'xyzabc')
- assertEqualsOneOf(s1.text.join(''), 'abcxyz', 'xyzabc')
- })
-
- it('should handle text and other ops in the same change', () => {
- s1 = Automerge.change(s1, doc => {
- doc.foo = 'bar'
- doc.text.insertAt(0, 'a')
- })
- assert.strictEqual(s1.foo, 'bar')
- assert.strictEqual(s1.text.toString(), 'a')
- assert.strictEqual(s1.text.join(''), 'a')
- })
-
- it('should serialize to JSON as a simple string', () => {
- s1 = Automerge.change(s1, doc => doc.text.insertAt(0, 'a', '"', 'b'))
- assert.strictEqual(JSON.stringify(s1), '{"text":"a\\"b"}')
- })
-
- it('should allow modification before an object is assigned to a document', () => {
- s1 = Automerge.change(Automerge.init(), doc => {
- const text = new Automerge.Text()
- text.insertAt(0, 'a', 'b', 'c', 'd')
- text.deleteAt(2)
- doc.text = text
- assert.strictEqual(doc.text.toString(), 'abd')
- assert.strictEqual(doc.text.join(''), 'abd')
- })
- assert.strictEqual(s1.text.toString(), 'abd')
- assert.strictEqual(s1.text.join(''), 'abd')
- })
-
- it('should allow modification after an object is assigned to a document', () => {
- s1 = Automerge.change(Automerge.init(), doc => {
- const text = new Automerge.Text()
- doc.text = text
- doc.text.insertAt(0, 'a', 'b', 'c', 'd')
- doc.text.deleteAt(2)
- assert.strictEqual(doc.text.toString(), 'abd')
- assert.strictEqual(doc.text.join(''), 'abd')
- })
- assert.strictEqual(s1.text.join(''), 'abd')
- })
-
- it('should not allow modification outside of a change callback', () => {
- assert.throws(() => s1.text.insertAt(0, 'a'), /object cannot be modified outside of a change block/)
- })
-
- describe('with initial value', () => {
- it('should accept a string as initial value', () => {
- let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text('init'))
- assert.strictEqual(s1.text.length, 4)
- assert.strictEqual(s1.text.get(0), 'i')
- assert.strictEqual(s1.text.get(1), 'n')
- assert.strictEqual(s1.text.get(2), 'i')
- assert.strictEqual(s1.text.get(3), 't')
- assert.strictEqual(s1.text.toString(), 'init')
- })
-
- it('should accept an array as initial value', () => {
- let s1 = Automerge.change(Automerge.init(), doc => doc.text = new Automerge.Text(['i', 'n', 'i', 't']))
- assert.strictEqual(s1.text.length, 4)
- assert.strictEqual(s1.text.get(0), 'i')
- assert.strictEqual(s1.text.get(1), 'n')
- assert.strictEqual(s1.text.get(2), 'i')
- assert.strictEqual(s1.text.get(3), 't')
- assert.strictEqual(s1.text.toString(), 'init')
- })
-
- it('should initialize text in Automerge.from()', () => {
- let s1 = Automerge.from({text: new Automerge.Text('init')})
- assert.strictEqual(s1.text.length, 4)
- assert.strictEqual(s1.text.get(0), 'i')
- assert.strictEqual(s1.text.get(1), 'n')
- assert.strictEqual(s1.text.get(2), 'i')
- assert.strictEqual(s1.text.get(3), 't')
- assert.strictEqual(s1.text.toString(), 'init')
- })
-
- it('should encode the initial value as a change', () => {
- const s1 = Automerge.from({text: new Automerge.Text('init')})
- const changes = Automerge.getAllChanges(s1)
- assert.strictEqual(changes.length, 1)
- const [s2] = Automerge.applyChanges(Automerge.init(), changes)
- assert.strictEqual(s2.text instanceof Automerge.Text, true)
- assert.strictEqual(s2.text.toString(), 'init')
- assert.strictEqual(s2.text.join(''), 'init')
- })
-
- it('should allow immediate access to the value', () => {
- Automerge.change(Automerge.init(), doc => {
- const text = new Automerge.Text('init')
- assert.strictEqual(text.length, 4)
- assert.strictEqual(text.get(0), 'i')
- assert.strictEqual(text.toString(), 'init')
- doc.text = text
- assert.strictEqual(doc.text.length, 4)
- assert.strictEqual(doc.text.get(0), 'i')
- assert.strictEqual(doc.text.toString(), 'init')
- })
- })
-
- it('should allow pre-assignment modification of the initial value', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- const text = new Automerge.Text('init')
- text.deleteAt(3)
- assert.strictEqual(text.join(''), 'ini')
- doc.text = text
- assert.strictEqual(doc.text.join(''), 'ini')
- assert.strictEqual(doc.text.toString(), 'ini')
- })
- assert.strictEqual(s1.text.toString(), 'ini')
- assert.strictEqual(s1.text.join(''), 'ini')
- })
-
- it('should allow post-assignment modification of the initial value', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- const text = new Automerge.Text('init')
- doc.text = text
- doc.text.deleteAt(0)
- doc.text.insertAt(0, 'I')
- assert.strictEqual(doc.text.join(''), 'Init')
- assert.strictEqual(doc.text.toString(), 'Init')
- })
- assert.strictEqual(s1.text.join(''), 'Init')
- assert.strictEqual(s1.text.toString(), 'Init')
- })
- })
-
- describe('non-textual control characters', () => {
- let s1
- beforeEach(() => {
- s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text()
- doc.text.insertAt(0, 'a')
- doc.text.insertAt(1, { attribute: 'bold' })
- })
- })
-
- it('should allow fetching non-textual characters', () => {
- assert.deepEqual(s1.text.get(1), { attribute: 'bold' })
- //assert.strictEqual(s1.text.getElemId(1), `3@${Automerge.getActorId(s1)}`)
- })
-
- it('should include control characters in string length', () => {
- assert.strictEqual(s1.text.length, 2)
- assert.strictEqual(s1.text.get(0), 'a')
- })
-
- it('should exclude control characters from toString()', () => {
- assert.strictEqual(s1.text.toString(), 'a')
- })
-
- it('should allow control characters to be updated', () => {
- const s2 = Automerge.change(s1, doc => doc.text.get(1).attribute = 'italic')
- const s3 = Automerge.load(Automerge.save(s2))
- assert.strictEqual(s1.text.get(1).attribute, 'bold')
- assert.strictEqual(s2.text.get(1).attribute, 'italic')
- assert.strictEqual(s3.text.get(1).attribute, 'italic')
- })
-
- describe('spans interface to Text', () => {
- it('should return a simple string as a single span', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('hello world')
- })
- assert.deepEqual(s1.text.toSpans(), ['hello world'])
- })
- it('should return an empty string as an empty array', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text()
- })
- assert.deepEqual(s1.text.toSpans(), [])
- })
- it('should split a span at a control character', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('hello world')
- doc.text.insertAt(5, { attributes: { bold: true } })
- })
- assert.deepEqual(s1.text.toSpans(),
- ['hello', { attributes: { bold: true } }, ' world'])
- })
- it('should allow consecutive control characters', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('hello world')
- doc.text.insertAt(5, { attributes: { bold: true } })
- doc.text.insertAt(6, { attributes: { italic: true } })
- })
- assert.deepEqual(s1.text.toSpans(),
- ['hello',
- { attributes: { bold: true } },
- { attributes: { italic: true } },
- ' world'
- ])
- })
- it('should allow non-consecutive control characters', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('hello world')
- doc.text.insertAt(5, { attributes: { bold: true } })
- doc.text.insertAt(12, { attributes: { italic: true } })
- })
- assert.deepEqual(s1.text.toSpans(),
- ['hello',
- { attributes: { bold: true } },
- ' world',
- { attributes: { italic: true } }
- ])
- })
-
- it('should be convertible into a Quill delta', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('Gandalf the Grey')
- doc.text.insertAt(0, { attributes: { bold: true } })
- doc.text.insertAt(7 + 1, { attributes: { bold: null } })
- doc.text.insertAt(12 + 2, { attributes: { color: '#cccccc' } })
- })
-
- let deltaDoc = automergeTextToDeltaDoc(s1.text)
-
- // From https://quilljs.com/docs/delta/
- let expectedDoc = [
- { insert: 'Gandalf', attributes: { bold: true } },
- { insert: ' the ' },
- { insert: 'Grey', attributes: { color: '#cccccc' } }
- ]
-
- assert.deepEqual(deltaDoc, expectedDoc)
- })
-
- it('should support embeds', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('')
- doc.text.insertAt(0, { attributes: { link: 'https://quilljs.com' } })
- doc.text.insertAt(1, {
- image: 'https://quilljs.com/assets/images/icon.png'
- })
- doc.text.insertAt(2, { attributes: { link: null } })
- })
-
- let deltaDoc = automergeTextToDeltaDoc(s1.text)
-
- // From https://quilljs.com/docs/delta/
- let expectedDoc = [{
- // An image link
- insert: {
- image: 'https://quilljs.com/assets/images/icon.png'
- },
- attributes: {
- link: 'https://quilljs.com'
- }
- }]
-
- assert.deepEqual(deltaDoc, expectedDoc)
- })
-
- it('should handle concurrent overlapping spans', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('Gandalf the Grey')
- })
-
- let s2 = Automerge.merge(Automerge.init(), s1)
-
- let s3 = Automerge.change(s1, doc => {
- doc.text.insertAt(8, { attributes: { bold: true } })
- doc.text.insertAt(16 + 1, { attributes: { bold: null } })
- })
-
- let s4 = Automerge.change(s2, doc => {
- doc.text.insertAt(0, { attributes: { bold: true } })
- doc.text.insertAt(11 + 1, { attributes: { bold: null } })
- })
-
- let merged = Automerge.merge(s3, s4)
-
- let deltaDoc = automergeTextToDeltaDoc(merged.text)
-
- // From https://quilljs.com/docs/delta/
- let expectedDoc = [
- { insert: 'Gandalf the Grey', attributes: { bold: true } },
- ]
-
- assert.deepEqual(deltaDoc, expectedDoc)
- })
-
- it('should handle debolding spans', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('Gandalf the Grey')
- })
-
- let s2 = Automerge.merge(Automerge.init(), s1)
-
- let s3 = Automerge.change(s1, doc => {
- doc.text.insertAt(0, { attributes: { bold: true } })
- doc.text.insertAt(16 + 1, { attributes: { bold: null } })
- })
-
- let s4 = Automerge.change(s2, doc => {
- doc.text.insertAt(8, { attributes: { bold: null } })
- doc.text.insertAt(11 + 1, { attributes: { bold: true } })
- })
-
-
- let merged = Automerge.merge(s3, s4)
-
- let deltaDoc = automergeTextToDeltaDoc(merged.text)
-
- // From https://quilljs.com/docs/delta/
- let expectedDoc = [
- { insert: 'Gandalf ', attributes: { bold: true } },
- { insert: 'the' },
- { insert: ' Grey', attributes: { bold: true } },
- ]
-
- assert.deepEqual(deltaDoc, expectedDoc)
- })
-
- // xxx: how would this work for colors?
- it('should handle destyling across destyled spans', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('Gandalf the Grey')
- })
-
- let s2 = Automerge.merge(Automerge.init(), s1)
-
- let s3 = Automerge.change(s1, doc => {
- doc.text.insertAt(0, { attributes: { bold: true } })
- doc.text.insertAt(16 + 1, { attributes: { bold: null } })
- })
-
- let s4 = Automerge.change(s2, doc => {
- doc.text.insertAt(8, { attributes: { bold: null } })
- doc.text.insertAt(11 + 1, { attributes: { bold: true } })
- })
-
- let merged = Automerge.merge(s3, s4)
-
- let final = Automerge.change(merged, doc => {
- doc.text.insertAt(3 + 1, { attributes: { bold: null } })
- doc.text.insertAt(doc.text.length, { attributes: { bold: true } })
- })
-
- let deltaDoc = automergeTextToDeltaDoc(final.text)
-
- // From https://quilljs.com/docs/delta/
- let expectedDoc = [
- { insert: 'Gan', attributes: { bold: true } },
- { insert: 'dalf the Grey' },
- ]
-
- assert.deepEqual(deltaDoc, expectedDoc)
- })
-
- it('should apply an insert', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('Hello world')
- })
-
- const delta = [
- { retain: 6 },
- { insert: 'reader' },
- { delete: 5 }
- ]
-
- let s2 = Automerge.change(s1, doc => {
- applyDeltaDocToAutomergeText(delta, doc)
- })
-
- assert.strictEqual(s2.text.join(''), 'Hello reader')
- })
-
- it('should apply an insert with control characters', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('Hello world')
- })
-
- const delta = [
- { retain: 6 },
- { insert: 'reader', attributes: { bold: true } },
- { delete: 5 },
- { insert: '!' }
- ]
-
- let s2 = Automerge.change(s1, doc => {
- applyDeltaDocToAutomergeText(delta, doc)
- })
-
- assert.strictEqual(s2.text.toString(), 'Hello reader!')
- assert.deepEqual(s2.text.toSpans(), [
- "Hello ",
- { attributes: { bold: true } },
- "reader",
- { attributes: { bold: null } },
- "!"
- ])
- })
-
- it('should account for control characters in retain/delete lengths', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('Hello world')
- doc.text.insertAt(4, { attributes: { color: '#ccc' } })
- doc.text.insertAt(10, { attributes: { color: '#f00' } })
- })
-
- const delta = [
- { retain: 6 },
- { insert: 'reader', attributes: { bold: true } },
- { delete: 5 },
- { insert: '!' }
- ]
-
- let s2 = Automerge.change(s1, doc => {
- applyDeltaDocToAutomergeText(delta, doc)
- })
-
- assert.strictEqual(s2.text.toString(), 'Hello reader!')
- assert.deepEqual(s2.text.toSpans(), [
- "Hell",
- { attributes: { color: '#ccc'} },
- "o ",
- { attributes: { bold: true } },
- "reader",
- { attributes: { bold: null } },
- { attributes: { color: '#f00'} },
- "!"
- ])
- })
-
- it('should support embeds', () => {
- let s1 = Automerge.change(Automerge.init(), doc => {
- doc.text = new Automerge.Text('')
- })
-
- let deltaDoc = [{
- // An image link
- insert: {
- image: 'https://quilljs.com/assets/images/icon.png'
- },
- attributes: {
- link: 'https://quilljs.com'
- }
- }]
-
- let s2 = Automerge.change(s1, doc => {
- applyDeltaDocToAutomergeText(deltaDoc, doc)
- })
-
- assert.deepEqual(s2.text.toSpans(), [
- { attributes: { link: 'https://quilljs.com' } },
- { image: 'https://quilljs.com/assets/images/icon.png'},
- { attributes: { link: null } },
- ])
- })
- })
- })
-
- it('should support unicode when creating text', () => {
- s1 = Automerge.from({
- text: new Automerge.Text('🐦')
- })
- assert.strictEqual(s1.text.get(0), '🐦')
- })
-})
diff --git a/automerge-js/test/uuid_test.js b/automerge-js/test/uuid_test.js
deleted file mode 100644
index a0f83df1..00000000
--- a/automerge-js/test/uuid_test.js
+++ /dev/null
@@ -1,32 +0,0 @@
-const assert = require('assert')
-const Automerge = require('..')
-
-const uuid = Automerge.uuid
-
-describe('uuid', () => {
- afterEach(() => {
- uuid.reset()
- })
-
- describe('default implementation', () => {
- it('generates unique values', () => {
- assert.notEqual(uuid(), uuid())
- })
- })
-
- describe('custom implementation', () => {
- let counter
-
- function customUuid() {
- return `custom-uuid-${counter++}`
- }
-
- before(() => uuid.setFactory(customUuid))
- beforeEach(() => counter = 0)
-
- it('invokes the custom factory', () => {
- assert.equal(uuid(), 'custom-uuid-0')
- assert.equal(uuid(), 'custom-uuid-1')
- })
- })
-})
diff --git a/automerge-wasm/README.md b/automerge-wasm/README.md
deleted file mode 100644
index 258cd572..00000000
--- a/automerge-wasm/README.md
+++ /dev/null
@@ -1 +0,0 @@
-todo
diff --git a/automerge-wasm/package.json b/automerge-wasm/package.json
deleted file mode 100644
index 79a0781d..00000000
--- a/automerge-wasm/package.json
+++ /dev/null
@@ -1,31 +0,0 @@
-{
- "collaborators": [
- "Orion Henry ",
- "Alex Good ",
- "Martin Kleppmann"
- ],
- "name": "automerge-wasm",
- "description": "wasm-bindgen bindings to the automerge rust implementation",
- "version": "0.1.0",
- "license": "MIT",
- "files": [
- "README.md",
- "LICENSE",
- "package.json",
- "automerge_wasm_bg.wasm",
- "automerge_wasm.js"
- ],
- "main": "./dev/index.js",
- "scripts": {
- "build": "rimraf ./dev && wasm-pack build --target nodejs --dev --out-name index -d dev",
- "release": "rimraf ./dev && wasm-pack build --target nodejs --release --out-name index -d dev && yarn opt",
- "prof": "rimraf ./dev && wasm-pack build --target nodejs --profiling --out-name index -d dev",
- "opt": "wasm-opt -Oz dev/index_bg.wasm -o tmp.wasm && mv tmp.wasm dev/index_bg.wasm",
- "test": "yarn build && mocha --bail --full-trace"
- },
- "dependencies": {},
- "devDependencies": {
- "mocha": "^9.1.3",
- "rimraf": "^3.0.2"
- }
-}
diff --git a/automerge-wasm/src/lib.rs b/automerge-wasm/src/lib.rs
deleted file mode 100644
index cf22b1a1..00000000
--- a/automerge-wasm/src/lib.rs
+++ /dev/null
@@ -1,822 +0,0 @@
-extern crate web_sys;
-use automerge as am;
-use automerge::{Change, ChangeHash, Prop, Value, ExId};
-use js_sys::{Array, Object, Reflect, Uint8Array};
-use serde::de::DeserializeOwned;
-use serde::Serialize;
-use std::collections::{HashMap, HashSet};
-use std::convert::TryFrom;
-use std::convert::TryInto;
-use std::fmt::Display;
-use wasm_bindgen::prelude::*;
-use wasm_bindgen::JsCast;
-
-#[allow(unused_macros)]
-macro_rules! log {
- ( $( $t:tt )* ) => {
- web_sys::console::log_1(&format!( $( $t )* ).into());
- };
-}
-
-#[cfg(feature = "wee_alloc")]
-#[global_allocator]
-static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
-
-fn datatype(s: &am::ScalarValue) -> String {
- match s {
- am::ScalarValue::Bytes(_) => "bytes".into(),
- am::ScalarValue::Str(_) => "str".into(),
- am::ScalarValue::Int(_) => "int".into(),
- am::ScalarValue::Uint(_) => "uint".into(),
- am::ScalarValue::F64(_) => "f64".into(),
- am::ScalarValue::Counter(_) => "counter".into(),
- am::ScalarValue::Timestamp(_) => "timestamp".into(),
- am::ScalarValue::Boolean(_) => "boolean".into(),
- am::ScalarValue::Null => "null".into(),
- }
-}
-
-#[derive(Debug)]
-pub struct ScalarValue(am::ScalarValue);
-
-impl From<ScalarValue> for JsValue {
- fn from(val: ScalarValue) -> Self {
- match &val.0 {
- am::ScalarValue::Bytes(v) => Uint8Array::from(v.as_slice()).into(),
- am::ScalarValue::Str(v) => v.to_string().into(),
- am::ScalarValue::Int(v) => (*v as f64).into(),
- am::ScalarValue::Uint(v) => (*v as f64).into(),
- am::ScalarValue::F64(v) => (*v).into(),
- am::ScalarValue::Counter(v) => (*v as f64).into(),
- am::ScalarValue::Timestamp(v) => (*v as f64).into(),
- am::ScalarValue::Boolean(v) => (*v).into(),
- am::ScalarValue::Null => JsValue::null(),
- }
- }
-}
-
-#[wasm_bindgen]
-#[derive(Debug)]
-pub struct Automerge(automerge::Automerge);
-
-#[wasm_bindgen]
-#[derive(Debug)]
-pub struct SyncState(am::SyncState);
-
-#[wasm_bindgen]
-impl SyncState {
- #[wasm_bindgen(getter, js_name = sharedHeads)]
- pub fn shared_heads(&self) -> JsValue {
- rust_to_js(&self.0.shared_heads).unwrap()
- }
-
- #[wasm_bindgen(getter, js_name = lastSentHeads)]
- pub fn last_sent_heads(&self) -> JsValue {
- rust_to_js(self.0.last_sent_heads.as_ref()).unwrap()
- }
-
- #[wasm_bindgen(setter, js_name = lastSentHeads)]
- pub fn set_last_sent_heads(&mut self, heads: JsValue) {
- let heads: Option<Vec<ChangeHash>> = js_to_rust(&heads).unwrap();
- self.0.last_sent_heads = heads
- }
-
- #[wasm_bindgen(setter, js_name = sentHashes)]
- pub fn set_sent_hashes(&mut self, hashes: JsValue) {
- let hashes_map: HashMap<ChangeHash, bool> = js_to_rust(&hashes).unwrap();
- let hashes_set: HashSet<ChangeHash> = hashes_map.keys().cloned().collect();
- self.0.sent_hashes = hashes_set
- }
-
- fn decode(data: Uint8Array) -> Result<SyncState, JsValue> {
- let data = data.to_vec();
- let s = am::SyncState::decode(&data);
- let s = s.map_err(to_js_err)?;
- Ok(SyncState(s))
- }
-}
-
-#[derive(Debug)]
-pub struct JsErr(String);
-
-impl From<JsErr> for JsValue {
- fn from(err: JsErr) -> Self {
- js_sys::Error::new(&std::format!("{}", err.0)).into()
- }
-}
-
-impl<'a> From<&'a str> for JsErr {
- fn from(s: &'a str) -> Self {
- JsErr(s.to_owned())
- }
-}
-
-#[wasm_bindgen]
-impl Automerge {
- pub fn new(actor: JsValue) -> Result<Automerge, JsValue> {
- let mut automerge = automerge::Automerge::new();
- if let Some(a) = actor.as_string() {
- let a = automerge::ActorId::from(hex::decode(a).map_err(to_js_err)?.to_vec());
- automerge.set_actor(a);
- }
- Ok(Automerge(automerge))
- }
-
- #[allow(clippy::should_implement_trait)]
- pub fn clone(&self) -> Self {
- Automerge(self.0.clone())
- }
-
- pub fn free(self) {}
-
- pub fn pending_ops(&self) -> JsValue {
- (self.0.pending_ops() as u32).into()
- }
-
- pub fn commit(&mut self, message: JsValue, time: JsValue) -> Array {
- let message = message.as_string();
- let time = time.as_f64().map(|v| v as i64);
- let heads = self.0.commit(message, time);
- let heads: Array = heads
- .iter()
- .map(|h| JsValue::from_str(&hex::encode(&h.0)))
- .collect();
- heads
- }
-
- pub fn rollback(&mut self) -> JsValue {
- self.0.rollback().into()
- }
-
- pub fn keys(&mut self, obj: JsValue, heads: JsValue) -> Result<Array, JsValue> {
- let obj = self.import(obj)?;
- let result = if let Some(heads) = get_heads(heads) {
- self.0.keys_at(&obj, &heads)
- } else {
- self.0.keys(&obj)
- }
- .iter()
- .map(|s| JsValue::from_str(s))
- .collect();
- Ok(result)
- }
-
- pub fn text(&mut self, obj: JsValue, heads: JsValue) -> Result<JsValue, JsValue> {
- let obj = self.import(obj)?;
- if let Some(heads) = get_heads(heads) {
- self.0.text_at(&obj, &heads)
- } else {
- self.0.text(&obj)
- }
- .map_err(to_js_err)
- .map(|t| t.into())
- }
-
- pub fn splice(
- &mut self,
- obj: JsValue,
- start: JsValue,
- delete_count: JsValue,
- text: JsValue,
- ) -> Result<(), JsValue> {
- let obj = self.import(obj)?;
- let start = to_usize(start, "start")?;
- let delete_count = to_usize(delete_count, "deleteCount")?;
- let mut vals = vec![];
- if let Some(t) = text.as_string() {
- self.0
- .splice_text(&obj, start, delete_count, &t)
- .map_err(to_js_err)?;
- } else {
- if let Ok(array) = text.dyn_into::<Array>() {
- for i in array.iter() {
- if let Some(t) = i.as_string() {
- vals.push(t.into());
- } else if let Ok(array) = i.dyn_into::<Array>() {
- let value = array.get(1);
- let datatype = array.get(2);
- let value = self.import_value(value, datatype)?;
- vals.push(value);
- }
- }
- }
- self.0
- .splice(&obj, start, delete_count, vals)
- .map_err(to_js_err)?;
- }
- Ok(())
- }
-
- pub fn insert(
- &mut self,
- obj: JsValue,
- index: JsValue,
- value: JsValue,
- datatype: JsValue,
- ) -> Result<JsValue, JsValue> {
- let obj = self.import(obj)?;
- //let key = self.insert_pos_for_index(&obj, prop)?;
- let index: Result<_, JsValue> = index
- .as_f64()
- .ok_or_else(|| "insert index must be a number".into());
- let index = index?;
- let value = self.import_value(value, datatype)?;
- let opid = self
- .0
- .insert(&obj, index as usize, value)
- .map_err(to_js_err)?;
- match opid {
- Some(opid) => Ok(self.export(opid)),
- None => Ok(JsValue::null()),
- }
- }
-
- pub fn set(
- &mut self,
- obj: JsValue,
- prop: JsValue,
- value: JsValue,
- datatype: JsValue,
- ) -> Result<JsValue, JsValue> {
- let obj = self.import(obj)?;
- let prop = self.import_prop(prop)?;
- let value = self.import_value(value, datatype)?;
- let opid = self.0.set(&obj, prop, value).map_err(to_js_err)?;
- match opid {
- Some(opid) => Ok(self.export(opid)),
- None => Ok(JsValue::null()),
- }
- }
-
- pub fn inc(&mut self, obj: JsValue, prop: JsValue, value: JsValue) -> Result<(), JsValue> {
- let obj = self.import(obj)?;
- let prop = self.import_prop(prop)?;
- let value: f64 = value
- .as_f64()
- .ok_or("inc needs a numeric value")
- .map_err(to_js_err)?;
- self.0.inc(&obj, prop, value as i64).map_err(to_js_err)?;
- Ok(())
- }
-
- pub fn value(&mut self, obj: JsValue, prop: JsValue, heads: JsValue) -> Result<Array, JsValue> {
- let obj = self.import(obj)?;
- let result = Array::new();
- let prop = to_prop(prop);
- let heads = get_heads(heads);
- if let Ok(prop) = prop {
- let value = if let Some(h) = heads {
- self.0.value_at(&obj, prop, &h)
- } else {
- self.0.value(&obj, prop)
- }
- .map_err(to_js_err)?;
- match value {
- Some((Value::Object(obj_type), obj_id)) => {
- result.push(&obj_type.to_string().into());
- result.push(&self.export(obj_id));
- }
- Some((Value::Scalar(value), _)) => {
- result.push(&datatype(&value).into());
- result.push(&ScalarValue(value).into());
- }
- None => {}
- }
- }
- Ok(result)
- }
-
- pub fn values(&mut self, obj: JsValue, arg: JsValue, heads: JsValue) -> Result<Array, JsValue> {
- let obj = self.import(obj)?;
- let result = Array::new();
- let prop = to_prop(arg);
- if let Ok(prop) = prop {
- let values = if let Some(heads) = get_heads(heads) {
- self.0.values_at(&obj, prop, &heads)
- } else {
- self.0.values(&obj, prop)
- }
- .map_err(to_js_err)?;
- for value in values {
- match value {
- (Value::Object(obj_type), obj_id) => {
- let sub = Array::new();
- sub.push(&obj_type.to_string().into());
- sub.push(&self.export(obj_id));
- result.push(&sub.into());
- }
- (Value::Scalar(value), id) => {
- let sub = Array::new();
- sub.push(&datatype(&value).into());
- sub.push(&ScalarValue(value).into());
- sub.push(&self.export(id));
- result.push(&sub.into());
- }
- }
- }
- }
- Ok(result)
- }
-
- pub fn length(&mut self, obj: JsValue, heads: JsValue) -> Result<JsValue, JsValue> {
- let obj = self.import(obj)?;
- if let Some(heads) = get_heads(heads) {
- Ok((self.0.length_at(&obj, &heads) as f64).into())
- } else {
- Ok((self.0.length(&obj) as f64).into())
- }
- }
-
- pub fn del(&mut self, obj: JsValue, prop: JsValue) -> Result<(), JsValue> {
- let obj = self.import(obj)?;
- let prop = to_prop(prop)?;
- self.0.del(&obj, prop).map_err(to_js_err)?;
- Ok(())
- }
-
- pub fn save(&mut self) -> Result<Uint8Array, JsValue> {
- self.0
- .save()
- .map(|v| Uint8Array::from(v.as_slice()))
- .map_err(to_js_err)
- }
-
- #[wasm_bindgen(js_name = saveIncremental)]
- pub fn save_incremental(&mut self) -> JsValue {
- let bytes = self.0.save_incremental();
- Uint8Array::from(bytes.as_slice()).into()
- }
-
- #[wasm_bindgen(js_name = loadIncremental)]
- pub fn load_incremental(&mut self, data: Uint8Array) -> Result<JsValue, JsValue> {
- let data = data.to_vec();
- let len = self.0.load_incremental(&data).map_err(to_js_err)?;
- Ok(len.into())
- }
-
- #[wasm_bindgen(js_name = applyChanges)]
- pub fn apply_changes(&mut self, changes: JsValue) -> Result<(), JsValue> {
- let changes: Vec<_> = JS(changes).try_into()?;
- self.0.apply_changes(&changes).map_err(to_js_err)?;
- Ok(())
- }
-
- #[wasm_bindgen(js_name = getChanges)]
- pub fn get_changes(&mut self, have_deps: JsValue) -> Result<Array, JsValue> {
- let deps: Vec<_> = JS(have_deps).try_into()?;
- let changes = self.0.get_changes(&deps);
- let changes: Array = changes
- .iter()
- .map(|c| Uint8Array::from(c.raw_bytes()))
- .collect();
- Ok(changes)
- }
-
- #[wasm_bindgen(js_name = getChangesAdded)]
- pub fn get_changes_added(&mut self, other: &Automerge) -> Result<Array, JsValue> {
- let changes = self.0.get_changes_added(&other.0);
- let changes: Array = changes
- .iter()
- .map(|c| Uint8Array::from(c.raw_bytes()))
- .collect();
- Ok(changes)
- }
-
- #[wasm_bindgen(js_name = getHeads)]
- pub fn get_heads(&mut self) -> Result<Array, JsValue> {
- let heads = self.0.get_heads();
- let heads: Array = heads
- .iter()
- .map(|h| JsValue::from_str(&hex::encode(&h.0)))
- .collect();
- Ok(heads)
- }
-
- #[wasm_bindgen(js_name = getActorId)]
- pub fn get_actor_id(&mut self) -> Result<JsValue, JsValue> {
- let actor = self.0.get_actor();
- Ok(actor.to_string().into())
- }
-
- #[wasm_bindgen(js_name = getLastLocalChange)]
- pub fn get_last_local_change(&mut self) -> Result<JsValue, JsValue> {
- if let Some(change) = self.0.get_last_local_change() {
- Ok(Uint8Array::from(change.raw_bytes()).into())
- } else {
- Ok(JsValue::null())
- }
- }
-
- pub fn dump(&self) {
- self.0.dump()
- }
-
- #[wasm_bindgen(js_name = getMissingDeps)]
- pub fn get_missing_deps(&mut self, heads: JsValue) -> Result<Array, JsValue> {
- let heads: Vec<_> = JS(heads).try_into()?;
- let deps = self.0.get_missing_deps(&heads);
- let deps: Array = deps
- .iter()
- .map(|h| JsValue::from_str(&hex::encode(&h.0)))
- .collect();
- Ok(deps)
- }
-
- #[wasm_bindgen(js_name = receiveSyncMessage)]
- pub fn receive_sync_message(
- &mut self,
- state: &mut SyncState,
- message: Uint8Array,
- ) -> Result<(), JsValue> {
- let message = message.to_vec();
- let message = am::SyncMessage::decode(message.as_slice()).map_err(to_js_err)?;
- self.0
- .receive_sync_message(&mut state.0, message)
- .map_err(to_js_err)?;
- Ok(())
- }
-
- #[wasm_bindgen(js_name = generateSyncMessage)]
- pub fn generate_sync_message(&mut self, state: &mut SyncState) -> Result<JsValue, JsValue> {
- if let Some(message) = self.0.generate_sync_message(&mut state.0) {
- Ok(Uint8Array::from(message.encode().map_err(to_js_err)?.as_slice()).into())
- } else {
- Ok(JsValue::null())
- }
- }
-
- fn export(&self, val: ExId) -> JsValue {
- val.to_string().into()
- }
-
- fn import(&self, id: JsValue) -> Result<ExId, JsValue> {
- let id_str = id
- .as_string()
- .ok_or("invalid opid/objid/elemid")
- .map_err(to_js_err)?;
- self.0.import(&id_str).map_err(to_js_err)
- }
-
- fn import_prop(&mut self, prop: JsValue) -> Result<Prop, JsValue> {
- if let Some(s) = prop.as_string() {
- Ok(s.into())
- } else if let Some(n) = prop.as_f64() {
- Ok((n as usize).into())
- } else {
- Err(format!("invalid prop {:?}", prop).into())
- }
- }
-
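- // Converts a JS value plus an optional datatype hint ("int", "uint", "f64",
- // "counter", "timestamp", "boolean", "bytes", "null") into an automerge Value.
- // Without a hint the type is inferred: null, bool, string, integral vs. fractional
- // number, one of the MAP/LIST/TEXT/TABLE markers, or a Uint8Array as bytes.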
- fn import_value(&mut self, value: JsValue, datatype: JsValue) -> Result<Value, JsValue> {
- let datatype = datatype.as_string();
- match datatype.as_deref() {
- Some("boolean") => value
- .as_bool()
- .ok_or_else(|| "value must be a bool".into())
- .map(|v| am::ScalarValue::Boolean(v).into()),
- Some("int") => value
- .as_f64()
- .ok_or_else(|| "value must be a number".into())
- .map(|v| am::ScalarValue::Int(v as i64).into()),
- Some("uint") => value
- .as_f64()
- .ok_or_else(|| "value must be a number".into())
- .map(|v| am::ScalarValue::Uint(v as u64).into()),
- Some("f64") => value
- .as_f64()
- .ok_or_else(|| "value must be a number".into())
- .map(|n| am::ScalarValue::F64(n).into()),
- Some("bytes") => {
- Ok(am::ScalarValue::Bytes(value.dyn_into::<Uint8Array>().unwrap().to_vec()).into())
- }
- Some("counter") => value
- .as_f64()
- .ok_or_else(|| "value must be a number".into())
- .map(|v| am::ScalarValue::Counter(v as i64).into()),
- Some("timestamp") => value
- .as_f64()
- .ok_or_else(|| "value must be a number".into())
- .map(|v| am::ScalarValue::Timestamp(v as i64).into()),
- /*
- Some("bytes") => unimplemented!(),
- Some("cursor") => unimplemented!(),
- */
- Some("null") => Ok(am::ScalarValue::Null.into()),
- Some(_) => Err(format!("unknown datatype {:?}", datatype).into()),
- None => {
- if value.is_null() {
- Ok(am::ScalarValue::Null.into())
- } else if let Some(b) = value.as_bool() {
- Ok(am::ScalarValue::Boolean(b).into())
- } else if let Some(s) = value.as_string() {
- // FIXME - we need to detect str vs int vs float vs bool here :/
- Ok(am::ScalarValue::Str(s.into()).into())
- } else if let Some(n) = value.as_f64() {
- if (n.round() - n).abs() < f64::EPSILON {
- Ok(am::ScalarValue::Int(n as i64).into())
- } else {
- Ok(am::ScalarValue::F64(n).into())
- }
- } else if let Some(o) = to_objtype(&value) {
- Ok(o.into())
- } else if let Ok(o) = &value.dyn_into::<Uint8Array>() {
- Ok(am::ScalarValue::Bytes(o.to_vec()).into())
- } else {
- Err("value is invalid".into())
- }
- }
- }
- }
-}
-
-pub fn to_usize(val: JsValue, name: &str) -> Result<usize, JsValue> {
- match val.as_f64() {
- Some(n) => Ok(n as usize),
- None => Err(format!("{} must be a number", name).into()),
- }
-}
-
-pub fn to_prop(p: JsValue) -> Result<Prop, JsValue> {
- if let Some(s) = p.as_string() {
- Ok(Prop::Map(s))
- } else if let Some(n) = p.as_f64() {
- Ok(Prop::Seq(n as usize))
- } else {
- Err("prop must be a string or number".into())
- }
-}
-
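-// Determines which object type a JS-side call requested by inspecting the stringified
-// constructor: the exported MAP / LIST / TEXT / TABLE marker classes below stringify to
-// source beginning with "class MAP" etc., which is what e.g. doc.set(root, "submap", MAP)
-// passes in from JavaScript.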
-fn to_objtype(a: &JsValue) -> Option<am::ObjType> {
- if !a.is_function() {
- return None;
- }
- let f: js_sys::Function = a.clone().try_into().unwrap();
- let f = f.to_string();
- if f.starts_with("class MAP", 0) {
- Some(am::ObjType::Map)
- } else if f.starts_with("class LIST", 0) {
- Some(am::ObjType::List)
- } else if f.starts_with("class TEXT", 0) {
- Some(am::ObjType::Text)
- } else if f.starts_with("class TABLE", 0) {
- Some(am::ObjType::Table)
- } else {
- None
- }
-}
-
-struct ObjType(am::ObjType);
-
-impl TryFrom<JsValue> for ObjType {
- type Error = JsValue;
-
- fn try_from(val: JsValue) -> Result<Self, Self::Error> {
- match &val.as_string() {
- Some(o) if o == "map" => Ok(ObjType(am::ObjType::Map)),
- Some(o) if o == "list" => Ok(ObjType(am::ObjType::List)),
- Some(o) => Err(format!("unknown obj type {}", o).into()),
- _ => Err("obj type must be a string".into()),
- }
- }
-}
-
-#[wasm_bindgen]
-pub fn init(actor: JsValue) -> Result<Automerge, JsValue> {
- console_error_panic_hook::set_once();
- Automerge::new(actor)
-}
-
-#[wasm_bindgen]
-pub fn load(data: Uint8Array, actor: JsValue) -> Result<Automerge, JsValue> {
- let data = data.to_vec();
- let mut automerge = am::Automerge::load(&data).map_err(to_js_err)?;
- if let Some(s) = actor.as_string() {
- let actor = automerge::ActorId::from(hex::decode(s).map_err(to_js_err)?.to_vec());
- automerge.set_actor(actor)
- }
- Ok(Automerge(automerge))
-}
-
-#[wasm_bindgen(js_name = encodeChange)]
-pub fn encode_change(change: JsValue) -> Result<Uint8Array, JsValue> {
- let change: am::ExpandedChange = change.into_serde().map_err(to_js_err)?;
- let change: Change = change.into();
- Ok(Uint8Array::from(change.raw_bytes()))
-}
-
-#[wasm_bindgen(js_name = decodeChange)]
-pub fn decode_change(change: Uint8Array) -> Result<JsValue, JsValue> {
- let change = Change::from_bytes(change.to_vec()).map_err(to_js_err)?;
- let change: am::ExpandedChange = change.decode();
- JsValue::from_serde(&change).map_err(to_js_err)
-}
-
-#[wasm_bindgen(js_name = initSyncState)]
-pub fn init_sync_state() -> SyncState {
- SyncState(Default::default())
-}
-
-#[wasm_bindgen(js_name = encodeSyncMessage)]
-pub fn encode_sync_message(message: JsValue) -> Result<Uint8Array, JsValue> {
- let heads = get(&message, "heads")?.try_into()?;
- let need = get(&message, "need")?.try_into()?;
- let changes = get(&message, "changes")?.try_into()?;
- let have = get(&message, "have")?.try_into()?;
- Ok(Uint8Array::from(
- am::SyncMessage {
- heads,
- need,
- have,
- changes,
- }
- .encode()
- .unwrap()
- .as_slice(),
- ))
-}
-
-#[wasm_bindgen(js_name = decodeSyncMessage)]
-pub fn decode_sync_message(msg: Uint8Array) -> Result<JsValue, JsValue> {
- let data = msg.to_vec();
- let msg = am::SyncMessage::decode(&data).map_err(to_js_err)?;
- let heads: Array = VH(&msg.heads).into();
- let need: Array = VH(&msg.need).into();
- let changes: Array = VC(&msg.changes).into();
- let have: Array = VSH(&msg.have).try_into()?;
- let obj = Object::new().into();
- set(&obj, "heads", heads)?;
- set(&obj, "need", need)?;
- set(&obj, "have", have)?;
- set(&obj, "changes", changes)?;
- Ok(obj)
-}
-
-#[wasm_bindgen(js_name = encodeSyncState)]
-pub fn encode_sync_state(state: SyncState) -> Result<Uint8Array, JsValue> {
- Ok(Uint8Array::from(
- state.0.encode().map_err(to_js_err)?.as_slice(),
- ))
-}
-
-#[wasm_bindgen(js_name = decodeSyncState)]
-pub fn decode_sync_state(state: Uint8Array) -> Result<SyncState, JsValue> {
- SyncState::decode(state)
-}
-
-#[wasm_bindgen(js_name = MAP)]
-pub struct Map {}
-
-#[wasm_bindgen(js_name = LIST)]
-pub struct List {}
-
-#[wasm_bindgen(js_name = TEXT)]
-pub struct Text {}
-
-#[wasm_bindgen(js_name = TABLE)]
-pub struct Table {}
-
-fn to_js_err<T: Display>(err: T) -> JsValue {
- js_sys::Error::new(&std::format!("{}", err)).into()
-}
-
-fn get(obj: &JsValue, prop: &str) -> Result<JS, JsValue> {
- Ok(JS(Reflect::get(obj, &prop.into())?))
-}
-
-fn set<V: Into<JsValue>>(obj: &JsValue, prop: &str, val: V) -> Result<bool, JsValue> {
- Reflect::set(obj, &prop.into(), &val.into())
-}
-
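-// JS, VH, VC and VSH below are thin newtype wrappers used to convert between JsValue /
-// js_sys types and automerge types (change hashes, changes, sync "have" entries);
-// the wrappers are needed because the orphan rule forbids implementing the conversion
-// traits on the foreign types directly.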
-struct JS(JsValue);
-
-impl TryFrom<JS> for Vec<ChangeHash> {
- type Error = JsValue;
-
- fn try_from(value: JS) -> Result<Self, Self::Error> {
- let value = value.0.dyn_into::<Array>()?;
- let value: Result<Vec<ChangeHash>, _> = value.iter().map(|j| j.into_serde()).collect();
- let value = value.map_err(to_js_err)?;
- Ok(value)
- }
-}
-
-impl From<JS> for Option<Vec<ChangeHash>> {
- fn from(value: JS) -> Self {
- let value = value.0.dyn_into::<Array>().ok()?;
- let value: Result<Vec<ChangeHash>, _> = value.iter().map(|j| j.into_serde()).collect();
- let value = value.ok()?;
- Some(value)
- }
-}
-
-impl TryFrom<JS> for Vec<Change> {
- type Error = JsValue;
-
- fn try_from(value: JS) -> Result<Self, Self::Error> {
- let value = value.0.dyn_into::<Array>()?;
- let changes: Result<Vec<Uint8Array>, _> = value.iter().map(|j| j.dyn_into()).collect();
- let changes = changes?;
- let changes: Result<Vec<Change>, _> = changes
- .iter()
- .map(|a| am::decode_change(a.to_vec()))
- .collect();
- let changes = changes.map_err(to_js_err)?;
- Ok(changes)
- }
-}
-
-impl TryFrom<JS> for Vec<am::SyncHave> {
- type Error = JsValue;
-
- fn try_from(value: JS) -> Result<Self, Self::Error> {
- let value = value.0.dyn_into::<Array>()?;
- let have: Result<Vec<am::SyncHave>, JsValue> = value
- .iter()
- .map(|s| {
- let last_sync = get(&s, "lastSync")?.try_into()?;
- let bloom = get(&s, "bloom")?.try_into()?;
- Ok(am::SyncHave { last_sync, bloom })
- })
- .collect();
- let have = have?;
- Ok(have)
- }
-}
-
-impl TryFrom<JS> for am::BloomFilter {
- type Error = JsValue;
-
- fn try_from(value: JS) -> Result<Self, Self::Error> {
- let value: Uint8Array = value.0.dyn_into()?;
- let value = value.to_vec();
- let value = value.as_slice().try_into().map_err(to_js_err)?;
- Ok(value)
- }
-}
-
-struct VH<'a>(&'a [ChangeHash]);
-
-impl<'a> From<VH<'a>> for Array {
- fn from(value: VH<'a>) -> Self {
- let heads: Array = value
- .0
- .iter()
- .map(|h| JsValue::from_str(&hex::encode(&h.0)))
- .collect();
- heads
- }
-}
-
-struct VC<'a>(&'a [Change]);
-
-impl<'a> From<VC<'a>> for Array {
- fn from(value: VC<'a>) -> Self {
- let changes: Array = value
- .0
- .iter()
- .map(|c| Uint8Array::from(c.raw_bytes()))
- .collect();
- changes
- }
-}
-
-#[allow(clippy::upper_case_acronyms)]
-struct VSH<'a>(&'a [am::SyncHave]);
-
-impl<'a> TryFrom<VSH<'a>> for Array {
- type Error = JsValue;
-
- fn try_from(value: VSH<'a>) -> Result<Self, Self::Error> {
- let have: Result<Array, JsValue> = value
- .0
- .iter()
- .map(|have| {
- let last_sync: Array = have
- .last_sync
- .iter()
- .map(|h| JsValue::from_str(&hex::encode(&h.0)))
- .collect();
- // FIXME - the clone and the unwrap here shouldnt be needed - look at into_bytes()
- let bloom = Uint8Array::from(have.bloom.clone().into_bytes().unwrap().as_slice());
- let obj: JsValue = Object::new().into();
- Reflect::set(&obj, &"lastSync".into(), &last_sync.into())?;
- Reflect::set(&obj, &"bloom".into(), &bloom.into())?;
- Ok(obj)
- })
- .collect();
- let have = have?;
- Ok(have)
- }
-}
-
-fn rust_to_js<T: Serialize>(value: T) -> Result<JsValue, JsValue> {
- JsValue::from_serde(&value).map_err(to_js_err)
-}
-
-fn js_to_rust<T: DeserializeOwned>(value: &JsValue) -> Result<T, JsValue> {
- value.into_serde().map_err(to_js_err)
-}
-
-fn get_heads(heads: JsValue) -> Option<Vec<ChangeHash>> {
- JS(heads).into()
-}
diff --git a/automerge-wasm/test/test.js b/automerge-wasm/test/test.js
deleted file mode 100644
index 22aebcfd..00000000
--- a/automerge-wasm/test/test.js
+++ /dev/null
@@ -1,284 +0,0 @@
-
-const assert = require('assert')
-const util = require('util')
-const Automerge = require('..')
-const { MAP, LIST, TEXT } = Automerge
-
-// str to uint8array
-function en(str) {
- return new TextEncoder('utf8').encode(str)
-}
-// uint8array to str
-function de(bytes) {
- return new TextDecoder('utf8').decode(bytes);
-}
-
-describe('Automerge', () => {
- describe('basics', () => {
- it('should init clone and free', () => {
- let doc1 = Automerge.init()
- let doc2 = doc1.clone()
- doc1.free()
- doc2.free()
- })
-
- it('should be able to start and commit', () => {
- let doc = Automerge.init()
- doc.commit()
- })
-
- it('getting a nonexistent prop does not throw an error', () => {
- let doc = Automerge.init()
- let root = "_root"
- let result = doc.value(root,"hello")
- assert.deepEqual(result,[])
- })
-
- it('should be able to set and get a simple value', () => {
- let doc = Automerge.init()
- let root = "_root"
- let result
-
- doc.set(root, "hello", "world")
- doc.set(root, "number1", 5, "uint")
- doc.set(root, "number2", 5)
- doc.set(root, "number3", 5.5)
- doc.set(root, "number4", 5.5, "f64")
- doc.set(root, "number5", 5.5, "int")
- doc.set(root, "bool", true)
-
- result = doc.value(root,"hello")
- assert.deepEqual(result,["str","world"])
-
- result = doc.value(root,"number1")
- assert.deepEqual(result,["uint",5])
-
- result = doc.value(root,"number2")
- assert.deepEqual(result,["int",5])
-
- result = doc.value(root,"number3")
- assert.deepEqual(result,["f64",5.5])
-
- result = doc.value(root,"number4")
- assert.deepEqual(result,["f64",5.5])
-
- result = doc.value(root,"number5")
- assert.deepEqual(result,["int",5])
-
- result = doc.value(root,"bool")
- assert.deepEqual(result,["boolean",true])
-
- doc.set(root, "bool", false, "boolean")
-
- result = doc.value(root,"bool")
- assert.deepEqual(result,["boolean",false])
- })
-
- it('should be able to use bytes', () => {
- let doc = Automerge.init()
- doc.set("_root","data1", new Uint8Array([10,11,12]));
- doc.set("_root","data2", new Uint8Array([13,14,15]), "bytes");
- let value1 = doc.value("_root", "data1")
- assert.deepEqual(value1, ["bytes", new Uint8Array([10,11,12])]);
- let value2 = doc.value("_root", "data2")
- assert.deepEqual(value2, ["bytes", new Uint8Array([13,14,15])]);
- })
-
- it('should be able to make sub objects', () => {
- let doc = Automerge.init()
- let root = "_root"
- let result
-
- let submap = doc.set(root, "submap", MAP)
- doc.set(submap, "number", 6, "uint")
- assert.strictEqual(doc.pending_ops(),2)
-
- result = doc.value(root,"submap")
- assert.deepEqual(result,["map",submap])
-
- result = doc.value(submap,"number")
- assert.deepEqual(result,["uint",6])
- })
-
- it('should be able to make lists', () => {
- let doc = Automerge.init()
- let root = "_root"
-
- let submap = doc.set(root, "numbers", LIST)
- doc.insert(submap, 0, "a");
- doc.insert(submap, 1, "b");
- doc.insert(submap, 2, "c");
- doc.insert(submap, 0, "z");
-
- assert.deepEqual(doc.value(submap, 0),["str","z"])
- assert.deepEqual(doc.value(submap, 1),["str","a"])
- assert.deepEqual(doc.value(submap, 2),["str","b"])
- assert.deepEqual(doc.value(submap, 3),["str","c"])
- assert.deepEqual(doc.length(submap),4)
-
- doc.set(submap, 2, "b v2");
-
- assert.deepEqual(doc.value(submap, 2),["str","b v2"])
- assert.deepEqual(doc.length(submap),4)
- })
-
- it('should be able to delete non-existent props', () => {
- let doc = Automerge.init()
-
- doc.set("_root", "foo","bar")
- doc.set("_root", "bip","bap")
- let heads1 = doc.commit()
-
- assert.deepEqual(doc.keys("_root"),["bip","foo"])
-
- doc.del("_root", "foo")
- doc.del("_root", "baz")
- let heads2 = doc.commit()
-
- assert.deepEqual(doc.keys("_root"),["bip"])
- assert.deepEqual(doc.keys("_root", heads1),["bip", "foo"])
- assert.deepEqual(doc.keys("_root", heads2),["bip"])
- })
-
- it('should be able to del', () => {
- let doc = Automerge.init()
- let root = "_root"
-
- doc.set(root, "xxx", "xxx");
- assert.deepEqual(doc.value(root, "xxx"),["str","xxx"])
- doc.del(root, "xxx");
- assert.deepEqual(doc.value(root, "xxx"),[])
- })
-
- it('should be able to use counters', () => {
- let doc = Automerge.init()
- let root = "_root"
-
- doc.set(root, "counter", 10, "counter");
- assert.deepEqual(doc.value(root, "counter"),["counter",10])
- doc.inc(root, "counter", 10);
- assert.deepEqual(doc.value(root, "counter"),["counter",20])
- doc.inc(root, "counter", -5);
- assert.deepEqual(doc.value(root, "counter"),["counter",15])
- })
-
- it('should be able to splice text', () => {
- let doc = Automerge.init()
- let root = "_root";
-
- let text = doc.set(root, "text", Automerge.TEXT);
- doc.splice(text, 0, 0, "hello ")
- doc.splice(text, 6, 0, ["w","o","r","l","d"])
- doc.splice(text, 11, 0, [["str","!"],["str","?"]])
- assert.deepEqual(doc.value(text, 0),["str","h"])
- assert.deepEqual(doc.value(text, 1),["str","e"])
- assert.deepEqual(doc.value(text, 9),["str","l"])
- assert.deepEqual(doc.value(text, 10),["str","d"])
- assert.deepEqual(doc.value(text, 11),["str","!"])
- assert.deepEqual(doc.value(text, 12),["str","?"])
- })
-
- it('should be able to save all or incrementally', () => {
- let doc = Automerge.init()
-
- doc.set("_root", "foo", 1)
-
- let save1 = doc.save()
-
- doc.set("_root", "bar", 2)
-
- let saveMidway = doc.clone().save();
-
- let save2 = doc.saveIncremental();
-
- doc.set("_root", "baz", 3);
-
- let save3 = doc.saveIncremental();
-
- let saveA = doc.save();
- let saveB = new Uint8Array([... save1, ...save2, ...save3]);
-
- assert.notDeepEqual(saveA, saveB);
-
- let docA = Automerge.load(saveA);
- let docB = Automerge.load(saveB);
- let docC = Automerge.load(saveMidway)
- docC.loadIncremental(save3)
-
- assert.deepEqual(docA.keys("_root"), docB.keys("_root"));
- assert.deepEqual(docA.save(), docB.save());
- assert.deepEqual(docA.save(), docC.save());
- })
-
- it('should be able to splice text', () => {
- let doc = Automerge.init()
- let text = doc.set("_root", "text", TEXT);
- doc.splice(text, 0, 0, "hello world");
- let heads1 = doc.commit();
- doc.splice(text, 6, 0, "big bad ");
- let heads2 = doc.commit();
- assert.strictEqual(doc.text(text), "hello big bad world")
- assert.strictEqual(doc.length(text), 19)
- assert.strictEqual(doc.text(text, heads1), "hello world")
- assert.strictEqual(doc.length(text, heads1), 11)
- assert.strictEqual(doc.text(text, heads2), "hello big bad world")
- assert.strictEqual(doc.length(text, heads2), 19)
- })
-
- it('local inc increments all visible counters in a map', () => {
- let doc1 = Automerge.init("aaaa")
- doc1.set("_root", "hello", "world")
- let doc2 = Automerge.load(doc1.save(), "bbbb");
- let doc3 = Automerge.load(doc1.save(), "cccc");
- doc1.set("_root", "cnt", 20)
- doc2.set("_root", "cnt", 0, "counter")
- doc3.set("_root", "cnt", 10, "counter")
- doc1.applyChanges(doc2.getChanges(doc1.getHeads()))
- doc1.applyChanges(doc3.getChanges(doc1.getHeads()))
- let result = doc1.values("_root", "cnt")
- assert.deepEqual(result,[
- ['counter',10,'2@cccc'],
- ['counter',0,'2@bbbb'],
- ['int',20,'2@aaaa']
- ])
- doc1.inc("_root", "cnt", 5)
- result = doc1.values("_root", "cnt")
- assert.deepEqual(result, [
- [ 'counter', 15, '2@cccc' ], [ 'counter', 5, '2@bbbb' ]
- ])
-
- let save1 = doc1.save()
- let doc4 = Automerge.load(save1)
- assert.deepEqual(doc4.save(), save1);
- })
-
- it('local inc increments all visible counters in a sequence', () => {
- let doc1 = Automerge.init("aaaa")
- let seq = doc1.set("_root", "seq", LIST)
- doc1.insert(seq, 0, "hello")
- let doc2 = Automerge.load(doc1.save(), "bbbb");
- let doc3 = Automerge.load(doc1.save(), "cccc");
- doc1.set(seq, 0, 20)
- doc2.set(seq, 0, 0, "counter")
- doc3.set(seq, 0, 10, "counter")
- doc1.applyChanges(doc2.getChanges(doc1.getHeads()))
- doc1.applyChanges(doc3.getChanges(doc1.getHeads()))
- let result = doc1.values(seq, 0)
- assert.deepEqual(result,[
- ['counter',10,'3@cccc'],
- ['counter',0,'3@bbbb'],
- ['int',20,'3@aaaa']
- ])
- doc1.inc(seq, 0, 5)
- result = doc1.values(seq, 0)
- assert.deepEqual(result, [
- [ 'counter', 15, '3@cccc' ], [ 'counter', 5, '3@bbbb' ]
- ])
-
- let save = doc1.save()
- let doc4 = Automerge.load(save)
- assert.deepEqual(doc4.save(), save);
- })
-
- })
-})
diff --git a/automerge/Cargo.toml b/automerge/Cargo.toml
deleted file mode 100644
index 6a0f81e7..00000000
--- a/automerge/Cargo.toml
+++ /dev/null
@@ -1,38 +0,0 @@
-[package]
-name = "automerge"
-version = "0.1.0"
-edition = "2018"
-license = "MIT"
-
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-
-[features]
-optree-visualisation = ["dot"]
-
-[dependencies]
-hex = "^0.4.3"
-leb128 = "^0.2.5"
-sha2 = "^0.10.0"
-rand = { version = "^0.8.4" }
-thiserror = "^1.0.16"
-itertools = "^0.10.3"
-flate2 = "^1.0.22"
-nonzero_ext = "^0.2.0"
-uuid = { version = "^0.8.2", features=["v4", "wasm-bindgen", "serde"] }
-smol_str = "^0.1.21"
-tracing = { version = "^0.1.29", features = ["log"] }
-fxhash = "^0.2.1"
-tinyvec = { version = "^1.5.1", features = ["alloc"] }
-unicode-segmentation = "1.7.1"
-serde = { version = "^1.0", features=["derive"] }
-dot = { version = "0.1.4", optional = true }
-
-[dependencies.web-sys]
-version = "^0.3.55"
-features = ["console"]
-
-[dev-dependencies]
-pretty_assertions = "1.0.0"
-proptest = { version = "^1.0.0", default-features = false, features = ["std"] }
-serde_json = { version = "^1.0.73", features=["float_roundtrip"], default-features=true }
-maplit = { version = "^1.0" }
diff --git a/automerge/TODO.md b/automerge/TODO.md
deleted file mode 100644
index 68fa633f..00000000
--- a/automerge/TODO.md
+++ /dev/null
@@ -1,18 +0,0 @@
-
-counters -> Visibility
-
-fast load
-
-values at clock
-length at clock
-keys at clock
-text at clock
-
-extra tests
- counters in lists -> inserts with tombstones
-
-ergonomics
-
- set(obj, prop, val) vs mapset(obj, str, val) and seqset(obj, usize, val)
- value() -> (id, value)
-
diff --git a/automerge/src/change.rs b/automerge/src/change.rs
deleted file mode 100644
index d3ab7144..00000000
--- a/automerge/src/change.rs
+++ /dev/null
@@ -1,916 +0,0 @@
-use crate::columnar::{
- ChangeEncoder, ChangeIterator, ColumnEncoder, DepsIterator, DocChange, DocOp, DocOpEncoder,
- DocOpIterator, OperationIterator, COLUMN_TYPE_DEFLATE,
-};
-use crate::decoding;
-use crate::decoding::{Decodable, InvalidChangeError};
-use crate::encoding::{Encodable, DEFLATE_MIN_SIZE};
-use crate::legacy as amp;
-use crate::{
- ActorId, AutomergeError, ElemId, IndexedCache, Key, ObjId, Op, OpId, OpType, Transaction, HEAD,
-};
-use core::ops::Range;
-use flate2::{
- bufread::{DeflateDecoder, DeflateEncoder},
- Compression,
-};
-use itertools::Itertools;
-use sha2::Digest;
-use sha2::Sha256;
-use std::collections::{HashMap, HashSet};
-use std::convert::TryInto;
-use std::fmt::Debug;
-use std::io::{Read, Write};
-use tracing::instrument;
-
-const MAGIC_BYTES: [u8; 4] = [0x85, 0x6f, 0x4a, 0x83];
-const PREAMBLE_BYTES: usize = 8;
-const HEADER_BYTES: usize = PREAMBLE_BYTES + 1;
-
-const HASH_BYTES: usize = 32;
-const BLOCK_TYPE_DOC: u8 = 0;
-const BLOCK_TYPE_CHANGE: u8 = 1;
-const BLOCK_TYPE_DEFLATE: u8 = 2;
-const CHUNK_START: usize = 8;
-const HASH_RANGE: Range<usize> = 4..8;
-
-fn get_heads(changes: &[amp::Change]) -> HashSet<amp::ChangeHash> {
- changes.iter().fold(HashSet::new(), |mut acc, c| {
- if let Some(h) = c.hash {
- acc.insert(h);
- }
- for dep in &c.deps {
- acc.remove(dep);
- }
- acc
- })
-}
-
-pub(crate) fn encode_document(
- changes: &[amp::Change],
- doc_ops: &[Op],
- actors_index: &IndexedCache<ActorId>,
- props: &[String],
-) -> Result<Vec<u8>, AutomergeError> {
- let mut bytes: Vec<u8> = Vec::new();
-
- let heads = get_heads(changes);
-
- let actors_map = actors_index.encode_index();
- let actors = actors_index.sorted();
-
- /*
- // this assumes that all actor_ids referenced are seen in changes.actor_id which is true
- // so long as we have a full history
- let mut actors: Vec<_> = changes
- .iter()
- .map(|c| &c.actor)
- .unique()
- .sorted()
- .cloned()
- .collect();
- */
-
- let (change_bytes, change_info) = ChangeEncoder::encode_changes(changes, &actors);
-
- //let doc_ops = group_doc_ops(changes, &actors);
-
- let (ops_bytes, ops_info) = DocOpEncoder::encode_doc_ops(doc_ops, &actors_map, props);
-
- bytes.extend(&MAGIC_BYTES);
- bytes.extend(vec![0, 0, 0, 0]); // we don't know the hash yet so fill in a fake
- bytes.push(BLOCK_TYPE_DOC);
-
- let mut chunk = Vec::new();
-
- actors.len().encode(&mut chunk)?;
-
- for a in actors.into_iter() {
- a.to_bytes().encode(&mut chunk)?;
- }
-
- heads.len().encode(&mut chunk)?;
- for head in heads.iter().sorted() {
- chunk.write_all(&head.0).unwrap();
- }
-
- chunk.extend(change_info);
- chunk.extend(ops_info);
-
- chunk.extend(change_bytes);
- chunk.extend(ops_bytes);
-
- leb128::write::unsigned(&mut bytes, chunk.len() as u64).unwrap();
-
- bytes.extend(&chunk);
-
- let hash_result = Sha256::digest(&bytes[CHUNK_START..bytes.len()]);
-
- bytes.splice(HASH_RANGE, hash_result[0..4].iter().copied());
-
- Ok(bytes)
-}
-
-impl From<amp::Change> for Change {
- fn from(value: amp::Change) -> Self {
- encode(&value)
- }
-}
-
-impl From<&amp::Change> for Change {
- fn from(value: &amp::Change) -> Self {
- encode(value)
- }
-}
-
-fn encode(change: &amp::Change) -> Change {
- let mut deps = change.deps.clone();
- deps.sort_unstable();
-
- let mut chunk = encode_chunk(change, &deps);
-
- let mut bytes = Vec::with_capacity(MAGIC_BYTES.len() + 4 + chunk.bytes.len());
-
- bytes.extend(&MAGIC_BYTES);
-
- bytes.extend(vec![0, 0, 0, 0]); // we don't know the hash yet so fill in a fake
-
- bytes.push(BLOCK_TYPE_CHANGE);
-
- leb128::write::unsigned(&mut bytes, chunk.bytes.len() as u64).unwrap();
-
- let body_start = bytes.len();
-
- increment_range(&mut chunk.body, bytes.len());
- increment_range(&mut chunk.message, bytes.len());
- increment_range(&mut chunk.extra_bytes, bytes.len());
- increment_range_map(&mut chunk.ops, bytes.len());
-
- bytes.extend(&chunk.bytes);
-
- let hash_result = Sha256::digest(&bytes[CHUNK_START..bytes.len()]);
- let hash: amp::ChangeHash = hash_result[..].try_into().unwrap();
-
- bytes.splice(HASH_RANGE, hash_result[0..4].iter().copied());
-
- // any time I make changes to the encoder/decoder it's a good idea
- // to run it through a round trip to detect errors the tests might not
- // catch
- // let c0 = Change::from_bytes(bytes.clone()).unwrap();
- // std::assert_eq!(c1, c0);
- // perhaps we should add something like this to the test suite
-
- let bytes = ChangeBytes::Uncompressed(bytes);
-
- Change {
- bytes,
- body_start,
- hash,
- seq: change.seq,
- start_op: change.start_op,
- time: change.time,
- actors: chunk.actors,
- message: chunk.message,
- deps,
- ops: chunk.ops,
- extra_bytes: chunk.extra_bytes,
- }
-}
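
> The commented-out round-trip check in `encode` above is easy to keep around as a reusable helper. A minimal sketch, assuming only items defined in this file (`Change` derives `PartialEq`, `raw_bytes` returns the encoded form, `from_bytes` runs the decoder); the helper name is illustrative, not part of the crate:

```rust
/// Re-decode an encoded change and compare it with the original.
/// Worth running after any edit to the encoder/decoder, since it catches
/// asymmetries that the column-level tests might miss.
#[cfg(test)]
fn assert_round_trips(change: &Change) {
    let reloaded = Change::from_bytes(change.raw_bytes().to_vec())
        .expect("encoded change should decode back to an equal Change");
    assert_eq!(change, &reloaded);
}
```
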
-
-struct ChunkIntermediate {
- bytes: Vec<u8>,
- body: Range<usize>,
- actors: Vec<ActorId>,
- message: Range<usize>,
- ops: HashMap<u32, Range<usize>>,
- extra_bytes: Range<usize>,
-}
-
-fn encode_chunk(change: &amp::Change, deps: &[amp::ChangeHash]) -> ChunkIntermediate {
- let mut bytes = Vec::new();
-
- // All these unwraps are okay because we're writing to an in-memory buffer so io errors should
- // not happen
-
- // encode deps
- deps.len().encode(&mut bytes).unwrap();
- for hash in deps.iter() {
- bytes.write_all(&hash.0).unwrap();
- }
-
- // encode first actor
- let mut actors = vec![change.actor_id.clone()];
- change.actor_id.to_bytes().encode(&mut bytes).unwrap();
-
- // encode seq, start_op, time, message
- change.seq.encode(&mut bytes).unwrap();
- change.start_op.encode(&mut bytes).unwrap();
- change.time.encode(&mut bytes).unwrap();
- let message = bytes.len() + 1;
- change.message.encode(&mut bytes).unwrap();
- let message = message..bytes.len();
-
- // encode ops into a side buffer - collect all other actors
- let (ops_buf, mut ops) = ColumnEncoder::encode_ops(&change.operations, &mut actors);
-
- // encode all other actors
- actors[1..].encode(&mut bytes).unwrap();
-
- // now we know how many bytes ops are offset by so we can adjust the ranges
- increment_range_map(&mut ops, bytes.len());
-
- // write out the ops
-
- bytes.write_all(&ops_buf).unwrap();
-
- // write out the extra bytes
- let extra_bytes = bytes.len()..(bytes.len() + change.extra_bytes.len());
- bytes.write_all(&change.extra_bytes).unwrap();
- let body = 0..bytes.len();
-
- ChunkIntermediate {
- bytes,
- body,
- actors,
- message,
- ops,
- extra_bytes,
- }
-}
-
-#[derive(PartialEq, Debug, Clone)]
-enum ChangeBytes {
- Compressed {
- compressed: Vec<u8>,
- uncompressed: Vec<u8>,
- },
- Uncompressed(Vec<u8>),
-}
-
-impl ChangeBytes {
- fn uncompressed(&self) -> &[u8] {
- match self {
- ChangeBytes::Compressed { uncompressed, .. } => &uncompressed[..],
- ChangeBytes::Uncompressed(b) => &b[..],
- }
- }
-
- fn compress(&mut self, body_start: usize) {
- match self {
- ChangeBytes::Compressed { .. } => {}
- ChangeBytes::Uncompressed(uncompressed) => {
- if uncompressed.len() > DEFLATE_MIN_SIZE {
- let mut result = Vec::with_capacity(uncompressed.len());
- result.extend(&uncompressed[0..8]);
- result.push(BLOCK_TYPE_DEFLATE);
- let mut deflater =
- DeflateEncoder::new(&uncompressed[body_start..], Compression::default());
- let mut deflated = Vec::new();
- let deflated_len = deflater.read_to_end(&mut deflated).unwrap();
- leb128::write::unsigned(&mut result, deflated_len as u64).unwrap();
- result.extend(&deflated[..]);
- *self = ChangeBytes::Compressed {
- compressed: result,
- uncompressed: std::mem::take(uncompressed),
- }
- }
- }
- }
- }
-
- fn raw(&self) -> &[u8] {
- match self {
- ChangeBytes::Compressed { compressed, .. } => &compressed[..],
- ChangeBytes::Uncompressed(b) => &b[..],
- }
- }
-}
-
-#[derive(PartialEq, Debug, Clone)]
-pub struct Change {
- bytes: ChangeBytes,
- body_start: usize,
- pub hash: amp::ChangeHash,
- pub seq: u64,
- pub start_op: u64,
- pub time: i64,
- message: Range<usize>,
- actors: Vec<ActorId>,
- pub deps: Vec<amp::ChangeHash>,
- ops: HashMap<u32, Range<usize>>,
- extra_bytes: Range<usize>,
-}
-
-impl Change {
- pub fn actor_id(&self) -> &ActorId {
- &self.actors[0]
- }
-
- #[instrument(level = "debug", skip(bytes))]
- pub fn load_document(bytes: &[u8]) -> Result<Vec<Change>, AutomergeError> {
- load_blocks(bytes)
- }
-
- pub fn from_bytes(bytes: Vec<u8>) -> Result<Change, decoding::Error> {
- decode_change(bytes)
- }
-
- pub fn is_empty(&self) -> bool {
- self.len() == 0
- }
-
- pub fn len(&self) -> usize {
- // TODO - this could be a lot more efficient
- self.iter_ops().count()
- }
-
- pub fn max_op(&self) -> u64 {
- self.start_op + (self.len() as u64) - 1
- }
-
- fn message(&self) -> Option<String> {
- let m = &self.bytes.uncompressed()[self.message.clone()];
- if m.is_empty() {
- None
- } else {
- std::str::from_utf8(m).map(ToString::to_string).ok()
- }
- }
-
- pub fn decode(&self) -> amp::Change {
- amp::Change {
- start_op: self.start_op,
- seq: self.seq,
- time: self.time,
- hash: Some(self.hash),
- message: self.message(),
- actor_id: self.actors[0].clone(),
- deps: self.deps.clone(),
- operations: self
- .iter_ops()
- .map(|op| amp::Op {
- action: op.action.clone(),
- obj: op.obj.clone(),
- key: op.key.clone(),
- pred: op.pred.clone(),
- insert: op.insert,
- })
- .collect(),
- extra_bytes: self.extra_bytes().into(),
- }
- }
-
- pub(crate) fn iter_ops(&self) -> OperationIterator {
- OperationIterator::new(self.bytes.uncompressed(), self.actors.as_slice(), &self.ops)
- }
-
- pub fn extra_bytes(&self) -> &[u8] {
- &self.bytes.uncompressed()[self.extra_bytes.clone()]
- }
-
- pub fn compress(&mut self) {
- self.bytes.compress(self.body_start);
- }
-
- pub fn raw_bytes(&self) -> &[u8] {
- self.bytes.raw()
- }
-}
-
-fn read_leb128(bytes: &mut &[u8]) -> Result<(usize, usize), decoding::Error> {
- let mut buf = &bytes[..];
- let val = leb128::read::unsigned(&mut buf)? as usize;
- let leb128_bytes = bytes.len() - buf.len();
- Ok((val, leb128_bytes))
-}
-
-fn read_slice<T: Decodable + Debug>(
- bytes: &[u8],
- cursor: &mut Range<usize>,
-) -> Result<T, decoding::Error> {
- let mut view = &bytes[cursor.clone()];
- let init_len = view.len();
- let val = T::decode::<&[u8]>(&mut view).ok_or(decoding::Error::NoDecodedValue);
- let bytes_read = init_len - view.len();
- *cursor = (cursor.start + bytes_read)..cursor.end;
- val
-}
-
-fn slice_bytes(bytes: &[u8], cursor: &mut Range<usize>) -> Result<Range<usize>, decoding::Error> {
- let (val, len) = read_leb128(&mut &bytes[cursor.clone()])?;
- let start = cursor.start + len;
- let end = start + val;
- *cursor = end..cursor.end;
- Ok(start..end)
-}
-
-fn increment_range(range: &mut Range<usize>, len: usize) {
- range.end += len;
- range.start += len;
-}
-
-fn increment_range_map(ranges: &mut HashMap<u32, Range<usize>>, len: usize) {
- for range in ranges.values_mut() {
- increment_range(range, len);
- }
-}
-
-fn export_objid(id: &ObjId, actors: &IndexedCache<ActorId>) -> amp::ObjectId {
- if id == &ObjId::root() {
- amp::ObjectId::Root
- } else {
- export_opid(&id.0, actors).into()
- }
-}
-
-fn export_elemid(id: &ElemId, actors: &IndexedCache<ActorId>) -> amp::ElementId {
- if id == &HEAD {
- amp::ElementId::Head
- } else {
- export_opid(&id.0, actors).into()
- }
-}
-
-fn export_opid(id: &OpId, actors: &IndexedCache<ActorId>) -> amp::OpId {
- amp::OpId(id.0, actors.get(id.1).clone())
-}
-
-fn export_op(op: &Op, actors: &IndexedCache<ActorId>, props: &IndexedCache<String>) -> amp::Op {
- let action = op.action.clone();
- let key = match &op.key {
- Key::Map(n) => amp::Key::Map(props.get(*n).clone().into()),
- Key::Seq(id) => amp::Key::Seq(export_elemid(id, actors)),
- };
- let obj = export_objid(&op.obj, actors);
- let pred = op.pred.iter().map(|id| export_opid(id, actors)).collect();
- amp::Op {
- action,
- obj,
- insert: op.insert,
- pred,
- key,
- }
-}
-
-pub(crate) fn export_change(
- change: &Transaction,
- actors: &IndexedCache<ActorId>,
- props: &IndexedCache<String>,
-) -> Change {
- amp::Change {
- actor_id: actors.get(change.actor).clone(),
- seq: change.seq,
- start_op: change.start_op,
- time: change.time,
- deps: change.deps.clone(),
- message: change.message.clone(),
- hash: change.hash,
- operations: change
- .operations
- .iter()
- .map(|op| export_op(op, actors, props))
- .collect(),
- extra_bytes: change.extra_bytes.clone(),
- }
- .into()
-}
-
-pub fn decode_change(bytes: Vec<u8>) -> Result<Change, decoding::Error> {
- let (chunktype, body) = decode_header_without_hash(&bytes)?;
- let bytes = if chunktype == BLOCK_TYPE_DEFLATE {
- decompress_chunk(0..PREAMBLE_BYTES, body, bytes)?
- } else {
- ChangeBytes::Uncompressed(bytes)
- };
-
- let (chunktype, hash, body) = decode_header(bytes.uncompressed())?;
-
- if chunktype != BLOCK_TYPE_CHANGE {
- return Err(decoding::Error::WrongType {
- expected_one_of: vec![BLOCK_TYPE_CHANGE],
- found: chunktype,
- });
- }
-
- let body_start = body.start;
- let mut cursor = body;
-
- let deps = decode_hashes(bytes.uncompressed(), &mut cursor)?;
-
- let actor =
- ActorId::from(&bytes.uncompressed()[slice_bytes(bytes.uncompressed(), &mut cursor)?]);
- let seq = read_slice(bytes.uncompressed(), &mut cursor)?;
- let start_op = read_slice(bytes.uncompressed(), &mut cursor)?;
- let time = read_slice(bytes.uncompressed(), &mut cursor)?;
- let message = slice_bytes(bytes.uncompressed(), &mut cursor)?;
-
- let actors = decode_actors(bytes.uncompressed(), &mut cursor, Some(actor))?;
-
- let ops_info = decode_column_info(bytes.uncompressed(), &mut cursor, false)?;
- let ops = decode_columns(&mut cursor, &ops_info);
-
- Ok(Change {
- bytes,
- body_start,
- hash,
- seq,
- start_op,
- time,
- actors,
- message,
- deps,
- ops,
- extra_bytes: cursor,
- })
-}
-
-fn decompress_chunk(
- preamble: Range<usize>,
- body: Range<usize>,
- compressed: Vec<u8>,
-) -> Result<ChangeBytes, decoding::Error> {
- let mut decoder = DeflateDecoder::new(&compressed[body]);
- let mut decompressed = Vec::new();
- decoder.read_to_end(&mut decompressed)?;
- let mut result = Vec::with_capacity(decompressed.len() + preamble.len());
- result.extend(&compressed[preamble]);
- result.push(BLOCK_TYPE_CHANGE);
- leb128::write::unsigned::<Vec<u8>>(&mut result, decompressed.len() as u64).unwrap();
- result.extend(decompressed);
- Ok(ChangeBytes::Compressed {
- uncompressed: result,
- compressed,
- })
-}
-
-fn decode_hashes(
- bytes: &[u8],
- cursor: &mut Range<usize>,
-) -> Result<Vec<amp::ChangeHash>, decoding::Error> {
- let num_hashes = read_slice(bytes, cursor)?;
- let mut hashes = Vec::with_capacity(num_hashes);
- for _ in 0..num_hashes {
- let hash = cursor.start..(cursor.start + HASH_BYTES);
- *cursor = hash.end..cursor.end;
- hashes.push(
- bytes
- .get(hash)
- .ok_or(decoding::Error::NotEnoughBytes)?
- .try_into()
- .map_err(InvalidChangeError::from)?,
- );
- }
- Ok(hashes)
-}
-
-fn decode_actors(
- bytes: &[u8],
- cursor: &mut Range<usize>,
- first: Option<ActorId>,
-) -> Result<Vec<ActorId>, decoding::Error> {
- let num_actors: usize = read_slice(bytes, cursor)?;
- let mut actors = Vec::with_capacity(num_actors + 1);
- if let Some(actor) = first {
- actors.push(actor);
- }
- for _ in 0..num_actors {
- actors.push(ActorId::from(
- bytes
- .get(slice_bytes(bytes, cursor)?)
- .ok_or(decoding::Error::NotEnoughBytes)?,
- ));
- }
- Ok(actors)
-}
-
-fn decode_column_info(
- bytes: &[u8],
- cursor: &mut Range<usize>,
- allow_compressed_column: bool,
-) -> Result<Vec<(u32, usize)>, decoding::Error> {
- let num_columns = read_slice(bytes, cursor)?;
- let mut columns = Vec::with_capacity(num_columns);
- let mut last_id = 0;
- for _ in 0..num_columns {
- let id: u32 = read_slice(bytes, cursor)?;
- if (id & !COLUMN_TYPE_DEFLATE) <= (last_id & !COLUMN_TYPE_DEFLATE) {
- return Err(decoding::Error::ColumnsNotInAscendingOrder {
- last: last_id,
- found: id,
- });
- }
- if id & COLUMN_TYPE_DEFLATE != 0 && !allow_compressed_column {
- return Err(decoding::Error::ChangeContainedCompressedColumns);
- }
- last_id = id;
- let length = read_slice(bytes, cursor)?;
- columns.push((id, length));
- }
- Ok(columns)
-}
-
-fn decode_columns(
- cursor: &mut Range<usize>,
- columns: &[(u32, usize)],
-) -> HashMap<u32, Range<usize>> {
- let mut ops = HashMap::new();
- for (id, length) in columns {
- let start = cursor.start;
- let end = start + length;
- *cursor = end..cursor.end;
- ops.insert(*id, start..end);
- }
- ops
-}
-
-fn decode_header(bytes: &[u8]) -> Result<(u8, amp::ChangeHash, Range<usize>), decoding::Error> {
- let (chunktype, body) = decode_header_without_hash(bytes)?;
-
- let calculated_hash = Sha256::digest(&bytes[PREAMBLE_BYTES..]);
-
- let checksum = &bytes[4..8];
- if checksum != &calculated_hash[0..4] {
- return Err(decoding::Error::InvalidChecksum {
- found: checksum.try_into().unwrap(),
- calculated: calculated_hash[0..4].try_into().unwrap(),
- });
- }
-
- let hash = calculated_hash[..]
- .try_into()
- .map_err(InvalidChangeError::from)?;
-
- Ok((chunktype, hash, body))
-}
-
-fn decode_header_without_hash(bytes: &[u8]) -> Result<(u8, Range<usize>), decoding::Error> {
- if bytes.len() <= HEADER_BYTES {
- return Err(decoding::Error::NotEnoughBytes);
- }
-
- if bytes[0..4] != MAGIC_BYTES {
- return Err(decoding::Error::WrongMagicBytes);
- }
-
- let (val, len) = read_leb128(&mut &bytes[HEADER_BYTES..])?;
- let body = (HEADER_BYTES + len)..(HEADER_BYTES + len + val);
- if bytes.len() != body.end {
- return Err(decoding::Error::WrongByteLength {
- expected: body.end,
- found: bytes.len(),
- });
- }
-
- let chunktype = bytes[PREAMBLE_BYTES];
-
- Ok((chunktype, body))
-}
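
> For reference, the framing that `decode_header_without_hash` walks over is: 4 magic bytes, a 4-byte checksum (the first four bytes of the SHA-256 over everything after the preamble), one chunk-type byte, a LEB128-encoded body length, then the body. A standalone sketch of that layout, for illustration only (the function name and the `Option`-based error handling are simplifications, not part of the crate):

```rust
use std::ops::Range;

const MAGIC_BYTES: [u8; 4] = [0x85, 0x6f, 0x4a, 0x83];
const PREAMBLE_BYTES: usize = 8; // magic (4) + checksum (4)

/// Return (chunk_type, body_range) for a well-formed chunk, mirroring the
/// shape of `decode_header_without_hash`; malformed input just yields None.
fn chunk_layout(bytes: &[u8]) -> Option<(u8, Range<usize>)> {
    if bytes.len() <= PREAMBLE_BYTES + 1 || bytes[0..4] != MAGIC_BYTES {
        return None;
    }
    // bytes[4..8] hold the truncated SHA-256 checksum, verified elsewhere.
    let chunk_type = bytes[PREAMBLE_BYTES];
    let mut rest = &bytes[PREAMBLE_BYTES + 1..];
    let body_len = leb128::read::unsigned(&mut rest).ok()? as usize;
    let body_start = bytes.len() - rest.len();
    let body = body_start..body_start + body_len;
    if body.end != bytes.len() {
        return None; // trailing or missing bytes
    }
    Some((chunk_type, body))
}
```
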
-
-fn load_blocks(bytes: &[u8]) -> Result<Vec<Change>, AutomergeError> {
- let mut changes = Vec::new();
- for slice in split_blocks(bytes)? {
- decode_block(slice, &mut changes)?;
- }
- Ok(changes)
-}
-
-fn split_blocks(bytes: &[u8]) -> Result<Vec<&[u8]>, decoding::Error> {
- // split off all valid blocks - ignore the rest if it's corrupted or truncated
- let mut blocks = Vec::new();
- let mut cursor = bytes;
- while let Some(block) = pop_block(cursor)? {
- blocks.push(&cursor[block.clone()]);
- if cursor.len() <= block.end {
- break;
- }
- cursor = &cursor[block.end..];
- }
- Ok(blocks)
-}
-
-fn pop_block(bytes: &[u8]) -> Result